Merge in first OpenGL ES 3.2 tests

Change-Id: I5c6f8bc63da6203e36504cc868a746d7a90cf5e6
diff --git a/Android.mk b/Android.mk
index 1619790..808a7b5 100644
--- a/Android.mk
+++ b/Android.mk
@@ -33,6 +33,7 @@
 	executor/xeXMLWriter.cpp \
 	framework/common/tcuApp.cpp \
 	framework/common/tcuArray.cpp \
+	framework/common/tcuAstcUtil.cpp \
 	framework/common/tcuBilinearImageCompare.cpp \
 	framework/common/tcuCommandLine.cpp \
 	framework/common/tcuCompressedTexture.cpp \
@@ -81,6 +82,7 @@
 	framework/delibs/debase/deRandom.c \
 	framework/delibs/debase/deString.c \
 	framework/delibs/debase/deSha1.c \
+	framework/delibs/decpp/deAppendList.cpp \
 	framework/delibs/decpp/deArrayBuffer.cpp \
 	framework/delibs/decpp/deArrayUtil.cpp \
 	framework/delibs/decpp/deBlockBuffer.cpp \
@@ -230,7 +232,6 @@
 	framework/randomshaders/rsgShader.cpp \
 	framework/randomshaders/rsgShaderGenerator.cpp \
 	framework/randomshaders/rsgStatement.cpp \
-	framework/randomshaders/rsgTest.cpp \
 	framework/randomshaders/rsgToken.cpp \
 	framework/randomshaders/rsgUtils.cpp \
 	framework/randomshaders/rsgVariable.cpp \
@@ -660,6 +661,7 @@
 	modules/glshared/glsTextureTestUtil.cpp \
 	modules/glshared/glsUniformBlockCase.cpp \
 	modules/glshared/glsVertexArrayTests.cpp \
+	modules/internal/ditAstcTests.cpp \
 	modules/internal/ditBuildInfoTests.cpp \
 	modules/internal/ditSRGB8ConversionTest.cpp \
 	modules/internal/ditDelibsTests.cpp \
@@ -671,7 +673,129 @@
 	modules/internal/ditTestPackage.cpp \
 	modules/internal/ditSeedBuilderTests.cpp \
 	modules/internal/ditTestPackageEntry.cpp \
-	modules/internal/ditTextureFormatTests.cpp
+	modules/internal/ditTextureFormatTests.cpp \
+	modules/internal/ditVulkanTests.cpp \
+	external/vulkancts/modules/vulkan/draw/vktDrawIndexedTest.cpp \
+	external/vulkancts/modules/vulkan/draw/vktDrawTests.cpp \
+	external/vulkancts/modules/vulkan/draw/vktDrawBufferObjectUtil.cpp \
+	external/vulkancts/modules/vulkan/draw/vktDrawSimpleTest.cpp \
+	external/vulkancts/modules/vulkan/draw/vktDrawBaseClass.cpp \
+	external/vulkancts/modules/vulkan/draw/vktDrawImageObjectUtil.cpp \
+	external/vulkancts/modules/vulkan/draw/vktDrawCreateInfoUtil.cpp \
+	external/vulkancts/modules/vulkan/draw/vktDrawIndirectTest.cpp \
+	external/vulkancts/modules/vulkan/vktInfoTests.cpp \
+	external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderMatrixTests.cpp \
+	external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderStructTests.cpp \
+	external/vulkancts/modules/vulkan/shaderrender/vktShaderRender.cpp \
+	external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderSwitchTests.cpp \
+	external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderOperatorTests.cpp \
+	external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderIndexingTests.cpp \
+	external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderLoopTests.cpp \
+	external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDiscardTests.cpp \
+	external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderReturnTests.cpp \
+	external/vulkancts/modules/vulkan/vktRenderPassTests.cpp \
+	external/vulkancts/modules/vulkan/vktShaderLibrary.cpp \
+	external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBaseClass.cpp \
+	external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateRSTests.cpp \
+	external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBufferObjectUtil.cpp \
+	external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCreateInfoUtil.cpp \
+	external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateTests.cpp \
+	external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateVPTests.cpp \
+	external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCBTests.cpp \
+	external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateDSTests.cpp \
+	external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateGeneralTests.cpp \
+	external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateImageObjectUtil.cpp \
+	external/vulkancts/modules/vulkan/vktTestPackage.cpp \
+	external/vulkancts/modules/vulkan/shaderexecutor/vktShaderPackingFunctionTests.cpp \
+	external/vulkancts/modules/vulkan/shaderexecutor/vktShaderIntegerFunctionTests.cpp \
+	external/vulkancts/modules/vulkan/shaderexecutor/vktShaderExecutor.cpp \
+	external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinPrecisionTests.cpp \
+	external/vulkancts/modules/vulkan/shaderexecutor/vktOpaqueTypeIndexingTests.cpp \
+	external/vulkancts/modules/vulkan/shaderexecutor/vktShaderCommonFunctionTests.cpp \
+	external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinTests.cpp \
+	external/vulkancts/modules/vulkan/vktTestCase.cpp \
+	external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutCase.cpp \
+	external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutTests.cpp \
+	external/vulkancts/modules/vulkan/query_pool/vktQueryPoolBufferObjectUtil.cpp \
+	external/vulkancts/modules/vulkan/query_pool/vktQueryPoolImageObjectUtil.cpp \
+	external/vulkancts/modules/vulkan/query_pool/vktQueryPoolTests.cpp \
+	external/vulkancts/modules/vulkan/query_pool/vktQueryPoolCreateInfoUtil.cpp \
+	external/vulkancts/modules/vulkan/query_pool/vktQueryPoolOcclusionTests.cpp \
+	external/vulkancts/modules/vulkan/memory/vktMemoryPipelineBarrierTests.cpp \
+	external/vulkancts/modules/vulkan/memory/vktMemoryAllocationTests.cpp \
+	external/vulkancts/modules/vulkan/memory/vktMemoryMappingTests.cpp \
+	external/vulkancts/modules/vulkan/memory/vktMemoryTests.cpp \
+	external/vulkancts/modules/vulkan/compute/vktComputeTests.cpp \
+	external/vulkancts/modules/vulkan/compute/vktComputeBasicComputeShaderTests.cpp \
+	external/vulkancts/modules/vulkan/compute/vktComputeIndirectComputeDispatchTests.cpp \
+	external/vulkancts/modules/vulkan/compute/vktComputeShaderBuiltinVarTests.cpp \
+	external/vulkancts/modules/vulkan/compute/vktComputeTestsUtil.cpp \
+	external/vulkancts/modules/vulkan/ubo/vktUniformBlockCase.cpp \
+	external/vulkancts/modules/vulkan/ubo/vktRandomUniformBlockCase.cpp \
+	external/vulkancts/modules/vulkan/ubo/vktUniformBlockTests.cpp \
+	external/vulkancts/modules/vulkan/vktTestCaseUtil.cpp \
+	external/vulkancts/modules/vulkan/vktTestGroupUtil.cpp \
+	external/vulkancts/modules/vulkan/image/vktImageSizeTests.cpp \
+	external/vulkancts/modules/vulkan/image/vktImageTestsUtil.cpp \
+	external/vulkancts/modules/vulkan/image/vktImageTexture.cpp \
+	external/vulkancts/modules/vulkan/image/vktImageQualifiersTests.cpp \
+	external/vulkancts/modules/vulkan/image/vktImageTests.cpp \
+	external/vulkancts/modules/vulkan/image/vktImageLoadStoreTests.cpp \
+	external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmInstructionTests.cpp \
+	external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmTests.cpp \
+	external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderTestUtil.cpp \
+	external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderCase.cpp \
+	external/vulkancts/modules/vulkan/vktTestPackageEntry.cpp \
+	external/vulkancts/modules/vulkan/binding_model/vktBindingShaderAccessTests.cpp \
+	external/vulkancts/modules/vulkan/binding_model/vktBindingModelTests.cpp \
+	external/vulkancts/modules/vulkan/api/vktApiComputeInstanceResultBuffer.cpp \
+	external/vulkancts/modules/vulkan/api/vktApiCommandBuffersTests.cpp \
+	external/vulkancts/modules/vulkan/api/vktApiBufferTests.cpp \
+	external/vulkancts/modules/vulkan/api/vktApiBufferViewAccessTests.cpp \
+	external/vulkancts/modules/vulkan/api/vktApiSmokeTests.cpp \
+	external/vulkancts/modules/vulkan/api/vktApiObjectManagementTests.cpp \
+	external/vulkancts/modules/vulkan/api/vktApiFeatureInfo.cpp \
+	external/vulkancts/modules/vulkan/api/vktApiBufferViewCreateTests.cpp \
+	external/vulkancts/modules/vulkan/api/vktApiCopiesAndBlittingTests.cpp \
+	external/vulkancts/modules/vulkan/api/vktApiDeviceInitializationTests.cpp \
+	external/vulkancts/modules/vulkan/api/vktApiTests.cpp \
+	external/vulkancts/modules/vulkan/api/vktApiBufferComputeInstance.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineTimestampTests.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineImageUtil.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineMultisampleTests.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineImageViewTests.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineDepthTests.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineInputAssemblyTests.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexInputTests.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexUtil.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelinePushConstantTests.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineReferenceRenderer.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineClearUtil.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineImageTests.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineSamplerTests.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineImageSamplingInstance.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineBlendTests.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineStencilTests.cpp \
+	external/vulkancts/modules/vulkan/pipeline/vktPipelineTests.cpp \
+	external/vulkancts/framework/vulkan/vkBuilderUtil.cpp \
+	external/vulkancts/framework/vulkan/vkSpirVProgram.cpp \
+	external/vulkancts/framework/vulkan/vkPrograms.cpp \
+	external/vulkancts/framework/vulkan/vkApiVersion.cpp \
+	external/vulkancts/framework/vulkan/vkBinaryRegistry.cpp \
+	external/vulkancts/framework/vulkan/vkPlatform.cpp \
+	external/vulkancts/framework/vulkan/vkDeviceUtil.cpp \
+	external/vulkancts/framework/vulkan/vkGlslToSpirV.cpp \
+	external/vulkancts/framework/vulkan/vkDefs.cpp \
+	external/vulkancts/framework/vulkan/vkMemUtil.cpp \
+	external/vulkancts/framework/vulkan/vkAllocationCallbackUtil.cpp \
+	external/vulkancts/framework/vulkan/vkRefUtil.cpp \
+	external/vulkancts/framework/vulkan/vkSpirVAsm.cpp \
+	external/vulkancts/framework/vulkan/vkNullDriver.cpp \
+	external/vulkancts/framework/vulkan/vkStrUtil.cpp \
+	external/vulkancts/framework/vulkan/vkTypeUtil.cpp \
+	external/vulkancts/framework/vulkan/vkImageUtil.cpp \
+	external/vulkancts/framework/vulkan/vkQueryUtil.cpp \
+	external/vulkancts/framework/vulkan/vkRef.cpp
 
 LOCAL_C_INCLUDES := \
 	frameworks/native/opengl/include \
@@ -710,7 +834,23 @@
 	$(deqp_dir)/modules/glshared \
 	$(deqp_dir)/modules/glusecases \
 	$(deqp_dir)/executor \
-	$(deqp_dir)/execserver
+	$(deqp_dir)/execserver \
+	$(deqp_dir)/external/vulkancts/framework/vulkan \
+	$(deqp_dir)/external/vulkancts/modules/vulkan \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/api \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/binding_model \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/compute \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/draw \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/dynamic_state \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/image \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/memory \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/pipeline \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/query_pool \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/shaderexecutor \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/shaderrender \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/spirv_assembly \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/ssbo \
+	$(deqp_dir)/external/vulkancts/modules/vulkan/ubo
 
 deqp_compile_flags := \
 	-DDEQP_SUPPORT_EGL=1 \
@@ -733,12 +873,15 @@
 		liblog \
 		libm \
 		libc \
-		libpng \
 		libz \
 		libdl
 
+LOCAL_STATIC_LIBRARIES := \
+		libpng_ndk \
+
 LOCAL_CFLAGS += \
-	$(deqp_compile_flags)
+	$(deqp_compile_flags) \
+	-Wno-error=date-time
 
 LOCAL_SDK_VERSION := 9
 LOCAL_CPPFLAGS += -Wno-non-virtual-dtor
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 9760d72..da4b5bf 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -66,7 +66,7 @@
 include_directories(${ZLIB_INCLUDE_PATH})
 
 # libpng
-find_path(PNG_INCLUDE_PATH	libpng.h)
+find_path(PNG_INCLUDE_PATH	png.h)
 find_library(PNG_LIBRARY	png)
 
 if (NOT PNG_INCLUDE_PATH OR NOT PNG_LIBRARY)
@@ -75,6 +75,12 @@
 	# \note PNG_LIBRARY and PNG_INCLUDE_PATH are promoted from external/libpng/CMakeLists.txt
 endif ()
 
+# glslang
+add_subdirectory(external/glslang)
+
+# spirv-tools
+add_subdirectory(external/spirv-tools)
+
 include_directories(${PNG_INCLUDE_PATH})
 
 message(STATUS "DEQP_TARGET_NAME        = ${DEQP_TARGET_NAME}")
@@ -186,6 +192,7 @@
 	framework/randomshaders
 	framework/egl
 	framework/egl/wrapper
+	external/vulkancts/framework/vulkan
 	)
 
 if (DE_OS_IS_ANDROID OR DE_OS_IS_IOS)
@@ -268,12 +275,14 @@
 endmacro (add_data_file)
 
 add_subdirectory(framework)
+add_subdirectory(external/vulkancts/framework/vulkan)
 
 if (DE_COMPILER_IS_MSC)
 	add_compile_options(/bigobj) # Required by glsBuiltinPrecisionTests.cpp
 endif ()
 
 add_subdirectory(modules)
+add_subdirectory(external/vulkancts/modules/vulkan)
 
 # Single-binary targets
 if (DE_OS_IS_ANDROID)
diff --git a/android/cts/master/com.drawelements.deqp.egl.xml b/android/cts/master/com.drawelements.deqp.egl.xml
index 47f672b..2d57fae 100644
--- a/android/cts/master/com.drawelements.deqp.egl.xml
+++ b/android/cts/master/com.drawelements.deqp.egl.xml
@@ -3010,21 +3010,6 @@
 					<Test name="create_image_gles2_renderbuffer_stencil_index8">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="create_image_gles2_android_native_rgb565">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="create_image_gles2_android_native_rgb8">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="create_image_gles2_android_native_rgba4">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="create_image_gles2_android_native_rgb5_a1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="create_image_gles2_android_native_rgba8">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="image_target_gles2_tex2d">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
@@ -3159,36 +3144,6 @@
 					<Test name="gles2_renderbuffer_stencil_stencil_buffer">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="gles2_android_native_rgb565_texture">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgb565_read_pixels">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgb8_texture">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgb8_read_pixels">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgba4_texture">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgba4_read_pixels">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgb5_a1_texture">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgb5_a1_read_pixels">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgba8_texture">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgba8_read_pixels">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 				</TestCase>
 				<TestCase name="modify">
 					<Test name="tex_rgb8_tex_subimage_rgb8">
@@ -3284,60 +3239,6 @@
 					<Test name="renderbuffer_stencil_renderbuffer_clear_stencil">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="android_native_rgb565_tex_subimage_rgb8">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgb565_tex_subimage_rgb565">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgb565_renderbuffer_clear_color">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgb8_tex_subimage_rgb8">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgb8_tex_subimage_rgb565">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgb8_renderbuffer_clear_color">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgba4_tex_subimage_rgba8">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgba4_tex_subimage_rgba5_a1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgba4_tex_subimage_rgba4">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgba4_renderbuffer_clear_color">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgb5_a1_tex_subimage_rgba8">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgb5_a1_tex_subimage_rgba5_a1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgb5_a1_tex_subimage_rgba4">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgb5_a1_renderbuffer_clear_color">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgba8_tex_subimage_rgba8">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgba8_tex_subimage_rgba5_a1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgba8_tex_subimage_rgba4">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="android_native_rgba8_renderbuffer_clear_color">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 				</TestCase>
 				<TestCase name="render_multiple_contexts">
 					<Test name="gles2_texture_rgb8_texture">
@@ -3466,36 +3367,6 @@
 					<Test name="gles2_renderbuffer_stencil_stencil_buffer">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="gles2_android_native_rgb565_texture">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgb565_read_pixels">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgb8_texture">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgb8_read_pixels">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgba4_texture">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgba4_read_pixels">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgb5_a1_texture">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgb5_a1_read_pixels">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgba8_texture">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gles2_android_native_rgba8_read_pixels">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 				</TestCase>
 			</TestSuite>
 			<TestSuite name="sharing">
@@ -7559,9 +7430,6 @@
 				<Test name="get_configs">
 					<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 				</Test>
-				<Test name="get_display">
-					<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-				</Test>
 				<Test name="initialize">
 					<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 				</Test>
diff --git a/android/cts/master/com.drawelements.deqp.gles2.xml b/android/cts/master/com.drawelements.deqp.gles2.xml
index 88e9dc8..b24db87 100644
--- a/android/cts/master/com.drawelements.deqp.gles2.xml
+++ b/android/cts/master/com.drawelements.deqp.gles2.xml
@@ -3389,6 +3389,15 @@
 						<Test name="mixed_if_elseif_else_fragment">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
+						<Test name="constant_conditional_assignment_to_matrix">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+						<Test name="input_conditional_assignment_to_matrix">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+						<Test name="uniform_conditional_assignment_to_matrix">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
 					</TestCase>
 					<TestCase name="invalid_if">
 						<Test name="missing_parenthesis_vertex">
@@ -5236,6 +5245,9 @@
 						<Test name="struct_nested_struct_fragment">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
+						<Test name="struct_constructor_highp_in_fragment">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
 					</TestCase>
 					<TestCase name="qualifiers">
 						<Test name="in_float_vertex">
@@ -8901,6 +8913,11 @@
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
 					</TestCase>
+					<TestCase name="custom">
+						<Test name="continue_in_fragment_for_loop">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+					</TestCase>
 				</TestSuite>
 				<TestSuite name="operator">
 					<TestSuite name="unary_operator">
@@ -26601,9 +26618,6 @@
 					<Test name="clamp_clamp_nearest_pot_rgb888">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="clamp_clamp_nearest_pot_rgba4444">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="clamp_clamp_nearest_pot_l8">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
@@ -26673,9 +26687,6 @@
 					<Test name="repeat_mirror_nearest_pot_rgb888">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="repeat_mirror_nearest_pot_rgba4444">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="repeat_mirror_nearest_pot_l8">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
@@ -26709,57 +26720,30 @@
 					<Test name="mirror_mirror_linear_pot_rgba8888">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="clamp_clamp_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="clamp_clamp_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="clamp_repeat_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="clamp_repeat_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="clamp_mirror_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="clamp_mirror_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="repeat_clamp_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="repeat_clamp_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="repeat_repeat_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="repeat_repeat_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="repeat_mirror_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="repeat_mirror_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="mirror_clamp_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="mirror_clamp_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="mirror_repeat_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="mirror_repeat_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="mirror_mirror_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="mirror_mirror_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
@@ -27863,9 +27847,6 @@
 							<Test name="8">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
-							<Test name="9">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							</Test>
 						</TestCase>
 						<TestCase name="mixed">
 							<Test name="3">
diff --git a/android/cts/master/com.drawelements.deqp.gles3.xml b/android/cts/master/com.drawelements.deqp.gles3.xml
index 7238e96..ce42d24 100644
--- a/android/cts/master/com.drawelements.deqp.gles3.xml
+++ b/android/cts/master/com.drawelements.deqp.gles3.xml
@@ -3795,6 +3795,18 @@
 						<Test name="pragma_macro_exp_fragment">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
+						<Test name="pragma_unrecognized_debug_vertex">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+						<Test name="pragma_unrecognized_debug_fragment">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+						<Test name="pragma_unrecognized_token_vertex">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+						<Test name="pragma_unrecognized_token_fragment">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
 					</TestCase>
 					<TestCase name="extensions">
 						<Test name="basic_vertex">
@@ -8682,6 +8694,15 @@
 						<Test name="mixed_if_elseif_else_fragment">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
+						<Test name="constant_conditional_assignment_to_matrix">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+						<Test name="input_conditional_assignment_to_matrix">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+						<Test name="uniform_conditional_assignment_to_matrix">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
 					</TestCase>
 					<TestCase name="invalid_if">
 						<Test name="missing_parenthesis_vertex">
@@ -11024,6 +11045,9 @@
 						<Test name="struct_nested_struct_fragment">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
+						<Test name="struct_constructor_highp_in_fragment">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
 					</TestCase>
 					<TestCase name="qualifiers">
 						<Test name="in_float_vertex">
@@ -17873,6 +17897,11 @@
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
 					</TestCase>
+					<TestCase name="custom">
+						<Test name="continue_in_fragment_for_loop">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+					</TestCase>
 				</TestSuite>
 				<TestSuite name="operator">
 					<TestSuite name="unary_operator">
@@ -46564,6 +46593,7 @@
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
+						<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 					</Test>
 					<Test name="pointcoord">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
@@ -47301,6 +47331,9 @@
 						<Test name="sampler3d_fixed_fragment">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
+						<Test name="sampler3d_float_vertex">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
 						<Test name="sampler3d_float_fragment">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
@@ -92550,6 +92583,9 @@
 					<Test name="texture_levels">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
+					<Test name="attachment_query_default_fbo">
+						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+					</Test>
 					<Test name="attachment_query_empty_fbo">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
@@ -127651,6 +127687,9 @@
 					<Test name="compressedtexsubimage2d_invalid_buffer_target">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
+					<Test name="teximage3d">
+						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+					</Test>
 					<Test name="teximage3d_neg_level">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
@@ -128948,6 +128987,26 @@
 			</TestSuite>
 			<TestSuite name="dither">
 				<TestCase name="disabled">
+					<Test name="gradient_white">
+						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						<TestInstance glconfig="rgb565d0s0ms0" rotation="unspecified" surfacetype="window"/>
+					</Test>
+					<Test name="gradient_red">
+						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						<TestInstance glconfig="rgb565d0s0ms0" rotation="unspecified" surfacetype="window"/>
+					</Test>
+					<Test name="gradient_green">
+						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						<TestInstance glconfig="rgb565d0s0ms0" rotation="unspecified" surfacetype="window"/>
+					</Test>
+					<Test name="gradient_blue">
+						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						<TestInstance glconfig="rgb565d0s0ms0" rotation="unspecified" surfacetype="window"/>
+					</Test>
+					<Test name="gradient_alpha">
+						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						<TestInstance glconfig="rgb565d0s0ms0" rotation="unspecified" surfacetype="window"/>
+					</Test>
 					<Test name="unicolored_quad_white">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						<TestInstance glconfig="rgb565d0s0ms0" rotation="unspecified" surfacetype="window"/>
@@ -134557,9 +134616,6 @@
 							</Test>
 						</TestCase>
 						<TestCase name="buffer">
-							<Test name="index_byte">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="index_short">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
diff --git a/android/cts/master/com.drawelements.deqp.gles31.xml b/android/cts/master/com.drawelements.deqp.gles31.xml
index df29b69..75c65a5 100644
--- a/android/cts/master/com.drawelements.deqp.gles31.xml
+++ b/android/cts/master/com.drawelements.deqp.gles31.xml
@@ -12733,6 +12733,149 @@
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
 					</TestCase>
+					<TestSuite name="uniform">
+						<TestCase name="basic">
+							<Test name="precision_conflict_1">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="precision_conflict_2">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="precision_conflict_3">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="precision_conflict_4">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="struct">
+							<Test name="basic">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="vertex_only">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="fragment_only">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="partial">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="vec4">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="vertex_only_vec4">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="fragment_only_vec4">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="partial_vec4">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="vec4_vec3">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="vertex_only_vec4_vec3">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="fragment_only_vec4_vec3">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="partial_vec4_vec3">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="vec4_float">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="vertex_only_vec4_float">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="fragment_only_vec4_float">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="partial_vec4_float">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="partial_vec4_struct">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="partial_vec4_vec3_struct">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="partial_vec2_vec3">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="partial_vec2_int">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="partial_int_float">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="partial_bvec2_vec2">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="partial_ivec2_vec2">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="partial_ivec2_ivec2">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="type_conflict_1">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="type_conflict_2">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="type_conflict_3">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="precision_conflict_1">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="precision_conflict_2">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="precision_conflict_3">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="precision_conflict_4">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="light_struct_highp">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="light_struct_mediump">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="block">
+							<Test name="differing_precision">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="type_mismatch">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="members_mismatch">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="layout_qualifier_mismatch_1">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="layout_qualifier_mismatch_2">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="layout_qualifier_mismatch_3">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="layout_qualifier_mismatch_4">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="layout_qualifier_mismatch_5">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+					</TestSuite>
 				</TestSuite>
 				<TestSuite name="builtin_constants">
 					<TestCase name="core">
@@ -13030,6 +13173,15 @@
 						<Test name="lines_4_samples_fwidth">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
+						<Test name="wide_lines_4_samples_dfdx">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+						<Test name="wide_lines_4_samples_dfdy">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+						<Test name="wide_lines_4_samples_fwidth">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
 						<Test name="points_4_samples_dfdx">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
@@ -13066,6 +13218,15 @@
 						<Test name="lines_8_samples_fwidth">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
+						<Test name="wide_lines_8_samples_dfdx">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+						<Test name="wide_lines_8_samples_dfdy">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+						<Test name="wide_lines_8_samples_fwidth">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
 						<Test name="points_8_samples_dfdx">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
@@ -13102,6 +13263,15 @@
 						<Test name="lines_max_samples_fwidth">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
+						<Test name="wide_lines_max_samples_dfdx">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+						<Test name="wide_lines_max_samples_dfdy">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
+						<Test name="wide_lines_max_samples_fwidth">
+							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+						</Test>
 						<Test name="points_max_samples_dfdx">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 						</Test>
@@ -70477,30 +70647,6 @@
 				<TestSuite name="lines">
 					<TestSuite name="global_state">
 						<TestCase name="vertex_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -70512,30 +70658,6 @@
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_tessellation_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -70547,56 +70669,17 @@
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_geometry_fragment">
-							<Test name="default_framebuffer_bbox_equal">
+							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 							</Test>
-							<Test name="default_framebuffer_bbox_larger">
+							<Test name="fbo_bbox_larger">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
+							<Test name="fbo_bbox_smaller">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_tessellation_geometry_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -70610,30 +70693,6 @@
 					</TestSuite>
 					<TestSuite name="tessellation_set_per_draw">
 						<TestCase name="vertex_tessellation_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -70645,30 +70704,6 @@
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_tessellation_geometry_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -70936,30 +70971,6 @@
 				<TestSuite name="wide_lines">
 					<TestSuite name="global_state">
 						<TestCase name="vertex_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -70971,30 +70982,6 @@
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_tessellation_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -71006,56 +70993,17 @@
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_geometry_fragment">
-							<Test name="default_framebuffer_bbox_equal">
+							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 							</Test>
-							<Test name="default_framebuffer_bbox_larger">
+							<Test name="fbo_bbox_larger">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
+							<Test name="fbo_bbox_smaller">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_tessellation_geometry_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -71069,30 +71017,6 @@
 					</TestSuite>
 					<TestSuite name="tessellation_set_per_draw">
 						<TestCase name="vertex_tessellation_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -71104,30 +71028,6 @@
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_tessellation_geometry_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -71894,6 +71794,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba32i_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -71956,6 +71868,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba32i_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -72018,6 +71945,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba32ui_rgba32f">
 							<Test name="texture2d_to_texture2d">
@@ -72068,6 +72010,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba32ui_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -72130,6 +72084,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba32ui_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -72192,6 +72161,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 					</TestSuite>
 					<TestSuite name="viewclass_96_bits">
@@ -72994,6 +72978,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg32i_rg32i">
 							<Test name="texture2d_to_texture2d">
@@ -73056,6 +73052,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg32i_rg32ui">
 							<Test name="texture2d_to_texture2d">
@@ -73118,6 +73129,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg32i_rgba16f">
 							<Test name="texture2d_to_texture2d">
@@ -73168,6 +73194,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg32i_rgba16i">
 							<Test name="texture2d_to_texture2d">
@@ -73230,6 +73268,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg32i_rgba16ui">
 							<Test name="texture2d_to_texture2d">
@@ -73292,6 +73345,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg32ui_rg32f">
 							<Test name="texture2d_to_texture2d">
@@ -73342,6 +73410,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg32ui_rg32i">
 							<Test name="texture2d_to_texture2d">
@@ -73404,6 +73484,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg32ui_rg32ui">
 							<Test name="texture2d_to_texture2d">
@@ -73466,6 +73561,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg32ui_rgba16f">
 							<Test name="texture2d_to_texture2d">
@@ -73516,6 +73626,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg32ui_rgba16i">
 							<Test name="texture2d_to_texture2d">
@@ -73578,6 +73700,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg32ui_rgba16ui">
 							<Test name="texture2d_to_texture2d">
@@ -73640,6 +73777,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba16f_rg32i">
 							<Test name="texture2d_to_texture2d">
@@ -73988,6 +74140,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba16i_rg32i">
 							<Test name="texture2d_to_texture2d">
@@ -74050,6 +74214,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba16i_rg32ui">
 							<Test name="texture2d_to_texture2d">
@@ -74112,6 +74291,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba16i_rgba16f">
 							<Test name="texture2d_to_texture2d">
@@ -74162,6 +74356,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba16i_rgba16i">
 							<Test name="texture2d_to_texture2d">
@@ -74224,6 +74430,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba16i_rgba16ui">
 							<Test name="texture2d_to_texture2d">
@@ -74286,6 +74507,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba16ui_rg32f">
 							<Test name="texture2d_to_texture2d">
@@ -74336,6 +74572,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba16ui_rg32i">
 							<Test name="texture2d_to_texture2d">
@@ -74398,6 +74646,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba16ui_rg32ui">
 							<Test name="texture2d_to_texture2d">
@@ -74460,6 +74723,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba16ui_rgba16f">
 							<Test name="texture2d_to_texture2d">
@@ -74510,6 +74788,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba16ui_rgba16i">
 							<Test name="texture2d_to_texture2d">
@@ -74572,6 +74862,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba16ui_rgba16ui">
 							<Test name="texture2d_to_texture2d">
@@ -74634,6 +74939,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 					</TestSuite>
 					<TestSuite name="viewclass_48_bits">
@@ -75697,6 +76017,56 @@
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
 						</TestCase>
+						<TestCase name="r32f_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
 						<TestCase name="r32f_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
@@ -75808,6 +76178,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32i_r32i">
 							<Test name="texture2d_to_texture2d">
@@ -75870,6 +76252,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32i_r32ui">
 							<Test name="texture2d_to_texture2d">
@@ -75932,6 +76329,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32i_rg16f">
 							<Test name="texture2d_to_texture2d">
@@ -75982,6 +76394,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32i_rg16i">
 							<Test name="texture2d_to_texture2d">
@@ -76044,6 +76468,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32i_rg16ui">
 							<Test name="texture2d_to_texture2d">
@@ -76106,6 +76545,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32i_rgba8">
 							<Test name="texture2d_to_texture2d">
@@ -76168,6 +76622,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32i_rgba8i">
 							<Test name="texture2d_to_texture2d">
@@ -76230,6 +76699,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32i_rgba8ui">
 							<Test name="texture2d_to_texture2d">
@@ -76292,6 +76776,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32i_r11f_g11f_b10f">
 							<Test name="texture2d_to_texture2d">
@@ -76342,6 +76841,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32i_rgb10_a2ui">
 							<Test name="texture2d_to_texture2d">
@@ -76404,6 +76915,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32i_rgb10_a2">
 							<Test name="texture2d_to_texture2d">
@@ -76466,6 +76992,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="r32i_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32i_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
@@ -76528,6 +77131,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32i_rgb9_e5">
 							<Test name="texture2d_to_texture2d">
@@ -76578,6 +77196,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_r32f">
 							<Test name="texture2d_to_texture2d">
@@ -76628,6 +77258,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_r32i">
 							<Test name="texture2d_to_texture2d">
@@ -76690,6 +77332,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_r32ui">
 							<Test name="texture2d_to_texture2d">
@@ -76752,6 +77409,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_rg16f">
 							<Test name="texture2d_to_texture2d">
@@ -76802,6 +77474,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_rg16i">
 							<Test name="texture2d_to_texture2d">
@@ -76864,6 +77548,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_rg16ui">
 							<Test name="texture2d_to_texture2d">
@@ -76926,6 +77625,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_rgba8">
 							<Test name="texture2d_to_texture2d">
@@ -76988,6 +77702,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_rgba8i">
 							<Test name="texture2d_to_texture2d">
@@ -77050,6 +77779,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_rgba8ui">
 							<Test name="texture2d_to_texture2d">
@@ -77112,6 +77856,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_r11f_g11f_b10f">
 							<Test name="texture2d_to_texture2d">
@@ -77162,6 +77921,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_rgb10_a2ui">
 							<Test name="texture2d_to_texture2d">
@@ -77224,6 +77995,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_rgb10_a2">
 							<Test name="texture2d_to_texture2d">
@@ -77286,6 +78072,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="r32ui_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
@@ -77348,6 +78211,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r32ui_rgb9_e5">
 							<Test name="texture2d_to_texture2d">
@@ -77398,6 +78276,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16f_r32i">
 							<Test name="texture2d_to_texture2d">
@@ -78007,6 +78897,56 @@
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
 						</TestCase>
+						<TestCase name="rg16f_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
 						<TestCase name="rg16f_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
@@ -78118,6 +79058,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16i_r32i">
 							<Test name="texture2d_to_texture2d">
@@ -78180,6 +79132,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16i_r32ui">
 							<Test name="texture2d_to_texture2d">
@@ -78242,6 +79209,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16i_rg16f">
 							<Test name="texture2d_to_texture2d">
@@ -78292,6 +79274,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16i_rg16i">
 							<Test name="texture2d_to_texture2d">
@@ -78354,6 +79348,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16i_rg16ui">
 							<Test name="texture2d_to_texture2d">
@@ -78416,6 +79425,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16i_rgba8">
 							<Test name="texture2d_to_texture2d">
@@ -78478,6 +79502,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16i_rgba8i">
 							<Test name="texture2d_to_texture2d">
@@ -78540,6 +79579,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16i_rgba8ui">
 							<Test name="texture2d_to_texture2d">
@@ -78602,6 +79656,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16i_r11f_g11f_b10f">
 							<Test name="texture2d_to_texture2d">
@@ -78652,6 +79721,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16i_rgb10_a2ui">
 							<Test name="texture2d_to_texture2d">
@@ -78714,6 +79795,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16i_rgb10_a2">
 							<Test name="texture2d_to_texture2d">
@@ -78776,6 +79872,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rg16i_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16i_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
@@ -78838,6 +80011,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16i_rgb9_e5">
 							<Test name="texture2d_to_texture2d">
@@ -78888,6 +80076,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_r32f">
 							<Test name="texture2d_to_texture2d">
@@ -78938,6 +80138,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_r32i">
 							<Test name="texture2d_to_texture2d">
@@ -79000,6 +80212,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_r32ui">
 							<Test name="texture2d_to_texture2d">
@@ -79062,6 +80289,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_rg16f">
 							<Test name="texture2d_to_texture2d">
@@ -79112,6 +80354,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_rg16i">
 							<Test name="texture2d_to_texture2d">
@@ -79174,6 +80428,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_rg16ui">
 							<Test name="texture2d_to_texture2d">
@@ -79236,6 +80505,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_rgba8">
 							<Test name="texture2d_to_texture2d">
@@ -79298,6 +80582,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_rgba8i">
 							<Test name="texture2d_to_texture2d">
@@ -79360,6 +80659,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_rgba8ui">
 							<Test name="texture2d_to_texture2d">
@@ -79422,6 +80736,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_r11f_g11f_b10f">
 							<Test name="texture2d_to_texture2d">
@@ -79472,6 +80801,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_rgb10_a2ui">
 							<Test name="texture2d_to_texture2d">
@@ -79534,6 +80875,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_rgb10_a2">
 							<Test name="texture2d_to_texture2d">
@@ -79596,6 +80952,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rg16ui_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
@@ -79658,6 +81091,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg16ui_rgb9_e5">
 							<Test name="texture2d_to_texture2d">
@@ -79708,6 +81156,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_r32f">
 							<Test name="texture2d_to_texture2d">
@@ -79758,6 +81218,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_r32i">
 							<Test name="texture2d_to_texture2d">
@@ -79820,6 +81292,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_r32ui">
 							<Test name="texture2d_to_texture2d">
@@ -79882,6 +81369,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_rg16f">
 							<Test name="texture2d_to_texture2d">
@@ -79932,6 +81434,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_rg16i">
 							<Test name="texture2d_to_texture2d">
@@ -79994,6 +81508,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_rg16ui">
 							<Test name="texture2d_to_texture2d">
@@ -80056,6 +81585,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_rgba8">
 							<Test name="texture2d_to_texture2d">
@@ -80118,6 +81662,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_rgba8i">
 							<Test name="texture2d_to_texture2d">
@@ -80180,6 +81739,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_rgba8ui">
 							<Test name="texture2d_to_texture2d">
@@ -80242,6 +81816,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_r11f_g11f_b10f">
 							<Test name="texture2d_to_texture2d">
@@ -80292,6 +81881,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_rgb10_a2ui">
 							<Test name="texture2d_to_texture2d">
@@ -80354,6 +81955,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_rgb10_a2">
 							<Test name="texture2d_to_texture2d">
@@ -80416,6 +82032,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
@@ -80478,6 +82171,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_rgb9_e5">
 							<Test name="texture2d_to_texture2d">
@@ -80528,6 +82236,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_r32f">
 							<Test name="texture2d_to_texture2d">
@@ -80578,6 +82298,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_r32i">
 							<Test name="texture2d_to_texture2d">
@@ -80640,6 +82372,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_r32ui">
 							<Test name="texture2d_to_texture2d">
@@ -80702,6 +82449,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_rg16f">
 							<Test name="texture2d_to_texture2d">
@@ -80752,6 +82514,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_rg16i">
 							<Test name="texture2d_to_texture2d">
@@ -80814,6 +82588,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_rg16ui">
 							<Test name="texture2d_to_texture2d">
@@ -80876,6 +82665,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_rgba8">
 							<Test name="texture2d_to_texture2d">
@@ -80938,6 +82742,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_rgba8i">
 							<Test name="texture2d_to_texture2d">
@@ -81000,6 +82819,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_rgba8ui">
 							<Test name="texture2d_to_texture2d">
@@ -81062,6 +82896,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_r11f_g11f_b10f">
 							<Test name="texture2d_to_texture2d">
@@ -81112,6 +82961,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_rgb10_a2ui">
 							<Test name="texture2d_to_texture2d">
@@ -81174,6 +83035,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_rgb10_a2">
 							<Test name="texture2d_to_texture2d">
@@ -81236,6 +83112,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8i_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
@@ -81298,6 +83251,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8i_rgb9_e5">
 							<Test name="texture2d_to_texture2d">
@@ -81348,6 +83316,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_r32f">
 							<Test name="texture2d_to_texture2d">
@@ -81398,6 +83378,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_r32i">
 							<Test name="texture2d_to_texture2d">
@@ -81460,6 +83452,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_r32ui">
 							<Test name="texture2d_to_texture2d">
@@ -81522,6 +83529,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_rg16f">
 							<Test name="texture2d_to_texture2d">
@@ -81572,6 +83594,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_rg16i">
 							<Test name="texture2d_to_texture2d">
@@ -81634,6 +83668,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_rg16ui">
 							<Test name="texture2d_to_texture2d">
@@ -81696,6 +83745,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_rgba8">
 							<Test name="texture2d_to_texture2d">
@@ -81758,6 +83822,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_rgba8i">
 							<Test name="texture2d_to_texture2d">
@@ -81820,6 +83899,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_rgba8ui">
 							<Test name="texture2d_to_texture2d">
@@ -81882,6 +83976,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_r11f_g11f_b10f">
 							<Test name="texture2d_to_texture2d">
@@ -81932,6 +84041,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_rgb10_a2ui">
 							<Test name="texture2d_to_texture2d">
@@ -81994,6 +84115,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_rgb10_a2">
 							<Test name="texture2d_to_texture2d">
@@ -82056,6 +84192,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8ui_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
@@ -82118,6 +84331,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8ui_rgb9_e5">
 							<Test name="texture2d_to_texture2d">
@@ -82168,6 +84396,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r11f_g11f_b10f_r32i">
 							<Test name="texture2d_to_texture2d">
@@ -82777,6 +85017,56 @@
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
 						</TestCase>
+						<TestCase name="r11f_g11f_b10f_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
 						<TestCase name="r11f_g11f_b10f_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
@@ -82888,6 +85178,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2ui_r32i">
 							<Test name="texture2d_to_texture2d">
@@ -82950,6 +85252,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2ui_r32ui">
 							<Test name="texture2d_to_texture2d">
@@ -83012,6 +85329,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2ui_rg16f">
 							<Test name="texture2d_to_texture2d">
@@ -83062,6 +85394,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2ui_rg16i">
 							<Test name="texture2d_to_texture2d">
@@ -83124,6 +85468,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2ui_rg16ui">
 							<Test name="texture2d_to_texture2d">
@@ -83186,6 +85545,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2ui_rgba8">
 							<Test name="texture2d_to_texture2d">
@@ -83248,6 +85622,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2ui_rgba8i">
 							<Test name="texture2d_to_texture2d">
@@ -83310,6 +85699,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2ui_rgba8ui">
 							<Test name="texture2d_to_texture2d">
@@ -83372,6 +85776,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2ui_r11f_g11f_b10f">
 							<Test name="texture2d_to_texture2d">
@@ -83422,6 +85841,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2ui_rgb10_a2ui">
 							<Test name="texture2d_to_texture2d">
@@ -83484,6 +85915,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2ui_rgb10_a2">
 							<Test name="texture2d_to_texture2d">
@@ -83546,6 +85992,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgb10_a2ui_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2ui_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
@@ -83608,6 +86131,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2ui_rgb9_e5">
 							<Test name="texture2d_to_texture2d">
@@ -83658,6 +86196,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_r32f">
 							<Test name="texture2d_to_texture2d">
@@ -83708,6 +86258,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_r32i">
 							<Test name="texture2d_to_texture2d">
@@ -83770,6 +86332,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_r32ui">
 							<Test name="texture2d_to_texture2d">
@@ -83832,6 +86409,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_rg16f">
 							<Test name="texture2d_to_texture2d">
@@ -83882,6 +86474,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_rg16i">
 							<Test name="texture2d_to_texture2d">
@@ -83944,6 +86548,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_rg16ui">
 							<Test name="texture2d_to_texture2d">
@@ -84006,6 +86625,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_rgba8">
 							<Test name="texture2d_to_texture2d">
@@ -84068,6 +86702,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_rgba8i">
 							<Test name="texture2d_to_texture2d">
@@ -84130,6 +86779,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_rgba8ui">
 							<Test name="texture2d_to_texture2d">
@@ -84192,6 +86856,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_r11f_g11f_b10f">
 							<Test name="texture2d_to_texture2d">
@@ -84242,6 +86921,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_rgb10_a2ui">
 							<Test name="texture2d_to_texture2d">
@@ -84304,6 +86995,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_rgb10_a2">
 							<Test name="texture2d_to_texture2d">
@@ -84366,6 +87072,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgb10_a2_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
@@ -84428,6 +87211,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb10_a2_rgb9_e5">
 							<Test name="texture2d_to_texture2d">
@@ -84478,6 +87276,888 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_r32f">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_r32i">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_r32ui">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_rg16f">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_rg16i">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_rg16ui">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_rgba8">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_rgba8i">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_rgba8ui">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_r11f_g11f_b10f">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_rgb10_a2ui">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_rgb10_a2">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_srgb8_alpha8">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgba8_snorm_rgb9_e5">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_r32f">
 							<Test name="texture2d_to_texture2d">
@@ -84528,6 +88208,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_r32i">
 							<Test name="texture2d_to_texture2d">
@@ -84590,6 +88282,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_r32ui">
 							<Test name="texture2d_to_texture2d">
@@ -84652,6 +88359,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_rg16f">
 							<Test name="texture2d_to_texture2d">
@@ -84702,6 +88424,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_rg16i">
 							<Test name="texture2d_to_texture2d">
@@ -84764,6 +88498,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_rg16ui">
 							<Test name="texture2d_to_texture2d">
@@ -84826,6 +88575,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_rgba8">
 							<Test name="texture2d_to_texture2d">
@@ -84888,6 +88652,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_rgba8i">
 							<Test name="texture2d_to_texture2d">
@@ -84950,6 +88729,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_rgba8ui">
 							<Test name="texture2d_to_texture2d">
@@ -85012,6 +88806,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_r11f_g11f_b10f">
 							<Test name="texture2d_to_texture2d">
@@ -85062,6 +88871,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_rgb10_a2ui">
 							<Test name="texture2d_to_texture2d">
@@ -85124,6 +88945,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_rgb10_a2">
 							<Test name="texture2d_to_texture2d">
@@ -85186,6 +89022,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="srgb8_alpha8_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
@@ -85248,6 +89161,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_rgb9_e5">
 							<Test name="texture2d_to_texture2d">
@@ -85298,6 +89226,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb9_e5_r32i">
 							<Test name="texture2d_to_texture2d">
@@ -85857,6 +89797,56 @@
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
 						</TestCase>
+						<TestCase name="rgb9_e5_rgba8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
 						<TestCase name="rgb9_e5_srgb8_alpha8">
 							<Test name="texture2d_to_texture2d">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
@@ -86032,6 +90022,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb8_rgb8i">
 							<Test name="texture2d_to_texture2d">
@@ -86082,6 +90087,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb8_rgb8ui">
 							<Test name="texture2d_to_texture2d">
@@ -86132,6 +90149,80 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgb8_rgb8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb8_srgb8">
 							<Test name="texture2d_to_texture2d">
@@ -86182,6 +90273,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgb8i_rgb8">
 							<Test name="texture2d_to_texture2d">
@@ -86345,6 +90448,56 @@
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
 						</TestCase>
+						<TestCase name="rgb8i_rgb8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
 						<TestCase name="rgb8i_srgb8">
 							<Test name="texture2d_to_texture2d">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
@@ -86557,6 +90710,56 @@
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
 						</TestCase>
+						<TestCase name="rgb8ui_rgb8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
 						<TestCase name="rgb8ui_srgb8">
 							<Test name="texture2d_to_texture2d">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
@@ -86607,6 +90810,268 @@
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
 						</TestCase>
+						<TestCase name="rgb8_snorm_rgb8">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgb8_snorm_rgb8i">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgb8_snorm_rgb8ui">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgb8_snorm_rgb8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rgb8_snorm_srgb8">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
 						<TestCase name="srgb8_rgb8">
 							<Test name="texture2d_to_texture2d">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
@@ -86769,6 +91234,56 @@
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
 						</TestCase>
+						<TestCase name="srgb8_rgb8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
 						<TestCase name="srgb8_srgb8">
 							<Test name="texture2d_to_texture2d">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
@@ -87181,6 +91696,56 @@
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
 						</TestCase>
+						<TestCase name="r16f_rg8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
 						<TestCase name="r16i_r16f">
 							<Test name="texture2d_to_texture2d">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
@@ -87230,6 +91795,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r16i_r16i">
 							<Test name="texture2d_to_texture2d">
@@ -87292,6 +91869,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r16i_r16ui">
 							<Test name="texture2d_to_texture2d">
@@ -87354,6 +91946,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r16i_rg8">
 							<Test name="texture2d_to_texture2d">
@@ -87416,6 +92023,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r16i_rg8i">
 							<Test name="texture2d_to_texture2d">
@@ -87478,6 +92100,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r16i_rg8ui">
 							<Test name="texture2d_to_texture2d">
@@ -87540,6 +92177,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="r16i_rg8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r16ui_r16f">
 							<Test name="texture2d_to_texture2d">
@@ -87590,6 +92304,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r16ui_r16i">
 							<Test name="texture2d_to_texture2d">
@@ -87652,6 +92378,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r16ui_r16ui">
 							<Test name="texture2d_to_texture2d">
@@ -87714,6 +92455,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r16ui_rg8">
 							<Test name="texture2d_to_texture2d">
@@ -87776,6 +92532,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r16ui_rg8i">
 							<Test name="texture2d_to_texture2d">
@@ -87838,6 +92609,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r16ui_rg8ui">
 							<Test name="texture2d_to_texture2d">
@@ -87900,6 +92686,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="r16ui_rg8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8_r16f">
 							<Test name="texture2d_to_texture2d">
@@ -87950,6 +92813,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8_r16i">
 							<Test name="texture2d_to_texture2d">
@@ -88012,6 +92887,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8_r16ui">
 							<Test name="texture2d_to_texture2d">
@@ -88074,6 +92964,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8_rg8">
 							<Test name="texture2d_to_texture2d">
@@ -88136,6 +93041,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8_rg8i">
 							<Test name="texture2d_to_texture2d">
@@ -88198,6 +93118,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8_rg8ui">
 							<Test name="texture2d_to_texture2d">
@@ -88260,6 +93195,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rg8_rg8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8i_r16f">
 							<Test name="texture2d_to_texture2d">
@@ -88310,6 +93322,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8i_r16i">
 							<Test name="texture2d_to_texture2d">
@@ -88372,6 +93396,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8i_r16ui">
 							<Test name="texture2d_to_texture2d">
@@ -88434,6 +93473,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8i_rg8">
 							<Test name="texture2d_to_texture2d">
@@ -88496,6 +93550,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8i_rg8i">
 							<Test name="texture2d_to_texture2d">
@@ -88558,6 +93627,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8i_rg8ui">
 							<Test name="texture2d_to_texture2d">
@@ -88620,6 +93704,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rg8i_rg8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8ui_r16f">
 							<Test name="texture2d_to_texture2d">
@@ -88670,6 +93831,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8ui_r16i">
 							<Test name="texture2d_to_texture2d">
@@ -88732,6 +93905,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8ui_r16ui">
 							<Test name="texture2d_to_texture2d">
@@ -88794,6 +93982,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8ui_rg8">
 							<Test name="texture2d_to_texture2d">
@@ -88856,6 +94059,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8ui_rg8i">
 							<Test name="texture2d_to_texture2d">
@@ -88918,6 +94136,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg8ui_rg8ui">
 							<Test name="texture2d_to_texture2d">
@@ -88980,6 +94213,493 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rg8ui_rg8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rg8_snorm_r16f">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rg8_snorm_r16i">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rg8_snorm_r16ui">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rg8_snorm_rg8">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rg8_snorm_rg8i">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rg8_snorm_rg8ui">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="rg8_snorm_rg8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 					</TestSuite>
 					<TestSuite name="viewclass_8_bits">
@@ -89044,6 +94764,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r8_r8i">
 							<Test name="texture2d_to_texture2d">
@@ -89106,6 +94841,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r8_r8ui">
 							<Test name="texture2d_to_texture2d">
@@ -89168,6 +94918,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="r8_r8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r8i_r8">
 							<Test name="texture2d_to_texture2d">
@@ -89230,6 +95057,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r8i_r8i">
 							<Test name="texture2d_to_texture2d">
@@ -89292,6 +95134,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r8i_r8ui">
 							<Test name="texture2d_to_texture2d">
@@ -89354,6 +95211,83 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="r8i_r8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r8ui_r8">
 							<Test name="texture2d_to_texture2d">
@@ -89416,6 +95350,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r8ui_r8i">
 							<Test name="texture2d_to_texture2d">
@@ -89478,6 +95427,21 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r8ui_r8ui">
 							<Test name="texture2d_to_texture2d">
@@ -89540,6 +95504,319 @@
 							<Test name="texture2d_array_to_renderbuffer">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="r8ui_r8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="r8_snorm_r8">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="r8_snorm_r8i">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="r8_snorm_r8ui">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_renderbuffer">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+						</TestCase>
+						<TestCase name="r8_snorm_r8_snorm">
+							<Test name="texture2d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture3d_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="cubemap_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="texture2d_array_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 					</TestSuite>
 				</TestSuite>
@@ -95698,6 +101975,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_etc2_eac_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -95756,6 +102039,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_etc2_eac_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -95814,6 +102103,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg11_eac_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -95872,6 +102167,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="signed_rg11_eac_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -95954,6 +102255,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_4x4_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -96066,6 +102379,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_5x4_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -96178,6 +102503,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_5x5_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -96290,6 +102627,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_6x5_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -96402,6 +102751,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_6x6_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -96514,6 +102875,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_8x5_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -96626,6 +102999,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_8x6_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -96738,6 +103123,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_8x8_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -96850,6 +103247,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_10x5_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -96962,6 +103371,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_10x6_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -97074,6 +103495,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_10x8_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -97186,6 +103619,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_10x10_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -97298,6 +103743,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_12x10_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -97410,6 +103867,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_12x12_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -97522,6 +103991,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_4x4_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -97634,6 +104115,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_5x4_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -97746,6 +104239,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_5x5_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -97858,6 +104363,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_6x5_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -97970,6 +104487,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_6x6_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -98082,6 +104611,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_8x5_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -98194,6 +104735,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_8x6_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -98306,6 +104859,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_8x8_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -98418,6 +104983,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_10x5_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -98530,6 +105107,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_10x6_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -98642,6 +105231,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_10x8_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -98754,6 +105355,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_10x10_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -98866,6 +105479,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_12x10_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -98978,6 +105603,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_12x12_khr_rgba32ui">
 							<Test name="texture2d_to_texture2d">
@@ -99066,6 +105703,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba8_etc2_eac_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -99124,6 +105767,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_etc2_eac_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -99182,6 +105831,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rg11_eac_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -99240,6 +105895,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="signed_rg11_eac_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -99322,6 +105983,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_4x4_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -99434,6 +106107,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_5x4_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -99546,6 +106231,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_5x5_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -99658,6 +106355,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_6x5_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -99770,6 +106479,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_6x6_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -99882,6 +106603,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_8x5_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -99994,6 +106727,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_8x6_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -100106,6 +106851,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_8x8_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -100218,6 +106975,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_10x5_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -100330,6 +107099,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_10x6_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -100442,6 +107223,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_10x8_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -100554,6 +107347,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_10x10_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -100666,6 +107471,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_12x10_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -100778,6 +107595,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="rgba_astc_12x12_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -100890,6 +107719,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_4x4_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -101002,6 +107843,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_5x4_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -101114,6 +107967,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_5x5_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -101226,6 +108091,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_6x5_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -101338,6 +108215,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_6x6_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -101450,6 +108339,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_8x5_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -101562,6 +108463,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_8x6_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -101674,6 +108587,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_8x8_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -101786,6 +108711,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_10x5_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -101898,6 +108835,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_10x6_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -102010,6 +108959,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_10x8_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -102122,6 +109083,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_10x10_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -102234,6 +109207,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_12x10_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -102346,6 +109331,18 @@
 							<Test name="texture2d_array_to_texture2d_array">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture3d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_texture2d_array">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="srgb8_alpha8_astc_12x12_khr_rgba32i">
 							<Test name="texture2d_to_texture2d">
@@ -102540,6 +109537,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r11_eac_rgba16ui">
 							<Test name="texture2d_to_texture2d">
@@ -102598,6 +109601,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="signed_r11_eac_rgba16ui">
 							<Test name="texture2d_to_texture2d">
@@ -102656,6 +109665,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r11_eac_rgba16i">
 							<Test name="texture2d_to_texture2d">
@@ -102714,6 +109729,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="signed_r11_eac_rgba16i">
 							<Test name="texture2d_to_texture2d">
@@ -102876,6 +109897,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r11_eac_rg32ui">
 							<Test name="texture2d_to_texture2d">
@@ -102934,6 +109961,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="signed_r11_eac_rg32ui">
 							<Test name="texture2d_to_texture2d">
@@ -102992,6 +110025,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="r11_eac_rg32i">
 							<Test name="texture2d_to_texture2d">
@@ -103050,6 +110089,12 @@
 							<Test name="texture2d_array_to_cubemap">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
+							<Test name="renderbuffer_to_texture2d">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
+							<Test name="renderbuffer_to_cubemap">
+								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
+							</Test>
 						</TestCase>
 						<TestCase name="signed_r11_eac_rg32i">
 							<Test name="texture2d_to_texture2d">
diff --git a/android/cts/master/egl-master.txt b/android/cts/master/egl-master.txt
index df9f6cd..87398d9 100644
--- a/android/cts/master/egl-master.txt
+++ b/android/cts/master/egl-master.txt
@@ -946,11 +946,6 @@
 dEQP-EGL.functional.image.api.create_image_gles2_renderbuffer_rgb5_a1
 dEQP-EGL.functional.image.api.create_image_gles2_renderbuffer_rgb565
 dEQP-EGL.functional.image.api.create_image_gles2_renderbuffer_stencil_index8
-dEQP-EGL.functional.image.api.create_image_gles2_android_native_rgb565
-dEQP-EGL.functional.image.api.create_image_gles2_android_native_rgb8
-dEQP-EGL.functional.image.api.create_image_gles2_android_native_rgba4
-dEQP-EGL.functional.image.api.create_image_gles2_android_native_rgb5_a1
-dEQP-EGL.functional.image.api.create_image_gles2_android_native_rgba8
 dEQP-EGL.functional.image.api.image_target_gles2_tex2d
 dEQP-EGL.functional.image.api.image_target_gles2_renderbuffer
 dEQP-EGL.functional.image.create.gles2_texture_rgb8_texture
@@ -995,16 +990,6 @@
 dEQP-EGL.functional.image.create.gles2_renderbuffer_rgb565_read_pixels
 dEQP-EGL.functional.image.create.gles2_renderbuffer_depth16_depth_buffer
 dEQP-EGL.functional.image.create.gles2_renderbuffer_stencil_stencil_buffer
-dEQP-EGL.functional.image.create.gles2_android_native_rgb565_texture
-dEQP-EGL.functional.image.create.gles2_android_native_rgb565_read_pixels
-dEQP-EGL.functional.image.create.gles2_android_native_rgb8_texture
-dEQP-EGL.functional.image.create.gles2_android_native_rgb8_read_pixels
-dEQP-EGL.functional.image.create.gles2_android_native_rgba4_texture
-dEQP-EGL.functional.image.create.gles2_android_native_rgba4_read_pixels
-dEQP-EGL.functional.image.create.gles2_android_native_rgb5_a1_texture
-dEQP-EGL.functional.image.create.gles2_android_native_rgb5_a1_read_pixels
-dEQP-EGL.functional.image.create.gles2_android_native_rgba8_texture
-dEQP-EGL.functional.image.create.gles2_android_native_rgba8_read_pixels
 dEQP-EGL.functional.image.modify.tex_rgb8_tex_subimage_rgb8
 dEQP-EGL.functional.image.modify.tex_rgb8_tex_subimage_rgb565
 dEQP-EGL.functional.image.modify.tex_rgb8_renderbuffer_clear_color
@@ -1036,24 +1021,6 @@
 dEQP-EGL.functional.image.modify.renderbuffer_rgb565_renderbuffer_clear_color
 dEQP-EGL.functional.image.modify.renderbuffer_depth16_renderbuffer_clear_depth
 dEQP-EGL.functional.image.modify.renderbuffer_stencil_renderbuffer_clear_stencil
-dEQP-EGL.functional.image.modify.android_native_rgb565_tex_subimage_rgb8
-dEQP-EGL.functional.image.modify.android_native_rgb565_tex_subimage_rgb565
-dEQP-EGL.functional.image.modify.android_native_rgb565_renderbuffer_clear_color
-dEQP-EGL.functional.image.modify.android_native_rgb8_tex_subimage_rgb8
-dEQP-EGL.functional.image.modify.android_native_rgb8_tex_subimage_rgb565
-dEQP-EGL.functional.image.modify.android_native_rgb8_renderbuffer_clear_color
-dEQP-EGL.functional.image.modify.android_native_rgba4_tex_subimage_rgba8
-dEQP-EGL.functional.image.modify.android_native_rgba4_tex_subimage_rgba5_a1
-dEQP-EGL.functional.image.modify.android_native_rgba4_tex_subimage_rgba4
-dEQP-EGL.functional.image.modify.android_native_rgba4_renderbuffer_clear_color
-dEQP-EGL.functional.image.modify.android_native_rgb5_a1_tex_subimage_rgba8
-dEQP-EGL.functional.image.modify.android_native_rgb5_a1_tex_subimage_rgba5_a1
-dEQP-EGL.functional.image.modify.android_native_rgb5_a1_tex_subimage_rgba4
-dEQP-EGL.functional.image.modify.android_native_rgb5_a1_renderbuffer_clear_color
-dEQP-EGL.functional.image.modify.android_native_rgba8_tex_subimage_rgba8
-dEQP-EGL.functional.image.modify.android_native_rgba8_tex_subimage_rgba5_a1
-dEQP-EGL.functional.image.modify.android_native_rgba8_tex_subimage_rgba4
-dEQP-EGL.functional.image.modify.android_native_rgba8_renderbuffer_clear_color
 dEQP-EGL.functional.image.render_multiple_contexts.gles2_texture_rgb8_texture
 dEQP-EGL.functional.image.render_multiple_contexts.gles2_texture_rgb8_read_pixels
 dEQP-EGL.functional.image.render_multiple_contexts.gles2_texture_rgb565_texture
@@ -1096,16 +1063,6 @@
 dEQP-EGL.functional.image.render_multiple_contexts.gles2_renderbuffer_rgb565_read_pixels
 dEQP-EGL.functional.image.render_multiple_contexts.gles2_renderbuffer_depth16_depth_buffer
 dEQP-EGL.functional.image.render_multiple_contexts.gles2_renderbuffer_stencil_stencil_buffer
-dEQP-EGL.functional.image.render_multiple_contexts.gles2_android_native_rgb565_texture
-dEQP-EGL.functional.image.render_multiple_contexts.gles2_android_native_rgb565_read_pixels
-dEQP-EGL.functional.image.render_multiple_contexts.gles2_android_native_rgb8_texture
-dEQP-EGL.functional.image.render_multiple_contexts.gles2_android_native_rgb8_read_pixels
-dEQP-EGL.functional.image.render_multiple_contexts.gles2_android_native_rgba4_texture
-dEQP-EGL.functional.image.render_multiple_contexts.gles2_android_native_rgba4_read_pixels
-dEQP-EGL.functional.image.render_multiple_contexts.gles2_android_native_rgb5_a1_texture
-dEQP-EGL.functional.image.render_multiple_contexts.gles2_android_native_rgb5_a1_read_pixels
-dEQP-EGL.functional.image.render_multiple_contexts.gles2_android_native_rgba8_texture
-dEQP-EGL.functional.image.render_multiple_contexts.gles2_android_native_rgba8_read_pixels
 dEQP-EGL.functional.sharing.gles2.context.create_destroy
 dEQP-EGL.functional.sharing.gles2.context.create_destroy_mixed
 dEQP-EGL.functional.sharing.gles2.buffer.create_delete
@@ -2390,7 +2347,6 @@
 dEQP-EGL.functional.negative_api.destroy_surface
 dEQP-EGL.functional.negative_api.get_config_attrib
 dEQP-EGL.functional.negative_api.get_configs
-dEQP-EGL.functional.negative_api.get_display
 dEQP-EGL.functional.negative_api.initialize
 dEQP-EGL.functional.negative_api.make_current
 dEQP-EGL.functional.negative_api.get_current_context
diff --git a/android/cts/master/gles2-master.txt b/android/cts/master/gles2-master.txt
index 4fa9c32..667f0c8 100644
--- a/android/cts/master/gles2-master.txt
+++ b/android/cts/master/gles2-master.txt
@@ -1086,6 +1086,9 @@
 dEQP-GLES2.functional.shaders.conditionals.if.if_elseif_else_fragment
 dEQP-GLES2.functional.shaders.conditionals.if.mixed_if_elseif_else_vertex
 dEQP-GLES2.functional.shaders.conditionals.if.mixed_if_elseif_else_fragment
+dEQP-GLES2.functional.shaders.conditionals.if.constant_conditional_assignment_to_matrix
+dEQP-GLES2.functional.shaders.conditionals.if.input_conditional_assignment_to_matrix
+dEQP-GLES2.functional.shaders.conditionals.if.uniform_conditional_assignment_to_matrix
 dEQP-GLES2.functional.shaders.conditionals.invalid_if.missing_parenthesis_vertex
 dEQP-GLES2.functional.shaders.conditionals.invalid_if.missing_parenthesis_fragment
 dEQP-GLES2.functional.shaders.conditionals.invalid_if.unclosed_parenthesis_vertex
@@ -1697,6 +1700,7 @@
 dEQP-GLES2.functional.shaders.functions.datatypes.struct_struct_fragment
 dEQP-GLES2.functional.shaders.functions.datatypes.struct_nested_struct_vertex
 dEQP-GLES2.functional.shaders.functions.datatypes.struct_nested_struct_fragment
+dEQP-GLES2.functional.shaders.functions.datatypes.struct_constructor_highp_in_fragment
 dEQP-GLES2.functional.shaders.functions.qualifiers.in_float_vertex
 dEQP-GLES2.functional.shaders.functions.qualifiers.in_float_fragment
 dEQP-GLES2.functional.shaders.functions.qualifiers.out_float_vertex
@@ -2893,6 +2897,7 @@
 dEQP-GLES2.functional.shaders.loops.do_while_dynamic_iterations.conditional_body_fragment
 dEQP-GLES2.functional.shaders.loops.do_while_dynamic_iterations.function_call_return_fragment
 dEQP-GLES2.functional.shaders.loops.do_while_dynamic_iterations.function_call_inout_fragment
+dEQP-GLES2.functional.shaders.loops.custom.continue_in_fragment_for_loop
 dEQP-GLES2.functional.shaders.operator.unary_operator.plus.lowp_float_vertex
 dEQP-GLES2.functional.shaders.operator.unary_operator.plus.lowp_float_fragment
 dEQP-GLES2.functional.shaders.operator.unary_operator.plus.mediump_float_vertex
@@ -8675,7 +8680,6 @@
 dEQP-GLES2.functional.texture.size.cube.512x512_rgba8888_mipmap
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_rgb888
-dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_rgba4444
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_l8
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_npot_rgba8888
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_npot_rgb888
@@ -8699,7 +8703,6 @@
 dEQP-GLES2.functional.texture.wrap.repeat_repeat_linear_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_rgb888
-dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_rgba4444
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_l8
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_linear_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_linear_pot_rgb888
@@ -8711,23 +8714,14 @@
 dEQP-GLES2.functional.texture.wrap.mirror_repeat_linear_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.mirror_mirror_nearest_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.mirror_mirror_linear_pot_rgba8888
-dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.clamp_repeat_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.clamp_repeat_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.clamp_mirror_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.clamp_mirror_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.repeat_clamp_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.repeat_clamp_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.repeat_repeat_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.repeat_repeat_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.mirror_clamp_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.mirror_clamp_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.mirror_repeat_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.mirror_repeat_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.mirror_mirror_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.mirror_mirror_linear_pot_etc1
 dEQP-GLES2.functional.texture.filtering.2d.nearest_mipmap_nearest_linear_clamp_etc1
 dEQP-GLES2.functional.texture.filtering.2d.linear_mipmap_nearest_linear_clamp_etc1
@@ -9077,7 +9071,6 @@
 dEQP-GLES2.functional.texture.units.2_units.only_cube.6
 dEQP-GLES2.functional.texture.units.2_units.only_cube.7
 dEQP-GLES2.functional.texture.units.2_units.only_cube.8
-dEQP-GLES2.functional.texture.units.2_units.only_cube.9
 dEQP-GLES2.functional.texture.units.2_units.mixed.3
 dEQP-GLES2.functional.texture.units.2_units.mixed.4
 dEQP-GLES2.functional.texture.units.2_units.mixed.6
diff --git a/android/cts/master/gles3-565-no-depth-no-stencil.txt b/android/cts/master/gles3-565-no-depth-no-stencil.txt
index c4c7aae..f86d1fb 100644
--- a/android/cts/master/gles3-565-no-depth-no-stencil.txt
+++ b/android/cts/master/gles3-565-no-depth-no-stencil.txt
@@ -2173,6 +2173,11 @@
 dEQP-GLES3.functional.read_pixels.skip.choose_3_0
 dEQP-GLES3.functional.read_pixels.skip.choose_3_3
 dEQP-GLES3.functional.read_pixels.skip.choose_3_5
+dEQP-GLES3.functional.dither.disabled.gradient_white
+dEQP-GLES3.functional.dither.disabled.gradient_red
+dEQP-GLES3.functional.dither.disabled.gradient_green
+dEQP-GLES3.functional.dither.disabled.gradient_blue
+dEQP-GLES3.functional.dither.disabled.gradient_alpha
 dEQP-GLES3.functional.dither.disabled.unicolored_quad_white
 dEQP-GLES3.functional.dither.disabled.unicolored_quad_red
 dEQP-GLES3.functional.dither.disabled.unicolored_quad_green
diff --git a/android/cts/master/gles3-master.txt b/android/cts/master/gles3-master.txt
index 11a2697..abe4ac0 100644
--- a/android/cts/master/gles3-master.txt
+++ b/android/cts/master/gles3-master.txt
@@ -1146,6 +1146,10 @@
 dEQP-GLES3.functional.shaders.preprocessor.pragmas.pragma_fragment
 dEQP-GLES3.functional.shaders.preprocessor.pragmas.pragma_macro_exp_vertex
 dEQP-GLES3.functional.shaders.preprocessor.pragmas.pragma_macro_exp_fragment
+dEQP-GLES3.functional.shaders.preprocessor.pragmas.pragma_unrecognized_debug_vertex
+dEQP-GLES3.functional.shaders.preprocessor.pragmas.pragma_unrecognized_debug_fragment
+dEQP-GLES3.functional.shaders.preprocessor.pragmas.pragma_unrecognized_token_vertex
+dEQP-GLES3.functional.shaders.preprocessor.pragmas.pragma_unrecognized_token_fragment
 dEQP-GLES3.functional.shaders.preprocessor.extensions.basic_vertex
 dEQP-GLES3.functional.shaders.preprocessor.extensions.basic_fragment
 dEQP-GLES3.functional.shaders.preprocessor.extensions.macro_exp_vertex
@@ -2757,6 +2761,9 @@
 dEQP-GLES3.functional.shaders.conditionals.if.if_elseif_else_fragment
 dEQP-GLES3.functional.shaders.conditionals.if.mixed_if_elseif_else_vertex
 dEQP-GLES3.functional.shaders.conditionals.if.mixed_if_elseif_else_fragment
+dEQP-GLES3.functional.shaders.conditionals.if.constant_conditional_assignment_to_matrix
+dEQP-GLES3.functional.shaders.conditionals.if.input_conditional_assignment_to_matrix
+dEQP-GLES3.functional.shaders.conditionals.if.uniform_conditional_assignment_to_matrix
 dEQP-GLES3.functional.shaders.conditionals.invalid_if.missing_parenthesis_vertex
 dEQP-GLES3.functional.shaders.conditionals.invalid_if.missing_parenthesis_fragment
 dEQP-GLES3.functional.shaders.conditionals.invalid_if.unclosed_parenthesis_vertex
@@ -3533,6 +3540,7 @@
 dEQP-GLES3.functional.shaders.functions.datatypes.struct_struct_fragment
 dEQP-GLES3.functional.shaders.functions.datatypes.struct_nested_struct_vertex
 dEQP-GLES3.functional.shaders.functions.datatypes.struct_nested_struct_fragment
+dEQP-GLES3.functional.shaders.functions.datatypes.struct_constructor_highp_in_fragment
 dEQP-GLES3.functional.shaders.functions.qualifiers.in_float_vertex
 dEQP-GLES3.functional.shaders.functions.qualifiers.in_float_fragment
 dEQP-GLES3.functional.shaders.functions.qualifiers.out_float_vertex
@@ -5774,6 +5782,7 @@
 dEQP-GLES3.functional.shaders.loops.do_while_dynamic_iterations.nested_tricky_dataflow_1_fragment
 dEQP-GLES3.functional.shaders.loops.do_while_dynamic_iterations.nested_tricky_dataflow_2_vertex
 dEQP-GLES3.functional.shaders.loops.do_while_dynamic_iterations.nested_tricky_dataflow_2_fragment
+dEQP-GLES3.functional.shaders.loops.custom.continue_in_fragment_for_loop
 dEQP-GLES3.functional.shaders.operator.unary_operator.plus.lowp_float_vertex
 dEQP-GLES3.functional.shaders.operator.unary_operator.plus.lowp_float_fragment
 dEQP-GLES3.functional.shaders.operator.unary_operator.plus.mediump_float_vertex
@@ -15446,6 +15455,7 @@
 dEQP-GLES3.functional.shaders.texture_functions.textureprojlodoffset.usampler2d_vec4_fragment
 dEQP-GLES3.functional.shaders.texture_functions.textureprojlodoffset.sampler3d_fixed_vertex
 dEQP-GLES3.functional.shaders.texture_functions.textureprojlodoffset.sampler3d_fixed_fragment
+dEQP-GLES3.functional.shaders.texture_functions.textureprojlodoffset.sampler3d_float_vertex
 dEQP-GLES3.functional.shaders.texture_functions.textureprojlodoffset.sampler3d_float_fragment
 dEQP-GLES3.functional.shaders.texture_functions.textureprojlodoffset.isampler3d_fragment
 dEQP-GLES3.functional.shaders.texture_functions.textureprojlodoffset.usampler3d_fragment
@@ -27849,6 +27859,7 @@
 dEQP-GLES3.functional.fbo.api.invalid_rbo_attachments
 dEQP-GLES3.functional.fbo.api.attach_names
 dEQP-GLES3.functional.fbo.api.texture_levels
+dEQP-GLES3.functional.fbo.api.attachment_query_default_fbo
 dEQP-GLES3.functional.fbo.api.attachment_query_empty_fbo
 dEQP-GLES3.functional.fbo.api.attachment_query_tex2d
 dEQP-GLES3.functional.fbo.api.attachment_query_texcube
@@ -38635,6 +38646,7 @@
 dEQP-GLES3.functional.negative_api.texture.compressedtexsubimage2d_neg_width_height
 dEQP-GLES3.functional.negative_api.texture.compressedtexsubimage2d_invalid_size
 dEQP-GLES3.functional.negative_api.texture.compressedtexsubimage2d_invalid_buffer_target
+dEQP-GLES3.functional.negative_api.texture.teximage3d
 dEQP-GLES3.functional.negative_api.texture.teximage3d_neg_level
 dEQP-GLES3.functional.negative_api.texture.teximage3d_max_level
 dEQP-GLES3.functional.negative_api.texture.teximage3d_neg_width_height_depth
@@ -38956,6 +38968,11 @@
 dEQP-GLES3.functional.read_pixels.skip.choose_3_0
 dEQP-GLES3.functional.read_pixels.skip.choose_3_3
 dEQP-GLES3.functional.read_pixels.skip.choose_3_5
+dEQP-GLES3.functional.dither.disabled.gradient_white
+dEQP-GLES3.functional.dither.disabled.gradient_red
+dEQP-GLES3.functional.dither.disabled.gradient_green
+dEQP-GLES3.functional.dither.disabled.gradient_blue
+dEQP-GLES3.functional.dither.disabled.gradient_alpha
 dEQP-GLES3.functional.dither.disabled.unicolored_quad_white
 dEQP-GLES3.functional.dither.disabled.unicolored_quad_red
 dEQP-GLES3.functional.dither.disabled.unicolored_quad_green
@@ -40617,7 +40634,6 @@
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.user_ptr.index_int
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.unaligned_user_ptr.index_short
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.unaligned_user_ptr.index_int
-dEQP-GLES3.functional.draw.draw_elements_instanced.indices.buffer.index_byte
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.buffer.index_short
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.buffer.index_int
 dEQP-GLES3.functional.draw.draw_elements_instanced.points.single_attribute
diff --git a/android/cts/master/gles3-multisample.txt b/android/cts/master/gles3-multisample.txt
index f5b84e6..f5404f8 100644
--- a/android/cts/master/gles3-multisample.txt
+++ b/android/cts/master/gles3-multisample.txt
@@ -81,6 +81,7 @@
 dEQP-GLES3.functional.shaders.builtin_variable.depth_range_vertex
 dEQP-GLES3.functional.shaders.builtin_variable.depth_range_fragment
 dEQP-GLES3.functional.shaders.builtin_variable.fragcoord_xyz
+dEQP-GLES3.functional.shaders.builtin_variable.fragcoord_w
 dEQP-GLES3.functional.shaders.builtin_variable.pointcoord
 dEQP-GLES3.functional.shaders.derivate.dfdx.constant.float
 dEQP-GLES3.functional.shaders.derivate.dfdx.constant.vec2
diff --git a/android/cts/master/gles31-master.txt b/android/cts/master/gles31-master.txt
index cf696b8..5e2887c 100644
--- a/android/cts/master/gles31-master.txt
+++ b/android/cts/master/gles31-master.txt
@@ -3925,6 +3925,51 @@
 dEQP-GLES31.functional.shaders.linkage.io_block.missing_output_block
 dEQP-GLES31.functional.shaders.linkage.io_block.ambiguous_variable_name_1
 dEQP-GLES31.functional.shaders.linkage.io_block.ambiguous_variable_name_2
+dEQP-GLES31.functional.shaders.linkage.uniform.basic.precision_conflict_1
+dEQP-GLES31.functional.shaders.linkage.uniform.basic.precision_conflict_2
+dEQP-GLES31.functional.shaders.linkage.uniform.basic.precision_conflict_3
+dEQP-GLES31.functional.shaders.linkage.uniform.basic.precision_conflict_4
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.basic
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.vertex_only
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.fragment_only
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.partial
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.vec4
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.vertex_only_vec4
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.fragment_only_vec4
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.partial_vec4
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.vec4_vec3
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.vertex_only_vec4_vec3
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.fragment_only_vec4_vec3
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.partial_vec4_vec3
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.vec4_float
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.vertex_only_vec4_float
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.fragment_only_vec4_float
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.partial_vec4_float
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.partial_vec4_struct
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.partial_vec4_vec3_struct
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.partial_vec2_vec3
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.partial_vec2_int
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.partial_int_float
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.partial_bvec2_vec2
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.partial_ivec2_vec2
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.partial_ivec2_ivec2
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.type_conflict_1
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.type_conflict_2
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.type_conflict_3
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.precision_conflict_1
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.precision_conflict_2
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.precision_conflict_3
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.precision_conflict_4
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.light_struct_highp
+dEQP-GLES31.functional.shaders.linkage.uniform.struct.light_struct_mediump
+dEQP-GLES31.functional.shaders.linkage.uniform.block.differing_precision
+dEQP-GLES31.functional.shaders.linkage.uniform.block.type_mismatch
+dEQP-GLES31.functional.shaders.linkage.uniform.block.members_mismatch
+dEQP-GLES31.functional.shaders.linkage.uniform.block.layout_qualifier_mismatch_1
+dEQP-GLES31.functional.shaders.linkage.uniform.block.layout_qualifier_mismatch_2
+dEQP-GLES31.functional.shaders.linkage.uniform.block.layout_qualifier_mismatch_3
+dEQP-GLES31.functional.shaders.linkage.uniform.block.layout_qualifier_mismatch_4
+dEQP-GLES31.functional.shaders.linkage.uniform.block.layout_qualifier_mismatch_5
 dEQP-GLES31.functional.shaders.builtin_constants.core.max_vertex_attribs
 dEQP-GLES31.functional.shaders.builtin_constants.core.max_vertex_uniform_vectors
 dEQP-GLES31.functional.shaders.builtin_constants.core.max_vertex_output_vectors
@@ -4019,6 +4064,9 @@
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.lines_4_samples_dfdx
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.lines_4_samples_dfdy
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.lines_4_samples_fwidth
+dEQP-GLES31.functional.shaders.helper_invocation.derivate.wide_lines_4_samples_dfdx
+dEQP-GLES31.functional.shaders.helper_invocation.derivate.wide_lines_4_samples_dfdy
+dEQP-GLES31.functional.shaders.helper_invocation.derivate.wide_lines_4_samples_fwidth
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.points_4_samples_dfdx
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.points_4_samples_dfdy
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.points_4_samples_fwidth
@@ -4031,6 +4079,9 @@
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.lines_8_samples_dfdx
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.lines_8_samples_dfdy
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.lines_8_samples_fwidth
+dEQP-GLES31.functional.shaders.helper_invocation.derivate.wide_lines_8_samples_dfdx
+dEQP-GLES31.functional.shaders.helper_invocation.derivate.wide_lines_8_samples_dfdy
+dEQP-GLES31.functional.shaders.helper_invocation.derivate.wide_lines_8_samples_fwidth
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.points_8_samples_dfdx
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.points_8_samples_dfdy
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.points_8_samples_fwidth
@@ -4043,6 +4094,9 @@
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.lines_max_samples_dfdx
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.lines_max_samples_dfdy
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.lines_max_samples_fwidth
+dEQP-GLES31.functional.shaders.helper_invocation.derivate.wide_lines_max_samples_dfdx
+dEQP-GLES31.functional.shaders.helper_invocation.derivate.wide_lines_max_samples_dfdy
+dEQP-GLES31.functional.shaders.helper_invocation.derivate.wide_lines_max_samples_fwidth
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.points_max_samples_dfdx
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.points_max_samples_dfdy
 dEQP-GLES31.functional.shaders.helper_invocation.derivate.points_max_samples_fwidth
@@ -21724,36 +21778,21 @@
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_primitive.vertex_tessellation_fragment.fbo
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_primitive.vertex_tessellation_geometry_fragment.default_framebuffer
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_primitive.vertex_tessellation_geometry_fragment.fbo
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
+dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.fbo_bbox_equal
+dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.fbo_bbox_larger
+dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.fbo_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_smaller
@@ -21801,36 +21840,21 @@
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_primitive.vertex_tessellation_fragment.fbo
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_primitive.vertex_tessellation_geometry_fragment.default_framebuffer
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_primitive.vertex_tessellation_geometry_fragment.fbo
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
+dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.fbo_bbox_equal
+dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.fbo_bbox_larger
+dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.fbo_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_smaller
@@ -22012,6 +22036,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32i.texture2d_to_cubemap
@@ -22032,6 +22060,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32ui.texture2d_to_cubemap
@@ -22052,6 +22085,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32i_rgba32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32f.texture2d_to_cubemap
@@ -22068,6 +22106,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32i.texture2d_to_cubemap
@@ -22088,6 +22130,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32ui.texture2d_to_cubemap
@@ -22108,6 +22155,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_128_bits.rgba32ui_rgba32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_96_bits.rgb32f_rgb32f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_96_bits.rgb32f_rgb32f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_96_bits.rgb32f_rgb32f.texture2d_to_cubemap
@@ -22364,6 +22416,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32i.texture2d_to_cubemap
@@ -22384,6 +22440,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32ui.texture2d_to_cubemap
@@ -22404,6 +22465,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rg32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16f.texture2d_to_cubemap
@@ -22420,6 +22486,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16i.texture2d_to_cubemap
@@ -22440,6 +22510,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16ui.texture2d_to_cubemap
@@ -22460,6 +22535,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32i_rgba16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32f.texture2d_to_cubemap
@@ -22476,6 +22556,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32i.texture2d_to_cubemap
@@ -22496,6 +22580,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32ui.texture2d_to_cubemap
@@ -22516,6 +22605,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rg32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16f.texture2d_to_cubemap
@@ -22532,6 +22626,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16i.texture2d_to_cubemap
@@ -22552,6 +22650,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16ui.texture2d_to_cubemap
@@ -22572,6 +22675,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rg32ui_rgba16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16f_rg32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16f_rg32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16f_rg32i.texture2d_to_cubemap
@@ -22684,6 +22792,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32i.texture2d_to_cubemap
@@ -22704,6 +22816,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32ui.texture2d_to_cubemap
@@ -22724,6 +22841,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rg32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16f.texture2d_to_cubemap
@@ -22740,6 +22862,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16i.texture2d_to_cubemap
@@ -22760,6 +22886,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16ui.texture2d_to_cubemap
@@ -22780,6 +22911,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16i_rgba16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32f.texture2d_to_cubemap
@@ -22796,6 +22932,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32i.texture2d_to_cubemap
@@ -22816,6 +22956,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32ui.texture2d_to_cubemap
@@ -22836,6 +22981,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rg32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16f.texture2d_to_cubemap
@@ -22852,6 +23002,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16i.texture2d_to_cubemap
@@ -22872,6 +23026,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16ui.texture2d_to_cubemap
@@ -22892,6 +23051,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_64_bits.rgba16ui_rgba16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_48_bits.rgb16f_rgb16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_48_bits.rgb16f_rgb16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_48_bits.rgb16f_rgb16f.texture2d_to_cubemap
@@ -23232,6 +23396,22 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_rgba8_snorm.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32f_srgb8_alpha8.texture2d_to_cubemap
@@ -23268,6 +23448,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32i.texture2d_to_cubemap
@@ -23288,6 +23472,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32ui.texture2d_to_cubemap
@@ -23308,6 +23497,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16f.texture2d_to_cubemap
@@ -23324,6 +23518,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16i.texture2d_to_cubemap
@@ -23344,6 +23542,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16ui.texture2d_to_cubemap
@@ -23364,6 +23567,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rg16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8.texture2d_to_cubemap
@@ -23384,6 +23592,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8i.texture2d_to_cubemap
@@ -23404,6 +23617,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8ui.texture2d_to_cubemap
@@ -23424,6 +23642,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r11f_g11f_b10f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r11f_g11f_b10f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r11f_g11f_b10f.texture2d_to_cubemap
@@ -23440,6 +23663,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r11f_g11f_b10f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r11f_g11f_b10f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r11f_g11f_b10f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r11f_g11f_b10f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r11f_g11f_b10f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r11f_g11f_b10f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_r11f_g11f_b10f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2ui.texture2d_to_cubemap
@@ -23460,6 +23687,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2.texture2d_to_cubemap
@@ -23480,6 +23712,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb10_a2.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgba8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_srgb8_alpha8.texture2d_to_cubemap
@@ -23500,6 +23757,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_srgb8_alpha8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_srgb8_alpha8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_srgb8_alpha8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_srgb8_alpha8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_srgb8_alpha8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_srgb8_alpha8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_srgb8_alpha8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_srgb8_alpha8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb9_e5.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb9_e5.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb9_e5.texture2d_to_cubemap
@@ -23516,6 +23778,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb9_e5.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb9_e5.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb9_e5.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb9_e5.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb9_e5.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb9_e5.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32i_rgb9_e5.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32f.texture2d_to_cubemap
@@ -23532,6 +23798,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32i.texture2d_to_cubemap
@@ -23552,6 +23822,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32ui.texture2d_to_cubemap
@@ -23572,6 +23847,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16f.texture2d_to_cubemap
@@ -23588,6 +23868,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16i.texture2d_to_cubemap
@@ -23608,6 +23892,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16ui.texture2d_to_cubemap
@@ -23628,6 +23917,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rg16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8.texture2d_to_cubemap
@@ -23648,6 +23942,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8i.texture2d_to_cubemap
@@ -23668,6 +23967,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8ui.texture2d_to_cubemap
@@ -23688,6 +23992,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r11f_g11f_b10f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r11f_g11f_b10f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r11f_g11f_b10f.texture2d_to_cubemap
@@ -23704,6 +24013,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r11f_g11f_b10f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r11f_g11f_b10f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r11f_g11f_b10f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r11f_g11f_b10f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r11f_g11f_b10f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r11f_g11f_b10f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_r11f_g11f_b10f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2ui.texture2d_to_cubemap
@@ -23724,6 +24037,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2.texture2d_to_cubemap
@@ -23744,6 +24062,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb10_a2.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgba8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_srgb8_alpha8.texture2d_to_cubemap
@@ -23764,6 +24107,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_srgb8_alpha8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_srgb8_alpha8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_srgb8_alpha8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_srgb8_alpha8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_srgb8_alpha8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_srgb8_alpha8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_srgb8_alpha8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_srgb8_alpha8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb9_e5.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb9_e5.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb9_e5.texture2d_to_cubemap
@@ -23780,6 +24128,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb9_e5.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb9_e5.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb9_e5.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb9_e5.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb9_e5.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb9_e5.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r32ui_rgb9_e5.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_r32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_r32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_r32i.texture2d_to_cubemap
@@ -23976,6 +24328,22 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_rgba8_snorm.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16f_srgb8_alpha8.texture2d_to_cubemap
@@ -24012,6 +24380,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32i.texture2d_to_cubemap
@@ -24032,6 +24404,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32ui.texture2d_to_cubemap
@@ -24052,6 +24429,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16f.texture2d_to_cubemap
@@ -24068,6 +24450,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16i.texture2d_to_cubemap
@@ -24088,6 +24474,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16ui.texture2d_to_cubemap
@@ -24108,6 +24499,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rg16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8.texture2d_to_cubemap
@@ -24128,6 +24524,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8i.texture2d_to_cubemap
@@ -24148,6 +24549,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8ui.texture2d_to_cubemap
@@ -24168,6 +24574,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r11f_g11f_b10f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r11f_g11f_b10f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r11f_g11f_b10f.texture2d_to_cubemap
@@ -24184,6 +24595,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r11f_g11f_b10f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r11f_g11f_b10f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r11f_g11f_b10f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r11f_g11f_b10f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r11f_g11f_b10f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r11f_g11f_b10f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_r11f_g11f_b10f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2ui.texture2d_to_cubemap
@@ -24204,6 +24619,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2.texture2d_to_cubemap
@@ -24224,6 +24644,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb10_a2.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgba8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_srgb8_alpha8.texture2d_to_cubemap
@@ -24244,6 +24689,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_srgb8_alpha8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_srgb8_alpha8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_srgb8_alpha8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_srgb8_alpha8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_srgb8_alpha8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_srgb8_alpha8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_srgb8_alpha8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_srgb8_alpha8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb9_e5.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb9_e5.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb9_e5.texture2d_to_cubemap
@@ -24260,6 +24710,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb9_e5.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb9_e5.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb9_e5.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb9_e5.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb9_e5.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb9_e5.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16i_rgb9_e5.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32f.texture2d_to_cubemap
@@ -24276,6 +24730,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32i.texture2d_to_cubemap
@@ -24296,6 +24754,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32ui.texture2d_to_cubemap
@@ -24316,6 +24779,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16f.texture2d_to_cubemap
@@ -24332,6 +24800,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16i.texture2d_to_cubemap
@@ -24352,6 +24824,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16ui.texture2d_to_cubemap
@@ -24372,6 +24849,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rg16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8.texture2d_to_cubemap
@@ -24392,6 +24874,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8i.texture2d_to_cubemap
@@ -24412,6 +24899,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8ui.texture2d_to_cubemap
@@ -24432,6 +24924,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r11f_g11f_b10f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r11f_g11f_b10f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r11f_g11f_b10f.texture2d_to_cubemap
@@ -24448,6 +24945,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r11f_g11f_b10f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r11f_g11f_b10f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r11f_g11f_b10f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r11f_g11f_b10f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r11f_g11f_b10f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r11f_g11f_b10f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_r11f_g11f_b10f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2ui.texture2d_to_cubemap
@@ -24468,6 +24969,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2.texture2d_to_cubemap
@@ -24488,6 +24994,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb10_a2.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgba8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_srgb8_alpha8.texture2d_to_cubemap
@@ -24508,6 +25039,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_srgb8_alpha8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_srgb8_alpha8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_srgb8_alpha8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_srgb8_alpha8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_srgb8_alpha8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_srgb8_alpha8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_srgb8_alpha8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_srgb8_alpha8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb9_e5.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb9_e5.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb9_e5.texture2d_to_cubemap
@@ -24524,6 +25060,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb9_e5.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb9_e5.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb9_e5.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb9_e5.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb9_e5.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb9_e5.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rg16ui_rgb9_e5.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32f.texture2d_to_cubemap
@@ -24540,6 +25080,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32i.texture2d_to_cubemap
@@ -24560,6 +25104,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32ui.texture2d_to_cubemap
@@ -24580,6 +25129,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16f.texture2d_to_cubemap
@@ -24596,6 +25150,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16i.texture2d_to_cubemap
@@ -24616,6 +25174,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16ui.texture2d_to_cubemap
@@ -24636,6 +25199,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rg16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8.texture2d_to_cubemap
@@ -24656,6 +25224,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8i.texture2d_to_cubemap
@@ -24676,6 +25249,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8ui.texture2d_to_cubemap
@@ -24696,6 +25274,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r11f_g11f_b10f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r11f_g11f_b10f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r11f_g11f_b10f.texture2d_to_cubemap
@@ -24712,6 +25295,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r11f_g11f_b10f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r11f_g11f_b10f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r11f_g11f_b10f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r11f_g11f_b10f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r11f_g11f_b10f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r11f_g11f_b10f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_r11f_g11f_b10f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2ui.texture2d_to_cubemap
@@ -24732,6 +25319,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2.texture2d_to_cubemap
@@ -24752,6 +25344,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb10_a2.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgba8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_srgb8_alpha8.texture2d_to_cubemap
@@ -24772,6 +25389,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_srgb8_alpha8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_srgb8_alpha8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_srgb8_alpha8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_srgb8_alpha8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_srgb8_alpha8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_srgb8_alpha8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_srgb8_alpha8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_srgb8_alpha8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb9_e5.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb9_e5.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb9_e5.texture2d_to_cubemap
@@ -24788,6 +25410,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb9_e5.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb9_e5.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb9_e5.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb9_e5.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb9_e5.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb9_e5.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_rgb9_e5.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32f.texture2d_to_cubemap
@@ -24804,6 +25430,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32i.texture2d_to_cubemap
@@ -24824,6 +25454,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32ui.texture2d_to_cubemap
@@ -24844,6 +25479,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16f.texture2d_to_cubemap
@@ -24860,6 +25500,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16i.texture2d_to_cubemap
@@ -24880,6 +25524,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16ui.texture2d_to_cubemap
@@ -24900,6 +25549,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rg16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8.texture2d_to_cubemap
@@ -24920,6 +25574,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8i.texture2d_to_cubemap
@@ -24940,6 +25599,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8ui.texture2d_to_cubemap
@@ -24960,6 +25624,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r11f_g11f_b10f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r11f_g11f_b10f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r11f_g11f_b10f.texture2d_to_cubemap
@@ -24976,6 +25645,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r11f_g11f_b10f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r11f_g11f_b10f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r11f_g11f_b10f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r11f_g11f_b10f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r11f_g11f_b10f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r11f_g11f_b10f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_r11f_g11f_b10f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2ui.texture2d_to_cubemap
@@ -24996,6 +25669,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2.texture2d_to_cubemap
@@ -25016,6 +25694,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb10_a2.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgba8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_srgb8_alpha8.texture2d_to_cubemap
@@ -25036,6 +25739,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_srgb8_alpha8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_srgb8_alpha8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_srgb8_alpha8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_srgb8_alpha8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_srgb8_alpha8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_srgb8_alpha8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_srgb8_alpha8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_srgb8_alpha8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb9_e5.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb9_e5.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb9_e5.texture2d_to_cubemap
@@ -25052,6 +25760,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb9_e5.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb9_e5.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb9_e5.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb9_e5.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb9_e5.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb9_e5.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8i_rgb9_e5.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32f.texture2d_to_cubemap
@@ -25068,6 +25780,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32i.texture2d_to_cubemap
@@ -25088,6 +25804,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32ui.texture2d_to_cubemap
@@ -25108,6 +25829,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16f.texture2d_to_cubemap
@@ -25124,6 +25850,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16i.texture2d_to_cubemap
@@ -25144,6 +25874,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16ui.texture2d_to_cubemap
@@ -25164,6 +25899,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rg16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8.texture2d_to_cubemap
@@ -25184,6 +25924,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8i.texture2d_to_cubemap
@@ -25204,6 +25949,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8ui.texture2d_to_cubemap
@@ -25224,6 +25974,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r11f_g11f_b10f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r11f_g11f_b10f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r11f_g11f_b10f.texture2d_to_cubemap
@@ -25240,6 +25995,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r11f_g11f_b10f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r11f_g11f_b10f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r11f_g11f_b10f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r11f_g11f_b10f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r11f_g11f_b10f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r11f_g11f_b10f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_r11f_g11f_b10f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2ui.texture2d_to_cubemap
@@ -25260,6 +26019,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2.texture2d_to_cubemap
@@ -25280,6 +26044,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb10_a2.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgba8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_srgb8_alpha8.texture2d_to_cubemap
@@ -25300,6 +26089,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_srgb8_alpha8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_srgb8_alpha8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_srgb8_alpha8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_srgb8_alpha8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_srgb8_alpha8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_srgb8_alpha8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_srgb8_alpha8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_srgb8_alpha8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb9_e5.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb9_e5.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb9_e5.texture2d_to_cubemap
@@ -25316,6 +26110,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb9_e5.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb9_e5.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb9_e5.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb9_e5.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb9_e5.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb9_e5.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8ui_rgb9_e5.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_r32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_r32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_r32i.texture2d_to_cubemap
@@ -25512,6 +26310,22 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_rgba8_snorm.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.r11f_g11f_b10f_srgb8_alpha8.texture2d_to_cubemap
@@ -25548,6 +26362,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32i.texture2d_to_cubemap
@@ -25568,6 +26386,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32ui.texture2d_to_cubemap
@@ -25588,6 +26411,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16f.texture2d_to_cubemap
@@ -25604,6 +26432,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16i.texture2d_to_cubemap
@@ -25624,6 +26456,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16ui.texture2d_to_cubemap
@@ -25644,6 +26481,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rg16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8.texture2d_to_cubemap
@@ -25664,6 +26506,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8i.texture2d_to_cubemap
@@ -25684,6 +26531,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8ui.texture2d_to_cubemap
@@ -25704,6 +26556,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r11f_g11f_b10f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r11f_g11f_b10f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r11f_g11f_b10f.texture2d_to_cubemap
@@ -25720,6 +26577,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r11f_g11f_b10f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r11f_g11f_b10f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r11f_g11f_b10f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r11f_g11f_b10f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r11f_g11f_b10f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r11f_g11f_b10f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_r11f_g11f_b10f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2ui.texture2d_to_cubemap
@@ -25740,6 +26601,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2.texture2d_to_cubemap
@@ -25760,6 +26626,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb10_a2.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgba8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_srgb8_alpha8.texture2d_to_cubemap
@@ -25780,6 +26671,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_srgb8_alpha8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_srgb8_alpha8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_srgb8_alpha8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_srgb8_alpha8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_srgb8_alpha8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_srgb8_alpha8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_srgb8_alpha8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_srgb8_alpha8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb9_e5.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb9_e5.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb9_e5.texture2d_to_cubemap
@@ -25796,6 +26692,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb9_e5.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb9_e5.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb9_e5.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb9_e5.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb9_e5.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb9_e5.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2ui_rgb9_e5.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32f.texture2d_to_cubemap
@@ -25812,6 +26712,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32i.texture2d_to_cubemap
@@ -25832,6 +26736,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32ui.texture2d_to_cubemap
@@ -25852,6 +26761,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16f.texture2d_to_cubemap
@@ -25868,6 +26782,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16i.texture2d_to_cubemap
@@ -25888,6 +26806,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16ui.texture2d_to_cubemap
@@ -25908,6 +26831,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rg16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8.texture2d_to_cubemap
@@ -25928,6 +26856,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8i.texture2d_to_cubemap
@@ -25948,6 +26881,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8ui.texture2d_to_cubemap
@@ -25968,6 +26906,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r11f_g11f_b10f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r11f_g11f_b10f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r11f_g11f_b10f.texture2d_to_cubemap
@@ -25984,6 +26927,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r11f_g11f_b10f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r11f_g11f_b10f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r11f_g11f_b10f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r11f_g11f_b10f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r11f_g11f_b10f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r11f_g11f_b10f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_r11f_g11f_b10f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2ui.texture2d_to_cubemap
@@ -26004,6 +26951,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2.texture2d_to_cubemap
@@ -26024,6 +26976,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb10_a2.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgba8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_srgb8_alpha8.texture2d_to_cubemap
@@ -26044,6 +27021,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_srgb8_alpha8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_srgb8_alpha8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_srgb8_alpha8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_srgb8_alpha8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_srgb8_alpha8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_srgb8_alpha8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_srgb8_alpha8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_srgb8_alpha8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb9_e5.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb9_e5.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb9_e5.texture2d_to_cubemap
@@ -26060,6 +27042,290 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb9_e5.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb9_e5.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb9_e5.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb9_e5.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb9_e5.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb9_e5.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb10_a2_rgb9_e5.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rg16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_r11f_g11f_b10f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgba8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_srgb8_alpha8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_rgb9_e5.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32f.texture2d_to_cubemap
@@ -26076,6 +27342,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32i.texture2d_to_cubemap
@@ -26096,6 +27366,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32ui.texture2d_to_cubemap
@@ -26116,6 +27391,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r32ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16f.texture2d_to_cubemap
@@ -26132,6 +27412,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16i.texture2d_to_cubemap
@@ -26152,6 +27436,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16ui.texture2d_to_cubemap
@@ -26172,6 +27461,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rg16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8.texture2d_to_cubemap
@@ -26192,6 +27486,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8i.texture2d_to_cubemap
@@ -26212,6 +27511,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8ui.texture2d_to_cubemap
@@ -26232,6 +27536,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r11f_g11f_b10f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r11f_g11f_b10f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r11f_g11f_b10f.texture2d_to_cubemap
@@ -26248,6 +27557,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r11f_g11f_b10f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r11f_g11f_b10f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r11f_g11f_b10f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r11f_g11f_b10f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r11f_g11f_b10f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r11f_g11f_b10f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_r11f_g11f_b10f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2ui.texture2d_to_cubemap
@@ -26268,6 +27581,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2.texture2d_to_cubemap
@@ -26288,6 +27606,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb10_a2.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgba8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_srgb8_alpha8.texture2d_to_cubemap
@@ -26308,6 +27651,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_srgb8_alpha8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_srgb8_alpha8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_srgb8_alpha8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_srgb8_alpha8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_srgb8_alpha8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_srgb8_alpha8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_srgb8_alpha8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_srgb8_alpha8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb9_e5.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb9_e5.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb9_e5.texture2d_to_cubemap
@@ -26324,6 +27672,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb9_e5.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb9_e5.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb9_e5.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb9_e5.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb9_e5.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb9_e5.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.srgb8_alpha8_rgb9_e5.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_r32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_r32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_r32i.texture2d_to_cubemap
@@ -26504,6 +27856,22 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgb10_a2.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgb10_a2.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgb10_a2.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_rgba8_snorm.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_srgb8_alpha8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_srgb8_alpha8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgb9_e5_srgb8_alpha8.texture2d_to_cubemap
@@ -26560,6 +27928,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8i.texture2d_to_cubemap
@@ -26576,6 +27949,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8i.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8i.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8i.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8ui.texture2d_to_cubemap
@@ -26592,6 +27969,30 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8ui.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8ui.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_rgb8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_srgb8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_srgb8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_srgb8.texture2d_to_cubemap
@@ -26608,6 +28009,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_srgb8.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_srgb8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_srgb8.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_srgb8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_srgb8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_srgb8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_srgb8.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8.texture2d_to_cubemap
@@ -26660,6 +28065,22 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8ui.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8ui.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_rgb8_snorm.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_srgb8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_srgb8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8i_srgb8.texture2d_to_cubemap
@@ -26728,6 +28149,22 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8ui.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8ui.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_rgb8_snorm.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_srgb8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_srgb8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_srgb8.texture2d_to_cubemap
@@ -26744,6 +28181,90 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_srgb8.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_srgb8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8ui_srgb8.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8i.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8ui.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_rgb8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_srgb8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8.texture2d_to_cubemap
@@ -26796,6 +28317,22 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8ui.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8ui.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_rgb8_snorm.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_srgb8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_srgb8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.srgb8_srgb8.texture2d_to_cubemap
@@ -26928,6 +28465,22 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16f_rg8_snorm.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16f.texture2d_to_cubemap
@@ -26944,6 +28497,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16i.texture2d_to_cubemap
@@ -26964,6 +28521,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16ui.texture2d_to_cubemap
@@ -26984,6 +28546,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_r16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8.texture2d_to_cubemap
@@ -27004,6 +28571,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8i.texture2d_to_cubemap
@@ -27024,6 +28596,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8ui.texture2d_to_cubemap
@@ -27044,6 +28621,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8ui.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16i_rg8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16f.texture2d_to_cubemap
@@ -27060,6 +28662,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16i.texture2d_to_cubemap
@@ -27080,6 +28686,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16ui.texture2d_to_cubemap
@@ -27100,6 +28711,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_r16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8.texture2d_to_cubemap
@@ -27120,6 +28736,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8i.texture2d_to_cubemap
@@ -27140,6 +28761,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8ui.texture2d_to_cubemap
@@ -27160,6 +28786,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8ui.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.r16ui_rg8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16f.texture2d_to_cubemap
@@ -27176,6 +28827,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16i.texture2d_to_cubemap
@@ -27196,6 +28851,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16ui.texture2d_to_cubemap
@@ -27216,6 +28876,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_r16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8.texture2d_to_cubemap
@@ -27236,6 +28901,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8i.texture2d_to_cubemap
@@ -27256,6 +28926,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8ui.texture2d_to_cubemap
@@ -27276,6 +28951,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8ui.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_rg8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16f.texture2d_to_cubemap
@@ -27292,6 +28992,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16i.texture2d_to_cubemap
@@ -27312,6 +29016,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16ui.texture2d_to_cubemap
@@ -27332,6 +29041,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_r16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8.texture2d_to_cubemap
@@ -27352,6 +29066,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8i.texture2d_to_cubemap
@@ -27372,6 +29091,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8ui.texture2d_to_cubemap
@@ -27392,6 +29116,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8ui.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8i_rg8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16f.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16f.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16f.texture2d_to_cubemap
@@ -27408,6 +29157,10 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16f.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16f.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16f.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16f.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16f.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16f.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16i.texture2d_to_cubemap
@@ -27428,6 +29181,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16ui.texture2d_to_cubemap
@@ -27448,6 +29206,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_r16ui.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8.texture2d_to_cubemap
@@ -27468,6 +29231,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8i.texture2d_to_cubemap
@@ -27488,6 +29256,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8ui.texture2d_to_cubemap
@@ -27508,6 +29281,163 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8ui.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8ui_rg8_snorm.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16f.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_r16ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_rg8_snorm.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8.texture2d_to_cubemap
@@ -27528,6 +29458,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8i.texture2d_to_cubemap
@@ -27548,6 +29483,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8ui.texture2d_to_cubemap
@@ -27568,6 +29508,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8ui.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_r8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8.texture2d_to_cubemap
@@ -27588,6 +29553,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8i.texture2d_to_cubemap
@@ -27608,6 +29578,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8ui.texture2d_to_cubemap
@@ -27628,6 +29603,31 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8ui.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8i_r8_snorm.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8.texture2d_to_cubemap
@@ -27648,6 +29648,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8i.texture2d_to_cubemap
@@ -27668,6 +29673,11 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8i.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8i.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8i.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8i.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8i.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8i.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8i.renderbuffer_to_renderbuffer
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8ui.texture2d_to_cubemap
@@ -27688,6 +29698,107 @@
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8ui.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8ui.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8ui.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8ui.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8ui.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8ui.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8ui.renderbuffer_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8ui_r8_snorm.renderbuffer_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8i.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture2d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture3d_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.cubemap_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8ui.texture2d_array_to_renderbuffer
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.texture2d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.texture2d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.texture2d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.texture2d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.texture3d_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.texture3d_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.texture3d_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.texture3d_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.cubemap_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.cubemap_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.cubemap_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.cubemap_to_texture2d_array
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.texture2d_array_to_texture2d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.texture2d_array_to_texture3d
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_r8_snorm.texture2d_array_to_texture2d_array
 dEQP-GLES31.functional.copy_image.compressed.viewclass_eac_r11.r11_eac_r11_eac.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.compressed.viewclass_eac_r11.r11_eac_r11_eac.texture2d_to_cubemap
 dEQP-GLES31.functional.copy_image.compressed.viewclass_eac_r11.r11_eac_r11_eac.cubemap_to_texture2d
@@ -29632,6 +31743,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba8_etc2_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba8_etc2_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba8_etc2_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba8_etc2_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba8_etc2_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba8_etc2_eac_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba8_etc2_eac_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba8_etc2_eac_rgba32ui.texture2d_to_cubemap
@@ -29650,6 +31763,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_etc2_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_etc2_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_etc2_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_etc2_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_etc2_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_etc2_eac_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_etc2_eac_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_etc2_eac_rgba32ui.texture2d_to_cubemap
@@ -29668,6 +31783,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rg11_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rg11_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rg11_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rg11_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rg11_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rg11_eac_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rg11_eac_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rg11_eac_rgba32ui.texture2d_to_cubemap
@@ -29686,6 +31803,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_signed_rg11_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_signed_rg11_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_signed_rg11_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_signed_rg11_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_signed_rg11_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.signed_rg11_eac_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.signed_rg11_eac_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.signed_rg11_eac_rgba32ui.texture2d_to_cubemap
@@ -29712,6 +31831,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_4x4_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_4x4_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_4x4_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_4x4_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_4x4_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_4x4_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_4x4_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_4x4_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_4x4_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_4x4_khr_rgba32ui.texture2d_to_cubemap
@@ -29748,6 +31871,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x4_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x4_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x4_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x4_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x4_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x4_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x4_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_5x4_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_5x4_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_5x4_khr_rgba32ui.texture2d_to_cubemap
@@ -29784,6 +31911,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_5x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_5x5_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_5x5_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_5x5_khr_rgba32ui.texture2d_to_cubemap
@@ -29820,6 +31951,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_6x5_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_6x5_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_6x5_khr_rgba32ui.texture2d_to_cubemap
@@ -29856,6 +31991,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x6_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x6_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x6_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x6_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x6_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x6_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_6x6_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_6x6_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_6x6_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_6x6_khr_rgba32ui.texture2d_to_cubemap
@@ -29892,6 +32031,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x5_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x5_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x5_khr_rgba32ui.texture2d_to_cubemap
@@ -29928,6 +32071,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x6_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x6_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x6_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x6_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x6_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x6_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x6_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x6_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x6_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x6_khr_rgba32ui.texture2d_to_cubemap
@@ -29964,6 +32111,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x8_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x8_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x8_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x8_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x8_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x8_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_8x8_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x8_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x8_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x8_khr_rgba32ui.texture2d_to_cubemap
@@ -30000,6 +32151,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x5_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x5_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x5_khr_rgba32ui.texture2d_to_cubemap
@@ -30036,6 +32191,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x6_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x6_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x6_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x6_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x6_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x6_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x6_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x6_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x6_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x6_khr_rgba32ui.texture2d_to_cubemap
@@ -30072,6 +32231,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x8_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x8_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x8_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x8_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x8_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x8_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x8_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x8_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x8_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x8_khr_rgba32ui.texture2d_to_cubemap
@@ -30108,6 +32271,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x10_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x10_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x10_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x10_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x10_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x10_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_10x10_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x10_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x10_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x10_khr_rgba32ui.texture2d_to_cubemap
@@ -30144,6 +32311,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x10_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x10_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x10_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x10_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x10_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x10_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x10_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_12x10_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_12x10_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_12x10_khr_rgba32ui.texture2d_to_cubemap
@@ -30180,6 +32351,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x12_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x12_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x12_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x12_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x12_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x12_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_rgba_astc_12x12_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_12x12_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_12x12_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_12x12_khr_rgba32ui.texture2d_to_cubemap
@@ -30216,6 +32391,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_4x4_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_4x4_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_4x4_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_4x4_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_4x4_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_4x4_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_4x4_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_4x4_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_4x4_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_4x4_khr_rgba32ui.texture2d_to_cubemap
@@ -30252,6 +32431,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x4_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x4_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x4_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x4_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x4_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x4_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x4_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x4_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x4_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x4_khr_rgba32ui.texture2d_to_cubemap
@@ -30288,6 +32471,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_5x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x5_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x5_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x5_khr_rgba32ui.texture2d_to_cubemap
@@ -30324,6 +32511,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x5_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x5_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x5_khr_rgba32ui.texture2d_to_cubemap
@@ -30360,6 +32551,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x6_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x6_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x6_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x6_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x6_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x6_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_6x6_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x6_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x6_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x6_khr_rgba32ui.texture2d_to_cubemap
@@ -30396,6 +32591,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x5_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x5_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x5_khr_rgba32ui.texture2d_to_cubemap
@@ -30432,6 +32631,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x6_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x6_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x6_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x6_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x6_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x6_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x6_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x6_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x6_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x6_khr_rgba32ui.texture2d_to_cubemap
@@ -30468,6 +32671,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x8_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x8_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x8_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x8_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x8_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x8_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_8x8_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x8_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x8_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x8_khr_rgba32ui.texture2d_to_cubemap
@@ -30504,6 +32711,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x5_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x5_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x5_khr_rgba32ui.texture2d_to_cubemap
@@ -30540,6 +32751,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x6_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x6_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x6_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x6_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x6_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x6_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x6_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x6_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x6_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x6_khr_rgba32ui.texture2d_to_cubemap
@@ -30576,6 +32791,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x8_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x8_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x8_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x8_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x8_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x8_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x8_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x8_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x8_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x8_khr_rgba32ui.texture2d_to_cubemap
@@ -30612,6 +32831,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x10_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x10_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x10_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x10_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x10_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x10_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_10x10_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x10_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x10_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x10_khr_rgba32ui.texture2d_to_cubemap
@@ -30648,6 +32871,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x10_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x10_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x10_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x10_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x10_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x10_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x10_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x10_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x10_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x10_khr_rgba32ui.texture2d_to_cubemap
@@ -30684,6 +32911,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x12_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x12_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x12_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x12_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x12_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x12_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32ui_srgb8_alpha8_astc_12x12_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x12_khr_rgba32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x12_khr_rgba32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x12_khr_rgba32ui.texture2d_to_cubemap
@@ -30712,6 +32943,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba8_etc2_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba8_etc2_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba8_etc2_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba8_etc2_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba8_etc2_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba8_etc2_eac_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba8_etc2_eac_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba8_etc2_eac_rgba32i.texture2d_to_cubemap
@@ -30730,6 +32963,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_etc2_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_etc2_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_etc2_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_etc2_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_etc2_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_etc2_eac_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_etc2_eac_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_etc2_eac_rgba32i.texture2d_to_cubemap
@@ -30748,6 +32983,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rg11_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rg11_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rg11_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rg11_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rg11_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rg11_eac_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rg11_eac_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rg11_eac_rgba32i.texture2d_to_cubemap
@@ -30766,6 +33003,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_signed_rg11_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_signed_rg11_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_signed_rg11_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_signed_rg11_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_signed_rg11_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.signed_rg11_eac_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.signed_rg11_eac_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.signed_rg11_eac_rgba32i.texture2d_to_cubemap
@@ -30792,6 +33031,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_4x4_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_4x4_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_4x4_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_4x4_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_4x4_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_4x4_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_4x4_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_4x4_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_4x4_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_4x4_khr_rgba32i.texture2d_to_cubemap
@@ -30828,6 +33071,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x4_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x4_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x4_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x4_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x4_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x4_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x4_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_5x4_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_5x4_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_5x4_khr_rgba32i.texture2d_to_cubemap
@@ -30864,6 +33111,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_5x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_5x5_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_5x5_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_5x5_khr_rgba32i.texture2d_to_cubemap
@@ -30900,6 +33151,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_6x5_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_6x5_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_6x5_khr_rgba32i.texture2d_to_cubemap
@@ -30936,6 +33191,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x6_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x6_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x6_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x6_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x6_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x6_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_6x6_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_6x6_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_6x6_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_6x6_khr_rgba32i.texture2d_to_cubemap
@@ -30972,6 +33231,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x5_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x5_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x5_khr_rgba32i.texture2d_to_cubemap
@@ -31008,6 +33271,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x6_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x6_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x6_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x6_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x6_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x6_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x6_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x6_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x6_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x6_khr_rgba32i.texture2d_to_cubemap
@@ -31044,6 +33311,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x8_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x8_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x8_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x8_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x8_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x8_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_8x8_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x8_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x8_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_8x8_khr_rgba32i.texture2d_to_cubemap
@@ -31080,6 +33351,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x5_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x5_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x5_khr_rgba32i.texture2d_to_cubemap
@@ -31116,6 +33391,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x6_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x6_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x6_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x6_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x6_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x6_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x6_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x6_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x6_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x6_khr_rgba32i.texture2d_to_cubemap
@@ -31152,6 +33431,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x8_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x8_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x8_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x8_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x8_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x8_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x8_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x8_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x8_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x8_khr_rgba32i.texture2d_to_cubemap
@@ -31188,6 +33471,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x10_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x10_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x10_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x10_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x10_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x10_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_10x10_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x10_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x10_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_10x10_khr_rgba32i.texture2d_to_cubemap
@@ -31224,6 +33511,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x10_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x10_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x10_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x10_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x10_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x10_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x10_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_12x10_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_12x10_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_12x10_khr_rgba32i.texture2d_to_cubemap
@@ -31260,6 +33551,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x12_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x12_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x12_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x12_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x12_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x12_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_rgba_astc_12x12_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_12x12_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_12x12_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba_astc_12x12_khr_rgba32i.texture2d_to_cubemap
@@ -31296,6 +33591,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_4x4_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_4x4_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_4x4_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_4x4_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_4x4_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_4x4_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_4x4_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_4x4_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_4x4_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_4x4_khr_rgba32i.texture2d_to_cubemap
@@ -31332,6 +33631,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x4_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x4_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x4_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x4_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x4_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x4_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x4_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x4_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x4_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x4_khr_rgba32i.texture2d_to_cubemap
@@ -31368,6 +33671,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_5x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x5_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x5_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_5x5_khr_rgba32i.texture2d_to_cubemap
@@ -31404,6 +33711,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x5_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x5_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x5_khr_rgba32i.texture2d_to_cubemap
@@ -31440,6 +33751,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x6_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x6_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x6_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x6_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x6_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x6_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_6x6_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x6_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x6_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_6x6_khr_rgba32i.texture2d_to_cubemap
@@ -31476,6 +33791,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x5_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x5_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x5_khr_rgba32i.texture2d_to_cubemap
@@ -31512,6 +33831,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x6_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x6_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x6_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x6_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x6_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x6_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x6_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x6_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x6_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x6_khr_rgba32i.texture2d_to_cubemap
@@ -31548,6 +33871,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x8_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x8_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x8_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x8_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x8_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x8_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_8x8_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x8_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x8_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_8x8_khr_rgba32i.texture2d_to_cubemap
@@ -31584,6 +33911,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x5_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x5_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x5_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x5_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x5_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x5_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x5_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x5_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x5_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x5_khr_rgba32i.texture2d_to_cubemap
@@ -31620,6 +33951,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x6_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x6_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x6_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x6_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x6_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x6_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x6_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x6_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x6_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x6_khr_rgba32i.texture2d_to_cubemap
@@ -31656,6 +33991,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x8_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x8_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x8_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x8_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x8_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x8_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x8_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x8_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x8_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x8_khr_rgba32i.texture2d_to_cubemap
@@ -31692,6 +34031,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x10_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x10_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x10_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x10_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x10_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x10_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_10x10_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x10_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x10_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_10x10_khr_rgba32i.texture2d_to_cubemap
@@ -31728,6 +34071,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x10_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x10_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x10_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x10_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x10_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x10_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x10_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x10_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x10_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x10_khr_rgba32i.texture2d_to_cubemap
@@ -31764,6 +34111,10 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x12_khr.texture2d_array_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x12_khr.texture2d_array_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x12_khr.texture2d_array_to_texture2d_array
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x12_khr.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x12_khr.renderbuffer_to_texture3d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x12_khr.renderbuffer_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.rgba32i_srgb8_alpha8_astc_12x12_khr.renderbuffer_to_texture2d_array
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x12_khr_rgba32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x12_khr_rgba32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_128_bits_mixed.srgb8_alpha8_astc_12x12_khr_rgba32i.texture2d_to_cubemap
@@ -31824,6 +34175,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16ui_r11_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16ui_r11_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16ui_r11_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16ui_r11_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16ui_r11_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.r11_eac_rgba16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.r11_eac_rgba16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.r11_eac_rgba16ui.texture2d_to_cubemap
@@ -31842,6 +34195,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16ui_signed_r11_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16ui_signed_r11_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16ui_signed_r11_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16ui_signed_r11_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16ui_signed_r11_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rgba16ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rgba16ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rgba16ui.texture2d_to_cubemap
@@ -31860,6 +34215,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16i_r11_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16i_r11_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16i_r11_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16i_r11_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16i_r11_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.r11_eac_rgba16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.r11_eac_rgba16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.r11_eac_rgba16i.texture2d_to_cubemap
@@ -31878,6 +34235,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16i_signed_r11_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16i_signed_r11_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16i_signed_r11_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16i_signed_r11_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rgba16i_signed_r11_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rgba16i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rgba16i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rgba16i.texture2d_to_cubemap
@@ -31928,6 +34287,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32ui_r11_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32ui_r11_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32ui_r11_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32ui_r11_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32ui_r11_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.r11_eac_rg32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.r11_eac_rg32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.r11_eac_rg32ui.texture2d_to_cubemap
@@ -31946,6 +34307,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32ui_signed_r11_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32ui_signed_r11_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32ui_signed_r11_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32ui_signed_r11_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32ui_signed_r11_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rg32ui.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rg32ui.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rg32ui.texture2d_to_cubemap
@@ -31964,6 +34327,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32i_r11_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32i_r11_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32i_r11_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32i_r11_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32i_r11_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.r11_eac_rg32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.r11_eac_rg32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.r11_eac_rg32i.texture2d_to_cubemap
@@ -31982,6 +34347,8 @@
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32i_signed_r11_eac.cubemap_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32i_signed_r11_eac.texture2d_array_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32i_signed_r11_eac.texture2d_array_to_cubemap
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32i_signed_r11_eac.renderbuffer_to_texture2d
+dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.rg32i_signed_r11_eac.renderbuffer_to_cubemap
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rg32i.texture2d_to_texture2d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rg32i.texture2d_to_texture3d
 dEQP-GLES31.functional.copy_image.mixed.viewclass_64_bits_mixed.signed_r11_eac_rg32i.texture2d_to_cubemap
diff --git a/android/cts/master/gles31-multisample.txt b/android/cts/master/gles31-multisample.txt
index 721e97b..d78f3f6 100644
--- a/android/cts/master/gles31-multisample.txt
+++ b/android/cts/master/gles31-multisample.txt
@@ -134,24 +134,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
@@ -170,24 +152,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
diff --git a/android/cts/master/gles31-rotate-landscape.txt b/android/cts/master/gles31-rotate-landscape.txt
index c98268f..42f000f 100644
--- a/android/cts/master/gles31-rotate-landscape.txt
+++ b/android/cts/master/gles31-rotate-landscape.txt
@@ -47,24 +47,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
@@ -83,24 +65,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
diff --git a/android/cts/master/gles31-rotate-portrait.txt b/android/cts/master/gles31-rotate-portrait.txt
index c98268f..42f000f 100644
--- a/android/cts/master/gles31-rotate-portrait.txt
+++ b/android/cts/master/gles31-rotate-portrait.txt
@@ -47,24 +47,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
@@ -83,24 +65,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
diff --git a/android/cts/master/gles31-rotate-reverse-landscape.txt b/android/cts/master/gles31-rotate-reverse-landscape.txt
index c98268f..42f000f 100644
--- a/android/cts/master/gles31-rotate-reverse-landscape.txt
+++ b/android/cts/master/gles31-rotate-reverse-landscape.txt
@@ -47,24 +47,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
@@ -83,24 +65,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
diff --git a/android/cts/master/gles31-rotate-reverse-portrait.txt b/android/cts/master/gles31-rotate-reverse-portrait.txt
index c98268f..42f000f 100644
--- a/android/cts/master/gles31-rotate-reverse-portrait.txt
+++ b/android/cts/master/gles31-rotate-reverse-portrait.txt
@@ -47,24 +47,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
@@ -83,24 +65,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
diff --git a/android/cts/master/src/egl-internal-api-tests.txt b/android/cts/master/src/egl-internal-api-tests.txt
new file mode 100644
index 0000000..b923046
--- /dev/null
+++ b/android/cts/master/src/egl-internal-api-tests.txt
@@ -0,0 +1 @@
+dEQP-EGL.functional.image.*.*android_native*
diff --git a/android/cts/master/src/gles2-test-issues.txt b/android/cts/master/src/gles2-test-issues.txt
index 7553b1a..30c9b9a 100644
--- a/android/cts/master/src/gles2-test-issues.txt
+++ b/android/cts/master/src/gles2-test-issues.txt
@@ -4,3 +4,19 @@
 # Bug 22665757
 dEQP-GLES2.functional.fbo.completeness.renderable.renderbuffer.color0.rgb16f
 dEQP-GLES2.functional.fbo.completeness.renderable.texture.color0.rgb16f
+
+# Bug 25720457
+dEQP-GLES2.functional.texture.units.2_units.only_cube.9
+
+# Bug 25719831
+dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_etc1
+dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_rgba4444
+dEQP-GLES2.functional.texture.wrap.clamp_mirror_nearest_pot_etc1
+dEQP-GLES2.functional.texture.wrap.clamp_repeat_nearest_pot_etc1
+dEQP-GLES2.functional.texture.wrap.mirror_clamp_nearest_pot_etc1
+dEQP-GLES2.functional.texture.wrap.mirror_mirror_nearest_pot_etc1
+dEQP-GLES2.functional.texture.wrap.mirror_repeat_nearest_pot_etc1
+dEQP-GLES2.functional.texture.wrap.repeat_clamp_nearest_pot_etc1
+dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_etc1
+dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_rgba4444
+dEQP-GLES2.functional.texture.wrap.repeat_repeat_nearest_pot_etc1
diff --git a/android/cts/master/src/gles3-multisample-issues.txt b/android/cts/master/src/gles3-multisample-issues.txt
index ce134ff..6c53218 100644
--- a/android/cts/master/src/gles3-multisample-issues.txt
+++ b/android/cts/master/src/gles3-multisample-issues.txt
@@ -123,4 +123,3 @@
 dEQP-GLES3.functional.clipping.triangle_vertex.clip_two.clip_pos_x_pos_z_and_neg_y_neg_z
 dEQP-GLES3.functional.clipping.triangle_vertex.clip_two.clip_pos_y_and_neg_x_neg_y
 dEQP-GLES3.functional.clipping.triangle_vertex.clip_two.clip_pos_y_and_pos_x_neg_y
-dEQP-GLES3.functional.shaders.builtin_variable.fragcoord_w
diff --git a/android/cts/master/src/gles3-test-issues.txt b/android/cts/master/src/gles3-test-issues.txt
index e946eea..31566d3 100644
--- a/android/cts/master/src/gles3-test-issues.txt
+++ b/android/cts/master/src/gles3-test-issues.txt
@@ -53,12 +53,6 @@
 # Bug 20092191
 dEQP-GLES3.functional.fragment_ops.interaction.basic_shader.20
 
-# Bug 20340817
-dEQP-GLES3.functional.fbo.api.attachment_query_default_fbo
-
-# Bug 20340818
-dEQP-GLES3.functional.negative_api.texture.teximage3d
-
 # Bug 20699985
 dEQP-GLES3.functional.shaders.texture_functions.textureoffset.isampler3d_vertex
 
@@ -68,10 +62,6 @@
 # Bug 21620051
 dEQP-GLES3.functional.shaders.texture_functions.texture.sampler2darrayshadow_vertex
 
-# Bug 21725534
-dEQP-GLES3.functional.shaders.preprocessor.pragmas.pragma_unrecognized_debug_*
-dEQP-GLES3.functional.shaders.preprocessor.pragmas.pragma_unrecognized_token_*
-
 # Bug 21791303
 dEQP-GLES3.functional.uniform_api.random.23
 
@@ -97,9 +87,6 @@
 dEQP-GLES3.functional.shaders.builtin_functions.precision.acosh.highp_*
 dEQP-GLES3.functional.shaders.builtin_functions.precision.atanh.highp_*
 
-# Bug 22405868
-dEQP-GLES3.functional.shaders.texture_functions.textureprojlodoffset.sampler3d_float_vertex
-
 # Bug 22628471
 dEQP-GLES3.functional.shaders.derivate.dfdx.*_float.*_highp
 dEQP-GLES3.functional.shaders.derivate.dfdy.*_float.*_highp
@@ -152,17 +139,10 @@
 dEQP-GLES3.functional.shaders.builtin_functions.precision.reflect.*mediump*.scalar
 dEQP-GLES3.functional.shaders.builtin_functions.precision.reflect.*highp*.scalar
 
-# Bug 21326228
-dEQP-GLES3.functional.shaders.linkage.uniform.block.differing_precision
-
-# Bug 23219552
-dEQP-GLES3.functional.dither.disabled.gradient_white
-dEQP-GLES3.functional.dither.disabled.gradient_red
-dEQP-GLES3.functional.dither.disabled.gradient_green
-dEQP-GLES3.functional.dither.disabled.gradient_blue
-dEQP-GLES3.functional.dither.disabled.gradient_alpha
-
 # Bug 24068436
 dEQP-GLES3.functional.shaders.texture_functions.texturegrad.samplercubeshadow_vertex
 dEQP-GLES3.functional.shaders.texture_functions.texturegrad.samplercubeshadow_fragment
 dEQP-GLES3.functional.shaders.texture_functions.texturegrad.isamplercube_fragment
+
+# Bug 26651667
+dEQP-GLES3.functional.draw.draw_elements_instanced.indices.buffer.index_byte
diff --git a/android/cts/master/src/gles31-hw-issues.txt b/android/cts/master/src/gles31-hw-issues.txt
index ce0bebc..97eba78 100644
--- a/android/cts/master/src/gles31-hw-issues.txt
+++ b/android/cts/master/src/gles31-hw-issues.txt
@@ -67,12 +67,7 @@
 dEQP-GLES31.functional.shaders.builtin_functions.precision.tan.mediump_*
 
 # Bug 22713865
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.fbo_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.fbo_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.fbo_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.fbo_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.fbo_bbox_smaller
+dEQP-GLES31.functional.primitive_bounding_box.*lines*default_framebuffer_bbox*
 
 # Bug 23288315
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1000x1000_drawcount_1
diff --git a/android/cts/master/src/gles31-test-issues.txt b/android/cts/master/src/gles31-test-issues.txt
index 94fa510..f6e1cef 100644
--- a/android/cts/master/src/gles31-test-issues.txt
+++ b/android/cts/master/src/gles31-test-issues.txt
@@ -1,9 +1,6 @@
 # Bug 19641084
 dEQP-GLES31.functional.image_load_store.*.atomic.comp_swap_*_return_value
 
-# Bug 20698928
-dEQP-GLES31.functional.copy_image.*.*.*.renderbuffer_*
-
 # Bug 18323265
 dEQP-GLES31.functional.shaders.builtin_functions.precision.dot.highp_compute.vec4
 dEQP-GLES31.functional.shaders.builtin_functions.precision.smoothstep.mediump_compute.scalar
@@ -14,16 +11,6 @@
 # Bug 18033342
 dEQP-GLES31.functional.debug.negative_coverage.get_error.state.get_framebuffer_attachment_parameteriv
 
-# Bug 20302019
-dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.rgba8_snorm_*
-dEQP-GLES31.functional.copy_image.non_compressed.viewclass_32_bits.*_rgba8_snorm.*
-dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.rgb8_snorm_*
-dEQP-GLES31.functional.copy_image.non_compressed.viewclass_24_bits.*_rgb8_snorm.*
-dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.rg8_snorm_*
-dEQP-GLES31.functional.copy_image.non_compressed.viewclass_16_bits.*_rg8_snorm.*
-dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.r8_snorm_*
-dEQP-GLES31.functional.copy_image.non_compressed.viewclass_8_bits.*_r8_snorm.*
-
 # Bug 20453509
 dEQP-GLES31.functional.shaders.multisample_interpolation.interpolate_at_centroid.consistency.*
 dEQP-GLES31.functional.shaders.multisample_interpolation.interpolate_at_centroid.array_element.*
@@ -32,9 +19,6 @@
 dEQP-GLES31.functional.shaders.builtin_functions.precision.atan2.mediump_*
 dEQP-GLES31.functional.shaders.builtin_functions.precision.atan2.highp_*
 
-# Bug 22180525
-dEQP-GLES31.functional.shaders.helper_invocation.derivate.wide_lines_*_samples_*
-
 # Bug 22302334
 dEQP-GLES31.functional.shaders.builtin_functions.precision.acosh.highp_*
 dEQP-GLES31.functional.shaders.builtin_functions.precision.atanh.highp_*
diff --git a/android/cts/mnc/com.drawelements.deqp.gles2.xml b/android/cts/mnc/com.drawelements.deqp.gles2.xml
index 88e9dc8..076804f 100644
--- a/android/cts/mnc/com.drawelements.deqp.gles2.xml
+++ b/android/cts/mnc/com.drawelements.deqp.gles2.xml
@@ -26601,9 +26601,6 @@
 					<Test name="clamp_clamp_nearest_pot_rgb888">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="clamp_clamp_nearest_pot_rgba4444">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="clamp_clamp_nearest_pot_l8">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
@@ -26673,9 +26670,6 @@
 					<Test name="repeat_mirror_nearest_pot_rgb888">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="repeat_mirror_nearest_pot_rgba4444">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="repeat_mirror_nearest_pot_l8">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
@@ -26709,57 +26703,30 @@
 					<Test name="mirror_mirror_linear_pot_rgba8888">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="clamp_clamp_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="clamp_clamp_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="clamp_repeat_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="clamp_repeat_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="clamp_mirror_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="clamp_mirror_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="repeat_clamp_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="repeat_clamp_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="repeat_repeat_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="repeat_repeat_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="repeat_mirror_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="repeat_mirror_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="mirror_clamp_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="mirror_clamp_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="mirror_repeat_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="mirror_repeat_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
-					<Test name="mirror_mirror_nearest_pot_etc1">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="mirror_mirror_linear_pot_etc1">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
@@ -27863,9 +27830,6 @@
 							<Test name="8">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
-							<Test name="9">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							</Test>
 						</TestCase>
 						<TestCase name="mixed">
 							<Test name="3">
@@ -39925,21 +39889,6 @@
 			</TestSuite>
 			<TestSuite name="dither">
 				<TestCase name="disabled">
-					<Test name="gradient_white">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gradient_red">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gradient_green">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gradient_blue">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
-					<Test name="gradient_alpha">
-						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-					</Test>
 					<Test name="unicolored_quad_white">
 						<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 					</Test>
diff --git a/android/cts/mnc/com.drawelements.deqp.gles3.xml b/android/cts/mnc/com.drawelements.deqp.gles3.xml
index 171ca14..22b1593 100644
--- a/android/cts/mnc/com.drawelements.deqp.gles3.xml
+++ b/android/cts/mnc/com.drawelements.deqp.gles3.xml
@@ -134466,9 +134466,6 @@
 							</Test>
 						</TestCase>
 						<TestCase name="buffer">
-							<Test name="index_byte">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="index_short">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
diff --git a/android/cts/mnc/com.drawelements.deqp.gles31.xml b/android/cts/mnc/com.drawelements.deqp.gles31.xml
index a30add5..197b15a 100644
--- a/android/cts/mnc/com.drawelements.deqp.gles31.xml
+++ b/android/cts/mnc/com.drawelements.deqp.gles31.xml
@@ -17640,18 +17640,6 @@
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 						</Test>
-						<Test name="drawelements_separate_grid_1000x1000_drawcount_1">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
-						<Test name="drawelements_separate_grid_1200x1200_drawcount_1">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
-						<Test name="drawelements_separate_grid_1500x1500_drawcount_1">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
 						<Test name="drawelements_separate_grid_100x100_drawcount_8">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
@@ -17664,18 +17652,6 @@
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 						</Test>
-						<Test name="drawelements_separate_grid_1000x1000_drawcount_8">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
-						<Test name="drawelements_separate_grid_1200x1200_drawcount_8">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
-						<Test name="drawelements_separate_grid_1500x1500_drawcount_8">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
 						<Test name="drawelements_separate_grid_100x100_drawcount_200">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
@@ -17688,10 +17664,6 @@
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 						</Test>
-						<Test name="drawelements_separate_grid_1000x1000_drawcount_5000">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
 						<Test name="drawelements_combined_grid_100x100_drawcount_1">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
@@ -17704,18 +17676,6 @@
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 						</Test>
-						<Test name="drawelements_combined_grid_1000x1000_drawcount_1">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
-						<Test name="drawelements_combined_grid_1200x1200_drawcount_1">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
-						<Test name="drawelements_combined_grid_1500x1500_drawcount_1">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
 						<Test name="drawelements_combined_grid_100x100_drawcount_8">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
@@ -17728,18 +17688,6 @@
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 						</Test>
-						<Test name="drawelements_combined_grid_1000x1000_drawcount_8">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
-						<Test name="drawelements_combined_grid_1200x1200_drawcount_8">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
-						<Test name="drawelements_combined_grid_1500x1500_drawcount_8">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
 						<Test name="drawelements_combined_grid_100x100_drawcount_200">
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
@@ -17752,10 +17700,6 @@
 							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 						</Test>
-						<Test name="drawelements_combined_grid_1000x1000_drawcount_5000">
-							<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-							<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-						</Test>
 					</TestCase>
 				</TestSuite>
 				<TestCase name="random">
@@ -67595,30 +67539,6 @@
 				<TestSuite name="lines">
 					<TestSuite name="global_state">
 						<TestCase name="vertex_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -67630,30 +67550,6 @@
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_tessellation_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -67665,56 +67561,17 @@
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_geometry_fragment">
-							<Test name="default_framebuffer_bbox_equal">
+							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 							</Test>
-							<Test name="default_framebuffer_bbox_larger">
+							<Test name="fbo_bbox_larger">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
+							<Test name="fbo_bbox_smaller">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_tessellation_geometry_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -67728,30 +67585,6 @@
 					</TestSuite>
 					<TestSuite name="tessellation_set_per_draw">
 						<TestCase name="vertex_tessellation_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -67763,30 +67596,6 @@
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_tessellation_geometry_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -68054,30 +67863,6 @@
 				<TestSuite name="wide_lines">
 					<TestSuite name="global_state">
 						<TestCase name="vertex_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -68089,30 +67874,6 @@
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_tessellation_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -68124,56 +67885,17 @@
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_geometry_fragment">
-							<Test name="default_framebuffer_bbox_equal">
+							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 							</Test>
-							<Test name="default_framebuffer_bbox_larger">
+							<Test name="fbo_bbox_larger">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
+							<Test name="fbo_bbox_smaller">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_tessellation_geometry_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -68187,30 +67909,6 @@
 					</TestSuite>
 					<TestSuite name="tessellation_set_per_draw">
 						<TestCase name="vertex_tessellation_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
@@ -68222,30 +67920,6 @@
 							</Test>
 						</TestCase>
 						<TestCase name="vertex_tessellation_geometry_fragment">
-							<Test name="default_framebuffer_bbox_equal">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_larger">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
-							<Test name="default_framebuffer_bbox_smaller">
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="0" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="90" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="180" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms0" rotation="270" surfacetype="window"/>
-								<TestInstance glconfig="rgba8888d24s8ms4" rotation="unspecified" surfacetype="window"/>
-							</Test>
 							<Test name="fbo_bbox_equal">
 								<TestInstance glconfig="rgba8888d24s8ms0" rotation="unspecified" surfacetype="window"/>
 							</Test>
diff --git a/android/cts/mnc/gles2-master.txt b/android/cts/mnc/gles2-master.txt
index 4fa9c32..c85cca6 100644
--- a/android/cts/mnc/gles2-master.txt
+++ b/android/cts/mnc/gles2-master.txt
@@ -8675,7 +8675,6 @@
 dEQP-GLES2.functional.texture.size.cube.512x512_rgba8888_mipmap
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_rgb888
-dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_rgba4444
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_l8
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_npot_rgba8888
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_npot_rgb888
@@ -8699,7 +8698,6 @@
 dEQP-GLES2.functional.texture.wrap.repeat_repeat_linear_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_rgb888
-dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_rgba4444
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_l8
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_linear_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_linear_pot_rgb888
@@ -8711,23 +8709,14 @@
 dEQP-GLES2.functional.texture.wrap.mirror_repeat_linear_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.mirror_mirror_nearest_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.mirror_mirror_linear_pot_rgba8888
-dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.clamp_repeat_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.clamp_repeat_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.clamp_mirror_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.clamp_mirror_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.repeat_clamp_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.repeat_clamp_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.repeat_repeat_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.repeat_repeat_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.mirror_clamp_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.mirror_clamp_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.mirror_repeat_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.mirror_repeat_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.mirror_mirror_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.mirror_mirror_linear_pot_etc1
 dEQP-GLES2.functional.texture.filtering.2d.nearest_mipmap_nearest_linear_clamp_etc1
 dEQP-GLES2.functional.texture.filtering.2d.linear_mipmap_nearest_linear_clamp_etc1
@@ -9077,7 +9066,6 @@
 dEQP-GLES2.functional.texture.units.2_units.only_cube.6
 dEQP-GLES2.functional.texture.units.2_units.only_cube.7
 dEQP-GLES2.functional.texture.units.2_units.only_cube.8
-dEQP-GLES2.functional.texture.units.2_units.only_cube.9
 dEQP-GLES2.functional.texture.units.2_units.mixed.3
 dEQP-GLES2.functional.texture.units.2_units.mixed.4
 dEQP-GLES2.functional.texture.units.2_units.mixed.6
@@ -12975,11 +12963,6 @@
 dEQP-GLES2.functional.depth_range.compare.clamp_near
 dEQP-GLES2.functional.depth_range.compare.clamp_far
 dEQP-GLES2.functional.depth_range.compare.clamp_both
-dEQP-GLES2.functional.dither.disabled.gradient_white
-dEQP-GLES2.functional.dither.disabled.gradient_red
-dEQP-GLES2.functional.dither.disabled.gradient_green
-dEQP-GLES2.functional.dither.disabled.gradient_blue
-dEQP-GLES2.functional.dither.disabled.gradient_alpha
 dEQP-GLES2.functional.dither.disabled.unicolored_quad_white
 dEQP-GLES2.functional.dither.disabled.unicolored_quad_red
 dEQP-GLES2.functional.dither.disabled.unicolored_quad_green
diff --git a/android/cts/mnc/gles3-master.txt b/android/cts/mnc/gles3-master.txt
index 2be891b..7bbd4b8 100644
--- a/android/cts/mnc/gles3-master.txt
+++ b/android/cts/mnc/gles3-master.txt
@@ -40224,7 +40224,6 @@
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.user_ptr.index_int
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.unaligned_user_ptr.index_short
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.unaligned_user_ptr.index_int
-dEQP-GLES3.functional.draw.draw_elements_instanced.indices.buffer.index_byte
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.buffer.index_short
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.buffer.index_int
 dEQP-GLES3.functional.draw.draw_elements_instanced.points.single_attribute
diff --git a/android/cts/mnc/gles31-master.txt b/android/cts/mnc/gles31-master.txt
index 06a98fe..4a758a0 100644
--- a/android/cts/mnc/gles31-master.txt
+++ b/android/cts/mnc/gles31-master.txt
@@ -5498,35 +5498,21 @@
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_100x100_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_200x200_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_500x500_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1000x1000_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1200x1200_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1500x1500_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_100x100_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_200x200_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_500x500_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1000x1000_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1200x1200_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1500x1500_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_100x100_drawcount_200
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_200x200_drawcount_800
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_500x500_drawcount_2500
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1000x1000_drawcount_5000
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_100x100_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_200x200_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_500x500_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1000x1000_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1200x1200_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1500x1500_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_100x100_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_200x200_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_500x500_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1000x1000_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1200x1200_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1500x1500_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_100x100_drawcount_200
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_200x200_drawcount_800
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_500x500_drawcount_2500
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1000x1000_drawcount_5000
 dEQP-GLES31.functional.draw_indirect.random.0
 dEQP-GLES31.functional.draw_indirect.random.2
 dEQP-GLES31.functional.draw_indirect.random.3
@@ -20872,36 +20858,21 @@
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_primitive.vertex_tessellation_fragment.fbo
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_primitive.vertex_tessellation_geometry_fragment.default_framebuffer
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_primitive.vertex_tessellation_geometry_fragment.fbo
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
+dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.fbo_bbox_equal
+dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.fbo_bbox_larger
+dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.fbo_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_smaller
@@ -20949,36 +20920,21 @@
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_primitive.vertex_tessellation_fragment.fbo
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_primitive.vertex_tessellation_geometry_fragment.default_framebuffer
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_primitive.vertex_tessellation_geometry_fragment.fbo
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
+dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.fbo_bbox_equal
+dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.fbo_bbox_larger
+dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.fbo_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_smaller
diff --git a/android/cts/mnc/gles31-multisample.txt b/android/cts/mnc/gles31-multisample.txt
index 44d4428..d78f3f6 100644
--- a/android/cts/mnc/gles31-multisample.txt
+++ b/android/cts/mnc/gles31-multisample.txt
@@ -32,35 +32,21 @@
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_100x100_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_200x200_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_500x500_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1000x1000_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1200x1200_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1500x1500_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_100x100_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_200x200_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_500x500_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1000x1000_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1200x1200_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1500x1500_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_100x100_drawcount_200
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_200x200_drawcount_800
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_500x500_drawcount_2500
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1000x1000_drawcount_5000
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_100x100_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_200x200_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_500x500_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1000x1000_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1200x1200_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1500x1500_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_100x100_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_200x200_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_500x500_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1000x1000_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1200x1200_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1500x1500_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_100x100_drawcount_200
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_200x200_drawcount_800
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_500x500_drawcount_2500
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1000x1000_drawcount_5000
 dEQP-GLES31.functional.image_load_store.early_fragment_tests.no_early_fragment_tests_depth
 dEQP-GLES31.functional.image_load_store.early_fragment_tests.no_early_fragment_tests_stencil
 dEQP-GLES31.functional.image_load_store.early_fragment_tests.early_fragment_tests_depth
@@ -148,24 +134,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
@@ -184,24 +152,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
diff --git a/android/cts/mnc/gles31-rotate-landscape.txt b/android/cts/mnc/gles31-rotate-landscape.txt
index c98268f..42f000f 100644
--- a/android/cts/mnc/gles31-rotate-landscape.txt
+++ b/android/cts/mnc/gles31-rotate-landscape.txt
@@ -47,24 +47,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
@@ -83,24 +65,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
diff --git a/android/cts/mnc/gles31-rotate-portrait.txt b/android/cts/mnc/gles31-rotate-portrait.txt
index c98268f..42f000f 100644
--- a/android/cts/mnc/gles31-rotate-portrait.txt
+++ b/android/cts/mnc/gles31-rotate-portrait.txt
@@ -47,24 +47,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
@@ -83,24 +65,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
diff --git a/android/cts/mnc/gles31-rotate-reverse-landscape.txt b/android/cts/mnc/gles31-rotate-reverse-landscape.txt
index c98268f..42f000f 100644
--- a/android/cts/mnc/gles31-rotate-reverse-landscape.txt
+++ b/android/cts/mnc/gles31-rotate-reverse-landscape.txt
@@ -47,24 +47,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
@@ -83,24 +65,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
diff --git a/android/cts/mnc/gles31-rotate-reverse-portrait.txt b/android/cts/mnc/gles31-rotate-reverse-portrait.txt
index c98268f..42f000f 100644
--- a/android/cts/mnc/gles31-rotate-reverse-portrait.txt
+++ b/android/cts/mnc/gles31-rotate-reverse-portrait.txt
@@ -47,24 +47,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
@@ -83,24 +65,6 @@
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_points.global_state.vertex_fragment.default_framebuffer_bbox_smaller
diff --git a/android/cts/mnc/src/gles2-master.txt b/android/cts/mnc/src/gles2-master.txt
index 4fa9c32..c85cca6 100644
--- a/android/cts/mnc/src/gles2-master.txt
+++ b/android/cts/mnc/src/gles2-master.txt
@@ -8675,7 +8675,6 @@
 dEQP-GLES2.functional.texture.size.cube.512x512_rgba8888_mipmap
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_rgb888
-dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_rgba4444
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_l8
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_npot_rgba8888
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_npot_rgb888
@@ -8699,7 +8698,6 @@
 dEQP-GLES2.functional.texture.wrap.repeat_repeat_linear_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_rgb888
-dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_rgba4444
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_l8
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_linear_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_linear_pot_rgb888
@@ -8711,23 +8709,14 @@
 dEQP-GLES2.functional.texture.wrap.mirror_repeat_linear_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.mirror_mirror_nearest_pot_rgba8888
 dEQP-GLES2.functional.texture.wrap.mirror_mirror_linear_pot_rgba8888
-dEQP-GLES2.functional.texture.wrap.clamp_clamp_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.clamp_clamp_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.clamp_repeat_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.clamp_repeat_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.clamp_mirror_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.clamp_mirror_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.repeat_clamp_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.repeat_clamp_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.repeat_repeat_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.repeat_repeat_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.repeat_mirror_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.repeat_mirror_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.mirror_clamp_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.mirror_clamp_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.mirror_repeat_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.mirror_repeat_linear_pot_etc1
-dEQP-GLES2.functional.texture.wrap.mirror_mirror_nearest_pot_etc1
 dEQP-GLES2.functional.texture.wrap.mirror_mirror_linear_pot_etc1
 dEQP-GLES2.functional.texture.filtering.2d.nearest_mipmap_nearest_linear_clamp_etc1
 dEQP-GLES2.functional.texture.filtering.2d.linear_mipmap_nearest_linear_clamp_etc1
@@ -9077,7 +9066,6 @@
 dEQP-GLES2.functional.texture.units.2_units.only_cube.6
 dEQP-GLES2.functional.texture.units.2_units.only_cube.7
 dEQP-GLES2.functional.texture.units.2_units.only_cube.8
-dEQP-GLES2.functional.texture.units.2_units.only_cube.9
 dEQP-GLES2.functional.texture.units.2_units.mixed.3
 dEQP-GLES2.functional.texture.units.2_units.mixed.4
 dEQP-GLES2.functional.texture.units.2_units.mixed.6
@@ -12975,11 +12963,6 @@
 dEQP-GLES2.functional.depth_range.compare.clamp_near
 dEQP-GLES2.functional.depth_range.compare.clamp_far
 dEQP-GLES2.functional.depth_range.compare.clamp_both
-dEQP-GLES2.functional.dither.disabled.gradient_white
-dEQP-GLES2.functional.dither.disabled.gradient_red
-dEQP-GLES2.functional.dither.disabled.gradient_green
-dEQP-GLES2.functional.dither.disabled.gradient_blue
-dEQP-GLES2.functional.dither.disabled.gradient_alpha
 dEQP-GLES2.functional.dither.disabled.unicolored_quad_white
 dEQP-GLES2.functional.dither.disabled.unicolored_quad_red
 dEQP-GLES2.functional.dither.disabled.unicolored_quad_green
diff --git a/android/cts/mnc/src/gles3-master.txt b/android/cts/mnc/src/gles3-master.txt
index 2be891b..7bbd4b8 100644
--- a/android/cts/mnc/src/gles3-master.txt
+++ b/android/cts/mnc/src/gles3-master.txt
@@ -40224,7 +40224,6 @@
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.user_ptr.index_int
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.unaligned_user_ptr.index_short
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.unaligned_user_ptr.index_int
-dEQP-GLES3.functional.draw.draw_elements_instanced.indices.buffer.index_byte
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.buffer.index_short
 dEQP-GLES3.functional.draw.draw_elements_instanced.indices.buffer.index_int
 dEQP-GLES3.functional.draw.draw_elements_instanced.points.single_attribute
diff --git a/android/cts/mnc/src/gles31-master.txt b/android/cts/mnc/src/gles31-master.txt
index 06a98fe..4a758a0 100644
--- a/android/cts/mnc/src/gles31-master.txt
+++ b/android/cts/mnc/src/gles31-master.txt
@@ -5498,35 +5498,21 @@
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_100x100_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_200x200_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_500x500_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1000x1000_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1200x1200_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1500x1500_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_100x100_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_200x200_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_500x500_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1000x1000_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1200x1200_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1500x1500_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_100x100_drawcount_200
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_200x200_drawcount_800
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_500x500_drawcount_2500
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_separate_grid_1000x1000_drawcount_5000
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_100x100_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_200x200_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_500x500_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1000x1000_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1200x1200_drawcount_1
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1500x1500_drawcount_1
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_100x100_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_200x200_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_500x500_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1000x1000_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1200x1200_drawcount_8
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1500x1500_drawcount_8
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_100x100_drawcount_200
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_200x200_drawcount_800
 dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_500x500_drawcount_2500
-dEQP-GLES31.functional.draw_indirect.compute_interop.large.drawelements_combined_grid_1000x1000_drawcount_5000
 dEQP-GLES31.functional.draw_indirect.random.0
 dEQP-GLES31.functional.draw_indirect.random.2
 dEQP-GLES31.functional.draw_indirect.random.3
@@ -20872,36 +20858,21 @@
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_primitive.vertex_tessellation_fragment.fbo
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_primitive.vertex_tessellation_geometry_fragment.default_framebuffer
 dEQP-GLES31.functional.primitive_bounding_box.triangles.tessellation_set_per_primitive.vertex_tessellation_geometry_fragment.fbo
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
+dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.fbo_bbox_equal
+dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.fbo_bbox_larger
+dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_geometry_fragment.fbo_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_smaller
@@ -20949,36 +20920,21 @@
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_primitive.vertex_tessellation_fragment.fbo
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_primitive.vertex_tessellation_geometry_fragment.default_framebuffer
 dEQP-GLES31.functional.primitive_bounding_box.points.tessellation_set_per_primitive.vertex_tessellation_geometry_fragment.fbo
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.default_framebuffer_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
+dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.fbo_bbox_equal
+dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.fbo_bbox_larger
+dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_geometry_fragment.fbo_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.global_state.vertex_tessellation_geometry_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_fragment.fbo_bbox_smaller
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_equal
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_larger
-dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.default_framebuffer_bbox_smaller
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_equal
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_larger
 dEQP-GLES31.functional.primitive_bounding_box.wide_lines.tessellation_set_per_draw.vertex_tessellation_geometry_fragment.fbo_bbox_smaller
diff --git a/android/cts/runner/src/com/drawelements/deqp/runner/DeqpTestRunner.java b/android/cts/runner/src/com/drawelements/deqp/runner/DeqpTestRunner.java
index 79d8d13..edb5a1d 100644
--- a/android/cts/runner/src/com/drawelements/deqp/runner/DeqpTestRunner.java
+++ b/android/cts/runner/src/com/drawelements/deqp/runner/DeqpTestRunner.java
@@ -38,6 +38,7 @@
 import com.android.tradefed.testtype.IAbiReceiver;
 import com.android.tradefed.testtype.IDeviceTest;
 import com.android.tradefed.testtype.IRemoteTest;
+import com.android.tradefed.testtype.IShardableTest;
 import com.android.tradefed.testtype.ITestFilterReceiver;
 import com.android.tradefed.util.IRunUtil;
 import com.android.tradefed.util.RunInterruptedException;
@@ -72,7 +73,8 @@
  * Supports running drawElements Quality Program tests found under external/deqp.
  */
 @OptionClass(alias="deqp-test-runner")
-public class DeqpTestRunner implements IBuildReceiver, IDeviceTest, IRemoteTest, ITestFilterReceiver, IAbiReceiver {
+public class DeqpTestRunner implements IBuildReceiver, IDeviceTest, IRemoteTest,
+        ITestFilterReceiver, IAbiReceiver, IShardableTest {
 
     private static final String DEQP_ONDEVICE_APK = "com.drawelements.deqp.apk";
     private static final String DEQP_ONDEVICE_PKG = "com.drawelements.deqp";
@@ -90,17 +92,29 @@
 
     private static final int UNRESPOSIVE_CMD_TIMEOUT_MS = 60000; // one minute
 
-    @Option(name="deqp-package", description="Name of the deqp module used. Determines GLES version.", importance=Option.Importance.ALWAYS)
-    private String mDeqpPackage;
-    @Option(name="deqp-gl-config-name", description="GL render target config. See deqp documentation for syntax. ", importance=Option.Importance.ALWAYS)
-    private String mConfigName;
-    @Option(name="deqp-caselist-file", description="File listing the names of the cases to be run.", importance=Option.Importance.ALWAYS)
-    private String mCaselistFile;
-    @Option(name="deqp-screen-rotation", description="Screen orientation. Defaults to 'unspecified'", importance=Option.Importance.NEVER)
-    private String mScreenRotation = "unspecified";
-    @Option(name="deqp-surface-type", description="Surface type ('window', 'pbuffer', 'fbo'). Defaults to 'window'", importance=Option.Importance.NEVER)
-    private String mSurfaceType = "window";
+    // !NOTE: There's a static method copyOptions() for copying options during split.
+    // If you add state, update copyOptions() as appropriate!
 
+    @Option(name="deqp-package",
+            description="Name of the deqp module used. Determines GLES version.",
+            importance=Option.Importance.ALWAYS)
+    private String mDeqpPackage;
+    @Option(name="deqp-gl-config-name",
+            description="GL render target config. See deqp documentation for syntax.",
+            importance=Option.Importance.ALWAYS)
+    private String mConfigName;
+    @Option(name="deqp-caselist-file",
+            description="File listing the names of the cases to be run.",
+            importance=Option.Importance.ALWAYS)
+    private String mCaselistFile;
+    @Option(name="deqp-screen-rotation",
+            description="Screen orientation. Defaults to 'unspecified'",
+            importance=Option.Importance.NEVER)
+    private String mScreenRotation = "unspecified";
+    @Option(name="deqp-surface-type",
+            description="Surface type ('window', 'pbuffer', 'fbo'). Defaults to 'window'",
+            importance=Option.Importance.NEVER)
+    private String mSurfaceType = "window";
     @Option(name = "include-filter",
             description="Test include filter. '*' is zero or more letters. '.' has no special meaning.")
     private List<String> mIncludeFilters = new ArrayList<>();
@@ -121,11 +135,19 @@
     // When set will override the mCaselistFile for testing purposes.
     private Reader mCaselistReader = null;
 
-    private IRecovery mDeviceRecovery = new Recovery();
-    {
+    private IRecovery mDeviceRecovery = new Recovery(); {
         mDeviceRecovery.setSleepProvider(new SleepProvider());
     }
 
+    public DeqpTestRunner() {
+    }
+
+    private DeqpTestRunner(DeqpTestRunner optionTemplate,
+            Map<TestIdentifier, Set<BatchRunConfiguration>> tests) {
+        copyOptions(this, optionTemplate);
+        mTestInstances = tests;
+    }
+
     /**
      * @param abi the ABI to run the test on
      */
@@ -139,7 +161,7 @@
      */
     @Override
     public void setBuild(IBuildInfo buildInfo) {
-        setBuildHelper(new CompatibilityBuildHelper((IFolderBuildInfo)buildInfo));
+        setBuildHelper(new CompatibilityBuildHelper(buildInfo));
     }
 
     /**
@@ -1852,7 +1874,7 @@
         List<Pattern> includes = buildPatternList(includeFilters);
         List<Pattern> excludes = buildPatternList(excludeFilters);
 
-		List<TestIdentifier> testList = new ArrayList(tests.keySet());
+        List<TestIdentifier> testList = new ArrayList<>(tests.keySet());
         for (TestIdentifier test : testList) {
             // Remove test if it does not match includes or matches
             // excludes.
@@ -1867,12 +1889,11 @@
     }
 
     /**
-     * {@inheritDoc}
+     * Loads tests into mTestInstances based on the options. Assumes
+     * that no tests have been loaded for this instance before.
      */
-    @Override
-    public void run(ITestInvocationListener listener) throws DeviceNotAvailableException {
-        final Map<String, String> emptyMap = Collections.emptyMap();
-        final boolean isSupportedApi = !isOpenGlEsPackage() || isSupportedGles();
+    private void loadTests() {
+        if (mTestInstances != null) throw new AssertionError("Re-load of tests not supported");
 
         try {
             Reader reader = mCaselistReader;
@@ -1893,9 +1914,32 @@
         catch (IOException e) {
             CLog.w("Failed to close test list reader.");
         }
+        CLog.d("Filters");
+        for (String filter : mIncludeFilters) {
+            CLog.d("Include: %s", filter);
+        }
+        for (String filter : mExcludeFilters) {
+            CLog.d("Exclude: %s", filter);
+        }
+        CLog.i("Num tests before filtering: %d", mTestInstances.size());
         if (!mIncludeFilters.isEmpty() || !mExcludeFilters.isEmpty()) {
             filterTests(mTestInstances, mIncludeFilters, mExcludeFilters);
         }
+        CLog.i("Num tests after filtering: %d", mTestInstances.size());
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public void run(ITestInvocationListener listener) throws DeviceNotAvailableException {
+        final Map<String, String> emptyMap = Collections.emptyMap();
+        final boolean isSupportedApi = !isOpenGlEsPackage() || isSupportedGles();
+
+        // If sharded, split() has already loaded the tests.
+        if (mTestInstances == null)
+            loadTests();
+
         mRemainingTests = new LinkedList<>(mTestInstances.keySet());
 
         listener.testRunStarted(getId(), mRemainingTests.size());
@@ -1956,4 +2000,50 @@
     public void addAllExcludeFilters(List<String> filters) {
         mExcludeFilters.addAll(filters);
     }
+
+    private static void copyOptions(DeqpTestRunner destination, DeqpTestRunner source) {
+        destination.mDeqpPackage = source.mDeqpPackage;
+        destination.mConfigName = source.mConfigName;
+        destination.mCaselistFile = source.mCaselistFile;
+        destination.mScreenRotation = source.mScreenRotation;
+        destination.mSurfaceType = source.mSurfaceType;
+        destination.mIncludeFilters = new ArrayList<>(source.mIncludeFilters);
+        destination.mExcludeFilters = new ArrayList<>(source.mExcludeFilters);
+        destination.mAbi = source.mAbi;
+        destination.mLogData = source.mLogData;
+    }
+
+    /**
+     * {@inheritDoc}
+     */
+    @Override
+    public Collection<IRemoteTest> split() {
+        if (mTestInstances != null) {
+            throw new AssertionError("Re-splitting or splitting running instance?");
+        }
+        // \todo [2015-11-23 kalle] If we split into batches at shard level, we could
+        // basically get rid of batching. Except that sharding is optional?
+
+        // Assume that tests have not yet been loaded.
+        loadTests();
+
+        Collection<IRemoteTest> runners = new ArrayList<>();
+        // NOTE: Use linked hash map to keep the insertion order in iteration
+        Map<TestIdentifier, Set<BatchRunConfiguration>> currentSet = new LinkedHashMap<>();
+        Map<TestIdentifier, Set<BatchRunConfiguration>> iterationSet = this.mTestInstances;
+
+        // Go through the tests and split them into shards of at most TESTCASE_BATCH_LIMIT tests.
+        for (TestIdentifier test: iterationSet.keySet()) {
+            currentSet.put(test, iterationSet.get(test));
+            if (currentSet.size() >= TESTCASE_BATCH_LIMIT) {
+                runners.add(new DeqpTestRunner(this, currentSet));
+                // NOTE: Use linked hash map to keep the insertion order in iteration
+                currentSet = new LinkedHashMap<>();
+            }
+        }
+        runners.add(new DeqpTestRunner(this, currentSet));
+
+        CLog.i("Split deqp tests into %d shards", runners.size());
+        return runners;
+    }
 }
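
The split() added above walks mTestInstances in insertion order and cuts a new shard whenever the working map reaches TESTCASE_BATCH_LIMIT entries; because the trailing runners.add() is unconditional, the last shard can be empty when the test count divides evenly (or when there are no tests at all), which is what the testSharding_empty \todo further down alludes to. Below is a minimal, self-contained sketch of the same partitioning idea: it uses plain java.util types instead of the tradefed classes, and the batch limit here is a placeholder, since the real constant's value is not shown in this hunk.

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class ShardSplitSketch {
        // Placeholder; stands in for the runner's TESTCASE_BATCH_LIMIT.
        static final int BATCH_LIMIT = 1000;

        // Partition an ordered test map into shards of at most BATCH_LIMIT entries,
        // preserving insertion order, the same way split() walks mTestInstances.
        static <K, V> List<Map<K, V>> split(Map<K, V> tests) {
            List<Map<K, V>> shards = new ArrayList<>();
            Map<K, V> current = new LinkedHashMap<>();
            for (Map.Entry<K, V> entry : tests.entrySet()) {
                current.put(entry.getKey(), entry.getValue());
                if (current.size() >= BATCH_LIMIT) {
                    shards.add(current);
                    current = new LinkedHashMap<>();
                }
            }
            shards.add(current); // may be empty when the count divides evenly
            return shards;
        }

        public static void main(String[] args) {
            Map<Integer, String> tests = new LinkedHashMap<>();
            for (int i = 0; i < 1237; i++) {
                tests.put(i, "config");
            }
            List<Map<Integer, String>> shards = split(tests);
            System.out.println(shards.size());        // 2
            System.out.println(shards.get(0).size()); // 1000
            System.out.println(shards.get(1).size()); // 237
        }
    }

With 1237 tests and a limit of 1000 this yields a 1000-test shard followed by a 237-test shard, matching the expectation encoded in testSharding_twoShards in the runner tests below.
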
diff --git a/android/cts/runner/tests/run_tests.sh b/android/cts/runner/tests/run_tests.sh
index 7293894..9a6935a 100755
--- a/android/cts/runner/tests/run_tests.sh
+++ b/android/cts/runner/tests/run_tests.sh
@@ -45,7 +45,7 @@
     cts-tradefed_v2\
     ddmlib-prebuilt\
     hosttestlib\
-    CtsDeqp\
+    CtsDeqpTestCases\
     CtsDeqpRunnerTests\
     tradefed-prebuilt"
 JAR_PATH=
diff --git a/android/cts/runner/tests/src/com/drawelements/deqp/runner/DeqpTestRunnerTest.java b/android/cts/runner/tests/src/com/drawelements/deqp/runner/DeqpTestRunnerTest.java
index a84eef5..eb2e590 100644
--- a/android/cts/runner/tests/src/com/drawelements/deqp/runner/DeqpTestRunnerTest.java
+++ b/android/cts/runner/tests/src/com/drawelements/deqp/runner/DeqpTestRunnerTest.java
@@ -29,6 +29,7 @@
 import com.android.tradefed.device.ITestDevice;
 import com.android.tradefed.result.ITestInvocationListener;
 import com.android.tradefed.testtype.IAbi;
+import com.android.tradefed.testtype.IRemoteTest;
 import com.android.tradefed.util.IRunUtil;
 import com.android.tradefed.util.RunInterruptedException;
 
@@ -143,6 +144,12 @@
         return buildGlesTestRunner(majorVersion, minorVersion, testlist.toString());
     }
 
+    private static CompatibilityBuildHelper getMockBuildHelper() {
+        IFolderBuildInfo mockIFolderBuildInfo = EasyMock.createMock(IFolderBuildInfo.class);
+        EasyMock.replay(mockIFolderBuildInfo);
+        return new BuildHelperMock(mockIFolderBuildInfo);
+    }
+
     private static DeqpTestRunner buildGlesTestRunner(int majorVersion,
                                                       int minorVersion,
                                                       String testlist) throws ConfigurationException, FileNotFoundException {
@@ -159,13 +166,8 @@
         setter.setOptionValue("deqp-surface-type", "window");
 
         runner.setCaselistReader(new StringReader(testlist));
-
         runner.setAbi(ABI);
-
-        IFolderBuildInfo mockIFolderBuildInfo = EasyMock.createMock(IFolderBuildInfo.class);
-        EasyMock.replay(mockIFolderBuildInfo);
-        CompatibilityBuildHelper mockHelper = new BuildHelperMock(mockIFolderBuildInfo);
-        runner.setBuildHelper(mockHelper);
+        runner.setBuildHelper(getMockBuildHelper());
 
         return runner;
     }
@@ -589,12 +591,7 @@
         EasyMock.verify(mockDevice, mockIDevice);
     }
 
-    private void testFiltering(List<String> includes,
-                               List<String> excludes,
-                               List<TestIdentifier> fullTestList,
-                               String expectedTrie,
-                               List<TestIdentifier> expectedTests) throws Exception{
-
+    private static String buildTestProcessOutput(List<TestIdentifier> tests) {
         /* MultiLineReceiver expects "\r\n" line ending. */
         final String outputHeader = "INSTRUMENTATION_STATUS: dEQP-SessionInfo-Name=releaseName\r\n"
                 + "INSTRUMENTATION_STATUS: dEQP-EventType=SessionInfo\r\n"
@@ -617,7 +614,7 @@
 
         StringWriter output = new StringWriter();
         output.write(outputHeader);
-        for (TestIdentifier test : expectedTests) {
+        for (TestIdentifier test : tests) {
             output.write("INSTRUMENTATION_STATUS: dEQP-EventType=BeginTestCase\r\n");
             output.write("INSTRUMENTATION_STATUS: dEQP-BeginTestCase-TestCasePath=");
             output.write(test.getClassName());
@@ -633,6 +630,15 @@
             output.write("INSTRUMENTATION_STATUS_CODE: 0\r\n");
         }
         output.write(outputEnd);
+        return output.toString();
+    }
+
+    private void testFiltering(List<String> includes,
+                               List<String> excludes,
+                               List<TestIdentifier> fullTestList,
+                               String expectedTrie,
+                               List<TestIdentifier> expectedTests) throws Exception {
+
 
         ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
         ITestInvocationListener mockListener
@@ -664,15 +670,8 @@
         {
             expectRenderConfigQuery(mockDevice, 3, 0);
 
-            String commandLine = String.format(
-                "--deqp-caselist-file=%s --deqp-gl-config-name=rgba8888d24s8 "
-                + "--deqp-screen-rotation=unspecified "
-                + "--deqp-surface-type=window "
-                + "--deqp-log-images=disable "
-                + "--deqp-watchdog=enable",
-                CASE_LIST_FILE_NAME);
-
-            runInstrumentationLineAndAnswer(mockDevice, mockIDevice, expectedTrie, commandLine, output.toString());
+            String testOut = buildTestProcessOutput(expectedTests);
+            runInstrumentationLineAndAnswer(mockDevice, mockIDevice, testOut);
 
             for (int i = 0; i < expectedTests.size(); i++) {
                 mockListener.testStarted(EasyMock.eq(expectedTests.get(i)));
@@ -699,7 +698,6 @@
 
         EasyMock.verify(mockListener);
         EasyMock.verify(mockDevice, mockIDevice);
-        output.close();
     }
 
     public void testRun_trivialIncludeFilter() throws Exception {
@@ -826,10 +824,10 @@
         String expectedTrie = "";
 
         ArrayList<String> excludes = new ArrayList();
-		excludes.add("*");
+        excludes.add("*");
 
-		testFiltering(null, excludes, allTests, expectedTrie, new ArrayList<TestIdentifier>());
-	}
+        testFiltering(null, excludes, allTests, expectedTrie, new ArrayList<TestIdentifier>());
+    }
 
     /**
      * Test running an unexecutable test.
@@ -960,9 +958,9 @@
         tests.add(testId);
 
         DeqpTestRunner deqpTest = buildGlesTestRunner(3, 0, tests);
-		OptionSetter setter = new OptionSetter(deqpTest);
-		// Note: If the rotation is the default unspecified, features are not queried at all
-		setter.setOptionValue("deqp-screen-rotation", "90");
+        OptionSetter setter = new OptionSetter(deqpTest);
+        // Note: If the rotation is the default unspecified, features are not queried at all
+        setter.setOptionValue("deqp-screen-rotation", "90");
 
         deqpTest.setDevice(mockDevice);
 
@@ -1094,8 +1092,8 @@
         tests.add(testId);
 
         DeqpTestRunner deqpTest = buildGlesTestRunner(3, 0, tests);
-		OptionSetter setter = new OptionSetter(deqpTest);
-		setter.setOptionValue("deqp-screen-rotation", rotation);
+        OptionSetter setter = new OptionSetter(deqpTest);
+        setter.setOptionValue("deqp-screen-rotation", rotation);
 
         deqpTest.setDevice(mockDevice);
 
@@ -1371,9 +1369,9 @@
         Collection<TestIdentifier> tests = new ArrayList<TestIdentifier>();
         tests.add(testId);
 
-		DeqpTestRunner deqpTest = buildGlesTestRunner(3, 0, tests);
-		OptionSetter setter = new OptionSetter(deqpTest);
-		setter.setOptionValue("deqp-gl-config-name", pixelFormat);
+        DeqpTestRunner deqpTest = buildGlesTestRunner(3, 0, tests);
+        OptionSetter setter = new OptionSetter(deqpTest);
+        setter.setOptionValue("deqp-gl-config-name", pixelFormat);
 
         deqpTest.setDevice(mockDevice);
 
@@ -1780,6 +1778,116 @@
         EasyMock.verify(mockDevice, mockIDevice);
     }
 
+    private void runShardedTest(TestIdentifier[] testIds,
+            ArrayList<ArrayList<TestIdentifier>> testsForShard) throws Exception {
+        Collection<TestIdentifier> tests = new ArrayList<TestIdentifier>();
+        for (TestIdentifier id : testIds) tests.add(id);
+
+        DeqpTestRunner runner = buildGlesTestRunner(3, 0, tests);
+        ArrayList<IRemoteTest> shards = (ArrayList<IRemoteTest>)runner.split();
+
+        for (int shardIndex = 0; shardIndex < shards.size(); shardIndex++) {
+            DeqpTestRunner shard = (DeqpTestRunner)shards.get(shardIndex);
+            shard.setBuildHelper(getMockBuildHelper());
+
+            ArrayList<TestIdentifier> shardTests = testsForShard.get(shardIndex);
+
+            ITestDevice mockDevice = EasyMock.createMock(ITestDevice.class);
+            ITestInvocationListener mockListener
+                    = EasyMock.createStrictMock(ITestInvocationListener.class);
+            IDevice mockIDevice = EasyMock.createMock(IDevice.class);
+            int version = 3 << 16;
+            EasyMock.expect(mockDevice.getProperty("ro.opengles.version"))
+                    .andReturn(Integer.toString(version)).atLeastOnce();
+
+            EasyMock.expect(mockDevice.uninstallPackage(EasyMock.eq(DEQP_ONDEVICE_PKG))).andReturn("")
+                    .once();
+            EasyMock.expect(mockDevice.installPackage(EasyMock.<File>anyObject(),
+                    EasyMock.eq(true), EasyMock.eq(AbiUtils.createAbiFlag(ABI.getName()))))
+                    .andReturn(null).once();
+
+            mockListener.testRunStarted(getTestId(shard), shardTests.size());
+            EasyMock.expectLastCall().once();
+
+            expectRenderConfigQuery(mockDevice, 3, 0);
+
+            String testOut = buildTestProcessOutput(shardTests);
+            // NOTE: This assumes that there won't be multiple batches per shard!
+            runInstrumentationLineAndAnswer(mockDevice, mockIDevice, testOut);
+
+            for (int i = 0; i < shardTests.size(); i++) {
+                mockListener.testStarted(EasyMock.eq(shardTests.get(i)));
+                EasyMock.expectLastCall().once();
+
+                mockListener.testEnded(EasyMock.eq(shardTests.get(i)),
+                                       EasyMock.<Map<String, String>>notNull());
+
+                EasyMock.expectLastCall().once();
+            }
+
+            mockListener.testRunEnded(EasyMock.anyLong(), EasyMock.<Map<String, String>>notNull());
+            EasyMock.expectLastCall().once();
+
+            EasyMock.expect(mockDevice.uninstallPackage(EasyMock.eq(DEQP_ONDEVICE_PKG))).andReturn("")
+                    .once();
+
+            EasyMock.replay(mockDevice, mockIDevice);
+            EasyMock.replay(mockListener);
+
+            shard.setDevice(mockDevice);
+            shard.run(mockListener);
+
+            EasyMock.verify(mockListener);
+            EasyMock.verify(mockDevice, mockIDevice);
+        }
+    }
+
+    public void testSharding_smallTrivial() throws Exception {
+        final TestIdentifier[] testIds = {
+                new TestIdentifier("dEQP-GLES3.info", "vendor"),
+                new TestIdentifier("dEQP-GLES3.info", "renderer"),
+                new TestIdentifier("dEQP-GLES3.info", "version"),
+                new TestIdentifier("dEQP-GLES3.info", "shading_language_version"),
+                new TestIdentifier("dEQP-GLES3.info", "extensions"),
+                new TestIdentifier("dEQP-GLES3.info", "render_target")
+        };
+        ArrayList<ArrayList<TestIdentifier>> shardedTests = new ArrayList<>();
+        ArrayList<TestIdentifier> shardOne = new ArrayList<>();
+        for (int i = 0; i < testIds.length; i++) {
+            shardOne.add(testIds[i]);
+        }
+        shardedTests.add(shardOne);
+        runShardedTest(testIds, shardedTests);
+    }
+
+    public void testSharding_twoShards() throws Exception {
+        final int TEST_COUNT = 1237;
+        final int SHARD_SIZE = 1000;
+
+        ArrayList<TestIdentifier> testIds = new ArrayList<>(TEST_COUNT);
+        for (int i = 0; i < TEST_COUNT; i++) {
+            testIds.add(new TestIdentifier("dEQP-GLES3.funny.group", String.valueOf(i)));
+        }
+
+        ArrayList<ArrayList<TestIdentifier>> shardedTests = new ArrayList<>();
+        ArrayList<TestIdentifier> shard = new ArrayList<>();
+        for (int i = 0; i < testIds.size(); i++) {
+            if (i == SHARD_SIZE) {
+                shardedTests.add(shard);
+                shard = new ArrayList<>();
+            }
+            shard.add(testIds.get(i));
+        }
+        shardedTests.add(shard);
+        runShardedTest(testIds.toArray(new TestIdentifier[testIds.size()]), shardedTests);
+    }
+
+    public void testSharding_empty() throws Exception {
+        DeqpTestRunner runner = buildGlesTestRunner(3, 0, new ArrayList<TestIdentifier>());
+        ArrayList<IRemoteTest> shards = (ArrayList<IRemoteTest>)runner.split();
+        // \todo [2015-11-23 kalle] What should the result be? The runner or nothing?
+    }
+
     /**
      * Test external interruption in testFailed().
      */
@@ -1883,6 +1991,18 @@
     }
 
     private void runInstrumentationLineAndAnswer(ITestDevice mockDevice, IDevice mockIDevice,
+            final String output) throws Exception {
+        String cmd = String.format(
+            "--deqp-caselist-file=%s --deqp-gl-config-name=rgba8888d24s8 "
+            + "--deqp-screen-rotation=unspecified "
+            + "--deqp-surface-type=window "
+            + "--deqp-log-images=disable "
+            + "--deqp-watchdog=enable",
+            CASE_LIST_FILE_NAME);
+        runInstrumentationLineAndAnswer(mockDevice, mockIDevice, null, cmd, output);
+    }
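
The overload just above relaxes the case-list expectation for sharded runs: when no exact trie is given, the hunk below matches any pushed contents via (String)EasyMock.anyObject() while still pinning the remote path with EasyMock.eq(), and records the boolean return through expectLastCall(). A small stand-alone sketch of that EasyMock pattern follows; the Device interface and the path used here are made up for illustration and are not tradefed's.

    import org.easymock.EasyMock;

    public class PushStringExpectationSketch {
        // Hypothetical stand-in for the relevant slice of ITestDevice.
        public interface Device {
            boolean pushString(String contents, String remotePath);
        }

        public static void main(String[] args) {
            Device mock = EasyMock.createMock(Device.class);

            // Accept any case-list contents, but require the expected remote path.
            // Once one argument uses a matcher, all arguments must use matchers.
            mock.pushString((String) EasyMock.anyObject(), EasyMock.eq("/sdcard/caselist.txt"));
            EasyMock.expectLastCall().andReturn(true).once();

            EasyMock.replay(mock);
            mock.pushString("{dEQP-GLES3{info{version}}}\n", "/sdcard/caselist.txt");
            EasyMock.verify(mock);
        }
    }
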
+
+    private void runInstrumentationLineAndAnswer(ITestDevice mockDevice, IDevice mockIDevice,
             final String testTrie, final String cmd, final String output) throws Exception {
         EasyMock.expect(mockDevice.executeShellCommand(EasyMock.eq("rm " + CASE_LIST_FILE_NAME)))
                 .andReturn("").once();
@@ -1890,8 +2010,13 @@
         EasyMock.expect(mockDevice.executeShellCommand(EasyMock.eq("rm " + LOG_FILE_NAME)))
                 .andReturn("").once();
 
-        EasyMock.expect(mockDevice.pushString(testTrie + "\n", CASE_LIST_FILE_NAME))
-                .andReturn(true).once();
+        if (testTrie == null) {
+            mockDevice.pushString((String)EasyMock.anyObject(), EasyMock.eq(CASE_LIST_FILE_NAME));
+        }
+        else {
+            mockDevice.pushString(testTrie + "\n", CASE_LIST_FILE_NAME);
+        }
+        EasyMock.expectLastCall().andReturn(true).once();
 
         String command = String.format(
                 "am instrument %s -w -e deqpLogFileName \"%s\" -e deqpCmdLine \"%s\" "
diff --git a/android/package/src/com/drawelements/deqp/execserver/ExecService.java b/android/package/src/com/drawelements/deqp/execserver/ExecService.java
index bd65f92..f8d040d 100644
--- a/android/package/src/com/drawelements/deqp/execserver/ExecService.java
+++ b/android/package/src/com/drawelements/deqp/execserver/ExecService.java
@@ -25,6 +25,7 @@
 
 import android.app.Service;
 import android.app.Notification;
+import android.app.Notification.Builder;
 import android.app.PendingIntent;
 import android.content.Intent;
 import android.os.Binder;
@@ -82,8 +83,11 @@
 		PendingIntent pm = PendingIntent.getActivity(this, 0, launchIntent, 0);
 
 		// Start as foreground service.
-		Notification notification = new Notification(R.drawable.deqp_app_small, "dEQP ExecServer", System.currentTimeMillis());
-		notification.setLatestEventInfo(this, "dEQP ExecServer", "ExecServer is running in the background.", pm);
+		Notification.Builder builder = new Notification.Builder(this);
+		Notification notification = builder.setContentIntent(pm)
+			.setSmallIcon(R.drawable.deqp_app_small).setTicker("ExecServer is running in the background.")
+			.setWhen(System.currentTimeMillis()).setAutoCancel(true).setContentTitle("dEQP ExecServer")
+			.setContentText("ExecServer is running in the background.").build();
 		startForeground(1, notification);
 
 		return START_STICKY; // Keep us running until explicitly stopped
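
For context on the ExecService change above: the two-step new Notification(icon, tickerText, when) plus setLatestEventInfo(context, title, text, contentIntent) API has long been deprecated in favour of Notification.Builder, and setLatestEventInfo() was eventually removed from the platform, which fits the android-22 build target bump in android/scripts/common.py below. A minimal sketch of how the old arguments map onto builder calls; the icon and strings are placeholders, not the project's resources.

    import android.app.Notification;
    import android.app.PendingIntent;
    import android.content.Context;

    public class NotificationBuilderSketch {
        // Old API: new Notification(icon, tickerText, when)
        //          notification.setLatestEventInfo(context, title, text, contentIntent)
        // New API: each of those pieces becomes an explicit builder call.
        static Notification build(Context context, PendingIntent contentIntent) {
            return new Notification.Builder(context)
                    .setSmallIcon(android.R.drawable.ic_dialog_info) // was the icon constructor argument
                    .setTicker("Example ticker")                     // was the tickerText constructor argument
                    .setWhen(System.currentTimeMillis())             // was the when constructor argument
                    .setContentTitle("Example title")                // was setLatestEventInfo() title
                    .setContentText("Example text")                  // was setLatestEventInfo() text
                    .setContentIntent(contentIntent)                 // was setLatestEventInfo() contentIntent
                    .build();                                        // requires API 16+; the patch targets android-22
        }
    }
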
diff --git a/android/scripts/common.py b/android/scripts/common.py
index 888f4de..777d23f 100644
--- a/android/scripts/common.py
+++ b/android/scripts/common.py
@@ -211,7 +211,7 @@
 
 NDK_HOST_OS_NAMES = [
 	"windows",
-	"windows_x86-64",
+	"windows-x86_64",
 	"darwin-x86",
 	"darwin-x86_64",
 	"linux-x86",
@@ -236,7 +236,7 @@
 		NativeLib(21,		"arm64-v8a",	'android-arm64'),	# ARM64 v8a ABI
 	]
 
-ANDROID_JAVA_API		= "android-13"
+ANDROID_JAVA_API		= "android-22"
 NATIVE_LIB_NAME			= "libdeqp.so"
 
 def selectNDKPath ():
diff --git a/data/gles2/shaders/conditionals.test b/data/gles2/shaders/conditionals.test
index 7b5f591..fae1a04 100644
--- a/data/gles2/shaders/conditionals.test
+++ b/data/gles2/shaders/conditionals.test
@@ -202,6 +202,104 @@
 		""
 	end
 
+	case constant_conditional_assignment_to_matrix
+		vertex ""
+			// This variant doesn't provoke the crash seen in the versions below.
+			${VERTEX_DECLARATIONS}
+			varying mediump float FragVarying;
+			const float in0 = 0.0;
+			void main()
+			{
+				mat2 projectionMatrix = mat2(0.0, 0.0, 0.0, 0.0);
+				if (in0 == 1.0)
+				{
+					projectionMatrix[0][0] = 1.0;
+				}
+
+				FragVarying = 1.0;
+				gl_Position = dEQP_Position + vec4(projectionMatrix[1][0], 0.0, 0.0, 0.0);
+			}
+		""
+		fragment ""
+			precision mediump float;
+			varying float FragVarying;
+			void main()
+			{
+				gl_FragColor = vec4(FragVarying, 1.0, 1.0, 1.0);
+			}
+		""
+	end
+
+	case input_conditional_assignment_to_matrix
+		values
+		{
+			input float in0 = [ 0.0 ];
+		}
+		vertex ""
+			${VERTEX_DECLARATIONS}
+			varying mediump float FragVarying;  // Necessary to reproduce.
+			void main()
+			{
+				// Crashes with mat4 as well. Does not crash with vectors.
+				mat2 projectionMatrix = mat2(0.0, 0.0, 0.0, 0.0);
+				// Testing a non-constant variable is necessary.
+				if (in0 == 1.0)
+				{
+					// Using the matrix variable appears necessary.
+					projectionMatrix[0][0] = 1.0;
+				}
+
+				FragVarying = 1.0;
+				// Referencing the matrix is necessary though clearly the compiler
+				// doesn't realize the assignment is useless.
+				gl_Position = dEQP_Position + vec4(projectionMatrix[1][0], 0.0, 0.0, 0.0);
+			}
+		""
+		fragment ""
+			precision mediump float;
+			varying float FragVarying;
+			void main()
+			{
+				gl_FragColor = vec4(FragVarying, 1.0, 1.0, 1.0);
+			}
+		""
+	end
+
+	case uniform_conditional_assignment_to_matrix
+		values
+		{
+			uniform float uni0 = [ 0.0 ];
+		}
+		vertex ""
+			${VERTEX_DECLARATIONS}
+			varying mediump float FragVarying;  // Necessary to reproduce.
+			void main()
+			{
+				// Crashes with mat4 as well. Does not crash with vectors.
+				mat2 projectionMatrix = mat2(0.0, 0.0, 0.0, 0.0);
+				// Testing a non-constant variable is necessary.
+				if (uni0 == 1.0)
+				{
+					// Using the matrix variable appears necessary.
+					projectionMatrix[0][0] = 1.0;
+				}
+
+				FragVarying = 1.0;
+				// Referencing the matrix is necessary though clearly the compiler
+				// doesn't realize the assignment is useless.
+				gl_Position = dEQP_Position + vec4(projectionMatrix[1][0], 0.0, 0.0, 0.0);
+			}
+		""
+		fragment ""
+			precision mediump float;
+			varying float FragVarying;
+			void main()
+			{
+				gl_FragColor = vec4(FragVarying, 1.0, 1.0, 1.0);
+			}
+		""
+	end
+
 end # if
 
 group invalid_if "Invalid If Conditionals"
diff --git a/data/gles2/shaders/functions.test b/data/gles2/shaders/functions.test
index 6469b98..2890995 100644
--- a/data/gles2/shaders/functions.test
+++ b/data/gles2/shaders/functions.test
@@ -547,6 +547,32 @@
 		""
 	end
 
+	case struct_constructor_highp_in_fragment
+		desc "passing highp vector to struct constructor in fragment shader yields all zeros"
+		vertex ""
+			${VERTEX_DECLARATIONS}
+			void main()
+			{
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#ifdef GL_FRAGMENT_PRECISION_HIGH
+			#define PRECISION highp
+			#else
+			#define PRECISION mediump
+			#endif
+			struct Test {
+				PRECISION vec3 color;
+			};
+			void main() {
+				PRECISION vec3 color = vec3(0.2, 2.0, 0.1);
+				Test test = Test(color);
+				// Bias the color so all components are guaranteed > 1.0.
+				gl_FragColor = vec4(vec3(0.25, 0.55, 0.65) + vec3(4.0, 0.25, 4.0) * test.color, 1.0);
+			}
+		""
+	end
 
 end # datatypes
 
diff --git a/data/gles2/shaders/loops.test b/data/gles2/shaders/loops.test
new file mode 100644
index 0000000..06b7628
--- /dev/null
+++ b/data/gles2/shaders/loops.test
@@ -0,0 +1,26 @@
+group custom "Custom loop tests"
+
+	case continue_in_fragment_for_loop
+		vertex ""
+			${VERTEX_DECLARATIONS}
+			void main()
+			{
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				int count1 = 0;
+				for(int i=0;i<4;i++)
+				{
+					if (count1 == 2)
+						continue;
+				}
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+end # custom
diff --git a/data/gles3/shaders/conditionals.test b/data/gles3/shaders/conditionals.test
index a3a5b47..4483bae 100644
--- a/data/gles3/shaders/conditionals.test
+++ b/data/gles3/shaders/conditionals.test
@@ -220,6 +220,116 @@
 		""
 	end
 
+	case constant_conditional_assignment_to_matrix
+		version 300 es
+		vertex ""
+			#version 300 es
+			// This variant doesn't provoke the crash seen in the versions below.
+			${VERTEX_DECLARATIONS}
+			out mediump float FragVarying;
+			const float in0 = 0.0;
+			void main()
+			{
+				mat2 projectionMatrix = mat2(0.0, 0.0, 0.0, 0.0);
+				if (in0 == 1.0)
+				{
+					projectionMatrix[0][0] = 1.0;
+				}
+
+				FragVarying = 1.0;
+				gl_Position = dEQP_Position + vec4(projectionMatrix[1][0], 0.0, 0.0, 0.0);
+			}
+		""
+		fragment ""
+			#version 300 es
+			precision mediump float;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float FragVarying;
+			void main()
+			{
+				${FRAG_COLOR} = vec4(FragVarying, 1.0, 1.0, 1.0);
+			}
+		""
+	end
+
+	case input_conditional_assignment_to_matrix
+		version 300 es
+		values
+		{
+			input float in0 = [ 0.0 ];
+		}
+		vertex ""
+			#version 300 es
+			${VERTEX_DECLARATIONS}
+			out mediump float FragVarying;  // Necessary to reproduce.
+			void main()
+			{
+				// Crashes with mat4 as well. Does not crash with vectors.
+				mat2 projectionMatrix = mat2(0.0, 0.0, 0.0, 0.0);
+				// Testing a non-constant variable is necessary.
+				if (in0 == 1.0)
+				{
+					// Using the matrix variable appears necessary.
+					projectionMatrix[0][0] = 1.0;
+				}
+
+				FragVarying = 1.0;
+				// Referencing the matrix is necessary though clearly the compiler
+				// doesn't realize the assignment is useless.
+				gl_Position = dEQP_Position + vec4(projectionMatrix[1][0], 0.0, 0.0, 0.0);
+			}
+		""
+		fragment ""
+			#version 300 es
+			precision mediump float;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float FragVarying;
+			void main()
+			{
+				${FRAG_COLOR} = vec4(FragVarying, 1.0, 1.0, 1.0);
+			}
+		""
+	end
+
+	case uniform_conditional_assignment_to_matrix
+		version 300 es
+		values
+		{
+			uniform float uni0 = [ 0.0 ];
+		}
+		vertex ""
+			#version 300 es
+			${VERTEX_DECLARATIONS}
+			out mediump float FragVarying;  // Necessary to reproduce.
+			void main()
+			{
+				// Crashes with mat4 as well. Does not crash with vectors.
+				mat2 projectionMatrix = mat2(0.0, 0.0, 0.0, 0.0);
+				// Testing a non-constant variable is necessary.
+				if (uni0 == 1.0)
+				{
+					// Using the matrix variable appears necessary.
+					projectionMatrix[0][0] = 1.0;
+				}
+
+				FragVarying = 1.0;
+				// Referencing the matrix is necessary though clearly the compiler
+				// doesn't realize the assignment is useless.
+				gl_Position = dEQP_Position + vec4(projectionMatrix[1][0], 0.0, 0.0, 0.0);
+			}
+		""
+		fragment ""
+			#version 300 es
+			precision mediump float;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float FragVarying;
+			void main()
+			{
+				${FRAG_COLOR} = vec4(FragVarying, 1.0, 1.0, 1.0);
+			}
+		""
+	end
+
 end # if
 
 group invalid_if "Invalid If Conditionals"
diff --git a/data/gles3/shaders/functions.test b/data/gles3/shaders/functions.test
index 767a035..89b9c57 100644
--- a/data/gles3/shaders/functions.test
+++ b/data/gles3/shaders/functions.test
@@ -869,6 +869,37 @@
 		""
 	end
 
+	case struct_constructor_highp_in_fragment
+		version 300 es
+		desc "passing highp vector to struct constructor in fragment shader yields all zeros"
+		vertex ""
+			#version 300 es
+			${VERTEX_DECLARATIONS}
+			void main()
+			{
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 300 es
+			${FRAGMENT_DECLARATIONS}
+			#ifdef GL_FRAGMENT_PRECISION_HIGH
+			#define PRECISION highp
+			#else
+			#define PRECISION mediump
+			#endif
+			struct Test {
+				PRECISION vec3 color;
+			};
+			void main() {
+				PRECISION vec3 color = vec3(0.2, 2.0, 0.1);
+				Test test = Test(color);
+				// Bias the color so all components are guaranteed > 1.0.
+				${FRAG_COLOR} = vec4(vec3(0.25, 0.55, 0.65) + vec3(4.0, 0.25, 4.0) * test.color, 1.0);
+			}
+		""
+	end
+
 
 end # datatypes
 
diff --git a/data/gles3/shaders/linkage.test b/data/gles3/shaders/linkage.test
index 56acda6..5becf60 100644
--- a/data/gles3/shaders/linkage.test
+++ b/data/gles3/shaders/linkage.test
@@ -3607,43 +3607,6 @@
 	end
 
 	group block "Uniform blocks"
-		case differing_precision
-			version 300 es
-			expect build_successful
-			vertex ""
-				#version 300 es
-
-				uniform Block
-				{
-					highp vec4 val;
-				};
-
-				${VERTEX_DECLARATIONS}
-				out mediump float res;
-				void main()
-				{
-					res = val.x;
-					${VERTEX_OUTPUT}
-				}
-			""
-			fragment ""
-				#version 300 es
-
-				uniform Block
-				{
-					mediump vec4 val;
-				};
-
-				precision mediump float;
-				${FRAGMENT_DECLARATIONS}
-				in mediump float res;
-				void main()
-				{
-					dEQP_FragColor = val;
-				}
-			""
-		end
-
 		case type_mismatch
 			version 300 es
 			expect link_fail
diff --git a/data/gles3/shaders/loops.test b/data/gles3/shaders/loops.test
new file mode 100644
index 0000000..a8f1e4d
--- /dev/null
+++ b/data/gles3/shaders/loops.test
@@ -0,0 +1,29 @@
+group custom "Custom loop tests"
+
+	case continue_in_fragment_for_loop
+		version 300 es
+		vertex ""
+			#version 300 es
+			${VERTEX_DECLARATIONS}
+			void main()
+			{
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 300 es
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				int count1 = 0;
+				for(int i=0;i<4;i++)
+				{
+					if (count1 == 2)
+						continue;
+				}
+				${FRAG_COLOR} = vec4(1.0);
+			}
+		""
+	end
+
+end # custom
diff --git a/data/gles31/shaders/linkage_uniform.test b/data/gles31/shaders/linkage_uniform.test
new file mode 100644
index 0000000..2b5ca31
--- /dev/null
+++ b/data/gles31/shaders/linkage_uniform.test
@@ -0,0 +1,1562 @@
+group basic "Default block uniforms of scalar and vector types"
+	case precision_conflict_1
+		version 310 es
+		desc "Vertex side uniform has highp, fragment side uniform mediump."
+		expect link_fail
+		values {output float out0 = 3.0;}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			uniform highp float u_val;
+			out mediump float res;
+			void main()
+			{
+				res = u_val;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			uniform float u_val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = u_val + res;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+	case precision_conflict_2
+		version 310 es
+		desc "Vertex side uniform has highp, fragment side uniform mediump."
+		expect link_fail
+		values {output float out0 = 3.0;}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			uniform highp float u_val;
+			out mediump float res;
+			void main()
+			{
+				res = u_val;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision highp float;
+			uniform mediump float u_val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = u_val + res;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+	case precision_conflict_3
+		version 310 es
+		desc "Vertex side uniform has lowp, fragment side uniform highp."
+		expect link_fail
+		values {output float out0 = 3.0;}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			uniform lowp int u_val;
+			out mediump float res;
+			void main()
+			{
+				res = float(u_val);
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision highp float;
+			uniform highp int u_val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = float(u_val) + res;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+	case precision_conflict_4
+		version 310 es
+		desc "Vertex side uniform has lowp, fragment side uniform mediump."
+		expect link_fail
+		values {output float out0 = 3.0;}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			uniform lowp vec3 u_val;
+			out mediump float res;
+			void main()
+			{
+				res = u_val.y;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision highp float;
+			uniform mediump vec3 u_val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = u_val.z + res;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+end
+
+group struct "Uniform structs"
+	# Struct linkage handling
+	case basic
+		version 310 es
+		desc "Same uniform struct in both shaders"
+		values {
+			uniform float val.a = 1.0;
+			uniform float val.b = 2.0;
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump float a; mediump float b;};
+			uniform Struct val;
+			out mediump float dummy;
+			void main()
+			{
+				dummy = val.a + val.b;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump float a; mediump float b;};
+			uniform Struct val;
+			in mediump float dummy;
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				out0 = val.b + val.a;
+				out0 = out0 + dummy;
+				out0 = out0 - dummy;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case vertex_only
+		version 310 es
+		desc "Uniform struct declared in both, used only in vertex."
+		values {
+			uniform float val.a = 1.0;
+			uniform float val.b = 2.0;
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump float a; mediump float b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a + val.b;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump float a; mediump float b;};
+			uniform Struct val;
+			in mediump float res;
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				out0 = res;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case fragment_only
+		version 310 es
+		desc "Uniform struct declared in both, used only in fragment."
+		values {
+			uniform float val.a = 1.0;
+			uniform float val.b = 2.0;
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump float a; mediump float b;};
+			uniform Struct val;
+			void main()
+			{
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump float a; mediump float b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				out0 = val.a + val.b;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case partial
+		version 310 es
+		desc "Uniform struct declared in both, used partially in both."
+		values {
+			uniform float val.a = 1.0;
+			uniform float val.b = 2.0;
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump float a; mediump float b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump float a; mediump float b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + val.b;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case vec4
+		version 310 es
+		desc "Same uniform struct in both shaders. Datatype vec4"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform vec4 val.b = vec4(1.0, 2.0, 3.0, 4.0);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec4 a; mediump vec4 b;};
+			uniform Struct val;
+			out mediump float dummy;
+			void main()
+			{
+				dummy = val.a.x + val.b.y;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec4 a; mediump vec4 b;};
+			uniform Struct val;
+			in mediump float dummy;
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				out0 = val.b.y + val.a.x;
+				out0 = out0 + dummy;
+				out0 = out0 - dummy;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case vertex_only_vec4
+		version 310 es
+		desc "Uniform struct declared in both, used only in vertex. Datatype vec4"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform vec4 val.b = vec4(1.0, 2.0, 3.0, 4.0);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec4 a; mediump vec4 b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a.x + val.b.y;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec4 a; mediump vec4 b;};
+			uniform Struct val;
+			in mediump float res;
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				out0 = res;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case fragment_only_vec4
+		version 310 es
+		desc "Uniform struct declared in both, used only in fragment. Datatype vec4"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform vec4 val.b = vec4(1.0, 2.0, 3.0, 4.0);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec4 a; mediump vec4 b;};
+			uniform Struct val;
+			void main()
+			{
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec4 a; mediump vec4 b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				out0 = val.a.x + val.b.y;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case partial_vec4
+		version 310 es
+		desc "Uniform struct declared in both, used partially in both. Datatype vec4"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform vec4 val.b = vec4(1.0, 2.0, 3.0, 4.0);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec4 a; mediump vec4 b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a.x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec4 a; mediump vec4 b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + val.b.y;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case vec4_vec3
+		version 310 es
+		desc "Same uniform struct in both shaders. Datatype vec4 and vec3"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform vec3 val.b = vec3(1.0, 2.0, 3.0);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec4 a; mediump vec3 b;};
+			uniform Struct val;
+			out mediump float dummy;
+			void main()
+			{
+				dummy = val.a.x + val.b.y;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec4 a; mediump vec3 b;};
+			uniform Struct val;
+			in mediump float dummy;
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				out0 = val.b.y + val.a.x;
+				out0 = out0 + dummy;
+				out0 = out0 - dummy;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case vertex_only_vec4_vec3
+		version 310 es
+		desc "Uniform struct declared in both, used only in vertex. Datatype vec4 and vec3"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform vec3 val.b = vec3(1.0, 2.0, 3.0);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec4 a; mediump vec3 b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a.x + val.b.y;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec4 a; mediump vec3 b;};
+			uniform Struct val;
+			in mediump float res;
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				out0 = res;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case fragment_only_vec4_vec3
+		version 310 es
+		desc "Uniform struct declared in both, used only in fragment. Datatype vec4 and vec3"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform vec3 val.b = vec3(1.0, 2.0, 3.0);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec4 a; mediump vec3 b;};
+			uniform Struct val;
+			void main()
+			{
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec4 a; mediump vec3 b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				out0 = val.a.x + val.b.y;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case partial_vec4_vec3
+		version 310 es
+		desc "Uniform struct declared in both, used partially in both. Datatype vec4 and vec3"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform vec3 val.b = vec3(1.0, 2.0, 3.0);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec4 a; mediump vec3 b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a.x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec4 a; mediump vec3 b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + val.b.y;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case vec4_float
+		version 310 es
+		desc "Same uniform struct in both shaders. Datatype vec4 and float"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform float val.b = 2.0;
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec4 a; mediump float b;};
+			uniform Struct val;
+			out mediump float dummy;
+			void main()
+			{
+				dummy = val.a.x + val.b;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec4 a; mediump float b;};
+			uniform Struct val;
+			in mediump float dummy;
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				out0 = val.b + val.a.x;
+				out0 = out0 + dummy;
+				out0 = out0 - dummy;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case vertex_only_vec4_float
+		version 310 es
+		desc "Uniform struct declared in both, used only in vertex. Datatype vec4 and float"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform float val.b = 2.0;
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec4 a; mediump float b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a.x + val.b;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec4 a; mediump float b;};
+			uniform Struct val;
+			in mediump float res;
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				out0 = res;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case fragment_only_vec4_float
+		version 310 es
+		desc "Uniform struct declared in both, used only in fragment. Datatype vec4 and float"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform float val.b = 2.0;
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec4 a; mediump float b;};
+			uniform Struct val;
+			void main()
+			{
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec4 a; mediump float b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			void main()
+			{
+				out0 = val.a.x + val.b;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case partial_vec4_float
+		version 310 es
+		desc "Uniform struct declared in both, used partially in both. Datatype vec4 and float"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform float val.b = 2.0;
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec4 a; mediump float b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a.x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec4 a; mediump float b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + val.b;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case partial_vec4_struct
+		version 310 es
+		desc "Uniform struct declared in both, used partially in both. Datatype vec4 and struct with vec4"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform vec4 val.b.c = vec4(1.0, 2.0, 3.0, 4.0);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Inner {mediump vec4 c;};
+			struct Struct {mediump vec4 a; Inner b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a.x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Inner {mediump vec4 c;};
+			struct Struct {mediump vec4 a; Inner b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + val.b.c.y;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case partial_vec4_vec3_struct
+		version 310 es
+		desc "Uniform struct declared in both, used partially in both. Datatype vec4 and struct with vec3"
+		values {
+			uniform vec4 val.a = vec4(1.0, 2.0, 3.0, 4.0);
+			uniform vec3 val.b.c = vec3(1.0, 2.0, 3.0);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Inner {mediump vec3 c;};
+			struct Struct {mediump vec4 a; Inner b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a.x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Inner {mediump vec3 c;};
+			struct Struct {mediump vec4 a; Inner b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + val.b.c.y;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case partial_vec2_vec3
+		version 310 es
+		desc "Uniform struct declared in both, used partially in both. Datatype vec2 and vec3"
+		values {
+			uniform vec2 val.a = vec2(1.0, 2.0);
+			uniform vec3 val.b = vec3(1.0, 2.0, 3.0);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec2 a; mediump vec3 b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a.x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec2 a; mediump vec3 b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + val.b.y;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case partial_vec2_int
+		version 310 es
+		desc "Uniform struct declared in both, used partially in both. Datatype vec2 and int"
+		values {
+			uniform vec2 val.a = vec2(1.0, 2.0);
+			uniform int val.b = 2;
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec2 a; mediump int b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a.x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec2 a; mediump int b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + float(val.b);
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case partial_int_float
+		version 310 es
+		desc "Uniform struct declared in both, used partially in both. Datatype int and float"
+		values {
+			uniform float val.a = 1.0;
+			uniform int val.b = 2;
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump float a; mediump int b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump float a; mediump int b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + float(val.b);
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case partial_bvec2_vec2
+		version 310 es
+		desc "Uniform struct declared in both, used partially in both. Datatype bvec2 and vec2"
+		values {
+			uniform bvec2 val.a = bvec2(true, true);
+			uniform vec2 val.b = vec2(1.0, 2.0);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {bvec2 a; mediump vec2 b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = float(val.a.x);
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {bvec2 a; mediump vec2 b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + val.b.y;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case partial_ivec2_vec2
+		version 310 es
+		desc "Uniform struct declared in both, used partially in both. Datatype ivec2 and vec2"
+		values {
+			uniform ivec2 val.a = ivec2(1, 2);
+			uniform vec2 val.b = vec2(1.0, 2.0);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump ivec2 a; mediump vec2 b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = vec2(val.a).x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump ivec2 a; mediump vec2 b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + val.b.y;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case partial_ivec2_ivec2
+		version 310 es
+		desc "Uniform struct declared in both, used partially in both. Datatype ivec2 and ivec2"
+		values {
+			uniform ivec2 val.a = ivec2(1, 2);
+			uniform ivec2 val.b = ivec2(1, 2);
+			output float out0 = 3.0;
+		}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump ivec2 a; mediump ivec2 b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = vec2(val.a).x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump ivec2 a; mediump ivec2 b;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + vec2(val.b).y;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case type_conflict_1
+		version 310 es
+		desc "Fragment struct has one less member than vertex version"
+		expect link_fail
+		values {output float out0 = 3.0;}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump float a; mediump float b;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump float a;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + val.a;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case type_conflict_2
+		version 310 es
+		desc "Vertex struct has int, fragment struct has float."
+		expect link_fail
+		values {output float out0 = 3.0;}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump int a;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = float(val.a);
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump float a;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = val.a;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case type_conflict_3
+		version 310 es
+		desc "Vertex struct has vec3, fragment struct has vec4."
+		expect link_fail
+		values {output float out0 = 3.0;}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump vec3 a;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = float(val.a.x);
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump vec4 a;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = val.a.x;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case precision_conflict_1
+		version 310 es
+		desc "Vertex side struct has highp, fragment side struct mediump."
+		expect link_fail
+		values {output float out0 = 3.0;}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {highp float a;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump float a;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = val.a;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case precision_conflict_2
+		version 310 es
+		desc "Vertex side struct has mediump, fragment side struct lowp."
+		expect link_fail
+		values {output float out0 = 3.0;}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {mediump float a;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {lowp float a;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = val.a;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case precision_conflict_3
+		version 310 es
+		desc "Vertex side struct has lowp, fragment side struct mediump."
+		expect link_fail
+		values {output float out0 = 3.0;}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {lowp float a;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {mediump float a;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = val.a;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case precision_conflict_4
+		version 310 es
+		desc "Vertex side struct has lowp, fragment side struct implicit mediump."
+		expect link_fail
+		values {output float out0 = 3.0;}
+		vertex ""
+			#version 310 es
+			${VERTEX_DECLARATIONS}
+			struct Struct {lowp float a;};
+			uniform Struct val;
+			out mediump float res;
+			void main()
+			{
+				res = val.a;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Struct {float a;};
+			uniform Struct val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = val.a;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case light_struct_highp
+		version 310 es
+		desc "Complex Light struct from use case tests."
+		values {
+			uniform float val.constantAttenuation = 1.0;
+			uniform float val.quadraticAttenuation = 1.0;
+			output float out0 = 2.0;
+		}
+		vertex ""
+			#version 310 es
+			struct Light
+			{
+				mediump vec3	color;
+				highp vec4		position;
+				highp vec3		direction;
+				mediump float	constantAttenuation;
+				mediump float	linearAttenuation;
+				mediump float	quadraticAttenuation;
+			};
+			${VERTEX_DECLARATIONS}
+			uniform Light val;
+			out mediump float res;
+			void main()
+			{
+				res = val.constantAttenuation;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Light
+			{
+				mediump vec3	color;
+				highp vec4		position;
+				highp vec3		direction;
+				mediump float	constantAttenuation;
+				mediump float	linearAttenuation;
+				mediump float	quadraticAttenuation;
+			};
+			struct Struct {float a;};
+			uniform Light val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + val.quadraticAttenuation;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+
+	case light_struct_mediump
+		version 310 es
+		desc "Complex Light struct from use case tests, without highp usage"
+		values {
+			uniform float val.constantAttenuation = 1.0;
+			uniform float val.quadraticAttenuation = 1.0;
+			output float out0 = 2.0;
+		}
+		vertex ""
+			#version 310 es
+			struct Light
+			{
+				mediump vec3	color;
+				mediump vec4	position;
+				mediump vec3	direction;
+				mediump float	constantAttenuation;
+				mediump float	linearAttenuation;
+				mediump float	quadraticAttenuation;
+			};
+			${VERTEX_DECLARATIONS}
+			uniform Light val;
+			out mediump float res;
+			void main()
+			{
+				res = val.constantAttenuation;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+			precision mediump float;
+			struct Light
+			{
+				mediump vec3	color;
+				mediump vec4	position;
+				mediump vec3	direction;
+				mediump float	constantAttenuation;
+				mediump float	linearAttenuation;
+				mediump float	quadraticAttenuation;
+			};
+			struct Struct {float a;};
+			uniform Light val;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				out0 = res + val.quadraticAttenuation;
+				${FRAGMENT_OUTPUT}
+			}
+		""
+	end
+end
+
+group block "Uniform blocks"
+	case differing_precision
+		version 310 es
+		expect build_successful
+		vertex ""
+			#version 310 es
+
+			uniform Block
+			{
+				highp vec4 val;
+			};
+
+			${VERTEX_DECLARATIONS}
+			out mediump float res;
+			void main()
+			{
+				res = val.x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+
+			uniform Block
+			{
+				mediump vec4 val;
+			};
+
+			precision mediump float;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				dEQP_FragColor = val;
+			}
+		""
+	end
+
+	case type_mismatch
+		version 310 es
+		expect link_fail
+		vertex ""
+			#version 310 es
+
+			uniform Block
+			{
+				highp vec4 val;
+			};
+
+			${VERTEX_DECLARATIONS}
+			out mediump float res;
+			void main()
+			{
+				res = val.x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+
+			uniform Block
+			{
+				highp vec3 val;
+			};
+
+			precision mediump float;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				dEQP_FragColor = vec4(val, 1.0);
+			}
+		""
+	end
+
+	case members_mismatch
+		version 310 es
+		expect link_fail
+		vertex ""
+			#version 310 es
+
+			uniform Block
+			{
+				highp vec4 val;
+			};
+
+			${VERTEX_DECLARATIONS}
+			out mediump float res;
+			void main()
+			{
+				res = val.x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+
+			uniform Block
+			{
+				highp vec4 val;
+				lowp uint u;
+			};
+
+			precision mediump float;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				dEQP_FragColor = vec4(val);
+			}
+		""
+	end
+
+	case layout_qualifier_mismatch_1
+		version 310 es
+		expect link_fail
+		vertex ""
+			#version 310 es
+
+			layout(std140) uniform Block
+			{
+				highp vec4 val;
+			};
+
+			${VERTEX_DECLARATIONS}
+			out mediump float res;
+			void main()
+			{
+				res = val.x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+
+			uniform Block
+			{
+				highp vec4 val;
+			};
+
+			precision mediump float;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				dEQP_FragColor = vec4(val);
+			}
+		""
+	end
+
+	case layout_qualifier_mismatch_2
+		version 310 es
+		expect link_fail
+		vertex ""
+			#version 310 es
+
+			layout(shared) uniform Block
+			{
+				highp vec4 val;
+			};
+
+			${VERTEX_DECLARATIONS}
+			out mediump float res;
+			void main()
+			{
+				res = val.x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+
+			layout(packed) uniform Block
+			{
+				highp vec4 val;
+			};
+
+			precision mediump float;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				dEQP_FragColor = vec4(val);
+			}
+		""
+	end
+
+	case layout_qualifier_mismatch_3
+		version 310 es
+		expect link_fail
+		vertex ""
+			#version 310 es
+
+			layout(row_major) uniform Block
+			{
+				highp vec4 val;
+			};
+
+			${VERTEX_DECLARATIONS}
+			out mediump float res;
+			void main()
+			{
+				res = val.x;
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+
+			layout(column_major) uniform Block
+			{
+				highp vec4 val;
+			};
+
+			precision mediump float;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				dEQP_FragColor = vec4(val);
+			}
+		""
+	end
+
+	case layout_qualifier_mismatch_4
+		version 310 es
+		expect link_fail
+		vertex ""
+			#version 310 es
+
+			layout(row_major) uniform Block
+			{
+				highp mat3 val;
+			};
+
+			${VERTEX_DECLARATIONS}
+			out mediump float res;
+			void main()
+			{
+				res = val[0][1];
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+
+			layout(column_major) uniform Block
+			{
+				highp mat3 val;
+			};
+
+			precision mediump float;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				dEQP_FragColor = vec4(val[2], 1.0);
+			}
+		""
+	end
+
+	case layout_qualifier_mismatch_5
+		version 310 es
+		expect link_fail
+		vertex ""
+			#version 310 es
+
+			uniform Block
+			{
+				layout(row_major) uniform highp mat3 val;
+			};
+
+			${VERTEX_DECLARATIONS}
+			out mediump float res;
+			void main()
+			{
+				res = val[0][1];
+				${VERTEX_OUTPUT}
+			}
+		""
+		fragment ""
+			#version 310 es
+
+			uniform Block
+			{
+				layout(column_major) uniform highp mat3 val;
+			};
+
+			precision mediump float;
+			${FRAGMENT_DECLARATIONS}
+			in mediump float res;
+			void main()
+			{
+				dEQP_FragColor = vec4(val[2], 1.0);
+			}
+		""
+	end
+end
diff --git a/doc/testspecs/VK/.gitignore b/doc/testspecs/VK/.gitignore
new file mode 100644
index 0000000..2d19fc7
--- /dev/null
+++ b/doc/testspecs/VK/.gitignore
@@ -0,0 +1 @@
+*.html
diff --git a/doc/testspecs/VK/apitests-docinfo.html b/doc/testspecs/VK/apitests-docinfo.html
new file mode 100644
index 0000000..69b8c61
--- /dev/null
+++ b/doc/testspecs/VK/apitests-docinfo.html
@@ -0,0 +1,23 @@
+<style type="text/css">
+
+code,div.listingblock {
+	max-width: 68em;
+}
+
+p {
+    max-width: 50em;
+}
+
+table {
+    max-width: 50em;
+}
+
+table.tableblock {
+  border-width: 1px;
+}
+
+h2 {
+    max-width: 35em;
+}
+
+</style>
diff --git a/doc/testspecs/VK/apitests.adoc b/doc/testspecs/VK/apitests.adoc
new file mode 100644
index 0000000..31e308e
--- /dev/null
+++ b/doc/testspecs/VK/apitests.adoc
@@ -0,0 +1,2887 @@
+// asciidoc -b html5 -d book -f apitests.conf apitests.adoc
+
+:toc:
+:numbered:
+:docinfo:
+:revnumber: 4
+
+Vulkan API Test Plan
+====================
+
+NOTE: Document currently targets API revision 0.138.0
+
+This document outlines the current Vulkan API testing plan. The document splits the API into features, and for each feature the important testing objectives are described. The technical implementation is not currently planned or documented here, except in select cases.
+
+In the future this document will likely evolve into a description of various tests and test coverage.
+
+Test framework
+--------------
+
+The test framework will provide tests with access to the Vulkan platform interface. In addition, a library of generic utilities will be provided.
+
+Test case base class
+~~~~~~~~~~~~~~~~~~~~
+
+Vulkan test cases will use a slightly different interface from the traditional +tcu::TestCase+ to facilitate the following:
+
+ * Ability to generate shaders in a high-level language, and pre-compile them without running the tests
+ * Cleaner separation between test case parameters and execution instance
+
+[source,cpp]
+----
+class TestCase : public tcu::TestCase
+{
+public:
+                            TestCase        (tcu::TestContext& testCtx, const std::string& name, const std::string& description);
+                            TestCase        (tcu::TestContext& testCtx, tcu::TestNodeType type, const std::string& name, const std::string& description);
+    virtual                 ~TestCase       (void) {}
+
+    virtual void            initPrograms    (vk::ProgramCollection<glu::ProgramSources>& programCollection) const;
+    virtual TestInstance*   createInstance  (Context& context) const = 0;
+
+    IterateResult           iterate         (void) { DE_ASSERT(false); return STOP; } // Deprecated in this module
+};
+
+class TestInstance
+{
+public:
+                                TestInstance    (Context& context) : m_context(context) {}
+    virtual                     ~TestInstance   (void) {}
+
+    virtual tcu::TestStatus     iterate         (void) = 0;
+
+protected:
+    Context&                    m_context;
+};
+----
+
+In addition, for simple tests a utility to wrap a function as a test case is provided:
+
+[source,cpp]
+----
+tcu::TestStatus createSamplerTest (Context& context)
+{
+    TestLog&                log         = context.getTestContext().getLog();
+    const DefaultDevice     device      (context.getPlatformInterface(), context.getTestContext().getCommandLine());
+    const VkDevice          vkDevice    = device.getDevice();
+    const DeviceInterface&  vk          = device.getInterface();
+
+    {
+        const struct VkSamplerCreateInfo        samplerInfo =
+        {
+            VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,  //  VkStructureType sType;
+            DE_NULL,                                //  const void*     pNext;
+            VK_TEX_FILTER_NEAREST,                  //  VkTexFilter     magFilter;
+            VK_TEX_FILTER_NEAREST,                  //  VkTexFilter     minFilter;
+            VK_TEX_MIPMAP_MODE_BASE,                //  VkTexMipmapMode mipMode;
+            VK_TEX_ADDRESS_CLAMP,                   //  VkTexAddress    addressU;
+            VK_TEX_ADDRESS_CLAMP,                   //  VkTexAddress    addressV;
+            VK_TEX_ADDRESS_CLAMP,                   //  VkTexAddress    addressW;
+            0.0f,                                   //  float           mipLodBias;
+            0u,                                     //  deUint32        maxAnisotropy;
+            VK_COMPARE_OP_ALWAYS,                   //  VkCompareOp     compareOp;
+            0.0f,                                   //  float           minLod;
+            0.0f,                                   //  float           maxLod;
+            VK_BORDER_COLOR_TRANSPARENT_BLACK,      //  VkBorderColor   borderColor;
+        };
+
+        Move<VkSamplerT>    tmpSampler  = createSampler(vk, vkDevice, &samplerInfo);
+    }
+
+    return tcu::TestStatus::pass("Creating sampler succeeded");
+}
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx)
+{
+    de::MovePtr<tcu::TestCaseGroup> apiTests    (new tcu::TestCaseGroup(testCtx, "api", "API Tests"));
+
+    addFunctionCase(apiTests.get(), "create_sampler",   "", createSamplerTest);
+
+    return apiTests.release();
+}
+----
+
++vkt::Context+, which is passed to +vkt::TestInstance+, will provide access to the Vulkan platform interface and a default device instance. Most test cases should use the default device instance:
+
+ * Creating a device can take up to tens of milliseconds
+ * The --deqp-vk-device-id=N command line option can be used to change the device
+ * The framework can force validation layers (--deqp-vk-layers=validation,...)
+
+Other considerations:
+
+ * Rather than using the default header, dEQP uses custom header & interface wrappers
+ ** See +vk::PlatformInterface+ and +vk::DeviceInterface+
+ ** Enables an optional run-time dependency on the Vulkan driver (required for Android, useful in general)
+ ** Various logging & other analysis facilities can be layered on top of that interface
+ * Expose validation state to tests to be able to test validation
+ * Extensions are opt-in, some tests will require certain extensions to work
+ ** --deqp-vk-extensions? enable all by default?
+ ** Probably good to be able to override extensions as well (verify that tests report correct results without extensions)
+
+Common utilities
+~~~~~~~~~~~~~~~~
+
+Test case independent Vulkan utilities will be provided in the +vk+ namespace, and can be found under +framework/vulkan+. These include (a short usage sketch follows the list):
+
+ * +Unique<T>+ and +Move<T>+ wrappers for Vulkan API objects
+ * Creating all types of work with configurable parameters:
+ ** Workload "size" (not really comparable between types)
+ ** Consume & produce memory contents
+ *** Simple checksumming / other verification against reference data typically fine
+
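+A minimal usage sketch of the object wrappers (illustrative only; it reuses the +createSampler()+ call and +samplerInfo+ structure from the example in the previous section, and the exact wrapper semantics are an assumption):
+
+[source,cpp]
+----
+{
+    // Unique<T> is constructed from the Move<T> returned by createSampler() and
+    // destroys the handle when it goes out of scope (assumed semantics).
+    const Unique<VkSamplerT>    sampler (createSampler(vk, vkDevice, &samplerInfo));
+
+    // *sampler is assumed to yield the raw VkSampler handle for use in API calls.
+}   // the sampler is destroyed here by the wrapper
+----
+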
+.TODO
+ * Document important utilities (vkRef.hpp for example).
+ * Document Vulkan platform port.
+
+Object management
+-----------------
+
+Object management tests verify that the driver is able to create and destroy objects of all types. The tests don't attempt to use the objects (unless necessary for testing object construction), as that is covered by feature-specific tests. For all object types, the object management tests cover:
+
+ * Creating objects with a relevant set of parameters
+ ** Not exhaustive; guided by what might actually make the driver take a different path
+ * Allocating multiple objects of same type
+ ** Reasonable limit depends on object type
+ * Creating objects from multiple threads concurrently (where possible)
+ * Freeing objects from multiple threads
+
+NOTE: tests for various +vkCreate*()+ functions are documented in feature-specific sections.
+
+Multithreaded scaling
+---------------------
+
+The Vulkan API is free-threaded and suggests that many operations (such as constructing command buffers) will scale with the number of application threads. Tests are needed to prove that such scalability actually exists, and that no locks in important functionality prevent it.
+
+NOTE: Khronos CTS has not traditionally included any performance testing, and the tests may not be part of conformance criteria. The tests may however be useful for IHVs for driver optimization, and could be enforced by platform-specific conformance tests, such as Android CTS.
+
+Destructor functions
+~~~~~~~~~~~~~~~~~~~~
+
+[source,c]
+----
+VkResult VKAPI vkDestroyInstance(
+    VkInstance                                  instance);
+
+VkResult VKAPI vkDestroyDevice(
+    VkDevice                                    device);
+
+VkResult VKAPI vkDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence);
+
+VkResult VKAPI vkDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore);
+
+VkResult VKAPI vkDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VkResult VKAPI vkDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool);
+
+VkResult VKAPI vkDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer);
+
+VkResult VKAPI vkDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView);
+
+VkResult VKAPI vkDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image);
+
+VkResult VKAPI vkDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView);
+
+VkResult VKAPI vkDestroyAttachmentView(
+    VkDevice                                    device,
+    VkAttachmentView                            attachmentView);
+
+VkResult VKAPI vkDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule);
+
+VkResult VKAPI vkDestroyShader(
+    VkDevice                                    device,
+    VkShader                                    shader);
+
+VkResult VKAPI vkDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache);
+
+VkResult VKAPI vkDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline);
+
+VkResult VKAPI vkDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout);
+
+VkResult VKAPI vkDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler);
+
+VkResult VKAPI vkDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout);
+
+VkResult VKAPI vkDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool);
+
+VkResult VKAPI vkDestroyDynamicViewportState(
+    VkDevice                                    device,
+    VkDynamicViewportState                      dynamicViewportState);
+
+VkResult VKAPI vkDestroyDynamicRasterState(
+    VkDevice                                    device,
+    VkDynamicRasterState                        dynamicRasterState);
+
+VkResult VKAPI vkDestroyDynamicColorBlendState(
+    VkDevice                                    device,
+    VkDynamicColorBlendState                    dynamicColorBlendState);
+
+VkResult VKAPI vkDestroyDynamicDepthStencilState(
+    VkDevice                                    device,
+    VkDynamicDepthStencilState                  dynamicDepthStencilState);
+
+VkResult VKAPI vkDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer);
+
+VkResult VKAPI vkDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass);
+
+VkResult VKAPI vkDestroyCommandPool(
+    VkDevice                                    device,
+    VkCmdPool                                   cmdPool);
+
+VkResult VKAPI vkDestroyCommandBuffer(
+    VkDevice                                    device,
+    VkCmdBuffer                                 commandBuffer);
+----
+
+API Queries
+-----------
+
+The objective of the API query tests is to validate that various +vkGet*+ functions return correct values. Generic checks that apply to all query types are (a sketch of the guard-band check is given after the list):
+
+ * Returned value size is equal to or a multiple of the relevant struct size
+ * Query doesn't write outside the provided pointer
+ * Query values (where expected) don't change between subsequent queries
+ * Concurrent queries from multiple threads work
+
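+As an illustration of the first two checks, a guard-band style sketch (simplified; +physicalDevice+ is assumed to come from the test context, and the +vkGetPhysicalDeviceProperties+ prototype is the revision 0.138.0 one quoted below):
+
+[source,cpp]
+----
+// Fill a buffer larger than the output struct with a known pattern, run the query,
+// and verify that the bytes after the struct were not written.
+uint8_t buffer[sizeof(VkPhysicalDeviceProperties) + 16];
+
+memset(buffer, 0xcd, sizeof(buffer));
+
+if (vkGetPhysicalDeviceProperties(physicalDevice, (VkPhysicalDeviceProperties*)buffer) != VK_SUCCESS)
+    return tcu::TestStatus::fail("Query failed");
+
+for (size_t ndx = sizeof(VkPhysicalDeviceProperties); ndx < sizeof(buffer); ndx++)
+    if (buffer[ndx] != 0xcd)
+        return tcu::TestStatus::fail("Query wrote outside the provided struct");
+----
+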
+Platform queries
+~~~~~~~~~~~~~~~~
+
+Platform query tests will validate that all queries work as expected and return sensible values.
+
+ * Sensible device properties
+ ** May have some Android-specific requirements
+ *** TBD queue 0 must be universal queue (all command types supported)
+ * All required functions present
+ ** Both platform (physicalDevice = 0) and device-specific
+ ** Culled based on enabled extension list?
+
+[source,c]
+----
+// Physical devices
+
+VkResult VKAPI vkEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices);
+
+VkResult VKAPI vkGetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures);
+
+// Properties & limits
+
+VkResult VKAPI vkGetPhysicalDeviceLimits(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceLimits*                     pLimits);
+
+typedef struct {
+    uint32_t                                    apiVersion;
+    uint32_t                                    driverVersion;
+    uint32_t                                    vendorId;
+    uint32_t                                    deviceId;
+    VkPhysicalDeviceType                        deviceType;
+    char                                        deviceName[VK_MAX_PHYSICAL_DEVICE_NAME];
+    uint8_t                                     pipelineCacheUUID[VK_UUID_LENGTH];
+} VkPhysicalDeviceProperties;
+
+VkResult VKAPI vkGetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties);
+
+// Queue properties
+
+VkResult VKAPI vkGetPhysicalDeviceQueueCount(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCount);
+
+typedef enum {
+    VK_QUEUE_GRAPHICS_BIT = 0x00000001,
+    VK_QUEUE_COMPUTE_BIT = 0x00000002,
+    VK_QUEUE_DMA_BIT = 0x00000004,
+    VK_QUEUE_SPARSE_MEMMGR_BIT = 0x00000008,
+    VK_QUEUE_EXTENDED_BIT = 0x40000000,
+} VkQueueFlagBits;
+typedef VkFlags VkQueueFlags;
+
+typedef struct {
+    VkQueueFlags                                queueFlags;
+    uint32_t                                    queueCount;
+    VkBool32                                    supportsTimestamps;
+} VkPhysicalDeviceQueueProperties;
+
+VkResult VKAPI vkGetPhysicalDeviceQueueProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    count,
+    VkPhysicalDeviceQueueProperties*            pQueueProperties);
+
+// Memory properties
+
+typedef enum {
+    VK_MEMORY_PROPERTY_DEVICE_ONLY = 0,
+    VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000001,
+    VK_MEMORY_PROPERTY_HOST_NON_COHERENT_BIT = 0x00000002,
+    VK_MEMORY_PROPERTY_HOST_UNCACHED_BIT = 0x00000004,
+    VK_MEMORY_PROPERTY_HOST_WRITE_COMBINED_BIT = 0x00000008,
+    VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010,
+} VkMemoryPropertyFlagBits;
+typedef VkFlags VkMemoryPropertyFlags;
+
+typedef enum {
+    VK_MEMORY_HEAP_HOST_LOCAL = 0x00000001,
+} VkMemoryHeapFlagBits;
+typedef VkFlags VkMemoryHeapFlags;
+
+typedef struct {
+    VkMemoryPropertyFlags                       propertyFlags;
+    uint32_t                                    heapIndex;
+} VkMemoryType;
+
+typedef struct {
+    VkDeviceSize                                size;
+    VkMemoryHeapFlags                           flags;
+} VkMemoryHeap;
+
+typedef struct {
+    uint32_t                                    memoryTypeCount;
+    VkMemoryType                                memoryTypes[VK_MAX_MEMORY_TYPES];
+    uint32_t                                    memoryHeapCount;
+    VkMemoryHeap                                memoryHeaps[VK_MAX_MEMORY_HEAPS];
+} VkPhysicalDeviceMemoryProperties;
+
+VkResult VKAPI vkGetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties);
+
+// Proc address queries
+
+PFN_vkVoidFunction VKAPI vkGetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName);
+
+PFN_vkVoidFunction VKAPI vkGetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName);
+
+// Extension queries
+
+typedef struct {
+    char                                        extName[VK_MAX_EXTENSION_NAME];
+    uint32_t                                    specVersion;
+} VkExtensionProperties;
+
+VkResult VKAPI vkGetGlobalExtensionProperties(
+    const char*                                 pLayerName,
+    uint32_t*                                   pCount,
+    VkExtensionProperties*                      pProperties);
+
+VkResult VKAPI vkGetPhysicalDeviceExtensionProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const char*                                 pLayerName,
+    uint32_t*                                   pCount,
+    VkExtensionProperties*                      pProperties);
+
+// Layer queries
+
+typedef struct {
+    char                                        layerName[VK_MAX_EXTENSION_NAME];
+    uint32_t                                    specVersion;
+    uint32_t                                    implVersion;
+    const char*                                 description[VK_MAX_DESCRIPTION];
+} VkLayerProperties;
+
+VkResult VKAPI vkGetGlobalLayerProperties(
+    uint32_t*                                   pCount,
+    VkLayerProperties*                          pProperties);
+
+VkResult VKAPI vkGetPhysicalDeviceLayerProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCount,
+    VkLayerProperties*                          pProperties);
+----
+
+Device queries
+~~~~~~~~~~~~~~
+
+[source,c]
+----
+VkResult VKAPI vkGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue);
+
+VkResult VKAPI vkGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes);
+----
+
+Object queries
+~~~~~~~~~~~~~~
+
+ * Memory requirements: verify that for buffers the returned size is at least the size of the buffer (a check sketch follows the declarations below)
+
+[source,c]
+----
+typedef struct {
+    VkDeviceSize                                size;
+    VkDeviceSize                                alignment;
+    uint32_t                                    memoryTypeBits;
+} VkMemoryRequirements;
+
+VkResult VKAPI vkGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+VkResult VKAPI vkGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements);
+----
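+
+A minimal sketch of the buffer size check listed above (assuming +device+, +buffer+ and its creation size +bufferSize+ come from the surrounding test code):
+
+[source,cpp]
+----
+VkMemoryRequirements    memReqs;
+
+if (vkGetBufferMemoryRequirements(device, buffer, &memReqs) != VK_SUCCESS)
+    return tcu::TestStatus::fail("vkGetBufferMemoryRequirements failed");
+
+// The reported size must be able to hold the whole buffer.
+if (memReqs.size < bufferSize)
+    return tcu::TestStatus::fail("Reported memory requirement is smaller than the buffer");
+----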
+
+Format & image capabilities
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+[source,c]
+----
+typedef enum {
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001,
+    VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002,
+    VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004,
+    VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008,
+    VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010,
+    VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020,
+    VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040,
+    VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080,
+    VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100,
+    VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200,
+    VK_FORMAT_FEATURE_CONVERSION_BIT = 0x00000400,
+} VkFormatFeatureFlagBits;
+typedef VkFlags VkFormatFeatureFlags;
+
+typedef struct {
+    VkFormatFeatureFlags                        linearTilingFeatures;
+    VkFormatFeatureFlags                        optimalTilingFeatures;
+} VkFormatProperties;
+
+VkResult VKAPI vkGetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties);
+
+typedef struct {
+    uint64_t                                    maxResourceSize;
+    uint32_t                                    maxSamples;
+} VkImageFormatProperties;
+
+VkResult VKAPI vkGetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageFormatProperties*                    pImageFormatProperties);
+----
+
+Memory management
+-----------------
+
+Memory management tests cover memory allocation, sub-allocation, access, and CPU and GPU cache control. Testing some areas, such as cache control, will require stress-testing memory accesses from the CPU and various pipeline stages.
+
+Memory allocation
+~~~~~~~~~~~~~~~~~
+
+[source,c]
+----
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkDeviceSize                                allocationSize;
+    uint32_t                                    memoryTypeIndex;
+} VkMemoryAllocInfo;
+
+VkResult VKAPI vkAllocMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocInfo*                    pAllocInfo,
+    VkDeviceMemory*                             pMem);
+
+VkResult VKAPI vkFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              mem);
+----
+
+ * Test combination of:
+ ** Various allocation sizes
+ ** All heaps
+ * Allocations that exceed the total available memory size (expected to fail)
+ * Concurrent allocation and free from multiple threads
+ * Memory leak tests (may not work on platforms that overcommit)
+ ** Allocate memory until allocation fails, free all and repeat (see the sketch below)
+ ** Total allocated memory size should remain stable over iterations
+ ** Allocate and free in random order
+
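+A rough sketch of the allocate-until-failure idea above (illustrative only; +device+ and +memoryTypeIndex+ are assumed to come from the test context, and the +sType+ constant name is an assumption for this API revision):
+
+[source,cpp]
+----
+std::vector<VkDeviceMemory> allocations;
+
+for (;;)
+{
+    const VkMemoryAllocInfo allocInfo =
+    {
+        VK_STRUCTURE_TYPE_MEMORY_ALLOC_INFO,    //  VkStructureType sType; (name assumed)
+        DE_NULL,                                //  const void*     pNext;
+        (VkDeviceSize)(1u << 20),               //  VkDeviceSize    allocationSize;
+        memoryTypeIndex,                        //  uint32_t        memoryTypeIndex;
+    };
+    VkDeviceMemory          mem = DE_NULL;
+
+    if (vkAllocMemory(device, &allocInfo, &mem) != VK_SUCCESS)
+        break;                                  // out of memory - stop and free everything
+
+    allocations.push_back(mem);
+}
+
+// Total allocated size at the point of failure should remain stable over iterations.
+for (size_t ndx = 0; ndx < allocations.size(); ndx++)
+    vkFreeMemory(device, allocations[ndx]);
+----
+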
+.Spec issues
+
+What are the alignment guarantees for the returned memory allocation? Will it satisfy alignment requirements for all object types? If not, the app needs to know the alignment, or an alignment parameter needs to be added to +VkMemoryAllocInfo+.
+
+Minimum allocation size? If 1, presumably the implementation has to round it up to at least the next page size? Is there a query for that? What happens when accessing the added padding?
+
+Mapping memory and CPU access
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+[source,c]
+----
+VkResult VKAPI vkMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              mem,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData);
+
+VkResult VKAPI vkUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              mem);
+----
+
+ * Verify that mapping of all host-visible allocations succeeds and accessing memory works
+ * Verify mapping of sub-ranges
+ * Access still works after un-mapping and re-mapping memory (see the sketch below)
+ * Attaching or detaching a memory allocation from a buffer/image doesn't affect mapped memory access or contents
+ ** Images: test with various formats, mip-levels etc.
+
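+A sketch of the un-map / re-map contents check above (illustrative; +device+, +mem+ and +allocationSize+ are assumed to come from the surrounding test, prototypes as quoted above):
+
+[source,cpp]
+----
+void* ptr = DE_NULL;
+
+// Write a known pattern through the mapping, then un-map.
+if (vkMapMemory(device, mem, 0u, allocationSize, 0u, &ptr) != VK_SUCCESS)
+    return tcu::TestStatus::fail("vkMapMemory failed");
+memset(ptr, 0xab, (size_t)allocationSize);
+vkUnmapMemory(device, mem);
+
+// Re-map and verify that the contents survived.
+if (vkMapMemory(device, mem, 0u, allocationSize, 0u, &ptr) != VK_SUCCESS)
+    return tcu::TestStatus::fail("vkMapMemory failed");
+for (size_t ndx = 0; ndx < (size_t)allocationSize; ndx++)
+    if (((const uint8_t*)ptr)[ndx] != 0xab)
+        return tcu::TestStatus::fail("Mapped memory contents changed after re-map");
+vkUnmapMemory(device, mem);
+----
+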
+.Spec issues
+ * Man pages say vkMapMemory is thread-safe, but to what extent?
+ ** Mapping different VkDeviceMemory allocs concurrently?
+ ** Mapping different sub-ranges of same VkDeviceMemory?
+ ** Mapping overlapping sub-ranges of same VkDeviceMemory?
+ * Okay to re-map same or overlapping range? What pointers should be returned in that case?
+ * Can re-mapping same block return different virtual address?
+ * Alignment of returned CPU pointer?
+ ** Access using SIMD instructions can benefit from alignment
+
+CPU cache control
+~~~~~~~~~~~~~~~~~
+
+[source,c]
+----
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkDeviceMemory                              mem;
+    VkDeviceSize                                offset;
+    VkDeviceSize                                size;
+} VkMappedMemoryRange;
+
+VkResult VKAPI vkFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memRangeCount,
+    const VkMappedMemoryRange*                  pMemRanges);
+
+VkResult VKAPI vkInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memRangeCount,
+    const VkMappedMemoryRange*                  pMemRanges);
+----
+
+ * TODO Semantics discussed at https://cvs.khronos.org/bugzilla/show_bug.cgi?id=13690
+ ** Invalidate relevant for HOST_NON_COHERENT_BIT, flushes CPU read caches
+ ** Flush flushes CPU write caches?
+ * Test behavior with all possible memory allocation types & various sizes (a minimal flush sketch follows the list)
+ * Corner-cases:
+ ** Empty list
+ ** Empty ranges
+ ** Same range specified multiple times
+ ** Partial overlap between ranges
+
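+A minimal flush sketch, as referenced in the list above (illustrative; the +sType+ constant name is an assumption, and +mem+, +mappedPtr+ and +allocationSize+ come from a prior allocation and +vkMapMemory()+ call):
+
+[source,cpp]
+----
+const VkMappedMemoryRange   range =
+{
+    VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,  //  VkStructureType sType; (name assumed)
+    DE_NULL,                                //  const void*     pNext;
+    mem,                                    //  VkDeviceMemory  mem;
+    0u,                                     //  VkDeviceSize    offset;
+    allocationSize,                         //  VkDeviceSize    size;
+};
+
+memset(mappedPtr, 0x5a, (size_t)allocationSize);    // CPU writes through the mapping
+vkFlushMappedMemoryRanges(device, 1u, &range);      // make the writes visible to the device
+----
+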
+.Spec issues
+ * Thread-safety? Okay to flush different ranges concurrently?
+
+GPU cache control
+~~~~~~~~~~~~~~~~~
+
+Validate that GPU caches are invalidated where instructed. This includes visibility of memory writes made by both CPU and GPU to both CPU and GPU pipeline stages.
+
+[source,c]
+----
+typedef enum {
+    VK_MEMORY_OUTPUT_HOST_WRITE_BIT = 0x00000001,
+    VK_MEMORY_OUTPUT_SHADER_WRITE_BIT = 0x00000002,
+    VK_MEMORY_OUTPUT_COLOR_ATTACHMENT_BIT = 0x00000004,
+    VK_MEMORY_OUTPUT_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000008,
+    VK_MEMORY_OUTPUT_TRANSFER_BIT = 0x00000010,
+} VkMemoryOutputFlagBits;
+typedef VkFlags VkMemoryOutputFlags;
+
+typedef enum {
+    VK_MEMORY_INPUT_HOST_READ_BIT = 0x00000001,
+    VK_MEMORY_INPUT_INDIRECT_COMMAND_BIT = 0x00000002,
+    VK_MEMORY_INPUT_INDEX_FETCH_BIT = 0x00000004,
+    VK_MEMORY_INPUT_VERTEX_ATTRIBUTE_FETCH_BIT = 0x00000008,
+    VK_MEMORY_INPUT_UNIFORM_READ_BIT = 0x00000010,
+    VK_MEMORY_INPUT_SHADER_READ_BIT = 0x00000020,
+    VK_MEMORY_INPUT_COLOR_ATTACHMENT_BIT = 0x00000040,
+    VK_MEMORY_INPUT_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000080,
+    VK_MEMORY_INPUT_INPUT_ATTACHMENT_BIT = 0x00000100,
+    VK_MEMORY_INPUT_TRANSFER_BIT = 0x00000200,
+} VkMemoryInputFlagBits;
+typedef VkFlags VkMemoryInputFlags;
+
+typedef enum {
+    VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001,
+    VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002,
+    VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004,
+    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008,
+    VK_PIPELINE_STAGE_TESS_CONTROL_SHADER_BIT = 0x00000010,
+    VK_PIPELINE_STAGE_TESS_EVALUATION_SHADER_BIT = 0x00000020,
+    VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040,
+    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080,
+    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100,
+    VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200,
+    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400,
+    VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800,
+    VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000,
+    VK_PIPELINE_STAGE_TRANSITION_BIT = 0x00002000,
+    VK_PIPELINE_STAGE_HOST_BIT = 0x00004000,
+    VK_PIPELINE_STAGE_ALL_GRAPHICS = 0x000007FF,
+    VK_PIPELINE_STAGE_ALL_GPU_COMMANDS = 0x00003FFF,
+} VkPipelineStageFlagBits;
+typedef VkFlags VkPipelineStageFlags;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkMemoryOutputFlags                         outputMask;
+    VkMemoryInputFlags                          inputMask;
+    uint32_t                                    srcQueueFamilyIndex;
+    uint32_t                                    destQueueFamilyIndex;
+    VkBuffer                                    buffer;
+    VkDeviceSize                                offset;
+    VkDeviceSize                                size;
+} VkBufferMemoryBarrier;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkMemoryOutputFlags                         outputMask;
+    VkMemoryInputFlags                          inputMask;
+    VkImageLayout                               oldLayout;
+    VkImageLayout                               newLayout;
+    uint32_t                                    srcQueueFamilyIndex;
+    uint32_t                                    destQueueFamilyIndex;
+    VkImage                                     image;
+    VkImageSubresourceRange                     subresourceRange;
+} VkImageMemoryBarrier;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkMemoryOutputFlags                         outputMask;
+    VkMemoryInputFlags                          inputMask;
+} VkMemoryBarrier;
+
+void VKAPI vkCmdPipelineBarrier(
+    VkCmdBuffer                                 cmdBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        destStageMask,
+    VkBool32                                    byRegion,
+    uint32_t                                    memBarrierCount,
+    const void* const*                          ppMemBarriers);
+
+// \note vkCmdWaitEvents includes memory barriers as well
+----
+
+ * Image layout transitions may need special care
+
+Binding memory to objects
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+[source,c]
+----
+VkResult VKAPI vkBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              mem,
+    VkDeviceSize                                memOffset);
+
+VkResult VKAPI vkBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              mem,
+    VkDeviceSize                                memOffset);
+----
+
+ * Buffers and images only
+ * Straightforward mapping where allocation size matches object size and memOffset = 0
+ * Sub-allocation of larger allocations (see the sketch below)
+ * Re-binding an object to a different memory allocation
+ * Binding multiple objects to same or partially overlapping memory ranges
+ ** Aliasing writable resources? Access granularity?
+ * Binding various (supported) types of memory allocations
+
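+A sketch of the sub-allocation case referenced above (illustrative; +buffer1+/+buffer2+ and their +VkMemoryRequirements+ are assumed to have been queried as in the object query section, and +mem+ is a single allocation large enough for both):
+
+[source,cpp]
+----
+// Place the second buffer after the first, rounded up to its required alignment.
+const VkDeviceSize  offset1 = 0u;
+const VkDeviceSize  offset2 = ((memReqs1.size + memReqs2.alignment - 1u) / memReqs2.alignment) * memReqs2.alignment;
+
+if (vkBindBufferMemory(device, buffer1, mem, offset1) != VK_SUCCESS ||
+    vkBindBufferMemory(device, buffer2, mem, offset2) != VK_SUCCESS)
+    return tcu::TestStatus::fail("Binding sub-allocated memory failed");
+----
+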
+.Spec issues
+ * When binding multiple objects to the same memory, will data in memory be visible to all objects?
+ ** Reinterpretation rules?
+ * Memory contents after re-binding memory to a different object?
+
+Sparse resources
+----------------
+
+Sparse memory resources are treated as a separate feature from basic memory management. Details are still TBD.
+
+[source,c]
+----
+typedef enum {
+    VK_SPARSE_MEMORY_BIND_REPLICATE_64KIB_BLOCK_BIT = 0x00000001,
+} VkSparseMemoryBindFlagBits;
+typedef VkFlags VkSparseMemoryBindFlags;
+
+typedef struct {
+    VkDeviceSize                                offset;
+    VkDeviceSize                                memOffset;
+    VkDeviceMemory                              mem;
+    VkSparseMemoryBindFlags                     flags;
+} VkSparseMemoryBindInfo;
+
+VkResult VKAPI vkQueueBindSparseBufferMemory(
+    VkQueue                                     queue,
+    VkBuffer                                    buffer,
+    uint32_t                                    numBindings,
+    const VkSparseMemoryBindInfo*               pBindInfo);
+
+VkResult VKAPI vkQueueBindSparseImageOpaqueMemory(
+    VkQueue                                     queue,
+    VkImage                                     image,
+    uint32_t                                    numBindings,
+    const VkSparseMemoryBindInfo*               pBindInfo);
+
+// Non-opaque sparse images
+
+typedef enum {
+    VK_SPARSE_IMAGE_FMT_SINGLE_MIPTAIL_BIT = 0x00000001,
+    VK_SPARSE_IMAGE_FMT_ALIGNED_MIP_SIZE_BIT = 0x00000002,
+    VK_SPARSE_IMAGE_FMT_NONSTD_BLOCK_SIZE_BIT = 0x00000004,
+} VkSparseImageFormatFlagBits;
+typedef VkFlags VkSparseImageFormatFlags;
+
+typedef struct {
+    VkImageAspect                               aspect;
+    VkExtent3D                                  imageGranularity;
+    VkSparseImageFormatFlags                    flags;
+} VkSparseImageFormatProperties;
+
+VkResult VKAPI vkGetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    uint32_t                                    samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pNumProperties,
+    VkSparseImageFormatProperties*              pProperties);
+
+typedef struct {
+    VkSparseImageFormatProperties               formatProps;
+    uint32_t                                    imageMipTailStartLOD;
+    VkDeviceSize                                imageMipTailSize;
+    VkDeviceSize                                imageMipTailOffset;
+    VkDeviceSize                                imageMipTailStride;
+} VkSparseImageMemoryRequirements;
+
+VkResult VKAPI vkGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pNumRequirements,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements);
+
+typedef struct {
+    VkImageSubresource                          subresource;
+    VkOffset3D                                  offset;
+    VkExtent3D                                  extent;
+    VkDeviceSize                                memOffset;
+    VkDeviceMemory                              mem;
+    VkSparseMemoryBindFlags                     flags;
+} VkSparseImageMemoryBindInfo;
+
+VkResult VKAPI vkQueueBindSparseImageMemory(
+    VkQueue                                     queue,
+    VkImage                                     image,
+    uint32_t                                    numBindings,
+    const VkSparseImageMemoryBindInfo*          pBindInfo);
+----
+
+Binding model
+-------------
+
+The objective of the binding model tests is to verify:
+
+ * All valid descriptor sets can be created
+ * Accessing resources from shaders using various layouts
+ * Descriptor updates
+ * Descriptor set chaining
+ * Descriptor set limits
+
+As a necessary side effect, the tests will provide coverage for allocating and accessing all types of resources from all shader stages.
+
+Descriptor set functions
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+[source,c]
+----
+// DescriptorSetLayout
+
+typedef struct {
+    VkDescriptorType                            descriptorType;
+    uint32_t                                    arraySize;
+    VkShaderStageFlags                          stageFlags;
+    const VkSampler*                            pImmutableSamplers;
+} VkDescriptorSetLayoutBinding;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    count;
+    const VkDescriptorSetLayoutBinding*         pBinding;
+} VkDescriptorSetLayoutCreateInfo;
+
+VkResult VKAPI vkCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayout*                      pSetLayout);
+
+// DescriptorPool
+
+typedef struct {
+    VkDescriptorType                            type;
+    uint32_t                                    count;
+} VkDescriptorTypeCount;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    count;
+    const VkDescriptorTypeCount*                pTypeCount;
+} VkDescriptorPoolCreateInfo;
+
+VkResult VKAPI vkCreateDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPoolUsage                       poolUsage,
+    uint32_t                                    maxSets,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    VkDescriptorPool*                           pDescriptorPool);
+
+VkResult VKAPI vkResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool);
+
+// DescriptorSet
+
+typedef struct {
+    VkBufferView                                bufferView;
+    VkSampler                                   sampler;
+    VkImageView                                 imageView;
+    VkAttachmentView                            attachmentView;
+    VkImageLayout                               imageLayout;
+} VkDescriptorInfo;
+
+VkResult VKAPI vkAllocDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorSetUsage                        setUsage,
+    uint32_t                                    count,
+    const VkDescriptorSetLayout*                pSetLayouts,
+    VkDescriptorSet*                            pDescriptorSets,
+    uint32_t*                                   pCount);
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkDescriptorSet                             destSet;
+    uint32_t                                    destBinding;
+    uint32_t                                    destArrayElement;
+    uint32_t                                    count;
+    VkDescriptorType                            descriptorType;
+    const VkDescriptorInfo*                     pDescriptors;
+} VkWriteDescriptorSet;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkDescriptorSet                             srcSet;
+    uint32_t                                    srcBinding;
+    uint32_t                                    srcArrayElement;
+    VkDescriptorSet                             destSet;
+    uint32_t                                    destBinding;
+    uint32_t                                    destArrayElement;
+    uint32_t                                    count;
+} VkCopyDescriptorSet;
+
+VkResult VKAPI vkUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    writeCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    copyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies);
+----
+
+Pipeline layout functions
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Pipeline layouts will be covered mostly by tests that use various layouts, but in addition some corner-case tests are needed:
+
+ * Creating empty layouts for shaders that don't use any resources (a sketch follows the declarations below)
+ ** For example: vertex data generated with +gl_VertexID+ only
+
+[source,c]
+----
+typedef struct {
+    VkShaderStageFlags                          stageFlags;
+    uint32_t                                    start;
+    uint32_t                                    length;
+} VkPushConstantRange;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    descriptorSetCount;
+    const VkDescriptorSetLayout*                pSetLayouts;
+    uint32_t                                    pushConstantRangeCount;
+    const VkPushConstantRange*                  pPushConstantRanges;
+} VkPipelineLayoutCreateInfo;
+
+VkResult VKAPI vkCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    VkPipelineLayout*                           pPipelineLayout);
+----
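+
+A sketch of the empty-layout corner case mentioned above (illustrative; the +sType+ constant name is an assumption for this API revision):
+
+[source,cpp]
+----
+const VkPipelineLayoutCreateInfo    emptyLayoutInfo =
+{
+    VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,  //  VkStructureType                 sType; (name assumed)
+    DE_NULL,                                        //  const void*                     pNext;
+    0u,                                             //  uint32_t                        descriptorSetCount;
+    DE_NULL,                                        //  const VkDescriptorSetLayout*    pSetLayouts;
+    0u,                                             //  uint32_t                        pushConstantRangeCount;
+    DE_NULL,                                        //  const VkPushConstantRange*      pPushConstantRanges;
+};
+VkPipelineLayout                    emptyLayout     = DE_NULL;
+
+if (vkCreatePipelineLayout(device, &emptyLayoutInfo, &emptyLayout) != VK_SUCCESS)
+    return tcu::TestStatus::fail("Creating an empty pipeline layout failed");
+----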
+
+Multipass
+---------
+
+Multipass tests will verify:
+
+ * Various possible multipass data flow configurations
+ ** Target formats, number of targets, load, store, resolve, dependencies, ...
+ ** Exhaustive tests for selected dimensions
+ ** Randomized tests
+ * Interaction with other features
+ ** Blending
+ ** Tessellation, geometry shaders (esp. massive geometry expansion)
+ ** Barriers that may cause tiler flushes
+ ** Queries
+ * Large passes that may require tiler flushes
+
+[source,c]
+----
+// Framebuffer
+
+typedef struct {
+    VkAttachmentView                            view;
+    VkImageLayout                               layout;
+} VkAttachmentBindInfo;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkRenderPass                                renderPass;
+    uint32_t                                    attachmentCount;
+    const VkAttachmentBindInfo*                 pAttachments;
+    uint32_t                                    width;
+    uint32_t                                    height;
+    uint32_t                                    layers;
+} VkFramebufferCreateInfo;
+
+VkResult VKAPI vkCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    VkFramebuffer*                              pFramebuffer);
+
+// RenderPass
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkFormat                                    format;
+    uint32_t                                    samples;
+    VkAttachmentLoadOp                          loadOp;
+    VkAttachmentStoreOp                         storeOp;
+    VkAttachmentLoadOp                          stencilLoadOp;
+    VkAttachmentStoreOp                         stencilStoreOp;
+    VkImageLayout                               initialLayout;
+    VkImageLayout                               finalLayout;
+} VkAttachmentDescription;
+
+typedef struct {
+    uint32_t                                    attachment;
+    VkImageLayout                               layout;
+} VkAttachmentReference;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkPipelineBindPoint                         pipelineBindPoint;
+    VkSubpassDescriptionFlags                   flags;
+    uint32_t                                    inputCount;
+    const VkAttachmentReference*                inputAttachments;
+    uint32_t                                    colorCount;
+    const VkAttachmentReference*                colorAttachments;
+    const VkAttachmentReference*                resolveAttachments;
+    VkAttachmentReference                       depthStencilAttachment;
+    uint32_t                                    preserveCount;
+    const VkAttachmentReference*                preserveAttachments;
+} VkSubpassDescription;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    srcSubpass;
+    uint32_t                                    destSubpass;
+    VkPipelineStageFlags                        srcStageMask;
+    VkPipelineStageFlags                        destStageMask;
+    VkMemoryOutputFlags                         outputMask;
+    VkMemoryInputFlags                          inputMask;
+    VkBool32                                    byRegion;
+} VkSubpassDependency;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    attachmentCount;
+    const VkAttachmentDescription*              pAttachments;
+    uint32_t                                    subpassCount;
+    const VkSubpassDescription*                 pSubpasses;
+    uint32_t                                    dependencyCount;
+    const VkSubpassDependency*                  pDependencies;
+} VkRenderPassCreateInfo;
+
+VkResult VKAPI vkCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    VkRenderPass*                               pRenderPass);
+
+VkResult VKAPI vkGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity);
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkRenderPass                                renderPass;
+    VkFramebuffer                               framebuffer;
+    VkRect2D                                    renderArea;
+    uint32_t                                    attachmentCount;
+    const VkClearValue*                         pAttachmentClearValues;
+} VkRenderPassBeginInfo;
+
+typedef enum {
+    VK_RENDER_PASS_CONTENTS_INLINE = 0,
+    VK_RENDER_PASS_CONTENTS_SECONDARY_CMD_BUFFERS = 1,
+    VK_RENDER_PASS_CONTENTS_BEGIN_RANGE = VK_RENDER_PASS_CONTENTS_INLINE,
+    VK_RENDER_PASS_CONTENTS_END_RANGE = VK_RENDER_PASS_CONTENTS_SECONDARY_CMD_BUFFERS,
+    VK_RENDER_PASS_CONTENTS_NUM = (VK_RENDER_PASS_CONTENTS_SECONDARY_CMD_BUFFERS - VK_RENDER_PASS_CONTENTS_INLINE + 1),
+    VK_RENDER_PASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF
+} VkRenderPassContents;
+
+void VKAPI vkCmdBeginRenderPass(
+    VkCmdBuffer                                 cmdBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkRenderPassContents                        contents);
+
+void VKAPI vkCmdNextSubpass(
+    VkCmdBuffer                                 cmdBuffer,
+    VkRenderPassContents                        contents);
+
+void VKAPI vkCmdEndRenderPass(
+    VkCmdBuffer                                 cmdBuffer);
+----
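+
+For illustration, a minimal sketch of how a single-subpass-chain instance of the data flow above could be recorded. The +renderPass+, +framebuffer+, +cmdBuffer+ and +renderArea+ values are assumed to exist, the clear value is arbitrary, and the +VK_STRUCTURE_TYPE_*+ token name is assumed:
+
+[source,c]
+----
+/* Sketch only: record one render pass instance with two subpasses inline. */
+VkRenderPassBeginInfo   passBegin;
+VkClearValue            clearValue;     /* clear color, initialization not shown */
+
+passBegin.sType                     = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;    /* assumed token */
+passBegin.pNext                     = NULL;
+passBegin.renderPass                = renderPass;
+passBegin.framebuffer               = framebuffer;
+passBegin.renderArea                = renderArea;      /* full framebuffer area */
+passBegin.attachmentCount           = 1;
+passBegin.pAttachmentClearValues    = &clearValue;
+
+vkCmdBeginRenderPass(cmdBuffer, &passBegin, VK_RENDER_PASS_CONTENTS_INLINE);
+/* ... draw calls for subpass 0 ... */
+vkCmdNextSubpass(cmdBuffer, VK_RENDER_PASS_CONTENTS_INLINE);
+/* ... draw calls for subpass 1 ... */
+vkCmdEndRenderPass(cmdBuffer);
+----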
+
+Device initialization
+---------------------
+
+Device initialization tests verify that all reported devices can be created, with various possible configurations.
+
+[source,c]
+----
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    const char*                                 pAppName;
+    uint32_t                                    appVersion;
+    const char*                                 pEngineName;
+    uint32_t                                    engineVersion;
+    uint32_t                                    apiVersion;
+} VkApplicationInfo;
+
+typedef void* (VKAPI *PFN_vkAllocFunction)(
+    void*                           pUserData,
+    size_t                          size,
+    size_t                          alignment,
+    VkSystemAllocType               allocType);
+
+typedef void (VKAPI *PFN_vkFreeFunction)(
+    void*                           pUserData,
+    void*                           pMem);
+
+typedef struct {
+    void*                                       pUserData;
+    PFN_vkAllocFunction                         pfnAlloc;
+    PFN_vkFreeFunction                          pfnFree;
+} VkAllocCallbacks;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    const VkApplicationInfo*                    pAppInfo;
+    const VkAllocCallbacks*                     pAllocCb;
+    uint32_t                                    layerCount;
+    const char*const*                           ppEnabledLayerNames;
+    uint32_t                                    extensionCount;
+    const char*const*                           ppEnabledExtensionNames;
+} VkInstanceCreateInfo;
+
+VkResult VKAPI vkCreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    VkInstance*                                 pInstance);
+----
+
+ - +VkApplicationInfo+ parameters
+   * Arbitrary +pAppName+ / +pEngineName+ (spaces, utf-8, ...)
+   * +pAppName+ / +pEngineName+ = NULL?
+   * +appVersion+ / +engineVersion+ for 0, ~0, a couple of values
+   * Valid +apiVersion+
+   * Invalid +apiVersion+ (expected to fail?)
+ - +VkAllocCallbacks+
+   * Want to be able to run all tests with and without callbacks?
+   ** See discussion about default device in framework section
+   * Custom allocators that provide guardbands and check them at free
+   * Override malloc / free and verify that the driver doesn't call them if callbacks are provided
+   ** As part of object mgmt tests
+   * Must be inherited by all devices created from the instance
+ - +VkInstanceCreateInfo+
+   * Empty extension list
+   * Unsupported extensions (expect VK_UNSUPPORTED)
+   * Various combinations of supported extensions
+   ** Any dependencies between extensions (enabling Y requires enabling X)?
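+
+As an example of the baseline case exercised here, a minimal sketch of instance creation with no layers, extensions or allocation callbacks. The application/engine names and version values are arbitrary, and the +VK_STRUCTURE_TYPE_*+ / +VK_API_VERSION+ tokens are assumed:
+
+[source,c]
+----
+/* Sketch only: simplest possible instance creation. */
+VkApplicationInfo       appInfo;
+VkInstanceCreateInfo    instanceInfo;
+VkInstance              instance;
+
+appInfo.sType           = VK_STRUCTURE_TYPE_APPLICATION_INFO;      /* assumed token */
+appInfo.pNext           = NULL;
+appInfo.pAppName        = "dEQP";                                  /* arbitrary */
+appInfo.appVersion      = 0;
+appInfo.pEngineName     = "drawElements";                          /* arbitrary */
+appInfo.engineVersion   = 0;
+appInfo.apiVersion      = VK_API_VERSION;                          /* assumed token */
+
+instanceInfo.sType                      = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;  /* assumed token */
+instanceInfo.pNext                      = NULL;
+instanceInfo.pAppInfo                   = &appInfo;
+instanceInfo.pAllocCb                   = NULL;     /* no custom allocator */
+instanceInfo.layerCount                 = 0;
+instanceInfo.ppEnabledLayerNames        = NULL;
+instanceInfo.extensionCount             = 0;
+instanceInfo.ppEnabledExtensionNames    = NULL;
+
+VkResult result = vkCreateInstance(&instanceInfo, &instance);
+----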
+
+.Spec issues
+ * Only VkPhysicalDevice is passed to vkCreateDevice, ICD-specific magic needed for passing callbacks down to VkDevice instance
+
+[source,c]
+----
+typedef struct {
+    VkBool32                                    robustBufferAccess;
+    VkBool32                                    fullDrawIndexUint32;
+    VkBool32                                    imageCubeArray;
+    VkBool32                                    independentBlend;
+    VkBool32                                    geometryShader;
+    VkBool32                                    tessellationShader;
+    VkBool32                                    sampleRateShading;
+    VkBool32                                    dualSourceBlend;
+    VkBool32                                    logicOp;
+    VkBool32                                    instancedDrawIndirect;
+    VkBool32                                    depthClip;
+    VkBool32                                    depthBiasClamp;
+    VkBool32                                    fillModeNonSolid;
+    VkBool32                                    depthBounds;
+    VkBool32                                    wideLines;
+    VkBool32                                    largePoints;
+    VkBool32                                    textureCompressionETC2;
+    VkBool32                                    textureCompressionASTC_LDR;
+    VkBool32                                    textureCompressionBC;
+    VkBool32                                    pipelineStatisticsQuery;
+    VkBool32                                    vertexSideEffects;
+    VkBool32                                    tessellationSideEffects;
+    VkBool32                                    geometrySideEffects;
+    VkBool32                                    fragmentSideEffects;
+    VkBool32                                    shaderTessellationPointSize;
+    VkBool32                                    shaderGeometryPointSize;
+    VkBool32                                    shaderTextureGatherExtended;
+    VkBool32                                    shaderStorageImageExtendedFormats;
+    VkBool32                                    shaderStorageImageMultisample;
+    VkBool32                                    shaderStorageBufferArrayConstantIndexing;
+    VkBool32                                    shaderStorageImageArrayConstantIndexing;
+    VkBool32                                    shaderUniformBufferArrayDynamicIndexing;
+    VkBool32                                    shaderSampledImageArrayDynamicIndexing;
+    VkBool32                                    shaderStorageBufferArrayDynamicIndexing;
+    VkBool32                                    shaderStorageImageArrayDynamicIndexing;
+    VkBool32                                    shaderClipDistance;
+    VkBool32                                    shaderCullDistance;
+    VkBool32                                    shaderFloat64;
+    VkBool32                                    shaderInt64;
+    VkBool32                                    shaderFloat16;
+    VkBool32                                    shaderInt16;
+    VkBool32                                    shaderResourceResidency;
+    VkBool32                                    shaderResourceMinLOD;
+    VkBool32                                    sparse;
+    VkBool32                                    sparseResidencyBuffer;
+    VkBool32                                    sparseResidencyImage2D;
+    VkBool32                                    sparseResidencyImage3D;
+    VkBool32                                    sparseResidency2Samples;
+    VkBool32                                    sparseResidency4Samples;
+    VkBool32                                    sparseResidency8Samples;
+    VkBool32                                    sparseResidency16Samples;
+    VkBool32                                    sparseResidencyStandard2DBlockShape;
+    VkBool32                                    sparseResidencyStandard2DMSBlockShape;
+    VkBool32                                    sparseResidencyStandard3DBlockShape;
+    VkBool32                                    sparseResidencyAlignedMipSize;
+    VkBool32                                    sparseResidencyNonResident;
+    VkBool32                                    sparseResidencyNonResidentStrict;
+    VkBool32                                    sparseResidencyAliased;
+} VkPhysicalDeviceFeatures;
+
+typedef struct {
+    uint32_t                                    queueFamilyIndex;
+    uint32_t                                    queueCount;
+} VkDeviceQueueCreateInfo;
+
+typedef enum {
+    VK_DEVICE_CREATE_VALIDATION_BIT = 0x00000001,
+} VkDeviceCreateFlagBits;
+typedef VkFlags VkDeviceCreateFlags;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    queueRecordCount;
+    const VkDeviceQueueCreateInfo*              pRequestedQueues;
+    uint32_t                                    layerCount;
+    const char*const*                           ppEnabledLayerNames;
+    uint32_t                                    extensionCount;
+    const char*const*                           ppEnabledExtensionNames;
+    const VkPhysicalDeviceFeatures*             pEnabledFeatures;
+    VkDeviceCreateFlags                         flags;
+} VkDeviceCreateInfo;
+
+VkResult VKAPI vkCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    VkDevice*                                   pDevice);
+----
+
+ * Creating multiple devices from single physical device
+ * Different queue configurations
+ ** Combinations of supported node indexes
+ ** Use of all queues simultaneously for various operations
+ ** Various queue counts
+ * Various extension combinations
+ * Flags
+ ** Enabling validation (see spec issues)
+ ** VK_DEVICE_CREATE_MULTI_DEVICE_IQ_MATCH_BIT not relevant for Android
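+
+Similarly, a minimal single-queue device creation sketch. The queue family index is a placeholder, +physicalDevice+ and +enabledFeatures+ are assumed to come from earlier enumeration, and the structure-type token is assumed:
+
+[source,c]
+----
+/* Sketch only: request one queue from family 0, no layers or extensions. */
+VkDeviceQueueCreateInfo queueInfo;
+VkDeviceCreateInfo      deviceInfo;
+VkDevice                device;
+
+queueInfo.queueFamilyIndex  = 0;    /* placeholder queue family */
+queueInfo.queueCount        = 1;
+
+deviceInfo.sType                    = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;    /* assumed token */
+deviceInfo.pNext                    = NULL;
+deviceInfo.queueRecordCount         = 1;
+deviceInfo.pRequestedQueues         = &queueInfo;
+deviceInfo.layerCount               = 0;
+deviceInfo.ppEnabledLayerNames      = NULL;
+deviceInfo.extensionCount           = 0;
+deviceInfo.ppEnabledExtensionNames  = NULL;
+deviceInfo.pEnabledFeatures         = &enabledFeatures;
+deviceInfo.flags                    = 0;
+
+VkResult result = vkCreateDevice(physicalDevice, &deviceInfo, &device);
+----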
+
+.Spec issues
+ * Can the same queue node index be used multiple times in the +pRequestedQueues+ list?
+ * VK_DEVICE_CREATE_VALIDATION_BIT vs. layers
+
+Queue functions
+---------------
+
+Queue functions (one currently) will have a lot of incidental coverage from other tests, so only targeted corner-case tests are needed:
+
+ * +cmdBufferCount+ = 0
+ * Submitting empty VkCmdBuffer
+
+[source,c]
+----
+VkResult VKAPI vkQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    cmdBufferCount,
+    const VkCmdBuffer*                          pCmdBuffers,
+    VkFence                                     fence);
+----
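+
+For example, the +cmdBufferCount+ = 0 corner case boils down to a call along these lines (+queue+ and +fence+ assumed to be valid handles):
+
+[source,c]
+----
+/* Sketch only: submit nothing; the expectation to verify is that the call
+ * succeeds and the fence still ends up signaled. */
+VkResult result = vkQueueSubmit(queue, 0u, NULL, fence);
+----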
+
+.Spec issues
+ * Can +fence+ be +NULL+ if app doesn't need it?
+
+Synchronization
+---------------
+
+Synchronization tests will verify that all execution ordering primitives provided by the API function as expected. Testing scheduling and synchronization robustness will require generating non-trivial workloads and possibly randomization to reveal potential issues.
+
+[source,c]
+----
+VkResult VKAPI vkQueueWaitIdle(
+    VkQueue                                     queue);
+
+VkResult VKAPI vkDeviceWaitIdle(
+    VkDevice                                    device);
+----
+
+ * Verify that all sync objects are signaled after *WaitIdle() returns (see the sketch after this list)
+ ** Fences (vkGetFenceStatus)
+ ** Events (vkEventGetStatus)
+ ** No way to query semaphore status?
+ * Threads blocking at vkWaitForFences() must be resumed
+ * Various amounts of work queued (from nothing to large command buffers)
+ * vkDeviceWaitIdle() concurrently with commands that submit more work
+ * All types of work
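+
+A minimal sketch of the first check, assuming +fence+ was passed to an earlier +vkQueueSubmit+ call and that +VK_SUCCESS+ from vkGetFenceStatus indicates the signaled state:
+
+[source,c]
+----
+/* Sketch only: after DeviceWaitIdle() returns, previously submitted fences
+ * are expected to be signaled. */
+vkDeviceWaitIdle(device);
+
+if (vkGetFenceStatus(device, fence) != VK_SUCCESS)
+    failTest("fence not signaled even though device is idle");     /* hypothetical helper */
+----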
+
+Fences
+~~~~~~
+
+[source,c]
+----
+typedef enum {
+    VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001,
+} VkFenceCreateFlagBits;
+typedef VkFlags VkFenceCreateFlags;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkFenceCreateFlags                          flags;
+} VkFenceCreateInfo;
+
+VkResult VKAPI vkCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    VkFence*                                    pFence);
+
+VkResult VKAPI vkResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences);
+
+VkResult VKAPI vkGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence);
+
+VkResult VKAPI vkWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout);
+----
+
+ * Basic waiting on fences
+ ** All types of commands
+ ** Waiting on a different thread than the thread that submitted the work
+ * Reusing fences (vkResetFences)
+ * Waiting on a fence / querying status of a fence before it has been submitted to be signaled
+ * Waiting on a fence / querying status of a fence that has just been created with CREATE_SIGNALED_BIT
+ ** Reuse in different queue
+ ** Different queues
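+
+A minimal sketch of the basic wait-and-reuse flow from the first items above; the structure-type token, timeout value and handles are illustrative only:
+
+[source,c]
+----
+/* Sketch only: create a fence, use it for one submit, wait, then reset for reuse. */
+VkFenceCreateInfo   fenceInfo;
+VkFence             fence;
+
+fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;     /* assumed token */
+fenceInfo.pNext = NULL;
+fenceInfo.flags = 0;                                       /* not created signaled */
+
+vkCreateFence(device, &fenceInfo, &fence);
+
+vkQueueSubmit(queue, 1u, &cmdBuffer, fence);
+vkWaitForFences(device, 1u, &fence, VK_TRUE, ~0ull);       /* waitAll, effectively no timeout */
+
+vkResetFences(device, 1u, &fence);                         /* ready for the next submit */
+----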
+
+.Spec issues
+ * Using same fence in multiple vkQueueSubmit calls without waiting/resetting in between
+ ** Completion of first cmdbuf will reset fence and others won't do anything?
+ * Waiting on same fence from multiple threads?
+
+Semaphores
+~~~~~~~~~~
+
+[source,c]
+----
+typedef VkFlags VkSemaphoreCreateFlags;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkSemaphoreCreateFlags                      flags;
+} VkSemaphoreCreateInfo;
+
+VkResult VKAPI vkCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    VkSemaphore*                                pSemaphore);
+
+VkResult VKAPI vkQueueSignalSemaphore(
+    VkQueue                                     queue,
+    VkSemaphore                                 semaphore);
+
+VkResult VKAPI vkQueueWaitSemaphore(
+    VkQueue                                     queue,
+    VkSemaphore                                 semaphore);
+----
+
+ * All types of commands waiting & signaling semaphore
+ * Cross-queue semaphores
+ * Queuing wait on initially signaled semaphore
+ * Queuing wait immediately after queuing signaling
+ * vkQueueWaitIdle & vkDeviceWaitIdle waiting on semaphore
+ * Multiple queues waiting on same semaphore
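+
+A sketch of the cross-queue case, assuming producing work is submitted to +queueA+ and consuming work to +queueB+ (handles and fences assumed, structure-type token assumed):
+
+[source,c]
+----
+/* Sketch only: queueB must not start the consuming work before queueA signals. */
+VkSemaphoreCreateInfo   semInfo;
+VkSemaphore             semaphore;
+
+semInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;   /* assumed token */
+semInfo.pNext = NULL;
+semInfo.flags = 0;
+
+vkCreateSemaphore(device, &semInfo, &semaphore);
+
+vkQueueSubmit(queueA, 1u, &producerCmdBuf, producerFence);
+vkQueueSignalSemaphore(queueA, semaphore);
+
+vkQueueWaitSemaphore(queueB, semaphore);
+vkQueueSubmit(queueB, 1u, &consumerCmdBuf, consumerFence);
+----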
+
+NOTE: Semaphores might change; counting is causing problems for some IHVs.
+
+Events
+~~~~~~
+
+[source,c]
+----
+typedef VkFlags VkEventCreateFlags;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkEventCreateFlags                          flags;
+} VkEventCreateInfo;
+
+VkResult VKAPI vkCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    VkEvent*                                    pEvent);
+
+VkResult VKAPI vkGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VkResult VKAPI vkSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VkResult VKAPI vkResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+void VKAPI vkCmdSetEvent(
+    VkCmdBuffer                                 cmdBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+void VKAPI vkCmdResetEvent(
+    VkCmdBuffer                                 cmdBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+void VKAPI vkCmdWaitEvents(
+    VkCmdBuffer                                 cmdBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        destStageMask,
+    uint32_t                                    memBarrierCount,
+    const void* const*                          ppMemBarriers);
+----
+
+ * All types of work waiting on all types of events
+ ** Including signaling from CPU side (vkSetEvent)
+ ** Memory barrier
+ * Polling event status (vkGetEventStatus)
+ * Memory barriers (see also GPU cache control)
+ * Corner-cases:
+ ** Re-setting event before it has been signaled
+ ** Polling status of event concurrently with signaling it or re-setting it from another thread
+ ** Multiple commands (maybe multiple queues as well) setting same event
+ *** Presumably first set will take effect, rest have no effect before event is re-set
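+
+A sketch of the CPU-signal / GPU-wait case; the pipeline stage flag tokens and the structure-type token are assumed, and memory barriers are omitted for brevity:
+
+[source,c]
+----
+/* Sketch only: the command buffer waits on an event that the CPU sets later. */
+VkEventCreateInfo   eventInfo;
+VkEvent             event;
+
+eventInfo.sType = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO;     /* assumed token */
+eventInfo.pNext = NULL;
+eventInfo.flags = 0;
+
+vkCreateEvent(device, &eventInfo, &event);
+
+/* Recorded into cmdBuffer: later commands must not execute before the event is set. */
+vkCmdWaitEvents(cmdBuffer, 1u, &event,
+                VK_PIPELINE_STAGE_HOST_BIT,                /* assumed stage flag */
+                VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,         /* assumed stage flag */
+                0u, NULL);
+
+vkQueueSubmit(queue, 1u, &cmdBuffer, fence);
+vkSetEvent(device, event);                                 /* CPU-side signal */
+----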
+
+Pipeline queries
+----------------
+
+Pipeline query test details TBD. These are of lower priority initially.
+
+NOTE: The spec currently lists only the exact occlusion query as mandatory. This might be problematic for some implementations, and may change?
+
+[source,c]
+----
+typedef enum {
+    VK_QUERY_TYPE_OCCLUSION = 0,
+    VK_QUERY_TYPE_PIPELINE_STATISTICS = 1,
+    VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION,
+    VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_PIPELINE_STATISTICS,
+    VK_QUERY_TYPE_NUM = (VK_QUERY_TYPE_PIPELINE_STATISTICS - VK_QUERY_TYPE_OCCLUSION + 1),
+    VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkQueryType;
+
+typedef enum {
+    VK_QUERY_PIPELINE_STATISTIC_IA_VERTICES_BIT = 0x00000001,
+    VK_QUERY_PIPELINE_STATISTIC_IA_PRIMITIVES_BIT = 0x00000002,
+    VK_QUERY_PIPELINE_STATISTIC_VS_INVOCATIONS_BIT = 0x00000004,
+    VK_QUERY_PIPELINE_STATISTIC_GS_INVOCATIONS_BIT = 0x00000008,
+    VK_QUERY_PIPELINE_STATISTIC_GS_PRIMITIVES_BIT = 0x00000010,
+    VK_QUERY_PIPELINE_STATISTIC_C_INVOCATIONS_BIT = 0x00000020,
+    VK_QUERY_PIPELINE_STATISTIC_C_PRIMITIVES_BIT = 0x00000040,
+    VK_QUERY_PIPELINE_STATISTIC_FS_INVOCATIONS_BIT = 0x00000080,
+    VK_QUERY_PIPELINE_STATISTIC_TCS_PATCHES_BIT = 0x00000100,
+    VK_QUERY_PIPELINE_STATISTIC_TES_INVOCATIONS_BIT = 0x00000200,
+    VK_QUERY_PIPELINE_STATISTIC_CS_INVOCATIONS_BIT = 0x00000400,
+} VkQueryPipelineStatisticFlagBits;
+typedef VkFlags VkQueryPipelineStatisticFlags;
+
+typedef enum {
+    VK_QUERY_RESULT_DEFAULT = 0,
+    VK_QUERY_RESULT_64_BIT = 0x00000001,
+    VK_QUERY_RESULT_WAIT_BIT = 0x00000002,
+    VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004,
+    VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008,
+} VkQueryResultFlagBits;
+typedef VkFlags VkQueryResultFlags;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkQueryType                                 queryType;
+    uint32_t                                    slots;
+    VkQueryPipelineStatisticFlags               pipelineStatistics;
+} VkQueryPoolCreateInfo;
+
+VkResult VKAPI vkCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    VkQueryPool*                                pQueryPool);
+
+VkResult VKAPI vkGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    startQuery,
+    uint32_t                                    queryCount,
+    size_t*                                     pDataSize,
+    void*                                       pData,
+    VkQueryResultFlags                          flags);
+
+void VKAPI vkCmdBeginQuery(
+    VkCmdBuffer                                 cmdBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    slot,
+    VkQueryControlFlags                         flags);
+
+void VKAPI vkCmdEndQuery(
+    VkCmdBuffer                                 cmdBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    slot);
+
+void VKAPI vkCmdResetQueryPool(
+    VkCmdBuffer                                 cmdBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    startQuery,
+    uint32_t                                    queryCount);
+
+void VKAPI vkCmdCopyQueryPoolResults(
+    VkCmdBuffer                                 cmdBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    startQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    destBuffer,
+    VkDeviceSize                                destOffset,
+    VkDeviceSize                                destStride,
+    VkQueryResultFlags                          flags);
+----
+
+Buffers
+-------
+
+Buffers will have a lot of coverage from memory management and access tests. Targeted buffer tests need to verify that various corner-cases and more exotic configurations work as expected.
+
+[source,c]
+----
+typedef enum {
+    VK_BUFFER_USAGE_TRANSFER_SOURCE_BIT = 0x00000001,
+    VK_BUFFER_USAGE_TRANSFER_DESTINATION_BIT = 0x00000002,
+    VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004,
+    VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008,
+    VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010,
+    VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020,
+    VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040,
+    VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080,
+    VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100,
+} VkBufferUsageFlagBits;
+typedef VkFlags VkBufferUsageFlags;
+
+typedef enum {
+    VK_BUFFER_CREATE_SPARSE_BIT = 0x00000001,
+    VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002,
+    VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004,
+} VkBufferCreateFlagBits;
+typedef VkFlags VkBufferCreateFlags;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkDeviceSize                                size;
+    VkBufferUsageFlags                          usage;
+    VkBufferCreateFlags                         flags;
+    VkSharingMode                               sharingMode;
+    uint32_t                                    queueFamilyCount;
+    const uint32_t*                             pQueueFamilyIndices;
+} VkBufferCreateInfo;
+
+VkResult VKAPI vkCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    VkBuffer*                                   pBuffer);
+----
+
+ * All combinations of create and usage flags work
+ ** There are in total 511 combinations of usage flags and 7 combinations of create flags
+ * Buffers of various sizes can be created and they report sensible memory requirements
+ ** Test with different sizes:
+ *** 0 Byte
+ *** 1181 Byte
+ *** 15991 Byte
+ *** 16 kByte
+ *** Device limit (maxTexelBufferSize)
+ * Sparse buffers: very large (limit TBD) buffers can be created
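+
+A minimal creation sketch for one usage-flag combination; the size, sharing-mode token and structure-type token are illustrative:
+
+[source,c]
+----
+/* Sketch only: a 16384-byte buffer usable as both vertex and index buffer. */
+VkBufferCreateInfo  bufferInfo;
+VkBuffer            buffer;
+
+bufferInfo.sType                = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;    /* assumed token */
+bufferInfo.pNext                = NULL;
+bufferInfo.size                 = 16384;
+bufferInfo.usage                = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
+bufferInfo.flags                = 0;                            /* non-sparse */
+bufferInfo.sharingMode          = VK_SHARING_MODE_EXCLUSIVE;    /* assumed token */
+bufferInfo.queueFamilyCount     = 0;
+bufferInfo.pQueueFamilyIndices  = NULL;
+
+vkCreateBuffer(device, &bufferInfo, &buffer);
+----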
+
+[source,c]
+----
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkBuffer                                    buffer;
+    VkFormat                                    format;
+    VkDeviceSize                                offset;
+    VkDeviceSize                                range;
+} VkBufferViewCreateInfo;
+
+VkResult VKAPI vkCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    VkBufferView*                               pView);
+----
+
+ * Buffer views of all (valid) types and formats can be created from all (compatible) buffers
+ ** There are 2 buffer types and 173 different formats.
+ * Various view sizes
+ ** Complete buffer
+ ** Partial buffer
+ * View can be created before and after attaching memory to buffer
+ ** 2 tests for each bufferView
+ * Changing memory binding makes memory contents visible in already created views
+ ** Concurrently changing memory binding and creating views
+
+.Spec issues
+ * Alignment or size requirements for buffer views?
+
+Images
+------
+
+Like buffers, images will have significant coverage from other test groups that focus on various ways to access image data. Additional coverage not provided by those tests will be included in this feature group.
+
+Image functions
+~~~~~~~~~~~~~~~
+
+.Spec issues
+ * +VK_IMAGE_USAGE_GENERAL+?
+
+[source,c]
+----
+typedef enum {
+    VK_IMAGE_TYPE_1D = 0,
+    VK_IMAGE_TYPE_2D = 1,
+    VK_IMAGE_TYPE_3D = 2,
+    VK_IMAGE_TYPE_BEGIN_RANGE = VK_IMAGE_TYPE_1D,
+    VK_IMAGE_TYPE_END_RANGE = VK_IMAGE_TYPE_3D,
+    VK_IMAGE_TYPE_NUM = (VK_IMAGE_TYPE_3D - VK_IMAGE_TYPE_1D + 1),
+    VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkImageType;
+
+typedef enum {
+    VK_IMAGE_TILING_LINEAR = 0,
+    VK_IMAGE_TILING_OPTIMAL = 1,
+    VK_IMAGE_TILING_BEGIN_RANGE = VK_IMAGE_TILING_LINEAR,
+    VK_IMAGE_TILING_END_RANGE = VK_IMAGE_TILING_OPTIMAL,
+    VK_IMAGE_TILING_NUM = (VK_IMAGE_TILING_OPTIMAL - VK_IMAGE_TILING_LINEAR + 1),
+    VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF
+} VkImageTiling;
+
+typedef enum {
+    VK_IMAGE_USAGE_GENERAL = 0,
+    VK_IMAGE_USAGE_TRANSFER_SOURCE_BIT = 0x00000001,
+    VK_IMAGE_USAGE_TRANSFER_DESTINATION_BIT = 0x00000002,
+    VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004,
+    VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008,
+    VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010,
+    VK_IMAGE_USAGE_DEPTH_STENCIL_BIT = 0x00000020,
+    VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040,
+    VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080,
+} VkImageUsageFlagBits;
+typedef VkFlags VkImageUsageFlags;
+
+typedef enum {
+    VK_IMAGE_CREATE_SPARSE_BIT = 0x00000001,
+    VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002,
+    VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004,
+    VK_IMAGE_CREATE_INVARIANT_DATA_BIT = 0x00000008,
+    VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000010,
+    VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000020,
+} VkImageCreateFlagBits;
+typedef VkFlags VkImageCreateFlags;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkImageType                                 imageType;
+    VkFormat                                    format;
+    VkExtent3D                                  extent;
+    uint32_t                                    mipLevels;
+    uint32_t                                    arraySize;
+    uint32_t                                    samples;
+    VkImageTiling                               tiling;
+    VkImageUsageFlags                           usage;
+    VkImageCreateFlags                          flags;
+    VkSharingMode                               sharingMode;
+    uint32_t                                    queueFamilyCount;
+    const uint32_t*                             pQueueFamilyIndices;
+} VkImageCreateInfo;
+
+VkResult VKAPI vkCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    VkImage*                                    pImage);
+
+VkResult VKAPI vkGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout);
+----
+
+ * All valid and supported combinations of image parameters
+ ** Sampling verification with nearest only (other modes will be covered separately)
+ * Various image sizes
+ * Linear-layout images & writing data from CPU
+ * Copying data between identical opaque-layout images on CPU?
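+
+The linear-layout case above can be sketched roughly as follows. The format, extent, sharing-mode and aspect tokens are assumed, as are the +VkImageSubresource+ / +VkSubresourceLayout+ field names, since they are not listed here:
+
+[source,c]
+----
+/* Sketch only: create a small linear 2D image and query its layout for CPU writes. */
+VkImageCreateInfo   imageInfo;
+VkImage             image;
+VkImageSubresource  subresource;
+VkSubresourceLayout layout;
+
+imageInfo.sType                 = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;     /* assumed token */
+imageInfo.pNext                 = NULL;
+imageInfo.imageType             = VK_IMAGE_TYPE_2D;
+imageInfo.format                = VK_FORMAT_R8G8B8A8_UNORM;                /* assumed token */
+imageInfo.extent.width          = 64;
+imageInfo.extent.height         = 64;
+imageInfo.extent.depth          = 1;
+imageInfo.mipLevels             = 1;
+imageInfo.arraySize             = 1;
+imageInfo.samples               = 1;
+imageInfo.tiling                = VK_IMAGE_TILING_LINEAR;
+imageInfo.usage                 = VK_IMAGE_USAGE_SAMPLED_BIT;
+imageInfo.flags                 = 0;
+imageInfo.sharingMode           = VK_SHARING_MODE_EXCLUSIVE;               /* assumed token */
+imageInfo.queueFamilyCount      = 0;
+imageInfo.pQueueFamilyIndices   = NULL;
+
+vkCreateImage(device, &imageInfo, &image);
+
+subresource.aspect      = VK_IMAGE_ASPECT_COLOR;        /* assumed token and field names */
+subresource.mipLevel    = 0;
+subresource.arraySlice  = 0;
+
+vkGetImageSubresourceLayout(device, image, &subresource, &layout);
+/* After binding and mapping memory, rows can be written using the reported pitch. */
+----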
+
+Image view functions
+~~~~~~~~~~~~~~~~~~~~
+
+.Spec issues
+ * What are format compatibility rules?
+ * Can color/depth/stencil attachments write to an image which has a different format?
+ ** Can I create a DS view of an RGBA texture and write to only one component, for example by creating a VkDepthStencilView?
+ * Image view granularity
+ ** All sub-rects allowed? In all use cases (RTs for example)?
+ * Memory access granularity
+ ** Writing concurrently to different areas of same memory backed by same/different image or view
+
+[source,c]
+----
+typedef struct {
+    VkChannelSwizzle                            r;
+    VkChannelSwizzle                            g;
+    VkChannelSwizzle                            b;
+    VkChannelSwizzle                            a;
+} VkChannelMapping;
+
+typedef struct {
+    VkImageAspect                               aspect;
+    uint32_t                                    baseMipLevel;
+    uint32_t                                    mipLevels;
+    uint32_t                                    baseArraySlice;
+    uint32_t                                    arraySize;
+} VkImageSubresourceRange;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkImage                                     image;
+    VkImageViewType                             viewType;
+    VkFormat                                    format;
+    VkChannelMapping                            channels;
+    VkImageSubresourceRange                     subresourceRange;
+} VkImageViewCreateInfo;
+
+VkResult VKAPI vkCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    VkImageView*                                pView);
+----
+
+ * Image views of all (valid) types and formats can be created from all (compatible) images
+ * Channel swizzles
+ * Depth- and stencil-mode
+ * Different formats
+ * Various view sizes
+ ** Complete image
+ ** Partial image (mip- or array slice)
+ * View can be created before and after attaching memory to image
+ * Changing memory binding makes memory contents visible in already created views
+ ** Concurrently changing memory binding and creating views
+
+[source,c]
+----
+typedef enum {
+    VK_ATTACHMENT_VIEW_CREATE_READ_ONLY_DEPTH_BIT = 0x00000001,
+    VK_ATTACHMENT_VIEW_CREATE_READ_ONLY_STENCIL_BIT = 0x00000002,
+} VkAttachmentViewCreateFlagBits;
+typedef VkFlags VkAttachmentViewCreateFlags;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkImage                                     image;
+    VkFormat                                    format;
+    uint32_t                                    mipLevel;
+    uint32_t                                    baseArraySlice;
+    uint32_t                                    arraySize;
+    VkAttachmentViewCreateFlags                 flags;
+} VkAttachmentViewCreateInfo;
+
+VkResult VKAPI vkCreateAttachmentView(
+    VkDevice                                    device,
+    const VkAttachmentViewCreateInfo*           pCreateInfo,
+    VkAttachmentView*                           pView);
+----
+
+ * Writing to color/depth/stencil attachments in various view configurations
+ ** Multipass tests will contain some coverage for this
+ ** Image layout
+ ** View size
+ ** Image mip- or array sub-range
+ * +msaaResolveImage+
+ ** TODO What exactly is this?
+
+Shaders
+-------
+
+Shader API tests will verify that shader loading functions behave as expected. Verifying that various SPIR-V constructs are accepted and executed correctly is, however, not an objective; that will be covered more extensively by a separate SPIR-V test set.
+
+[source,c]
+----
+typedef VkFlags VkShaderModuleCreateFlags;
+typedef VkFlags VkShaderCreateFlags;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    size_t                                      codeSize;
+    const void*                                 pCode;
+    VkShaderModuleCreateFlags                   flags;
+} VkShaderModuleCreateInfo;
+
+VkResult VKAPI vkCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    VkShaderModule*                             pShaderModule);
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkShaderModule                              module;
+    const char*                                 pName;
+    VkShaderCreateFlags                         flags;
+} VkShaderCreateInfo;
+
+VkResult VKAPI vkCreateShader(
+    VkDevice                                    device,
+    const VkShaderCreateInfo*                   pCreateInfo,
+    VkShader*                                   pShader);
+----
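+
+A minimal loading sketch, assuming +spirvWords+ / +spirvSize+ hold a valid SPIR-V binary, "main" is the entry point and the structure-type tokens are as named:
+
+[source,c]
+----
+/* Sketch only: create a shader module from a SPIR-V blob and a shader from it. */
+VkShaderModuleCreateInfo    moduleInfo;
+VkShaderModule              module;
+VkShaderCreateInfo          shaderInfo;
+VkShader                    shader;
+
+moduleInfo.sType    = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;     /* assumed token */
+moduleInfo.pNext    = NULL;
+moduleInfo.codeSize = spirvSize;        /* size of the binary in bytes */
+moduleInfo.pCode    = spirvWords;
+moduleInfo.flags    = 0;
+
+vkCreateShaderModule(device, &moduleInfo, &module);
+
+shaderInfo.sType    = VK_STRUCTURE_TYPE_SHADER_CREATE_INFO;            /* assumed token */
+shaderInfo.pNext    = NULL;
+shaderInfo.module   = module;
+shaderInfo.pName    = "main";           /* entry point */
+shaderInfo.flags    = 0;
+
+vkCreateShader(device, &shaderInfo, &shader);
+----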
+
+Pipelines
+---------
+
+Construction
+~~~~~~~~~~~~
+
+Pipeline tests will create various pipelines and verify that rendering results appear to match (i.e. that the resulting HW pipeline is correct). Neither fixed-function unit corner-cases nor accuracy are verified. It is not possible to exhaustively test all pipeline configurations, so tests have to cover some areas in isolation and extend coverage with randomized tests.
+
+[source,c]
+----
+typedef enum {
+    VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001,
+    VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002,
+    VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004,
+} VkPipelineCreateFlagBits;
+typedef VkFlags VkPipelineCreateFlags;
+
+typedef struct {
+    uint32_t                                    constantId;
+    size_t                                      size;
+    uint32_t                                    offset;
+} VkSpecializationMapEntry;
+
+typedef struct {
+    uint32_t                                    mapEntryCount;
+    const VkSpecializationMapEntry*             pMap;
+    const size_t                                dataSize;
+    const void*                                 pData;
+} VkSpecializationInfo;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkShaderStage                               stage;
+    VkShader                                    shader;
+    const VkSpecializationInfo*                 pSpecializationInfo;
+} VkPipelineShaderStageCreateInfo;
+
+typedef struct {
+    uint32_t                                    binding;
+    uint32_t                                    strideInBytes;
+    VkVertexInputStepRate                       stepRate;
+} VkVertexInputBindingDescription;
+
+typedef struct {
+    uint32_t                                    location;
+    uint32_t                                    binding;
+    VkFormat                                    format;
+    uint32_t                                    offsetInBytes;
+} VkVertexInputAttributeDescription;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    bindingCount;
+    const VkVertexInputBindingDescription*      pVertexBindingDescriptions;
+    uint32_t                                    attributeCount;
+    const VkVertexInputAttributeDescription*    pVertexAttributeDescriptions;
+} VkPipelineVertexInputStateCreateInfo;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkPrimitiveTopology                         topology;
+    VkBool32                                    primitiveRestartEnable;
+} VkPipelineInputAssemblyStateCreateInfo;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    patchControlPoints;
+} VkPipelineTessellationStateCreateInfo;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    viewportCount;
+} VkPipelineViewportStateCreateInfo;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkBool32                                    depthClipEnable;
+    VkBool32                                    rasterizerDiscardEnable;
+    VkFillMode                                  fillMode;
+    VkCullMode                                  cullMode;
+    VkFrontFace                                 frontFace;
+} VkPipelineRasterStateCreateInfo;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    rasterSamples;
+    VkBool32                                    sampleShadingEnable;
+    float                                       minSampleShading;
+    VkSampleMask                                sampleMask;
+} VkPipelineMultisampleStateCreateInfo;
+
+typedef struct {
+    VkStencilOp                                 stencilFailOp;
+    VkStencilOp                                 stencilPassOp;
+    VkStencilOp                                 stencilDepthFailOp;
+    VkCompareOp                                 stencilCompareOp;
+} VkStencilOpState;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkBool32                                    depthTestEnable;
+    VkBool32                                    depthWriteEnable;
+    VkCompareOp                                 depthCompareOp;
+    VkBool32                                    depthBoundsEnable;
+    VkBool32                                    stencilTestEnable;
+    VkStencilOpState                            front;
+    VkStencilOpState                            back;
+} VkPipelineDepthStencilStateCreateInfo;
+
+typedef struct {
+    VkBool32                                    blendEnable;
+    VkBlend                                     srcBlendColor;
+    VkBlend                                     destBlendColor;
+    VkBlendOp                                   blendOpColor;
+    VkBlend                                     srcBlendAlpha;
+    VkBlend                                     destBlendAlpha;
+    VkBlendOp                                   blendOpAlpha;
+    VkChannelFlags                              channelWriteMask;
+} VkPipelineColorBlendAttachmentState;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkBool32                                    alphaToCoverageEnable;
+    VkBool32                                    logicOpEnable;
+    VkLogicOp                                   logicOp;
+    uint32_t                                    attachmentCount;
+    const VkPipelineColorBlendAttachmentState*  pAttachments;
+} VkPipelineColorBlendStateCreateInfo;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    stageCount;
+    const VkPipelineShaderStageCreateInfo*      pStages;
+    const VkPipelineVertexInputStateCreateInfo* pVertexInputState;
+    const VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState;
+    const VkPipelineTessellationStateCreateInfo* pTessellationState;
+    const VkPipelineViewportStateCreateInfo*    pViewportState;
+    const VkPipelineRasterStateCreateInfo*      pRasterState;
+    const VkPipelineMultisampleStateCreateInfo* pMultisampleState;
+    const VkPipelineDepthStencilStateCreateInfo* pDepthStencilState;
+    const VkPipelineColorBlendStateCreateInfo*  pColorBlendState;
+    VkPipelineCreateFlags                       flags;
+    VkPipelineLayout                            layout;
+    VkRenderPass                                renderPass;
+    uint32_t                                    subpass;
+    VkPipeline                                  basePipelineHandle;
+    int32_t                                     basePipelineIndex;
+} VkGraphicsPipelineCreateInfo;
+
+VkResult VKAPI vkCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    count,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    VkPipeline*                                 pPipelines);
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkPipelineShaderStageCreateInfo             cs;
+    VkPipelineCreateFlags                       flags;
+    VkPipelineLayout                            layout;
+    VkPipeline                                  basePipelineHandle;
+    int32_t                                     basePipelineIndex;
+} VkComputePipelineCreateInfo;
+
+VkResult VKAPI vkCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    count,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    VkPipeline*                                 pPipelines);
+----
+
+Pipeline caches
+^^^^^^^^^^^^^^^
+
+Extend pipeline tests to use pipeline caches, and test that pipelines created from a pre-populated cache still produce results identical to pipelines created with an empty cache.
+
+Verify that maximum cache size is not exceeded.
+
+[source,c]
+----
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    size_t                                      initialSize;
+    const void*                                 initialData;
+    size_t                                      maxSize;
+} VkPipelineCacheCreateInfo;
+
+VkResult VKAPI vkCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    VkPipelineCache*                            pPipelineCache);
+
+size_t VKAPI vkGetPipelineCacheSize(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache);
+
+VkResult VKAPI vkGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    void*                                       pData);
+
+VkResult VKAPI vkMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             destCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches);
+----
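+
+A sketch of the pre-populated cache scenario described above; +warmCache+ is assumed to have been used for pipeline creation already, and the structure-type token is assumed:
+
+[source,c]
+----
+/* Sketch only: save the contents of a warm cache and seed a new cache with it. */
+size_t  dataSize    = vkGetPipelineCacheSize(device, warmCache);
+void*   data        = malloc(dataSize);
+
+vkGetPipelineCacheData(device, warmCache, data);
+
+VkPipelineCacheCreateInfo   cacheInfo;
+VkPipelineCache             seededCache;
+
+cacheInfo.sType         = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;    /* assumed token */
+cacheInfo.pNext         = NULL;
+cacheInfo.initialSize   = dataSize;
+cacheInfo.initialData   = data;
+cacheInfo.maxSize       = dataSize;
+
+vkCreatePipelineCache(device, &cacheInfo, &seededCache);
+
+/* Pipelines created with seededCache should produce results identical to
+ * pipelines created with an empty cache. */
+----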
+
+Pipeline state
+~~~~~~~~~~~~~~
+
+Pipeline tests, as they need to verify rendering results, will provide a lot of coverage for pipeline state manipulation. In addition, some corner-case tests are needed:
+
+ * Re-setting pipeline state bits before use
+ * Carrying / manipulating only part of state over draw calls
+ * Submitting command buffers that have only pipeline state manipulation calls (should be a no-op; see the sketch after the declarations below)
+
+.Spec issues
+ * Does vkCmdBindPipeline invalidate other state bits?
+
+[source,c]
+----
+void VKAPI vkCmdBindPipeline(
+    VkCmdBuffer                                 cmdBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline);
+
+void VKAPI vkCmdBindDescriptorSets(
+    VkCmdBuffer                                 cmdBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    setCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets);
+
+void VKAPI vkCmdBindIndexBuffer(
+    VkCmdBuffer                                 cmdBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType);
+
+void VKAPI vkCmdBindVertexBuffers(
+    VkCmdBuffer                                 cmdBuffer,
+    uint32_t                                    startBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets);
+----
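+
+The state-only no-op case mentioned above would, for instance, record something like the following and submit it without any draw calls. The bind point and index type tokens are assumed, and +beginInfo+, +zeroOffset+ and the bound objects are assumed to exist:
+
+[source,c]
+----
+/* Sketch only: a command buffer containing nothing but state manipulation. */
+vkBeginCommandBuffer(cmdBuffer, &beginInfo);
+
+vkCmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);   /* assumed token */
+vkCmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &zeroOffset);
+vkCmdBindIndexBuffer(cmdBuffer, indexBuffer, 0ull, VK_INDEX_TYPE_UINT16);  /* assumed token */
+
+vkEndCommandBuffer(cmdBuffer);
+vkQueueSubmit(queue, 1u, &cmdBuffer, fence);    /* expected to behave as a no-op */
+----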
+
+Samplers
+--------
+
+Sampler tests verify that sampler parameters are mapped to the correct HW state. That will be verified by sampling various textures in certain configurations (as listed below). More exhaustive texture filtering verification will be done separately.
+
+ * All valid sampler state configurations
+ * Selected texture formats (RGBA8, FP16, integer textures)
+ * All texture types
+ * Mip-mapping with explicit and implicit LOD
+
+[source,c]
+----
+typedef enum {
+    VK_TEX_FILTER_NEAREST = 0,
+    VK_TEX_FILTER_LINEAR = 1,
+    VK_TEX_FILTER_BEGIN_RANGE = VK_TEX_FILTER_NEAREST,
+    VK_TEX_FILTER_END_RANGE = VK_TEX_FILTER_LINEAR,
+    VK_TEX_FILTER_NUM = (VK_TEX_FILTER_LINEAR - VK_TEX_FILTER_NEAREST + 1),
+    VK_TEX_FILTER_MAX_ENUM = 0x7FFFFFFF
+} VkTexFilter;
+
+typedef enum {
+    VK_TEX_MIPMAP_MODE_BASE = 0,
+    VK_TEX_MIPMAP_MODE_NEAREST = 1,
+    VK_TEX_MIPMAP_MODE_LINEAR = 2,
+    VK_TEX_MIPMAP_MODE_BEGIN_RANGE = VK_TEX_MIPMAP_MODE_BASE,
+    VK_TEX_MIPMAP_MODE_END_RANGE = VK_TEX_MIPMAP_MODE_LINEAR,
+    VK_TEX_MIPMAP_MODE_NUM = (VK_TEX_MIPMAP_MODE_LINEAR - VK_TEX_MIPMAP_MODE_BASE + 1),
+    VK_TEX_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkTexMipmapMode;
+
+typedef enum {
+    VK_TEX_ADDRESS_WRAP = 0,
+    VK_TEX_ADDRESS_MIRROR = 1,
+    VK_TEX_ADDRESS_CLAMP = 2,
+    VK_TEX_ADDRESS_MIRROR_ONCE = 3,
+    VK_TEX_ADDRESS_CLAMP_BORDER = 4,
+    VK_TEX_ADDRESS_BEGIN_RANGE = VK_TEX_ADDRESS_WRAP,
+    VK_TEX_ADDRESS_END_RANGE = VK_TEX_ADDRESS_CLAMP_BORDER,
+    VK_TEX_ADDRESS_NUM = (VK_TEX_ADDRESS_CLAMP_BORDER - VK_TEX_ADDRESS_WRAP + 1),
+    VK_TEX_ADDRESS_MAX_ENUM = 0x7FFFFFFF
+} VkTexAddress;
+
+typedef enum {
+    VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0,
+    VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2,
+    VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4,
+    VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5,
+    VK_BORDER_COLOR_BEGIN_RANGE = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
+    VK_BORDER_COLOR_END_RANGE = VK_BORDER_COLOR_INT_OPAQUE_WHITE,
+    VK_BORDER_COLOR_NUM = (VK_BORDER_COLOR_INT_OPAQUE_WHITE - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK + 1),
+    VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF
+} VkBorderColor;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkTexFilter                                 magFilter;
+    VkTexFilter                                 minFilter;
+    VkTexMipmapMode                             mipMode;
+    VkTexAddress                                addressU;
+    VkTexAddress                                addressV;
+    VkTexAddress                                addressW;
+    float                                       mipLodBias;
+    float                                       maxAnisotropy;
+    VkBool32                                    compareEnable;
+    VkCompareOp                                 compareOp;
+    float                                       minLod;
+    float                                       maxLod;
+    VkBorderColor                               borderColor;
+} VkSamplerCreateInfo;
+
+VkResult VKAPI vkCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    VkSampler*                                  pSampler);
+----
+
+Dynamic state objects
+---------------------
+
+Pipeline tests will include coverage for most dynamic state object usage, as some pipeline configurations need corresponding dynamic state objects. In addition, there are a couple of corner-cases worth exploring separately:
+
+ * Re-setting dynamic state bindings one or more times before first use
+ * Dynamic state object binding persistence over pipeline changes
+ * Large numbers of unique dynamic state objects in a command buffer, pass, or multipass
+
+[source,c]
+----
+// Viewport
+
+typedef struct {
+    float                                       originX;
+    float                                       originY;
+    float                                       width;
+    float                                       height;
+    float                                       minDepth;
+    float                                       maxDepth;
+} VkViewport;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    viewportAndScissorCount;
+    const VkViewport*                           pViewports;
+    const VkRect2D*                             pScissors;
+} VkDynamicViewportStateCreateInfo;
+
+VkResult VKAPI vkCreateDynamicViewportState(
+    VkDevice                                    device,
+    const VkDynamicViewportStateCreateInfo*     pCreateInfo,
+    VkDynamicViewportState*                     pState);
+
+void VKAPI vkCmdBindDynamicViewportState(
+    VkCmdBuffer                                 cmdBuffer,
+    VkDynamicViewportState                      dynamicViewportState);
+
+// Raster
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    float                                       depthBias;
+    float                                       depthBiasClamp;
+    float                                       slopeScaledDepthBias;
+    float                                       lineWidth;
+} VkDynamicRasterStateCreateInfo;
+
+VkResult VKAPI vkCreateDynamicRasterState(
+    VkDevice                                    device,
+    const VkDynamicRasterStateCreateInfo*       pCreateInfo,
+    VkDynamicRasterState*                       pState);
+
+void VKAPI vkCmdBindDynamicRasterState(
+    VkCmdBuffer                                 cmdBuffer,
+    VkDynamicRasterState                        dynamicRasterState);
+
+// Color blend
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    float                                       blendConst[4];
+} VkDynamicColorBlendStateCreateInfo;
+
+VkResult VKAPI vkCreateDynamicColorBlendState(
+    VkDevice                                    device,
+    const VkDynamicColorBlendStateCreateInfo*   pCreateInfo,
+    VkDynamicColorBlendState*                   pState);
+
+void VKAPI vkCmdBindDynamicColorBlendState(
+    VkCmdBuffer                                 cmdBuffer,
+    VkDynamicColorBlendState                    dynamicColorBlendState);
+
+// Depth & stencil
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    float                                       minDepthBounds;
+    float                                       maxDepthBounds;
+    uint32_t                                    stencilReadMask;
+    uint32_t                                    stencilWriteMask;
+    uint32_t                                    stencilFrontRef;
+    uint32_t                                    stencilBackRef;
+} VkDynamicDepthStencilStateCreateInfo;
+
+VkResult VKAPI vkCreateDynamicDepthStencilState(
+    VkDevice                                    device,
+    const VkDynamicDepthStencilStateCreateInfo* pCreateInfo,
+    VkDynamicDepthStencilState*                 pState);
+
+void VKAPI vkCmdBindDynamicDepthStencilState(
+    VkCmdBuffer                                 cmdBuffer,
+    VkDynamicDepthStencilState                  dynamicDepthStencilState);
+----
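+
+For example, the binding-persistence case could bind one viewport state object and keep it across a pipeline change, along the lines of the sketch below; +viewport+, +scissor+, the pipelines and the tokens marked as assumed are illustrative:
+
+[source,c]
+----
+/* Sketch only: one viewport/scissor state object used across a pipeline change. */
+VkDynamicViewportStateCreateInfo    viewportInfo;
+VkDynamicViewportState              viewportState;
+
+viewportInfo.sType                      = VK_STRUCTURE_TYPE_DYNAMIC_VIEWPORT_STATE_CREATE_INFO;    /* assumed token */
+viewportInfo.pNext                      = NULL;
+viewportInfo.viewportAndScissorCount    = 1;
+viewportInfo.pViewports                 = &viewport;
+viewportInfo.pScissors                  = &scissor;
+
+vkCreateDynamicViewportState(device, &viewportInfo, &viewportState);
+
+vkCmdBindDynamicViewportState(cmdBuffer, viewportState);
+vkCmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineA);  /* assumed token */
+/* ... draw ... */
+vkCmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipelineB);
+/* The viewport state binding is expected to persist over the pipeline change. */
+----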
+
+Command buffers
+---------------
+
+Tests for various rendering features will provide significant coverage for command buffer recording. Additional coverage will be needed for:
+
+ * Re-setting command buffers
+ * Very small (empty) and large command buffers
+ * Various optimize flags combined with various command buffer sizes and contents
+ ** Forcing optimize flags in other tests might be useful for finding cases that may break
+
+[source,c]
+----
+typedef enum {
+    VK_CMD_BUFFER_LEVEL_PRIMARY = 0,
+    VK_CMD_BUFFER_LEVEL_SECONDARY = 1,
+    VK_CMD_BUFFER_LEVEL_BEGIN_RANGE = VK_CMD_BUFFER_LEVEL_PRIMARY,
+    VK_CMD_BUFFER_LEVEL_END_RANGE = VK_CMD_BUFFER_LEVEL_SECONDARY,
+    VK_CMD_BUFFER_LEVEL_NUM = (VK_CMD_BUFFER_LEVEL_SECONDARY - VK_CMD_BUFFER_LEVEL_PRIMARY + 1),
+    VK_CMD_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF
+} VkCmdBufferLevel;
+
+typedef VkFlags VkCmdBufferCreateFlags;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkCmdPool                                   cmdPool;
+    VkCmdBufferLevel                            level;
+    VkCmdBufferCreateFlags                      flags;
+} VkCmdBufferCreateInfo;
+
+VkResult VKAPI vkCreateCommandBuffer(
+    VkDevice                                    device,
+    const VkCmdBufferCreateInfo*                pCreateInfo,
+    VkCmdBuffer*                                pCmdBuffer);
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkCmdBufferOptimizeFlags                    flags;
+    VkRenderPass                                renderPass;
+    VkFramebuffer                               framebuffer;
+} VkCmdBufferBeginInfo;
+
+typedef enum {
+    VK_CMD_BUFFER_OPTIMIZE_SMALL_BATCH_BIT = 0x00000001,
+    VK_CMD_BUFFER_OPTIMIZE_PIPELINE_SWITCH_BIT = 0x00000002,
+    VK_CMD_BUFFER_OPTIMIZE_ONE_TIME_SUBMIT_BIT = 0x00000004,
+    VK_CMD_BUFFER_OPTIMIZE_DESCRIPTOR_SET_SWITCH_BIT = 0x00000008,
+    VK_CMD_BUFFER_OPTIMIZE_NO_SIMULTANEOUS_USE_BIT = 0x00000010,
+} VkCmdBufferOptimizeFlagBits;
+typedef VkFlags VkCmdBufferOptimizeFlags;
+
+VkResult VKAPI vkBeginCommandBuffer(
+    VkCmdBuffer                                 cmdBuffer,
+    const VkCmdBufferBeginInfo*                 pBeginInfo);
+
+VkResult VKAPI vkEndCommandBuffer(
+    VkCmdBuffer                                 cmdBuffer);
+
+typedef enum {
+    VK_CMD_BUFFER_RESET_RELEASE_RESOURCES = 0x00000001,
+} VkCmdBufferResetFlagBits;
+typedef VkFlags VkCmdBufferResetFlags;
+
+VkResult VKAPI vkResetCommandBuffer(
+    VkCmdBuffer                                 cmdBuffer,
+    VkCmdBufferResetFlags                       flags);
+----
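+
+As a minimal sketch (using only the declarations above), the following illustrates the record / reset / re-record sequence these tests exercise; `cmdBuffer` and a filled-in `beginInfo` are assumed to exist:
+
+[source,c]
+----
+/* Hypothetical sketch: beginInfo is assumed to be a valid VkCmdBufferBeginInfo. */
+vkBeginCommandBuffer(cmdBuffer, &beginInfo);
+/* ... record commands ... */
+vkEndCommandBuffer(cmdBuffer);
+
+/* Explicit reset, releasing resources held by the buffer. */
+vkResetCommandBuffer(cmdBuffer, VK_CMD_BUFFER_RESET_RELEASE_RESOURCES);
+
+/* Re-recording an already recorded buffer performs an implicit reset
+ * (see the lifetime tests below). */
+vkBeginCommandBuffer(cmdBuffer, &beginInfo);
+vkEndCommandBuffer(cmdBuffer);
+----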
+
+Command Pools (6.1 in VK 1.0 Spec)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+[source,c]
+----
+typedef enum {
+    VK_CMD_POOL_CREATE_TRANSIENT_BIT = 0x00000001,
+    VK_CMD_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002,
+} VkCmdPoolCreateFlagBits;
+typedef VkFlags VkCmdPoolCreateFlags;
+
+typedef struct {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    queueFamilyIndex;
+    VkCmdPoolCreateFlags                        flags;
+} VkCmdPoolCreateInfo;
+
+VkResult VKAPI vkCreateCommandPool(
+    VkDevice                                    device,
+    const VkCmdPoolCreateInfo*                  pCreateInfo,
+    VkCmdPool*                                  pCmdPool);
+
+typedef enum {
+    VK_CMD_POOL_RESET_RELEASE_RESOURCES = 0x00000001,
+} VkCmdPoolResetFlagBits;
+typedef VkFlags VkCmdPoolResetFlags;
+
+VkResult VKAPI vkResetCommandPool(
+    VkDevice                                    device,
+    VkCmdPool                                   cmdPool,
+    VkCmdPoolResetFlags                         flags);
+----
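+
+A minimal usage sketch, assuming `device` is a valid VkDevice and `poolInfo` is a filled-in VkCmdPoolCreateInfo with VK_CMD_POOL_CREATE_TRANSIENT_BIT set (corresponding to case 3 in the table below):
+
+[source,c]
+----
+/* Hypothetical sketch built only from the declarations above. */
+VkCmdPool cmdPool;
+
+vkCreateCommandPool(device, &poolInfo, &cmdPool);
+
+/* ... allocate and use command buffers from the pool ... */
+
+vkResetCommandPool(device, cmdPool, VK_CMD_POOL_RESET_RELEASE_RESOURCES);
+----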
+
+[cols="1,4,8,8", options="header"]
+|===
+|No. | Tested area | Test Description | Relevant specification text
+|1  | Creation | Call vkCreateCommandPool with every parameter that may be NULL set to NULL | If pAllocator is not NULL, pAllocator must be a pointer to a valid VkAllocationCallbacks structure
+|2  | | ... with pAllocator != NULL |
+|3  | | ... with VK_COMMAND_POOL_CREATE_TRANSIENT_BIT set in pCreateInfo's flags | flags is a combination of bitfield flags indicating usage behavior for the pool and command buffers allocated from it.
+|4  | | ... with VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT set in pCreateInfo's flags |
+|5  | Resetting | Call vkResetCommandPool with VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT set |
+|6  | | ... without any bits set |
+|===
+
+Command Buffer Lifetime (6.2 in VK 1.0 Spec)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+[cols="1,4,8,8", options="header"]
+|===
+|No. | Tested area | Test Description | Relevant specification text
+|1  | Allocation | Allocate a single primary buffer |
+|2  | | Allocate a large number of primary buffers |
+|3  | | Allocate no primary buffers (bufferCount == 0) |
+|4  | | Allocate a single secondary buffer |
+|5  | | Allocate a large number of secondary buffers |
+|6  | | Allocate no secondary buffers (bufferCount == 0) |
+|7  | Execution | Execute a small primary buffer |
+|8  | | Execute a large primary buffer |
+|9  | Resetting - implicit | Reset a command buffer by calling vkBeginCommandBuffer on a buffer that has already been recorded |
+|===
+
+Command Buffer Recording (6.3 in VK 1.0 Spec)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+[cols="1,4,8,8", options="header"]
+|===
+|No. | Tested area | Test Description | Relevant specification text
+|1  | Recording to buffers  | Record a single command in a primary buffer |
+|2  | | Record a large number of commands in a primary buffer |
+|3  | | Record a single command in a secondary buffer |
+|4  | | Record a large number of commands in a secondary buffer |
+|5  | | Record a primary command buffer without VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT. Submit it twice in a row. |
+|6  | | Record a secondary command buffer without VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT. Submit it twice in a row. |
+|7  | Recording for one time usage | Record a primary command buffer with VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT. Submit it, reset, record, and submit again. |
+|8  | | Record a secondary command buffer with VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT. Submit it, reset, record, and submit again. |
+|9  | Render pass ignoring  | if VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT flag is not set, the values of renderPass, framebuffer, and subpass members of the VkCommandBufferBeginInfo should be ignored | If flags has VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT set, the entire secondary command buffer is considered inside a render pass. In this case, the renderPass, framebuffer, and subpass members of the VkCommandBufferBeginInfo structure must be set as described below. Otherwise the renderPass, framebuffer, and subpass members of the VkCommandBufferBeginInfo structure are ignored, and the secondary command buffer may not contain commands that are only allowed inside a render pass.
+|10  | Simultaneous use – primary buffers | Set the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT flag and submit the buffer twice simultaneously | If flags does not have VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT set, the command buffer must not be pending execution more than once at any given time. A primary command buffer is considered to be pending execution from the time it is submitted via vkQueueSubmit until that submission completes.
+|11  | Simultaneous use – secondary buffers | Set VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT on secondary buffer, and use the secondary buffer twice in primary buffer | If VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT is not set on a secondary command buffer, that command buffer cannot be used more than once in a given primary command buffer.
+|12 | Recording with an active occlusion query | Record a secondary command buffer with occlusionQueryEnable == VK_TRUE and queryFlags == VK_QUERY_CONTROL_PRECISE_BIT and execute it in a primary buffer with an active precise occlusion query |
+|13 | | ... imprecise occlusion query |
+|14 | | ... queryFlags == 0x00000000, imprecise occlusion query |
+|===
+
+Command Buffer Submission (6.4 in VK 1.0 Spec)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+[cols="1,4,8,8", options="header"]
+|===
+|No. | Tested area | Test Description | Relevant specification text
+|1  | Submission correctness | Call vkQueueSubmit with submitCount equal to the actual count of submits | pSubmits must be an array of submitCount valid VkSubmitInfo structures. If submitCount is 0 though, pSubmits is ignored
+|2  | | ... submitCount == 0 |
+|3  | Submission with semaphores | Call vkQueueSubmit that waits for a single semaphore |
+|4  | | ... for multiple semaphores |
+|5  | | ... notifies a single semaphore |
+|6  | | ... notifies multiple semaphores |
+|7  | Submission without a fence | Call vkQueueSubmit with VK_NULL_HANDLE passed as fence. | If fence is not VK_NULL_HANDLE, fence must be a valid VkFence handle
+|===
+
+Secondary Command Buffer Execution (6.6 in VK 1.0 Spec)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+[source,c]
+----
+void VKAPI vkCmdExecuteCommands(
+    VkCmdBuffer                                 cmdBuffer,
+    uint32_t                                    cmdBuffersCount,
+    const VkCmdBuffer*                          pCmdBuffers);
+----
+
+[cols="1,4,8,8", options="header"]
+|===
+|No. | Tested area | Test Description | Relevant specification text
+|1  | Secondary buffers execution | Check if secondary command buffers are executed | Secondary command buffers may be called from primary command buffers, and are not directly submitted to queues.
+|2  | Simultaneous use | Call vkCmdExecuteCommands with pCommandBuffers such that its element is already pending execution in commandBuffer and was created with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT flag | Any given element of pCommandBuffers must not be already pending execution in commandBuffer, or appear twice in pCommandBuffers, unless it was created with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT flag
+|3  | | Call vkCmdExecuteCommands with pCommandBuffers such that its element appears twice in pCommandBuffers and was created with the VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT flag |
+|4  | Call from within a VkRenderPass | Call vkCmdExecuteCommands within a VkRenderPass with all elements of pCommandBuffers recorded with the VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT | If vkCmdExecuteCommands is being called within a VkRenderPass, any given element of pCommandBuffers must have been recorded with the VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT
+|===
+
+Commands Allowed Inside Command Buffers (6.7 in VK 1.0 Spec)
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+[cols="1,4,8,8", options="header"]
+|===
+|No. | Tested area | Test Description | Relevant specification text
+|1  | Order of execution | Check if vkCmdBindPipeline commands are executed in-order  |
+|2  | | Check if vkCmdBindDescriptorSets commands are executed in-order  |
+|3  | | Check if vkCmdBindIndexBuffer commands are executed in-order |
+|4  | | Check if vkCmdBindVertexBuffers commands are executed in-order |
+|5  | | Check if vkCmdResetQueryPool, vkCmdBeginQuery, vkCmdEndQuery, vkCmdCopyQueryPoolResults commands are executed in-order relative to each other |
+|===
+
+Draw commands
+-------------
+
+Draw command tests verify that all draw parameters are respected (including vertex input state) and that various draw call sizes work correctly. The tests will not, however, validate that all side effects of shader invocations happen as intended (that is covered by feature-specific tests), nor that primitive rasterization is fully correct (that will be covered by separate targeted tests).
+
+[source,c]
+----
+void VKAPI vkCmdDraw(
+    VkCmdBuffer                                 cmdBuffer,
+    uint32_t                                    firstVertex,
+    uint32_t                                    vertexCount,
+    uint32_t                                    firstInstance,
+    uint32_t                                    instanceCount);
+
+void VKAPI vkCmdDrawIndexed(
+    VkCmdBuffer                                 cmdBuffer,
+    uint32_t                                    firstIndex,
+    uint32_t                                    indexCount,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance,
+    uint32_t                                    instanceCount);
+
+void VKAPI vkCmdDrawIndirect(
+    VkCmdBuffer                                 cmdBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    count,
+    uint32_t                                    stride);
+
+void VKAPI vkCmdDrawIndexedIndirect(
+    VkCmdBuffer                                 cmdBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    count,
+    uint32_t                                    stride);
+----
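+
+For illustration, a hypothetical recording of an instanced draw and an indexed draw with a non-zero vertex offset, using the parameter order shown above (`cmdBuffer`, a bound pipeline, and vertex/index buffers are assumed):
+
+[source,c]
+----
+/* Hypothetical sketch of the kind of parameter combinations the tests cover. */
+vkCmdDraw(cmdBuffer, 0u /* firstVertex */, 6u /* vertexCount */, 0u /* firstInstance */, 4u /* instanceCount */);
+vkCmdDrawIndexed(cmdBuffer, 0u /* firstIndex */, 36u /* indexCount */, -8 /* vertexOffset */, 0u /* firstInstance */, 1u /* instanceCount */);
+----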
+
+Compute
+-------
+
+Like the draw tests, compute dispatch tests will validate that call parameters have the desired effects. In addition, compute tests need to verify that various dispatch parameters (number of work groups, invocation IDs) are passed correctly to the shader invocations.
+
+NOTE: Compute-specific shader features, such as shared memory access, are assumed to be covered by SPIR-V tests.
+
+[source,c]
+----
+void VKAPI vkCmdDispatch(
+    VkCmdBuffer                                 cmdBuffer,
+    uint32_t                                    x,
+    uint32_t                                    y,
+    uint32_t                                    z);
+
+void VKAPI vkCmdDispatchIndirect(
+    VkCmdBuffer                                 cmdBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset);
+----
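+
+A minimal sketch of the two dispatch variants, assuming `cmdBuffer`, a bound compute pipeline, and a parameter buffer `paramBuffer` exist:
+
+[source,c]
+----
+/* Hypothetical sketch: dispatch a 4x2x1 grid of work groups directly, then
+ * once more with the dispatch parameters read from a buffer at offset 0. */
+vkCmdDispatch(cmdBuffer, 4u, 2u, 1u);
+vkCmdDispatchIndirect(cmdBuffer, paramBuffer, 0);
+----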
+
+Copies and blits
+----------------
+
+Buffer copies
+~~~~~~~~~~~~~
+
+Buffer copy tests need to validate that copies and updates happen as expected for both simple and more complex cases:
+
+ * Whole-buffer and partial copies
+ * Small (1 byte) to very large copies and updates
+ * Copies between objects backed by the same memory
+
+NOTE: GPU cache control tests need to verify copy source and destination visibility as well.
+
+[source,c]
+----
+typedef struct {
+    VkDeviceSize                                srcOffset;
+    VkDeviceSize                                destOffset;
+    VkDeviceSize                                copySize;
+} VkBufferCopy;
+
+void VKAPI vkCmdCopyBuffer(
+    VkCmdBuffer                                 cmdBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    destBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions);
+
+void VKAPI vkCmdUpdateBuffer(
+    VkCmdBuffer                                 cmdBuffer,
+    VkBuffer                                    destBuffer,
+    VkDeviceSize                                destOffset,
+    VkDeviceSize                                dataSize,
+    const uint32_t*                             pData);
+
+void VKAPI vkCmdFillBuffer(
+    VkCmdBuffer                                 cmdBuffer,
+    VkBuffer                                    destBuffer,
+    VkDeviceSize                                destOffset,
+    VkDeviceSize                                fillSize,
+    uint32_t                                    data);
+----
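+
+An illustrative sketch of a multi-region partial copy (the kind of case listed above), assuming `cmdBuffer`, `srcBuffer`, and `destBuffer` exist:
+
+[source,c]
+----
+/* Hypothetical sketch: two copy regions, including a minimal 1-byte copy. */
+const VkBufferCopy regions[2] =
+{
+    { 0,    0,   256 },   /* srcOffset, destOffset, copySize */
+    { 1024, 512, 1   },   /* minimal 1-byte partial copy     */
+};
+
+vkCmdCopyBuffer(cmdBuffer, srcBuffer, destBuffer, 2u, &regions[0]);
+----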
+
+Image copies
+~~~~~~~~~~~~
+
+Image copy and blitting tests need to validate that copies and updates happen as expected for both simple and more complex cases:
+
+* Image copies should cover
+** Whole and partial copies
+** Source and destination are backed by the same image
+** Compressed and uncompressed copies
+** Multiple copy regions in one command
+** Copies between different but compatible formats
+* Blitting should cover
+** Whole and partial copies
+** With and without scaling
+** Copies between different but compatible formats (format conversions)
+
+[source,c]
+----
+typedef struct {
+    VkImageSubresourceLayers                    srcSubresource;
+    VkOffset3D                                  srcOffset;
+    VkImageSubresourceLayers                    destSubresource;
+    VkOffset3D                                  destOffset;
+    VkExtent3D                                  extent;
+} VkImageCopy;
+
+typedef struct {
+    VkImageSubresourceLayers                    srcSubresource;
+    VkOffset3D                                  srcOffset;
+    VkExtent3D                                  srcExtent;
+    VkImageSubresourceLayers                    destSubresource;
+    VkOffset3D                                  destOffset;
+    VkExtent3D                                  destExtent;
+} VkImageBlit;
+
+void VKAPI vkCmdCopyImage(
+    VkCmdBuffer                                 cmdBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     destImage,
+    VkImageLayout                               destImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions);
+
+void VKAPI vkCmdBlitImage(
+    VkCmdBuffer                                 cmdBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     destImage,
+    VkImageLayout                               destImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkTexFilter                                 filter);
+----
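+
+A hypothetical blit with 2x upscaling, assuming `cmdBuffer`, the source and destination images, and a `subresource` layers struct have been set up elsewhere; the filter enum value used here is an assumption of this sketch:
+
+[source,c]
+----
+/* Hypothetical sketch: blit a 64x64 region into a 128x128 region. */
+VkImageBlit blitRegion;
+
+blitRegion.srcSubresource   = subresource;
+blitRegion.srcOffset.x      = 0;   blitRegion.srcOffset.y       = 0;   blitRegion.srcOffset.z      = 0;
+blitRegion.srcExtent.width  = 64;  blitRegion.srcExtent.height  = 64;  blitRegion.srcExtent.depth  = 1;
+blitRegion.destSubresource  = subresource;
+blitRegion.destOffset.x     = 0;   blitRegion.destOffset.y      = 0;   blitRegion.destOffset.z     = 0;
+blitRegion.destExtent.width = 128; blitRegion.destExtent.height = 128; blitRegion.destExtent.depth = 1;
+
+vkCmdBlitImage(cmdBuffer, srcImage, VK_IMAGE_LAYOUT_GENERAL, destImage, VK_IMAGE_LAYOUT_GENERAL, 1u, &blitRegion, VK_TEX_FILTER_LINEAR);
+----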
+
+Copies between buffers and images
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Copies between buffers and images are used for checking rendering results throughout the Vulkan CTS, so they
+are already exercised heavily. These tests should cover the remaining corner cases:
+
+* Various sizes
+** Whole and partial copies
+* Multiple copies in one command
+
+[source,c]
+----
+typedef struct {
+    VkDeviceSize                                bufferOffset;
+    uint32_t                                    bufferRowLength;
+    uint32_t                                    bufferImageHeight;
+    VkImageSubresourceLayers                    imageSubresource;
+    VkOffset3D                                  imageOffset;
+    VkExtent3D                                  imageExtent;
+} VkBufferImageCopy;
+
+void VKAPI vkCmdCopyBufferToImage(
+    VkCmdBuffer                                 cmdBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     destImage,
+    VkImageLayout                               destImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+void VKAPI vkCmdCopyImageToBuffer(
+    VkCmdBuffer                                 cmdBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    destBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+----
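+
+A minimal sketch of a tightly packed buffer-to-image copy, assuming `cmdBuffer`, the buffer and image objects, and an `imageSubresource` layers struct exist:
+
+[source,c]
+----
+/* Hypothetical sketch: copy a 64x64 region from offset 0 of srcBuffer into
+ * destImage; bufferRowLength/bufferImageHeight of 0 are assumed to mean
+ * tightly packed data. */
+VkBufferImageCopy region;
+
+region.bufferOffset      = 0;
+region.bufferRowLength   = 0;
+region.bufferImageHeight = 0;
+region.imageSubresource  = imageSubresource;
+region.imageOffset.x     = 0;  region.imageOffset.y      = 0;  region.imageOffset.z     = 0;
+region.imageExtent.width = 64; region.imageExtent.height = 64; region.imageExtent.depth = 1;
+
+vkCmdCopyBufferToImage(cmdBuffer, srcBuffer, destImage, VK_IMAGE_LAYOUT_GENERAL, 1u, &region);
+----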
+
+Clearing images
+~~~~~~~~~~~~~~~
+
+Clearing tests need to validate that clearing happens as expected for both simple and more complex cases:
+
+* Clear the attachments.
+** Whole and partial clear.
+
+[source,c]
+----
+typedef union {
+    float                                       f32[4];
+    int32_t                                     s32[4];
+    uint32_t                                    u32[4];
+} VkClearColorValue;
+
+typedef struct {
+    float                                       depth;
+    uint32_t                                    stencil;
+} VkClearDepthStencilValue;
+
+typedef union {
+    VkClearColorValue                           color;
+    VkClearDepthStencilValue                    ds;
+} VkClearValue;
+
+void VKAPI vkCmdClearColorImage(
+    VkCmdBuffer                                 cmdBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+void VKAPI vkCmdClearDepthStencilImage(
+    VkCmdBuffer                                 cmdBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    float                                       depth,
+    uint32_t                                    stencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+void VKAPI vkCmdClearColorAttachment(
+    VkCmdBuffer                                 cmdBuffer,
+    uint32_t                                    colorAttachment,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rectCount,
+    const VkRect3D*                             pRects);
+
+void VKAPI vkCmdClearDepthStencilAttachment(
+    VkCmdBuffer                                 cmdBuffer,
+    VkImageAspectFlags                          imageAspectMask,
+    VkImageLayout                               imageLayout,
+    float                                       depth,
+    uint32_t                                    stencil,
+    uint32_t                                    rectCount,
+    const VkRect3D*                             pRects);
+----
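+
+A minimal sketch of a whole-image color clear, assuming `cmdBuffer`, `image`, and a full VkImageSubresourceRange named `fullRange` exist:
+
+[source,c]
+----
+/* Hypothetical sketch: clear a color image to opaque red. */
+VkClearColorValue clearColor;
+
+clearColor.f32[0] = 1.0f;
+clearColor.f32[1] = 0.0f;
+clearColor.f32[2] = 0.0f;
+clearColor.f32[3] = 1.0f;
+
+vkCmdClearColorImage(cmdBuffer, image, VK_IMAGE_LAYOUT_GENERAL, &clearColor, 1u, &fullRange);
+----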
+
+Multisample resolve
+~~~~~~~~~~~~~~~~~~~
+
+Multisample resolve tests need to validate that resolve operations happen as expected for both simple and more complex cases.
+
+[source,c]
+----
+typedef struct {
+    VkImageSubresourceLayers                    srcSubresource;
+    VkOffset3D                                  srcOffset;
+    VkImageSubresourceLayers                    destSubresource;
+    VkOffset3D                                  destOffset;
+    VkExtent3D                                  extent;
+} VkImageResolve;
+
+void VKAPI vkCmdResolveImage(
+    VkCmdBuffer                                 cmdBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     destImage,
+    VkImageLayout                               destImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions);
+----
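+
+A minimal sketch of a whole-image resolve, assuming `cmdBuffer`, the multisampled and single-sampled images, and a `subresource` layers struct exist:
+
+[source,c]
+----
+/* Hypothetical sketch: resolve a 128x128 multisampled image into a
+ * single-sampled destination image. */
+VkImageResolve region;
+
+region.srcSubresource  = subresource;
+region.srcOffset.x     = 0;   region.srcOffset.y   = 0;   region.srcOffset.z   = 0;
+region.destSubresource = subresource;
+region.destOffset.x    = 0;   region.destOffset.y  = 0;   region.destOffset.z  = 0;
+region.extent.width    = 128; region.extent.height = 128; region.extent.depth  = 1;
+
+vkCmdResolveImage(cmdBuffer, msaaImage, VK_IMAGE_LAYOUT_GENERAL, resolveImage, VK_IMAGE_LAYOUT_GENERAL, 1u, &region);
+----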
+
+Push constants
+--------------
+
+[source,c]
+----
+void VKAPI vkCmdPushConstants(
+    VkCmdBuffer                                 cmdBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    start,
+    uint32_t                                    length,
+    const void*                                 values);
+----
+
+ * Range size: verify various sizes of a single range, from minimum to maximum
+ * Range count: verify ranges covering all the valid shader stages
+ * Data updates: verify sub-range updates and multiple consecutive updates (see the sketch below)
+
+NOTE: Invalid usages specified in the spec are currently NOT tested.
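+
+A minimal sketch of two sub-range updates for the vertex stage, assuming `cmdBuffer` and `pipelineLayout` exist; the stage flag constant and byte-based offsets are assumptions of this sketch:
+
+[source,c]
+----
+/* Hypothetical sketch: update two disjoint 16-byte push constant ranges. */
+const float scale[4] = { 1.0f, 1.0f, 1.0f, 1.0f };
+const float bias[4]  = { 0.0f, 0.5f, 0.0f, 0.0f };
+
+vkCmdPushConstants(cmdBuffer, pipelineLayout, VK_SHADER_STAGE_VERTEX_BIT, 0u,  16u, &scale[0]);
+vkCmdPushConstants(cmdBuffer, pipelineLayout, VK_SHADER_STAGE_VERTEX_BIT, 16u, 16u, &bias[0]);
+----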
+
+GPU timestamps
+--------------
+
+[source,c]
+----
+enum VkPipelineStageFlagBits
+{
+    VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT                       = 0x00000001,
+    VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT                     = 0x00000002,
+    VK_PIPELINE_STAGE_VERTEX_INPUT_BIT                      = 0x00000004,
+    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT                     = 0x00000008,
+    VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT       = 0x00000010,
+    VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT    = 0x00000020,
+    VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT                   = 0x00000040,
+    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT                   = 0x00000080,
+    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT              = 0x00000100,
+    VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT               = 0x00000200,
+    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT           = 0x00000400,
+    VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT                    = 0x00000800,
+    VK_PIPELINE_STAGE_TRANSFER_BIT                          = 0x00001000,
+    VK_PIPELINE_STAGE_HOST_BIT                              = 0x00002000,
+    VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT                      = 0x00004000,
+    VK_PIPELINE_STAGE_ALL_COMMANDS_BIT                      = 0x00008000,
+};
+
+void VKAPI vkCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    deUint32                                    entry);
+----
+
+ * All timestamp stages
+ * Recording multiple timestamps in a single command buffer (see the sketch below)
+ * Timestamps inside and outside of a render pass
+ * Command buffers that only record timestamps
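+
+A minimal sketch of recording two timestamps around other work, assuming `commandBuffer` and `queryPool` exist:
+
+[source,c]
+----
+/* Hypothetical sketch: write timestamps into consecutive query pool entries
+ * at the top of the pipe and after color attachment output. */
+vkCmdWriteTimestamp(commandBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, queryPool, 0u);
+/* ... other commands ... */
+vkCmdWriteTimestamp(commandBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, queryPool, 1u);
+----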
+
+.Spec issues
+
+
+Validation layer tests
+----------------------
+
+Validation layer tests exercise all relevant invalid API usage patterns and verify that correct return values and error messages are generated. In addition, validation tests would try to load invalid SPIR-V binaries and verify that all generic SPIR-V rules, as well as Vulkan SPIR-V environment rules, are checked.
+
+Android doesn't plan to ship the validation layer as part of the system image, so validation tests are not required by the Android CTS and are currently of very low priority.
diff --git a/doc/testspecs/VK/apitests.conf b/doc/testspecs/VK/apitests.conf
new file mode 100644
index 0000000..0b31922
--- /dev/null
+++ b/doc/testspecs/VK/apitests.conf
@@ -0,0 +1,5 @@
+[attributes]
+newline=\n
+
+[replacements]
+\+\/-=&plusmn;
diff --git a/execserver/xsPosixTestProcess.cpp b/execserver/xsPosixTestProcess.cpp
index 5c9dcb6..9f4d21d 100644
--- a/execserver/xsPosixTestProcess.cpp
+++ b/execserver/xsPosixTestProcess.cpp
@@ -200,7 +200,7 @@
 	}
 
 	// Construct command line.
-	string cmdLine = de::FilePath(name).isAbsolutePath() ? name : de::FilePath::join(workingDir, name).normalize().getPath();
+	string cmdLine = de::FilePath(name).isAbsolutePath() ? name : de::FilePath::join(workingDir, name).getPath();
 	cmdLine += string(" --deqp-log-filename=") + logFilePath.getBaseName();
 
 	if (hasCaseList)
diff --git a/external/fetch_sources.py b/external/fetch_sources.py
index 71204f5..40d2a6c 100644
--- a/external/fetch_sources.py
+++ b/external/fetch_sources.py
@@ -147,7 +147,7 @@
 
 		pushWorkingDir(fullDstPath)
 		try:
-			execute(["git", "fetch", self.url])
+			execute(["git", "fetch", self.url, "+refs/heads/*:refs/remotes/origin/*"])
 			execute(["git", "checkout", self.revision])
 		finally:
 			popWorkingDir()
@@ -168,6 +168,14 @@
 		"a18233c99e1dc59a256180e6871d9305a42e91b3f98799b3ceb98e87e9ec5e31",
 		"libpng",
 		postExtract = postExtractLibpng),
+	GitRepo(
+		"git@gitlab.khronos.org:spirv/spirv-tools.git",
+		"3e6b2dfa699b13987657298ab2a7652a0a577ca9",
+		"spirv-tools"),
+	GitRepo(
+		"git@gitlab.khronos.org:GLSL/glslang.git",
+		"38616fa321e6beea9816ee2fe9817f6d79cd1766",
+		"glslang"),
 ]
 
 def parseArgs ():
diff --git a/external/glslang/.gitignore b/external/glslang/.gitignore
new file mode 100644
index 0000000..85de9cf
--- /dev/null
+++ b/external/glslang/.gitignore
@@ -0,0 +1 @@
+src
diff --git a/external/glslang/CMakeLists.txt b/external/glslang/CMakeLists.txt
new file mode 100644
index 0000000..834190c
--- /dev/null
+++ b/external/glslang/CMakeLists.txt
@@ -0,0 +1,112 @@
+# cmake file for glslang
+
+if (NOT DE_DEFS)
+	message(FATAL_ERROR "Include Defs.cmake")
+endif ()
+
+if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/src/SPIRV/GlslangToSpv.cpp")
+	set(DEFAULT_GLSLANG_SRC_PATH ${CMAKE_CURRENT_SOURCE_DIR}/src)
+else ()
+	set(DEFAULT_GLSLANG_SRC_PATH "../glslang")
+endif ()
+
+set(GLSLANG_SRC_PATH ${DEFAULT_GLSLANG_SRC_PATH} CACHE STRING "Path to glslang source tree")
+
+if (IS_ABSOLUTE ${GLSLANG_SRC_PATH})
+	set(GLSLANG_ABS_PATH ${GLSLANG_SRC_PATH})
+else ()
+	set(GLSLANG_ABS_PATH "${CMAKE_SOURCE_DIR}/${GLSLANG_SRC_PATH}")
+endif ()
+
+find_package(BISON)
+
+# \todo [2015-06-24 pyry] Full C++11 support on Android requires using Clang + libc++
+if (NOT BISON_FOUND AND DE_OS_IS_WIN32 AND EXISTS ${GLSLANG_ABS_PATH}/tools/bison.exe)
+	message(STATUS "Using pre-built bison executable")
+	set(BISON_EXECUTABLE ${GLSLANG_ABS_PATH}/tools/bison.exe)
+	set(BISON_FOUND ON)
+endif ()
+
+if (BISON_FOUND AND EXISTS ${GLSLANG_ABS_PATH}/glslang/GenericCodeGen/CodeGen.cpp AND NOT DE_OS_IS_ANDROID)
+	message(STATUS "glslang found; building with DEQP_SUPPORT_GLSLANG")
+
+	include_directories(
+		${GLSLANG_ABS_PATH}
+		${GLSLANG_ABS_PATH}/glslang
+		${GLSLANG_ABS_PATH}/glslang/Include
+		${GLSLANG_ABS_PATH}/glslang/Public
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent
+		${GLSLANG_ABS_PATH}/glslang/GenericCodeGen
+		${GLSLANG_ABS_PATH}/glslang/OSDependent
+		${GLSLANG_ABS_PATH}/OGLCompilersDLL
+		${GLSLANG_ABS_PATH}/SPIRV
+		${CMAKE_CURRENT_BINARY_DIR}
+		)
+
+	set(GLSLANG_SRCS
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/Constant.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/InfoSink.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/Initialize.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/IntermTraverse.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/Intermediate.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/ParseHelper.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/PoolAlloc.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/RemoveTree.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/Scan.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/ShaderLang.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/SymbolTable.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/Versions.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/intermOut.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/limits.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/linkValidate.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/parseConst.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/reflection.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/preprocessor/Pp.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/preprocessor/PpAtom.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/preprocessor/PpContext.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/preprocessor/PpMemory.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/preprocessor/PpScanner.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/preprocessor/PpSymbols.cpp
+		${GLSLANG_ABS_PATH}/glslang/MachineIndependent/preprocessor/PpTokens.cpp
+		${GLSLANG_ABS_PATH}/glslang/GenericCodeGen/CodeGen.cpp
+		${GLSLANG_ABS_PATH}/glslang/GenericCodeGen/Link.cpp
+		${GLSLANG_ABS_PATH}/OGLCompilersDLL/InitializeDll.cpp
+
+		${GLSLANG_ABS_PATH}/SPIRV/GlslangToSpv.cpp
+		${GLSLANG_ABS_PATH}/SPIRV/InReadableOrder.cpp
+		${GLSLANG_ABS_PATH}/SPIRV/SpvBuilder.cpp
+		${GLSLANG_ABS_PATH}/SPIRV/SPVRemapper.cpp
+		${GLSLANG_ABS_PATH}/SPIRV/doc.cpp
+		${GLSLANG_ABS_PATH}/SPIRV/disassemble.cpp
+
+		${CMAKE_CURRENT_BINARY_DIR}/glslang_tab.cpp
+		${CMAKE_CURRENT_BINARY_DIR}/glslang_tab.cpp.h
+
+		osinclude.cpp
+		)
+
+	set(CMAKE_C_FLAGS	${DE_3RD_PARTY_C_FLAGS})
+	set(CMAKE_CXX_FLAGS	${DE_3RD_PARTY_CXX_FLAGS})
+
+	if (DE_COMPILER_IS_GCC OR DE_COMPILER_IS_CLANG)
+		set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11")
+	endif ()
+
+	add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/glslang_tab.cpp ${CMAKE_CURRENT_BINARY_DIR}/glslang_tab.cpp.h
+					   COMMAND ${BISON_EXECUTABLE} --defines=${CMAKE_CURRENT_BINARY_DIR}/glslang_tab.cpp.h -t MachineIndependent/glslang.y -o ${CMAKE_CURRENT_BINARY_DIR}/glslang_tab.cpp
+					   MAIN_DEPENDENCY ${GLSLANG_ABS_PATH}/glslang/MachineIndependent/glslang.y
+					   WORKING_DIRECTORY ${GLSLANG_ABS_PATH}/glslang)
+
+	add_library(glslang STATIC ${GLSLANG_SRCS})
+	target_link_libraries(glslang dethread ${ZLIB_LIBRARY})
+
+	set(GLSLANG_INCLUDE_PATH	${GLSLANG_ABS_PATH}	PARENT_SCOPE)
+	set(GLSLANG_LIBRARY			glslang				PARENT_SCOPE)
+	set(DEQP_HAVE_GLSLANG		ON					PARENT_SCOPE)
+
+else ()
+	message(STATUS "glslang not found; GLSL to SPIR-V compilation not available")
+
+	set(DEQP_HAVE_GLSLANG		OFF					PARENT_SCOPE)
+
+endif ()
diff --git a/external/glslang/osinclude.cpp b/external/glslang/osinclude.cpp
new file mode 100644
index 0000000..7ce67f6
--- /dev/null
+++ b/external/glslang/osinclude.cpp
@@ -0,0 +1,107 @@
+/*-------------------------------------------------------------------------
+ * dEQP glslang integration
+ * ------------------------
+ *
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *//*!
+ * \file
+ * \brief glslang OS interface.
+ *//*--------------------------------------------------------------------*/
+
+#include "osinclude.h"
+
+#include "deThread.h"
+#include "deThreadLocal.h"
+
+namespace glslang
+{
+
+DE_STATIC_ASSERT(sizeof(deThreadLocal)	== sizeof(OS_TLSIndex));
+DE_STATIC_ASSERT(sizeof(deThread)		== sizeof(void*));
+
+// Thread-local
+
+OS_TLSIndex OS_AllocTLSIndex (void)
+{
+	return (OS_TLSIndex)deThreadLocal_create();
+}
+
+bool OS_SetTLSValue (OS_TLSIndex nIndex, void* lpvValue)
+{
+	deThreadLocal_set((deThreadLocal)nIndex, lpvValue);
+	return true;
+}
+
+bool OS_FreeTLSIndex (OS_TLSIndex nIndex)
+{
+	deThreadLocal_destroy((deThreadLocal)nIndex);
+	return true;
+}
+
+void* OS_GetTLSValue (OS_TLSIndex nIndex)
+{
+	return deThreadLocal_get((deThreadLocal)nIndex);
+}
+
+// Global lock - not used
+
+void InitGlobalLock (void)
+{
+}
+
+void GetGlobalLock (void)
+{
+}
+
+void ReleaseGlobalLock (void)
+{
+}
+
+// Threading
+
+DE_STATIC_ASSERT(sizeof(void*) >= sizeof(deThread));
+
+static void EnterGenericThread (void* entry)
+{
+	((TThreadEntrypoint)entry)(DE_NULL);
+}
+
+void* OS_CreateThread (TThreadEntrypoint entry)
+{
+	return (void*)(deUintptr)deThread_create(EnterGenericThread, (void*)entry, DE_NULL);
+}
+
+void OS_WaitForAllThreads (void* threads, int numThreads)
+{
+	for (int ndx = 0; ndx < numThreads; ndx++)
+	{
+		const deThread thread = (deThread)(deUintptr)((void**)threads)[ndx];
+		deThread_join(thread);
+		deThread_destroy(thread);
+	}
+}
+
+void OS_Sleep (int milliseconds)
+{
+	deSleep(milliseconds);
+}
+
+void OS_DumpMemoryCounters (void)
+{
+	// Not used
+}
+
+} // glslang
diff --git a/external/spirv-tools/.gitignore b/external/spirv-tools/.gitignore
new file mode 100644
index 0000000..85de9cf
--- /dev/null
+++ b/external/spirv-tools/.gitignore
@@ -0,0 +1 @@
+src
diff --git a/external/spirv-tools/CMakeLists.txt b/external/spirv-tools/CMakeLists.txt
new file mode 100644
index 0000000..c6166ec
--- /dev/null
+++ b/external/spirv-tools/CMakeLists.txt
@@ -0,0 +1,31 @@
+# cmake file for spirv-tools
+
+if (NOT DE_DEFS)
+	message(FATAL_ERROR "Include Defs.cmake")
+endif ()
+
+if (EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/src/include/libspirv/libspirv.h")
+	set(DEFAULT_SPIRV_TOOLS_SRC_PATH ${CMAKE_CURRENT_SOURCE_DIR}/src)
+else ()
+	set(DEFAULT_SPIRV_TOOLS_SRC_PATH "../spirv-tools")
+endif ()
+
+set(SPIRV_TOOLS_SRC_PATH ${DEFAULT_SPIRV_TOOLS_SRC_PATH} CACHE STRING "Path to spirv-tools source tree")
+if (IS_ABSOLUTE ${SPIRV_TOOLS_SRC_PATH})
+	set(SPIRV_TOOLS_ABS_PATH ${SPIRV_TOOLS_SRC_PATH})
+else ()
+	set(SPIRV_TOOLS_ABS_PATH "${CMAKE_SOURCE_DIR}/${SPIRV_TOOLS_SRC_PATH}")
+endif ()
+
+if (EXISTS ${SPIRV_TOOLS_ABS_PATH}/source/opcode.cpp)
+	message(STATUS "spirv-tools found; building with DEQP_HAVE_SPIRV_TOOLS")
+	set(CMAKE_C_FLAGS ${DE_3RD_PARTY_C_FLAGS})
+	set(CMAKE_CXX_FLAGS ${DE_3RD_PARTY_CXX_FLAGS})
+
+	set(DEQP_HAVE_SPIRV_TOOLS		ON					PARENT_SCOPE)
+	set(SPIRV_SKIP_EXECUTABLES		ON CACHE BOOL "" FORCE)
+	add_subdirectory(${SPIRV_TOOLS_ABS_PATH} spirv-tools)
+else ()
+	message(STATUS "spirv-tools not found; SPIR-V assembly not available")
+	set(DEQP_HAVE_SPIRV_TOOLS		OFF					PARENT_SCOPE)
+endif ()
diff --git a/external/vulkancts/LICENSE b/external/vulkancts/LICENSE
new file mode 100644
index 0000000..9f586b7
--- /dev/null
+++ b/external/vulkancts/LICENSE
@@ -0,0 +1,25 @@
+
+Copyright (c) 2015 Google Inc.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and/or associated documentation files (the
+"Materials"), to deal in the Materials without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Materials, and to
+permit persons to whom the Materials are furnished to do so, subject to
+the following conditions:
+
+The above copyright notice(s) and this permission notice shall be
+included in all copies or substantial portions of the Materials.
+
+The Materials are Confidential Information as defined by the
+Khronos Membership Agreement until designated non-confidential by
+Khronos, at which point this condition clause shall be removed.
+
+THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
diff --git a/external/vulkancts/README.md b/external/vulkancts/README.md
new file mode 100644
index 0000000..65561e9
--- /dev/null
+++ b/external/vulkancts/README.md
@@ -0,0 +1,323 @@
+Vulkan CTS README
+=================
+
+This document describes how to build and run the Vulkan Conformance Test suite.
+
+Vulkan CTS is built on the dEQP framework. dEQP documentation is available
+at http://source.android.com/devices/graphics/testing.html
+
+
+Requirements
+------------
+
+### Common
+
+ * Git (for checking out sources)
+ * Python 2.7.x (all recent versions in 2.x should work, 3.x is not supported)
+ * CMake 2.8 or newer
+
+### Win32
+
+ * Visual Studio 2013 or newer (glslang uses several C++11 features)
+
+### Linux
+
+ * Standard toolchain (make, gcc/clang)
+
+### Android
+
+ * Android NDK r10e
+ * Android SDK with: SDK Tools, SDK Platform-tools, SDK Build-tools, and API 22
+ * Apache Ant
+ * Windows: either NMake or JOM in PATH
+
+
+Building CTS
+------------
+
+To build dEQP, you first need to download the sources for zlib, libpng, glslang,
+and spirv-tools.
+
+To download sources, run:
+
+	python external/fetch_sources.py
+
+You may occasionally need to re-run `fetch_sources.py` to update to the latest
+glslang and spirv-tools revisions.
+
+NOTE: glslang integration is not yet available on Android due to a toolchain
+bug, so pre-compiled SPIR-V binaries must be used. See instructions below.
+
+With CMake, out-of-source builds are always recommended. Create a build directory
+of your choosing, and generate Makefiles or an IDE project in that directory
+using cmake.
+
+
+### Windows x86-32
+
+	cmake <path to vulkancts> -G"Visual Studio 12"
+	start dEQP-Core-default.sln
+
+### Windows x86-64
+
+	cmake <path to vulkancts> -G"Visual Studio 12 Win64"
+	start dEQP-Core-default.sln
+
+### Linux 32-bit Debug
+
+	cmake <path to vulkancts> -DCMAKE_BUILD_TYPE=Debug -DCMAKE_C_FLAGS=-m32 -DCMAKE_CXX_FLAGS=-m32
+	make -j
+
+A release build can be done by using -DCMAKE_BUILD_TYPE=Release
+
+### Linux 64-bit Debug
+
+	cmake <path to vulkancts> -DCMAKE_BUILD_TYPE=Debug -DCMAKE_C_FLAGS=-m64 -DCMAKE_CXX_FLAGS=-m64
+	make -j
+
+### Android
+
+The following command will build the CTS into android/package/bin/dEQP-debug.apk.
+
+	python android/scripts/build.py
+
+The package can be installed either by running:
+
+	python android/scripts/install.py
+
+By default the CTS package will contain libdeqp.so built for armeabi-v7a, arm64-v8a,
+and x86 ABIs, but that can be changed in the android/scripts/common.py script.
+
+To pick which ABI to use at install time, the following command must be used
+instead:
+
+	adb install --abi <ABI name> android/package/bin/dEQP-debug.apk /data/local/tmp/dEQP-debug.apk
+
+
+Building Mustpass
+-----------------
+
+The current mustpass is checked into the repository and can be found at:
+
+	external/vulkancts/mustpass/1.0.0/vk-default.txt
+
+The Vulkan CTS mustpass can be re-generated by running:
+
+	python <vulkancts>/external/vulkancts/build_mustpass.py
+
+
+Pre-compiling SPIR-V binaries
+-----------------------------
+
+For distribution, and for platforms that don't support GLSL to SPIR-V compilation,
+SPIR-V binaries must be pre-built with the following command:
+
+	python external/vulkancts/build_spirv_binaries.py
+
+Binaries will be written to `external/vulkancts/data/vulkan/prebuilt/`.
+
+Test modules (or, in the case of Android, the APK) must be re-built after building
+SPIR-V programs in order for the binaries to be available.
+
+
+Running CTS
+-----------
+
+The following command line options MUST be used when running the CTS:
+
+	--deqp-caselist-file=<vulkancts>/external/vulkancts/mustpass/1.0.0/vk-default.txt
+	--deqp-log-images=disable
+	--deqp-log-shader-sources=disable
+
+In addition, on multi-device systems the device for which conformance is claimed
+can be selected with:
+
+	--deqp-vk-device-id=<value>
+
+No other command line options are allowed.
+
+### Win32
+
+	cd <builddir>/external/vulkancts/modules/vulkan
+	Debug/deqp-vk.exe --deqp-caselist-file=...
+
+The test log will be written into TestResults.qpa
+
+### Linux
+
+	cd <builddir>/external/vulkancts/modules/vulkan
+	./deqp-vk --deqp-caselist-file=...
+
+### Android
+
+	adb push <vulkancts>/external/vulkancts/mustpass/1.0.0/vk-default.txt /sdcard/vk-default.txt
+	adb shell
+
+In device shell:
+
+	am start -n com.drawelements.deqp/android.app.NativeActivity -e cmdLine "deqp --deqp-caselist-file=/sdcard/vk-default.txt --deqp-log-images=disable --deqp-log-shader-sources=disable --deqp-log-filename=/sdcard/TestResults.qpa"
+
+Test progress will be written to the device log and can be displayed with:
+
+	adb logcat -s dEQP
+
+The test log will be written into `/sdcard/TestResults.qpa`.
+
+
+Conformance Submission Package Requirements
+-------------------------------------------
+
+The conformance submission package must contain the following:
+
+1. Full test logs (`TestResults.qpa`) from CTS runs against all driver builds
+2. Result of `git status` and `git log` from CTS source directory
+3. Any patches used on top of release tag
+4. Conformance statement
+
+Test logs (1) should be named `<submission pkg dir>/TestResults-<driver build type>.qpa`,
+for example `TestResults-armeabi-v7a.qpa`. On platforms where multiple different driver
+builds (for example 64-bit and 32-bit) are present, CTS logs must be provided
+for each driver build as part of the submission package.
+
+The CTS build must always be done from a clean git repository that doesn't have any
+uncommitted changes. Thus it is necessary to run and capture the output of `git
+status` and `git log` (2) in the source directory:
+
+	git status > <submission pkg dir>/git-status.txt
+	git log <release tag>..HEAD > <submission pkg dir>/git-log.txt
+
+Any changes made to CTS must be committed to the local repository, and provided
+as part of the submission package (3). This can be done by running:
+
+	git format-patch -o <submission pkg dir> <release tag>..HEAD
+
+In general, bugfixes and changes to platform-specific code (mostly under
+`framework/platform`) are allowed. The commit message for each change must
+include a clear description of the change and why it is necessary. Non-porting
+related changes must be accompanied by a waiver (see below).
+
+NOTE: When cherry-picking patches on top of a release tag, please use `git cherry-pick -x`
+to include the original commit hash in the commit message.
+
+The conformance statement (4) must be included in a file called `STATEMENT-<adopter>`
+and must contain the following:
+
+	CONFORM_VERSION:         <git tag of CTS release>
+	PRODUCT:                 <string-value>
+	CPU:                     <string-value>
+	OS:                      <string-value>
+
+Note that product/cpu/os information is also captured in `dEQP-VK.info.*` tests
+if `vk::Platform::describePlatform()` is implemented.
+
+If the submission package covers multiple products, you can list them by appending
+additional `PRODUCT:` lines to the conformance statement. For example:
+
+	CONFORM_VERSION:         vulkan-cts-1.0.0-internal
+	PRODUCT:                 Product A
+	PRODUCT:                 Product B
+	...
+
+The actual submission package consists of the above set of files, which must
+be bundled into a gzipped tar file named `VK10_<adopter><_info>.tgz`. `<adopter>`
+is the name of the Adopting member company, or some recognizable abbreviation.
+The `<_info>` field is optional. It may be used to uniquely identify a submission
+by OS, platform, date, or other criteria when making multiple submissions.
+
+One way to create a suitable gzipped tar file is to execute the command:
+
+	tar -cvzf <filename.tgz> -C <submission pkg dir> .
+
+where `<submission pkg dir>` is the directory containing the files (1)-(4) described
+above. A submission package must contain all of the files listed above,
+and only those files.
+
+As an example, a submission package could contain:
+
+	STATEMENT-Khronos
+	git-log.txt
+	git-status.txt
+	0001-Remove-Waived-Filtering-Tests.patch
+	0002-Fix-Pipeline-Parameters.patch
+	TestResults-armeabi-v7a.qpa
+	TestResults-arm64-v8a.qpa
+
+
+Waivers
+-------
+
+The process for requesting a waiver is to report the issue by filing a bug
+report in the Gitlab VulkanCTS project (TODO Github?). When creating the
+submission package, include references to the waiver in the commit message of
+the relevant change. Including as much information as possible in your bug
+report (including a unified diff or a merge request of suggested file changes)
+will ensure the issue can be progressed as rapidly as possible. Issues must
+be labeled "Waiver" (TODO!) and identify the version of the CTS and affected
+tests.
+
+Conformance Criteria
+--------------------
+
+A conformance run is considered passing if all tests finish with allowed result
+codes. Test results are contained in the TestResults.qpa log. Each
+test case section contains an XML tag Result, for example:
+
+	<Result StatusCode="Pass">Not validated</Result>
+
+The result code is the value of the StatusCode attribute. The following status
+codes are allowed:
+
+	Pass
+	NotSupported
+	QualityWarning
+	CompatibilityWarning
+
+The submission package can be verified using the `external/vulkancts/verify_submission.py`
+script. The script takes two arguments: the path to the extracted submission package
+and the path to the current mustpass list. For example:
+
+	python external/vulkancts/verify_submission.py VK_10_Khronos_1/ external/vulkancts/mustpass/1.0.0/vk-default.txt
+
+
+Vulkan platform port
+--------------------
+
+Vulkan support in a platform port requires providing a
+`getVulkanPlatform()` method in the `tcu::Platform` class implementation.
+
+See `framework/common/tcuPlatform.hpp` and examples in
+`framework/platform/win32/tcuWin32Platform.cpp` and
+`framework/platform/android/tcuAndroidPlatform.cpp`.
+
+
+Null (dummy) driver
+-------------------
+
+For testing and development purposes it might be useful to be able to run
+tests against a dummy Vulkan implementation. One such implementation is provided in
+vkNullDriver.cpp. To use it, implement `vk::Platform::createLibrary()` with
+`vk::createNullDriver()`.
+
+
+Cherry GUI
+----------
+
+The Vulkan test module can be used with Cherry (a GUI for test execution and
+analysis). Cherry is available at
+https://android.googlesource.com/platform/external/cherry. Please follow the
+instructions in its README to get started.
+
+To enable support for Vulkan tests, the dEQP-VK module must be added to the list of
+test packages.
+
+In `cherry/testrunner.go`, add the following line to the `testPackageDescriptors` list
+(line 645 in the `NewTestRunner` function):
+
+	{"dEQP-VK", "deqp-vk", "../external/vulkancts/modules/vulkan", dataDir + "dEQP-VK-cases.xml"},
+
+Before the first launch, and every time the test hierarchy has been modified, the test
+case list must be refreshed by running:
+
+	python scripts/build_caselists.py path/to/cherry/data
+
+Cherry must be restarted for the case list update to take effect.
diff --git a/external/vulkancts/build_mustpass.py b/external/vulkancts/build_mustpass.py
new file mode 100644
index 0000000..c669904
--- /dev/null
+++ b/external/vulkancts/build_mustpass.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+
+#-------------------------------------------------------------------------
+# Vulkan CTS
+# ----------
+#
+# Copyright (c) 2016 Google Inc.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and/or associated documentation files (the
+# "Materials"), to deal in the Materials without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Materials, and to
+# permit persons to whom the Materials are furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice(s) and this permission notice shall be
+# included in all copies or substantial portions of the Materials.
+#
+# The Materials are Confidential Information as defined by the
+# Khronos Membership Agreement until designated non-confidential by
+# Khronos, at which point this condition clause shall be removed.
+#
+# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+#
+#-------------------------------------------------------------------------
+
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", "scripts"))
+
+from build.common import DEQP_DIR
+from build.config import ANY_GENERATOR
+from build_caselists import Module, getModuleByName, getBuildConfig, DEFAULT_BUILD_DIR, DEFAULT_TARGET
+from mustpass import Project, Package, Mustpass, Configuration, include, exclude, genMustpassLists
+
+COPYRIGHT_DECLARATION = """
+     Permission is hereby granted, free of charge, to any person obtaining a
+     copy of this software and/or associated documentation files (the
+     "Materials"), to deal in the Materials without restriction, including
+     without limitation the rights to use, copy, modify, merge, publish,
+     distribute, sublicense, and/or sell copies of the Materials, and to
+     permit persons to whom the Materials are furnished to do so, subject to
+     the following conditions:
+
+     The above copyright notice(s) and this permission notice shall be
+     included in all copies or substantial portions of the Materials.
+
+     The Materials are Confidential Information as defined by the
+     Khronos Membership Agreement until designated non-confidential by
+     Khronos, at which point this condition clause shall be removed.
+
+     THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+     EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+     MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+     IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+     CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+     TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+     MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+     """
+
+MUSTPASS_PATH		= os.path.join(DEQP_DIR, "external", "vulkancts", "mustpass")
+PROJECT				= Project(path = MUSTPASS_PATH, copyright = COPYRIGHT_DECLARATION)
+VULKAN_MODULE		= getModuleByName("dEQP-VK")
+BUILD_CONFIG		= getBuildConfig(DEFAULT_BUILD_DIR, DEFAULT_TARGET, "Debug")
+
+# 1.0.0
+
+VULKAN_1_0_0_PKG	= Package(module = VULKAN_MODULE, configurations = [
+		# Master
+		Configuration(name		= "default",
+					  filters	= [include("master.txt"),
+					  			   exclude("test-issues.txt"),
+					  			   exclude("excluded-tests.txt")]),
+	])
+
+MUSTPASS_LISTS		= [
+		Mustpass(project = PROJECT, version = "1.0.0",		packages = [VULKAN_1_0_0_PKG])
+	]
+
+if __name__ == "__main__":
+	genMustpassLists(MUSTPASS_LISTS, ANY_GENERATOR, BUILD_CONFIG)
diff --git a/external/vulkancts/build_spirv_binaries.py b/external/vulkancts/build_spirv_binaries.py
new file mode 100644
index 0000000..8927f79
--- /dev/null
+++ b/external/vulkancts/build_spirv_binaries.py
@@ -0,0 +1,128 @@
+# -*- coding: utf-8 -*-
+
+#-------------------------------------------------------------------------
+# Vulkan CTS
+# ----------
+#
+# Copyright (c) 2015 Google Inc.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and/or associated documentation files (the
+# "Materials"), to deal in the Materials without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Materials, and to
+# permit persons to whom the Materials are furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice(s) and this permission notice shall be
+# included in all copies or substantial portions of the Materials.
+#
+# The Materials are Confidential Information as defined by the
+# Khronos Membership Agreement until designated non-confidential by
+# Khronos, at which point this condition clause shall be removed.
+#
+# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+#
+#-------------------------------------------------------------------------
+
+import os
+import sys
+import string
+import argparse
+import tempfile
+import shutil
+import fnmatch
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", "scripts"))
+
+from build.common import *
+from build.config import *
+from build.build import *
+
+class Module:
+	def __init__ (self, name, dirName, binName):
+		self.name		= name
+		self.dirName	= dirName
+		self.binName	= binName
+
+VULKAN_MODULE		= Module("dEQP-VK", "../external/vulkancts/modules/vulkan", "deqp-vk")
+DEFAULT_BUILD_DIR	= os.path.join(tempfile.gettempdir(), "spirv-binaries", "{targetName}-{buildType}")
+DEFAULT_TARGET		= "null"
+DEFAULT_DST_DIR		= os.path.join(DEQP_DIR, "external", "vulkancts", "data", "vulkan", "prebuilt")
+
+def getBuildConfig (buildPathPtrn, targetName, buildType):
+	buildPath = buildPathPtrn.format(
+		targetName	= targetName,
+		buildType	= buildType)
+
+	return BuildConfig(buildPath, buildType, ["-DDEQP_TARGET=%s" % targetName])
+
+def cleanDstDir (dstPath):
+	binFiles = [f for f in os.listdir(dstPath) if os.path.isfile(os.path.join(dstPath, f)) and fnmatch.fnmatch(f, "*.spirv")]
+
+	for binFile in binFiles:
+		print "Removing %s" % os.path.join(dstPath, binFile)
+		os.remove(os.path.join(dstPath, binFile))
+
+def execBuildPrograms (buildCfg, generator, module, mode, dstPath):
+	workDir = os.path.join(buildCfg.getBuildDir(), "modules", module.dirName)
+
+	pushWorkingDir(workDir)
+
+	try:
+		binPath = generator.getBinaryPath(buildCfg.getBuildType(), os.path.join(".", "vk-build-programs"))
+		execute([binPath, "--mode", mode, "--dst-path", dstPath])
+	finally:
+		popWorkingDir()
+
+def parseArgs ():
+	parser = argparse.ArgumentParser(description = "Build SPIR-V programs",
+									 formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+	parser.add_argument("-b",
+						"--build-dir",
+						dest="buildDir",
+						default=DEFAULT_BUILD_DIR,
+						help="Temporary build directory")
+	parser.add_argument("-t",
+						"--build-type",
+						dest="buildType",
+						default="Debug",
+						help="Build type")
+	parser.add_argument("-c",
+						"--deqp-target",
+						dest="targetName",
+						default=DEFAULT_TARGET,
+						help="dEQP build target")
+	parser.add_argument("--mode",
+						dest="mode",
+						default="build",
+						help="Build mode (build or verify)")
+	parser.add_argument("-d",
+						"--dst-path",
+						dest="dstPath",
+						default=DEFAULT_DST_DIR,
+						help="Destination path")
+	return parser.parse_args()
+
+if __name__ == "__main__":
+	args = parseArgs()
+
+	generator	= ANY_GENERATOR
+	buildCfg	= getBuildConfig(args.buildDir, args.targetName, args.buildType)
+	module		= VULKAN_MODULE
+
+	build(buildCfg, generator, ["vk-build-programs"])
+
+	if args.mode == "build":
+		if os.path.exists(args.dstPath):
+			cleanDstDir(args.dstPath)
+		else:
+			os.makedirs(args.dstPath)
+
+	execBuildPrograms(buildCfg, generator, module, args.mode, args.dstPath)
diff --git a/external/vulkancts/data/vulkan/.gitignore b/external/vulkancts/data/vulkan/.gitignore
new file mode 100644
index 0000000..21ba210
--- /dev/null
+++ b/external/vulkancts/data/vulkan/.gitignore
@@ -0,0 +1 @@
+prebuilt
diff --git a/external/vulkancts/data/vulkan/data/brick.png b/external/vulkancts/data/vulkan/data/brick.png
new file mode 100644
index 0000000..2dbc654
--- /dev/null
+++ b/external/vulkancts/data/vulkan/data/brick.png
Binary files differ
diff --git a/external/vulkancts/data/vulkan/draw/VertexFetch.frag b/external/vulkancts/data/vulkan/draw/VertexFetch.frag
new file mode 100644
index 0000000..6506a3e
--- /dev/null
+++ b/external/vulkancts/data/vulkan/draw/VertexFetch.frag
@@ -0,0 +1,7 @@
+#version 430
+layout(location = 0) in vec4 in_color;
+layout(location = 0) out vec4 out_color;
+void main()
+{
+  out_color = in_color;
+}
\ No newline at end of file
diff --git a/external/vulkancts/data/vulkan/draw/VertexFetch.vert b/external/vulkancts/data/vulkan/draw/VertexFetch.vert
new file mode 100644
index 0000000..1730f5f
--- /dev/null
+++ b/external/vulkancts/data/vulkan/draw/VertexFetch.vert
@@ -0,0 +1,11 @@
+#version 430
+
+layout(location = 0) in vec4 in_position;
+layout(location = 1) in vec4 in_color;
+
+layout(location = 0) out vec4 out_color;
+
+void main() {
+	gl_Position = in_position;
+	out_color = in_color;
+}
\ No newline at end of file
diff --git a/external/vulkancts/data/vulkan/draw/VertexFetchWithInstance.vert b/external/vulkancts/data/vulkan/draw/VertexFetchWithInstance.vert
new file mode 100644
index 0000000..2e76fe9
--- /dev/null
+++ b/external/vulkancts/data/vulkan/draw/VertexFetchWithInstance.vert
@@ -0,0 +1,14 @@
+#version 430
+
+layout(location = 0) in vec4 in_position;
+layout(location = 1) in vec4 in_color;
+
+layout(location = 0) out vec4 out_color;
+
+void main() {
+	vec2 perVertex = vec2(in_position.x, in_position.y);
+	vec2 perInstance[6]	= vec2[6](vec2(0.7, -0.7), vec2(-0.75, 0.8), vec2(0.0, 0.0), vec2(0.3, 0.0), vec2(0.0, -0.3),vec2(0.3, -0.3) );
+
+	gl_Position = vec4(perVertex + perInstance[gl_InstanceIndex], 0.0, 1.0);
+	out_color = in_color;
+}
\ No newline at end of file
diff --git a/external/vulkancts/data/vulkan/dynamic_state/VertexFetch.frag b/external/vulkancts/data/vulkan/dynamic_state/VertexFetch.frag
new file mode 100644
index 0000000..cf2b27c
--- /dev/null
+++ b/external/vulkancts/data/vulkan/dynamic_state/VertexFetch.frag
@@ -0,0 +1,10 @@
+#version 310 es
+precision highp float;
+
+layout(location = 0) in vec4 in_color;
+layout(location = 0) out vec4 out_color;
+
+void main()
+{
+  out_color = in_color;
+}
diff --git a/external/vulkancts/data/vulkan/dynamic_state/VertexFetch.vert b/external/vulkancts/data/vulkan/dynamic_state/VertexFetch.vert
new file mode 100644
index 0000000..2b97bd9
--- /dev/null
+++ b/external/vulkancts/data/vulkan/dynamic_state/VertexFetch.vert
@@ -0,0 +1,12 @@
+#version 310 es
+precision highp float;
+
+layout(location = 0) in vec4 in_position;
+layout(location = 1) in vec4 in_color;
+
+layout(location = 0) out vec4 out_color;
+
+void main() {
+	gl_Position = in_position;
+	out_color = in_color;
+}
diff --git a/external/vulkancts/data/vulkan/dynamic_state/ViewportArray.geom b/external/vulkancts/data/vulkan/dynamic_state/ViewportArray.geom
new file mode 100644
index 0000000..ebf6821
--- /dev/null
+++ b/external/vulkancts/data/vulkan/dynamic_state/ViewportArray.geom
@@ -0,0 +1,16 @@
+#version 430
+layout(triangles) in;
+layout(triangle_strip, max_vertices = 3) out;
+
+layout(location = 0) in vec4 in_color[];
+layout(location = 0) out vec4 out_color;
+
+void main() {
+	for (int i=0; i<gl_in.length(); ++i) {
+		gl_Position = gl_in[i].gl_Position;
+		gl_ViewportIndex = int(round(gl_in[i].gl_Position.z * 3.0));
+		out_color = in_color[i];
+		EmitVertex();
+	}
+	EndPrimitive();
+}
diff --git a/external/vulkancts/data/vulkan/glsl/es310/arrays.test b/external/vulkancts/data/vulkan/glsl/es310/arrays.test
new file mode 100644
index 0000000..6c1d414
--- /dev/null
+++ b/external/vulkancts/data/vulkan/glsl/es310/arrays.test
@@ -0,0 +1,1798 @@
+#X1. Type:					float[5]								// An array type with 5 elements
+#X2. Return value:			float[5] func() { ... }					// Function with a 5-element array return value
+#X3. Array constructor:		float[3] (1.0, 2.0, 5.5)				// 3-element array with given elements
+#																	// Fails with array of matrices!
+#X4. As unnamed parameter:	void func(float[5]);
+#X5. Variable declaration:	float[5] a;								// Equivalent to float a[5]; (?)
+#X6. Empty brackets:		float x[] = float[] (1.0, 2.0, 3.0);	// Size of x is 3
+#							float y[] = float[3] (1.0, 2.0, 3.0);	// Size of y is 3 (equivalent)
+#							float z[] = y;							// Size of z is 3
+#X7. Testing that 2-dimensional arrays don't work:	float a[5][3];	// Illegal
+#													float[5] a[3];	// Illegal
+#X8. Testing that array declaration with dynamic variables as array size won't work.
+#X9. Testing length() operator:	z.length();							// Returns 3 for z defined before
+#X10. Test C/C++ style {}-constructor
+#X11. Test struct arrays
+#X12. Test array element access at initialization with const/dynamic values
+
+group constructor "Array constructors"
+
+	case float3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) | vec3(7.4, -1.0, 2.0) | vec3(3.0, 1.6, -2.0) ];
+			output vec3 out0 = [ vec3(2.0, 0.5, 1.0) | vec3(2.0, 7.4, -1.0) | vec3(-2.0, 3.0, 1.6) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				float[3] x;
+				x = float[3] (in0.z, in0.x, in0.y);
+				out0 = vec3(x[0], x[1], x[2]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float4
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.5, 1.0, 2.0, 0.2) | vec4(7.4, -1.0, 2.0, -1.3) | vec4(3.0, 1.6, -2.0, 0.5) ];
+			output vec4 out0 = [ vec4(2.0, 0.5, 0.2, 1.0) | vec4(2.0, 7.4, -1.3, -1.0) | vec4(-2.0, 3.0, 0.5, 1.6) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				float[4] x;
+				x = float[4] (in0.z, in0.x, in0.w, in0.y);
+				out0 = vec4(x[0], x[1], x[2], x[3]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int3
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 1, 2) | ivec3(7, -1, 2) | ivec3(3, 1, -2) ];
+			output ivec3 out0 = [ ivec3(2, 0, 1) | ivec3(2, 7, -1) | ivec3(-2, 3, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				int[3] x;
+				x = int[3] (in0.z, in0.x, in0.y);
+				out0 = ivec3(x[0], x[1], x[2]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int4
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 1, 2, 0) | ivec4(7, -1, 2, -1) | ivec4(3, 1, -2, 0) ];
+			output ivec4 out0 = [ ivec4(2, 0, 0, 1) | ivec4(2, 7, -1, -1) | ivec4(-2, 3, 0, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				int[4] x;
+				x = int[4] (in0.z, in0.x, in0.w, in0.y);
+				out0 = ivec4(x[0], x[1], x[2], x[3]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool3
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, true, false) ];
+			output bvec3 out0 = [ bvec3(false, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				bool[3] x;
+				x = bool[3] (in0.z, in0.x, in0.y);
+				out0 = bvec3(x[0], x[1], x[2]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool4
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, true, false, false) ];
+			output bvec4 out0 = [ bvec4(false, true, true, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				bool[4] x;
+				x = bool[4] (in0.z, in0.x, in0.y, in0.w);
+				out0 = bvec4(x[0], x[1], x[2], x[3]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case struct3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) ];
+			output vec3 out0 = [ vec3(2.0, -0.5, -1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+
+
+			void main()
+			{
+				${SETUP}
+
+				struct test
+				{
+					float f;
+					vec3 v;
+				};
+
+				test a = test(in0.z, vec3(in0.x, in0.y, in0.z));
+				test b = test(in0.y, vec3(-in0.z, -in0.x, -in0.y));
+				test c = test(in0.x, vec3(-in0.y, in0.z, -in0.x));
+
+				test[3] x = test[3] (a, b, c);
+
+				out0 = vec3(x[0].f, x[1].v.y, x[2].v.x);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case struct4
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.5, 1.0, 2.0, 1.5) ];
+			output vec4 out0 = [ vec4(2.0, -0.5, -1.0, -1.5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+
+			void main()
+			{
+				${SETUP}
+
+
+				struct test
+				{
+					float f;
+					vec3 v;
+				};
+
+				test a = test(in0.z, vec3(in0.x, in0.y, in0.z));
+				test b = test(in0.y, vec3(-in0.z, -in0.x, -in0.y));
+				test c = test(in0.x, vec3(-in0.y, in0.z, -in0.x));
+				test d = test(-in0.w, vec3(-in0.w, -in0.x, -in0.z));
+
+				test[4] x = test[4] (a, b, c, d);
+
+				out0 = vec4(x[0].f, x[1].v.y, x[2].v.x, x[3].v.x);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+	case float_vec3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) | vec3(7.4, -1.0, 2.0) | vec3(3.0, 1.6, -2.0) ];
+			output vec3 out0 = [ vec3(0.5, -2.0, 1.0) | vec3(7.4, -2.0, -1.0) | vec3(3.0, 2.0, 1.6) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+
+				vec3[3] x;
+				x = vec3[3] (	vec3(in0.x, in0.y, in0.z)	,
+								vec3(-in0.y, -in0.z, -in0.x),
+								vec3(in0.z, in0.x, in0.y)	);
+				out0 = vec3(x[0].x, x[1].y, x[2].z);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_vec3
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(5, 1, 2) | ivec3(7, -1, 2) | ivec3(3, 1, -2) ];
+			output ivec3 out0 = [ ivec3(5, -2, 1) | ivec3(7, -2, -1) | ivec3(3, 2, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+
+				ivec3[3] x;
+				x = ivec3[3] (	ivec3(in0.x, in0.y, in0.z)	,
+								ivec3(-in0.y, -in0.z, -in0.x),
+								ivec3(in0.z, in0.x, in0.y)	);
+				out0 = ivec3(x[0].x, x[1].y, x[2].z);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_vec3
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, true) ];
+			output bvec3 out0 = [ bvec3(true, true, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+
+				bvec3[3] x;
+				x = bvec3[3] (	bvec3(in0.x, in0.y, in0.z)	,
+								bvec3(in0.y, in0.z, in0.x),
+								bvec3(in0.z, in0.x, in0.y)	);
+				out0 = bvec3(x[0].x, x[1].y, x[2].z);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_mat3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) | vec3(-1.5, 0.0, -2.3) ];
+			output vec3 out0 = [ vec3(0.5, -1.0, 1.0) | vec3(-1.5, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				mat3[3] a = mat3[3] (	mat3(	in0.x, in0.y, in0.z,
+												in0.x, in0.y, in0.z,
+												in0.x, in0.y, in0.z)	,
+										mat3(	in0.z, in0.x, -in0.y,
+												in0.z, in0.x, -in0.y,
+												in0.z, in0.x, -in0.y)	,
+										mat3(	-in0.z, -in0.z, in0.z,
+												-in0.y, -in0.y, in0.y,
+												-in0.x, -in0.x, in0.x)	);
+
+				mat3 a0 = a[0];
+				mat3 a1 = a[1];
+				mat3 a2 = a[2];
+
+				float ret0 = a0[2][0];
+				float ret1 = a1[0][2];
+				float ret2 = a2[1][2];
+
+				out0 = vec3(ret0, ret1, ret2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_mat3
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 1, 2) | ivec3(-1, 0, -2) ];
+			output ivec3 out0 = [ ivec3(0, -1, 1) | ivec3(-1, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				mat3[3] a = mat3[3] (	mat3(	in0.x, in0.y, in0.z,
+												in0.x, in0.y, in0.z,
+												in0.x, in0.y, in0.z)	,
+										mat3(	in0.z, in0.x, -in0.y,
+												in0.z, in0.x, -in0.y,
+												in0.z, in0.x, -in0.y)	,
+										mat3(	-in0.z, -in0.z, in0.z,
+												-in0.y, -in0.y, in0.y,
+												-in0.x, -in0.x, in0.x)	);
+
+				mat3 a0 = a[0];
+				mat3 a1 = a[1];
+				mat3 a2 = a[2];
+
+				float ret0 = a0[2][0];
+				float ret1 = a1[0][2];
+				float ret2 = a2[1][2];
+
+				out0 = ivec3(ret0, ret1, ret2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_mat3
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, true) ];
+			output bvec3 out0 = [ bvec3(true, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				mat3[3] a = mat3[3] (	mat3(	in0.x, in0.y, in0.z,
+												in0.x, in0.y, in0.z,
+												in0.x, in0.y, in0.z)	,
+										mat3(	in0.z, in0.x, in0.y,
+												in0.z, in0.x, in0.y,
+												in0.z, in0.x, in0.y)	,
+										mat3(	in0.z, in0.z, in0.z,
+												in0.y, in0.y, in0.y,
+												in0.x, in0.x, in0.x)	);
+
+				mat3 a0 = a[0];
+				mat3 a1 = a[1];
+				mat3 a2 = a[2];
+
+				float ret0 = a0[2][0];
+				float ret1 = a1[0][2];
+				float ret2 = a2[1][2];
+
+				out0 = bvec3(ret0, ret1, ret2);
+				${OUTPUT}
+			}
+		""
+	end
+
+end # type
+
+group return "Arrays as return value"
+
+	case float
+		version 310 es
+		values
+		{
+			input vec3 in0 =	[ vec3(0.5, 1.0, 2.0) | vec3(7.4, -1.0, 2.0) | vec3(3.0, 1.6, -2.0) ];
+			output vec3 out0 = [ vec3(2.0, -0.5, 1.0) | vec3(2.0, -7.4, -1.0) | vec3(-2.0, -3.0, 1.6) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float[3] func(vec3 a)
+			{
+				return float[3] (a.z, -a.x, a.y);
+			}
+
+			void main()
+			{
+				${SETUP}
+				float[3] x = func(in0);
+				out0 = vec3(x[0], x[1], x[2]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(4, 1, 2) | ivec3(7, -1, 2) | ivec3(3, 1, -2) ];
+			output ivec3 out0 =	[ ivec3(2, -4, 1) | ivec3(2, -7, -1) | ivec3(-2, -3, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			int[3] func(ivec3 a)
+			{
+				return int[3] (a.z, -a.x, a.y);
+			}
+
+			void main()
+			{
+				${SETUP}
+				int[3] x = func(in0);
+				out0 = ivec3(x[0], x[1], x[2]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool
+		version 310 es
+		values
+		{
+			input bvec3 in0 =	[ bvec3(false, true, true) ];
+			output bvec3 out0 = [ bvec3(true, false, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			bool[3] func(bvec3 a)
+			{
+				return bool[3] (a.z, a.x, a.y);
+			}
+
+			void main()
+			{
+				${SETUP}
+				bool[3] x = func(in0);
+				out0 = bvec3(x[0], x[1], x[2]);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+
+	case float_vec3
+		version 310 es
+		values
+		{
+			input vec3 in0 =	[ vec3(0.5, 1.0, 2.0) | vec3(-0.5, 11.2, -1.0) ];
+			output vec3 out0 = [ vec3(1.0, 0.5, -2.0) | vec3(11.2, -0.5, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			vec3[3] func(vec3[3] a)
+			{
+				return vec3[3] (a[1], a[2], a[0]);
+			}
+
+			void main()
+			{
+				${SETUP}
+				vec3[3] x = vec3[3](vec3(in0.x, in0.y, -in0.z)	,
+									vec3(in0.y, -in0.z, in0.x)	,
+									vec3(-in0.z, in0.x, in0.y)	);
+				x = func(x);
+				out0 = vec3(x[0].x, x[1].y, x[2].z);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case struct
+		version 310 es
+		values
+		{
+			input vec3 in0 =	[ vec3(0.5, 1.0, 2.0) ];
+			output vec3 out0 = [ vec3(-1.0, 2.0, 0.5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			struct test
+			{
+				float f;
+				vec3 v;
+			};
+
+			test[3] func(test[3] a)
+			{
+				return test[3] (a[1], a[2], a[0]);
+			}
+
+			void main()
+			{
+				${SETUP}
+
+				test a = test(in0.z, vec3(in0.x, in0.y, in0.z));
+				test b = test(in0.y, vec3(-in0.z, -in0.x, -in0.y));
+				test c = test(in0.x, vec3(-in0.y, in0.z, -in0.x));
+
+				test[3] t = test[3] (a, b, c);
+				test[3] x = func(t);
+
+				out0 = vec3(x[0].v.z, x[1].v.y, x[2].v.x);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_vec3
+		version 310 es
+		values
+		{
+			input ivec3 in0 =	[ ivec3(5, 1, 2) | ivec3(-5, 11, -1) ];
+			output ivec3 out0 = [ ivec3(1, 5, -2) | ivec3(11, -5, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			ivec3[3] func(ivec3[3] a)
+			{
+				return ivec3[3] (a[1], a[2], a[0]);
+			}
+
+			void main()
+			{
+				${SETUP}
+				ivec3[3] x = ivec3[3](	ivec3(in0.x, in0.y, -in0.z)	,
+										ivec3(in0.y, -in0.z, in0.x)	,
+										ivec3(-in0.z, in0.x, in0.y)	);
+				x = func(x);
+				out0 = ivec3(x[0].x, x[1].y, x[2].z);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_vec3
+		version 310 es
+		values
+		{
+			input bvec3 in0 =	[ bvec3(true, false, false) ];
+			output bvec3 out0 = [ bvec3(false, true, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			bvec3[3] func(bvec3[3] a)
+			{
+				return bvec3[3] (a[1], a[2], a[0]);
+			}
+
+			void main()
+			{
+				${SETUP}
+				bvec3[3] x = bvec3[3](	bvec3(in0.x, in0.y, in0.z)	,
+										bvec3(in0.y, in0.z, in0.x)	,
+										bvec3(in0.z, in0.x, in0.y)	);
+				x = func(x);
+				out0 = bvec3(x[0].x, x[1].y, x[2].z);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_mat3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) | vec3(-1.5, 0.0, -2.3) ];
+			output vec3 out0 = [ vec3(2.0, -1.0, 2.0) | vec3(-2.3, 0.0, -2.3) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat3[3] func(mat3[3] x)
+			{
+				mat3[3] r;
+				r[0] = x[1];
+				r[1] = x[2];
+				r[2] = x[0];
+				return r;
+			}
+
+			void main()
+			{
+				${SETUP}
+				mat3[3] a, b;
+				a[0] = mat3(in0.x, in0.y, in0.z,
+							in0.x, in0.y, in0.z,
+							in0.x, in0.y, in0.z);
+				a[1] = mat3(in0.z, in0.x, -in0.y,
+							in0.z, in0.x, -in0.y,
+							in0.z, in0.x, -in0.y);
+				a[2] = mat3(-in0.z, -in0.z, in0.z,
+							-in0.y, -in0.y, in0.y,
+							-in0.x, -in0.x, in0.x);
+
+				b = func(a);
+
+				mat3 b0 = b[0];
+				mat3 b1 = b[1];
+				mat3 b2 = b[2];
+
+				float ret0 = b0[0][0];
+				float ret1 = b1[1][1];
+				float ret2 = b2[2][2];
+
+				out0 = vec3(ret0, ret1, ret2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_mat3
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(5, 1, 2) | ivec3(-1, 0, -2) ];
+			output ivec3 out0 = [ ivec3(2, -1, 2) | ivec3(-2, 0, -2) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat3[3] func(mat3[3] x)
+			{
+				mat3[3] r;
+				r[0] = x[1];
+				r[1] = x[2];
+				r[2] = x[0];
+				return r;
+			}
+
+			void main()
+			{
+				${SETUP}
+				mat3[3] a, b;
+				a[0] = mat3(in0.x, in0.y, in0.z,
+							in0.x, in0.y, in0.z,
+							in0.x, in0.y, in0.z);
+				a[1] = mat3(in0.z, in0.x, -in0.y,
+							in0.z, in0.x, -in0.y,
+							in0.z, in0.x, -in0.y);
+				a[2] = mat3(-in0.z, -in0.z, in0.z,
+							-in0.y, -in0.y, in0.y,
+							-in0.x, -in0.x, in0.x);
+
+				b = func(a);
+
+				mat3 b0 = b[0];
+				mat3 b1 = b[1];
+				mat3 b2 = b[2];
+
+				float ret0 = b0[0][0];
+				float ret1 = b1[1][1];
+				float ret2 = b2[2][2];
+
+				out0 = ivec3(ret0, ret1, ret2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_mat3
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, true) | bvec3(true, true, false) ];
+			output bvec3 out0 = [ bvec3(true, false, true) | bvec3(false, true, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat3[3] func(mat3[3] x)
+			{
+				mat3[3] r;
+				r[0] = x[1];
+				r[1] = x[2];
+				r[2] = x[0];
+				return r;
+			}
+
+			void main()
+			{
+				${SETUP}
+				mat3[3] a, b;
+				a[0] = mat3(in0.x, in0.y, in0.z,
+							in0.x, in0.y, in0.z,
+							in0.x, in0.y, in0.z);
+				a[1] = mat3(in0.z, in0.x, in0.y,
+							in0.z, in0.x, in0.y,
+							in0.z, in0.x, in0.y);
+				a[2] = mat3(in0.z, in0.z, in0.z,
+							in0.y, in0.y, in0.y,
+							in0.x, in0.x, in0.x);
+
+				b = func(a);
+
+				mat3 b0 = b[0];
+				mat3 b1 = b[1];
+				mat3 b2 = b[2];
+
+				float ret0 = b0[0][0];
+				float ret1 = b1[1][1];
+				float ret2 = b2[2][2];
+
+				out0 = bvec3(ret0, ret1, ret2);
+				${OUTPUT}
+			}
+		""
+	end
+
+end # return
+
+group unnamed_parameter "Array type as unnamed parameter of a function prototype"
+
+	case float
+		version 310 es
+		values
+		{
+			input vec3 in0 =	[ vec3(0.5, 1.0, 2.0) | vec3(7.4, -1.0, 2.0) | vec3(3.0, 1.6, -2.0) ];
+			output vec3 out0 = [ vec3(2.0, 0.5, 1.0) | vec3(2.0, 7.4, -1.0) | vec3(-2.0, 3.0, 1.6) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float[3] func(float[3]);
+
+			void main()
+			{
+				${SETUP}
+				float[3] a = float[3] (in0.x, in0.y, in0.z);
+				float[3] b = func(a);
+				out0 = vec3(b[0], b[1], b[2]);
+				${OUTPUT}
+			}
+
+			float[3] func(float[3] a)
+			{
+				return float[3] (a[2], a[0], a[1]);
+			}
+
+		""
+	end
+
+	case int
+		version 310 es
+		values
+		{
+			input ivec3 in0 =	[ ivec3(0, 1, 2) | ivec3(7, -1, 2) | ivec3(3, 1, -2) ];
+			output ivec3 out0 = [ ivec3(2, 0, 1) | ivec3(2, 7, -1) | ivec3(-2, 3, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			int[3] func(int[3]);
+
+			void main()
+			{
+				${SETUP}
+				int[3] a = int[3] (in0.x, in0.y, in0.z);
+				int[3] b = func(a);
+				out0 = ivec3(b[0], b[1], b[2]);
+				${OUTPUT}
+			}
+
+			int[3] func(int[3] a)
+			{
+				return int[3] (a[2], a[0], a[1]);
+			}
+
+		""
+	end
+
+	case bool
+		version 310 es
+		values
+		{
+			input bvec3 in0 =	[ bvec3(false, true, true) ];
+			output bvec3 out0 = [ bvec3(true, false, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			bool[3] func(bool[3]);
+
+			void main()
+			{
+				${SETUP}
+				bool[3] a = bool[3] (in0.x, in0.y, in0.z);
+				bool[3] b = func(a);
+				out0 = bvec3(b[0], b[1], b[2]);
+				${OUTPUT}
+			}
+
+			bool[3] func(bool[3] a)
+			{
+				return bool[3] (a[2], a[0], a[1]);
+			}
+
+		""
+	end
+
+	case struct
+		version 310 es
+		values
+		{
+			input vec3 in0 =	[ vec3(0.5, 1.0, 2.0) ];
+			output vec3 out0 = [ vec3(-1.0, 2.0, 0.5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			struct test
+			{
+				float f;
+				vec3 v;
+			};
+
+			test[3] func(test[3]);
+
+			void main()
+			{
+				${SETUP}
+
+				test a = test(in0.z, vec3(in0.x, in0.y, in0.z));
+				test b = test(in0.y, vec3(-in0.z, -in0.x, -in0.y));
+				test c = test(in0.x, vec3(-in0.y, in0.z, -in0.x));
+
+				test[3] t = test[3] (a, b, c);
+				test[3] x = func(t);
+				out0 = vec3(x[0].v.z, x[1].v.y, x[2].v.x);
+				${OUTPUT}
+			}
+
+			test[3] func(test[3] a)
+			{
+				return test[3] (a[1], a[2], a[0]);
+			}
+
+		""
+	end
+
+	case float_vec3
+		version 310 es
+		values
+		{
+			input vec3 in0 =	[ vec3(0.5, 1.0, 2.0) | vec3(-0.5, 11.2, -1.0) ];
+			output vec3 out0 = [ vec3(1.0, 0.5, -2.0) | vec3(11.2, -0.5, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			vec3[3] func(vec3[3]);
+
+			void main()
+			{
+				${SETUP}
+				vec3[3] x = vec3[3](vec3(in0.x, in0.y, -in0.z)	,
+									vec3(in0.y, -in0.z, in0.x)	,
+									vec3(-in0.z, in0.x, in0.y)	);
+				x = func(x);
+				out0 = vec3(x[0].x, x[1].y, x[2].z);
+				${OUTPUT}
+			}
+
+			vec3[3] func(vec3[3] a)
+			{
+				return vec3[3] (a[1], a[2], a[0]);
+			}
+		""
+	end
+
+	case int_vec3
+		version 310 es
+		values
+		{
+			input ivec3 in0 =	[ ivec3(5, 1, 2) | ivec3(-5, 11, -1) ];
+			output ivec3 out0 = [ ivec3(1, 5, -2) | ivec3(11, -5, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			ivec3[3] func(ivec3[3]);
+
+			void main()
+			{
+				${SETUP}
+				ivec3[3] x = ivec3[3](	ivec3(in0.x, in0.y, -in0.z)	,
+										ivec3(in0.y, -in0.z, in0.x)	,
+										ivec3(-in0.z, in0.x, in0.y)	);
+				x = func(x);
+				out0 = ivec3(x[0].x, x[1].y, x[2].z);
+				${OUTPUT}
+			}
+
+			ivec3[3] func(ivec3[3] a)
+			{
+				return ivec3[3] (a[1], a[2], a[0]);
+			}
+		""
+	end
+
+	case bool_vec3
+		version 310 es
+		values
+		{
+			input bvec3 in0 =	[ bvec3(true, false, false) ];
+			output bvec3 out0 = [ bvec3(false, true, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			bvec3[3] func(bvec3[3]);
+
+			void main()
+			{
+				${SETUP}
+				bvec3[3] x = bvec3[3](	bvec3(in0.x, in0.y, in0.z)	,
+										bvec3(in0.y, in0.z, in0.x)	,
+										bvec3(in0.z, in0.x, in0.y)	);
+				x = func(x);
+				out0 = bvec3(x[0].x, x[1].y, x[2].z);
+				${OUTPUT}
+			}
+
+			bvec3[3] func(bvec3[3] a)
+			{
+				return bvec3[3] (a[1], a[2], a[0]);
+			}
+
+		""
+	end
+
+	case float_mat3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) | vec3(-1.5, 0.0, -2.3) ];
+			output vec3 out0 = [ vec3(2.0, -1.0, 2.0) | vec3(-2.3, 0.0, -2.3) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat3[3] func(mat3[3]);
+
+			void main()
+			{
+				${SETUP}
+				mat3[3] a, b;
+				a[0] = mat3(in0.x, in0.y, in0.z,
+							in0.x, in0.y, in0.z,
+							in0.x, in0.y, in0.z);
+				a[1] = mat3(in0.z, in0.x, -in0.y,
+							in0.z, in0.x, -in0.y,
+							in0.z, in0.x, -in0.y);
+				a[2] = mat3(-in0.z, -in0.z, in0.z,
+							-in0.y, -in0.y, in0.y,
+							-in0.x, -in0.x, in0.x);
+
+				b = func(a);
+
+				mat3 b0 = b[0];
+				mat3 b1 = b[1];
+				mat3 b2 = b[2];
+
+				float ret0 = b0[0][0];
+				float ret1 = b1[1][1];
+				float ret2 = b2[2][2];
+
+				out0 = vec3(ret0, ret1, ret2);
+				${OUTPUT}
+			}
+
+			mat3[3] func(mat3[3] x)
+			{
+				mat3[3] r;
+				r[0] = x[1];
+				r[1] = x[2];
+				r[2] = x[0];
+				return r;
+			}
+		""
+	end
+
+	case int_mat3
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(5, 1, 2) | ivec3(-1, 0, -2) ];
+			output ivec3 out0 = [ ivec3(2, -1, 2) | ivec3(-2, 0, -2) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat3[3] func(mat3[3]);
+
+			void main()
+			{
+				${SETUP}
+				mat3[3] a, b;
+				a[0] = mat3(in0.x, in0.y, in0.z,
+							in0.x, in0.y, in0.z,
+							in0.x, in0.y, in0.z);
+				a[1] = mat3(in0.z, in0.x, -in0.y,
+							in0.z, in0.x, -in0.y,
+							in0.z, in0.x, -in0.y);
+				a[2] = mat3(-in0.z, -in0.z, in0.z,
+							-in0.y, -in0.y, in0.y,
+							-in0.x, -in0.x, in0.x);
+
+				b = func(a);
+
+				mat3 b0 = b[0];
+				mat3 b1 = b[1];
+				mat3 b2 = b[2];
+
+				float ret0 = b0[0][0];
+				float ret1 = b1[1][1];
+				float ret2 = b2[2][2];
+
+				out0 = ivec3(ret0, ret1, ret2);
+				${OUTPUT}
+			}
+
+			mat3[3] func(mat3[3] x)
+			{
+				mat3[3] r;
+				r[0] = x[1];
+				r[1] = x[2];
+				r[2] = x[0];
+				return r;
+			}
+		""
+	end
+
+	case bool_mat3
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, true) | bvec3(true, true, false) ];
+			output bvec3 out0 = [ bvec3(true, false, true) | bvec3(false, true, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat3[3] func(mat3[3]);
+			void main()
+			{
+				${SETUP}
+				mat3[3] a, b;
+				a[0] = mat3(in0.x, in0.y, in0.z,
+							in0.x, in0.y, in0.z,
+							in0.x, in0.y, in0.z);
+				a[1] = mat3(in0.z, in0.x, in0.y,
+							in0.z, in0.x, in0.y,
+							in0.z, in0.x, in0.y);
+				a[2] = mat3(in0.z, in0.z, in0.z,
+							in0.y, in0.y, in0.y,
+							in0.x, in0.x, in0.x);
+
+				b = func(a);
+
+				mat3 b0 = b[0];
+				mat3 b1 = b[1];
+				mat3 b2 = b[2];
+
+				float ret0 = b0[0][0];
+				float ret1 = b1[1][1];
+				float ret2 = b2[2][2];
+
+				out0 = bvec3(ret0, ret1, ret2);
+				${OUTPUT}
+			}
+
+			mat3[3] func(mat3[3] x)
+			{
+				mat3[3] r;
+				r[0] = x[1];
+				r[1] = x[2];
+				r[2] = x[0];
+				return r;
+			}
+		""
+	end
+
+end # unnamed_parameter
+
+group declaration "Declaring arrays"
+
+	case implicit_size_float
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) | vec3(7.4, -1.0, 2.0) | vec3(3.0, 1.6, -2.0) ];
+			output vec3 out0 = [ vec3(2.0, 0.5, 1.0) | vec3(2.0, 7.4, -1.0) | vec3(-2.0, 3.0, 1.6) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				float[] x = float[] (in0.z, in0.x, in0.y);
+				float[] y = x;
+
+				out0 = vec3(y[0], y[1], y[2]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case implicit_size_int
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 1, 2) | ivec3(7, -1, 2) | ivec3(3, 1, -2) ];
+			output ivec3 out0 = [ ivec3(2, 0, 1) | ivec3(2, 7, -1) | ivec3(-2, 3, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				int[] x = int[] (in0.z, in0.x, in0.y);
+				int[] y = x;
+
+				out0 = ivec3(y[0], y[1], y[2]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case implicit_size_bool
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(false, true, true) ];
+			output bvec3 out0 = [ bvec3(true, false, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				bool[] x = bool[] (in0.z, in0.x, in0.y);
+				bool[] y = x;
+
+				out0 = bvec3(y[0], y[1], y[2]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case implicit_size_struct
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) ];
+			output vec3 out0 = [ vec3(-1.0, -0.5, 2.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			struct test
+			{
+				float f;
+				vec3 v;
+			};
+
+			void main()
+			{
+				${SETUP}
+
+				test a = test(in0.z, vec3(in0.x, in0.y, in0.z));
+				test b = test(in0.y, vec3(-in0.z, -in0.x, -in0.y));
+				test c = test(in0.x, vec3(-in0.y, in0.z, -in0.x));
+
+				test[] x = test[] (c, b, a);
+				test[] y = x;
+
+				out0 = vec3(y[0].v.x, y[1].v.y, y[2].v.z);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case implicit_size_float_vec3
+		version 310 es
+		values
+		{
+			input vec3 in0 =	[ vec3(0.5, 1.0, 2.0) | vec3(-0.5, 11.2, -1.0) ];
+			output vec3 out0 = [ vec3(0.5, -2.0, 1.0) | vec3(-0.5, 1.0, 11.2) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				vec3[] x = vec3[] (	vec3(in0.x, in0.y, -in0.z)	,
+									vec3(in0.y, -in0.z, in0.x)	,
+									vec3(-in0.z, in0.x, in0.y)	);
+				vec3[] y = x;
+				out0 = vec3(y[0].x, y[1].y, y[2].z);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case implicit_size_int_ivec3
+		version 310 es
+		values
+		{
+			input ivec3 in0 =	[ ivec3(0, 1, 2) | ivec3(5, 11, -1) ];
+			output ivec3 out0 = [ ivec3(0, -2, 1) | ivec3(5, 1, 11) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				ivec3[] x = ivec3[] (	ivec3(in0.x, in0.y, -in0.z)	,
+										ivec3(in0.y, -in0.z, in0.x)	,
+										ivec3(-in0.z, in0.x, in0.y)	);
+				ivec3[] y = x;
+				out0 = ivec3(y[0].x, y[1].y, y[2].z);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case implicit_size_bool_bvec3
+		version 310 es
+		values
+		{
+			input bvec3 in0 =	[ bvec3(true, false, true) ];
+			output bvec3 out0 = [ bvec3(true, true, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				bvec3[] x = bvec3[] (	bvec3(in0.x, in0.y, in0.z)	,
+										bvec3(in0.y, in0.z, in0.x)	,
+										bvec3(in0.z, in0.x, in0.y)	);
+				bvec3[] y = x;
+				out0 = bvec3(y[0].x, y[1].y, y[2].z);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case implicit_size_float_mat3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) | vec3(-1.5, 0.0, -2.3) ];
+			output vec3 out0 = [ vec3(0.5, -1.0, 1.0) | vec3(-1.5, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				mat3[] a = mat3[] (	mat3(	in0.x, in0.y, in0.z,
+											in0.x, in0.y, in0.z,
+											in0.x, in0.y, in0.z)	,
+									mat3(	in0.z, in0.x, -in0.y,
+											in0.z, in0.x, -in0.y,
+											in0.z, in0.x, -in0.y)	,
+									mat3(	-in0.z, -in0.z, in0.z,
+											-in0.y, -in0.y, in0.y,
+											-in0.x, -in0.x, in0.x)	);
+
+				mat3 a0 = a[0];
+				mat3 a1 = a[1];
+				mat3 a2 = a[2];
+
+				float ret0 = a0[2][0];
+				float ret1 = a1[0][2];
+				float ret2 = a2[1][2];
+
+				out0 = vec3(ret0, ret1, ret2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case implicit_size_int_mat3
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 1, 2) | ivec3(-1, 0, -2) ];
+			output ivec3 out0 = [ ivec3(0, -1, 1) | ivec3(-1, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				mat3[] a = mat3[] (	mat3(	in0.x, in0.y, in0.z,
+											in0.x, in0.y, in0.z,
+											in0.x, in0.y, in0.z)	,
+									mat3(	in0.z, in0.x, -in0.y,
+											in0.z, in0.x, -in0.y,
+											in0.z, in0.x, -in0.y)	,
+									mat3(	-in0.z, -in0.z, in0.z,
+											-in0.y, -in0.y, in0.y,
+											-in0.x, -in0.x, in0.x)	);
+
+				mat3 a0 = a[0];
+				mat3 a1 = a[1];
+				mat3 a2 = a[2];
+
+				float ret0 = a0[2][0];
+				float ret1 = a1[0][2];
+				float ret2 = a2[1][2];
+
+				out0 = ivec3(ret0, ret1, ret2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case implicit_size_bool_mat3
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, true) ];
+			output bvec3 out0 = [ bvec3(true, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				mat3[] a = mat3[] (	mat3(	in0.x, in0.y, in0.z,
+											in0.x, in0.y, in0.z,
+											in0.x, in0.y, in0.z)	,
+									mat3(	in0.z, in0.x, in0.y,
+											in0.z, in0.x, in0.y,
+											in0.z, in0.x, in0.y)	,
+									mat3(	in0.z, in0.z, in0.z,
+											in0.y, in0.y, in0.y,
+											in0.x, in0.x, in0.x)	);
+
+				mat3 a0 = a[0];
+				mat3 a1 = a[1];
+				mat3 a2 = a[2];
+
+				float ret0 = a0[2][0];
+				float ret1 = a1[0][2];
+				float ret2 = a2[1][2];
+
+				out0 = bvec3(ret0, ret1, ret2);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+	case constant_expression_array_size
+		version 310 es
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			const int a = 4;
+
+			void main ()
+			{
+				const int b = 5;
+				float[a] array1;
+				float[b] array2;
+				float[array1.length()] array3;
+				float[a+b] array4;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case constant_expression_array_access
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) ];
+			output vec3 out0 = [ vec3(-2.0, -1.0, -0.5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			const int a = 3;
+
+			void main ()
+			{
+				${SETUP}
+				const int b = 2;
+				float x = float[] (in0.x, in0.y, in0.z, -in0.z, -in0.y, -in0.x) [a];
+				float y = float[] (in0.x, in0.y, in0.z, -in0.z, -in0.y, -in0.x) [b+2];
+				float z = float[] (in0.x, in0.y, in0.z, -in0.z, -in0.y, -in0.x) [5];
+				out0 = vec3(x, y, z);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case dynamic_expression_array_access
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) ];
+			input ivec2 in1 = ivec2(3, 2);
+			output vec3 out0 = [ vec3(-2.0, -1.0, -0.5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main ()
+			{
+				${SETUP}
+				float x = float[] (in0.x, in0.y, in0.z, -in0.z, -in0.y, -in0.x) [in1.x];
+				float y = float[] (in0.x, in0.y, in0.z, -in0.z, -in0.y, -in0.x) [in1.y+2];
+				float z = float[] (in0.x, in0.y, in0.z, -in0.z, -in0.y, -in0.x) [in1.x+in1.y];
+				out0 = vec3(x, y, z);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case multiple_declarations_single_statement_explicit
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) ];
+			output vec3 out0 = [ vec3(2.0, -1.0, 0.5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main ()
+			{
+				${SETUP}
+				float[] x = float[6] (in0.x, in0.y, in0.z, -in0.z, -in0.y, -in0.x),
+						y = float[2] (in0.x, -in0.y);
+				out0 = vec3(x[2], y[1], x[0]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case multiple_declarations_single_statement_implicit
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(5, 1, 2) ];
+			output ivec3 out0 = [ ivec3(2, -1, 5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main ()
+			{
+				${SETUP}
+				int[] x = int[] (in0.x, in0.y, in0.z, -in0.z, -in0.y, -in0.x),
+					  y = int[] (in0.x, -in0.y);
+				out0 = ivec3(x[2], y[1], x[0]);
+				${OUTPUT}
+			}
+		""
+	end
+
+end # declaration
+
+group length "Array length method"
+
+	case float
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) ];
+			output ivec3 out0 = [ ivec3(3, 5, 13) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				float[] x = float[3] (in0.z, in0.x, in0.y);
+				float[] y = float[] (in0.z, in0.x, in0.y, in0.x, in0.y);
+				float[13] z;
+
+				out0 = ivec3(x.length(), y.length(), z.length());
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 1, 2) ];
+			output ivec3 out0 = [ ivec3(3, 5, 13) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump int;
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				int[] x = int[3] (in0.z, in0.x, in0.y);
+				int[] y = int[] (in0.z, in0.x, in0.y, in0.x, in0.y);
+				int[13] z;
+
+				out0 = ivec3(x.length(), y.length(), z.length());
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, true) ];
+			output ivec3 out0 = [ ivec3(3, 5, 13) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				bool[] x = bool[3] (in0.z, in0.x, in0.y);
+				bool[] y = bool[] (in0.z, in0.x, in0.y, in0.x, in0.y);
+				bool[13] z;
+
+				out0 = ivec3(x.length(), y.length(), z.length());
+				${OUTPUT}
+			}
+		""
+	end
+
+	case struct
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 1.0, 2.0) ];
+			output ivec3 out0 = [ ivec3(3, 5, 13) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			struct test
+			{
+				float f;
+				vec3 v;
+			};
+
+			void main()
+			{
+				${SETUP}
+
+				test a = test(in0.z, vec3(in0.x, in0.y, in0.z));
+				test b = test(in0.y, vec3(-in0.z, -in0.x, -in0.y));
+				test c = test(in0.x, vec3(-in0.y, in0.z, -in0.x));
+
+				test[] x = test[3] (a, b, c);
+				test[] y = test[] (c, a, b, b, a);
+				test[13] z;
+
+				out0 = ivec3(x.length(), y.length(), z.length());
+				${OUTPUT}
+			}
+		""
+	end
+
+end # length
diff --git a/external/vulkancts/data/vulkan/glsl/es310/conditionals.test b/external/vulkancts/data/vulkan/glsl/es310/conditionals.test
new file mode 100644
index 0000000..55cc523
--- /dev/null
+++ b/external/vulkancts/data/vulkan/glsl/es310/conditionals.test
@@ -0,0 +1,223 @@
+group if "If Statements"
+
+	case single_statement
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 ];
+			output float out0 = [ 0.0 | 1.0 | 1.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				out0 = 0.0;
+				if (in0 >= 1.0)
+					out0 = 1.0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case compound_statement
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 ];
+			output float out0 = [ 0.0 | 1.0 | 1.0 ];
+			output float out1 = [ 1.0 | 0.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				out0 = 0.0;
+				out1 = 1.0;
+				if (in0 >= 1.0)
+				{
+					out0 = 1.0;
+					out1 = 0.0;
+				}
+				${OUTPUT}
+			}
+		""
+	end
+
+	case sequence_statements
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 ];
+			output float out0 = [ 0.0 | 1.0 | 1.0 ];
+			output float out1 = [ 1.0 | 0.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				out0 = 0.0;
+				out1 = 1.0;
+				if (in0 >= 1.0)
+					out0 = 1.0, out1 = 0.0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case sequence_condition
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 ];
+			output float out0 = [ 0.0 | 1.0 | 1.0 ];
+			output float out1 = [ 1.0 | 0.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				out0 = 0.0;
+				out1 = 1.0;
+				if (false, in0 >= 1.0)
+					out0 = 1.0, out1 = 0.0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case complex_condition
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 ];
+			output float out0 = [ 0.0 | 1.0 | 1.0 ];
+			output float out1 = [ 1.0 | 0.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				out0 = 0.0;
+				out1 = 1.0;
+				if (false || (in0 >= 1.0) && (in0 - 2.0*in0 < 0.0))
+					out0 = 1.0, out1 = 0.0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case if_else
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 ];
+			output float out0 = [ 0.0 | 1.0 | 1.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				if (in0 >= 1.0)
+					out0 = 1.0;
+				else
+					out0 = 0.0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case if_elseif
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 ];
+			output float out0 = [ 0.0 | 1.0 | 2.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				out0 = 0.0;
+				if (in0 >= 2.0)
+					out0 = 2.0;
+				else if (in0 >= 1.0)
+					out0 = 1.0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case if_elseif_else
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 ];
+			output float out0 = [ 0.0 | 1.0 | 2.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				if (in0 >= 2.0)
+					out0 = 2.0;
+				else if (in0 >= 1.0)
+					out0 = 1.0;
+				else
+					out0 = 0.0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mixed_if_elseif_else
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 ];
+			output float out0 = [ 0.0 | 1.0 | 2.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				if (in0 >= 2.0)
+				{
+					out0 = 2.0;
+				}
+				else if (in0 >= 1.0)
+					out0 = 2.0, out0 = 1.0;
+				else
+					out0 = 0.0;
+				${OUTPUT}
+			}
+		""
+	end
+
+end # if
diff --git a/external/vulkancts/data/vulkan/glsl/es310/constant_expressions.test b/external/vulkancts/data/vulkan/glsl/es310/constant_expressions.test
new file mode 100644
index 0000000..046eb2a
--- /dev/null
+++ b/external/vulkancts/data/vulkan/glsl/es310/constant_expressions.test
@@ -0,0 +1,483 @@
+group trivial "Trivial expressions"
+
+	case float
+		version 310 es
+		values { output float out0 = 5.0; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const float a = 5.0;
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int
+		version 310 es
+		values { output int out0 = 5; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const int a = 5;
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool
+		version 310 es
+		values { output bool out0 = true; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const bool a = true;
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case cast
+		version 310 es
+		values { output float out0 = 1.0; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const float a = float(int(bool(true)));
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+end # trivial
+
+group operators "Operators"
+
+	case math_float
+		version 310 es
+		values { output float out0 = 2.19; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const float a = 6.0/3.5 + 1.8*2.6 - 4.2;
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case math_vec
+		version 310 es
+		values { output float out0 = 15.0; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const vec3 a = (vec4(1.0, 2.0, 3.0, 4.0).zyx * vec3(1.0, 1.5, 3.0).xyz).xzy + (vec2(5.0)/vec2(2.5)).xxy;
+				out0 = a.x + a.y + a.z;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case math_int
+		version 310 es
+		values { output int out0 = 7; }
+		both ""
+			#version 310 es
+			precision highp int;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const int a = 25%7 + 2*3 - 9/3;
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case math_ivec
+		version 310 es
+		values { output int out0 = 21; }
+		both ""
+			#version 310 es
+			precision highp int;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const ivec3 a = ivec2(25%7, 4).xxy + ivec4(1*3, 9/3, 1+2, 8/4).xyz;
+				out0 = a.x + a.y + a.z;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case math_mat
+		version 310 es
+		values { output float out0 = 8.0; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const mat3 a = mat3(3.0) * mat3(4.0);
+				const mat4 b = mat4(a[1][1])*2.0;
+				const mat2 c = mat2(b[0][0]) / 3.0;
+				out0 = c[0][0]+c[1][0];
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bitwise
+		version 310 es
+		values { output int out0 = 678332; }
+		both ""
+			#version 310 es
+			precision highp int;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const int a = (((0xABBA<<4) ^ 0xCAFE) | (0xDCBA & (0xABCD>>2))) ^ (~0xDEAD & 0xBEEF);
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case logical
+		version 310 es
+		values { output bool out0 = true; }
+		both ""
+			#version 310 es
+			precision highp int;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const bool a = (!false || false) && (true ^^ false);
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case compare
+		version 310 es
+		values { output bool out0 = true; }
+		both ""
+			#version 310 es
+			precision highp int;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const bool a = (false == false) && (true != false) && (1 < 2) && (3 <= 3) && ((1 > 1) != (1 >= 1));
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case selection
+		version 310 es
+		values { output float out0 = 5.3; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const float a = false ? 0.0 : (true ? 5.3 : 1.0);
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+end # operators
+
+group complex_types "Arrays & Structs"
+
+	case struct
+		version 310 es
+		values { output float out0 = 260.922; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			struct S
+			{
+				vec4 a;
+				int  b;
+			};
+
+			void main()
+			{
+				const S s = S(vec4(1.5), 123);
+				out0 = length(s.a.xy)*float(s.b);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case nested_struct
+		version 310 es
+		values { output float out0 = 965.9; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			struct S
+			{
+				vec4 v;
+				int  i;
+			};
+
+			struct T
+			{
+				S s;
+				bool b;
+				int i;
+			};
+
+			struct U
+			{
+				S s;
+				T t;
+			};
+
+			void main()
+			{
+				const S s = S(vec4(1.5), 123);
+				const T t = T(s, false, 3);
+				const U u = U(s, t);
+				const U v = U(S(vec4(1.3), 4), T(S(vec4(2.0), 5), true, 6));
+				out0 = float(u.s.i*v.t.i + v.t.s.i)*v.s.v.x; // float(123*6 + 5)*1.3
+				${OUTPUT}
+			}
+		""
+	end
+
+	case array_size
+		version 310 es
+		values { output int out0 = 1; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const int a[max(-1, 1)] = int[1](1);
+				out0 = a[0];
+				${OUTPUT}
+			}
+		""
+	end
+
+	case array_length
+		version 310 es
+		values { output int out0 = 2; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const int a[1] = int[1](1);
+				out0 = a.length() + a[0];
+				${OUTPUT}
+			}
+		""
+	end
+
+	case array
+		version 310 es
+		values { output float out0 = 4.0; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const float a[1+2+5] = float[8](0.5, 1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0);
+				const float f = a[1+2+4];
+				out0 = f + float(a.length()-8);
+				${OUTPUT}
+			}
+		""
+	end
+
+end # complex_types
+
+group other "Other operations"
+
+	case switch_case
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.0 | 4.0 | 5.0 ];
+			output int out0 = [ 0 | 1 | 2 | 3 | 4 | 10];
+		}
+
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const int _0 = 0;
+				const int _1 = 1;
+				const int _2 = 2;
+				const int _3 = 3;
+				const int _4 = 4;
+
+				switch(int(in0))
+				{
+					case _0:
+						out0 = 0;
+						break;
+					case _1:
+						out0 = 1;
+						break;
+					case _2:
+						out0 = 2;
+						break;
+					case _3:
+						out0 = 3;
+						break;
+					case _4:
+						out0 = 4;
+						break;
+					case 5:
+						out0 = 10;
+						break;
+					default:
+						out0 = 100;
+
+				}
+				${OUTPUT}
+			}
+		""
+	end
+
+	case nested_builtin_funcs
+		version 310 es
+		values { output float out0 = 3.05; }
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			void main()
+			{
+				const float a = sqrt( atan(sin(1.5)/cos(1.5)) /*1.5*/ * log2(exp2(log(exp(6.2) + 0.1)) + 0.1) /*~6.2*/);
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case complex
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.0 | 4.0 | 5.0 ];
+			output int out0 = [ 0 | 1 | 2 | 3 | 4 | 10];
+		}
+
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			struct T
+			{
+				vec4 v;
+			};
+
+			struct S
+			{
+				T t;
+				int i;
+				bool b;
+			};
+
+			void main()
+			{
+				const T   t  = T(vec4(1.0));
+				const S   s  = S(t, 42, true);
+				const int _0 = int(sin(0.0));
+				const int _1 = int(1.0);
+				const int _2 = 2 + int(float(_0>_1));
+				const int _3 = min(gl_MaxVertexAttribs, 16)/4 - 1;
+				const int _4 = min(gl_MaxDrawBuffers, 4);
+				const ivec4 nums = ivec4(0, 1, 2, 3);
+
+				switch(int(in0))
+				{
+					case int(float(_0)):
+						out0 = ((true!=false) && (!false)) ? 0 : 25;
+						break;
+					case ivec3(_1).x:
+						out0 = 3*18/9-5;
+						break;
+					case nums[_2]:
+						out0 = int(length(vec4(1.0))+0.001);
+						break;
+					case _3:
+						out0 = 3;
+						break;
+					case clamp(_4, 1, 6):
+						out0 = (s.i-2)/10;
+						break;
+					case max(3, 5):
+						out0 = 10;
+						break;
+					default:
+						out0 = 100;
+
+				}
+				${OUTPUT}
+			}
+		""
+	end
+end
diff --git a/external/vulkancts/data/vulkan/glsl/es310/constants.test b/external/vulkancts/data/vulkan/glsl/es310/constants.test
new file mode 100644
index 0000000..2878333
--- /dev/null
+++ b/external/vulkancts/data/vulkan/glsl/es310/constants.test
@@ -0,0 +1,838 @@
+case float_input
+	version 310 es
+	values
+	{
+		input float in0		= [ 1.123 | 0.75 | -512.0 | -72.13 | 199.91 | -1.123 | -0.75 | 512.0 | -72.13 | -199.91 ];
+		output float out0	= [ 1.123 | 0.75 | -512.0 | -72.13 | 199.91 | -1.123 | -0.75 | 512.0 | -72.13 | -199.91 ];
+	}
+
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = in0;
+			${OUTPUT}
+		}
+	""
+end
+
+case float_uniform
+	version 310 es
+	values
+	{
+		uniform float uni0	= [ 1.123 | 0.75 | -512.0 | -72.13 | 199.91 ];
+		output float out0	= [ 1.123 | 0.75 | -512.0 | -72.13 | 199.91 ];
+	}
+
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = uni0;
+			${OUTPUT}
+		}
+	""
+end
+
+case float_0
+	version 310 es
+	values { output float out0 = 1.123; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = +1.123;
+			${OUTPUT}
+		}
+	""
+end
+
+case float_1
+	version 310 es
+	values { output float out0 = -1.123; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = -1.123;
+			${OUTPUT}
+		}
+	""
+end
+
+case float_2
+	version 310 es
+	values { output float out0 = 123.0; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = 123.;
+			${OUTPUT}
+		}
+	""
+end
+
+case float_3
+	version 310 es
+	values { output float out0 = 0.123; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = .123;
+			${OUTPUT}
+		}
+	""
+end
+
+case float_4
+	version 310 es
+	values { output float out0 = 123.0; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = 1.23e+2;
+			${OUTPUT}
+		}
+	""
+end
+
+case float_5
+	version 310 es
+	values { output float out0 = -123.0; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = -1.23E+2;
+			${OUTPUT}
+		}
+	""
+end
+
+case float_6
+	version 310 es
+	values { output float out0 = -123.0; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = -1.23e2;
+			${OUTPUT}
+		}
+	""
+end
+
+case float_7
+	version 310 es
+	values { output float out0 = 0.123; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = 1.23e-1;
+			${OUTPUT}
+		}
+	""
+end
+
+case float_8
+	version 310 es
+	values { output float out0 = 1000.0; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = 1e3;
+			${OUTPUT}
+		}
+	""
+end
+
+case float_f_suffix_0
+	version 310 es
+	values { output float out0 = 1.0; }
+	both ""
+		#version 310 es
+		precision mediump float;
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			float value = 1.0f;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
+
+case float_f_suffix_1
+	version 310 es
+	values { output float out0 = 1.0; }
+	both ""
+		#version 310 es
+		precision mediump float;
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			float value = 1.0F;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
+
+case int_0
+	version 310 es
+	values { output int out0 = 123; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = 123;
+			${OUTPUT}
+		}
+	""
+end
+
+case int_1
+	version 310 es
+	values { output int out0 = -321; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = -321;
+			${OUTPUT}
+		}
+	""
+end
+
+case int_2
+	version 310 es
+	values { output int out0 = 123; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = 0x7B;
+			${OUTPUT}
+		}
+	""
+end
+
+case int_3
+	version 310 es
+	values { output int out0 = 123; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = 0X7b;
+			${OUTPUT}
+		}
+	""
+end
+
+case int_4
+	version 310 es
+	values { output int out0 = 123; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = 0173;
+			${OUTPUT}
+		}
+	""
+end
+
+case bool_0
+	version 310 es
+	values { output bool out0 = true; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = true;
+			${OUTPUT}
+		}
+	""
+end
+
+case bool_1
+	version 310 es
+	values { output bool out0 = false; }
+	both ""
+		#version 310 es
+		precision highp float;
+		${DECLARATIONS}
+		void main()
+		{
+			out0 = false;
+			${OUTPUT}
+		}
+	""
+end
+
+case const_float_global
+	 version 310 es
+	 values { output float out0 = 1000.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			const float theConstant = 1000.0;
+			void main()
+			{
+				out0 = theConstant;
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_main
+	 version 310 es
+	 values { output float out0 = -1000.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+					const float theConstant = -1000.0;
+				out0 = theConstant;
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_function
+	 version 310 es
+	 values { output float out0 = -0.012; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			float func()
+			{
+					const float theConstant = -0.012;
+				return theConstant;
+			}
+			void main()
+			{
+				out0 = func();
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_scope
+	 version 310 es
+	 values { output float out0 = 1.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				{
+					 const float theConstant = 1.0;
+					 out0 = theConstant;
+				}
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_scope_shawdowing_1
+	 version 310 es
+	 values { output float out0 = 1.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				const float theConstant = 100.0;
+				{
+					 const float theConstant = 1.0;
+					 out0 = theConstant;
+				}
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_scope_shawdowing_2
+	 version 310 es
+	 values { output float out0 = 1.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			const float theConstant = 100.0;
+			void main()
+			{
+				{
+					 const float theConstant = 1.0;
+					 out0 = theConstant;
+				}
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_scope_shawdowing_3
+	 version 310 es
+	 values { output float out0 = 1.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			const float theConstant = 100.0;
+			void main()
+			{
+				const float theConstant = -100.0;
+				{
+					 const float theConstant = 1.0;
+					 out0 = theConstant;
+				}
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_scope_shawdowing_4
+	 version 310 es
+	 values { output float out0 = 2.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			const float theConstant = 100.0;
+			float func()
+			{
+				const float theConstant = 2.0;
+				return theConstant;
+			}
+			void main()
+			{
+				const float theConstant = -100.0;
+				{
+					 const float theConstant = 1.0;
+					 out0 = func();
+				}
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_operations_with_const
+	 version 310 es
+	 values { output float out0 = 21.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			const float theGlobalConstant = 10.0;
+			float func()
+			{
+				const float theConstant = 2.0;
+				return theConstant;
+			}
+			void main()
+			{
+				const float theConstant = -100.0;
+				{
+					 const float theConstant = 1.0;
+					 out0 = func() * theGlobalConstant + theConstant;
+				}
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_assignment_1
+	 version 310 es
+	 values { output float out0 = 10.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				const float theConstant1 = 10.0;
+				const float theConstant2 = theConstant1;
+				out0 = theConstant2;
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_assignment_2
+	 version 310 es
+	 values { output float out0 = 10.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				const float theConstant1 = 10.0;
+				{
+					const float theConstant2 = theConstant1;
+					out0 = theConstant2;
+				}
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_assignment_3
+	 version 310 es
+	 values { output float out0 = 10.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			const float theConstant1 = 10.0;
+			void main()
+			{
+				const float theConstant2 = theConstant1;
+				out0 = theConstant2;
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_assignment_4
+	 version 310 es
+	 values { output float out0 = 10.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			const float theConstant1 = 10.0;
+			float func()
+			{
+				const float theConstant2 = theConstant1;
+				return theConstant2;
+			}
+			void main()
+			{
+				out0 = func();
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_from_int
+	 version 310 es
+	 values { output float out0 = 10.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			const float theConstant = float(10);
+			void main()
+			{
+				out0 = theConstant;
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_from_vec2
+	 version 310 es
+	 values { output float out0 = 10.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			const float theConstant = vec2(1.0, 10.0).y;
+			void main()
+			{
+				out0 = theConstant;
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_from_vec3
+	 version 310 es
+	 values { output float out0 = 10.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			const float theConstant = vec3(1.0, 10.0, 20.0).y;
+			void main()
+			{
+				out0 = theConstant;
+				${OUTPUT}
+			}
+	 ""
+end
+
+case const_float_from_vec4
+	 version 310 es
+	 values { output float out0 = 10.0; }
+
+	 both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			const float theConstant = vec4(1.0, 10.0, 20.0, -10.0).y;
+			void main()
+			{
+				out0 = theConstant;
+				${OUTPUT}
+			}
+	 ""
+end
+
+case int_decimal
+	version 310 es
+	values { output int out0 = 7; }
+	both ""
+		#version 310 es
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			int value = 7;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
+
+case int_octal
+	version 310 es
+	values { output int out0 = 15; }
+	both ""
+		#version 310 es
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			int value = 017;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
+
+case int_hexadecimal_0
+	version 310 es
+	values { output int out0 = 47; }
+	both ""
+		#version 310 es
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			int value = 0x2f;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
+
+case int_hexadecimal_1
+	version 310 es
+	values { output int out0 = 47; }
+	both ""
+		#version 310 es
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			int value = 0X2f;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
+
+case uint_decimal_0
+	version 310 es
+	values { output uint out0 = 7; }
+	both ""
+		#version 310 es
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			uint value = 7u;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
+
+case uint_decimal_1
+	version 310 es
+	values { output uint out0 = 7; }
+	both ""
+		#version 310 es
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			uint value = 7U;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
+
+case uint_decimal_2
+	version 310 es
+	values { output uint out0 = 0; }
+	both ""
+		#version 310 es
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			uint value = 0u;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
+
+case uint_decimal_3
+	version 310 es
+	values { output uint out0 = 0; }
+	both ""
+		#version 310 es
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			uint value = 0U;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
+
+case uint_octal_0
+	version 310 es
+	values { output uint out0 = 15; }
+	both ""
+		#version 310 es
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			uint value = 017u;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
+
+case uint_octal_1
+	version 310 es
+	values { output uint out0 = 15; }
+	both ""
+		#version 310 es
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			uint value = 017U;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
+
+case uint_hexadecimal_0
+	version 310 es
+	values { output uint out0 = 47; }
+	both ""
+		#version 310 es
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			uint value = 0x2fU;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
+
+case uint_hexadecimal_1
+	version 310 es
+	values { output uint out0 = 47; }
+	both ""
+		#version 310 es
+		${DECLARATIONS}
+		void main ()
+		{
+			${SETUP}
+			uint value = 0X2fu;
+			out0 = value;
+			${OUTPUT}
+		}
+	""
+end
diff --git a/external/vulkancts/data/vulkan/glsl/es310/conversions.test b/external/vulkancts/data/vulkan/glsl/es310/conversions.test
new file mode 100644
index 0000000..bc8ebc3
--- /dev/null
+++ b/external/vulkancts/data/vulkan/glsl/es310/conversions.test
@@ -0,0 +1,13186 @@
+# WARNING: This file is auto-generated. Do NOT modify it manually, but rather
+# modify the generating script file. Otherwise changes will be lost!
+
+group scalar_to_scalar "Scalar to Scalar Conversions"
+
+	case float_to_float
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output float out0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_int
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output int out0 = [ 0 | 1 | 2 | 3 | 0 | -8 | -20 | 36 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_bool
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output bool out0 = [ false | true | true | true | true | true | true | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_float
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output float out0 = [ 0.0 | 1.0 | 2.0 | 5.0 | 8.0 | 11.0 | -12.0 | -66.0 | -192.0 | 255.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_int
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output int out0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_bool
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output bool out0 = [ false | true | true | true | true | true | true | true | true | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_float
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output float out0 = [ 0.0 | 2.0 | 3.0 | 8.0 | 9.0 | 12.0 | 10.0 | 45.0 | 193.0 | 255.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_int
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output int out0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_bool
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output bool out0 = [ false | true | true | true | true | true | true | true | true | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_float
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output float out0 = [ 1.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_int
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output int out0 = [ 1 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_bool
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output bool out0 = [ true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_uint
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | 0.5 | 8.25 | 20.125 | 36.8125 ];
+			output uint out0 = [ 0 | 1 | 2 | 3 | 0 | 8 | 20 | 36 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_uint
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | 12 | 66 | 192 | 255 ];
+			output uint out0 = [ 0 | 1 | 2 | 5 | 8 | 11 | 12 | 66 | 192 | 255 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_uint
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output uint out0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_uint
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output uint out0 = [ 1 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+end # scalar_to_scalar
+group scalar_to_vector "Scalar to Vector Conversions"
+
+	case float_to_vec2
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output vec2 out0 = [ vec2(0.0, 0.0) | vec2(1.0, 1.0) | vec2(2.0, 2.0) | vec2(3.5, 3.5) | vec2(-0.5, -0.5) | vec2(-8.25, -8.25) | vec2(-20.125, -20.125) | vec2(36.8125, 36.8125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_vec3
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output vec3 out0 = [ vec3(0.0, 0.0, 0.0) | vec3(1.0, 1.0, 1.0) | vec3(2.0, 2.0, 2.0) | vec3(3.5, 3.5, 3.5) | vec3(-0.5, -0.5, -0.5) | vec3(-8.25, -8.25, -8.25) | vec3(-20.125, -20.125, -20.125) | vec3(36.8125, 36.8125, 36.8125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_vec4
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output vec4 out0 = [ vec4(0.0, 0.0, 0.0, 0.0) | vec4(1.0, 1.0, 1.0, 1.0) | vec4(2.0, 2.0, 2.0, 2.0) | vec4(3.5, 3.5, 3.5, 3.5) | vec4(-0.5, -0.5, -0.5, -0.5) | vec4(-8.25, -8.25, -8.25, -8.25) | vec4(-20.125, -20.125, -20.125, -20.125) | vec4(36.8125, 36.8125, 36.8125, 36.8125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_ivec2
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(2, 2) | ivec2(3, 3) | ivec2(0, 0) | ivec2(-8, -8) | ivec2(-20, -20) | ivec2(36, 36) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_ivec3
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(2, 2, 2) | ivec3(3, 3, 3) | ivec3(0, 0, 0) | ivec3(-8, -8, -8) | ivec3(-20, -20, -20) | ivec3(36, 36, 36) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_ivec4
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(2, 2, 2, 2) | ivec4(3, 3, 3, 3) | ivec4(0, 0, 0, 0) | ivec4(-8, -8, -8, -8) | ivec4(-20, -20, -20, -20) | ivec4(36, 36, 36, 36) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_bvec2
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output bvec2 out0 = [ bvec2(false, false) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_bvec3
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output bvec3 out0 = [ bvec3(false, false, false) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_bvec4
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_vec2
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output vec2 out0 = [ vec2(0.0, 0.0) | vec2(1.0, 1.0) | vec2(2.0, 2.0) | vec2(5.0, 5.0) | vec2(8.0, 8.0) | vec2(11.0, 11.0) | vec2(-12.0, -12.0) | vec2(-66.0, -66.0) | vec2(-192.0, -192.0) | vec2(255.0, 255.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_vec3
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output vec3 out0 = [ vec3(0.0, 0.0, 0.0) | vec3(1.0, 1.0, 1.0) | vec3(2.0, 2.0, 2.0) | vec3(5.0, 5.0, 5.0) | vec3(8.0, 8.0, 8.0) | vec3(11.0, 11.0, 11.0) | vec3(-12.0, -12.0, -12.0) | vec3(-66.0, -66.0, -66.0) | vec3(-192.0, -192.0, -192.0) | vec3(255.0, 255.0, 255.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_vec4
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output vec4 out0 = [ vec4(0.0, 0.0, 0.0, 0.0) | vec4(1.0, 1.0, 1.0, 1.0) | vec4(2.0, 2.0, 2.0, 2.0) | vec4(5.0, 5.0, 5.0, 5.0) | vec4(8.0, 8.0, 8.0, 8.0) | vec4(11.0, 11.0, 11.0, 11.0) | vec4(-12.0, -12.0, -12.0, -12.0) | vec4(-66.0, -66.0, -66.0, -66.0) | vec4(-192.0, -192.0, -192.0, -192.0) | vec4(255.0, 255.0, 255.0, 255.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_ivec2
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(2, 2) | ivec2(5, 5) | ivec2(8, 8) | ivec2(11, 11) | ivec2(-12, -12) | ivec2(-66, -66) | ivec2(-192, -192) | ivec2(255, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_ivec3
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(2, 2, 2) | ivec3(5, 5, 5) | ivec3(8, 8, 8) | ivec3(11, 11, 11) | ivec3(-12, -12, -12) | ivec3(-66, -66, -66) | ivec3(-192, -192, -192) | ivec3(255, 255, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_ivec4
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(2, 2, 2, 2) | ivec4(5, 5, 5, 5) | ivec4(8, 8, 8, 8) | ivec4(11, 11, 11, 11) | ivec4(-12, -12, -12, -12) | ivec4(-66, -66, -66, -66) | ivec4(-192, -192, -192, -192) | ivec4(255, 255, 255, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_bvec2
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output bvec2 out0 = [ bvec2(false, false) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_bvec3
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output bvec3 out0 = [ bvec3(false, false, false) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_bvec4
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_vec2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output vec2 out0 = [ vec2(0.0, 0.0) | vec2(2.0, 2.0) | vec2(3.0, 3.0) | vec2(8.0, 8.0) | vec2(9.0, 9.0) | vec2(12.0, 12.0) | vec2(10.0, 10.0) | vec2(45.0, 45.0) | vec2(193.0, 193.0) | vec2(255.0, 255.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_vec3
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output vec3 out0 = [ vec3(0.0, 0.0, 0.0) | vec3(2.0, 2.0, 2.0) | vec3(3.0, 3.0, 3.0) | vec3(8.0, 8.0, 8.0) | vec3(9.0, 9.0, 9.0) | vec3(12.0, 12.0, 12.0) | vec3(10.0, 10.0, 10.0) | vec3(45.0, 45.0, 45.0) | vec3(193.0, 193.0, 193.0) | vec3(255.0, 255.0, 255.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_vec4
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output vec4 out0 = [ vec4(0.0, 0.0, 0.0, 0.0) | vec4(2.0, 2.0, 2.0, 2.0) | vec4(3.0, 3.0, 3.0, 3.0) | vec4(8.0, 8.0, 8.0, 8.0) | vec4(9.0, 9.0, 9.0, 9.0) | vec4(12.0, 12.0, 12.0, 12.0) | vec4(10.0, 10.0, 10.0, 10.0) | vec4(45.0, 45.0, 45.0, 45.0) | vec4(193.0, 193.0, 193.0, 193.0) | vec4(255.0, 255.0, 255.0, 255.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_ivec2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(2, 2) | ivec2(3, 3) | ivec2(8, 8) | ivec2(9, 9) | ivec2(12, 12) | ivec2(10, 10) | ivec2(45, 45) | ivec2(193, 193) | ivec2(255, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_ivec3
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(2, 2, 2) | ivec3(3, 3, 3) | ivec3(8, 8, 8) | ivec3(9, 9, 9) | ivec3(12, 12, 12) | ivec3(10, 10, 10) | ivec3(45, 45, 45) | ivec3(193, 193, 193) | ivec3(255, 255, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_ivec4
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(2, 2, 2, 2) | ivec4(3, 3, 3, 3) | ivec4(8, 8, 8, 8) | ivec4(9, 9, 9, 9) | ivec4(12, 12, 12, 12) | ivec4(10, 10, 10, 10) | ivec4(45, 45, 45, 45) | ivec4(193, 193, 193, 193) | ivec4(255, 255, 255, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_bvec2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output bvec2 out0 = [ bvec2(false, false) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_bvec3
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output bvec3 out0 = [ bvec3(false, false, false) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_bvec4
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_vec2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output vec2 out0 = [ vec2(1.0, 1.0) | vec2(0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_vec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output vec3 out0 = [ vec3(1.0, 1.0, 1.0) | vec3(0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_vec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output vec4 out0 = [ vec4(1.0, 1.0, 1.0, 1.0) | vec4(0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_ivec2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output ivec2 out0 = [ ivec2(1, 1) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_ivec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output ivec3 out0 = [ ivec3(1, 1, 1) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_ivec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output ivec4 out0 = [ ivec4(1, 1, 1, 1) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_bvec2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_bvec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output bvec3 out0 = [ bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_bvec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output bvec4 out0 = [ bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_uvec2
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | 0.5 | 8.25 | 20.125 | 36.8125 ];
+			output uvec2 out0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(2, 2) | uvec2(3, 3) | uvec2(0, 0) | uvec2(8, 8) | uvec2(20, 20) | uvec2(36, 36) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_uvec3
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | 0.5 | 8.25 | 20.125 | 36.8125 ];
+			output uvec3 out0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(2, 2, 2) | uvec3(3, 3, 3) | uvec3(0, 0, 0) | uvec3(8, 8, 8) | uvec3(20, 20, 20) | uvec3(36, 36, 36) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_uvec4
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | 0.5 | 8.25 | 20.125 | 36.8125 ];
+			output uvec4 out0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(2, 2, 2, 2) | uvec4(3, 3, 3, 3) | uvec4(0, 0, 0, 0) | uvec4(8, 8, 8, 8) | uvec4(20, 20, 20, 20) | uvec4(36, 36, 36, 36) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_uvec2
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | 12 | 66 | 192 | 255 ];
+			output uvec2 out0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(2, 2) | uvec2(5, 5) | uvec2(8, 8) | uvec2(11, 11) | uvec2(12, 12) | uvec2(66, 66) | uvec2(192, 192) | uvec2(255, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_uvec3
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | 12 | 66 | 192 | 255 ];
+			output uvec3 out0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(2, 2, 2) | uvec3(5, 5, 5) | uvec3(8, 8, 8) | uvec3(11, 11, 11) | uvec3(12, 12, 12) | uvec3(66, 66, 66) | uvec3(192, 192, 192) | uvec3(255, 255, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_uvec4
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | 12 | 66 | 192 | 255 ];
+			output uvec4 out0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(2, 2, 2, 2) | uvec4(5, 5, 5, 5) | uvec4(8, 8, 8, 8) | uvec4(11, 11, 11, 11) | uvec4(12, 12, 12, 12) | uvec4(66, 66, 66, 66) | uvec4(192, 192, 192, 192) | uvec4(255, 255, 255, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_uvec2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output uvec2 out0 = [ uvec2(0, 0) | uvec2(2, 2) | uvec2(3, 3) | uvec2(8, 8) | uvec2(9, 9) | uvec2(12, 12) | uvec2(10, 10) | uvec2(45, 45) | uvec2(193, 193) | uvec2(255, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_uvec3
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output uvec3 out0 = [ uvec3(0, 0, 0) | uvec3(2, 2, 2) | uvec3(3, 3, 3) | uvec3(8, 8, 8) | uvec3(9, 9, 9) | uvec3(12, 12, 12) | uvec3(10, 10, 10) | uvec3(45, 45, 45) | uvec3(193, 193, 193) | uvec3(255, 255, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_uvec4
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output uvec4 out0 = [ uvec4(0, 0, 0, 0) | uvec4(2, 2, 2, 2) | uvec4(3, 3, 3, 3) | uvec4(8, 8, 8, 8) | uvec4(9, 9, 9, 9) | uvec4(12, 12, 12, 12) | uvec4(10, 10, 10, 10) | uvec4(45, 45, 45, 45) | uvec4(193, 193, 193, 193) | uvec4(255, 255, 255, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_uvec2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output uvec2 out0 = [ uvec2(1, 1) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_uvec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output uvec3 out0 = [ uvec3(1, 1, 1) | uvec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_uvec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output uvec4 out0 = [ uvec4(1, 1, 1, 1) | uvec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+end # scalar_to_vector
+group vector_to_scalar "Vector to Scalar Conversions"
+
+	case vec2_to_float
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output float out0 = [ 0.0 | 1.0 | -0.5 | -32.0 | -0.75 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_to_int
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_to_bool
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output bool out0 = [ false | true | true | true | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_to_float
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output float out0 = [ 0.0 | 1.0 | -0.5 | -32.0 | -0.75 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_to_int
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_to_bool
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output bool out0 = [ false | true | true | true | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_float
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output float out0 = [ 0.0 | 1.0 | -0.5 | -32.0 | -0.75 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_int
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_bool
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output bool out0 = [ false | true | true | true | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec2_to_float
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output float out0 = [ 0.0 | 1.0 | 0.0 | -32.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec2_to_int
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec2_to_bool
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output bool out0 = [ false | true | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_to_float
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output float out0 = [ 0.0 | 1.0 | 0.0 | -32.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_to_int
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_to_bool
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output bool out0 = [ false | true | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_float
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output float out0 = [ 0.0 | 1.0 | 0.0 | -32.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_int
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_bool
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output bool out0 = [ false | true | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec2_to_float
+		version 310 es
+		values
+		{
+			input uvec2 in0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+			output float out0 = [ 0.0 | 1.0 | 0.0 | 32.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec2_to_int
+		version 310 es
+		values
+		{
+			input uvec2 in0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | 32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec2_to_bool
+		version 310 es
+		values
+		{
+			input uvec2 in0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+			output bool out0 = [ false | true | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec3_to_float
+		version 310 es
+		values
+		{
+			input uvec3 in0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+			output float out0 = [ 0.0 | 1.0 | 0.0 | 32.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec3_to_int
+		version 310 es
+		values
+		{
+			input uvec3 in0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | 32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec3_to_bool
+		version 310 es
+		values
+		{
+			input uvec3 in0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+			output bool out0 = [ false | true | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_float
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output float out0 = [ 0.0 | 1.0 | 0.0 | 32.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_int
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | 32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_bool
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output bool out0 = [ false | true | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_to_float
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output float out0 = [ 1.0 | 0.0 | 0.0 | 1.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_to_int
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output int out0 = [ 1 | 0 | 0 | 1 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_to_bool
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bool out0 = [ true | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_to_float
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output float out0 = [ 1.0 | 0.0 | 0.0 | 1.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_to_int
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output int out0 = [ 1 | 0 | 0 | 1 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_to_bool
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bool out0 = [ true | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_float
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output float out0 = [ 1.0 | 0.0 | 0.0 | 1.0 | 0.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = float(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_int
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output int out0 = [ 1 | 0 | 0 | 1 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = int(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_bool
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bool out0 = [ true | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bool(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_to_uint
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(0.5, 2.25) | vec2(32.0, 64.0) | vec2(0.75, 0.0322580645161) ];
+			output uint out0 = [ 0 | 1 | 0 | 32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_to_uint
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(0.5, 2.25, 4.875) | vec3(32.0, 64.0, 51.0) | vec3(0.75, 0.0322580645161, 0.0526315789474) ];
+			output uint out0 = [ 0 | 1 | 0 | 32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_uint
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(0.5, 2.25, 4.875, 9.0) | vec4(32.0, 64.0, 51.0, 24.0) | vec4(0.75, 0.0322580645161, 0.0526315789474, 0.25) ];
+			output uint out0 = [ 0 | 1 | 0 | 32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec2_to_uint
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, 2) | ivec2(32, 64) | ivec2(0, 0) ];
+			output uint out0 = [ 0 | 1 | 0 | 32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_to_uint
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, 2, 4) | ivec3(32, 64, 51) | ivec3(0, 0, 0) ];
+			output uint out0 = [ 0 | 1 | 0 | 32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_uint
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, 2, 4, 9) | ivec4(32, 64, 51, 24) | ivec4(0, 0, 0, 0) ];
+			output uint out0 = [ 0 | 1 | 0 | 32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec2_to_uint
+		version 310 es
+		values
+		{
+			input uvec2 in0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+			output uint out0 = [ 0 | 1 | 0 | 32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec3_to_uint
+		version 310 es
+		values
+		{
+			input uvec3 in0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+			output uint out0 = [ 0 | 1 | 0 | 32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_uint
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output uint out0 = [ 0 | 1 | 0 | 32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_to_uint
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output uint out0 = [ 1 | 0 | 0 | 1 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_to_uint
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output uint out0 = [ 1 | 0 | 0 | 1 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_uint
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output uint out0 = [ 1 | 0 | 0 | 1 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uint(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+end # vector_to_scalar
+
+group vector_to_vector "Vector to Vector Conversions"
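+
+	# For reference: the cases below exercise GLSL ES constructor conversions
+	# between vector types. Narrowing constructors keep the leading components
+	# and drop the rest; float components are truncated towards zero when
+	# converted to int or uint; any non-zero value converts to true, and
+	# true/false convert to 1/0 respectively.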
+
+	case vec4_to_vec4
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_vec3
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec3 out0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_vec2
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec2 out0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_ivec4
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_ivec3
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_ivec2
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_bvec4
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output bvec4 out0 = [ bvec4(false, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_bvec3
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output bvec3 out0 = [ bvec3(false, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_bvec2
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output bvec2 out0 = [ bvec2(false, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_vec4
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output vec4 out0 = [ vec4(0.0, 0.0, 0.0, 0.0) | vec4(1.0, 1.0, 1.0, 1.0) | vec4(0.0, -2.0, -4.0, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_vec3
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output vec3 out0 = [ vec3(0.0, 0.0, 0.0) | vec3(1.0, 1.0, 1.0) | vec3(0.0, -2.0, -4.0) | vec3(-32.0, 64.0, -51.0) | vec3(0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_vec2
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output vec2 out0 = [ vec2(0.0, 0.0) | vec2(1.0, 1.0) | vec2(0.0, -2.0) | vec2(-32.0, 64.0) | vec2(0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_ivec4
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_ivec3
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_ivec2
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_bvec4
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, true, true, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_bvec3
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output bvec3 out0 = [ bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, true, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_bvec2
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output bvec2 out0 = [ bvec2(false, false) | bvec2(true, true) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_vec4
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output vec4 out0 = [ vec4(0.0, 0.0, 0.0, 0.0) | vec4(1.0, 1.0, 1.0, 1.0) | vec4(0.0, 2.0, 4.0, 9.0) | vec4(32.0, 64.0, 51.0, 24.0) | vec4(0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_vec3
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output vec3 out0 = [ vec3(0.0, 0.0, 0.0) | vec3(1.0, 1.0, 1.0) | vec3(0.0, 2.0, 4.0) | vec3(32.0, 64.0, 51.0) | vec3(0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_vec2
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output vec2 out0 = [ vec2(0.0, 0.0) | vec2(1.0, 1.0) | vec2(0.0, 2.0) | vec2(32.0, 64.0) | vec2(0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_ivec4
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, 2, 4, 9) | ivec4(32, 64, 51, 24) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_ivec3
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, 2, 4) | ivec3(32, 64, 51) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_ivec2
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, 2) | ivec2(32, 64) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_bvec4
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, true, true, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_bvec3
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output bvec3 out0 = [ bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, true, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_bvec2
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output bvec2 out0 = [ bvec2(false, false) | bvec2(true, true) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_vec4
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output vec4 out0 = [ vec4(1.0, 0.0, 0.0, 1.0) | vec4(0.0, 0.0, 0.0, 1.0) | vec4(0.0, 1.0, 0.0, 0.0) | vec4(1.0, 1.0, 1.0, 1.0) | vec4(0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_vec3
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output vec3 out0 = [ vec3(1.0, 0.0, 0.0) | vec3(0.0, 0.0, 0.0) | vec3(0.0, 1.0, 0.0) | vec3(1.0, 1.0, 1.0) | vec3(0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_vec2
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output vec2 out0 = [ vec2(1.0, 0.0) | vec2(0.0, 0.0) | vec2(0.0, 1.0) | vec2(1.0, 1.0) | vec2(0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_ivec4
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output ivec4 out0 = [ ivec4(1, 0, 0, 1) | ivec4(0, 0, 0, 1) | ivec4(0, 1, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_ivec3
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output ivec3 out0 = [ ivec3(1, 0, 0) | ivec3(0, 0, 0) | ivec3(0, 1, 0) | ivec3(1, 1, 1) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_ivec2
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output ivec2 out0 = [ ivec2(1, 0) | ivec2(0, 0) | ivec2(0, 1) | ivec2(1, 1) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_bvec4
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_bvec3
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_bvec2
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec2 out0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_uvec4
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(0.5, 2.25, 4.875, 9.0) | vec4(32.0, 64.0, 51.0, 24.0) | vec4(0.75, 0.0322580645161, 0.0526315789474, 0.25) ];
+			output uvec4 out0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_uvec3
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(0.5, 2.25, 4.875, 9.0) | vec4(32.0, 64.0, 51.0, 24.0) | vec4(0.75, 0.0322580645161, 0.0526315789474, 0.25) ];
+			output uvec3 out0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_to_uvec2
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(0.5, 2.25, 4.875, 9.0) | vec4(32.0, 64.0, 51.0, 24.0) | vec4(0.75, 0.0322580645161, 0.0526315789474, 0.25) ];
+			output uvec2 out0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_uvec4
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, 2, 4, 9) | ivec4(32, 64, 51, 24) | ivec4(0, 0, 0, 0) ];
+			output uvec4 out0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_uvec3
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, 2, 4, 9) | ivec4(32, 64, 51, 24) | ivec4(0, 0, 0, 0) ];
+			output uvec3 out0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_to_uvec2
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, 2, 4, 9) | ivec4(32, 64, 51, 24) | ivec4(0, 0, 0, 0) ];
+			output uvec2 out0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_uvec4
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output uvec4 out0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_uvec3
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output uvec3 out0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec4_to_uvec2
+		version 310 es
+		values
+		{
+			input uvec4 in0 = [ uvec4(0, 0, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 2, 4, 9) | uvec4(32, 64, 51, 24) | uvec4(0, 0, 0, 0) ];
+			output uvec2 out0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_uvec4
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output uvec4 out0 = [ uvec4(1, 0, 0, 1) | uvec4(0, 0, 0, 1) | uvec4(0, 1, 0, 0) | uvec4(1, 1, 1, 1) | uvec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_uvec3
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output uvec3 out0 = [ uvec3(1, 0, 0) | uvec3(0, 0, 0) | uvec3(0, 1, 0) | uvec3(1, 1, 1) | uvec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_to_uvec2
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output uvec2 out0 = [ uvec2(1, 0) | uvec2(0, 0) | uvec2(0, 1) | uvec2(1, 1) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_to_vec3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_to_vec2
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec2 out0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_to_ivec3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_to_ivec2
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_to_bvec3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output bvec3 out0 = [ bvec3(false, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_to_bvec2
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output bvec2 out0 = [ bvec2(false, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_to_vec3
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output vec3 out0 = [ vec3(0.0, 0.0, 0.0) | vec3(1.0, 1.0, 1.0) | vec3(0.0, -2.0, -4.0) | vec3(-32.0, 64.0, -51.0) | vec3(0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_to_vec2
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output vec2 out0 = [ vec2(0.0, 0.0) | vec2(1.0, 1.0) | vec2(0.0, -2.0) | vec2(-32.0, 64.0) | vec2(0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_to_ivec3
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_to_ivec2
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_to_bvec3
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output bvec3 out0 = [ bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, true, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_to_bvec2
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output bvec2 out0 = [ bvec2(false, false) | bvec2(true, true) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec3_to_vec3
+		version 310 es
+		values
+		{
+			input uvec3 in0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+			output vec3 out0 = [ vec3(0.0, 0.0, 0.0) | vec3(1.0, 1.0, 1.0) | vec3(0.0, 2.0, 4.0) | vec3(32.0, 64.0, 51.0) | vec3(0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec3_to_vec2
+		version 310 es
+		values
+		{
+			input uvec3 in0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+			output vec2 out0 = [ vec2(0.0, 0.0) | vec2(1.0, 1.0) | vec2(0.0, 2.0) | vec2(32.0, 64.0) | vec2(0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec3_to_ivec3
+		version 310 es
+		values
+		{
+			input uvec3 in0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, 2, 4) | ivec3(32, 64, 51) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec3_to_ivec2
+		version 310 es
+		values
+		{
+			input uvec3 in0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, 2) | ivec2(32, 64) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec3_to_bvec3
+		version 310 es
+		values
+		{
+			input uvec3 in0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+			output bvec3 out0 = [ bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, true, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec3_to_bvec2
+		version 310 es
+		values
+		{
+			input uvec3 in0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+			output bvec2 out0 = [ bvec2(false, false) | bvec2(true, true) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_to_vec3
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output vec3 out0 = [ vec3(1.0, 0.0, 0.0) | vec3(0.0, 0.0, 0.0) | vec3(0.0, 1.0, 0.0) | vec3(1.0, 1.0, 1.0) | vec3(0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_to_vec2
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output vec2 out0 = [ vec2(1.0, 0.0) | vec2(0.0, 0.0) | vec2(0.0, 1.0) | vec2(1.0, 1.0) | vec2(0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_to_ivec3
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output ivec3 out0 = [ ivec3(1, 0, 0) | ivec3(0, 0, 0) | ivec3(0, 1, 0) | ivec3(1, 1, 1) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_to_ivec2
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output ivec2 out0 = [ ivec2(1, 0) | ivec2(0, 0) | ivec2(0, 1) | ivec2(1, 1) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_to_bvec3
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_to_bvec2
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec2 out0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_to_uvec3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(0.5, 2.25, 4.875) | vec3(32.0, 64.0, 51.0) | vec3(0.75, 0.0322580645161, 0.0526315789474) ];
+			output uvec3 out0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_to_uvec2
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(0.5, 2.25, 4.875) | vec3(32.0, 64.0, 51.0) | vec3(0.75, 0.0322580645161, 0.0526315789474) ];
+			output uvec2 out0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_to_uvec3
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, 2, 4) | ivec3(32, 64, 51) | ivec3(0, 0, 0) ];
+			output uvec3 out0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_to_uvec2
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, 2, 4) | ivec3(32, 64, 51) | ivec3(0, 0, 0) ];
+			output uvec2 out0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec3_to_uvec3
+		version 310 es
+		values
+		{
+			input uvec3 in0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+			output uvec3 out0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec3_to_uvec2
+		version 310 es
+		values
+		{
+			input uvec3 in0 = [ uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 0, 0) ];
+			output uvec2 out0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_to_uvec3
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output uvec3 out0 = [ uvec3(1, 0, 0) | uvec3(0, 0, 0) | uvec3(0, 1, 0) | uvec3(1, 1, 1) | uvec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_to_uvec2
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output uvec2 out0 = [ uvec2(1, 0) | uvec2(0, 0) | uvec2(0, 1) | uvec2(1, 1) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_to_vec2
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec2 out0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_to_ivec2
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_to_bvec2
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output bvec2 out0 = [ bvec2(false, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec2_to_vec2
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output vec2 out0 = [ vec2(0.0, 0.0) | vec2(1.0, 1.0) | vec2(0.0, -2.0) | vec2(-32.0, 64.0) | vec2(0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec2_to_ivec2
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec2_to_bvec2
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output bvec2 out0 = [ bvec2(false, false) | bvec2(true, true) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec2_to_vec2
+		version 310 es
+		values
+		{
+			input uvec2 in0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+			output vec2 out0 = [ vec2(0.0, 0.0) | vec2(1.0, 1.0) | vec2(0.0, 2.0) | vec2(32.0, 64.0) | vec2(0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec2_to_ivec2
+		version 310 es
+		values
+		{
+			input uvec2 in0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, 2) | ivec2(32, 64) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec2_to_bvec2
+		version 310 es
+		values
+		{
+			input uvec2 in0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+			output bvec2 out0 = [ bvec2(false, false) | bvec2(true, true) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_to_vec2
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output vec2 out0 = [ vec2(1.0, 0.0) | vec2(0.0, 0.0) | vec2(0.0, 1.0) | vec2(1.0, 1.0) | vec2(0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_to_ivec2
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output ivec2 out0 = [ ivec2(1, 0) | ivec2(0, 0) | ivec2(0, 1) | ivec2(1, 1) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_to_bvec2
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec2 out0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_to_uvec2
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(0.5, 2.25) | vec2(32.0, 64.0) | vec2(0.75, 0.0322580645161) ];
+			output uvec2 out0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec2_to_uvec2
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, 2) | ivec2(32, 64) | ivec2(0, 0) ];
+			output uvec2 out0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uvec2_to_uvec2
+		version 310 es
+		values
+		{
+			input uvec2 in0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+			output uvec2 out0 = [ uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_to_uvec2
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output uvec2 out0 = [ uvec2(1, 0) | uvec2(0, 0) | uvec2(0, 1) | uvec2(1, 1) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+end # vector_to_vector
+group scalar_to_matrix "Scalar to Matrix Conversions"
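+	# Note (assumes '#' line comments are accepted by the case-file parser, as used after 'end' above):
+	# per the GLSL ES 3.1 specification, a matrix constructed from a single scalar has that scalar on
+	# its main diagonal and 0.0 elsewhere, so every case in this group expects a diagonal matrix whose
+	# diagonal value is the converted input scalar.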
+
+	case float_to_mat4
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output mat4 out0 = [ mat4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 2.0) | mat4(3.5, 0.0, 0.0, 0.0, 0.0, 3.5, 0.0, 0.0, 0.0, 0.0, 3.5, 0.0, 0.0, 0.0, 0.0, 3.5) | mat4(-0.5, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0, 0.0, 0.0, 0.0, -0.5) | mat4(-8.25, 0.0, 0.0, 0.0, 0.0, -8.25, 0.0, 0.0, 0.0, 0.0, -8.25, 0.0, 0.0, 0.0, 0.0, -8.25) | mat4(-20.125, 0.0, 0.0, 0.0, 0.0, -20.125, 0.0, 0.0, 0.0, 0.0, -20.125, 0.0, 0.0, 0.0, 0.0, -20.125) | mat4(36.8125, 0.0, 0.0, 0.0, 0.0, 36.8125, 0.0, 0.0, 0.0, 0.0, 36.8125, 0.0, 0.0, 0.0, 0.0, 36.8125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_mat4x3
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output mat4x3 out0 = [ mat4x3(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0) | mat4x3(3.5, 0.0, 0.0, 0.0, 3.5, 0.0, 0.0, 0.0, 3.5, 0.0, 0.0, 0.0) | mat4x3(-0.5, 0.0, 0.0, 0.0, -0.5, 0.0, 0.0, 0.0, -0.5, 0.0, 0.0, 0.0) | mat4x3(-8.25, 0.0, 0.0, 0.0, -8.25, 0.0, 0.0, 0.0, -8.25, 0.0, 0.0, 0.0) | mat4x3(-20.125, 0.0, 0.0, 0.0, -20.125, 0.0, 0.0, 0.0, -20.125, 0.0, 0.0, 0.0) | mat4x3(36.8125, 0.0, 0.0, 0.0, 36.8125, 0.0, 0.0, 0.0, 36.8125, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_mat4x2
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output mat4x2 out0 = [ mat4x2(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(2.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(3.5, 0.0, 0.0, 3.5, 0.0, 0.0, 0.0, 0.0) | mat4x2(-0.5, 0.0, 0.0, -0.5, 0.0, 0.0, 0.0, 0.0) | mat4x2(-8.25, 0.0, 0.0, -8.25, 0.0, 0.0, 0.0, 0.0) | mat4x2(-20.125, 0.0, 0.0, -20.125, 0.0, 0.0, 0.0, 0.0) | mat4x2(36.8125, 0.0, 0.0, 36.8125, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_mat3x4
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output mat3x4 out0 = [ mat3x4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0) | mat3x4(3.5, 0.0, 0.0, 0.0, 0.0, 3.5, 0.0, 0.0, 0.0, 0.0, 3.5, 0.0) | mat3x4(-0.5, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0) | mat3x4(-8.25, 0.0, 0.0, 0.0, 0.0, -8.25, 0.0, 0.0, 0.0, 0.0, -8.25, 0.0) | mat3x4(-20.125, 0.0, 0.0, 0.0, 0.0, -20.125, 0.0, 0.0, 0.0, 0.0, -20.125, 0.0) | mat3x4(36.8125, 0.0, 0.0, 0.0, 0.0, 36.8125, 0.0, 0.0, 0.0, 0.0, 36.8125, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_mat3
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output mat3 out0 = [ mat3(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 2.0) | mat3(3.5, 0.0, 0.0, 0.0, 3.5, 0.0, 0.0, 0.0, 3.5) | mat3(-0.5, 0.0, 0.0, 0.0, -0.5, 0.0, 0.0, 0.0, -0.5) | mat3(-8.25, 0.0, 0.0, 0.0, -8.25, 0.0, 0.0, 0.0, -8.25) | mat3(-20.125, 0.0, 0.0, 0.0, -20.125, 0.0, 0.0, 0.0, -20.125) | mat3(36.8125, 0.0, 0.0, 0.0, 36.8125, 0.0, 0.0, 0.0, 36.8125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_mat3x2
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output mat3x2 out0 = [ mat3x2(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(2.0, 0.0, 0.0, 2.0, 0.0, 0.0) | mat3x2(3.5, 0.0, 0.0, 3.5, 0.0, 0.0) | mat3x2(-0.5, 0.0, 0.0, -0.5, 0.0, 0.0) | mat3x2(-8.25, 0.0, 0.0, -8.25, 0.0, 0.0) | mat3x2(-20.125, 0.0, 0.0, -20.125, 0.0, 0.0) | mat3x2(36.8125, 0.0, 0.0, 36.8125, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_mat2x4
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output mat2x4 out0 = [ mat2x4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0) | mat2x4(3.5, 0.0, 0.0, 0.0, 0.0, 3.5, 0.0, 0.0) | mat2x4(-0.5, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0, 0.0) | mat2x4(-8.25, 0.0, 0.0, 0.0, 0.0, -8.25, 0.0, 0.0) | mat2x4(-20.125, 0.0, 0.0, 0.0, 0.0, -20.125, 0.0, 0.0) | mat2x4(36.8125, 0.0, 0.0, 0.0, 0.0, 36.8125, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_mat2x3
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output mat2x3 out0 = [ mat2x3(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(2.0, 0.0, 0.0, 0.0, 2.0, 0.0) | mat2x3(3.5, 0.0, 0.0, 0.0, 3.5, 0.0) | mat2x3(-0.5, 0.0, 0.0, 0.0, -0.5, 0.0) | mat2x3(-8.25, 0.0, 0.0, 0.0, -8.25, 0.0) | mat2x3(-20.125, 0.0, 0.0, 0.0, -20.125, 0.0) | mat2x3(36.8125, 0.0, 0.0, 0.0, 36.8125, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_to_mat2
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -8.25 | -20.125 | 36.8125 ];
+			output mat2 out0 = [ mat2(0.0, 0.0, 0.0, 0.0) | mat2(1.0, 0.0, 0.0, 1.0) | mat2(2.0, 0.0, 0.0, 2.0) | mat2(3.5, 0.0, 0.0, 3.5) | mat2(-0.5, 0.0, 0.0, -0.5) | mat2(-8.25, 0.0, 0.0, -8.25) | mat2(-20.125, 0.0, 0.0, -20.125) | mat2(36.8125, 0.0, 0.0, 36.8125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_mat4
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output mat4 out0 = [ mat4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 2.0) | mat4(5.0, 0.0, 0.0, 0.0, 0.0, 5.0, 0.0, 0.0, 0.0, 0.0, 5.0, 0.0, 0.0, 0.0, 0.0, 5.0) | mat4(8.0, 0.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0, 0.0, 8.0) | mat4(11.0, 0.0, 0.0, 0.0, 0.0, 11.0, 0.0, 0.0, 0.0, 0.0, 11.0, 0.0, 0.0, 0.0, 0.0, 11.0) | mat4(-12.0, 0.0, 0.0, 0.0, 0.0, -12.0, 0.0, 0.0, 0.0, 0.0, -12.0, 0.0, 0.0, 0.0, 0.0, -12.0) | mat4(-66.0, 0.0, 0.0, 0.0, 0.0, -66.0, 0.0, 0.0, 0.0, 0.0, -66.0, 0.0, 0.0, 0.0, 0.0, -66.0) | mat4(-192.0, 0.0, 0.0, 0.0, 0.0, -192.0, 0.0, 0.0, 0.0, 0.0, -192.0, 0.0, 0.0, 0.0, 0.0, -192.0) | mat4(255.0, 0.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0, 0.0, 255.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_mat4x3
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output mat4x3 out0 = [ mat4x3(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0) | mat4x3(5.0, 0.0, 0.0, 0.0, 5.0, 0.0, 0.0, 0.0, 5.0, 0.0, 0.0, 0.0) | mat4x3(8.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0) | mat4x3(11.0, 0.0, 0.0, 0.0, 11.0, 0.0, 0.0, 0.0, 11.0, 0.0, 0.0, 0.0) | mat4x3(-12.0, 0.0, 0.0, 0.0, -12.0, 0.0, 0.0, 0.0, -12.0, 0.0, 0.0, 0.0) | mat4x3(-66.0, 0.0, 0.0, 0.0, -66.0, 0.0, 0.0, 0.0, -66.0, 0.0, 0.0, 0.0) | mat4x3(-192.0, 0.0, 0.0, 0.0, -192.0, 0.0, 0.0, 0.0, -192.0, 0.0, 0.0, 0.0) | mat4x3(255.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_mat4x2
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output mat4x2 out0 = [ mat4x2(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(2.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(5.0, 0.0, 0.0, 5.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(8.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(11.0, 0.0, 0.0, 11.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(-12.0, 0.0, 0.0, -12.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(-66.0, 0.0, 0.0, -66.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(-192.0, 0.0, 0.0, -192.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(255.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_mat3x4
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output mat3x4 out0 = [ mat3x4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0) | mat3x4(5.0, 0.0, 0.0, 0.0, 0.0, 5.0, 0.0, 0.0, 0.0, 0.0, 5.0, 0.0) | mat3x4(8.0, 0.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0, 0.0, 8.0, 0.0) | mat3x4(11.0, 0.0, 0.0, 0.0, 0.0, 11.0, 0.0, 0.0, 0.0, 0.0, 11.0, 0.0) | mat3x4(-12.0, 0.0, 0.0, 0.0, 0.0, -12.0, 0.0, 0.0, 0.0, 0.0, -12.0, 0.0) | mat3x4(-66.0, 0.0, 0.0, 0.0, 0.0, -66.0, 0.0, 0.0, 0.0, 0.0, -66.0, 0.0) | mat3x4(-192.0, 0.0, 0.0, 0.0, 0.0, -192.0, 0.0, 0.0, 0.0, 0.0, -192.0, 0.0) | mat3x4(255.0, 0.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0, 0.0, 255.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_mat3
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output mat3 out0 = [ mat3(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 2.0) | mat3(5.0, 0.0, 0.0, 0.0, 5.0, 0.0, 0.0, 0.0, 5.0) | mat3(8.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0, 8.0) | mat3(11.0, 0.0, 0.0, 0.0, 11.0, 0.0, 0.0, 0.0, 11.0) | mat3(-12.0, 0.0, 0.0, 0.0, -12.0, 0.0, 0.0, 0.0, -12.0) | mat3(-66.0, 0.0, 0.0, 0.0, -66.0, 0.0, 0.0, 0.0, -66.0) | mat3(-192.0, 0.0, 0.0, 0.0, -192.0, 0.0, 0.0, 0.0, -192.0) | mat3(255.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0, 255.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_mat3x2
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output mat3x2 out0 = [ mat3x2(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(2.0, 0.0, 0.0, 2.0, 0.0, 0.0) | mat3x2(5.0, 0.0, 0.0, 5.0, 0.0, 0.0) | mat3x2(8.0, 0.0, 0.0, 8.0, 0.0, 0.0) | mat3x2(11.0, 0.0, 0.0, 11.0, 0.0, 0.0) | mat3x2(-12.0, 0.0, 0.0, -12.0, 0.0, 0.0) | mat3x2(-66.0, 0.0, 0.0, -66.0, 0.0, 0.0) | mat3x2(-192.0, 0.0, 0.0, -192.0, 0.0, 0.0) | mat3x2(255.0, 0.0, 0.0, 255.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_mat2x4
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output mat2x4 out0 = [ mat2x4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0) | mat2x4(5.0, 0.0, 0.0, 0.0, 0.0, 5.0, 0.0, 0.0) | mat2x4(8.0, 0.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0) | mat2x4(11.0, 0.0, 0.0, 0.0, 0.0, 11.0, 0.0, 0.0) | mat2x4(-12.0, 0.0, 0.0, 0.0, 0.0, -12.0, 0.0, 0.0) | mat2x4(-66.0, 0.0, 0.0, 0.0, 0.0, -66.0, 0.0, 0.0) | mat2x4(-192.0, 0.0, 0.0, 0.0, 0.0, -192.0, 0.0, 0.0) | mat2x4(255.0, 0.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_mat2x3
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output mat2x3 out0 = [ mat2x3(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(2.0, 0.0, 0.0, 0.0, 2.0, 0.0) | mat2x3(5.0, 0.0, 0.0, 0.0, 5.0, 0.0) | mat2x3(8.0, 0.0, 0.0, 0.0, 8.0, 0.0) | mat2x3(11.0, 0.0, 0.0, 0.0, 11.0, 0.0) | mat2x3(-12.0, 0.0, 0.0, 0.0, -12.0, 0.0) | mat2x3(-66.0, 0.0, 0.0, 0.0, -66.0, 0.0) | mat2x3(-192.0, 0.0, 0.0, 0.0, -192.0, 0.0) | mat2x3(255.0, 0.0, 0.0, 0.0, 255.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_to_mat2
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 1 | 2 | 5 | 8 | 11 | -12 | -66 | -192 | 255 ];
+			output mat2 out0 = [ mat2(0.0, 0.0, 0.0, 0.0) | mat2(1.0, 0.0, 0.0, 1.0) | mat2(2.0, 0.0, 0.0, 2.0) | mat2(5.0, 0.0, 0.0, 5.0) | mat2(8.0, 0.0, 0.0, 8.0) | mat2(11.0, 0.0, 0.0, 11.0) | mat2(-12.0, 0.0, 0.0, -12.0) | mat2(-66.0, 0.0, 0.0, -66.0) | mat2(-192.0, 0.0, 0.0, -192.0) | mat2(255.0, 0.0, 0.0, 255.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_mat4
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output mat4 out0 = [ mat4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat4(2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 2.0) | mat4(3.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 3.0) | mat4(8.0, 0.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0, 0.0, 8.0) | mat4(9.0, 0.0, 0.0, 0.0, 0.0, 9.0, 0.0, 0.0, 0.0, 0.0, 9.0, 0.0, 0.0, 0.0, 0.0, 9.0) | mat4(12.0, 0.0, 0.0, 0.0, 0.0, 12.0, 0.0, 0.0, 0.0, 0.0, 12.0, 0.0, 0.0, 0.0, 0.0, 12.0) | mat4(10.0, 0.0, 0.0, 0.0, 0.0, 10.0, 0.0, 0.0, 0.0, 0.0, 10.0, 0.0, 0.0, 0.0, 0.0, 10.0) | mat4(45.0, 0.0, 0.0, 0.0, 0.0, 45.0, 0.0, 0.0, 0.0, 0.0, 45.0, 0.0, 0.0, 0.0, 0.0, 45.0) | mat4(193.0, 0.0, 0.0, 0.0, 0.0, 193.0, 0.0, 0.0, 0.0, 0.0, 193.0, 0.0, 0.0, 0.0, 0.0, 193.0) | mat4(255.0, 0.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0, 0.0, 255.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_mat4x3
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output mat4x3 out0 = [ mat4x3(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat4x3(2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0) | mat4x3(3.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0) | mat4x3(8.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0) | mat4x3(9.0, 0.0, 0.0, 0.0, 9.0, 0.0, 0.0, 0.0, 9.0, 0.0, 0.0, 0.0) | mat4x3(12.0, 0.0, 0.0, 0.0, 12.0, 0.0, 0.0, 0.0, 12.0, 0.0, 0.0, 0.0) | mat4x3(10.0, 0.0, 0.0, 0.0, 10.0, 0.0, 0.0, 0.0, 10.0, 0.0, 0.0, 0.0) | mat4x3(45.0, 0.0, 0.0, 0.0, 45.0, 0.0, 0.0, 0.0, 45.0, 0.0, 0.0, 0.0) | mat4x3(193.0, 0.0, 0.0, 0.0, 193.0, 0.0, 0.0, 0.0, 193.0, 0.0, 0.0, 0.0) | mat4x3(255.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_mat4x2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output mat4x2 out0 = [ mat4x2(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(2.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(3.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(8.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(9.0, 0.0, 0.0, 9.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(12.0, 0.0, 0.0, 12.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(10.0, 0.0, 0.0, 10.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(45.0, 0.0, 0.0, 45.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(193.0, 0.0, 0.0, 193.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(255.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_mat3x4
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output mat3x4 out0 = [ mat3x4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3x4(2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0) | mat3x4(3.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0) | mat3x4(8.0, 0.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0, 0.0, 8.0, 0.0) | mat3x4(9.0, 0.0, 0.0, 0.0, 0.0, 9.0, 0.0, 0.0, 0.0, 0.0, 9.0, 0.0) | mat3x4(12.0, 0.0, 0.0, 0.0, 0.0, 12.0, 0.0, 0.0, 0.0, 0.0, 12.0, 0.0) | mat3x4(10.0, 0.0, 0.0, 0.0, 0.0, 10.0, 0.0, 0.0, 0.0, 0.0, 10.0, 0.0) | mat3x4(45.0, 0.0, 0.0, 0.0, 0.0, 45.0, 0.0, 0.0, 0.0, 0.0, 45.0, 0.0) | mat3x4(193.0, 0.0, 0.0, 0.0, 0.0, 193.0, 0.0, 0.0, 0.0, 0.0, 193.0, 0.0) | mat3x4(255.0, 0.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0, 0.0, 255.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_mat3
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output mat3 out0 = [ mat3(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3(2.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0, 0.0, 2.0) | mat3(3.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 3.0) | mat3(8.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0, 0.0, 8.0) | mat3(9.0, 0.0, 0.0, 0.0, 9.0, 0.0, 0.0, 0.0, 9.0) | mat3(12.0, 0.0, 0.0, 0.0, 12.0, 0.0, 0.0, 0.0, 12.0) | mat3(10.0, 0.0, 0.0, 0.0, 10.0, 0.0, 0.0, 0.0, 10.0) | mat3(45.0, 0.0, 0.0, 0.0, 45.0, 0.0, 0.0, 0.0, 45.0) | mat3(193.0, 0.0, 0.0, 0.0, 193.0, 0.0, 0.0, 0.0, 193.0) | mat3(255.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0, 0.0, 255.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_mat3x2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output mat3x2 out0 = [ mat3x2(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3x2(2.0, 0.0, 0.0, 2.0, 0.0, 0.0) | mat3x2(3.0, 0.0, 0.0, 3.0, 0.0, 0.0) | mat3x2(8.0, 0.0, 0.0, 8.0, 0.0, 0.0) | mat3x2(9.0, 0.0, 0.0, 9.0, 0.0, 0.0) | mat3x2(12.0, 0.0, 0.0, 12.0, 0.0, 0.0) | mat3x2(10.0, 0.0, 0.0, 10.0, 0.0, 0.0) | mat3x2(45.0, 0.0, 0.0, 45.0, 0.0, 0.0) | mat3x2(193.0, 0.0, 0.0, 193.0, 0.0, 0.0) | mat3x2(255.0, 0.0, 0.0, 255.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_mat2x4
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output mat2x4 out0 = [ mat2x4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat2x4(2.0, 0.0, 0.0, 0.0, 0.0, 2.0, 0.0, 0.0) | mat2x4(3.0, 0.0, 0.0, 0.0, 0.0, 3.0, 0.0, 0.0) | mat2x4(8.0, 0.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.0) | mat2x4(9.0, 0.0, 0.0, 0.0, 0.0, 9.0, 0.0, 0.0) | mat2x4(12.0, 0.0, 0.0, 0.0, 0.0, 12.0, 0.0, 0.0) | mat2x4(10.0, 0.0, 0.0, 0.0, 0.0, 10.0, 0.0, 0.0) | mat2x4(45.0, 0.0, 0.0, 0.0, 0.0, 45.0, 0.0, 0.0) | mat2x4(193.0, 0.0, 0.0, 0.0, 0.0, 193.0, 0.0, 0.0) | mat2x4(255.0, 0.0, 0.0, 0.0, 0.0, 255.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_mat2x3
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output mat2x3 out0 = [ mat2x3(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat2x3(2.0, 0.0, 0.0, 0.0, 2.0, 0.0) | mat2x3(3.0, 0.0, 0.0, 0.0, 3.0, 0.0) | mat2x3(8.0, 0.0, 0.0, 0.0, 8.0, 0.0) | mat2x3(9.0, 0.0, 0.0, 0.0, 9.0, 0.0) | mat2x3(12.0, 0.0, 0.0, 0.0, 12.0, 0.0) | mat2x3(10.0, 0.0, 0.0, 0.0, 10.0, 0.0) | mat2x3(45.0, 0.0, 0.0, 0.0, 45.0, 0.0) | mat2x3(193.0, 0.0, 0.0, 0.0, 193.0, 0.0) | mat2x3(255.0, 0.0, 0.0, 0.0, 255.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_to_mat2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 0 | 2 | 3 | 8 | 9 | 12 | 10 | 45 | 193 | 255 ];
+			output mat2 out0 = [ mat2(0.0, 0.0, 0.0, 0.0) | mat2(2.0, 0.0, 0.0, 2.0) | mat2(3.0, 0.0, 0.0, 3.0) | mat2(8.0, 0.0, 0.0, 8.0) | mat2(9.0, 0.0, 0.0, 9.0) | mat2(12.0, 0.0, 0.0, 12.0) | mat2(10.0, 0.0, 0.0, 10.0) | mat2(45.0, 0.0, 0.0, 45.0) | mat2(193.0, 0.0, 0.0, 193.0) | mat2(255.0, 0.0, 0.0, 255.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_mat4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output mat4 out0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_mat4x3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output mat4x3 out0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_mat4x2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output mat4x2 out0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_mat3x4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output mat3x4 out0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_mat3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output mat3 out0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_mat3x2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output mat3x2 out0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_mat2x4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output mat2x4 out0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_mat2x3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output mat2x3 out0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_to_mat2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			output mat2 out0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+end # scalar_to_matrix
+group matrix_to_matrix "Matrix to Matrix Conversions"
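+	# Note (assumes '#' line comments are accepted by the case-file parser, as used after 'end' above):
+	# per the GLSL ES 3.1 specification, constructing a matrix from another matrix takes each component
+	# from the corresponding column/row of the source where one exists and from the identity matrix
+	# otherwise; constructor arguments in these cases are listed in column-major order.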
+
+	case mat4_to_mat4
+		version 310 es
+		values
+		{
+			input mat4 in0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125, 9.975, -6.542, 0.015625, 9.975) ];
+			output mat4 out0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125, 9.975, -6.542, 0.015625, 9.975) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4_to_mat4x3
+		version 310 es
+		values
+		{
+			input mat4 in0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125, 9.975, -6.542, 0.015625, 9.975) ];
+			output mat4x3 out0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125, 9.975, -6.542, 0.015625) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4_to_mat4x2
+		version 310 es
+		values
+		{
+			input mat4 in0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125, 9.975, -6.542, 0.015625, 9.975) ];
+			output mat4x2 out0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 9.975, -6.542) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4_to_mat3x4
+		version 310 es
+		values
+		{
+			input mat4 in0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125, 9.975, -6.542, 0.015625, 9.975) ];
+			output mat3x4 out0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4_to_mat3
+		version 310 es
+		values
+		{
+			input mat4 in0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125, 9.975, -6.542, 0.015625, 9.975) ];
+			output mat3 out0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4_to_mat3x2
+		version 310 es
+		values
+		{
+			input mat4 in0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125, 9.975, -6.542, 0.015625, 9.975) ];
+			output mat3x2 out0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4_to_mat2x4
+		version 310 es
+		values
+		{
+			input mat4 in0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125, 9.975, -6.542, 0.015625, 9.975) ];
+			output mat2x4 out0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4_to_mat2x3
+		version 310 es
+		values
+		{
+			input mat4 in0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125, 9.975, -6.542, 0.015625, 9.975) ];
+			output mat2x3 out0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4_to_mat2
+		version 310 es
+		values
+		{
+			input mat4 in0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125, 9.975, -6.542, 0.015625, 9.975) ];
+			output mat2 out0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, 32.0, 12.5, 0.0208333333333) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x3_to_mat4
+		version 310 es
+		values
+		{
+			input mat4x3 in0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125, 9.975, -6.542, 0.015625) ];
+			output mat4 out0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, 0.0, 12.5, 0.0208333333333, 0.0625, 0.0, -0.75, -8.425, 0.03125, 0.0, 9.975, -6.542, 0.015625, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x3_to_mat4x3
+		version 310 es
+		values
+		{
+			input mat4x3 in0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125, 9.975, -6.542, 0.015625) ];
+			output mat4x3 out0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125, 9.975, -6.542, 0.015625) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x3_to_mat4x2
+		version 310 es
+		values
+		{
+			input mat4x3 in0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125, 9.975, -6.542, 0.015625) ];
+			output mat4x2 out0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 9.975, -6.542) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x3_to_mat3x4
+		version 310 es
+		values
+		{
+			input mat4x3 in0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125, 9.975, -6.542, 0.015625) ];
+			output mat3x4 out0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, 0.0, 12.5, 0.0208333333333, 0.0625, 0.0, -0.75, -8.425, 0.03125, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x3_to_mat3
+		version 310 es
+		values
+		{
+			input mat4x3 in0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125, 9.975, -6.542, 0.015625) ];
+			output mat3 out0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x3_to_mat3x2
+		version 310 es
+		values
+		{
+			input mat4x3 in0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125, 9.975, -6.542, 0.015625) ];
+			output mat3x2 out0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x3_to_mat2x4
+		version 310 es
+		values
+		{
+			input mat4x3 in0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125, 9.975, -6.542, 0.015625) ];
+			output mat2x4 out0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, 0.0, 12.5, 0.0208333333333, 0.0625, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x3_to_mat2x3
+		version 310 es
+		values
+		{
+			input mat4x3 in0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125, 9.975, -6.542, 0.015625) ];
+			output mat2x3 out0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x3_to_mat2
+		version 310 es
+		values
+		{
+			input mat4x3 in0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125, 9.975, -6.542, 0.015625) ];
+			output mat2 out0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, 32.0, 12.5, 0.0208333333333) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x2_to_mat4
+		version 310 es
+		values
+		{
+			input mat4x2 in0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 9.975, -6.542) ];
+			output mat4 out0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.0, 0.0, 12.5, 0.0208333333333, 0.0, 0.0, -0.75, -8.425, 1.0, 0.0, 9.975, -6.542, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x2_to_mat4x3
+		version 310 es
+		values
+		{
+			input mat4x2 in0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 9.975, -6.542) ];
+			output mat4x3 out0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.0, 12.5, 0.0208333333333, 0.0, -0.75, -8.425, 1.0, 9.975, -6.542, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x2_to_mat4x2
+		version 310 es
+		values
+		{
+			input mat4x2 in0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 9.975, -6.542) ];
+			output mat4x2 out0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 9.975, -6.542) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x2_to_mat3x4
+		version 310 es
+		values
+		{
+			input mat4x2 in0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 9.975, -6.542) ];
+			output mat3x4 out0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.0, 0.0, 12.5, 0.0208333333333, 0.0, 0.0, -0.75, -8.425, 1.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x2_to_mat3
+		version 310 es
+		values
+		{
+			input mat4x2 in0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 9.975, -6.542) ];
+			output mat3 out0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.0, 12.5, 0.0208333333333, 0.0, -0.75, -8.425, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x2_to_mat3x2
+		version 310 es
+		values
+		{
+			input mat4x2 in0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 9.975, -6.542) ];
+			output mat3x2 out0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x2_to_mat2x4
+		version 310 es
+		values
+		{
+			input mat4x2 in0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 9.975, -6.542) ];
+			output mat2x4 out0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.0, 0.0, 12.5, 0.0208333333333, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x2_to_mat2x3
+		version 310 es
+		values
+		{
+			input mat4x2 in0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 9.975, -6.542) ];
+			output mat2x3 out0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.0, 12.5, 0.0208333333333, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat4x2_to_mat2
+		version 310 es
+		values
+		{
+			input mat4x2 in0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 9.975, -6.542) ];
+			output mat2 out0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, 32.0, 12.5, 0.0208333333333) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x4_to_mat4
+		version 310 es
+		values
+		{
+			input mat3x4 in0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125) ];
+			output mat4 out0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125, 0.0, 0.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x4_to_mat4x3
+		version 310 es
+		values
+		{
+			input mat3x4 in0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125) ];
+			output mat4x3 out0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x4_to_mat4x2
+		version 310 es
+		values
+		{
+			input mat3x4 in0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125) ];
+			output mat4x2 out0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x4_to_mat3x4
+		version 310 es
+		values
+		{
+			input mat3x4 in0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125) ];
+			output mat3x4 out0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x4_to_mat3
+		version 310 es
+		values
+		{
+			input mat3x4 in0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125) ];
+			output mat3 out0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x4_to_mat3x2
+		version 310 es
+		values
+		{
+			input mat3x4 in0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125) ];
+			output mat3x2 out0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x4_to_mat2x4
+		version 310 es
+		values
+		{
+			input mat3x4 in0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125) ];
+			output mat2x4 out0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x4_to_mat2x3
+		version 310 es
+		values
+		{
+			input mat3x4 in0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125) ];
+			output mat2x3 out0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x4_to_mat2
+		version 310 es
+		values
+		{
+			input mat3x4 in0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, -0.75, -8.425, 0.03125, -0.0125) ];
+			output mat2 out0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, 32.0, 12.5, 0.0208333333333) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3_to_mat4
+		version 310 es
+		values
+		{
+			input mat3 in0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125) ];
+			output mat4 out0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, 0.0, 12.5, 0.0208333333333, 0.0625, 0.0, -0.75, -8.425, 0.03125, 0.0, 0.0, 0.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3_to_mat4x3
+		version 310 es
+		values
+		{
+			input mat3 in0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125) ];
+			output mat4x3 out0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3_to_mat4x2
+		version 310 es
+		values
+		{
+			input mat3 in0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125) ];
+			output mat4x2 out0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3_to_mat3x4
+		version 310 es
+		values
+		{
+			input mat3 in0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125) ];
+			output mat3x4 out0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, 0.0, 12.5, 0.0208333333333, 0.0625, 0.0, -0.75, -8.425, 0.03125, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3_to_mat3
+		version 310 es
+		values
+		{
+			input mat3 in0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125) ];
+			output mat3 out0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3_to_mat3x2
+		version 310 es
+		values
+		{
+			input mat3 in0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125) ];
+			output mat3x2 out0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3_to_mat2x4
+		version 310 es
+		values
+		{
+			input mat3 in0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125) ];
+			output mat2x4 out0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, 0.0, 12.5, 0.0208333333333, 0.0625, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3_to_mat2x3
+		version 310 es
+		values
+		{
+			input mat3 in0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125) ];
+			output mat2x3 out0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3_to_mat2
+		version 310 es
+		values
+		{
+			input mat3 in0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, -0.75, -8.425, 0.03125) ];
+			output mat2 out0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, 32.0, 12.5, 0.0208333333333) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x2_to_mat4
+		version 310 es
+		values
+		{
+			input mat3x2 in0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+			output mat4 out0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.0, 0.0, 12.5, 0.0208333333333, 0.0, 0.0, -0.75, -8.425, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x2_to_mat4x3
+		version 310 es
+		values
+		{
+			input mat3x2 in0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+			output mat4x3 out0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.0, 12.5, 0.0208333333333, 0.0, -0.75, -8.425, 1.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x2_to_mat4x2
+		version 310 es
+		values
+		{
+			input mat3x2 in0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+			output mat4x2 out0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x2_to_mat3x4
+		version 310 es
+		values
+		{
+			input mat3x2 in0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+			output mat3x4 out0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.0, 0.0, 12.5, 0.0208333333333, 0.0, 0.0, -0.75, -8.425, 1.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x2_to_mat3
+		version 310 es
+		values
+		{
+			input mat3x2 in0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+			output mat3 out0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.0, 12.5, 0.0208333333333, 0.0, -0.75, -8.425, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x2_to_mat3x2
+		version 310 es
+		values
+		{
+			input mat3x2 in0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+			output mat3x2 out0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x2_to_mat2x4
+		version 310 es
+		values
+		{
+			input mat3x2 in0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+			output mat2x4 out0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.0, 0.0, 12.5, 0.0208333333333, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x2_to_mat2x3
+		version 310 es
+		values
+		{
+			input mat3x2 in0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+			output mat2x3 out0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.0, 12.5, 0.0208333333333, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat3x2_to_mat2
+		version 310 es
+		values
+		{
+			input mat3x2 in0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, -0.75, -8.425) ];
+			output mat2 out0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, 32.0, 12.5, 0.0208333333333) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x4_to_mat4
+		version 310 es
+		values
+		{
+			input mat2x4 in0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5) ];
+			output mat4 out0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x4_to_mat4x3
+		version 310 es
+		values
+		{
+			input mat2x4 in0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5) ];
+			output mat4x3 out0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x4_to_mat4x2
+		version 310 es
+		values
+		{
+			input mat2x4 in0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5) ];
+			output mat4x2 out0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x4_to_mat3x4
+		version 310 es
+		values
+		{
+			input mat2x4 in0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5) ];
+			output mat3x4 out0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5, 0.0, 0.0, 1.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x4_to_mat3
+		version 310 es
+		values
+		{
+			input mat2x4 in0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5) ];
+			output mat3 out0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, 0.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x4_to_mat3x2
+		version 310 es
+		values
+		{
+			input mat2x4 in0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5) ];
+			output mat3x2 out0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x4_to_mat2x4
+		version 310 es
+		values
+		{
+			input mat2x4 in0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5) ];
+			output mat2x4 out0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x4_to_mat2x3
+		version 310 es
+		values
+		{
+			input mat2x4 in0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5) ];
+			output mat2x3 out0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x4_to_mat2
+		version 310 es
+		values
+		{
+			input mat2x4 in0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, -6.725, 12.5, 0.0208333333333, 0.0625, -0.5) ];
+			output mat2 out0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, 32.0, 12.5, 0.0208333333333) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x3_to_mat4
+		version 310 es
+		values
+		{
+			input mat2x3 in0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+			output mat4 out0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, 32.0, 0.125, 0.0, 12.5, 0.0208333333333, 0.0625, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x3_to_mat4x3
+		version 310 es
+		values
+		{
+			input mat2x3 in0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+			output mat4x3 out0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x3_to_mat4x2
+		version 310 es
+		values
+		{
+			input mat2x3 in0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+			output mat4x2 out0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, 32.0, 12.5, 0.0208333333333, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x3_to_mat3x4
+		version 310 es
+		values
+		{
+			input mat2x3 in0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+			output mat3x4 out0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, 32.0, 0.125, 0.0, 12.5, 0.0208333333333, 0.0625, 0.0, 0.0, 0.0, 1.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x3_to_mat3
+		version 310 es
+		values
+		{
+			input mat2x3 in0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+			output mat3 out0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625, 0.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x3_to_mat3x2
+		version 310 es
+		values
+		{
+			input mat2x3 in0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+			output mat3x2 out0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, 32.0, 12.5, 0.0208333333333, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x3_to_mat2x4
+		version 310 es
+		values
+		{
+			input mat2x3 in0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+			output mat2x4 out0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, 32.0, 0.125, 0.0, 12.5, 0.0208333333333, 0.0625, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x3_to_mat2x3
+		version 310 es
+		values
+		{
+			input mat2x3 in0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+			output mat2x3 out0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2x3_to_mat2
+		version 310 es
+		values
+		{
+			input mat2x3 in0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, 32.0, 0.125, 12.5, 0.0208333333333, 0.0625) ];
+			output mat2 out0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, 32.0, 12.5, 0.0208333333333) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2_to_mat4
+		version 310 es
+		values
+		{
+			input mat2 in0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(8.0, -24.0, 16.0, -16.0) | mat2(0.125, 0.03125, 0.0625, 0.015625) | mat2(-18.725, -0.0125, -0.5, 19.975) ];
+			output mat4 out0 = [ mat4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, -0.75, 0.0, 0.0, 12.5, 9.975, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(6.5, -0.75, 0.0, 0.0, 12.5, 9.975, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(8.0, -24.0, 0.0, 0.0, 16.0, -16.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(0.125, 0.03125, 0.0, 0.0, 0.0625, 0.015625, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(-18.725, -0.0125, 0.0, 0.0, -0.5, 19.975, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2_to_mat4x3
+		version 310 es
+		values
+		{
+			input mat2 in0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(8.0, -24.0, 16.0, -16.0) | mat2(0.125, 0.03125, 0.0625, 0.015625) | mat2(-18.725, -0.0125, -0.5, 19.975) ];
+			output mat4x3 out0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, -0.75, 0.0, 12.5, 9.975, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(6.5, -0.75, 0.0, 12.5, 9.975, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(8.0, -24.0, 0.0, 16.0, -16.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(0.125, 0.03125, 0.0, 0.0625, 0.015625, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat4x3(-18.725, -0.0125, 0.0, -0.5, 19.975, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2_to_mat4x2
+		version 310 es
+		values
+		{
+			input mat2 in0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(8.0, -24.0, 16.0, -16.0) | mat2(0.125, 0.03125, 0.0625, 0.015625) | mat2(-18.725, -0.0125, -0.5, 19.975) ];
+			output mat4x2 out0 = [ mat4x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, -0.75, 12.5, 9.975, 0.0, 0.0, 0.0, 0.0) | mat4x2(6.5, -0.75, 12.5, 9.975, 0.0, 0.0, 0.0, 0.0) | mat4x2(8.0, -24.0, 16.0, -16.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(0.125, 0.03125, 0.0625, 0.015625, 0.0, 0.0, 0.0, 0.0) | mat4x2(-18.725, -0.0125, -0.5, 19.975, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2_to_mat3x4
+		version 310 es
+		values
+		{
+			input mat2 in0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(8.0, -24.0, 16.0, -16.0) | mat2(0.125, 0.03125, 0.0625, 0.015625) | mat2(-18.725, -0.0125, -0.5, 19.975) ];
+			output mat3x4 out0 = [ mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, -0.75, 0.0, 0.0, 12.5, 9.975, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(6.5, -0.75, 0.0, 0.0, 12.5, 9.975, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(8.0, -24.0, 0.0, 0.0, 16.0, -16.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(0.125, 0.03125, 0.0, 0.0, 0.0625, 0.015625, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(-18.725, -0.0125, 0.0, 0.0, -0.5, 19.975, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2_to_mat3
+		version 310 es
+		values
+		{
+			input mat2 in0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(8.0, -24.0, 16.0, -16.0) | mat2(0.125, 0.03125, 0.0625, 0.015625) | mat2(-18.725, -0.0125, -0.5, 19.975) ];
+			output mat3 out0 = [ mat3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, -0.75, 0.0, 12.5, 9.975, 0.0, 0.0, 0.0, 1.0) | mat3(6.5, -0.75, 0.0, 12.5, 9.975, 0.0, 0.0, 0.0, 1.0) | mat3(8.0, -24.0, 0.0, 16.0, -16.0, 0.0, 0.0, 0.0, 1.0) | mat3(0.125, 0.03125, 0.0, 0.0625, 0.015625, 0.0, 0.0, 0.0, 1.0) | mat3(-18.725, -0.0125, 0.0, -0.5, 19.975, 0.0, 0.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2_to_mat3x2
+		version 310 es
+		values
+		{
+			input mat2 in0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(8.0, -24.0, 16.0, -16.0) | mat2(0.125, 0.03125, 0.0625, 0.015625) | mat2(-18.725, -0.0125, -0.5, 19.975) ];
+			output mat3x2 out0 = [ mat3x2(1.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat3x2(6.5, -0.75, 12.5, 9.975, 0.0, 0.0) | mat3x2(6.5, -0.75, 12.5, 9.975, 0.0, 0.0) | mat3x2(8.0, -24.0, 16.0, -16.0, 0.0, 0.0) | mat3x2(0.125, 0.03125, 0.0625, 0.015625, 0.0, 0.0) | mat3x2(-18.725, -0.0125, -0.5, 19.975, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2_to_mat2x4
+		version 310 es
+		values
+		{
+			input mat2 in0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(8.0, -24.0, 16.0, -16.0) | mat2(0.125, 0.03125, 0.0625, 0.015625) | mat2(-18.725, -0.0125, -0.5, 19.975) ];
+			output mat2x4 out0 = [ mat2x4(1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0) | mat2x4(6.5, -0.75, 0.0, 0.0, 12.5, 9.975, 0.0, 0.0) | mat2x4(6.5, -0.75, 0.0, 0.0, 12.5, 9.975, 0.0, 0.0) | mat2x4(8.0, -24.0, 0.0, 0.0, 16.0, -16.0, 0.0, 0.0) | mat2x4(0.125, 0.03125, 0.0, 0.0, 0.0625, 0.015625, 0.0, 0.0) | mat2x4(-18.725, -0.0125, 0.0, 0.0, -0.5, 19.975, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2_to_mat2x3
+		version 310 es
+		values
+		{
+			input mat2 in0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(8.0, -24.0, 16.0, -16.0) | mat2(0.125, 0.03125, 0.0625, 0.015625) | mat2(-18.725, -0.0125, -0.5, 19.975) ];
+			output mat2x3 out0 = [ mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat2x3(6.5, -0.75, 0.0, 12.5, 9.975, 0.0) | mat2x3(6.5, -0.75, 0.0, 12.5, 9.975, 0.0) | mat2x3(8.0, -24.0, 0.0, 16.0, -16.0, 0.0) | mat2x3(0.125, 0.03125, 0.0, 0.0625, 0.015625, 0.0) | mat2x3(-18.725, -0.0125, 0.0, -0.5, 19.975, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2_to_mat2
+		version 310 es
+		values
+		{
+			input mat2 in0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(8.0, -24.0, 16.0, -16.0) | mat2(0.125, 0.03125, 0.0625, 0.015625) | mat2(-18.725, -0.0125, -0.5, 19.975) ];
+			output mat2 out0 = [ mat2(1.0, 0.0, 0.0, 1.0) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(6.5, -0.75, 12.5, 9.975) | mat2(8.0, -24.0, 16.0, -16.0) | mat2(0.125, 0.03125, 0.0625, 0.015625) | mat2(-18.725, -0.0125, -0.5, 19.975) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0);
+				${OUTPUT}
+			}
+		""
+	end
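+	# The expected outputs in this group follow the GLSL ES matrix-from-matrix
+	# constructor rule: every result element that has a corresponding element in the
+	# argument is copied from it, and all remaining elements come from the identity
+	# matrix. For example:
+	#   mat4(mat2(6.5, -0.75, 12.5, 9.975)) == mat4(6.5, -0.75, 0.0, 0.0,
+	#       12.5, 9.975, 0.0, 0.0,  0.0, 0.0, 1.0, 0.0,  0.0, 0.0, 0.0, 1.0)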
+
+end # matrix_to_matrix
+
+group vector_combine "Vector Combine Constructors"
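+	# These cases exercise the GLSL ES vector constructor rule: vecN(a, b, ...) consumes
+	# the components of its arguments in argument order, converting each component to the
+	# result's component type (float -> int truncates toward zero, any non-zero value ->
+	# true for bool, bool -> 1/0 for numeric types). For example:
+	#   ivec4(vec2(-0.5, -2.25), vec2(-32.0, 64.0)) == ivec4(0, -2, -32, 64)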
+
+	case vec2_vec2_to_vec4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(0.0, 0.5) | vec2(-0.75, -0.0322580645161) ];
+			input vec2 in1 = [ vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(0.0, 0.5) | vec2(-0.5, -2.25) | vec2(-0.75, -0.0322580645161) ];
+			output vec4 out0 = [ vec4(-0.5, -2.25, -32.0, 64.0) | vec4(-32.0, 64.0, 1.0, 1.25) | vec4(1.0, 1.25, 0.0, 0.5) | vec4(0.0, 0.5, -0.5, -2.25) | vec4(-0.75, -0.0322580645161, -0.75, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_vec2_to_ivec4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(0.0, 0.5) | vec2(-0.75, -0.0322580645161) ];
+			input vec2 in1 = [ vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(0.0, 0.5) | vec2(-0.5, -2.25) | vec2(-0.75, -0.0322580645161) ];
+			output ivec4 out0 = [ ivec4(0, -2, -32, 64) | ivec4(-32, 64, 1, 1) | ivec4(1, 1, 0, 0) | ivec4(0, 0, 0, -2) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_vec2_to_bvec4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(0.0, 0.5) | vec2(-0.75, -0.0322580645161) ];
+			input vec2 in1 = [ vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(0.0, 0.5) | vec2(-0.5, -2.25) | vec2(-0.75, -0.0322580645161) ];
+			output bvec4 out0 = [ bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, false, true) | bvec4(false, true, true, true) | bvec4(true, true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_bvec2_to_vec4
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(false, false) | bvec2(true, false) | bvec2(false, true) | bvec2(false, false) | bvec2(true, true) ];
+			input bvec2 in1 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output vec4 out0 = [ vec4(0.0, 0.0, 1.0, 0.0) | vec4(1.0, 0.0, 0.0, 0.0) | vec4(0.0, 1.0, 0.0, 1.0) | vec4(0.0, 0.0, 1.0, 1.0) | vec4(1.0, 1.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_bvec2_to_ivec4
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(false, false) | bvec2(true, false) | bvec2(false, true) | bvec2(false, false) | bvec2(true, true) ];
+			input bvec2 in1 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output ivec4 out0 = [ ivec4(0, 0, 1, 0) | ivec4(1, 0, 0, 0) | ivec4(0, 1, 0, 1) | ivec4(0, 0, 1, 1) | ivec4(1, 1, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_bvec2_to_bvec4
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(false, false) | bvec2(true, false) | bvec2(false, true) | bvec2(false, false) | bvec2(true, true) ];
+			input bvec2 in1 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec4 out0 = [ bvec4(false, false, true, false) | bvec4(true, false, false, false) | bvec4(false, true, false, true) | bvec4(false, false, true, true) | bvec4(true, true, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_float_float_to_vec4
+		version 310 es
+		values
+		{
+			input float in0 = [ 1.0 | 0.0 | -0.5 | -8.25 | 3.5 | -20.125 | 36.8125 | 2.0 ];
+			input float in1 = [ 0.0 | 36.8125 | -8.25 | 2.0 | 3.5 | 1.0 | -20.125 | -0.5 ];
+			input float in2 = [ 3.5 | 36.8125 | -8.25 | 1.0 | 2.0 | 0.0 | -20.125 | -0.5 ];
+			input float in3 = [ 3.5 | 36.8125 | 1.0 | -8.25 | 2.0 | 0.0 | -0.5 | -20.125 ];
+			output vec4 out0 = [ vec4(1.0, 0.0, 3.5, 3.5) | vec4(0.0, 36.8125, 36.8125, 36.8125) | vec4(-0.5, -8.25, -8.25, 1.0) | vec4(-8.25, 2.0, 1.0, -8.25) | vec4(3.5, 3.5, 2.0, 2.0) | vec4(-20.125, 1.0, 0.0, 0.0) | vec4(36.8125, -20.125, -20.125, -0.5) | vec4(2.0, -0.5, -0.5, -20.125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_float_float_to_ivec4
+		version 310 es
+		values
+		{
+			input float in0 = [ 1.0 | 0.0 | -0.5 | -8.25 | 3.5 | -20.125 | 36.8125 | 2.0 ];
+			input float in1 = [ 0.0 | 36.8125 | -8.25 | 2.0 | 3.5 | 1.0 | -20.125 | -0.5 ];
+			input float in2 = [ 3.5 | 36.8125 | -8.25 | 1.0 | 2.0 | 0.0 | -20.125 | -0.5 ];
+			input float in3 = [ 3.5 | 36.8125 | 1.0 | -8.25 | 2.0 | 0.0 | -0.5 | -20.125 ];
+			output ivec4 out0 = [ ivec4(1, 0, 3, 3) | ivec4(0, 36, 36, 36) | ivec4(0, -8, -8, 1) | ivec4(-8, 2, 1, -8) | ivec4(3, 3, 2, 2) | ivec4(-20, 1, 0, 0) | ivec4(36, -20, -20, 0) | ivec4(2, 0, 0, -20) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_float_float_to_bvec4
+		version 310 es
+		values
+		{
+			input float in0 = [ 1.0 | 0.0 | -0.5 | -8.25 | 3.5 | -20.125 | 36.8125 | 2.0 ];
+			input float in1 = [ 0.0 | 36.8125 | -8.25 | 2.0 | 3.5 | 1.0 | -20.125 | -0.5 ];
+			input float in2 = [ 3.5 | 36.8125 | -8.25 | 1.0 | 2.0 | 0.0 | -20.125 | -0.5 ];
+			input float in3 = [ 3.5 | 36.8125 | 1.0 | -8.25 | 2.0 | 0.0 | -0.5 | -20.125 ];
+			output bvec4 out0 = [ bvec4(true, false, true, true) | bvec4(false, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, false, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_int_int_to_vec4
+		version 310 es
+		values
+		{
+			input int in0 = [ -12 | -66 | 2 | 5 | 8 | -192 | 255 | 1 | 0 | 11 ];
+			input int in1 = [ 2 | 5 | -66 | 11 | -192 | 8 | -12 | 1 | 255 | 0 ];
+			input int in2 = [ 11 | 255 | 5 | 8 | 2 | -192 | -12 | -66 | 1 | 0 ];
+			input int in3 = [ -192 | -66 | 8 | -12 | 1 | 2 | 0 | 255 | 5 | 11 ];
+			output vec4 out0 = [ vec4(-12.0, 2.0, 11.0, -192.0) | vec4(-66.0, 5.0, 255.0, -66.0) | vec4(2.0, -66.0, 5.0, 8.0) | vec4(5.0, 11.0, 8.0, -12.0) | vec4(8.0, -192.0, 2.0, 1.0) | vec4(-192.0, 8.0, -192.0, 2.0) | vec4(255.0, -12.0, -12.0, 0.0) | vec4(1.0, 1.0, -66.0, 255.0) | vec4(0.0, 255.0, 1.0, 5.0) | vec4(11.0, 0.0, 0.0, 11.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_int_int_to_ivec4
+		version 310 es
+		values
+		{
+			input int in0 = [ -12 | -66 | 2 | 5 | 8 | -192 | 255 | 1 | 0 | 11 ];
+			input int in1 = [ 2 | 5 | -66 | 11 | -192 | 8 | -12 | 1 | 255 | 0 ];
+			input int in2 = [ 11 | 255 | 5 | 8 | 2 | -192 | -12 | -66 | 1 | 0 ];
+			input int in3 = [ -192 | -66 | 8 | -12 | 1 | 2 | 0 | 255 | 5 | 11 ];
+			output ivec4 out0 = [ ivec4(-12, 2, 11, -192) | ivec4(-66, 5, 255, -66) | ivec4(2, -66, 5, 8) | ivec4(5, 11, 8, -12) | ivec4(8, -192, 2, 1) | ivec4(-192, 8, -192, 2) | ivec4(255, -12, -12, 0) | ivec4(1, 1, -66, 255) | ivec4(0, 255, 1, 5) | ivec4(11, 0, 0, 11) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_int_int_to_bvec4
+		version 310 es
+		values
+		{
+			input int in0 = [ -12 | -66 | 2 | 5 | 8 | -192 | 255 | 1 | 0 | 11 ];
+			input int in1 = [ 2 | 5 | -66 | 11 | -192 | 8 | -12 | 1 | 255 | 0 ];
+			input int in2 = [ 11 | 255 | 5 | 8 | 2 | -192 | -12 | -66 | 1 | 0 ];
+			input int in3 = [ -192 | -66 | 8 | -12 | 1 | 2 | 0 | 255 | 5 | 11 ];
+			output bvec4 out0 = [ bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, false) | bvec4(true, true, true, true) | bvec4(false, true, true, true) | bvec4(true, false, false, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint_uint_uint_to_vec4
+		version 310 es
+		values
+		{
+			input uint in0 = [ 193 | 3 | 255 | 8 | 10 | 9 | 2 | 12 | 0 | 45 ];
+			input uint in1 = [ 12 | 45 | 193 | 2 | 8 | 255 | 0 | 3 | 9 | 10 ];
+			input uint in2 = [ 9 | 8 | 12 | 2 | 255 | 45 | 3 | 0 | 193 | 10 ];
+			input uint in3 = [ 3 | 9 | 12 | 2 | 255 | 193 | 0 | 10 | 45 | 8 ];
+			output vec4 out0 = [ vec4(193.0, 12.0, 9.0, 3.0) | vec4(3.0, 45.0, 8.0, 9.0) | vec4(255.0, 193.0, 12.0, 12.0) | vec4(8.0, 2.0, 2.0, 2.0) | vec4(10.0, 8.0, 255.0, 255.0) | vec4(9.0, 255.0, 45.0, 193.0) | vec4(2.0, 0.0, 3.0, 0.0) | vec4(12.0, 3.0, 0.0, 10.0) | vec4(0.0, 9.0, 193.0, 45.0) | vec4(45.0, 10.0, 10.0, 8.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint_uint_uint_to_ivec4
+		version 310 es
+		values
+		{
+			input uint in0 = [ 193 | 3 | 255 | 8 | 10 | 9 | 2 | 12 | 0 | 45 ];
+			input uint in1 = [ 12 | 45 | 193 | 2 | 8 | 255 | 0 | 3 | 9 | 10 ];
+			input uint in2 = [ 9 | 8 | 12 | 2 | 255 | 45 | 3 | 0 | 193 | 10 ];
+			input uint in3 = [ 3 | 9 | 12 | 2 | 255 | 193 | 0 | 10 | 45 | 8 ];
+			output ivec4 out0 = [ ivec4(193, 12, 9, 3) | ivec4(3, 45, 8, 9) | ivec4(255, 193, 12, 12) | ivec4(8, 2, 2, 2) | ivec4(10, 8, 255, 255) | ivec4(9, 255, 45, 193) | ivec4(2, 0, 3, 0) | ivec4(12, 3, 0, 10) | ivec4(0, 9, 193, 45) | ivec4(45, 10, 10, 8) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint_uint_uint_to_bvec4
+		version 310 es
+		values
+		{
+			input uint in0 = [ 193 | 3 | 255 | 8 | 10 | 9 | 2 | 12 | 0 | 45 ];
+			input uint in1 = [ 12 | 45 | 193 | 2 | 8 | 255 | 0 | 3 | 9 | 10 ];
+			input uint in2 = [ 9 | 8 | 12 | 2 | 255 | 45 | 3 | 0 | 193 | 10 ];
+			input uint in3 = [ 3 | 9 | 12 | 2 | 255 | 193 | 0 | 10 | 45 | 8 ];
+			output bvec4 out0 = [ bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, false, true, false) | bvec4(true, true, false, true) | bvec4(false, true, true, true) | bvec4(true, true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_bool_bool_to_vec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			input bool in1 = [ true | false ];
+			input bool in2 = [ false | true ];
+			input bool in3 = [ false | true ];
+			output vec4 out0 = [ vec4(1.0, 1.0, 0.0, 0.0) | vec4(0.0, 0.0, 1.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_bool_bool_to_ivec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			input bool in1 = [ true | false ];
+			input bool in2 = [ false | true ];
+			input bool in3 = [ false | true ];
+			output ivec4 out0 = [ ivec4(1, 1, 0, 0) | ivec4(0, 0, 1, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_bool_bool_to_bvec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			input bool in1 = [ true | false ];
+			input bool in2 = [ false | true ];
+			input bool in3 = [ false | true ];
+			output bvec4 out0 = [ bvec4(true, true, false, false) | bvec4(false, false, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_bool_to_vec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | false | false | false | true | false | false | true | true ];
+			input float in1 = [ 36.8125 | 0.0 | -8.25 | 1.0 | -0.5 | 0.0 | 2.0 | -20.125 | 3.5 | 1.0 ];
+			input int in2 = [ -66 | 2 | 255 | 11 | 1 | 8 | -192 | -12 | 0 | 5 ];
+			input bool in3 = [ true | true | false | false | true | true | false | true | false | false ];
+			output vec4 out0 = [ vec4(1.0, 36.8125, -66.0, 1.0) | vec4(1.0, 0.0, 2.0, 1.0) | vec4(0.0, -8.25, 255.0, 0.0) | vec4(0.0, 1.0, 11.0, 0.0) | vec4(0.0, -0.5, 1.0, 1.0) | vec4(1.0, 0.0, 8.0, 1.0) | vec4(0.0, 2.0, -192.0, 0.0) | vec4(0.0, -20.125, -12.0, 1.0) | vec4(1.0, 3.5, 0.0, 0.0) | vec4(1.0, 1.0, 5.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_bool_to_ivec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | false | false | false | true | false | false | true | true ];
+			input float in1 = [ 36.8125 | 0.0 | -8.25 | 1.0 | -0.5 | 0.0 | 2.0 | -20.125 | 3.5 | 1.0 ];
+			input int in2 = [ -66 | 2 | 255 | 11 | 1 | 8 | -192 | -12 | 0 | 5 ];
+			input bool in3 = [ true | true | false | false | true | true | false | true | false | false ];
+			output ivec4 out0 = [ ivec4(1, 36, -66, 1) | ivec4(1, 0, 2, 1) | ivec4(0, -8, 255, 0) | ivec4(0, 1, 11, 0) | ivec4(0, 0, 1, 1) | ivec4(1, 0, 8, 1) | ivec4(0, 2, -192, 0) | ivec4(0, -20, -12, 1) | ivec4(1, 3, 0, 0) | ivec4(1, 1, 5, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_bool_to_bvec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | false | false | false | true | false | false | true | true ];
+			input float in1 = [ 36.8125 | 0.0 | -8.25 | 1.0 | -0.5 | 0.0 | 2.0 | -20.125 | 3.5 | 1.0 ];
+			input int in2 = [ -66 | 2 | 255 | 11 | 1 | 8 | -192 | -12 | 0 | 5 ];
+			input bool in3 = [ true | true | false | false | true | true | false | true | false | false ];
+			output bvec4 out0 = [ bvec4(true, true, true, true) | bvec4(true, false, true, true) | bvec4(false, true, true, false) | bvec4(false, true, true, false) | bvec4(false, true, true, true) | bvec4(true, false, true, true) | bvec4(false, true, true, false) | bvec4(false, true, true, true) | bvec4(true, true, false, false) | bvec4(true, true, true, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_ivec2_to_vec4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(-0.75, -0.0322580645161) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) ];
+			input ivec2 in1 = [ ivec2(0, 0) | ivec2(0, -2) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(1, 1) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.0, 0.0) | vec4(-0.75, -0.0322580645161, 0.0, -2.0) | vec4(-0.5, -2.25, 0.0, 0.0) | vec4(-32.0, 64.0, -32.0, 64.0) | vec4(1.0, 1.25, 1.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_ivec2_to_ivec4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(-0.75, -0.0322580645161) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) ];
+			input ivec2 in1 = [ ivec2(0, 0) | ivec2(0, -2) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(1, 1) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(0, 0, 0, -2) | ivec4(0, -2, 0, 0) | ivec4(-32, 64, -32, 64) | ivec4(1, 1, 1, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_ivec2_to_bvec4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(-0.75, -0.0322580645161) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) ];
+			input ivec2 in1 = [ ivec2(0, 0) | ivec2(0, -2) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(1, 1) ];
+			output bvec4 out0 = [ bvec4(false, true, false, false) | bvec4(true, true, false, true) | bvec4(true, true, false, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_bvec2_to_vec4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(-0.75, -0.0322580645161) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(0.0, 0.5) | vec2(-0.5, -2.25) ];
+			input bvec2 in1 = [ bvec2(false, true) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(true, false) ];
+			output vec4 out0 = [ vec4(-0.75, -0.0322580645161, 0.0, 1.0) | vec4(-32.0, 64.0, 0.0, 0.0) | vec4(1.0, 1.25, 0.0, 0.0) | vec4(0.0, 0.5, 1.0, 1.0) | vec4(-0.5, -2.25, 1.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_bvec2_to_ivec4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(-0.75, -0.0322580645161) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(0.0, 0.5) | vec2(-0.5, -2.25) ];
+			input bvec2 in1 = [ bvec2(false, true) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(true, false) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 1) | ivec4(-32, 64, 0, 0) | ivec4(1, 1, 0, 0) | ivec4(0, 0, 1, 1) | ivec4(0, -2, 1, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_bvec2_to_bvec4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(-0.75, -0.0322580645161) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(0.0, 0.5) | vec2(-0.5, -2.25) ];
+			input bvec2 in1 = [ bvec2(false, true) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(true, false) ];
+			output bvec4 out0 = [ bvec4(true, true, false, true) | bvec4(true, true, false, false) | bvec4(true, true, false, false) | bvec4(false, true, true, true) | bvec4(true, true, true, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_float_to_vec4
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, false, false) | bvec3(false, true, false) ];
+			input float in1 = [ -0.5 | 3.5 | 2.0 | 0.0 | -8.25 | 1.0 | 36.8125 | -20.125 ];
+			output vec4 out0 = [ vec4(1.0, 0.0, 0.0, -0.5) | vec4(0.0, 0.0, 0.0, 3.5) | vec4(0.0, 1.0, 0.0, 2.0) | vec4(1.0, 1.0, 1.0, 0.0) | vec4(0.0, 0.0, 0.0, -8.25) | vec4(0.0, 0.0, 0.0, 1.0) | vec4(1.0, 0.0, 0.0, 36.8125) | vec4(0.0, 1.0, 0.0, -20.125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_float_to_ivec4
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, false, false) | bvec3(false, true, false) ];
+			input float in1 = [ -0.5 | 3.5 | 2.0 | 0.0 | -8.25 | 1.0 | 36.8125 | -20.125 ];
+			output ivec4 out0 = [ ivec4(1, 0, 0, 0) | ivec4(0, 0, 0, 3) | ivec4(0, 1, 0, 2) | ivec4(1, 1, 1, 0) | ivec4(0, 0, 0, -8) | ivec4(0, 0, 0, 1) | ivec4(1, 0, 0, 36) | ivec4(0, 1, 0, -20) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_float_to_bvec4
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, false, false) | bvec3(false, true, false) ];
+			input float in1 = [ -0.5 | 3.5 | 2.0 | 0.0 | -8.25 | 1.0 | 36.8125 | -20.125 ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, true) | bvec4(true, true, true, false) | bvec4(false, false, false, true) | bvec4(false, false, false, true) | bvec4(true, false, false, true) | bvec4(false, true, false, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_float_to_vec4
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(0.0, 0.5, 0.75) | vec3(-0.5, -2.25, -4.875) | vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) ];
+			input float in1 = [ -0.5 | 0.0 | 3.5 | -20.125 | 2.0 | -8.25 | 1.0 | 36.8125 ];
+			output vec4 out0 = [ vec4(-32.0, 64.0, -51.0, -0.5) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.0) | vec4(1.0, 1.25, 1.125, 3.5) | vec4(-0.5, -2.25, -4.875, -20.125) | vec4(0.0, 0.5, 0.75, 2.0) | vec4(-0.5, -2.25, -4.875, -8.25) | vec4(0.0, 0.5, 0.75, 1.0) | vec4(1.0, 1.25, 1.125, 36.8125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_float_to_ivec4
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(0.0, 0.5, 0.75) | vec3(-0.5, -2.25, -4.875) | vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) ];
+			input float in1 = [ -0.5 | 0.0 | 3.5 | -20.125 | 2.0 | -8.25 | 1.0 | 36.8125 ];
+			output ivec4 out0 = [ ivec4(-32, 64, -51, 0) | ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 3) | ivec4(0, -2, -4, -20) | ivec4(0, 0, 0, 2) | ivec4(0, -2, -4, -8) | ivec4(0, 0, 0, 1) | ivec4(1, 1, 1, 36) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_float_to_bvec4
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(0.0, 0.5, 0.75) | vec3(-0.5, -2.25, -4.875) | vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) ];
+			input float in1 = [ -0.5 | 0.0 | 3.5 | -20.125 | 2.0 | -8.25 | 1.0 | 36.8125 ];
+			output bvec4 out0 = [ bvec4(true, true, true, true) | bvec4(true, true, true, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(false, true, true, true) | bvec4(true, true, true, true) | bvec4(false, true, true, true) | bvec4(true, true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_ivec2_int_to_vec4
+		version 310 es
+		values
+		{
+			input int in0 = [ -12 | 11 | 8 | 255 | 0 | 1 | -66 | 2 | -192 | 5 ];
+			input ivec2 in1 = [ ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, -2) | ivec2(1, 1) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(1, 1) | ivec2(0, 0) | ivec2(0, 0) ];
+			input int in2 = [ 1 | 11 | 2 | -66 | -192 | 0 | -12 | 255 | 5 | 8 ];
+			output vec4 out0 = [ vec4(-12.0, 0.0, -2.0, 1.0) | vec4(11.0, -32.0, 64.0, 11.0) | vec4(8.0, 0.0, 0.0, 2.0) | vec4(255.0, 0.0, -2.0, -66.0) | vec4(0.0, 1.0, 1.0, -192.0) | vec4(1.0, 0.0, 0.0, 0.0) | vec4(-66.0, -32.0, 64.0, -12.0) | vec4(2.0, 1.0, 1.0, 255.0) | vec4(-192.0, 0.0, 0.0, 5.0) | vec4(5.0, 0.0, 0.0, 8.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_ivec2_int_to_ivec4
+		version 310 es
+		values
+		{
+			input int in0 = [ -12 | 11 | 8 | 255 | 0 | 1 | -66 | 2 | -192 | 5 ];
+			input ivec2 in1 = [ ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, -2) | ivec2(1, 1) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(1, 1) | ivec2(0, 0) | ivec2(0, 0) ];
+			input int in2 = [ 1 | 11 | 2 | -66 | -192 | 0 | -12 | 255 | 5 | 8 ];
+			output ivec4 out0 = [ ivec4(-12, 0, -2, 1) | ivec4(11, -32, 64, 11) | ivec4(8, 0, 0, 2) | ivec4(255, 0, -2, -66) | ivec4(0, 1, 1, -192) | ivec4(1, 0, 0, 0) | ivec4(-66, -32, 64, -12) | ivec4(2, 1, 1, 255) | ivec4(-192, 0, 0, 5) | ivec4(5, 0, 0, 8) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_ivec2_int_to_bvec4
+		version 310 es
+		values
+		{
+			input int in0 = [ -12 | 11 | 8 | 255 | 0 | 1 | -66 | 2 | -192 | 5 ];
+			input ivec2 in1 = [ ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, -2) | ivec2(1, 1) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(1, 1) | ivec2(0, 0) | ivec2(0, 0) ];
+			input int in2 = [ 1 | 11 | 2 | -66 | -192 | 0 | -12 | 255 | 5 | 8 ];
+			output bvec4 out0 = [ bvec4(true, false, true, true) | bvec4(true, true, true, true) | bvec4(true, false, false, true) | bvec4(true, false, true, true) | bvec4(false, true, true, true) | bvec4(true, false, false, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, false, false, true) | bvec4(true, false, false, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_ivec2_to_vec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false | false | false | true | true | true | false ];
+			input float in1 = [ 1.0 | 0.0 | 2.0 | 3.5 | -20.125 | -0.5 | 36.8125 | -8.25 ];
+			input ivec2 in2 = [ ivec2(1, 1) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, 0) | ivec2(0, -2) | ivec2(0, -2) | ivec2(0, 0) | ivec2(1, 1) ];
+			output vec4 out0 = [ vec4(1.0, 1.0, 1.0, 1.0) | vec4(0.0, 0.0, -32.0, 64.0) | vec4(0.0, 2.0, 0.0, 0.0) | vec4(0.0, 3.5, 0.0, 0.0) | vec4(1.0, -20.125, 0.0, -2.0) | vec4(1.0, -0.5, 0.0, -2.0) | vec4(1.0, 36.8125, 0.0, 0.0) | vec4(0.0, -8.25, 1.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_ivec2_to_ivec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false | false | false | true | true | true | false ];
+			input float in1 = [ 1.0 | 0.0 | 2.0 | 3.5 | -20.125 | -0.5 | 36.8125 | -8.25 ];
+			input ivec2 in2 = [ ivec2(1, 1) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, 0) | ivec2(0, -2) | ivec2(0, -2) | ivec2(0, 0) | ivec2(1, 1) ];
+			output ivec4 out0 = [ ivec4(1, 1, 1, 1) | ivec4(0, 0, -32, 64) | ivec4(0, 2, 0, 0) | ivec4(0, 3, 0, 0) | ivec4(1, -20, 0, -2) | ivec4(1, 0, 0, -2) | ivec4(1, 36, 0, 0) | ivec4(0, -8, 1, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_ivec2_to_bvec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false | false | false | true | true | true | false ];
+			input float in1 = [ 1.0 | 0.0 | 2.0 | 3.5 | -20.125 | -0.5 | 36.8125 | -8.25 ];
+			input ivec2 in2 = [ ivec2(1, 1) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, 0) | ivec2(0, -2) | ivec2(0, -2) | ivec2(0, 0) | ivec2(1, 1) ];
+			output bvec4 out0 = [ bvec4(true, true, true, true) | bvec4(false, false, true, true) | bvec4(false, true, false, false) | bvec4(false, true, false, false) | bvec4(true, true, false, true) | bvec4(true, true, false, true) | bvec4(true, true, false, false) | bvec4(false, true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_uvec3_to_vec4
+		version 310 es
+		values
+		{
+			input float in0 = [ 1.0 | -20.125 | -8.25 | -0.5 | 0.0 | 2.0 | 3.5 | 36.8125 ];
+			input uvec3 in1 = [ uvec3(0, 0, 0) | uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) ];
+			output vec4 out0 = [ vec4(1.0, 0.0, 0.0, 0.0) | vec4(-20.125, 0.0, 0.0, 0.0) | vec4(-8.25, 1.0, 1.0, 1.0) | vec4(-0.5, 0.0, 2.0, 4.0) | vec4(0.0, 0.0, 0.0, 0.0) | vec4(2.0, 1.0, 1.0, 1.0) | vec4(3.5, 0.0, 2.0, 4.0) | vec4(36.8125, 32.0, 64.0, 51.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_uvec3_to_ivec4
+		version 310 es
+		values
+		{
+			input float in0 = [ 1.0 | -20.125 | -8.25 | -0.5 | 0.0 | 2.0 | 3.5 | 36.8125 ];
+			input uvec3 in1 = [ uvec3(0, 0, 0) | uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) ];
+			output ivec4 out0 = [ ivec4(1, 0, 0, 0) | ivec4(-20, 0, 0, 0) | ivec4(-8, 1, 1, 1) | ivec4(0, 0, 2, 4) | ivec4(0, 0, 0, 0) | ivec4(2, 1, 1, 1) | ivec4(3, 0, 2, 4) | ivec4(36, 32, 64, 51) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_uvec3_to_bvec4
+		version 310 es
+		values
+		{
+			input float in0 = [ 1.0 | -20.125 | -8.25 | -0.5 | 0.0 | 2.0 | 3.5 | 36.8125 ];
+			input uvec3 in1 = [ uvec3(0, 0, 0) | uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(0, 0, 0) | uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(32, 64, 51) ];
+			output bvec4 out0 = [ bvec4(true, false, false, false) | bvec4(true, false, false, false) | bvec4(true, true, true, true) | bvec4(true, false, true, true) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(true, false, true, true) | bvec4(true, true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_uvec2_bool_to_vec4
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | -192 | -66 | -12 | 0 | 2 | 11 | 8 | 255 | 5 ];
+			input uvec2 in1 = [ uvec2(0, 2) | uvec2(1, 1) | uvec2(0, 0) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) | uvec2(32, 64) | uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 0) ];
+			input bool in2 = [ false | false | true | true | true | true | true | false | false | false ];
+			output vec4 out0 = [ vec4(1.0, 0.0, 2.0, 0.0) | vec4(-192.0, 1.0, 1.0, 0.0) | vec4(-66.0, 0.0, 0.0, 1.0) | vec4(-12.0, 0.0, 2.0, 1.0) | vec4(0.0, 32.0, 64.0, 1.0) | vec4(2.0, 0.0, 0.0, 1.0) | vec4(11.0, 32.0, 64.0, 1.0) | vec4(8.0, 0.0, 0.0, 0.0) | vec4(255.0, 1.0, 1.0, 0.0) | vec4(5.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_uvec2_bool_to_ivec4
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | -192 | -66 | -12 | 0 | 2 | 11 | 8 | 255 | 5 ];
+			input uvec2 in1 = [ uvec2(0, 2) | uvec2(1, 1) | uvec2(0, 0) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) | uvec2(32, 64) | uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 0) ];
+			input bool in2 = [ false | false | true | true | true | true | true | false | false | false ];
+			output ivec4 out0 = [ ivec4(1, 0, 2, 0) | ivec4(-192, 1, 1, 0) | ivec4(-66, 0, 0, 1) | ivec4(-12, 0, 2, 1) | ivec4(0, 32, 64, 1) | ivec4(2, 0, 0, 1) | ivec4(11, 32, 64, 1) | ivec4(8, 0, 0, 0) | ivec4(255, 1, 1, 0) | ivec4(5, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_uvec2_bool_to_bvec4
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | -192 | -66 | -12 | 0 | 2 | 11 | 8 | 255 | 5 ];
+			input uvec2 in1 = [ uvec2(0, 2) | uvec2(1, 1) | uvec2(0, 0) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) | uvec2(32, 64) | uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 0) ];
+			input bool in2 = [ false | false | true | true | true | true | true | false | false | false ];
+			output bvec4 out0 = [ bvec4(true, false, true, false) | bvec4(true, true, true, false) | bvec4(true, false, false, true) | bvec4(true, false, true, true) | bvec4(false, true, true, true) | bvec4(true, false, false, true) | bvec4(true, true, true, true) | bvec4(true, false, false, false) | bvec4(true, true, true, false) | bvec4(true, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_vec2_to_uvec4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(32.0, 64.0) | vec2(0.75, 0.0322580645161) | vec2(0.0, 0.5) | vec2(0.5, 2.25) | vec2(1.0, 1.25) ];
+			input vec2 in1 = [ vec2(0.5, 2.25) | vec2(1.0, 1.25) | vec2(32.0, 64.0) | vec2(0.0, 0.5) | vec2(0.75, 0.0322580645161) ];
+			output uvec4 out0 = [ uvec4(32, 64, 0, 2) | uvec4(0, 0, 1, 1) | uvec4(0, 0, 32, 64) | uvec4(0, 2, 0, 0) | uvec4(1, 1, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_bvec2_to_uvec4
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) | bvec2(false, false) ];
+			input bvec2 in1 = [ bvec2(true, true) | bvec2(false, false) | bvec2(true, false) | bvec2(false, true) | bvec2(false, false) ];
+			output uvec4 out0 = [ uvec4(1, 0, 1, 1) | uvec4(0, 1, 0, 0) | uvec4(1, 1, 1, 0) | uvec4(0, 0, 0, 1) | uvec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_float_float_to_uvec4
+		version 310 es
+		values
+		{
+			input float in0 = [ 2.0 | 36.8125 | 0.0 | 20.125 | 1.0 | 0.5 | 8.25 | 3.5 ];
+			input float in1 = [ 1.0 | 8.25 | 2.0 | 20.125 | 36.8125 | 0.0 | 0.5 | 3.5 ];
+			input float in2 = [ 0.0 | 8.25 | 2.0 | 1.0 | 36.8125 | 20.125 | 0.5 | 3.5 ];
+			input float in3 = [ 1.0 | 8.25 | 0.0 | 2.0 | 20.125 | 3.5 | 0.5 | 36.8125 ];
+			output uvec4 out0 = [ uvec4(2, 1, 0, 1) | uvec4(36, 8, 8, 8) | uvec4(0, 2, 2, 0) | uvec4(20, 20, 1, 2) | uvec4(1, 36, 36, 20) | uvec4(0, 0, 20, 3) | uvec4(8, 0, 0, 0) | uvec4(3, 3, 3, 36) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_int_int_to_uvec4
+		version 310 es
+		values
+		{
+			input int in0 = [ 192 | 2 | 12 | 11 | 0 | 8 | 5 | 66 | 1 | 255 ];
+			input int in1 = [ 66 | 12 | 5 | 1 | 0 | 192 | 8 | 255 | 11 | 2 ];
+			input int in2 = [ 192 | 5 | 1 | 66 | 255 | 11 | 8 | 12 | 2 | 0 ];
+			input int in3 = [ 255 | 5 | 11 | 12 | 2 | 1 | 66 | 0 | 8 | 192 ];
+			output uvec4 out0 = [ uvec4(192, 66, 192, 255) | uvec4(2, 12, 5, 5) | uvec4(12, 5, 1, 11) | uvec4(11, 1, 66, 12) | uvec4(0, 0, 255, 2) | uvec4(8, 192, 11, 1) | uvec4(5, 8, 8, 66) | uvec4(66, 255, 12, 0) | uvec4(1, 11, 2, 8) | uvec4(255, 2, 0, 192) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint_uint_uint_to_uvec4
+		version 310 es
+		values
+		{
+			input uint in0 = [ 255 | 8 | 3 | 193 | 45 | 2 | 0 | 12 | 9 | 10 ];
+			input uint in1 = [ 255 | 45 | 0 | 12 | 2 | 10 | 8 | 9 | 193 | 3 ];
+			input uint in2 = [ 3 | 0 | 2 | 9 | 12 | 10 | 255 | 45 | 193 | 8 ];
+			input uint in3 = [ 2 | 255 | 10 | 193 | 8 | 12 | 3 | 9 | 0 | 45 ];
+			output uvec4 out0 = [ uvec4(255, 255, 3, 2) | uvec4(8, 45, 0, 255) | uvec4(3, 0, 2, 10) | uvec4(193, 12, 9, 193) | uvec4(45, 2, 12, 8) | uvec4(2, 10, 10, 12) | uvec4(0, 8, 255, 3) | uvec4(12, 9, 45, 9) | uvec4(9, 193, 193, 0) | uvec4(10, 3, 8, 45) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_bool_bool_to_uvec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ false | true ];
+			input bool in1 = [ false | true ];
+			input bool in2 = [ true | false ];
+			input bool in3 = [ true | false ];
+			output uvec4 out0 = [ uvec4(0, 0, 1, 1) | uvec4(1, 1, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_bool_to_uvec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false | false | true | false | true | true | true | false | false ];
+			input float in1 = [ 20.125 | 0.0 | 1.0 | 3.5 | 1.0 | 0.0 | 0.5 | 8.25 | 2.0 | 36.8125 ];
+			input int in2 = [ 66 | 192 | 1 | 2 | 5 | 11 | 8 | 12 | 255 | 0 ];
+			input bool in3 = [ true | true | true | true | false | true | false | false | false | false ];
+			output uvec4 out0 = [ uvec4(1, 20, 66, 1) | uvec4(0, 0, 192, 1) | uvec4(0, 1, 1, 1) | uvec4(1, 3, 2, 1) | uvec4(0, 1, 5, 0) | uvec4(1, 0, 11, 1) | uvec4(1, 0, 8, 0) | uvec4(1, 8, 12, 0) | uvec4(0, 2, 255, 0) | uvec4(0, 36, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_ivec2_to_uvec4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.5, 2.25) | vec2(1.0, 1.25) | vec2(32.0, 64.0) | vec2(0.75, 0.0322580645161) | vec2(0.0, 0.5) ];
+			input ivec2 in1 = [ ivec2(1, 1) | ivec2(0, 2) | ivec2(32, 64) | ivec2(0, 0) | ivec2(0, 0) ];
+			output uvec4 out0 = [ uvec4(0, 2, 1, 1) | uvec4(1, 1, 0, 2) | uvec4(32, 64, 32, 64) | uvec4(0, 0, 0, 0) | uvec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_bvec2_to_uvec4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(32.0, 64.0) | vec2(0.75, 0.0322580645161) | vec2(0.5, 2.25) | vec2(0.0, 0.5) | vec2(1.0, 1.25) ];
+			input bvec2 in1 = [ bvec2(false, false) | bvec2(false, false) | bvec2(true, false) | bvec2(false, true) | bvec2(true, true) ];
+			output uvec4 out0 = [ uvec4(32, 64, 0, 0) | uvec4(0, 0, 0, 0) | uvec4(0, 2, 1, 0) | uvec4(0, 0, 0, 1) | uvec4(1, 1, 1, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_float_to_uvec4
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(true, true, true) | bvec3(false, true, false) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, false, false) | bvec3(false, false, false) ];
+			input float in1 = [ 2.0 | 0.0 | 36.8125 | 0.5 | 1.0 | 8.25 | 3.5 | 20.125 ];
+			output uvec4 out0 = [ uvec4(1, 0, 0, 2) | uvec4(1, 1, 1, 0) | uvec4(0, 1, 0, 36) | uvec4(0, 0, 0, 0) | uvec4(0, 0, 0, 1) | uvec4(0, 1, 0, 8) | uvec4(1, 0, 0, 3) | uvec4(0, 0, 0, 20) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_float_to_uvec4
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.5, 2.25, 4.875) | vec3(1.0, 1.25, 1.125) | vec3(0.0, 0.5, 0.75) | vec3(0.5, 2.25, 4.875) | vec3(32.0, 64.0, 51.0) | vec3(1.0, 1.25, 1.125) | vec3(0.75, 0.0322580645161, 0.0526315789474) | vec3(0.0, 0.5, 0.75) ];
+			input float in1 = [ 8.25 | 1.0 | 0.0 | 36.8125 | 2.0 | 0.5 | 20.125 | 3.5 ];
+			output uvec4 out0 = [ uvec4(0, 2, 4, 8) | uvec4(1, 1, 1, 1) | uvec4(0, 0, 0, 0) | uvec4(0, 2, 4, 36) | uvec4(32, 64, 51, 2) | uvec4(1, 1, 1, 0) | uvec4(0, 0, 0, 20) | uvec4(0, 0, 0, 3) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_ivec2_int_to_uvec4
+		version 310 es
+		values
+		{
+			input int in0 = [ 8 | 1 | 11 | 192 | 255 | 12 | 66 | 2 | 0 | 5 ];
+			input ivec2 in1 = [ ivec2(1, 1) | ivec2(0, 0) | ivec2(1, 1) | ivec2(32, 64) | ivec2(0, 2) | ivec2(0, 2) | ivec2(0, 0) | ivec2(0, 0) | ivec2(32, 64) | ivec2(0, 0) ];
+			input int in2 = [ 2 | 192 | 12 | 0 | 8 | 1 | 66 | 255 | 11 | 5 ];
+			output uvec4 out0 = [ uvec4(8, 1, 1, 2) | uvec4(1, 0, 0, 192) | uvec4(11, 1, 1, 12) | uvec4(192, 32, 64, 0) | uvec4(255, 0, 2, 8) | uvec4(12, 0, 2, 1) | uvec4(66, 0, 0, 66) | uvec4(2, 0, 0, 255) | uvec4(0, 32, 64, 11) | uvec4(5, 0, 0, 5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_ivec2_to_uvec4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | false | false | true | false | false | true ];
+			input float in1 = [ 8.25 | 0.5 | 2.0 | 20.125 | 0.0 | 36.8125 | 3.5 | 1.0 ];
+			input ivec2 in2 = [ ivec2(32, 64) | ivec2(1, 1) | ivec2(0, 2) | ivec2(0, 0) | ivec2(0, 0) | ivec2(0, 2) | ivec2(1, 1) | ivec2(0, 0) ];
+			output uvec4 out0 = [ uvec4(1, 8, 32, 64) | uvec4(1, 0, 1, 1) | uvec4(0, 2, 0, 2) | uvec4(0, 20, 0, 0) | uvec4(1, 0, 0, 0) | uvec4(0, 36, 0, 2) | uvec4(0, 3, 1, 1) | uvec4(1, 1, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_uvec3_to_uvec4
+		version 310 es
+		values
+		{
+			input float in0 = [ 8.25 | 20.125 | 1.0 | 0.5 | 3.5 | 2.0 | 36.8125 | 0.0 ];
+			input uvec3 in1 = [ uvec3(0, 0, 0) | uvec3(0, 0, 0) | uvec3(0, 2, 4) | uvec3(32, 64, 51) | uvec3(0, 2, 4) | uvec3(1, 1, 1) | uvec3(1, 1, 1) | uvec3(0, 0, 0) ];
+			output uvec4 out0 = [ uvec4(8, 0, 0, 0) | uvec4(20, 0, 0, 0) | uvec4(1, 0, 2, 4) | uvec4(0, 32, 64, 51) | uvec4(3, 0, 2, 4) | uvec4(2, 1, 1, 1) | uvec4(36, 1, 1, 1) | uvec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_uvec2_bool_to_uvec4
+		version 310 es
+		values
+		{
+			input int in0 = [ 2 | 1 | 11 | 66 | 192 | 12 | 8 | 255 | 0 | 5 ];
+			input uvec2 in1 = [ uvec2(1, 1) | uvec2(0, 0) | uvec2(0, 0) | uvec2(0, 2) | uvec2(0, 0) | uvec2(0, 2) | uvec2(32, 64) | uvec2(1, 1) | uvec2(32, 64) | uvec2(0, 0) ];
+			input bool in2 = [ true | false | false | false | false | true | true | true | false | true ];
+			output uvec4 out0 = [ uvec4(2, 1, 1, 1) | uvec4(1, 0, 0, 0) | uvec4(11, 0, 0, 0) | uvec4(66, 0, 2, 0) | uvec4(192, 0, 0, 0) | uvec4(12, 0, 2, 1) | uvec4(8, 32, 64, 1) | uvec4(255, 1, 1, 1) | uvec4(0, 32, 64, 0) | uvec4(5, 0, 0, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_float_to_vec3
+		version 310 es
+		values
+		{
+			input float in0 = [ -0.5 | 1.0 | 3.5 | -8.25 | 36.8125 | 0.0 | 2.0 | -20.125 ];
+			input float in1 = [ 2.0 | -20.125 | 3.5 | 36.8125 | -8.25 | 1.0 | -0.5 | 0.0 ];
+			input float in2 = [ 1.0 | 3.5 | 2.0 | -8.25 | -20.125 | -0.5 | 36.8125 | 0.0 ];
+			output vec3 out0 = [ vec3(-0.5, 2.0, 1.0) | vec3(1.0, -20.125, 3.5) | vec3(3.5, 3.5, 2.0) | vec3(-8.25, 36.8125, -8.25) | vec3(36.8125, -8.25, -20.125) | vec3(0.0, 1.0, -0.5) | vec3(2.0, -0.5, 36.8125) | vec3(-20.125, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_float_to_ivec3
+		version 310 es
+		values
+		{
+			input float in0 = [ -0.5 | 1.0 | 3.5 | -8.25 | 36.8125 | 0.0 | 2.0 | -20.125 ];
+			input float in1 = [ 2.0 | -20.125 | 3.5 | 36.8125 | -8.25 | 1.0 | -0.5 | 0.0 ];
+			input float in2 = [ 1.0 | 3.5 | 2.0 | -8.25 | -20.125 | -0.5 | 36.8125 | 0.0 ];
+			output ivec3 out0 = [ ivec3(0, 2, 1) | ivec3(1, -20, 3) | ivec3(3, 3, 2) | ivec3(-8, 36, -8) | ivec3(36, -8, -20) | ivec3(0, 1, 0) | ivec3(2, 0, 36) | ivec3(-20, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_float_to_bvec3
+		version 310 es
+		values
+		{
+			input float in0 = [ -0.5 | 1.0 | 3.5 | -8.25 | 36.8125 | 0.0 | 2.0 | -20.125 ];
+			input float in1 = [ 2.0 | -20.125 | 3.5 | 36.8125 | -8.25 | 1.0 | -0.5 | 0.0 ];
+			input float in2 = [ 1.0 | 3.5 | 2.0 | -8.25 | -20.125 | -0.5 | 36.8125 | 0.0 ];
+			output bvec3 out0 = [ bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(false, true, true) | bvec3(true, true, true) | bvec3(true, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_int_to_vec3
+		version 310 es
+		values
+		{
+			input int in0 = [ -192 | 5 | -12 | 0 | 11 | 8 | 1 | -66 | 255 | 2 ];
+			input int in1 = [ 5 | 1 | 8 | 0 | 2 | -192 | -12 | 255 | -66 | 11 ];
+			input int in2 = [ -192 | 1 | 2 | 5 | -12 | 8 | 11 | 0 | 255 | -66 ];
+			output vec3 out0 = [ vec3(-192.0, 5.0, -192.0) | vec3(5.0, 1.0, 1.0) | vec3(-12.0, 8.0, 2.0) | vec3(0.0, 0.0, 5.0) | vec3(11.0, 2.0, -12.0) | vec3(8.0, -192.0, 8.0) | vec3(1.0, -12.0, 11.0) | vec3(-66.0, 255.0, 0.0) | vec3(255.0, -66.0, 255.0) | vec3(2.0, 11.0, -66.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_int_to_ivec3
+		version 310 es
+		values
+		{
+			input int in0 = [ -192 | 5 | -12 | 0 | 11 | 8 | 1 | -66 | 255 | 2 ];
+			input int in1 = [ 5 | 1 | 8 | 0 | 2 | -192 | -12 | 255 | -66 | 11 ];
+			input int in2 = [ -192 | 1 | 2 | 5 | -12 | 8 | 11 | 0 | 255 | -66 ];
+			output ivec3 out0 = [ ivec3(-192, 5, -192) | ivec3(5, 1, 1) | ivec3(-12, 8, 2) | ivec3(0, 0, 5) | ivec3(11, 2, -12) | ivec3(8, -192, 8) | ivec3(1, -12, 11) | ivec3(-66, 255, 0) | ivec3(255, -66, 255) | ivec3(2, 11, -66) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_int_to_bvec3
+		version 310 es
+		values
+		{
+			input int in0 = [ -192 | 5 | -12 | 0 | 11 | 8 | 1 | -66 | 255 | 2 ];
+			input int in1 = [ 5 | 1 | 8 | 0 | 2 | -192 | -12 | 255 | -66 | 11 ];
+			input int in2 = [ -192 | 1 | 2 | 5 | -12 | 8 | 11 | 0 | 255 | -66 ];
+			output bvec3 out0 = [ bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(false, false, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, false) | bvec3(true, true, true) | bvec3(true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint_uint_to_vec3
+		version 310 es
+		values
+		{
+			input uint in0 = [ 8 | 45 | 2 | 3 | 255 | 193 | 12 | 0 | 9 | 10 ];
+			input uint in1 = [ 193 | 2 | 9 | 8 | 0 | 255 | 45 | 3 | 10 | 12 ];
+			input uint in2 = [ 12 | 2 | 193 | 255 | 8 | 10 | 45 | 0 | 3 | 9 ];
+			output vec3 out0 = [ vec3(8.0, 193.0, 12.0) | vec3(45.0, 2.0, 2.0) | vec3(2.0, 9.0, 193.0) | vec3(3.0, 8.0, 255.0) | vec3(255.0, 0.0, 8.0) | vec3(193.0, 255.0, 10.0) | vec3(12.0, 45.0, 45.0) | vec3(0.0, 3.0, 0.0) | vec3(9.0, 10.0, 3.0) | vec3(10.0, 12.0, 9.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint_uint_to_ivec3
+		version 310 es
+		values
+		{
+			input uint in0 = [ 8 | 45 | 2 | 3 | 255 | 193 | 12 | 0 | 9 | 10 ];
+			input uint in1 = [ 193 | 2 | 9 | 8 | 0 | 255 | 45 | 3 | 10 | 12 ];
+			input uint in2 = [ 12 | 2 | 193 | 255 | 8 | 10 | 45 | 0 | 3 | 9 ];
+			output ivec3 out0 = [ ivec3(8, 193, 12) | ivec3(45, 2, 2) | ivec3(2, 9, 193) | ivec3(3, 8, 255) | ivec3(255, 0, 8) | ivec3(193, 255, 10) | ivec3(12, 45, 45) | ivec3(0, 3, 0) | ivec3(9, 10, 3) | ivec3(10, 12, 9) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint_uint_to_bvec3
+		version 310 es
+		values
+		{
+			input uint in0 = [ 8 | 45 | 2 | 3 | 255 | 193 | 12 | 0 | 9 | 10 ];
+			input uint in1 = [ 193 | 2 | 9 | 8 | 0 | 255 | 45 | 3 | 10 | 12 ];
+			input uint in2 = [ 12 | 2 | 193 | 255 | 8 | 10 | 45 | 0 | 3 | 9 ];
+			output bvec3 out0 = [ bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, false, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_bool_to_vec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			input bool in1 = [ true | false ];
+			input bool in2 = [ true | false ];
+			output vec3 out0 = [ vec3(1.0, 1.0, 1.0) | vec3(0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_bool_to_ivec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			input bool in1 = [ true | false ];
+			input bool in2 = [ true | false ];
+			output ivec3 out0 = [ ivec3(1, 1, 1) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_bool_to_bvec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			input bool in1 = [ true | false ];
+			input bool in2 = [ true | false ];
+			output bvec3 out0 = [ bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_to_vec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | false | false | true | false | true | false | false | true ];
+			input float in1 = [ 0.0 | 36.8125 | 0.0 | -8.25 | 1.0 | 3.5 | 1.0 | -0.5 | -20.125 | 2.0 ];
+			input int in2 = [ 8 | -192 | -66 | 2 | 1 | -12 | 11 | 255 | 5 | 0 ];
+			output vec3 out0 = [ vec3(1.0, 0.0, 8.0) | vec3(1.0, 36.8125, -192.0) | vec3(0.0, 0.0, -66.0) | vec3(0.0, -8.25, 2.0) | vec3(1.0, 1.0, 1.0) | vec3(0.0, 3.5, -12.0) | vec3(1.0, 1.0, 11.0) | vec3(0.0, -0.5, 255.0) | vec3(0.0, -20.125, 5.0) | vec3(1.0, 2.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_to_ivec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | false | false | true | false | true | false | false | true ];
+			input float in1 = [ 0.0 | 36.8125 | 0.0 | -8.25 | 1.0 | 3.5 | 1.0 | -0.5 | -20.125 | 2.0 ];
+			input int in2 = [ 8 | -192 | -66 | 2 | 1 | -12 | 11 | 255 | 5 | 0 ];
+			output ivec3 out0 = [ ivec3(1, 0, 8) | ivec3(1, 36, -192) | ivec3(0, 0, -66) | ivec3(0, -8, 2) | ivec3(1, 1, 1) | ivec3(0, 3, -12) | ivec3(1, 1, 11) | ivec3(0, 0, 255) | ivec3(0, -20, 5) | ivec3(1, 2, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_to_bvec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | false | false | true | false | true | false | false | true ];
+			input float in1 = [ 0.0 | 36.8125 | 0.0 | -8.25 | 1.0 | 3.5 | 1.0 | -0.5 | -20.125 | 2.0 ];
+			input int in2 = [ 8 | -192 | -66 | 2 | 1 | -12 | 11 | 255 | 5 | 0 ];
+			output bvec3 out0 = [ bvec3(true, false, true) | bvec3(true, true, true) | bvec3(false, false, true) | bvec3(false, true, true) | bvec3(true, true, true) | bvec3(false, true, true) | bvec3(true, true, true) | bvec3(false, true, true) | bvec3(false, true, true) | bvec3(true, true, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_bool_to_vec3
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(1.0, 1.25) | vec2(-0.75, -0.0322580645161) | vec2(-32.0, 64.0) | vec2(-0.5, -2.25) | vec2(0.0, 0.5) ];
+			input bool in1 = [ false | true | true | true | false ];
+			output vec3 out0 = [ vec3(1.0, 1.25, 0.0) | vec3(-0.75, -0.0322580645161, 1.0) | vec3(-32.0, 64.0, 1.0) | vec3(-0.5, -2.25, 1.0) | vec3(0.0, 0.5, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_bool_to_ivec3
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(1.0, 1.25) | vec2(-0.75, -0.0322580645161) | vec2(-32.0, 64.0) | vec2(-0.5, -2.25) | vec2(0.0, 0.5) ];
+			input bool in1 = [ false | true | true | true | false ];
+			output ivec3 out0 = [ ivec3(1, 1, 0) | ivec3(0, 0, 1) | ivec3(-32, 64, 1) | ivec3(0, -2, 1) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_bool_to_bvec3
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(1.0, 1.25) | vec2(-0.75, -0.0322580645161) | vec2(-32.0, 64.0) | vec2(-0.5, -2.25) | vec2(0.0, 0.5) ];
+			input bool in1 = [ false | true | true | true | false ];
+			output bvec3 out0 = [ bvec3(true, true, false) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(false, true, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_float_to_vec3
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, true) | bvec2(false, true) | bvec2(true, false) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+			input float in1 = [ -20.125 | 2.0 | 36.8125 | 1.0 | 3.5 | 0.0 | -8.25 | -0.5 ];
+			output vec3 out0 = [ vec3(1.0, 0.0, -20.125) | vec3(0.0, 1.0, 2.0) | vec3(0.0, 1.0, 36.8125) | vec3(1.0, 0.0, 1.0) | vec3(0.0, 0.0, 3.5) | vec3(0.0, 0.0, 0.0) | vec3(1.0, 1.0, -8.25) | vec3(0.0, 0.0, -0.5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_float_to_ivec3
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, true) | bvec2(false, true) | bvec2(true, false) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+			input float in1 = [ -20.125 | 2.0 | 36.8125 | 1.0 | 3.5 | 0.0 | -8.25 | -0.5 ];
+			output ivec3 out0 = [ ivec3(1, 0, -20) | ivec3(0, 1, 2) | ivec3(0, 1, 36) | ivec3(1, 0, 1) | ivec3(0, 0, 3) | ivec3(0, 0, 0) | ivec3(1, 1, -8) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_float_to_bvec3
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, true) | bvec2(false, true) | bvec2(true, false) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+			input float in1 = [ -20.125 | 2.0 | 36.8125 | 1.0 | 3.5 | 0.0 | -8.25 | -0.5 ];
+			output bvec3 out0 = [ bvec3(true, false, true) | bvec3(false, true, true) | bvec3(false, true, true) | bvec3(true, false, true) | bvec3(false, false, true) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_int_to_vec3
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, true) | bvec2(false, false) | bvec2(false, false) | bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, false) | bvec2(false, true) | bvec2(false, false) | bvec2(true, true) ];
+			input int in1 = [ 1 | -66 | 255 | 8 | -192 | 2 | 5 | 11 | -12 | 0 ];
+			output vec3 out0 = [ vec3(1.0, 1.0, 1.0) | vec3(0.0, 0.0, -66.0) | vec3(0.0, 0.0, 255.0) | vec3(1.0, 0.0, 8.0) | vec3(0.0, 0.0, -192.0) | vec3(0.0, 1.0, 2.0) | vec3(1.0, 0.0, 5.0) | vec3(0.0, 1.0, 11.0) | vec3(0.0, 0.0, -12.0) | vec3(1.0, 1.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_int_to_ivec3
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, true) | bvec2(false, false) | bvec2(false, false) | bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, false) | bvec2(false, true) | bvec2(false, false) | bvec2(true, true) ];
+			input int in1 = [ 1 | -66 | 255 | 8 | -192 | 2 | 5 | 11 | -12 | 0 ];
+			output ivec3 out0 = [ ivec3(1, 1, 1) | ivec3(0, 0, -66) | ivec3(0, 0, 255) | ivec3(1, 0, 8) | ivec3(0, 0, -192) | ivec3(0, 1, 2) | ivec3(1, 0, 5) | ivec3(0, 1, 11) | ivec3(0, 0, -12) | ivec3(1, 1, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_int_to_bvec3
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, true) | bvec2(false, false) | bvec2(false, false) | bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, false) | bvec2(false, true) | bvec2(false, false) | bvec2(true, true) ];
+			input int in1 = [ 1 | -66 | 255 | 8 | -192 | 2 | 5 | 11 | -12 | 0 ];
+			output bvec3 out0 = [ bvec3(true, true, true) | bvec3(false, false, true) | bvec3(false, false, true) | bvec3(true, false, true) | bvec3(false, false, true) | bvec3(false, true, true) | bvec3(true, false, true) | bvec3(false, true, true) | bvec3(false, false, true) | bvec3(true, true, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_ivec2_to_vec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | false | false | true ];
+			input ivec2 in1 = [ ivec2(0, -2) | ivec2(0, 0) | ivec2(0, 0) | ivec2(1, 1) | ivec2(-32, 64) ];
+			output vec3 out0 = [ vec3(1.0, 0.0, -2.0) | vec3(1.0, 0.0, 0.0) | vec3(0.0, 0.0, 0.0) | vec3(0.0, 1.0, 1.0) | vec3(1.0, -32.0, 64.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_ivec2_to_ivec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | false | false | true ];
+			input ivec2 in1 = [ ivec2(0, -2) | ivec2(0, 0) | ivec2(0, 0) | ivec2(1, 1) | ivec2(-32, 64) ];
+			output ivec3 out0 = [ ivec3(1, 0, -2) | ivec3(1, 0, 0) | ivec3(0, 0, 0) | ivec3(0, 1, 1) | ivec3(1, -32, 64) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_ivec2_to_bvec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | false | false | true ];
+			input ivec2 in1 = [ ivec2(0, -2) | ivec2(0, 0) | ivec2(0, 0) | ivec2(1, 1) | ivec2(-32, 64) ];
+			output bvec3 out0 = [ bvec3(true, false, true) | bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, true) | bvec3(true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_uvec2_to_vec3
+		version 310 es
+		values
+		{
+			input float in0 = [ -20.125 | 36.8125 | -8.25 | 2.0 | -0.5 | 0.0 | 3.5 | 1.0 ];
+			input uvec2 in1 = [ uvec2(0, 0) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) | uvec2(0, 2) | uvec2(0, 0) | uvec2(1, 1) | uvec2(1, 1) ];
+			output vec3 out0 = [ vec3(-20.125, 0.0, 0.0) | vec3(36.8125, 0.0, 2.0) | vec3(-8.25, 32.0, 64.0) | vec3(2.0, 0.0, 0.0) | vec3(-0.5, 0.0, 2.0) | vec3(0.0, 0.0, 0.0) | vec3(3.5, 1.0, 1.0) | vec3(1.0, 1.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_uvec2_to_ivec3
+		version 310 es
+		values
+		{
+			input float in0 = [ -20.125 | 36.8125 | -8.25 | 2.0 | -0.5 | 0.0 | 3.5 | 1.0 ];
+			input uvec2 in1 = [ uvec2(0, 0) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) | uvec2(0, 2) | uvec2(0, 0) | uvec2(1, 1) | uvec2(1, 1) ];
+			output ivec3 out0 = [ ivec3(-20, 0, 0) | ivec3(36, 0, 2) | ivec3(-8, 32, 64) | ivec3(2, 0, 0) | ivec3(0, 0, 2) | ivec3(0, 0, 0) | ivec3(3, 1, 1) | ivec3(1, 1, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_uvec2_to_bvec3
+		version 310 es
+		values
+		{
+			input float in0 = [ -20.125 | 36.8125 | -8.25 | 2.0 | -0.5 | 0.0 | 3.5 | 1.0 ];
+			input uvec2 in1 = [ uvec2(0, 0) | uvec2(0, 2) | uvec2(32, 64) | uvec2(0, 0) | uvec2(0, 2) | uvec2(0, 0) | uvec2(1, 1) | uvec2(1, 1) ];
+			output bvec3 out0 = [ bvec3(true, false, false) | bvec3(true, false, true) | bvec3(true, true, true) | bvec3(true, false, false) | bvec3(true, false, true) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_float_to_uvec3
+		version 310 es
+		values
+		{
+			input float in0 = [ 8.25 | 20.125 | 2.0 | 3.5 | 0.5 | 36.8125 | 1.0 | 0.0 ];
+			input float in1 = [ 1.0 | 0.0 | 3.5 | 36.8125 | 8.25 | 2.0 | 0.5 | 20.125 ];
+			input float in2 = [ 20.125 | 0.5 | 8.25 | 36.8125 | 1.0 | 0.0 | 3.5 | 2.0 ];
+			output uvec3 out0 = [ uvec3(8, 1, 20) | uvec3(20, 0, 0) | uvec3(2, 3, 8) | uvec3(3, 36, 36) | uvec3(0, 8, 1) | uvec3(36, 2, 0) | uvec3(1, 0, 3) | uvec3(0, 20, 2) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_int_to_uvec3
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 255 | 192 | 2 | 5 | 12 | 0 | 11 | 8 | 66 ];
+			input int in1 = [ 192 | 66 | 5 | 8 | 11 | 1 | 0 | 255 | 12 | 2 ];
+			input int in2 = [ 192 | 5 | 1 | 11 | 66 | 8 | 12 | 0 | 2 | 255 ];
+			output uvec3 out0 = [ uvec3(1, 192, 192) | uvec3(255, 66, 5) | uvec3(192, 5, 1) | uvec3(2, 8, 11) | uvec3(5, 11, 66) | uvec3(12, 1, 8) | uvec3(0, 0, 12) | uvec3(11, 255, 0) | uvec3(8, 12, 2) | uvec3(66, 2, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint_uint_to_uvec3
+		version 310 es
+		values
+		{
+			input uint in0 = [ 193 | 9 | 45 | 255 | 2 | 0 | 10 | 8 | 12 | 3 ];
+			input uint in1 = [ 3 | 45 | 2 | 9 | 10 | 0 | 8 | 12 | 255 | 193 ];
+			input uint in2 = [ 2 | 3 | 9 | 10 | 255 | 8 | 12 | 0 | 193 | 45 ];
+			output uvec3 out0 = [ uvec3(193, 3, 2) | uvec3(9, 45, 3) | uvec3(45, 2, 9) | uvec3(255, 9, 10) | uvec3(2, 10, 255) | uvec3(0, 0, 8) | uvec3(10, 8, 12) | uvec3(8, 12, 0) | uvec3(12, 255, 193) | uvec3(3, 193, 45) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_bool_to_uvec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ false | true ];
+			input bool in1 = [ false | true ];
+			input bool in2 = [ true | false ];
+			output uvec3 out0 = [ uvec3(0, 0, 1) | uvec3(1, 1, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_to_uvec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false | true | false | false | true | false | true | true | false ];
+			input float in1 = [ 36.8125 | 20.125 | 1.0 | 0.0 | 3.5 | 1.0 | 2.0 | 0.5 | 0.0 | 8.25 ];
+			input int in2 = [ 1 | 0 | 8 | 66 | 2 | 11 | 192 | 5 | 12 | 255 ];
+			output uvec3 out0 = [ uvec3(1, 36, 1) | uvec3(0, 20, 0) | uvec3(1, 1, 8) | uvec3(0, 0, 66) | uvec3(0, 3, 2) | uvec3(1, 1, 11) | uvec3(0, 2, 192) | uvec3(1, 0, 5) | uvec3(1, 0, 12) | uvec3(0, 8, 255) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_bool_to_uvec3
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(32.0, 64.0) | vec2(0.5, 2.25) | vec2(1.0, 1.25) | vec2(0.75, 0.0322580645161) ];
+			input bool in1 = [ false | false | true | true | true ];
+			output uvec3 out0 = [ uvec3(0, 0, 0) | uvec3(32, 64, 0) | uvec3(0, 2, 1) | uvec3(1, 1, 1) | uvec3(0, 0, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_float_to_uvec3
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, true) | bvec2(true, false) | bvec2(false, true) | bvec2(false, false) | bvec2(true, false) | bvec2(false, true) | bvec2(false, false) | bvec2(false, false) ];
+			input float in1 = [ 8.25 | 36.8125 | 20.125 | 2.0 | 0.0 | 1.0 | 0.5 | 3.5 ];
+			output uvec3 out0 = [ uvec3(1, 1, 8) | uvec3(1, 0, 36) | uvec3(0, 1, 20) | uvec3(0, 0, 2) | uvec3(1, 0, 0) | uvec3(0, 1, 1) | uvec3(0, 0, 0) | uvec3(0, 0, 3) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_int_to_uvec3
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, true) | bvec2(false, true) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) | bvec2(true, false) | bvec2(false, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, false) ];
+			input int in1 = [ 255 | 1 | 2 | 5 | 0 | 11 | 192 | 12 | 8 | 66 ];
+			output uvec3 out0 = [ uvec3(1, 1, 255) | uvec3(0, 1, 1) | uvec3(0, 0, 2) | uvec3(1, 1, 5) | uvec3(0, 0, 0) | uvec3(1, 0, 11) | uvec3(0, 0, 192) | uvec3(0, 0, 12) | uvec3(0, 1, 8) | uvec3(1, 0, 66) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_ivec2_to_uvec3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false | true | true | false ];
+			input ivec2 in1 = [ ivec2(0, 0) | ivec2(0, 2) | ivec2(1, 1) | ivec2(0, 0) | ivec2(32, 64) ];
+			output uvec3 out0 = [ uvec3(1, 0, 0) | uvec3(0, 0, 2) | uvec3(1, 1, 1) | uvec3(1, 0, 0) | uvec3(0, 32, 64) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_uvec2_to_uvec3
+		version 310 es
+		values
+		{
+			input float in0 = [ 2.0 | 8.25 | 20.125 | 3.5 | 0.0 | 0.5 | 36.8125 | 1.0 ];
+			input uvec2 in1 = [ uvec2(0, 2) | uvec2(1, 1) | uvec2(32, 64) | uvec2(0, 0) | uvec2(0, 2) | uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 0) ];
+			output uvec3 out0 = [ uvec3(2, 0, 2) | uvec3(8, 1, 1) | uvec3(20, 32, 64) | uvec3(3, 0, 0) | uvec3(0, 0, 2) | uvec3(0, 0, 0) | uvec3(36, 1, 1) | uvec3(1, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_to_vec2
+		version 310 es
+		values
+		{
+			input float in0 = [ -0.5 | -20.125 | 1.0 | 2.0 | 3.5 | 36.8125 | -8.25 | 0.0 ];
+			input float in1 = [ 1.0 | -20.125 | 0.0 | 3.5 | -8.25 | 36.8125 | -0.5 | 2.0 ];
+			output vec2 out0 = [ vec2(-0.5, 1.0) | vec2(-20.125, -20.125) | vec2(1.0, 0.0) | vec2(2.0, 3.5) | vec2(3.5, -8.25) | vec2(36.8125, 36.8125) | vec2(-8.25, -0.5) | vec2(0.0, 2.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_to_ivec2
+		version 310 es
+		values
+		{
+			input float in0 = [ -0.5 | -20.125 | 1.0 | 2.0 | 3.5 | 36.8125 | -8.25 | 0.0 ];
+			input float in1 = [ 1.0 | -20.125 | 0.0 | 3.5 | -8.25 | 36.8125 | -0.5 | 2.0 ];
+			output ivec2 out0 = [ ivec2(0, 1) | ivec2(-20, -20) | ivec2(1, 0) | ivec2(2, 3) | ivec2(3, -8) | ivec2(36, 36) | ivec2(-8, 0) | ivec2(0, 2) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_to_bvec2
+		version 310 es
+		values
+		{
+			input float in0 = [ -0.5 | -20.125 | 1.0 | 2.0 | 3.5 | 36.8125 | -8.25 | 0.0 ];
+			input float in1 = [ 1.0 | -20.125 | 0.0 | 3.5 | -8.25 | 36.8125 | -0.5 | 2.0 ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(true, true) | bvec2(true, false) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(false, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_to_vec2
+		version 310 es
+		values
+		{
+			input int in0 = [ 2 | -66 | 0 | 5 | -12 | 8 | -192 | 1 | 11 | 255 ];
+			input int in1 = [ -192 | 8 | 1 | 0 | 5 | -66 | 2 | 255 | 11 | -12 ];
+			output vec2 out0 = [ vec2(2.0, -192.0) | vec2(-66.0, 8.0) | vec2(0.0, 1.0) | vec2(5.0, 0.0) | vec2(-12.0, 5.0) | vec2(8.0, -66.0) | vec2(-192.0, 2.0) | vec2(1.0, 255.0) | vec2(11.0, 11.0) | vec2(255.0, -12.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_to_ivec2
+		version 310 es
+		values
+		{
+			input int in0 = [ 2 | -66 | 0 | 5 | -12 | 8 | -192 | 1 | 11 | 255 ];
+			input int in1 = [ -192 | 8 | 1 | 0 | 5 | -66 | 2 | 255 | 11 | -12 ];
+			output ivec2 out0 = [ ivec2(2, -192) | ivec2(-66, 8) | ivec2(0, 1) | ivec2(5, 0) | ivec2(-12, 5) | ivec2(8, -66) | ivec2(-192, 2) | ivec2(1, 255) | ivec2(11, 11) | ivec2(255, -12) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_to_bvec2
+		version 310 es
+		values
+		{
+			input int in0 = [ 2 | -66 | 0 | 5 | -12 | 8 | -192 | 1 | 11 | 255 ];
+			input int in1 = [ -192 | 8 | 1 | 0 | 5 | -66 | 2 | 255 | 11 | -12 ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(true, true) | bvec2(false, true) | bvec2(true, false) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint_to_vec2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 9 | 12 | 0 | 255 | 8 | 45 | 3 | 2 | 10 | 193 ];
+			input uint in1 = [ 8 | 9 | 45 | 2 | 12 | 193 | 255 | 0 | 3 | 10 ];
+			output vec2 out0 = [ vec2(9.0, 8.0) | vec2(12.0, 9.0) | vec2(0.0, 45.0) | vec2(255.0, 2.0) | vec2(8.0, 12.0) | vec2(45.0, 193.0) | vec2(3.0, 255.0) | vec2(2.0, 0.0) | vec2(10.0, 3.0) | vec2(193.0, 10.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint_to_ivec2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 9 | 12 | 0 | 255 | 8 | 45 | 3 | 2 | 10 | 193 ];
+			input uint in1 = [ 8 | 9 | 45 | 2 | 12 | 193 | 255 | 0 | 3 | 10 ];
+			output ivec2 out0 = [ ivec2(9, 8) | ivec2(12, 9) | ivec2(0, 45) | ivec2(255, 2) | ivec2(8, 12) | ivec2(45, 193) | ivec2(3, 255) | ivec2(2, 0) | ivec2(10, 3) | ivec2(193, 10) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint_to_bvec2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 9 | 12 | 0 | 255 | 8 | 45 | 3 | 2 | 10 | 193 ];
+			input uint in1 = [ 8 | 9 | 45 | 2 | 12 | 193 | 255 | 0 | 3 | 10 ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(true, true) | bvec2(false, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, false) | bvec2(true, true) | bvec2(true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_to_vec2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			input bool in1 = [ true | false ];
+			output vec2 out0 = [ vec2(1.0, 1.0) | vec2(0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_to_ivec2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			input bool in1 = [ true | false ];
+			output ivec2 out0 = [ ivec2(1, 1) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_to_bvec2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			input bool in1 = [ true | false ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_int_to_vec2
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 3.5 | 0.0 | 2.0 | -8.25 | 36.8125 | -20.125 | 1.0 | 1.0 | -0.5 ];
+			input int in1 = [ -66 | 1 | 255 | -192 | 8 | 2 | 0 | 5 | -12 | 11 ];
+			output vec2 out0 = [ vec2(0.0, -66.0) | vec2(3.5, 1.0) | vec2(0.0, 255.0) | vec2(2.0, -192.0) | vec2(-8.25, 8.0) | vec2(36.8125, 2.0) | vec2(-20.125, 0.0) | vec2(1.0, 5.0) | vec2(1.0, -12.0) | vec2(-0.5, 11.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_int_to_ivec2
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 3.5 | 0.0 | 2.0 | -8.25 | 36.8125 | -20.125 | 1.0 | 1.0 | -0.5 ];
+			input int in1 = [ -66 | 1 | 255 | -192 | 8 | 2 | 0 | 5 | -12 | 11 ];
+			output ivec2 out0 = [ ivec2(0, -66) | ivec2(3, 1) | ivec2(0, 255) | ivec2(2, -192) | ivec2(-8, 8) | ivec2(36, 2) | ivec2(-20, 0) | ivec2(1, 5) | ivec2(1, -12) | ivec2(0, 11) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_int_to_bvec2
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 3.5 | 0.0 | 2.0 | -8.25 | 36.8125 | -20.125 | 1.0 | 1.0 | -0.5 ];
+			input int in1 = [ -66 | 1 | 255 | -192 | 8 | 2 | 0 | 5 | -12 | 11 ];
+			output bvec2 out0 = [ bvec2(false, true) | bvec2(true, true) | bvec2(false, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, false) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_bool_to_vec2
+		version 310 es
+		values
+		{
+			input float in0 = [ 2.0 | 3.5 | 1.0 | -20.125 | -8.25 | 0.0 | -0.5 | 36.8125 ];
+			input bool in1 = [ false | true | true | false | false | true | false | true ];
+			output vec2 out0 = [ vec2(2.0, 0.0) | vec2(3.5, 1.0) | vec2(1.0, 1.0) | vec2(-20.125, 0.0) | vec2(-8.25, 0.0) | vec2(0.0, 1.0) | vec2(-0.5, 0.0) | vec2(36.8125, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_bool_to_ivec2
+		version 310 es
+		values
+		{
+			input float in0 = [ 2.0 | 3.5 | 1.0 | -20.125 | -8.25 | 0.0 | -0.5 | 36.8125 ];
+			input bool in1 = [ false | true | true | false | false | true | false | true ];
+			output ivec2 out0 = [ ivec2(2, 0) | ivec2(3, 1) | ivec2(1, 1) | ivec2(-20, 0) | ivec2(-8, 0) | ivec2(0, 1) | ivec2(0, 0) | ivec2(36, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_bool_to_bvec2
+		version 310 es
+		values
+		{
+			input float in0 = [ 2.0 | 3.5 | 1.0 | -20.125 | -8.25 | 0.0 | -0.5 | 36.8125 ];
+			input bool in1 = [ false | true | true | false | false | true | false | true ];
+			output bvec2 out0 = [ bvec2(true, false) | bvec2(true, true) | bvec2(true, true) | bvec2(true, false) | bvec2(true, false) | bvec2(false, true) | bvec2(true, false) | bvec2(true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_bool_to_vec2
+		version 310 es
+		values
+		{
+			input int in0 = [ 255 | 2 | -12 | 1 | -192 | 5 | 8 | 0 | -66 | 11 ];
+			input bool in1 = [ true | false | false | false | true | false | true | false | true | true ];
+			output vec2 out0 = [ vec2(255.0, 1.0) | vec2(2.0, 0.0) | vec2(-12.0, 0.0) | vec2(1.0, 0.0) | vec2(-192.0, 1.0) | vec2(5.0, 0.0) | vec2(8.0, 1.0) | vec2(0.0, 0.0) | vec2(-66.0, 1.0) | vec2(11.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_bool_to_ivec2
+		version 310 es
+		values
+		{
+			input int in0 = [ 255 | 2 | -12 | 1 | -192 | 5 | 8 | 0 | -66 | 11 ];
+			input bool in1 = [ true | false | false | false | true | false | true | false | true | true ];
+			output ivec2 out0 = [ ivec2(255, 1) | ivec2(2, 0) | ivec2(-12, 0) | ivec2(1, 0) | ivec2(-192, 1) | ivec2(5, 0) | ivec2(8, 1) | ivec2(0, 0) | ivec2(-66, 1) | ivec2(11, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_bool_to_bvec2
+		version 310 es
+		values
+		{
+			input int in0 = [ 255 | 2 | -12 | 1 | -192 | 5 | 8 | 0 | -66 | 11 ];
+			input bool in1 = [ true | false | false | false | true | false | true | false | true | true ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(true, false) | bvec2(true, false) | bvec2(true, false) | bvec2(true, true) | bvec2(true, false) | bvec2(true, true) | bvec2(false, false) | bvec2(true, true) | bvec2(true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_uint_to_vec2
+		version 310 es
+		values
+		{
+			input int in0 = [ -66 | 1 | -192 | -12 | 5 | 255 | 11 | 0 | 2 | 8 ];
+			input uint in1 = [ 193 | 0 | 2 | 10 | 255 | 12 | 45 | 8 | 9 | 3 ];
+			output vec2 out0 = [ vec2(-66.0, 193.0) | vec2(1.0, 0.0) | vec2(-192.0, 2.0) | vec2(-12.0, 10.0) | vec2(5.0, 255.0) | vec2(255.0, 12.0) | vec2(11.0, 45.0) | vec2(0.0, 8.0) | vec2(2.0, 9.0) | vec2(8.0, 3.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_uint_to_ivec2
+		version 310 es
+		values
+		{
+			input int in0 = [ -66 | 1 | -192 | -12 | 5 | 255 | 11 | 0 | 2 | 8 ];
+			input uint in1 = [ 193 | 0 | 2 | 10 | 255 | 12 | 45 | 8 | 9 | 3 ];
+			output ivec2 out0 = [ ivec2(-66, 193) | ivec2(1, 0) | ivec2(-192, 2) | ivec2(-12, 10) | ivec2(5, 255) | ivec2(255, 12) | ivec2(11, 45) | ivec2(0, 8) | ivec2(2, 9) | ivec2(8, 3) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_uint_to_bvec2
+		version 310 es
+		values
+		{
+			input int in0 = [ -66 | 1 | -192 | -12 | 5 | 255 | 11 | 0 | 2 | 8 ];
+			input uint in1 = [ 193 | 0 | 2 | 10 | 255 | 12 | 45 | 8 | 9 | 3 ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(true, false) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(false, true) | bvec2(true, true) | bvec2(true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_float_to_vec2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 12 | 0 | 8 | 193 | 3 | 10 | 9 | 2 | 45 | 255 ];
+			input float in1 = [ -20.125 | 0.0 | 3.5 | -8.25 | 0.0 | 1.0 | 2.0 | 36.8125 | -0.5 | 1.0 ];
+			output vec2 out0 = [ vec2(12.0, -20.125) | vec2(0.0, 0.0) | vec2(8.0, 3.5) | vec2(193.0, -8.25) | vec2(3.0, 0.0) | vec2(10.0, 1.0) | vec2(9.0, 2.0) | vec2(2.0, 36.8125) | vec2(45.0, -0.5) | vec2(255.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = vec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_float_to_ivec2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 12 | 0 | 8 | 193 | 3 | 10 | 9 | 2 | 45 | 255 ];
+			input float in1 = [ -20.125 | 0.0 | 3.5 | -8.25 | 0.0 | 1.0 | 2.0 | 36.8125 | -0.5 | 1.0 ];
+			output ivec2 out0 = [ ivec2(12, -20) | ivec2(0, 0) | ivec2(8, 3) | ivec2(193, -8) | ivec2(3, 0) | ivec2(10, 1) | ivec2(9, 2) | ivec2(2, 36) | ivec2(45, 0) | ivec2(255, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = ivec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_float_to_bvec2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 12 | 0 | 8 | 193 | 3 | 10 | 9 | 2 | 45 | 255 ];
+			input float in1 = [ -20.125 | 0.0 | 3.5 | -8.25 | 0.0 | 1.0 | 2.0 | 36.8125 | -0.5 | 1.0 ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(false, false) | bvec2(true, true) | bvec2(true, true) | bvec2(true, false) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) | bvec2(true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = bvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_to_uvec2
+		version 310 es
+		values
+		{
+			input float in0 = [ 8.25 | 36.8125 | 1.0 | 0.5 | 20.125 | 0.0 | 2.0 | 3.5 ];
+			input float in1 = [ 0.5 | 36.8125 | 0.0 | 2.0 | 8.25 | 20.125 | 1.0 | 3.5 ];
+			output uvec2 out0 = [ uvec2(8, 0) | uvec2(36, 36) | uvec2(1, 0) | uvec2(0, 2) | uvec2(20, 8) | uvec2(0, 20) | uvec2(2, 1) | uvec2(3, 3) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_to_uvec2
+		version 310 es
+		values
+		{
+			input int in0 = [ 0 | 12 | 8 | 11 | 255 | 66 | 192 | 2 | 5 | 1 ];
+			input int in1 = [ 11 | 12 | 2 | 5 | 66 | 192 | 255 | 0 | 1 | 8 ];
+			output uvec2 out0 = [ uvec2(0, 11) | uvec2(12, 12) | uvec2(8, 2) | uvec2(11, 5) | uvec2(255, 66) | uvec2(66, 192) | uvec2(192, 255) | uvec2(2, 0) | uvec2(5, 1) | uvec2(1, 8) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint_to_uvec2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 3 | 9 | 10 | 193 | 8 | 0 | 255 | 45 | 2 | 12 ];
+			input uint in1 = [ 0 | 2 | 12 | 3 | 10 | 9 | 45 | 193 | 255 | 8 ];
+			output uvec2 out0 = [ uvec2(3, 0) | uvec2(9, 2) | uvec2(10, 12) | uvec2(193, 3) | uvec2(8, 10) | uvec2(0, 9) | uvec2(255, 45) | uvec2(45, 193) | uvec2(2, 255) | uvec2(12, 8) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_to_uvec2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			input bool in1 = [ true | false ];
+			output uvec2 out0 = [ uvec2(1, 1) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_int_to_uvec2
+		version 310 es
+		values
+		{
+			input float in0 = [ 8.25 | 1.0 | 0.5 | 3.5 | 2.0 | 0.0 | 36.8125 | 1.0 | 0.0 | 20.125 ];
+			input int in1 = [ 0 | 255 | 12 | 5 | 192 | 2 | 66 | 11 | 1 | 8 ];
+			output uvec2 out0 = [ uvec2(8, 0) | uvec2(1, 255) | uvec2(0, 12) | uvec2(3, 5) | uvec2(2, 192) | uvec2(0, 2) | uvec2(36, 66) | uvec2(1, 11) | uvec2(0, 1) | uvec2(20, 8) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_bool_to_uvec2
+		version 310 es
+		values
+		{
+			input float in0 = [ 3.5 | 1.0 | 8.25 | 0.5 | 2.0 | 36.8125 | 0.0 | 20.125 ];
+			input bool in1 = [ true | false | true | false | true | false | false | true ];
+			output uvec2 out0 = [ uvec2(3, 1) | uvec2(1, 0) | uvec2(8, 1) | uvec2(0, 0) | uvec2(2, 1) | uvec2(36, 0) | uvec2(0, 0) | uvec2(20, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_bool_to_uvec2
+		version 310 es
+		values
+		{
+			input int in0 = [ 12 | 11 | 0 | 5 | 8 | 255 | 2 | 1 | 66 | 192 ];
+			input bool in1 = [ true | true | false | false | false | true | false | false | true | true ];
+			output uvec2 out0 = [ uvec2(12, 1) | uvec2(11, 1) | uvec2(0, 0) | uvec2(5, 0) | uvec2(8, 0) | uvec2(255, 1) | uvec2(2, 0) | uvec2(1, 0) | uvec2(66, 1) | uvec2(192, 1) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_uint_to_uvec2
+		version 310 es
+		values
+		{
+			input int in0 = [ 8 | 5 | 1 | 0 | 11 | 12 | 192 | 66 | 255 | 2 ];
+			input uint in1 = [ 8 | 10 | 45 | 255 | 9 | 193 | 2 | 3 | 0 | 12 ];
+			output uvec2 out0 = [ uvec2(8, 8) | uvec2(5, 10) | uvec2(1, 45) | uvec2(0, 255) | uvec2(11, 9) | uvec2(12, 193) | uvec2(192, 2) | uvec2(66, 3) | uvec2(255, 0) | uvec2(2, 12) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_float_to_uvec2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 9 | 45 | 8 | 3 | 10 | 2 | 193 | 12 | 255 | 0 ];
+			input float in1 = [ 3.5 | 20.125 | 2.0 | 0.0 | 1.0 | 36.8125 | 8.25 | 1.0 | 0.0 | 0.5 ];
+			output uvec2 out0 = [ uvec2(9, 3) | uvec2(45, 20) | uvec2(8, 2) | uvec2(3, 0) | uvec2(10, 1) | uvec2(2, 36) | uvec2(193, 8) | uvec2(12, 1) | uvec2(255, 0) | uvec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = uvec2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+end # vector_combine
+group matrix_combine "Matrix Combine Constructors"
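+	# Note: each case below builds the matrix from its arguments' components in
+	# declaration order, filling columns first; bool/int/uint inputs are converted
+	# to float (false -> 0.0, true -> 1.0), which is how the expected out0 values
+	# are derived from the listed inputs.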
+
+	case vec2_vec2_to_mat2
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(1.0, 1.25) | vec2(-32.0, 64.0) | vec2(-0.5, -2.25) | vec2(-0.75, -0.0322580645161) | vec2(0.0, 0.5) ];
+			input vec2 in1 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.75, -0.0322580645161) | vec2(-32.0, 64.0) | vec2(-0.5, -2.25) ];
+			output mat2 out0 = [ mat2(1.0, 1.25, 0.0, 0.5) | mat2(-32.0, 64.0, 1.0, 1.25) | mat2(-0.5, -2.25, -0.75, -0.0322580645161) | mat2(-0.75, -0.0322580645161, -32.0, 64.0) | mat2(0.0, 0.5, -0.5, -2.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec2_bvec2_to_mat2
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(false, true) | bvec2(true, true) | bvec2(true, false) | bvec2(false, false) | bvec2(false, false) ];
+			input bvec2 in1 = [ bvec2(false, false) | bvec2(true, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, true) ];
+			output mat2 out0 = [ mat2(0.0, 1.0, 0.0, 0.0) | mat2(1.0, 1.0, 1.0, 0.0) | mat2(1.0, 0.0, 0.0, 0.0) | mat2(0.0, 0.0, 1.0, 1.0) | mat2(0.0, 0.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_float_float_to_mat2
+		version 310 es
+		values
+		{
+			input float in0 = [ -8.25 | 3.5 | 36.8125 | 2.0 | -20.125 | 1.0 | -0.5 | 0.0 ];
+			input float in1 = [ 3.5 | -20.125 | 1.0 | 0.0 | -8.25 | 2.0 | 36.8125 | -0.5 ];
+			input float in2 = [ 36.8125 | 3.5 | 0.0 | -20.125 | -0.5 | -8.25 | 1.0 | 2.0 ];
+			input float in3 = [ -0.5 | 0.0 | -8.25 | -20.125 | 2.0 | 3.5 | 1.0 | 36.8125 ];
+			output mat2 out0 = [ mat2(-8.25, 3.5, 36.8125, -0.5) | mat2(3.5, -20.125, 3.5, 0.0) | mat2(36.8125, 1.0, 0.0, -8.25) | mat2(2.0, 0.0, -20.125, -20.125) | mat2(-20.125, -8.25, -0.5, 2.0) | mat2(1.0, 2.0, -8.25, 3.5) | mat2(-0.5, 36.8125, 1.0, 1.0) | mat2(0.0, -0.5, 2.0, 36.8125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_int_int_to_mat2
+		version 310 es
+		values
+		{
+			input int in0 = [ 2 | 1 | -192 | 11 | -66 | 255 | 0 | 8 | -12 | 5 ];
+			input int in1 = [ 11 | 255 | 5 | -66 | 8 | 2 | 0 | -12 | 1 | -192 ];
+			input int in2 = [ 11 | 1 | -12 | 255 | 5 | 0 | 8 | -192 | 2 | -66 ];
+			input int in3 = [ 2 | 1 | 0 | 8 | 255 | -66 | -192 | 11 | 5 | -12 ];
+			output mat2 out0 = [ mat2(2.0, 11.0, 11.0, 2.0) | mat2(1.0, 255.0, 1.0, 1.0) | mat2(-192.0, 5.0, -12.0, 0.0) | mat2(11.0, -66.0, 255.0, 8.0) | mat2(-66.0, 8.0, 5.0, 255.0) | mat2(255.0, 2.0, 0.0, -66.0) | mat2(0.0, 0.0, 8.0, -192.0) | mat2(8.0, -12.0, -192.0, 11.0) | mat2(-12.0, 1.0, 2.0, 5.0) | mat2(5.0, -192.0, -66.0, -12.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint_uint_uint_to_mat2
+		version 310 es
+		values
+		{
+			input uint in0 = [ 193 | 9 | 12 | 45 | 10 | 2 | 8 | 3 | 255 | 0 ];
+			input uint in1 = [ 0 | 255 | 12 | 193 | 3 | 2 | 45 | 9 | 8 | 10 ];
+			input uint in2 = [ 3 | 9 | 10 | 2 | 12 | 193 | 255 | 0 | 8 | 45 ];
+			input uint in3 = [ 45 | 12 | 9 | 3 | 2 | 255 | 10 | 8 | 193 | 0 ];
+			output mat2 out0 = [ mat2(193.0, 0.0, 3.0, 45.0) | mat2(9.0, 255.0, 9.0, 12.0) | mat2(12.0, 12.0, 10.0, 9.0) | mat2(45.0, 193.0, 2.0, 3.0) | mat2(10.0, 3.0, 12.0, 2.0) | mat2(2.0, 2.0, 193.0, 255.0) | mat2(8.0, 45.0, 255.0, 10.0) | mat2(3.0, 9.0, 0.0, 8.0) | mat2(255.0, 8.0, 8.0, 193.0) | mat2(0.0, 10.0, 45.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_bool_bool_to_mat2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			input bool in1 = [ true | false ];
+			input bool in2 = [ true | false ];
+			input bool in3 = [ false | true ];
+			output mat2 out0 = [ mat2(1.0, 1.0, 1.0, 0.0) | mat2(0.0, 0.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_bool_to_mat2
+		version 310 es
+		values
+		{
+			input bool in0 = [ false | true | false | true | true | false | true | true | false | false ];
+			input float in1 = [ 2.0 | -0.5 | 0.0 | 1.0 | 1.0 | 36.8125 | 3.5 | 0.0 | -8.25 | -20.125 ];
+			input int in2 = [ 0 | -12 | 8 | -192 | 1 | -66 | 5 | 11 | 2 | 255 ];
+			input bool in3 = [ true | false | true | false | true | false | false | true | true | false ];
+			output mat2 out0 = [ mat2(0.0, 2.0, 0.0, 1.0) | mat2(1.0, -0.5, -12.0, 0.0) | mat2(0.0, 0.0, 8.0, 1.0) | mat2(1.0, 1.0, -192.0, 0.0) | mat2(1.0, 1.0, 1.0, 1.0) | mat2(0.0, 36.8125, -66.0, 0.0) | mat2(1.0, 3.5, 5.0, 0.0) | mat2(1.0, 0.0, 11.0, 1.0) | mat2(0.0, -8.25, 2.0, 1.0) | mat2(0.0, -20.125, 255.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_ivec2_to_mat2
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(1.0, 1.25) | vec2(-32.0, 64.0) | vec2(-0.5, -2.25) | vec2(-0.75, -0.0322580645161) | vec2(0.0, 0.5) ];
+			input ivec2 in1 = [ ivec2(0, -2) | ivec2(0, 0) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(1, 1) ];
+			output mat2 out0 = [ mat2(1.0, 1.25, 0.0, -2.0) | mat2(-32.0, 64.0, 0.0, 0.0) | mat2(-0.5, -2.25, 0.0, 0.0) | mat2(-0.75, -0.0322580645161, -32.0, 64.0) | mat2(0.0, 0.5, 1.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_bvec2_to_mat2
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(-0.75, -0.0322580645161) | vec2(0.0, 0.5) ];
+			input bvec2 in1 = [ bvec2(false, false) | bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) ];
+			output mat2 out0 = [ mat2(-0.5, -2.25, 0.0, 0.0) | mat2(-32.0, 64.0, 1.0, 0.0) | mat2(1.0, 1.25, 0.0, 0.0) | mat2(-0.75, -0.0322580645161, 0.0, 1.0) | mat2(0.0, 0.5, 1.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_float_to_mat2
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, true, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(true, false, false) | bvec3(false, true, false) | bvec3(false, false, false) | bvec3(false, false, false) ];
+			input float in1 = [ 36.8125 | -8.25 | 1.0 | -0.5 | 2.0 | -20.125 | 0.0 | 3.5 ];
+			output mat2 out0 = [ mat2(1.0, 0.0, 0.0, 36.8125) | mat2(0.0, 1.0, 0.0, -8.25) | mat2(0.0, 0.0, 0.0, 1.0) | mat2(1.0, 1.0, 1.0, -0.5) | mat2(1.0, 0.0, 0.0, 2.0) | mat2(0.0, 1.0, 0.0, -20.125) | mat2(0.0, 0.0, 0.0, 0.0) | mat2(0.0, 0.0, 0.0, 3.5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_float_to_mat2
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-0.5, -2.25, -4.875) | vec3(1.0, 1.25, 1.125) | vec3(0.0, 0.5, 0.75) | vec3(-32.0, 64.0, -51.0) | vec3(0.0, 0.5, 0.75) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			input float in1 = [ -0.5 | -20.125 | 0.0 | 36.8125 | 3.5 | -8.25 | 2.0 | 1.0 ];
+			output mat2 out0 = [ mat2(1.0, 1.25, 1.125, -0.5) | mat2(-0.5, -2.25, -4.875, -20.125) | mat2(-0.5, -2.25, -4.875, 0.0) | mat2(1.0, 1.25, 1.125, 36.8125) | mat2(0.0, 0.5, 0.75, 3.5) | mat2(-32.0, 64.0, -51.0, -8.25) | mat2(0.0, 0.5, 0.75, 2.0) | mat2(-0.75, -0.0322580645161, 0.0526315789474, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_ivec2_int_to_mat2
+		version 310 es
+		values
+		{
+			input int in0 = [ 8 | -192 | 2 | 0 | -66 | 255 | 5 | 1 | -12 | 11 ];
+			input ivec2 in1 = [ ivec2(0, -2) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, -2) | ivec2(1, 1) | ivec2(1, 1) ];
+			input int in2 = [ 2 | 1 | -12 | 0 | 11 | 8 | 255 | -192 | 5 | -66 ];
+			output mat2 out0 = [ mat2(8.0, 0.0, -2.0, 2.0) | mat2(-192.0, 0.0, 0.0, 1.0) | mat2(2.0, -32.0, 64.0, -12.0) | mat2(0.0, 0.0, 0.0, 0.0) | mat2(-66.0, 0.0, 0.0, 11.0) | mat2(255.0, -32.0, 64.0, 8.0) | mat2(5.0, 0.0, 0.0, 255.0) | mat2(1.0, 0.0, -2.0, -192.0) | mat2(-12.0, 1.0, 1.0, 5.0) | mat2(11.0, 1.0, 1.0, -66.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_ivec2_to_mat2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | true | false | false | true | false | false ];
+			input float in1 = [ 1.0 | -0.5 | -20.125 | 36.8125 | 2.0 | 0.0 | -8.25 | 3.5 ];
+			input ivec2 in2 = [ ivec2(0, 0) | ivec2(0, 0) | ivec2(1, 1) | ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, -2) ];
+			output mat2 out0 = [ mat2(1.0, 1.0, 0.0, 0.0) | mat2(1.0, -0.5, 0.0, 0.0) | mat2(1.0, -20.125, 1.0, 1.0) | mat2(0.0, 36.8125, 0.0, 0.0) | mat2(0.0, 2.0, 1.0, 1.0) | mat2(1.0, 0.0, 0.0, -2.0) | mat2(0.0, -8.25, -32.0, 64.0) | mat2(0.0, 3.5, 0.0, -2.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_uvec3_to_mat2
+		version 310 es
+		values
+		{
+			input float in0 = [ 36.8125 | 2.0 | -0.5 | 0.0 | -20.125 | 1.0 | 3.5 | -8.25 ];
+			input uvec3 in1 = [ uvec3(1, 1, 1) | uvec3(0, 2, 4) | uvec3(0, 2, 4) | uvec3(0, 0, 0) | uvec3(32, 64, 51) | uvec3(0, 0, 0) | uvec3(0, 0, 0) | uvec3(1, 1, 1) ];
+			output mat2 out0 = [ mat2(36.8125, 1.0, 1.0, 1.0) | mat2(2.0, 0.0, 2.0, 4.0) | mat2(-0.5, 0.0, 2.0, 4.0) | mat2(0.0, 0.0, 0.0, 0.0) | mat2(-20.125, 32.0, 64.0, 51.0) | mat2(1.0, 0.0, 0.0, 0.0) | mat2(3.5, 0.0, 0.0, 0.0) | mat2(-8.25, 1.0, 1.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_uvec2_bool_to_mat2
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | -66 | 5 | -192 | 11 | 0 | 8 | 2 | -12 | 255 ];
+			input uvec2 in1 = [ uvec2(0, 2) | uvec2(32, 64) | uvec2(1, 1) | uvec2(0, 0) | uvec2(0, 0) | uvec2(1, 1) | uvec2(0, 2) | uvec2(0, 0) | uvec2(0, 0) | uvec2(32, 64) ];
+			input bool in2 = [ true | false | true | false | false | false | true | true | true | false ];
+			output mat2 out0 = [ mat2(1.0, 0.0, 2.0, 1.0) | mat2(-66.0, 32.0, 64.0, 0.0) | mat2(5.0, 1.0, 1.0, 1.0) | mat2(-192.0, 0.0, 0.0, 0.0) | mat2(11.0, 0.0, 0.0, 0.0) | mat2(0.0, 1.0, 1.0, 0.0) | mat2(8.0, 0.0, 2.0, 1.0) | mat2(2.0, 0.0, 0.0, 1.0) | mat2(-12.0, 0.0, 0.0, 1.0) | mat2(255.0, 32.0, 64.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_vec3_to_mat2x3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(1.0, 1.25, 1.125) | vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(0.0, 0.5, 0.75) ];
+			input vec3 in1 = [ vec3(-32.0, 64.0, -51.0) | vec3(-0.5, -2.25, -4.875) | vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output mat2x3 out0 = [ mat2x3(1.0, 1.25, 1.125, -32.0, 64.0, -51.0) | mat2x3(-0.75, -0.0322580645161, 0.0526315789474, -0.5, -2.25, -4.875) | mat2x3(-0.5, -2.25, -4.875, 0.0, 0.5, 0.75) | mat2x3(-32.0, 64.0, -51.0, 1.0, 1.25, 1.125) | mat2x3(0.0, 0.5, 0.75, -0.75, -0.0322580645161, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_bvec3_to_mat2x3
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, true, false) | bvec3(true, false, false) ];
+			input bvec3 in1 = [ bvec3(false, false, false) | bvec3(true, true, true) | bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) ];
+			output mat2x3 out0 = [ mat2x3(0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat2x3(0.0, 0.0, 0.0, 1.0, 1.0, 1.0) | mat2x3(1.0, 1.0, 1.0, 1.0, 0.0, 0.0) | mat2x3(0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat2x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_float_float_float_float_to_mat2x3
+		version 310 es
+		values
+		{
+			input float in0 = [ 1.0 | -8.25 | -20.125 | 36.8125 | -0.5 | 2.0 | 3.5 | 0.0 ];
+			input float in1 = [ -0.5 | -20.125 | 36.8125 | -8.25 | 2.0 | 1.0 | 3.5 | 0.0 ];
+			input float in2 = [ 3.5 | 1.0 | 36.8125 | -20.125 | -8.25 | -0.5 | 2.0 | 0.0 ];
+			input float in3 = [ 1.0 | -8.25 | 0.0 | -20.125 | 2.0 | 3.5 | -0.5 | 36.8125 ];
+			input float in4 = [ 1.0 | 0.0 | 3.5 | 2.0 | -8.25 | -20.125 | -0.5 | 36.8125 ];
+			input float in5 = [ 2.0 | -20.125 | -8.25 | -0.5 | 3.5 | 1.0 | 36.8125 | 0.0 ];
+			output mat2x3 out0 = [ mat2x3(1.0, -0.5, 3.5, 1.0, 1.0, 2.0) | mat2x3(-8.25, -20.125, 1.0, -8.25, 0.0, -20.125) | mat2x3(-20.125, 36.8125, 36.8125, 0.0, 3.5, -8.25) | mat2x3(36.8125, -8.25, -20.125, -20.125, 2.0, -0.5) | mat2x3(-0.5, 2.0, -8.25, 2.0, -8.25, 3.5) | mat2x3(2.0, 1.0, -0.5, 3.5, -20.125, 1.0) | mat2x3(3.5, 3.5, 2.0, -0.5, -0.5, 36.8125) | mat2x3(0.0, 0.0, 0.0, 36.8125, 36.8125, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_int_int_int_int_to_mat2x3
+		version 310 es
+		values
+		{
+			input int in0 = [ 2 | 8 | -192 | 0 | 5 | -12 | 1 | 255 | -66 | 11 ];
+			input int in1 = [ 1 | -192 | 8 | 0 | -12 | 2 | 11 | 255 | -66 | 5 ];
+			input int in2 = [ -192 | 2 | -66 | 8 | 11 | 255 | 0 | 5 | -12 | 1 ];
+			input int in3 = [ 2 | 11 | 255 | 0 | -66 | -12 | 5 | -192 | 8 | 1 ];
+			input int in4 = [ 8 | 0 | -12 | -192 | 2 | -66 | 1 | 255 | 5 | 11 ];
+			input int in5 = [ 0 | 11 | 5 | 8 | -12 | 255 | -192 | 2 | 1 | -66 ];
+			output mat2x3 out0 = [ mat2x3(2.0, 1.0, -192.0, 2.0, 8.0, 0.0) | mat2x3(8.0, -192.0, 2.0, 11.0, 0.0, 11.0) | mat2x3(-192.0, 8.0, -66.0, 255.0, -12.0, 5.0) | mat2x3(0.0, 0.0, 8.0, 0.0, -192.0, 8.0) | mat2x3(5.0, -12.0, 11.0, -66.0, 2.0, -12.0) | mat2x3(-12.0, 2.0, 255.0, -12.0, -66.0, 255.0) | mat2x3(1.0, 11.0, 0.0, 5.0, 1.0, -192.0) | mat2x3(255.0, 255.0, 5.0, -192.0, 255.0, 2.0) | mat2x3(-66.0, -66.0, -12.0, 8.0, 5.0, 1.0) | mat2x3(11.0, 5.0, 1.0, 1.0, 11.0, -66.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_bool_bool_bool_bool_to_mat2x3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			input bool in1 = [ false | true ];
+			input bool in2 = [ false | true ];
+			input bool in3 = [ true | false ];
+			input bool in4 = [ true | false ];
+			input bool in5 = [ true | false ];
+			output mat2x3 out0 = [ mat2x3(1.0, 0.0, 0.0, 1.0, 1.0, 1.0) | mat2x3(0.0, 1.0, 1.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_bool_float_int_to_mat2x3
+		version 310 es
+		values
+		{
+			input bool in0 = [ false | true | false | true | false | true | false | true | true | false ];
+			input float in1 = [ 0.0 | -8.25 | 2.0 | -20.125 | 3.5 | 0.0 | -0.5 | 36.8125 | 1.0 | 1.0 ];
+			input int in2 = [ -66 | -12 | 2 | 8 | 255 | 11 | -192 | 1 | 5 | 0 ];
+			input bool in3 = [ true | false | true | false | false | true | true | false | true | false ];
+			input float in4 = [ 1.0 | 0.0 | -8.25 | 1.0 | 3.5 | -20.125 | -0.5 | 0.0 | 2.0 | 36.8125 ];
+			input int in5 = [ 255 | -192 | 1 | 2 | -12 | -66 | 8 | 0 | 11 | 5 ];
+			output mat2x3 out0 = [ mat2x3(0.0, 0.0, -66.0, 1.0, 1.0, 255.0) | mat2x3(1.0, -8.25, -12.0, 0.0, 0.0, -192.0) | mat2x3(0.0, 2.0, 2.0, 1.0, -8.25, 1.0) | mat2x3(1.0, -20.125, 8.0, 0.0, 1.0, 2.0) | mat2x3(0.0, 3.5, 255.0, 0.0, 3.5, -12.0) | mat2x3(1.0, 0.0, 11.0, 1.0, -20.125, -66.0) | mat2x3(0.0, -0.5, -192.0, 1.0, -0.5, 8.0) | mat2x3(1.0, 36.8125, 1.0, 0.0, 0.0, 0.0) | mat2x3(1.0, 1.0, 5.0, 1.0, 2.0, 11.0) | mat2x3(0.0, 1.0, 0.0, 0.0, 36.8125, 5.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_ivec3_to_mat2x3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(0.0, 0.5, 0.75) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			input ivec3 in1 = [ ivec3(1, 1, 1) | ivec3(-32, 64, -51) | ivec3(0, -2, -4) | ivec3(0, 0, 0) | ivec3(0, 0, 0) ];
+			output mat2x3 out0 = [ mat2x3(1.0, 1.25, 1.125, 1.0, 1.0, 1.0) | mat2x3(-0.5, -2.25, -4.875, -32.0, 64.0, -51.0) | mat2x3(-32.0, 64.0, -51.0, 0.0, -2.0, -4.0) | mat2x3(0.0, 0.5, 0.75, 0.0, 0.0, 0.0) | mat2x3(-0.75, -0.0322580645161, 0.0526315789474, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_bvec4_to_mat2x3
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(-32.0, 64.0) | vec2(0.0, 0.5) | vec2(-0.5, -2.25) | vec2(1.0, 1.25) | vec2(-0.75, -0.0322580645161) ];
+			input bvec4 in1 = [ bvec4(true, false, false, true) | bvec4(false, true, false, false) | bvec4(false, false, false, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output mat2x3 out0 = [ mat2x3(-32.0, 64.0, 1.0, 0.0, 0.0, 1.0) | mat2x3(0.0, 0.5, 0.0, 1.0, 0.0, 0.0) | mat2x3(-0.5, -2.25, 0.0, 0.0, 0.0, 1.0) | mat2x3(1.0, 1.25, 1.0, 1.0, 1.0, 1.0) | mat2x3(-0.75, -0.0322580645161, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_float_ivec2_to_mat2x3
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, false, false) | bvec3(false, true, false) ];
+			input float in1 = [ 1.0 | -8.25 | 36.8125 | 2.0 | 3.5 | -0.5 | -20.125 | 0.0 ];
+			input ivec2 in2 = [ ivec2(1, 1) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(0, -2) | ivec2(1, 1) | ivec2(0, -2) | ivec2(0, 0) | ivec2(0, 0) ];
+			output mat2x3 out0 = [ mat2x3(0.0, 0.0, 0.0, 1.0, 1.0, 1.0) | mat2x3(0.0, 0.0, 0.0, -8.25, 0.0, 0.0) | mat2x3(1.0, 1.0, 1.0, 36.8125, -32.0, 64.0) | mat2x3(1.0, 0.0, 0.0, 2.0, 0.0, -2.0) | mat2x3(0.0, 0.0, 0.0, 3.5, 1.0, 1.0) | mat2x3(0.0, 1.0, 0.0, -0.5, 0.0, -2.0) | mat2x3(1.0, 0.0, 0.0, -20.125, 0.0, 0.0) | mat2x3(0.0, 1.0, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_float_bvec2_to_mat2x3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(-0.5, -2.25, -4.875) | vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(-32.0, 64.0, -51.0) | vec3(1.0, 1.25, 1.125) ];
+			input float in1 = [ 0.0 | 36.8125 | 3.5 | -0.5 | -8.25 | 2.0 | 1.0 | -20.125 ];
+			input bvec2 in2 = [ bvec2(false, false) | bvec2(false, true) | bvec2(true, false) | bvec2(false, true) | bvec2(false, false) | bvec2(true, false) | bvec2(true, true) | bvec2(false, false) ];
+			output mat2x3 out0 = [ mat2x3(0.0, 0.5, 0.75, 0.0, 0.0, 0.0) | mat2x3(-0.5, -2.25, -4.875, 36.8125, 0.0, 1.0) | mat2x3(0.0, 0.5, 0.75, 3.5, 1.0, 0.0) | mat2x3(1.0, 1.25, 1.125, -0.5, 0.0, 1.0) | mat2x3(-0.5, -2.25, -4.875, -8.25, 0.0, 0.0) | mat2x3(-0.75, -0.0322580645161, 0.0526315789474, 2.0, 1.0, 0.0) | mat2x3(-32.0, 64.0, -51.0, 1.0, 1.0, 1.0) | mat2x3(1.0, 1.25, 1.125, -20.125, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_vec3_vec2_to_mat2x4
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(-0.5, -2.25, -4.875) | vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-32.0, 64.0, -51.0) ];
+			input vec3 in1 = [ vec3(0.0, 0.5, 0.75) | vec3(-0.5, -2.25, -4.875) | vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(-32.0, 64.0, -51.0) | vec3(1.0, 1.25, 1.125) ];
+			input vec2 in2 = [ vec2(-0.75, -0.0322580645161) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(0.0, 0.5) | vec2(-0.5, -2.25) ];
+			output mat2x4 out0 = [ mat2x4(-0.5, -2.25, -4.875, 0.0, 0.5, 0.75, -0.75, -0.0322580645161) | mat2x4(-0.75, -0.0322580645161, 0.0526315789474, -0.5, -2.25, -4.875, -32.0, 64.0) | mat2x4(0.0, 0.5, 0.75, -0.75, -0.0322580645161, 0.0526315789474, 1.0, 1.25) | mat2x4(1.0, 1.25, 1.125, -32.0, 64.0, -51.0, 0.0, 0.5) | mat2x4(-32.0, 64.0, -51.0, 1.0, 1.25, 1.125, -0.5, -2.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_ivec3_ivec2_to_mat2x4
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, -2, -4) | ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, 0, 0) | ivec3(-32, 64, -51) ];
+			input ivec3 in1 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			input ivec2 in2 = [ ivec2(0, 0) | ivec2(0, -2) | ivec2(1, 1) | ivec2(0, 0) | ivec2(-32, 64) ];
+			output mat2x4 out0 = [ mat2x4(0.0, -2.0, -4.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat2x4(0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, -2.0) | mat2x4(1.0, 1.0, 1.0, 0.0, -2.0, -4.0, 1.0, 1.0) | mat2x4(0.0, 0.0, 0.0, -32.0, 64.0, -51.0, 0.0, 0.0) | mat2x4(-32.0, 64.0, -51.0, 0.0, 0.0, 0.0, -32.0, 64.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_ivec2_float_float_int_bool_to_mat2x4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(-32.0, 64.0) | vec2(-0.5, -2.25) | vec2(0.0, 0.5) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(-0.75, -0.0322580645161) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-0.75, -0.0322580645161) ];
+			input ivec2 in1 = [ ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, -2) | ivec2(0, 0) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, 0) | ivec2(1, 1) ];
+			input float in2 = [ -8.25 | -0.5 | 3.5 | 36.8125 | 0.0 | 0.0 | 2.0 | -20.125 | 1.0 | 1.0 ];
+			input float in3 = [ 1.0 | 2.0 | -0.5 | 3.5 | 36.8125 | -8.25 | 1.0 | 0.0 | 0.0 | -20.125 ];
+			input int in4 = [ 255 | 8 | 11 | -12 | -192 | 0 | 2 | 1 | -66 | 5 ];
+			input bool in5 = [ true | false | false | true | false | true | true | false | true | false ];
+			output mat2x4 out0 = [ mat2x4(0.0, 0.5, 1.0, 1.0, -8.25, 1.0, 255.0, 1.0) | mat2x4(-32.0, 64.0, 0.0, -2.0, -0.5, 2.0, 8.0, 0.0) | mat2x4(-0.5, -2.25, -32.0, 64.0, 3.5, -0.5, 11.0, 0.0) | mat2x4(0.0, 0.5, 0.0, -2.0, 36.8125, 3.5, -12.0, 1.0) | mat2x4(-32.0, 64.0, 0.0, 0.0, 0.0, 36.8125, -192.0, 0.0) | mat2x4(1.0, 1.25, 0.0, 0.0, 0.0, -8.25, 0.0, 1.0) | mat2x4(-0.75, -0.0322580645161, -32.0, 64.0, 2.0, 1.0, 2.0, 1.0) | mat2x4(1.0, 1.25, 0.0, 0.0, -20.125, 0.0, 1.0, 0.0) | mat2x4(-0.5, -2.25, 0.0, 0.0, 1.0, 0.0, -66.0, 1.0) | mat2x4(-0.75, -0.0322580645161, 1.0, 1.0, 1.0, -20.125, 5.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_vec2_bool_bvec2_to_mat2x4
+		version 310 es
+		values
+		{
+			input bool in0 = [ false | true | false | true | true | true | false | false | false | true ];
+			input float in1 = [ 0.0 | -0.5 | -20.125 | -8.25 | 0.0 | 2.0 | 3.5 | 1.0 | 1.0 | 36.8125 ];
+			input int in2 = [ 2 | 1 | 255 | 8 | -66 | 0 | -12 | 5 | -192 | 11 ];
+			input vec2 in3 = [ vec2(-32.0, 64.0) | vec2(-0.5, -2.25) | vec2(1.0, 1.25) | vec2(-0.75, -0.0322580645161) | vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-32.0, 64.0) | vec2(-0.5, -2.25) | vec2(0.0, 0.5) | vec2(-0.75, -0.0322580645161) ];
+			input bool in4 = [ true | false | false | true | false | false | true | true | true | false ];
+			input bvec2 in5 = [ bvec2(true, false) | bvec2(true, true) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) | bvec2(false, true) | bvec2(false, false) | bvec2(false, false) | bvec2(false, false) | bvec2(true, false) ];
+			output mat2x4 out0 = [ mat2x4(0.0, 0.0, 2.0, -32.0, 64.0, 1.0, 1.0, 0.0) | mat2x4(1.0, -0.5, 1.0, -0.5, -2.25, 0.0, 1.0, 1.0) | mat2x4(0.0, -20.125, 255.0, 1.0, 1.25, 0.0, 0.0, 1.0) | mat2x4(1.0, -8.25, 8.0, -0.75, -0.0322580645161, 1.0, 1.0, 1.0) | mat2x4(1.0, 0.0, -66.0, 0.0, 0.5, 0.0, 0.0, 0.0) | mat2x4(1.0, 2.0, 0.0, 1.0, 1.25, 0.0, 0.0, 1.0) | mat2x4(0.0, 3.5, -12.0, -32.0, 64.0, 1.0, 0.0, 0.0) | mat2x4(0.0, 1.0, 5.0, -0.5, -2.25, 1.0, 0.0, 0.0) | mat2x4(0.0, 1.0, -192.0, 0.0, 0.5, 1.0, 0.0, 0.0) | mat2x4(1.0, 36.8125, 11.0, -0.75, -0.0322580645161, 0.0, 1.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bvec2_int_vec4_to_mat2x4
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | true | false | false | false | false | false | true | true ];
+			input bvec2 in1 = [ bvec2(true, true) | bvec2(false, false) | bvec2(true, false) | bvec2(false, false) | bvec2(true, true) | bvec2(true, false) | bvec2(false, true) | bvec2(false, false) | bvec2(false, true) | bvec2(false, false) ];
+			input int in2 = [ 8 | 1 | 5 | -66 | -192 | 11 | 255 | 0 | -12 | 2 ];
+			input vec4 in3 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(0.0, 0.5, 0.75, 0.825) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) ];
+			output mat2x4 out0 = [ mat2x4(1.0, 1.0, 1.0, 8.0, 0.0, 0.5, 0.75, 0.825) | mat2x4(1.0, 0.0, 0.0, 1.0, 0.0, 0.5, 0.75, 0.825) | mat2x4(1.0, 1.0, 0.0, 5.0, -32.0, 64.0, -51.0, 24.0) | mat2x4(0.0, 0.0, 0.0, -66.0, 1.0, 1.25, 1.125, 1.75) | mat2x4(0.0, 1.0, 1.0, -192.0, -0.5, -2.25, -4.875, 9.0) | mat2x4(0.0, 1.0, 0.0, 11.0, 1.0, 1.25, 1.125, 1.75) | mat2x4(0.0, 0.0, 1.0, 255.0, -0.75, -0.0322580645161, 0.0526315789474, 0.25) | mat2x4(0.0, 0.0, 0.0, 0.0, -0.75, -0.0322580645161, 0.0526315789474, 0.25) | mat2x4(1.0, 0.0, 1.0, -12.0, -0.5, -2.25, -4.875, 9.0) | mat2x4(1.0, 0.0, 0.0, 2.0, -32.0, 64.0, -51.0, 24.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_bvec4_ivec2_bool_to_mat2x4
+		version 310 es
+		values
+		{
+			input float in0 = [ 0.0 | 3.5 | 2.0 | -8.25 | -20.125 | 36.8125 | 1.0 | -0.5 ];
+			input bvec4 in1 = [ bvec4(true, false, false, true) | bvec4(true, true, true, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(false, true, false, false) | bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, false, false, false) ];
+			input ivec2 in2 = [ ivec2(0, -2) | ivec2(-32, 64) | ivec2(1, 1) | ivec2(1, 1) | ivec2(0, 0) | ivec2(0, 0) | ivec2(0, 0) | ivec2(0, -2) ];
+			input bool in3 = [ true | true | false | true | false | false | false | true ];
+			output mat2x4 out0 = [ mat2x4(0.0, 1.0, 0.0, 0.0, 1.0, 0.0, -2.0, 1.0) | mat2x4(3.5, 1.0, 1.0, 1.0, 1.0, -32.0, 64.0, 1.0) | mat2x4(2.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0) | mat2x4(-8.25, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0) | mat2x4(-20.125, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat2x4(36.8125, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat2x4(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) | mat2x4(-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, -2.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat2x4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_vec3_to_mat3x2
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(-0.5, -2.25, -4.875) | vec3(0.0, 0.5, 0.75) | vec3(-32.0, 64.0, -51.0) | vec3(1.0, 1.25, 1.125) ];
+			input vec3 in1 = [ vec3(1.0, 1.25, 1.125) | vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(-32.0, 64.0, -51.0) | vec3(-0.5, -2.25, -4.875) | vec3(0.0, 0.5, 0.75) ];
+			output mat3x2 out0 = [ mat3x2(-0.75, -0.0322580645161, 0.0526315789474, 1.0, 1.25, 1.125) | mat3x2(-0.5, -2.25, -4.875, -0.75, -0.0322580645161, 0.0526315789474) | mat3x2(0.0, 0.5, 0.75, -32.0, 64.0, -51.0) | mat3x2(-32.0, 64.0, -51.0, -0.5, -2.25, -4.875) | mat3x2(1.0, 1.25, 1.125, 0.0, 0.5, 0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_bvec3_to_mat3x2
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, true, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			input bvec3 in1 = [ bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, true, false) | bvec3(true, false, false) ];
+			output mat3x2 out0 = [ mat3x2(1.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3x2(0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat3x2(0.0, 0.0, 0.0, 1.0, 1.0, 1.0) | mat3x2(1.0, 1.0, 1.0, 0.0, 1.0, 0.0) | mat3x2(0.0, 0.0, 0.0, 1.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_float_float_float_float_float_to_mat3x2
+		version 310 es
+		values
+		{
+			input float in0 = [ -8.25 | 36.8125 | -20.125 | -0.5 | 3.5 | 1.0 | 2.0 | 0.0 ];
+			input float in1 = [ 2.0 | 3.5 | -20.125 | 36.8125 | 1.0 | 0.0 | -8.25 | -0.5 ];
+			input float in2 = [ -0.5 | 2.0 | 1.0 | 0.0 | -8.25 | 36.8125 | -20.125 | 3.5 ];
+			input float in3 = [ 36.8125 | 0.0 | 1.0 | -0.5 | 2.0 | 3.5 | -20.125 | -8.25 ];
+			input float in4 = [ 36.8125 | 2.0 | 0.0 | -0.5 | 3.5 | -20.125 | -8.25 | 1.0 ];
+			input float in5 = [ 0.0 | 36.8125 | -20.125 | -0.5 | 3.5 | 2.0 | 1.0 | -8.25 ];
+			output mat3x2 out0 = [ mat3x2(-8.25, 2.0, -0.5, 36.8125, 36.8125, 0.0) | mat3x2(36.8125, 3.5, 2.0, 0.0, 2.0, 36.8125) | mat3x2(-20.125, -20.125, 1.0, 1.0, 0.0, -20.125) | mat3x2(-0.5, 36.8125, 0.0, -0.5, -0.5, -0.5) | mat3x2(3.5, 1.0, -8.25, 2.0, 3.5, 3.5) | mat3x2(1.0, 0.0, 36.8125, 3.5, -20.125, 2.0) | mat3x2(2.0, -8.25, -20.125, -20.125, -8.25, 1.0) | mat3x2(0.0, -0.5, 3.5, -8.25, 1.0, -8.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int_int_int_int_int_to_mat3x2
+		version 310 es
+		values
+		{
+			input int in0 = [ 8 | -192 | 2 | 11 | 255 | -66 | 5 | -12 | 1 | 0 ];
+			input int in1 = [ 1 | 2 | -12 | 5 | 0 | 255 | 8 | 11 | -192 | -66 ];
+			input int in2 = [ -12 | 11 | 2 | 1 | 8 | -66 | -192 | 5 | 255 | 0 ];
+			input int in3 = [ -192 | 0 | -12 | 11 | 1 | -66 | 8 | 255 | 2 | 5 ];
+			input int in4 = [ -12 | 5 | 0 | -66 | 255 | 8 | -192 | 11 | 2 | 1 ];
+			input int in5 = [ -66 | -12 | 8 | 2 | 255 | 0 | -192 | 11 | 1 | 5 ];
+			output mat3x2 out0 = [ mat3x2(8.0, 1.0, -12.0, -192.0, -12.0, -66.0) | mat3x2(-192.0, 2.0, 11.0, 0.0, 5.0, -12.0) | mat3x2(2.0, -12.0, 2.0, -12.0, 0.0, 8.0) | mat3x2(11.0, 5.0, 1.0, 11.0, -66.0, 2.0) | mat3x2(255.0, 0.0, 8.0, 1.0, 255.0, 255.0) | mat3x2(-66.0, 255.0, -66.0, -66.0, 8.0, 0.0) | mat3x2(5.0, 8.0, -192.0, 8.0, -192.0, -192.0) | mat3x2(-12.0, 11.0, 5.0, 255.0, 11.0, 11.0) | mat3x2(1.0, -192.0, 255.0, 2.0, 2.0, 1.0) | mat3x2(0.0, -66.0, 0.0, 5.0, 1.0, 5.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool_bool_bool_bool_bool_to_mat3x2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false ];
+			input bool in1 = [ false | true ];
+			input bool in2 = [ false | true ];
+			input bool in3 = [ false | true ];
+			input bool in4 = [ false | true ];
+			input bool in5 = [ false | true ];
+			output mat3x2 out0 = [ mat3x2(1.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3x2(0.0, 1.0, 1.0, 1.0, 1.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_bool_float_int_to_mat3x2
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false | true | true | false | true | false | true | false | false ];
+			input float in1 = [ -20.125 | 0.0 | 3.5 | 0.0 | 1.0 | -8.25 | 1.0 | 2.0 | 36.8125 | -0.5 ];
+			input int in2 = [ 255 | -66 | 8 | -192 | 5 | 11 | 1 | 2 | 0 | -12 ];
+			input bool in3 = [ true | false | false | true | false | true | true | false | false | true ];
+			input float in4 = [ 0.0 | -20.125 | 1.0 | -8.25 | 0.0 | -0.5 | 2.0 | 3.5 | 1.0 | 36.8125 ];
+			input int in5 = [ -192 | 8 | 2 | 255 | -66 | -12 | 11 | 0 | 5 | 1 ];
+			output mat3x2 out0 = [ mat3x2(1.0, -20.125, 255.0, 1.0, 0.0, -192.0) | mat3x2(0.0, 0.0, -66.0, 0.0, -20.125, 8.0) | mat3x2(1.0, 3.5, 8.0, 0.0, 1.0, 2.0) | mat3x2(1.0, 0.0, -192.0, 1.0, -8.25, 255.0) | mat3x2(0.0, 1.0, 5.0, 0.0, 0.0, -66.0) | mat3x2(1.0, -8.25, 11.0, 1.0, -0.5, -12.0) | mat3x2(0.0, 1.0, 1.0, 1.0, 2.0, 11.0) | mat3x2(1.0, 2.0, 2.0, 0.0, 3.5, 0.0) | mat3x2(0.0, 36.8125, 0.0, 0.0, 1.0, 5.0) | mat3x2(0.0, -0.5, -12.0, 1.0, 36.8125, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_ivec3_to_mat3x2
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(-32.0, 64.0, -51.0) | vec3(-0.5, -2.25, -4.875) ];
+			input ivec3 in1 = [ ivec3(1, 1, 1) | ivec3(0, 0, 0) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output mat3x2 out0 = [ mat3x2(0.0, 0.5, 0.75, 1.0, 1.0, 1.0) | mat3x2(1.0, 1.25, 1.125, 0.0, 0.0, 0.0) | mat3x2(-0.75, -0.0322580645161, 0.0526315789474, 0.0, -2.0, -4.0) | mat3x2(-32.0, 64.0, -51.0, -32.0, 64.0, -51.0) | mat3x2(-0.5, -2.25, -4.875, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_bvec4_to_mat3x2
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(-0.75, -0.0322580645161) | vec2(-32.0, 64.0) | vec2(-0.5, -2.25) | vec2(1.0, 1.25) ];
+			input bvec4 in1 = [ bvec4(true, true, true, true) | bvec4(false, false, false, true) | bvec4(false, false, false, false) | bvec4(true, false, false, true) | bvec4(false, true, false, false) ];
+			output mat3x2 out0 = [ mat3x2(0.0, 0.5, 1.0, 1.0, 1.0, 1.0) | mat3x2(-0.75, -0.0322580645161, 0.0, 0.0, 0.0, 1.0) | mat3x2(-32.0, 64.0, 0.0, 0.0, 0.0, 0.0) | mat3x2(-0.5, -2.25, 1.0, 0.0, 0.0, 1.0) | mat3x2(1.0, 1.25, 0.0, 1.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0, in1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec3_float_ivec2_to_mat3x2
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, false, false) | bvec3(false, true, false) | bvec3(true, false, false) ];
+			input float in1 = [ -8.25 | 1.0 | 2.0 | -0.5 | -20.125 | 0.0 | 36.8125 | 3.5 ];
+			input ivec2 in2 = [ ivec2(1, 1) | ivec2(1, 1) | ivec2(0, 0) | ivec2(0, -2) | ivec2(0, 0) | ivec2(0, 0) | ivec2(0, -2) | ivec2(-32, 64) ];
+			output mat3x2 out0 = [ mat3x2(0.0, 0.0, 0.0, -8.25, 1.0, 1.0) | mat3x2(1.0, 1.0, 1.0, 1.0, 1.0, 1.0) | mat3x2(0.0, 0.0, 0.0, 2.0, 0.0, 0.0) | mat3x2(0.0, 0.0, 0.0, -0.5, 0.0, -2.0) | mat3x2(0.0, 1.0, 0.0, -20.125, 0.0, 0.0) | mat3x2(1.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3x2(0.0, 1.0, 0.0, 36.8125, 0.0, -2.0) | mat3x2(1.0, 0.0, 0.0, 3.5, -32.0, 64.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_float_bvec2_to_mat3x2
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(1.0, 1.25, 1.125) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(0.0, 0.5, 0.75) | vec3(0.0, 0.5, 0.75) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			input float in1 = [ -8.25 | 36.8125 | -0.5 | 3.5 | 2.0 | -20.125 | 0.0 | 1.0 ];
+			input bvec2 in2 = [ bvec2(true, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) | bvec2(false, true) | bvec2(false, true) | bvec2(true, false) | bvec2(false, false) ];
+			output mat3x2 out0 = [ mat3x2(1.0, 1.25, 1.125, -8.25, 1.0, 0.0) | mat3x2(1.0, 1.25, 1.125, 36.8125, 0.0, 0.0) | mat3x2(-0.5, -2.25, -4.875, -0.5, 1.0, 1.0) | mat3x2(-0.5, -2.25, -4.875, 3.5, 0.0, 0.0) | mat3x2(-32.0, 64.0, -51.0, 2.0, 0.0, 1.0) | mat3x2(0.0, 0.5, 0.75, -20.125, 0.0, 1.0) | mat3x2(0.0, 0.5, 0.75, 0.0, 1.0, 0.0) | mat3x2(-0.75, -0.0322580645161, 0.0526315789474, 1.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x2(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_vec3_vec3_to_mat3
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(0.0, 0.5, 0.75) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			input vec3 in1 = [ vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(1.0, 1.25, 1.125) | vec3(-32.0, 64.0, -51.0) | vec3(-0.5, -2.25, -4.875) | vec3(0.0, 0.5, 0.75) ];
+			input vec3 in2 = [ vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(-32.0, 64.0, -51.0) | vec3(-0.5, -2.25, -4.875) | vec3(1.0, 1.25, 1.125) | vec3(0.0, 0.5, 0.75) ];
+			output mat3 out0 = [ mat3(1.0, 1.25, 1.125, -0.75, -0.0322580645161, 0.0526315789474, -0.75, -0.0322580645161, 0.0526315789474) | mat3(-0.5, -2.25, -4.875, 1.0, 1.25, 1.125, -32.0, 64.0, -51.0) | mat3(-32.0, 64.0, -51.0, -32.0, 64.0, -51.0, -0.5, -2.25, -4.875) | mat3(0.0, 0.5, 0.75, -0.5, -2.25, -4.875, 1.0, 1.25, 1.125) | mat3(-0.75, -0.0322580645161, 0.0526315789474, 0.0, 0.5, 0.75, 0.0, 0.5, 0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_ivec3_ivec3_to_mat3
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(0, 0, 0) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			input ivec3 in1 = [ ivec3(0, 0, 0) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) | ivec3(1, 1, 1) ];
+			input ivec3 in2 = [ ivec3(1, 1, 1) | ivec3(0, 0, 0) | ivec3(-32, 64, -51) | ivec3(0, -2, -4) | ivec3(0, 0, 0) ];
+			output mat3 out0 = [ mat3(1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0) | mat3(0.0, -2.0, -4.0, 0.0, -2.0, -4.0, 0.0, 0.0, 0.0) | mat3(0.0, 0.0, 0.0, -32.0, 64.0, -51.0, -32.0, 64.0, -51.0) | mat3(-32.0, 64.0, -51.0, 0.0, 0.0, 0.0, 0.0, -2.0, -4.0) | mat3(0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_ivec2_float_float_int_bool_bool_to_mat3
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(0.0, 0.5) | vec2(-0.75, -0.0322580645161) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) | vec2(-0.5, -2.25) | vec2(1.0, 1.25) | vec2(0.0, 0.5) | vec2(-32.0, 64.0) ];
+			input ivec2 in1 = [ ivec2(0, 0) | ivec2(0, -2) | ivec2(1, 1) | ivec2(0, -2) | ivec2(0, 0) | ivec2(0, 0) | ivec2(1, 1) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(-32, 64) ];
+			input float in2 = [ -0.5 | 3.5 | 0.0 | -20.125 | 1.0 | -8.25 | 0.0 | 1.0 | 2.0 | 36.8125 ];
+			input float in3 = [ 3.5 | 0.0 | -20.125 | 36.8125 | 2.0 | -8.25 | -0.5 | 1.0 | 0.0 | 1.0 ];
+			input int in4 = [ 0 | 11 | 5 | -192 | 8 | -66 | 1 | 2 | 255 | -12 ];
+			input bool in5 = [ true | true | true | false | true | false | false | false | true | false ];
+			input bool in6 = [ false | false | true | false | false | false | true | true | true | true ];
+			output mat3 out0 = [ mat3(1.0, 1.25, 0.0, 0.0, -0.5, 3.5, 0.0, 1.0, 0.0) | mat3(-0.5, -2.25, 0.0, -2.0, 3.5, 0.0, 11.0, 1.0, 0.0) | mat3(0.0, 0.5, 1.0, 1.0, 0.0, -20.125, 5.0, 1.0, 1.0) | mat3(-0.75, -0.0322580645161, 0.0, -2.0, -20.125, 36.8125, -192.0, 0.0, 0.0) | mat3(-32.0, 64.0, 0.0, 0.0, 1.0, 2.0, 8.0, 1.0, 0.0) | mat3(-0.75, -0.0322580645161, 0.0, 0.0, -8.25, -8.25, -66.0, 0.0, 0.0) | mat3(-0.5, -2.25, 1.0, 1.0, 0.0, -0.5, 1.0, 0.0, 1.0) | mat3(1.0, 1.25, -32.0, 64.0, 1.0, 1.0, 2.0, 0.0, 1.0) | mat3(0.0, 0.5, 0.0, 0.0, 2.0, 0.0, 255.0, 1.0, 1.0) | mat3(-32.0, 64.0, -32.0, 64.0, 36.8125, 1.0, -12.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0, in1, in2, in3, in4, in5, in6);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_vec2_bool_bvec2_float_to_mat3
+		version 310 es
+		values
+		{
+			input bool in0 = [ false | true | true | true | true | true | false | false | false | false ];
+			input float in1 = [ -8.25 | 1.0 | -0.5 | 36.8125 | 0.0 | 3.5 | -20.125 | 1.0 | 0.0 | 2.0 ];
+			input int in2 = [ 8 | -66 | 5 | 1 | 0 | 2 | -12 | -192 | 11 | 255 ];
+			input vec2 in3 = [ vec2(0.0, 0.5) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(-32.0, 64.0) | vec2(-0.5, -2.25) | vec2(-0.75, -0.0322580645161) | vec2(-0.5, -2.25) | vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.75, -0.0322580645161) ];
+			input bool in4 = [ true | false | true | false | false | true | true | false | true | false ];
+			input bvec2 in5 = [ bvec2(false, true) | bvec2(false, false) | bvec2(true, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) | bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) ];
+			input float in6 = [ -0.5 | 1.0 | 1.0 | 0.0 | 36.8125 | 2.0 | 0.0 | 3.5 | -20.125 | -8.25 ];
+			output mat3 out0 = [ mat3(0.0, -8.25, 8.0, 0.0, 0.5, 1.0, 0.0, 1.0, -0.5) | mat3(1.0, 1.0, -66.0, -32.0, 64.0, 0.0, 0.0, 0.0, 1.0) | mat3(1.0, -0.5, 5.0, 1.0, 1.25, 1.0, 1.0, 0.0, 1.0) | mat3(1.0, 36.8125, 1.0, -32.0, 64.0, 0.0, 0.0, 0.0, 0.0) | mat3(1.0, 0.0, 0.0, -0.5, -2.25, 0.0, 1.0, 1.0, 36.8125) | mat3(1.0, 3.5, 2.0, -0.75, -0.0322580645161, 1.0, 0.0, 0.0, 2.0) | mat3(0.0, -20.125, -12.0, -0.5, -2.25, 1.0, 1.0, 0.0, 0.0) | mat3(0.0, 1.0, -192.0, 0.0, 0.5, 0.0, 0.0, 0.0, 3.5) | mat3(0.0, 0.0, 11.0, 1.0, 1.25, 1.0, 0.0, 1.0, -20.125) | mat3(0.0, 2.0, 255.0, -0.75, -0.0322580645161, 0.0, 1.0, 1.0, -8.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0, in1, in2, in3, in4, in5, in6);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bvec2_int_vec4_bool_to_mat3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | false | true | true | false | false | true | false | false | true ];
+			input bvec2 in1 = [ bvec2(false, false) | bvec2(false, true) | bvec2(false, false) | bvec2(true, true) | bvec2(true, true) | bvec2(false, false) | bvec2(false, true) | bvec2(true, false) | bvec2(true, false) | bvec2(false, false) ];
+			input int in2 = [ -192 | 2 | 0 | 5 | 1 | -66 | 11 | -12 | 8 | 255 ];
+			input vec4 in3 = [ vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(0.0, 0.5, 0.75, 0.825) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			input bool in4 = [ false | true | false | true | false | false | false | true | true | true ];
+			output mat3 out0 = [ mat3(1.0, 0.0, 0.0, -192.0, -32.0, 64.0, -51.0, 24.0, 0.0) | mat3(0.0, 0.0, 1.0, 2.0, -0.75, -0.0322580645161, 0.0526315789474, 0.25, 1.0) | mat3(1.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.75, 0.825, 0.0) | mat3(1.0, 1.0, 1.0, 5.0, -32.0, 64.0, -51.0, 24.0, 1.0) | mat3(0.0, 1.0, 1.0, 1.0, 1.0, 1.25, 1.125, 1.75, 0.0) | mat3(0.0, 0.0, 0.0, -66.0, 0.0, 0.5, 0.75, 0.825, 0.0) | mat3(1.0, 0.0, 1.0, 11.0, 1.0, 1.25, 1.125, 1.75, 0.0) | mat3(0.0, 1.0, 0.0, -12.0, -0.5, -2.25, -4.875, 9.0, 1.0) | mat3(0.0, 1.0, 0.0, 8.0, -0.5, -2.25, -4.875, 9.0, 1.0) | mat3(1.0, 0.0, 0.0, 255.0, -0.75, -0.0322580645161, 0.0526315789474, 0.25, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0, in1, in2, in3, in4);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_bvec4_ivec2_bool_bool_to_mat3
+		version 310 es
+		values
+		{
+			input float in0 = [ -8.25 | 2.0 | 36.8125 | 3.5 | 1.0 | -0.5 | -20.125 | 0.0 ];
+			input bvec4 in1 = [ bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, false, false, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) | bvec4(false, true, false, false) | bvec4(true, false, false, true) | bvec4(false, false, false, true) ];
+			input ivec2 in2 = [ ivec2(1, 1) | ivec2(0, -2) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, 0) | ivec2(0, -2) | ivec2(1, 1) ];
+			input bool in3 = [ false | true | false | true | true | false | true | false ];
+			input bool in4 = [ true | false | false | false | true | true | false | true ];
+			output mat3 out0 = [ mat3(-8.25, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0) | mat3(2.0, 0.0, 1.0, 0.0, 0.0, 0.0, -2.0, 1.0, 0.0) | mat3(36.8125, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat3(3.5, 1.0, 1.0, 1.0, 1.0, -32.0, 64.0, 1.0, 0.0) | mat3(1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0) | mat3(-0.5, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat3(-20.125, 1.0, 0.0, 0.0, 1.0, 0.0, -2.0, 1.0, 0.0) | mat3(0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3(in0, in1, in2, in3, in4);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_vec4_vec4_to_mat3x4
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(0.0, 0.5, 0.75, 0.825) ];
+			input vec4 in1 = [ vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(0.0, 0.5, 0.75, 0.825) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(1.0, 1.25, 1.125, 1.75) ];
+			input vec4 in2 = [ vec4(-32.0, 64.0, -51.0, 24.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(0.0, 0.5, 0.75, 0.825) ];
+			output mat3x4 out0 = [ mat3x4(-0.75, -0.0322580645161, 0.0526315789474, 0.25, -0.75, -0.0322580645161, 0.0526315789474, 0.25, -32.0, 64.0, -51.0, 24.0) | mat3x4(1.0, 1.25, 1.125, 1.75, -0.5, -2.25, -4.875, 9.0, 1.0, 1.25, 1.125, 1.75) | mat3x4(-32.0, 64.0, -51.0, 24.0, 0.0, 0.5, 0.75, 0.825, -0.75, -0.0322580645161, 0.0526315789474, 0.25) | mat3x4(-0.5, -2.25, -4.875, 9.0, -32.0, 64.0, -51.0, 24.0, -0.5, -2.25, -4.875, 9.0) | mat3x4(0.0, 0.5, 0.75, 0.825, 1.0, 1.25, 1.125, 1.75, 0.0, 0.5, 0.75, 0.825) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_ivec4_ivec4_to_mat3x4
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, -2, -4, 9) | ivec4(0, 0, 0, 0) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) ];
+			input ivec4 in1 = [ ivec4(0, -2, -4, 9) | ivec4(0, 0, 0, 0) | ivec4(-32, 64, -51, 24) | ivec4(1, 1, 1, 1) | ivec4(0, 0, 0, 0) ];
+			input ivec4 in2 = [ ivec4(0, -2, -4, 9) | ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, 0, 0, 0) | ivec4(-32, 64, -51, 24) ];
+			output mat3x4 out0 = [ mat3x4(0.0, -2.0, -4.0, 9.0, 0.0, -2.0, -4.0, 9.0, 0.0, -2.0, -4.0, 9.0) | mat3x4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3x4(-32.0, 64.0, -51.0, 24.0, -32.0, 64.0, -51.0, 24.0, 1.0, 1.0, 1.0, 1.0) | mat3x4(0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat3x4(1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, -32.0, 64.0, -51.0, 24.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_ivec2_float_float_float_int_int_bool_bool_bool_to_mat3x4
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(1.0, 1.25) | vec2(0.0, 0.5) | vec2(0.0, 0.5) | vec2(-32.0, 64.0) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) | vec2(1.0, 1.25) | vec2(-0.75, -0.0322580645161) | vec2(-0.5, -2.25) | vec2(-0.5, -2.25) ];
+			input ivec2 in1 = [ ivec2(0, 0) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(1, 1) | ivec2(1, 1) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, -2) | ivec2(0, 0) | ivec2(0, -2) ];
+			input float in2 = [ 1.0 | -8.25 | 2.0 | 3.5 | -20.125 | 36.8125 | 0.0 | 1.0 | -0.5 | 0.0 ];
+			input float in3 = [ -0.5 | 36.8125 | 1.0 | 1.0 | 0.0 | 3.5 | 2.0 | 0.0 | -8.25 | -20.125 ];
+			input float in4 = [ -8.25 | 1.0 | 1.0 | 0.0 | 2.0 | 36.8125 | 0.0 | -20.125 | 3.5 | -0.5 ];
+			input int in5 = [ 11 | 2 | 8 | 5 | 0 | -192 | 1 | -12 | 255 | -66 ];
+			input int in6 = [ -12 | 5 | 8 | 1 | 0 | 255 | 11 | -192 | -66 | 2 ];
+			input bool in7 = [ true | true | true | false | false | false | true | false | true | false ];
+			input bool in8 = [ true | true | true | false | true | false | true | false | false | false ];
+			input bool in9 = [ true | false | true | false | true | true | false | false | false | true ];
+			output mat3x4 out0 = [ mat3x4(1.0, 1.25, 0.0, 0.0, 1.0, -0.5, -8.25, 11.0, -12.0, 1.0, 1.0, 1.0) | mat3x4(0.0, 0.5, 0.0, 0.0, -8.25, 36.8125, 1.0, 2.0, 5.0, 1.0, 1.0, 0.0) | mat3x4(0.0, 0.5, -32.0, 64.0, 2.0, 1.0, 1.0, 8.0, 8.0, 1.0, 1.0, 1.0) | mat3x4(-32.0, 64.0, 1.0, 1.0, 3.5, 1.0, 0.0, 5.0, 1.0, 0.0, 0.0, 0.0) | mat3x4(-32.0, 64.0, 1.0, 1.0, -20.125, 0.0, 2.0, 0.0, 0.0, 0.0, 1.0, 1.0) | mat3x4(-0.75, -0.0322580645161, -32.0, 64.0, 36.8125, 3.5, 36.8125, -192.0, 255.0, 0.0, 0.0, 1.0) | mat3x4(1.0, 1.25, 0.0, 0.0, 0.0, 2.0, 0.0, 1.0, 11.0, 1.0, 1.0, 0.0) | mat3x4(-0.75, -0.0322580645161, 0.0, -2.0, 1.0, 0.0, -20.125, -12.0, -192.0, 0.0, 0.0, 0.0) | mat3x4(-0.5, -2.25, 0.0, 0.0, -0.5, -8.25, 3.5, 255.0, -66.0, 1.0, 0.0, 0.0) | mat3x4(-0.5, -2.25, 0.0, -2.0, 0.0, -20.125, -0.5, -66.0, 2.0, 0.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0, in1, in2, in3, in4, in5, in6, in7, in8, in9);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_vec3_bool_bvec3_float_bool_to_mat3x4
+		version 310 es
+		values
+		{
+			input bool in0 = [ false | false | true | true | true | true | false | false | false | true ];
+			input float in1 = [ 36.8125 | -8.25 | 1.0 | 0.0 | 0.0 | 1.0 | 2.0 | 3.5 | -0.5 | -20.125 ];
+			input int in2 = [ -66 | -192 | 255 | 8 | 0 | -12 | 5 | 2 | 11 | 1 ];
+			input vec3 in3 = [ vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(-32.0, 64.0, -51.0) | vec3(0.0, 0.5, 0.75) | vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(0.0, 0.5, 0.75) | vec3(-32.0, 64.0, -51.0) | vec3(1.0, 1.25, 1.125) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-0.5, -2.25, -4.875) ];
+			input bool in4 = [ false | true | true | true | false | false | true | true | false | false ];
+			input bvec3 in5 = [ bvec3(true, false, false) | bvec3(true, false, false) | bvec3(false, true, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(false, false, false) ];
+			input float in6 = [ -8.25 | 1.0 | -0.5 | 36.8125 | 0.0 | 2.0 | -20.125 | 1.0 | 0.0 | 3.5 ];
+			input bool in7 = [ true | false | false | true | true | false | true | false | false | true ];
+			output mat3x4 out0 = [ mat3x4(0.0, 36.8125, -66.0, -0.75, -0.0322580645161, 0.0526315789474, 0.0, 1.0, 0.0, 0.0, -8.25, 1.0) | mat3x4(0.0, -8.25, -192.0, -32.0, 64.0, -51.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(1.0, 1.0, 255.0, 0.0, 0.5, 0.75, 1.0, 0.0, 1.0, 0.0, -0.5, 0.0) | mat3x4(1.0, 0.0, 8.0, -0.75, -0.0322580645161, 0.0526315789474, 1.0, 0.0, 0.0, 0.0, 36.8125, 1.0) | mat3x4(1.0, 0.0, 0.0, 0.0, 0.5, 0.75, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0) | mat3x4(1.0, 1.0, -12.0, -32.0, 64.0, -51.0, 0.0, 1.0, 1.0, 1.0, 2.0, 0.0) | mat3x4(0.0, 2.0, 5.0, 1.0, 1.25, 1.125, 1.0, 1.0, 1.0, 1.0, -20.125, 1.0) | mat3x4(0.0, 3.5, 2.0, 1.0, 1.25, 1.125, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0) | mat3x4(0.0, -0.5, 11.0, -0.5, -2.25, -4.875, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat3x4(1.0, -20.125, 1.0, -0.5, -2.25, -4.875, 0.0, 0.0, 0.0, 0.0, 3.5, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0, in1, in2, in3, in4, in5, in6, in7);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bvec4_int_vec4_bool_float_to_mat3x4
+		version 310 es
+		values
+		{
+			input bool in0 = [ false | true | true | true | false | false | false | true | false | true ];
+			input bvec4 in1 = [ bvec4(false, false, false, false) | bvec4(true, false, false, true) | bvec4(false, true, false, false) | bvec4(false, false, false, true) | bvec4(true, true, true, true) | bvec4(false, false, false, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) | bvec4(false, true, false, false) | bvec4(true, false, false, true) ];
+			input int in2 = [ 255 | 2 | -192 | -12 | 11 | 1 | 0 | -66 | 8 | 5 ];
+			input vec4 in3 = [ vec4(-0.5, -2.25, -4.875, 9.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(0.0, 0.5, 0.75, 0.825) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(0.0, 0.5, 0.75, 0.825) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-0.5, -2.25, -4.875, 9.0) ];
+			input bool in4 = [ true | true | true | false | false | false | true | true | false | false ];
+			input float in5 = [ 36.8125 | 2.0 | -8.25 | 0.0 | 1.0 | 1.0 | -0.5 | 3.5 | 0.0 | -20.125 ];
+			output mat3x4 out0 = [ mat3x4(0.0, 0.0, 0.0, 0.0, 0.0, 255.0, -0.5, -2.25, -4.875, 9.0, 1.0, 36.8125) | mat3x4(1.0, 1.0, 0.0, 0.0, 1.0, 2.0, 1.0, 1.25, 1.125, 1.75, 1.0, 2.0) | mat3x4(1.0, 0.0, 1.0, 0.0, 0.0, -192.0, 1.0, 1.25, 1.125, 1.75, 1.0, -8.25) | mat3x4(1.0, 0.0, 0.0, 0.0, 1.0, -12.0, 0.0, 0.5, 0.75, 0.825, 0.0, 0.0) | mat3x4(0.0, 1.0, 1.0, 1.0, 1.0, 11.0, -0.75, -0.0322580645161, 0.0526315789474, 0.25, 0.0, 1.0) | mat3x4(0.0, 0.0, 0.0, 0.0, 1.0, 1.0, -32.0, 64.0, -51.0, 24.0, 0.0, 1.0) | mat3x4(0.0, 1.0, 1.0, 1.0, 1.0, 0.0, -32.0, 64.0, -51.0, 24.0, 1.0, -0.5) | mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, -66.0, 0.0, 0.5, 0.75, 0.825, 1.0, 3.5) | mat3x4(0.0, 0.0, 1.0, 0.0, 0.0, 8.0, -0.75, -0.0322580645161, 0.0526315789474, 0.25, 0.0, 0.0) | mat3x4(1.0, 1.0, 0.0, 0.0, 1.0, 5.0, -0.5, -2.25, -4.875, 9.0, 0.0, -20.125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_bvec4_ivec4_bool_bool_int_to_mat3x4
+		version 310 es
+		values
+		{
+			input float in0 = [ 36.8125 | 1.0 | 0.0 | 3.5 | -8.25 | -20.125 | 2.0 | 0.0 | 1.0 | -0.5 ];
+			input bvec4 in1 = [ bvec4(true, true, true, true) | bvec4(false, false, false, false) | bvec4(false, true, false, false) | bvec4(false, false, false, true) | bvec4(true, true, true, true) | bvec4(true, false, false, true) | bvec4(true, false, false, true) | bvec4(false, true, false, false) | bvec4(false, false, false, true) | bvec4(false, false, false, false) ];
+			input ivec4 in2 = [ ivec4(0, 0, 0, 0) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, -2, -4, 9) | ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, 0, 0, 0) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) ];
+			input bool in3 = [ true | false | true | true | false | true | true | false | false | false ];
+			input bool in4 = [ true | false | false | true | true | false | false | false | true | true ];
+			input int in5 = [ 1 | 5 | -12 | 8 | -192 | 2 | -66 | 255 | 11 | 0 ];
+			output mat3x4 out0 = [ mat3x4(36.8125, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0) | mat3x4(1.0, 0.0, 0.0, 0.0, 0.0, 0.0, -2.0, -4.0, 9.0, 0.0, 0.0, 5.0) | mat3x4(0.0, 0.0, 1.0, 0.0, 0.0, -32.0, 64.0, -51.0, 24.0, 1.0, 0.0, -12.0) | mat3x4(3.5, 0.0, 0.0, 0.0, 1.0, 0.0, -2.0, -4.0, 9.0, 1.0, 1.0, 8.0) | mat3x4(-8.25, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, -192.0) | mat3x4(-20.125, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 2.0) | mat3x4(2.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, -66.0) | mat3x4(0.0, 0.0, 1.0, 0.0, 0.0, -32.0, 64.0, -51.0, 24.0, 0.0, 0.0, 255.0) | mat3x4(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 11.0) | mat3x4(-0.5, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat3x4(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec3_vec3_vec2_to_mat4x2
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(1.0, 1.25, 1.125) | vec3(-32.0, 64.0, -51.0) | vec3(0.0, 0.5, 0.75) | vec3(-0.5, -2.25, -4.875) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			input vec3 in1 = [ vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(-0.5, -2.25, -4.875) | vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) ];
+			input vec2 in2 = [ vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-0.75, -0.0322580645161) | vec2(-32.0, 64.0) | vec2(0.0, 0.5) ];
+			output mat4x2 out0 = [ mat4x2(1.0, 1.25, 1.125, -32.0, 64.0, -51.0, 1.0, 1.25) | mat4x2(-32.0, 64.0, -51.0, -0.75, -0.0322580645161, 0.0526315789474, -0.5, -2.25) | mat4x2(0.0, 0.5, 0.75, -0.5, -2.25, -4.875, -0.75, -0.0322580645161) | mat4x2(-0.5, -2.25, -4.875, 0.0, 0.5, 0.75, -32.0, 64.0) | mat4x2(-0.75, -0.0322580645161, 0.0526315789474, 1.0, 1.25, 1.125, 0.0, 0.5) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec3_ivec3_ivec2_to_mat4x2
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(0, 0, 0) | ivec3(-32, 64, -51) ];
+			input ivec3 in1 = [ ivec3(0, -2, -4) | ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, 0, 0) | ivec3(-32, 64, -51) ];
+			input ivec2 in2 = [ ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) ];
+			output mat4x2 out0 = [ mat4x2(0.0, 0.0, 0.0, 0.0, -2.0, -4.0, -32.0, 64.0) | mat4x2(1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0) | mat4x2(0.0, -2.0, -4.0, 1.0, 1.0, 1.0, 0.0, 0.0) | mat4x2(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0) | mat4x2(-32.0, 64.0, -51.0, -32.0, 64.0, -51.0, 0.0, -2.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_ivec2_float_float_int_bool_to_mat4x2
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(-0.5, -2.25) | vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(0.0, 0.5) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) | vec2(-0.75, -0.0322580645161) | vec2(-32.0, 64.0) | vec2(-0.5, -2.25) | vec2(1.0, 1.25) ];
+			input ivec2 in1 = [ ivec2(0, -2) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(1, 1) | ivec2(1, 1) | ivec2(0, 0) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, 0) | ivec2(0, -2) ];
+			input float in2 = [ 3.5 | -8.25 | 2.0 | 36.8125 | -0.5 | 1.0 | 1.0 | 0.0 | -20.125 | 0.0 ];
+			input float in3 = [ 0.0 | 36.8125 | 3.5 | 1.0 | -0.5 | -8.25 | 2.0 | 0.0 | -20.125 | 1.0 ];
+			input int in4 = [ -12 | -66 | 11 | 5 | 8 | 255 | -192 | 2 | 1 | 0 ];
+			input bool in5 = [ true | true | false | true | false | true | false | true | false | false ];
+			output mat4x2 out0 = [ mat4x2(-0.5, -2.25, 0.0, -2.0, 3.5, 0.0, -12.0, 1.0) | mat4x2(0.0, 0.5, 0.0, 0.0, -8.25, 36.8125, -66.0, 1.0) | mat4x2(1.0, 1.25, -32.0, 64.0, 2.0, 3.5, 11.0, 0.0) | mat4x2(0.0, 0.5, 1.0, 1.0, 36.8125, 1.0, 5.0, 1.0) | mat4x2(-32.0, 64.0, 1.0, 1.0, -0.5, -0.5, 8.0, 0.0) | mat4x2(-0.75, -0.0322580645161, 0.0, 0.0, 1.0, -8.25, 255.0, 1.0) | mat4x2(-0.75, -0.0322580645161, -32.0, 64.0, 1.0, 2.0, -192.0, 0.0) | mat4x2(-32.0, 64.0, 0.0, 0.0, 0.0, 0.0, 2.0, 1.0) | mat4x2(-0.5, -2.25, 0.0, 0.0, -20.125, -20.125, 1.0, 0.0) | mat4x2(1.0, 1.25, 0.0, -2.0, 0.0, 1.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_vec2_bool_bvec2_to_mat4x2
+		version 310 es
+		values
+		{
+			input bool in0 = [ false | false | true | true | true | false | false | true | false | true ];
+			input float in1 = [ 36.8125 | -8.25 | 3.5 | 1.0 | 2.0 | -0.5 | 0.0 | 1.0 | -20.125 | 0.0 ];
+			input int in2 = [ -66 | 1 | -192 | 2 | 11 | 0 | 255 | 8 | 5 | -12 ];
+			input vec2 in3 = [ vec2(-0.75, -0.0322580645161) | vec2(-0.75, -0.0322580645161) | vec2(-32.0, 64.0) | vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(0.0, 0.5) | vec2(-0.5, -2.25) ];
+			input bool in4 = [ true | false | false | false | false | true | true | true | false | true ];
+			input bvec2 in5 = [ bvec2(false, false) | bvec2(false, true) | bvec2(false, false) | bvec2(true, true) | bvec2(true, false) | bvec2(false, true) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) | bvec2(true, false) ];
+			output mat4x2 out0 = [ mat4x2(0.0, 36.8125, -66.0, -0.75, -0.0322580645161, 1.0, 0.0, 0.0) | mat4x2(0.0, -8.25, 1.0, -0.75, -0.0322580645161, 0.0, 0.0, 1.0) | mat4x2(1.0, 3.5, -192.0, -32.0, 64.0, 0.0, 0.0, 0.0) | mat4x2(1.0, 1.0, 2.0, 0.0, 0.5, 0.0, 1.0, 1.0) | mat4x2(1.0, 2.0, 11.0, 1.0, 1.25, 0.0, 1.0, 0.0) | mat4x2(0.0, -0.5, 0.0, 1.0, 1.25, 1.0, 0.0, 1.0) | mat4x2(0.0, 0.0, 255.0, -0.5, -2.25, 1.0, 0.0, 0.0) | mat4x2(1.0, 1.0, 8.0, -32.0, 64.0, 1.0, 1.0, 1.0) | mat4x2(0.0, -20.125, 5.0, 0.0, 0.5, 0.0, 0.0, 0.0) | mat4x2(1.0, 0.0, -12.0, -0.5, -2.25, 1.0, 1.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bvec2_int_vec4_to_mat4x2
+		version 310 es
+		values
+		{
+			input bool in0 = [ false | false | true | false | true | false | true | true | true | false ];
+			input bvec2 in1 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, false) | bvec2(false, false) | bvec2(true, false) | bvec2(false, true) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(true, true) ];
+			input int in2 = [ -12 | 8 | 2 | 255 | 5 | -192 | 0 | 11 | 1 | -66 ];
+			input vec4 in3 = [ vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(0.0, 0.5, 0.75, 0.825) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(0.0, 0.5, 0.75, 0.825) ];
+			output mat4x2 out0 = [ mat4x2(0.0, 1.0, 0.0, -12.0, 1.0, 1.25, 1.125, 1.75) | mat4x2(0.0, 0.0, 0.0, 8.0, -0.75, -0.0322580645161, 0.0526315789474, 0.25) | mat4x2(1.0, 0.0, 0.0, 2.0, -32.0, 64.0, -51.0, 24.0) | mat4x2(0.0, 0.0, 0.0, 255.0, -0.75, -0.0322580645161, 0.0526315789474, 0.25) | mat4x2(1.0, 1.0, 0.0, 5.0, -0.5, -2.25, -4.875, 9.0) | mat4x2(0.0, 0.0, 1.0, -192.0, -0.5, -2.25, -4.875, 9.0) | mat4x2(1.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.75, 0.825) | mat4x2(1.0, 0.0, 1.0, 11.0, -32.0, 64.0, -51.0, 24.0) | mat4x2(1.0, 1.0, 1.0, 1.0, 1.0, 1.25, 1.125, 1.75) | mat4x2(0.0, 1.0, 1.0, -66.0, 0.0, 0.5, 0.75, 0.825) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_bvec4_ivec2_bool_to_mat4x2
+		version 310 es
+		values
+		{
+			input float in0 = [ 1.0 | -8.25 | -20.125 | 3.5 | -0.5 | 2.0 | 36.8125 | 0.0 ];
+			input bvec4 in1 = [ bvec4(false, false, false, true) | bvec4(true, false, false, true) | bvec4(true, true, true, true) | bvec4(false, true, false, false) | bvec4(false, false, false, false) | bvec4(true, false, false, true) | bvec4(false, true, false, false) | bvec4(false, false, false, true) ];
+			input ivec2 in2 = [ ivec2(-32, 64) | ivec2(0, -2) | ivec2(0, 0) | ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(1, 1) | ivec2(0, 0) ];
+			input bool in3 = [ true | true | false | true | true | false | false | false ];
+			output mat4x2 out0 = [ mat4x2(1.0, 0.0, 0.0, 0.0, 1.0, -32.0, 64.0, 1.0) | mat4x2(-8.25, 1.0, 0.0, 0.0, 1.0, 0.0, -2.0, 1.0) | mat4x2(-20.125, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0) | mat4x2(3.5, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4x2(-0.5, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0) | mat4x2(2.0, 1.0, 0.0, 0.0, 1.0, 0.0, -2.0, 0.0) | mat4x2(36.8125, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0) | mat4x2(0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x2(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_vec4_vec4_to_mat4x3
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(-0.5, -2.25, -4.875, 9.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(0.0, 0.5, 0.75, 0.825) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-32.0, 64.0, -51.0, 24.0) ];
+			input vec4 in1 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			input vec4 in2 = [ vec4(-32.0, 64.0, -51.0, 24.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(0.0, 0.5, 0.75, 0.825) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output mat4x3 out0 = [ mat4x3(-0.5, -2.25, -4.875, 9.0, 0.0, 0.5, 0.75, 0.825, -32.0, 64.0, -51.0, 24.0) | mat4x3(1.0, 1.25, 1.125, 1.75, -0.5, -2.25, -4.875, 9.0, 1.0, 1.25, 1.125, 1.75) | mat4x3(0.0, 0.5, 0.75, 0.825, -32.0, 64.0, -51.0, 24.0, -0.5, -2.25, -4.875, 9.0) | mat4x3(-0.75, -0.0322580645161, 0.0526315789474, 0.25, 1.0, 1.25, 1.125, 1.75, 0.0, 0.5, 0.75, 0.825) | mat4x3(-32.0, 64.0, -51.0, 24.0, -0.75, -0.0322580645161, 0.0526315789474, 0.25, -0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_ivec4_ivec4_to_mat4x3
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(1, 1, 1, 1) | ivec4(0, 0, 0, 0) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			input ivec4 in1 = [ ivec4(0, 0, 0, 0) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) | ivec4(0, -2, -4, 9) | ivec4(1, 1, 1, 1) ];
+			input ivec4 in2 = [ ivec4(-32, 64, -51, 24) | ivec4(0, -2, -4, 9) | ivec4(1, 1, 1, 1) | ivec4(0, 0, 0, 0) | ivec4(0, 0, 0, 0) ];
+			output mat4x3 out0 = [ mat4x3(1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, -32.0, 64.0, -51.0, 24.0) | mat4x3(0.0, 0.0, 0.0, 0.0, -32.0, 64.0, -51.0, 24.0, 0.0, -2.0, -4.0, 9.0) | mat4x3(0.0, -2.0, -4.0, 9.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0) | mat4x3(-32.0, 64.0, -51.0, 24.0, 0.0, -2.0, -4.0, 9.0, 0.0, 0.0, 0.0, 0.0) | mat4x3(0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0, in1, in2);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec2_ivec2_float_float_float_int_int_bool_bool_bool_to_mat4x3
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(-0.5, -2.25) | vec2(0.0, 0.5) | vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.75, -0.0322580645161) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(1.0, 1.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			input ivec2 in1 = [ ivec2(0, -2) | ivec2(-32, 64) | ivec2(1, 1) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, 0) | ivec2(0, 0) | ivec2(0, -2) | ivec2(0, 0) | ivec2(1, 1) ];
+			input float in2 = [ -20.125 | 3.5 | 2.0 | -0.5 | -8.25 | 0.0 | 1.0 | 0.0 | 1.0 | 36.8125 ];
+			input float in3 = [ 0.0 | -8.25 | 36.8125 | 1.0 | 0.0 | -20.125 | 3.5 | 2.0 | -0.5 | 1.0 ];
+			input float in4 = [ 0.0 | 1.0 | 3.5 | -20.125 | 0.0 | 36.8125 | 1.0 | -8.25 | 2.0 | -0.5 ];
+			input int in5 = [ 2 | 8 | 1 | -192 | 0 | -12 | 11 | 255 | 5 | -66 ];
+			input int in6 = [ 2 | -12 | 5 | 8 | 11 | 255 | 0 | -66 | 1 | -192 ];
+			input bool in7 = [ true | true | false | true | false | false | false | true | false | true ];
+			input bool in8 = [ true | false | false | true | true | false | true | true | false | false ];
+			input bool in9 = [ false | true | false | false | false | false | true | true | true | true ];
+			output mat4x3 out0 = [ mat4x3(-0.5, -2.25, 0.0, -2.0, -20.125, 0.0, 0.0, 2.0, 2.0, 1.0, 1.0, 0.0) | mat4x3(0.0, 0.5, -32.0, 64.0, 3.5, -8.25, 1.0, 8.0, -12.0, 1.0, 0.0, 1.0) | mat4x3(0.0, 0.5, 1.0, 1.0, 2.0, 36.8125, 3.5, 1.0, 5.0, 0.0, 0.0, 0.0) | mat4x3(1.0, 1.25, -32.0, 64.0, -0.5, 1.0, -20.125, -192.0, 8.0, 1.0, 1.0, 0.0) | mat4x3(-0.75, -0.0322580645161, 0.0, 0.0, -8.25, 0.0, 0.0, 0.0, 11.0, 0.0, 1.0, 0.0) | mat4x3(-0.5, -2.25, 0.0, 0.0, 0.0, -20.125, 36.8125, -12.0, 255.0, 0.0, 0.0, 0.0) | mat4x3(-32.0, 64.0, 0.0, 0.0, 1.0, 3.5, 1.0, 11.0, 0.0, 0.0, 1.0, 1.0) | mat4x3(1.0, 1.25, 0.0, -2.0, 0.0, 2.0, -8.25, 255.0, -66.0, 1.0, 1.0, 1.0) | mat4x3(-32.0, 64.0, 0.0, 0.0, 1.0, -0.5, 2.0, 5.0, 1.0, 0.0, 0.0, 1.0) | mat4x3(-0.75, -0.0322580645161, 1.0, 1.0, 36.8125, 1.0, -0.5, -66.0, -192.0, 1.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0, in1, in2, in3, in4, in5, in6, in7, in8, in9);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_float_int_vec3_bool_bvec3_float_bool_to_mat4x3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | false | false | true | true | true | false | false | false ];
+			input float in1 = [ -8.25 | 2.0 | 1.0 | -0.5 | 0.0 | 0.0 | 36.8125 | -20.125 | 3.5 | 1.0 ];
+			input int in2 = [ 255 | 2 | 11 | 1 | 8 | -192 | 0 | -66 | -12 | 5 ];
+			input vec3 in3 = [ vec3(-0.75, -0.0322580645161, 0.0526315789474) | vec3(0.0, 0.5, 0.75) | vec3(0.0, 0.5, 0.75) | vec3(-32.0, 64.0, -51.0) | vec3(-0.5, -2.25, -4.875) | vec3(1.0, 1.25, 1.125) | vec3(-32.0, 64.0, -51.0) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			input bool in4 = [ true | true | false | false | true | false | false | false | true | true ];
+			input bvec3 in5 = [ bvec3(false, false, false) | bvec3(false, true, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, false, false) | bvec3(true, false, false) | bvec3(true, true, true) | bvec3(true, true, true) | bvec3(false, false, false) | bvec3(false, false, false) ];
+			input float in6 = [ 1.0 | 0.0 | -0.5 | 36.8125 | 1.0 | -20.125 | 2.0 | 0.0 | -8.25 | 3.5 ];
+			input bool in7 = [ true | true | false | false | false | false | true | true | false | true ];
+			output mat4x3 out0 = [ mat4x3(1.0, -8.25, 255.0, -0.75, -0.0322580645161, 0.0526315789474, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0) | mat4x3(1.0, 2.0, 2.0, 0.0, 0.5, 0.75, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0) | mat4x3(0.0, 1.0, 11.0, 0.0, 0.5, 0.75, 0.0, 0.0, 0.0, 0.0, -0.5, 0.0) | mat4x3(0.0, -0.5, 1.0, -32.0, 64.0, -51.0, 0.0, 0.0, 1.0, 0.0, 36.8125, 0.0) | mat4x3(1.0, 0.0, 8.0, -0.5, -2.25, -4.875, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0) | mat4x3(1.0, 0.0, -192.0, 1.0, 1.25, 1.125, 0.0, 1.0, 0.0, 0.0, -20.125, 0.0) | mat4x3(1.0, 36.8125, 0.0, -32.0, 64.0, -51.0, 0.0, 1.0, 1.0, 1.0, 2.0, 1.0) | mat4x3(0.0, -20.125, -66.0, 1.0, 1.25, 1.125, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0) | mat4x3(0.0, 3.5, -12.0, -0.5, -2.25, -4.875, 1.0, 0.0, 0.0, 0.0, -8.25, 0.0) | mat4x3(0.0, 1.0, 5.0, -0.75, -0.0322580645161, 0.0526315789474, 1.0, 0.0, 0.0, 0.0, 3.5, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0, in1, in2, in3, in4, in5, in6, in7);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bvec4_int_vec4_bool_float_to_mat4x3
+		version 310 es
+		values
+		{
+			input bool in0 = [ true | true | true | false | true | false | false | true | false | false ];
+			input bvec4 in1 = [ bvec4(false, false, false, false) | bvec4(false, true, false, false) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(true, false, false, true) | bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, false, false, false) | bvec4(false, false, false, true) ];
+			input int in2 = [ 5 | 11 | 0 | -192 | -66 | 255 | 1 | -12 | 8 | 2 ];
+			input vec4 in3 = [ vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(0.0, 0.5, 0.75, 0.825) | vec4(0.0, 0.5, 0.75, 0.825) ];
+			input bool in4 = [ false | true | true | false | true | false | true | true | false | false ];
+			input float in5 = [ -20.125 | 0.0 | 1.0 | -0.5 | 3.5 | -8.25 | 0.0 | 1.0 | 2.0 | 36.8125 ];
+			output mat4x3 out0 = [ mat4x3(1.0, 0.0, 0.0, 0.0, 0.0, 5.0, -0.75, -0.0322580645161, 0.0526315789474, 0.25, 0.0, -20.125) | mat4x3(1.0, 0.0, 1.0, 0.0, 0.0, 11.0, -32.0, 64.0, -51.0, 24.0, 1.0, 0.0) | mat4x3(1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.25, 1.125, 1.75, 1.0, 1.0) | mat4x3(0.0, 1.0, 1.0, 1.0, 1.0, -192.0, -0.5, -2.25, -4.875, 9.0, 0.0, -0.5) | mat4x3(1.0, 1.0, 1.0, 1.0, 1.0, -66.0, 1.0, 1.25, 1.125, 1.75, 1.0, 3.5) | mat4x3(0.0, 1.0, 0.0, 0.0, 1.0, 255.0, -32.0, 64.0, -51.0, 24.0, 0.0, -8.25) | mat4x3(0.0, 1.0, 0.0, 0.0, 1.0, 1.0, -0.75, -0.0322580645161, 0.0526315789474, 0.25, 1.0, 0.0) | mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, -12.0, -0.5, -2.25, -4.875, 9.0, 1.0, 1.0) | mat4x3(0.0, 0.0, 0.0, 0.0, 0.0, 8.0, 0.0, 0.5, 0.75, 0.825, 0.0, 2.0) | mat4x3(0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 0.0, 0.5, 0.75, 0.825, 0.0, 36.8125) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_bvec4_ivec4_bool_bool_int_to_mat4x3
+		version 310 es
+		values
+		{
+			input float in0 = [ 36.8125 | -0.5 | 3.5 | 2.0 | 1.0 | -20.125 | 0.0 | 0.0 | -8.25 | 1.0 ];
+			input bvec4 in1 = [ bvec4(false, false, false, false) | bvec4(false, false, false, true) | bvec4(true, false, false, true) | bvec4(false, true, false, false) | bvec4(false, false, false, false) | bvec4(true, false, false, true) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(false, true, false, false) | bvec4(false, false, false, true) ];
+			input ivec4 in2 = [ ivec4(1, 1, 1, 1) | ivec4(0, 0, 0, 0) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) | ivec4(0, 0, 0, 0) | ivec4(0, 0, 0, 0) | ivec4(-32, 64, -51, 24) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(0, -2, -4, 9) ];
+			input bool in3 = [ false | true | false | true | false | false | false | true | true | true ];
+			input bool in4 = [ false | true | false | true | false | true | false | false | true | true ];
+			input int in5 = [ 2 | 1 | 8 | 11 | 255 | 5 | 0 | -66 | -192 | -12 ];
+			output mat4x3 out0 = [ mat4x3(36.8125, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 2.0) | mat4x3(-0.5, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0) | mat4x3(3.5, 1.0, 0.0, 0.0, 1.0, -32.0, 64.0, -51.0, 24.0, 0.0, 0.0, 8.0) | mat4x3(2.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 11.0) | mat4x3(1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 255.0) | mat4x3(-20.125, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 5.0) | mat4x3(0.0, 1.0, 1.0, 1.0, 1.0, -32.0, 64.0, -51.0, 24.0, 0.0, 0.0, 0.0) | mat4x3(0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, -66.0) | mat4x3(-8.25, 0.0, 1.0, 0.0, 0.0, 0.0, -2.0, -4.0, 9.0, 1.0, 1.0, -192.0) | mat4x3(1.0, 0.0, 0.0, 0.0, 1.0, 0.0, -2.0, -4.0, 9.0, 1.0, 1.0, -12.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4x3(in0, in1, in2, in3, in4, in5);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case vec4_vec4_vec4_vec4_to_mat4
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(0.0, 0.5, 0.75, 0.825) ];
+			input vec4 in1 = [ vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(0.0, 0.5, 0.75, 0.825) ];
+			input vec4 in2 = [ vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(0.0, 0.5, 0.75, 0.825) ];
+			input vec4 in3 = [ vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(0.0, 0.5, 0.75, 0.825) | vec4(-0.5, -2.25, -4.875, 9.0) ];
+			output mat4 out0 = [ mat4(-0.75, -0.0322580645161, 0.0526315789474, 0.25, -0.75, -0.0322580645161, 0.0526315789474, 0.25, -32.0, 64.0, -51.0, 24.0, -32.0, 64.0, -51.0, 24.0) | mat4(-32.0, 64.0, -51.0, 24.0, -32.0, 64.0, -51.0, 24.0, -0.75, -0.0322580645161, 0.0526315789474, 0.25, -0.75, -0.0322580645161, 0.0526315789474, 0.25) | mat4(-0.5, -2.25, -4.875, 9.0, -0.5, -2.25, -4.875, 9.0, -0.5, -2.25, -4.875, 9.0, 1.0, 1.25, 1.125, 1.75) | mat4(1.0, 1.25, 1.125, 1.75, 1.0, 1.25, 1.125, 1.75, 1.0, 1.25, 1.125, 1.75, 0.0, 0.5, 0.75, 0.825) | mat4(0.0, 0.5, 0.75, 0.825, 0.0, 0.5, 0.75, 0.825, 0.0, 0.5, 0.75, 0.825, -0.5, -2.25, -4.875, 9.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case ivec4_ivec4_ivec4_ivec4_to_mat4
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(-32, 64, -51, 24) | ivec4(0, -2, -4, 9) | ivec4(1, 1, 1, 1) | ivec4(0, 0, 0, 0) | ivec4(0, 0, 0, 0) ];
+			input ivec4 in1 = [ ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(0, 0, 0, 0) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			input ivec4 in2 = [ ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) | ivec4(0, -2, -4, 9) | ivec4(1, 1, 1, 1) | ivec4(0, 0, 0, 0) ];
+			input ivec4 in3 = [ ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(0, 0, 0, 0) | ivec4(0, 0, 0, 0) | ivec4(-32, 64, -51, 24) ];
+			output mat4 out0 = [ mat4(-32.0, 64.0, -51.0, 24.0, 1.0, 1.0, 1.0, 1.0, -32.0, 64.0, -51.0, 24.0, 1.0, 1.0, 1.0, 1.0) | mat4(0.0, -2.0, -4.0, 9.0, 0.0, -2.0, -4.0, 9.0, 0.0, 0.0, 0.0, 0.0, 0.0, -2.0, -4.0, 9.0) | mat4(1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, -2.0, -4.0, 9.0, 0.0, 0.0, 0.0, 0.0) | mat4(0.0, 0.0, 0.0, 0.0, -32.0, 64.0, -51.0, 24.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -32.0, 64.0, -51.0, 24.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bvec4_bvec4_bvec4_bvec4_to_mat4
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(false, false, false, true) | bvec4(false, false, false, false) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(true, false, false, true) ];
+			input bvec4 in1 = [ bvec4(false, true, false, false) | bvec4(false, false, false, true) | bvec4(true, false, false, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			input bvec4 in2 = [ bvec4(false, true, false, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, true) | bvec4(true, false, false, true) ];
+			input bvec4 in3 = [ bvec4(true, true, true, true) | bvec4(false, false, false, true) | bvec4(false, false, false, false) | bvec4(false, true, false, false) | bvec4(true, false, false, true) ];
+			output mat4 out0 = [ mat4(0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0) | mat4(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0) | mat4(0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0) | mat4(1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0) | mat4(1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0, in1, in2, in3);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_ivec3_bvec3_vec4_ivec2_float_vec2_to_mat4
+		version 310 es
+		values
+		{
+			input float in0 = [ 2.0 | 1.0 | 3.5 | 0.0 | -20.125 | 36.8125 | -0.5 | -8.25 ];
+			input ivec3 in1 = [ ivec3(0, 0, 0) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(0, 0, 0) | ivec3(0, -2, -4) ];
+			input bvec3 in2 = [ bvec3(true, false, false) | bvec3(true, true, true) | bvec3(false, false, false) | bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(false, false, false) | bvec3(false, true, false) ];
+			input vec4 in3 = [ vec4(-0.5, -2.25, -4.875, 9.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(0.0, 0.5, 0.75, 0.825) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(0.0, 0.5, 0.75, 0.825) ];
+			input ivec2 in4 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(0, 0) | ivec2(1, 1) | ivec2(-32, 64) | ivec2(0, 0) | ivec2(0, -2) ];
+			input float in5 = [ 2.0 | 3.5 | 36.8125 | -8.25 | 0.0 | -20.125 | 1.0 | -0.5 ];
+			input vec2 in6 = [ vec2(0.0, 0.5) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(0.0, 0.5) | vec2(-0.75, -0.0322580645161) | vec2(-0.5, -2.25) | vec2(1.0, 1.25) | vec2(1.0, 1.25) ];
+			output mat4 out0 = [ mat4(2.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, -0.5, -2.25, -4.875, 9.0, 0.0, 0.0, 2.0, 0.0, 0.5) | mat4(1.0, -32.0, 64.0, -51.0, 1.0, 1.0, 1.0, 1.0, 1.25, 1.125, 1.75, 1.0, 1.0, 3.5, -0.5, -2.25) | mat4(3.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.75, 0.825, 0.0, -2.0, 36.8125, -32.0, 64.0) | mat4(0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, -0.5, -2.25, -4.875, 9.0, 0.0, 0.0, -8.25, 0.0, 0.5) | mat4(-20.125, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.25, 1.125, 1.75, 1.0, 1.0, 0.0, -0.75, -0.0322580645161) | mat4(36.8125, 0.0, -2.0, -4.0, 0.0, 1.0, 0.0, -0.75, -0.0322580645161, 0.0526315789474, 0.25, -32.0, 64.0, -20.125, -0.5, -2.25) | mat4(-0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, -32.0, 64.0, -51.0, 24.0, 0.0, 0.0, 1.0, 1.0, 1.25) | mat4(-8.25, 0.0, -2.0, -4.0, 0.0, 1.0, 0.0, 0.0, 0.5, 0.75, 0.825, 0.0, -2.0, -0.5, 1.0, 1.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = mat4(in0, in1, in2, in3, in4, in5, in6);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+end # matrix_combine
diff --git a/external/vulkancts/data/vulkan/glsl/es310/functions.test b/external/vulkancts/data/vulkan/glsl/es310/functions.test
new file mode 100644
index 0000000..d3f34b3
--- /dev/null
+++ b/external/vulkancts/data/vulkan/glsl/es310/functions.test
@@ -0,0 +1,3134 @@
+# Tests todo:
+# - inout with varyings, attributes, uniforms (and arrays of 'em)
+# - inout with arrays, array elements
+# - inout by-value semantics (arrays & elements & structs)
+
+# Done:
+# - control flow: return, return in loop, etc.
+
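+# Each case below gives per-iteration input/output values (one iteration per
+# '|'-separated entry) and a single shader body; "both" means the same source is
+# built as both a vertex- and a fragment-shader variant, with ${DECLARATIONS},
+# ${SETUP} and ${OUTPUT} expanded by the test framework.
+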
+group datatypes "Function Parameter Data Types"
+
+	case float_float
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				return -a;
+			}
+
+			void main()
+			{
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_vec2
+		version 310 es
+		values
+		{
+			input vec2 in0		= [ vec2(0.0, 1.0) | vec2(2.0, 2.5) ];
+			output float out0	= [ -1.0 | -4.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (vec2 a)
+			{
+				return -(a.x + a.y);
+			}
+
+			void main()
+			{
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_vec3
+		version 310 es
+		values
+		{
+			input vec3 in0		= [ vec3(0.0, 1.0, -2.0) | vec3(2.0, 2.5, -4.0) ];
+			output float out0	= [ 1.0 | -0.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (vec3 a)
+			{
+				return -(a.x + a.y + a.z);
+			}
+
+			void main()
+			{
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_vec4
+		version 310 es
+		values
+		{
+			input vec4 in0		= [ vec4(0.0, 1.0, -2.0, 0.5) | vec4(2.0, 2.5, 4.0, -7.0) ];
+			output float out0	= [ 0.5 | -1.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (vec4 a)
+			{
+				return -(a.x + a.y + a.z + a.w);
+			}
+
+			void main()
+			{
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_mat2
+		version 310 es
+		values
+		{
+			input mat2 in0		= [ mat2(0.0, 1.0, -2.0, 0.5) | mat2(2.0, 2.5, 4.0, -7.0) ];
+			output float out0	= [ 0.5 | -1.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (mat2 a)
+			{
+				return -(a[0][0] + a[0][1] + a[1][0] + a[1][1]);
+			}
+
+			void main()
+			{
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_mat3
+		version 310 es
+		values
+		{
+			input mat3 in0		= [ mat3(0.0, 1.0, -2.0, 0.5, 1.0, -1.0, 2.0, 4.0, -1.0) | mat3(2.0, 2.5, 4.0, -7.0, 2.5, 3.0, 0.5, -3.5, 1.0) ];
+			output float out0	= [ -4.5 | -5.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (mat3 a)
+			{
+				return -(a[0][0] + a[0][1] + a[0][2] + a[1][0] + a[1][1] + a[1][2] + a[2][0] + a[2][1] + a[2][2]);
+			}
+
+			void main()
+			{
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_mat4
+		version 310 es
+		values
+		{
+			input mat4 in0		= [ mat4(0.0, 1.0, -2.0, 0.5, 1.0, -1.0, 2.0, 4.0, -1.0, 1.0, 1.0, 1.0, 1.0, 1.0, -2.0, -2.0) | mat4(2.0, 2.5, 4.0, -7.0, 2.5, 3.0, 0.5, -3.5, 1.0, 0.0, 2.0, -1.0, 1.0, 0.0, -1.0, 3.0) ];
+			output float out0	= [ -5.5 | -9.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (mat4 a)
+			{
+				return -(a[0][0] + a[0][1] + a[0][2] + a[0][3] + a[1][0] + a[1][1] + a[1][2] + a[1][3] + a[2][0] + a[2][1] + a[2][2] + a[2][3] + a[3][0] + a[3][1] + a[3][2] + a[3][3]);
+			}
+
+			void main()
+			{
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_int
+		version 310 es
+		values
+		{
+			input int in0		= [ -1 | 0 | 1 | 4 ];
+			output int out0		= [ 1 | 0 | -1 | -4 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			int func (int a)
+			{
+				return -a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_ivec2
+		version 310 es
+		values
+		{
+			input ivec2 in0		= [ ivec2(-1, 0) | ivec2(1, 4) ];
+			output int out0		= [ 1 | -5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			int func (ivec2 a)
+			{
+				return -(a.x + a.y);
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_ivec3
+		version 310 es
+		values
+		{
+			input ivec3 in0		= [ ivec3(-1, 0, 2) | ivec3(1, 4, -8) ];
+			output int out0		= [ -1 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			int func (ivec3 a)
+			{
+				return -(a.x + a.y + a.z);
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case int_ivec4
+		version 310 es
+		values
+		{
+			input ivec4 in0		= [ ivec4(-1, 0, 2, 2) | ivec4(1, 4, -8, 2) ];
+			output int out0		= [ -3 | 1 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			int func (ivec4 a)
+			{
+				return -(a.x + a.y + a.z + a.w);
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uint
+		version 310 es
+		values
+		{
+			input uint in0		= [ 1 | 0 | 2 | 4 ];
+			output uint out0	= [ 1 | 0 | 4 | 16 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			uint func (uint a)
+			{
+				return a*a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uvec2
+		version 310 es
+		values
+		{
+			input uvec2 in0		= [ uvec2(1, 0) | uvec2(2, 4) ];
+			output uint out0	= [ 1 | 6 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			uint func (uvec2 a)
+			{
+				return (a.x + a.y);
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uvec3
+		version 310 es
+		values
+		{
+			input uvec3 in0		= [ uvec3(1, 0, 2) | uvec3(1, 4, 8) ];
+			output uint out0	= [ 3 | 13 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			uint func (uvec3 a)
+			{
+				return (a.x + a.y + a.z);
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case uint_uvec4
+		version 310 es
+		values
+		{
+			input uvec4 in0		= [ uvec4(1, 0, 2, 2) | uvec4(1, 4, 8, 2) ];
+			output uint out0	= [ 5 | 15 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			uint func (uvec4 a)
+			{
+				return (a.x + a.y + a.z + a.w);
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bool
+		version 310 es
+		values
+		{
+			input bool in0		= [ true | false ];
+			output bool out0	= [ false | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			bool func (bool a)
+			{
+				return !a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bvec2
+		version 310 es
+		values
+		{
+			input bvec2 in0		= [ bvec2(true, true) | bvec2(false, true) ];
+			output bool out0	= [ false | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			bool func (bvec2 a)
+			{
+				return !(a.x == a.y);
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bvec3
+		version 310 es
+		values
+		{
+			input bvec3 in0		= [ bvec3(true, true, false) | bvec3(true, false, false) ];
+			output bool out0	= [ false | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			bool func (bvec3 a)
+			{
+				return (a.x == a.y) == a.z;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case bool_bvec4
+		version 310 es
+		values
+		{
+			input bvec4 in0		= [ bvec4(true, true, true, false) | bvec4(false, false, true, true) | bvec4(true, false, false, true) ];
+			output bool out0	= [ false | true | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			bool func (bvec4 a)
+			{
+				return ((a.x == a.y) == (a.z == a.w));
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mat2
+		version 310 es
+		values
+		{
+			input mat2 in0	= [ mat2(-2.0, 0.5, -1.0, 1.0) | mat2(1.0, -3.5, -3.5, 2.5) | mat2(-2.0, -2.0, 3.5, 0.0) ];
+			output mat2 out0	= [ mat2(4.0, -1.0, 2.0, -2.0) | mat2(-2.0, 7.0, 7.0, -5.0) | mat2(4.0, 4.0, -7.0, -0.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat2 func (mat2 a)
+			{
+				return -2.0*a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+	case mat2x3
+		version 310 es
+		values
+		{
+			input mat2x3 in0	= [ mat2x3(2.5, 0.0, 1.0, -2.5, 1.0, 3.0) | mat2x3(0.0, 2.0, 1.5, -3.5, 2.0, 0.5) | mat2x3(-1.5, -3.5, 2.5, 0.0, 1.5, 3.0) ];
+			output mat2x3 out0	= [ mat2x3(-5.0, -0.0, -2.0, 5.0, -2.0, -6.0) | mat2x3(-0.0, -4.0, -3.0, 7.0, -4.0, -1.0) | mat2x3(3.0, 7.0, -5.0, -0.0, -3.0, -6.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat2x3 func (mat2x3 a)
+			{
+				return -2.0*a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+	case mat2x4
+		version 310 es
+		values
+		{
+			input mat2x4 in0	= [ mat2x4(1.5, 3.0, -1.0, 2.5, -0.5, 3.5, 3.0, -3.0) | mat2x4(-2.5, -2.0, 3.5, -0.5, 1.0, -1.5, 0.0, -1.0) | mat2x4(-1.0, 0.5, 0.5, 3.0, 1.5, 3.0, 2.5, 3.5) ];
+			output mat2x4 out0	= [ mat2x4(-3.0, -6.0, 2.0, -5.0, 1.0, -7.0, -6.0, 6.0) | mat2x4(5.0, 4.0, -7.0, 1.0, -2.0, 3.0, -0.0, 2.0) | mat2x4(2.0, -1.0, -1.0, -6.0, -3.0, -6.0, -5.0, -7.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat2x4 func (mat2x4 a)
+			{
+				return -2.0*a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+	case mat3x2
+		version 310 es
+		values
+		{
+			input mat3x2 in0	= [ mat3x2(1.5, -2.5, 2.5, 3.5, 3.0, 0.5) | mat3x2(1.5, -2.0, 2.5, 0.5, -1.5, -3.5) | mat3x2(2.5, 3.5, -3.0, 2.5, -0.5, -2.5) ];
+			output mat3x2 out0	= [ mat3x2(-3.0, 5.0, -5.0, -7.0, -6.0, -1.0) | mat3x2(-3.0, 4.0, -5.0, -1.0, 3.0, 7.0) | mat3x2(-5.0, -7.0, 6.0, -5.0, 1.0, 5.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat3x2 func (mat3x2 a)
+			{
+				return -2.0*a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+	case mat3
+		version 310 es
+		values
+		{
+			input mat3 in0	= [ mat3(-1.5, 2.0, 3.0, -3.5, 1.0, -3.5, 1.5, -1.5, 3.0) | mat3(3.5, 0.0, 3.5, -1.5, -3.0, 0.5, -3.5, -2.5, -0.5) | mat3(1.0, -2.5, -3.5, 3.0, -1.5, 3.5, 3.0, -1.0, -0.5) ];
+			output mat3 out0	= [ mat3(3.0, -4.0, -6.0, 7.0, -2.0, 7.0, -3.0, 3.0, -6.0) | mat3(-7.0, -0.0, -7.0, 3.0, 6.0, -1.0, 7.0, 5.0, 1.0) | mat3(-2.0, 5.0, 7.0, -6.0, 3.0, -7.0, -6.0, 2.0, 1.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat3 func (mat3 a)
+			{
+				return -2.0*a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+	case mat3x4
+		version 310 es
+		values
+		{
+			input mat3x4 in0	= [ mat3x4(0.0, 1.0, 0.5, 0.5, 1.0, 3.5, 0.0, -0.5, 1.5, -2.0, -1.5, 3.5) | mat3x4(0.0, 0.5, -3.5, -0.5, 0.5, -3.5, 1.0, 1.0, -3.5, 1.0, -0.5, 1.5) | mat3x4(-1.0, 1.5, 2.0, -3.5, -3.5, 1.5, 3.5, -2.0, -0.5, 0.5, -1.5, -1.0) ];
+			output mat3x4 out0	= [ mat3x4(-0.0, -2.0, -1.0, -1.0, -2.0, -7.0, -0.0, 1.0, -3.0, 4.0, 3.0, -7.0) | mat3x4(-0.0, -1.0, 7.0, 1.0, -1.0, 7.0, -2.0, -2.0, 7.0, -2.0, 1.0, -3.0) | mat3x4(2.0, -3.0, -4.0, 7.0, 7.0, -3.0, -7.0, 4.0, 1.0, -1.0, 3.0, 2.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat3x4 func (mat3x4 a)
+			{
+				return -2.0*a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+	case mat4x2
+		version 310 es
+		values
+		{
+			input mat4x2 in0	= [ mat4x2(-1.5, -1.0, 0.5, -1.5, -1.0, 2.0, -3.5, 0.5) | mat4x2(2.0, -1.5, -2.0, 2.5, -2.0, -2.5, -0.5, 1.5) | mat4x2(-3.0, -1.5, -1.0, 2.5, -0.5, 2.5, -2.5, -1.0) ];
+			output mat4x2 out0	= [ mat4x2(3.0, 2.0, -1.0, 3.0, 2.0, -4.0, 7.0, -1.0) | mat4x2(-4.0, 3.0, 4.0, -5.0, 4.0, 5.0, 1.0, -3.0) | mat4x2(6.0, 3.0, 2.0, -5.0, 1.0, -5.0, 5.0, 2.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat4x2 func (mat4x2 a)
+			{
+				return -2.0*a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+	case mat4x3
+		version 310 es
+		values
+		{
+			input mat4x3 in0	= [ mat4x3(1.0, 3.0, -0.5, -2.0, -3.0, 0.0, -2.5, 2.5, 2.5, -2.5, -1.5, 2.5) | mat4x3(1.0, 2.5, -1.0, -3.0, -1.5, 2.0, -1.5, -1.0, -0.5, -0.5, -0.5, 3.0) | mat4x3(-2.5, -3.5, 3.5, 3.0, 3.5, -0.5, 3.5, 3.0, -2.0, 2.0, 2.5, 1.0) ];
+			output mat4x3 out0	= [ mat4x3(-2.0, -6.0, 1.0, 4.0, 6.0, -0.0, 5.0, -5.0, -5.0, 5.0, 3.0, -5.0) | mat4x3(-2.0, -5.0, 2.0, 6.0, 3.0, -4.0, 3.0, 2.0, 1.0, 1.0, 1.0, -6.0) | mat4x3(5.0, 7.0, -7.0, -6.0, -7.0, 1.0, -7.0, -6.0, 4.0, -4.0, -5.0, -2.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat4x3 func (mat4x3 a)
+			{
+				return -2.0*a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+
+	case mat4
+		version 310 es
+		values
+		{
+			input mat4 in0	= [ mat4(0.0, -1.5, -1.0, -2.0, -3.0, 0.5, -1.5, 2.5, -3.5, 3.0, 1.5, 3.0, 3.0, 3.0, 0.5, -3.5) | mat4(2.0, -2.5, -1.5, 1.0, 0.0, -0.5, 3.5, 1.0, -1.0, -2.0, 2.5, 0.0, 2.0, -1.0, -2.5, 0.5) | mat4(2.5, -2.5, 2.0, 3.0, 2.5, 2.5, -3.5, 1.0, 2.5, -3.5, -1.5, -1.5, 0.0, -0.5, 0.0, 2.0) ];
+			output mat4 out0	= [ mat4(-0.0, 3.0, 2.0, 4.0, 6.0, -1.0, 3.0, -5.0, 7.0, -6.0, -3.0, -6.0, -6.0, -6.0, -1.0, 7.0) | mat4(-4.0, 5.0, 3.0, -2.0, -0.0, 1.0, -7.0, -2.0, 2.0, 4.0, -5.0, -0.0, -4.0, 2.0, 5.0, -1.0) | mat4(-5.0, 5.0, -4.0, -6.0, -5.0, -5.0, 7.0, -2.0, -5.0, 7.0, 3.0, 3.0, -0.0, 1.0, -0.0, -4.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			mat4 func (mat4 a)
+			{
+				return -2.0*a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case float_struct
+		version 310 es
+		values
+		{
+			input vec3 in0		= [ vec3(0.0, 1.0, -2.0) | vec3(2.0, 2.5, -4.0) ];
+			output float out0	= [ 1.0 | -0.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			struct Pos { float a, b, c; };
+
+			float func (Pos p)
+			{
+				return -(p.a + p.b + p.c);
+			}
+
+			void main()
+			{
+				Pos p = Pos(in0.x, in0.y, in0.z);
+				out0 = func(p);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case struct_struct
+		version 310 es
+		values
+		{
+			input vec3 in0		= [ vec3(0.0, 1.0, -2.0) | vec3(2.0, 2.5, -4.0) ];
+			output float out0	= [ 1.0 | -0.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			struct Pos { float a, b, c; };
+
+			Pos func (Pos p)
+			{
+				return Pos(-p.a, -p.b, -p.c);
+			}
+
+			void main()
+			{
+				Pos p = Pos(in0.x, in0.y, in0.z);
+				p = func(p);
+				out0 = p.a + p.b + p.c;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case struct_nested_struct
+		version 310 es
+		values
+		{
+			input vec3 in0		= [ vec3(0.0, 1.0, -2.0) | vec3(2.0, 2.5, -4.0) ];
+			output float out0	= [ 1.0 | -0.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			struct Pos { float a, b, c; };
+			struct Line { Pos start, end; };
+
+			Line func (Pos p)
+			{
+				return Line(p, Pos(-p.a, -p.b, -p.c));
+			}
+
+			float sum (Pos p)
+			{
+				return (p.a + p.b + p.c);
+			}
+
+			void main()
+			{
+				Pos p = Pos(in0.x, in0.y, in0.z);
+				Line line = func(p);
+				out0 = sum(line.start) + (2.0 * sum(line.end));
+				${OUTPUT}
+			}
+		""
+	end
+
+
+end # datatypes
+
+group qualifiers "Function Parameter Qualifiers"
+
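+	# GLSL function parameters use call-by-value-return semantics: "in" arguments
+	# are copied in at the call, "out" arguments are copied back to the caller on
+	# return, and "inout" does both. The cases below verify that writes to an "in"
+	# parameter do not affect the caller's variable, while "out"/"inout" writes do.
+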
+	case in_float
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			float func (in float a)
+			{
+				a = -a;
+				return 2.0 * a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				float f = in0;
+				float g = func(f);
+				out0 = f + g;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case out_float
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			void func (out float a)
+			{
+				a = -1.0;
+			}
+
+			void main()
+			{
+				${SETUP}
+				float f = 1.0;
+				func(f);
+				out0 = f * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case inout_float
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			void func (inout float a)
+			{
+				a = -a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				float f = 1.0;
+				func(f);
+				out0 = f * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case in_lowp_float
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			float func (in lowp float a)
+			{
+				a = -a;
+				return 2.0 * a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				float f = in0;
+				float g = func(f);
+				out0 = f + g;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case out_lowp_float
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			void func (out lowp float a)
+			{
+				a = -1.0;
+			}
+
+			void main()
+			{
+				${SETUP}
+				float f = 1.0;
+				func(f);
+				out0 = f * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case inout_lowp_float
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			void func (inout lowp float a)
+			{
+				a = -a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				float f = 1.0;
+				func(f);
+				out0 = f * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case in_highp_float
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			float func (in highp float a)
+			{
+				a = -a;
+				return 2.0 * a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				float f = in0;
+				float g = func(f);
+				out0 = f + g;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case out_highp_float
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			void func (out highp float a)
+			{
+				a = -1.0;
+			}
+
+			void main()
+			{
+				${SETUP}
+				float f = 1.0;
+				func(f);
+				out0 = f * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case inout_highp_float
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			void func (inout highp float a)
+			{
+				a = -a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				float f = 1.0;
+				func(f);
+				out0 = f * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case const_float
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			float func (const float a)
+			{
+				float b = -a;
+				return 2.0 * b;
+			}
+
+			void main()
+			{
+				${SETUP}
+				float f = in0;
+				float g = func(f);
+				out0 = f + g;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case const_in_float
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			float func (const in float a)
+			{
+				float b = -a;
+				return 2.0 * b;
+			}
+
+			void main()
+			{
+				${SETUP}
+				float f = in0;
+				float g = func(f);
+				out0 = f + g;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case in_int
+		version 310 es
+		values
+		{
+			input int in0		= [ 0 | 1 | -2 | 4 ];
+			output int out0		= [ 0 | -1 | 2 | -4 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			int func (in int a)
+			{
+				a = -a;
+				return 2 * a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				int f = in0;
+				int g = func(f);
+				out0 = f + g;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case out_int
+		version 310 es
+		values
+		{
+			input int in0		= [ 0 | 1 | -2 | 6 ];
+			output int out0		= [ 0 | -1 | 2 | -6 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			void func (out int a)
+			{
+				a = -1;
+			}
+
+			void main()
+			{
+				${SETUP}
+				int f = 1;
+				func(f);
+				out0 = f * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case inout_int
+		version 310 es
+		values
+		{
+			input int in0		= [ 0 | 1 | -2 | 6 ];
+			output int out0		= [ 0 | -1 | 2 | -6 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			void func (inout int a)
+			{
+				a = -a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				int f = 1;
+				func(f);
+				out0 = f * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case in_lowp_int
+		version 310 es
+		values
+		{
+			input int in0		= [ 0 | 1 | -2 | 4 ];
+			output int out0		= [ 0 | -1 | 2 | -4 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			int func (in lowp int a)
+			{
+				a = -a;
+				return 2 * a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				int f = in0;
+				int g = func(f);
+				out0 = f + g;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case out_lowp_int
+		version 310 es
+		values
+		{
+			input int in0		= [ 0 | 1 | -2 | 6 ];
+			output int out0		= [ 0 | -1 | 2 | -6 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			void func (out lowp int a)
+			{
+				a = -1;
+			}
+
+			void main()
+			{
+				${SETUP}
+				int f = 1;
+				func(f);
+				out0 = f * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case inout_lowp_int
+		version 310 es
+		values
+		{
+			input int in0		= [ 0 | 1 | -2 | 6 ];
+			output int out0		= [ 0 | -1 | 2 | -6 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			void func (inout lowp int a)
+			{
+				a = -a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				int f = 1;
+				func(f);
+				out0 = f * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case in_highp_int
+		version 310 es
+		values
+		{
+			input int in0		= [ 0 | 1 | -2 | 4 ];
+			output int out0		= [ 0 | -1 | 2 | -4 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			int func (in highp int a)
+			{
+				a = -a;
+				return 2 * a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				int f = in0;
+				int g = func(f);
+				out0 = f + g;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case out_highp_int
+		version 310 es
+		values
+		{
+			input int in0		= [ 0 | 1 | -2 | 6 ];
+			output int out0		= [ 0 | -1 | 2 | -6 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			void func (out highp int a)
+			{
+				a = -1;
+			}
+
+			void main()
+			{
+				${SETUP}
+				int f = 1;
+				func(f);
+				out0 = f * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case inout_highp_int
+		version 310 es
+		values
+		{
+			input int in0		= [ 0 | 1 | -2 | 6 ];
+			output int out0		= [ 0 | -1 | 2 | -6 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			void func (inout highp int a)
+			{
+				a = -a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				int f = 1;
+				func(f);
+				out0 = f * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case const_int
+		version 310 es
+		values
+		{
+			input int in0		= [ 0 | 1 | -2 | 4 ];
+			output int out0		= [ 0 | -1 | 2 | -4 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			int func (const int a)
+			{
+				int b = -a;
+				return 2 * b;
+			}
+
+			void main()
+			{
+				${SETUP}
+				int f = in0;
+				int g = func(f);
+				out0 = f + g;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case const_in_int
+		version 310 es
+		values
+		{
+			input int in0		= [ 0 | 1 | -2 | 4 ];
+			output int out0		= [ 0 | -1 | 2 | -4 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			int func (const in int a)
+			{
+				int b = -a;
+				return 2 * b;
+			}
+
+			void main()
+			{
+				${SETUP}
+				int f = in0;
+				int g = func(f);
+				out0 = f + g;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case in_bool
+		version 310 es
+		values
+		{
+			input bool in0		= [ true | false ];
+			output bool out0	= [ true | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			bool func (in bool a)
+			{
+				a = !a;
+				return a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				bool f = in0;
+				bool g = func(f);
+				out0 = (f != g);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case out_bool
+		version 310 es
+		values
+		{
+			input bool in0		= [ true | false ];
+			output bool out0	= [ false | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void func (out bool a)
+			{
+				a = false;
+			}
+
+			void main()
+			{
+				${SETUP}
+				bool f = true;
+				func(f);
+				out0 = (in0 == f);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case inout_bool
+		version 310 es
+		values
+		{
+			input bool in0		= [ true | false ];
+			output bool out0	= [ false | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			void func (inout bool a)
+			{
+				a = !a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				bool f = true;
+				func(f);
+				out0 = (in0 == f);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case const_bool
+		version 310 es
+		values
+		{
+			input bool in0		= [ true | false ];
+			output bool out0	= [ true | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			bool func (const bool a)
+			{
+				bool b = !a;
+				return b;
+			}
+
+			void main()
+			{
+				${SETUP}
+				bool f = in0;
+				bool g = func(f);
+				out0 = (f != g);
+				${OUTPUT}
+			}
+		""
+	end
+
+end # qualifiers
+
+group declarations "Function Declarations"
+
+	case basic
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (void);
+
+			float func (void)
+			{
+				return -1.0;
+			}
+
+			void main()
+			{
+				out0 = func() * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case basic_arg
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float f);
+
+			float func (float f)
+			{
+				return -f;
+			}
+
+			void main()
+			{
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case define_after_use
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (void);
+
+			void main()
+			{
+				out0 = func() * in0;
+				${OUTPUT}
+			}
+
+			float func (void)
+			{
+				return -1.0;
+			}
+		""
+	end
+
+	case double_declare
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (void);
+
+			float func (void);
+
+			float func (void)
+			{
+				return -1.0;
+			}
+
+			void main()
+			{
+				out0 = func() * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case declare_after_define
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (void)
+			{
+				return -1.0;
+			}
+
+			float func (void);
+
+			void main()
+			{
+				out0 = func() * in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case void_vs_no_void
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func ();
+
+			void main()
+			{
+				out0 = func() * in0;
+				${OUTPUT}
+			}
+
+			float func (void)
+			{
+				return -1.0;
+			}
+		""
+	end
+
+	case in_vs_no_in
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float f);
+
+			void main()
+			{
+				out0 = func(in0);
+				${OUTPUT}
+			}
+
+			float func (in float f)
+			{
+				return -f;
+			}
+		""
+	end
+
+	case default_vs_explicit_precision
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float f);
+
+			void main()
+			{
+				out0 = func(in0);
+				${OUTPUT}
+			}
+
+			float func (mediump float f)
+			{
+				return -f;
+			}
+		""
+	end
+
+
+end # declarations
+
+group overloading "Function Overloading"
+
+	case user_func_arg_type_simple
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				return -a;
+			}
+
+			int func (int a)
+			{
+				return -a;
+			}
+
+			void main()
+			{
+				out0 = func(in0) * float(func(-1));
+				${OUTPUT}
+			}
+		""
+	end
+
+	case user_func_arg_float_types
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			float func (float a) { return -a; }
+			vec2 func (vec2 a) { return a.yx; }
+			vec3 func (vec3 a) { return a.xxx; }
+			vec4 func (vec4 a) { return a.wwww; }
+
+			void main()
+			{
+				out0 = func(func(func(func(vec4(in0)).xyz).xy).x);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case user_func_arg_int_types
+		version 310 es
+		values
+		{
+			input int in0		= [ 0 | 1 | -2 | 6 ];
+			output int out0		= [ 0 | -1 | 2 | -6 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			int func (int a) { return -a; }
+			ivec2 func (ivec2 a) { return a.yx; }
+			ivec3 func (ivec3 a) { return a.xxx; }
+			ivec4 func (ivec4 a) { return a.wwww; }
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(func(func(func(ivec4(in0)).xyz).xy).x);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case user_func_arg_bool_types
+		version 310 es
+		values
+		{
+			input bool in0		= [ true | false ];
+			output bool out0	= [ false | true ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			bool func (bool a) { return !a; }
+			bvec2 func (bvec2 a) { return a.yx; }
+			bvec3 func (bvec3 a) { return a.xxx; }
+			bvec4 func (bvec4 a) { return a.wwww; }
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(func(func(func(bvec4(in0)).xyz).xy).x);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case user_func_arg_basic_types
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			float func (float a) { return -a; }
+			vec2 func (vec2 a) { return a.yx; }
+			vec3 func (vec3 a) { return a.xxx; }
+			vec4 func (vec4 a) { return a.wwww; }
+			int func (int a) { return -a; }
+			ivec2 func (ivec2 a) { return a.yx; }
+			ivec3 func (ivec3 a) { return a.xxx; }
+			ivec4 func (ivec4 a) { return a.wwww; }
+			bool func (bool a) { return !a; }
+			bvec2 func (bvec2 a) { return a.yx; }
+			bvec3 func (bvec3 a) { return a.xxx; }
+			bvec4 func (bvec4 a) { return a.wwww; }
+
+			void main()
+			{
+				${SETUP}
+				if (func(func(bvec4(false)).x))
+					out0 = func(in0) * float(func(-1));
+				else
+					out0 = float(func(func(ivec4(func(func(func(vec4(0.5)).xyz).xy).xxxx)).xy).x);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case user_func_arg_complex_types
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			struct Pos { float a, b, c; };
+			struct Line { Pos start, end; };
+
+			float func (float a) { return -a; }
+			float func (float a[4]) { return a[0] + a[3]; }
+			vec2 func (vec2 a) { return a.yx; }
+			vec3 func (vec3 a) { return a.xxx; }
+			vec4 func (vec4 a) { return a.wwww; }
+			vec4 func (vec4 a[4]) { return a[1] + a[2]; }
+			int func (int a) { return -a; }
+			ivec2 func (ivec2 a) { return a.yx; }
+			ivec3 func (ivec3 a) { return a.xxx; }
+			ivec4 func (ivec4 a) { return a.wwww; }
+			bool func (bool a) { return !a; }
+			bvec2 func (bvec2 a) { return a.yx; }
+			bvec3 func (bvec3 a) { return a.xxx; }
+			bvec4 func (bvec4 a) { return a.wwww; }
+			Pos func (Pos a) { return a; }
+			Line func (Line a) { return Line(a.end, a.start); }
+
+			void main()
+			{
+				${SETUP}
+				float arr[4];
+				vec4 arr2[4];
+				out0 = func(arr) + func(arr2).x;
+				if (func(func(bvec4(false)).x))
+					out0 = func(in0) * float(func(-1));
+				else
+					out0 = float(func(func(ivec4(func(func(func(vec4(0.5)).xyz).xy).xxxx)).xy).x);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case user_func_arguments
+		version 310 es
+		values
+		{
+			input float in0		= [ 0.0 | 1.0 | -2.0 | 2.5 ];
+			output float out0	= [ 0.0 | -1.0 | 2.0 | -2.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				return -a;
+			}
+
+			float func (float a, float b)
+			{
+				return a * b;
+			}
+
+			void main()
+			{
+				out0 = func(in0) * func(-0.5, -2.0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case array_size
+		version 310 es
+		values
+		{
+			output float out0	= [ 1.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float f[3])
+			{
+				return f[0];
+			}
+
+			float func (float f[4])
+			{
+				return f[1];
+			}
+
+			void main ()
+			{
+				${SETUP}
+				float[4] x = float[4] (-1.0, 1.0, 0.0, 0.0);
+				out0 = func(x);
+				${OUTPUT}
+			}
+		""
+	end
+
+end # overloading
+
+group array_arguments "Arrays as Arguments"
+
+	case local_in_float
+		version 310 es
+		values
+		{
+			input vec4 in0		= [ vec4(0.0, 1.0, 2.0, -4.0) | vec4(-7.5, 12.125, -0.25, 16.0) ];
+			output vec4 out0	= [ vec4(0.0, -1.0, -2.0, 4.0) | vec4(7.5, -12.125, 0.25, -16.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (in float a[4])
+			{
+				a[0] = -1.0;
+				a[2] = -4.0;
+				a[3] = -3.0 * a[1];
+				return a[0];
+			}
+
+			void main()
+			{
+				float arr[4];
+				arr[0] = in0.x;
+				arr[1] = in0.y;
+				arr[2] = in0.z;
+				arr[3] = in0.w;
+				float f = func(arr);
+				out0 = f * vec4(arr[0], arr[1], arr[2], arr[3]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case global_in_float
+		version 310 es
+		values
+		{
+			input vec4 in0		= [ vec4(0.0, 1.0, 2.0, -4.0) | vec4(-7.5, 12.125, -0.25, 16.0) ];
+			output vec4 out0	= [ vec4(0.0, -1.0, -2.0, 4.0) | vec4(7.5, -12.125, 0.25, -16.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (in float a[4])
+			{
+				a[0] = -1.0;
+				a[2] = -4.0;
+				a[3] = -3.0 * a[1];
+				return a[0];
+			}
+
+			float arr[4];
+
+			void main()
+			{
+				arr[0] = in0.x;
+				arr[1] = in0.y;
+				arr[2] = in0.z;
+				arr[3] = in0.w;
+				float f = func(arr);
+				out0 = f * vec4(arr[0], arr[1], arr[2], arr[3]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case local_in_int
+		version 310 es
+		values
+		{
+			input ivec4 in0		= [ ivec4(0, 1, 2, -4) | ivec4(-7, -11, 13, 19) ];
+			output ivec4 out0	= [ ivec4(0, -1, -2, 4) | ivec4(7, 11, -13, -19) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			int func (in int a[4])
+			{
+				a[0] = -1;
+				a[2] = -4;
+				a[3] = -3 * a[1];
+				return a[0];
+			}
+
+			void main()
+			{
+				${SETUP}
+				int arr[4];
+				arr[0] = in0.x;
+				arr[1] = in0.y;
+				arr[2] = in0.z;
+				arr[3] = in0.w;
+				int f = func(arr);
+				out0 = f * ivec4(arr[0], arr[1], arr[2], arr[3]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case global_in_int
+		version 310 es
+		values
+		{
+			input ivec4 in0		= [ ivec4(0, 1, 2, 4) | ivec4(-7, -11, 13, 19) ];
+			output ivec4 out0	= [ ivec4(0, -1, -2, -4) | ivec4(7, 11, -13, -19) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			int func (in int a[4])
+			{
+				a[0] = -1;
+				a[2] = -4;
+				a[3] = -3 * a[1];
+				return a[0];
+			}
+
+			int arr[4];
+
+			void main()
+			{
+				${SETUP}
+				arr[0] = in0.x;
+				arr[1] = in0.y;
+				arr[2] = in0.z;
+				arr[3] = in0.w;
+				int f = func(arr);
+				out0 = f * ivec4(arr[0], arr[1], arr[2], arr[3]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case local_in_bool
+		version 310 es
+		values
+		{
+			input bvec4 in0		= [ bvec4(true, true, false, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0	= [ bvec4(false, false, true, false) | bvec4(true, true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			bool func (in bool a[4])
+			{
+				a[0] = false;
+				a[2] = true;
+				a[3] = !a[1];
+				return a[0];
+			}
+
+			void main()
+			{
+				${SETUP}
+				bool arr[4];
+				arr[0] = !in0.x;
+				arr[1] = !in0.y;
+				arr[2] = !in0.z;
+				arr[3] = !in0.w;
+				func(arr);
+				out0 = bvec4(arr[0], arr[1], arr[2], arr[3]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case global_in_bool
+		version 310 es
+		values
+		{
+			input bvec4 in0		= [ bvec4(true, true, false, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0	= [ bvec4(false, false, true, false) | bvec4(true, true, true, true) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			bool func (in bool a[4])
+			{
+				a[0] = false;
+				a[2] = true;
+				a[3] = !a[1];
+				return a[0];
+			}
+
+			bool arr[4];
+
+			void main()
+			{
+				${SETUP}
+				arr[0] = !in0.x;
+				arr[1] = !in0.y;
+				arr[2] = !in0.z;
+				arr[3] = !in0.w;
+				func(arr);
+				out0 = bvec4(arr[0], arr[1], arr[2], arr[3]);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case test_helpers
+		version 310 es
+		desc "Check that helper functions are supported properly."
+		values
+		{
+			input vec4 in0		= [ vec4(0.0, 1.0, 2.0, -4.0) | vec4(-7.5, 12.125, -0.25, 16.0) ];
+			output float out0	= [ 1.0 | 1.0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			vec4 get (in float arr[4]);
+			void set (out float arr[4], vec4 val);
+			void negate (inout float arr[4]);
+			bool test (in float arr[4], vec4 ref);
+			bool isEqual (in float a[4], in float b[4]);
+
+			void main()
+			{
+				float arr[4];
+				set(arr, in0);
+				negate(arr);
+				out0 = float(test(arr, -in0));
+				${OUTPUT}
+			}
+
+			float absDiff (vec4 a, vec4 b) { vec4 d = abs(a - b); return max(max(d.x, d.y), max(d.z, d.w)); }
+			vec4 get (in float arr[4]) { return vec4(arr[0], arr[1], arr[2], arr[3]); }
+			void set (out float arr[4], vec4 val) { arr[0] = val.x; arr[1] = val.y; arr[2] = val.z; arr[3] = val.w; }
+			void negate (inout float arr[4]) { set(arr, -get(arr)); }
+			bool test (in float arr[4], vec4 ref) { return (absDiff(get(arr), ref) < 0.1); }
+			bool isEqual (in float a[4], in float b[4]) { return (absDiff(get(a), get(b)) < 0.1); }
+		""
+	end
+
+	case copy_local_in_on_call
+		version 310 es
+		desc "Check that local 'in' arguments are copied on call and don't alias."
+		values
+		{
+			input vec4 in0		= [ vec4(0.0, 1.0, 2.0, -4.0) | vec4(-7.5, 12.125, -0.25, 16.0) ];
+			output vec4 out0	= [ vec4(0.0, -1.0, -2.0, 4.0) | vec4(7.5, -12.125, 0.25, -16.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			vec4 get (in float arr[4]);
+			void set (out float arr[4], vec4 val);
+			void negate (inout float arr[4]);
+			bool test (in float arr[4], vec4 ref);
+			bool isEqual (in float a[4], in float b[4]);
+
+			float func (in float a[4], in float b[4])
+			{
+				a[0] = 2.123;
+				a[2] = -4.123;
+				return isEqual(a, b) ? 1.0 : -1.0;
+			}
+
+			void main()
+			{
+				float arr[4];
+				set(arr, in0);
+				out0 = in0 * func(arr, arr);
+				${OUTPUT}
+			}
+
+			float absDiff (vec4 a, vec4 b) { vec4 d = abs(a - b); return max(max(d.x, d.y), max(d.z, d.w)); }
+			vec4 get (in float arr[4]) { return vec4(arr[0], arr[1], arr[2], arr[3]); }
+			void set (out float arr[4], vec4 val) { arr[0] = val.x; arr[1] = val.y; arr[2] = val.z; arr[3] = val.w; }
+			void negate (inout float arr[4]) { set(arr, -get(arr)); }
+			bool test (in float arr[4], vec4 ref) { return (absDiff(get(arr), ref) < 0.1); }
+			bool isEqual (in float a[4], in float b[4]) { return (absDiff(get(a), get(b)) < 0.1); }
+		""
+	end
+
+	case copy_global_in_on_call
+		version 310 es
+		desc "Check that global 'in' arguments are copied on call and don't alias."
+		values
+		{
+			input vec4 in0		= [ vec4(0.0, 1.0, 2.0, -4.0) | vec4(-7.5, 12.125, -0.25, 16.0) ];
+			output vec4 out0	= [ vec4(0.0, -1.0, -2.0, 4.0) | vec4(7.5, -12.125, 0.25, -16.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			vec4 get (in float arr[4]);
+			void set (out float arr[4], vec4 val);
+			void negate (inout float arr[4]);
+			bool test (in float arr[4], vec4 ref);
+			bool isEqual (in float a[4], in float b[4]);
+
+			float func (in float a[4], in float b[4])
+			{
+				a[0] = 2.123;
+				a[2] = -4.123;
+				return isEqual(a, b) ? 1.0 : -1.0;
+			}
+
+			float arr[4];
+
+			void main()
+			{
+				set(arr, in0);
+				out0 = in0 * func(arr, arr);
+				${OUTPUT}
+			}
+
+			float absDiff (vec4 a, vec4 b) { vec4 d = abs(a - b); return max(max(d.x, d.y), max(d.z, d.w)); }
+			vec4 get (in float arr[4]) { return vec4(arr[0], arr[1], arr[2], arr[3]); }
+			void set (out float arr[4], vec4 val) { arr[0] = val.x; arr[1] = val.y; arr[2] = val.z; arr[3] = val.w; }
+			void negate (inout float arr[4]) { set(arr, -get(arr)); }
+			bool test (in float arr[4], vec4 ref) { return (absDiff(get(arr), ref) < 0.1); }
+			bool isEqual (in float a[4], in float b[4]) { return (absDiff(get(a), get(b)) < 0.1); }
+		""
+	end
+
+	case copy_local_inout_on_call
+		version 310 es
+		desc "Check that local 'inout' arguments are copied on call and don't alias."
+		values
+		{
+			input vec4 in0		= [ vec4(0.0, 1.0, 2.0, -4.0) | vec4(-7.5, 12.125, -0.25, 16.0) ];
+			output vec4 out0	= [ vec4(0.0, -1.0, -2.0, 4.0) | vec4(7.5, -12.125, 0.25, -16.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			vec4 get (in float arr[4]);
+			void set (out float arr[4], vec4 val);
+			void negate (inout float arr[4]);
+			bool test (in float arr[4], vec4 ref);
+			bool isEqual (in float a[4], in float b[4]);
+
+			float func (inout float a[4], inout float b[4])
+			{
+				negate(a);
+				return isEqual(a, b) ? 1.0 : -1.0;
+			}
+
+			void main()
+			{
+				float arr[4];
+				set(arr, in0);
+				float m = func(arr, arr); // returns -1.0
+				float n = float(test(arr, in0) || test(arr, -in0));
+				out0 = in0 * m * n;
+				${OUTPUT}
+			}
+
+			float absDiff (vec4 a, vec4 b) { vec4 d = abs(a - b); return max(max(d.x, d.y), max(d.z, d.w)); }
+			vec4 get (in float arr[4]) { return vec4(arr[0], arr[1], arr[2], arr[3]); }
+			void set (out float arr[4], vec4 val) { arr[0] = val.x; arr[1] = val.y; arr[2] = val.z; arr[3] = val.w; }
+			void negate (inout float arr[4]) { set(arr, -get(arr)); }
+			bool test (in float arr[4], vec4 ref) { return (absDiff(get(arr), ref) < 0.1); }
+			bool isEqual (in float a[4], in float b[4]) { return (absDiff(get(a), get(b)) < 0.1); }
+		""
+	end
+
+	case copy_global_inout_on_call
+		version 310 es
+		desc "Check that global 'inout' arguments are copied on call and don't alias."
+		values
+		{
+			input vec4 in0		= [ vec4(0.0, 1.0, 2.0, -4.0) | vec4(-7.5, 12.125, -0.25, 16.0) ];
+			output vec4 out0	= [ vec4(0.0, -1.0, -2.0, 4.0) | vec4(7.5, -12.125, 0.25, -16.0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			vec4 get (in float arr[4]);
+			void set (out float arr[4], vec4 val);
+			void negate (inout float arr[4]);
+			bool test (in float arr[4], vec4 ref);
+			bool isEqual (in float a[4], in float b[4]);
+
+			float func (inout float a[4], inout float b[4])
+			{
+				negate(a);
+				return isEqual(a, b) ? 1.0 : -1.0;
+			}
+
+			float arr[4];
+
+			void main()
+			{
+				set(arr, in0);
+				float m = func(arr, arr); // returns -1.0
+				float n = float(test(arr, in0) || test(arr, -in0));
+				out0 = in0 * m * n;
+				${OUTPUT}
+			}
+
+			float absDiff (vec4 a, vec4 b) { vec4 d = abs(a - b); return max(max(d.x, d.y), max(d.z, d.w)); }
+			vec4 get (in float arr[4]) { return vec4(arr[0], arr[1], arr[2], arr[3]); }
+			void set (out float arr[4], vec4 val) { arr[0] = val.x; arr[1] = val.y; arr[2] = val.z; arr[3] = val.w; }
+			void negate (inout float arr[4]) { set(arr, -get(arr)); }
+			bool test (in float arr[4], vec4 ref) { return (absDiff(get(arr), ref) < 0.1); }
+			bool isEqual (in float a[4], in float b[4]) { return (absDiff(get(a), get(b)) < 0.1); }
+		""
+	end
+
+end # array_arguments
+
+group control_flow "Control Flow In Functions"
+
+	case simple_return
+		version 310 es
+		values
+		{
+			input float in0		= [ -0.5 | 1.5 ];
+			output float out0	= [ 0.5 | -1.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				return -a;
+				a = a * -1.0;
+				return 1.0;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case return_in_if
+		version 310 es
+		values
+		{
+			input float in0		= [ -0.5 | 1.5 ];
+			output float out0	= [ 0.5 | -1.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				if (a != 0.0)
+					return -a;
+				return 1.0;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case return_in_else
+		version 310 es
+		values
+		{
+			input float in0		= [ -0.5 | 1.5 ];
+			output float out0	= [ 0.5 | -1.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				if (a == 0.0)
+					return 1.0;
+				else
+					return -a;
+				return 1.0;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case return_in_loop
+		version 310 es
+		values
+		{
+			input float in0		= [ -0.5 | 1.5 ];
+			output float out0	= [ 0.5 | -1.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				while (a < 100.0)
+					return -a;
+				return 1.0;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case return_in_loop_if
+		version 310 es
+		values
+		{
+			input float in0		= [ -0.5 | 1.5 ];
+			output float out0	= [ 0.5 | -1.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				while (a < 100.0)
+				{
+					a = -a;
+					if (a != 0.0)
+						return a;
+					else
+						return -1.0;
+				}
+				return 1.0;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case return_after_loop
+		version 310 es
+		values
+		{
+			input float in0		= [ -0.5 | 1.5 ];
+			output float out0	= [ 0.5 | -1.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				for (int i = 0; i < 5; i++)
+					a = -a;
+				return a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case return_after_break
+		version 310 es
+		values
+		{
+			input float in0		= [ -0.5 | 1.5 ];
+			output float out0	= [ 0.5 | -1.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				for (int i = 0; i < 6; i++)
+				{
+					a = -a;
+					if (i == 4)
+						break;
+				}
+				return a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case return_after_continue
+		version 310 es
+		values
+		{
+			input float in0		= [ -0.5 | 1.5 ];
+			output float out0	= [ 0.5 | -1.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				for (int i = 0; i < 6; i++)
+				{
+					if (i == 4)
+						continue;
+					a = -a;
+				}
+				return a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case return_in_nested_loop
+		version 310 es
+		values
+		{
+			input float in0		= [ -0.5 | 1.5 ];
+			output float out0	= [ 0.5 | -1.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				for (int i = 0; i < 6; i++)
+				{
+					a = -a;
+					for (int j = 0; j < 4; j++)
+					{
+						a = -a;
+						if (i == 1)
+							return a;
+					}
+					if (i == 4)
+						return 1.0;
+				}
+				return 1.0;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case return_after_loop_sequence
+		version 310 es
+		values
+		{
+			input float in0		= [ -0.5 | 1.5 ];
+			output float out0	= [ 0.5 | -1.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				int i;
+				for (i = 0; i < 6; i++) // negate a
+				{
+					a = -a;
+					if (i == 4)
+						a = -a;
+				}
+
+				for (; i < 10; i++) // keep a
+				{
+					if (i == 8)
+						continue;
+					else if (i == 9)
+						break;
+					a = -a;
+				}
+
+				return a;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mixed_return_break_continue
+		version 310 es
+		values
+		{
+			input float in0		= [ -0.5 | 1.5 ];
+			output float out0	= [ 0.5 | -1.5 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float func (float a)
+			{
+				int i;
+				for (i = 0; i < 6; i++)
+				{
+					if (i == 0)
+						continue;
+					else if (i == 1)
+					{
+					}
+					else if (i == 3)
+						break;
+					else
+						return a;
+					a = -a;
+				}
+
+				return 1.0;
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+end # control_flow
+
+group misc "Miscellaneous"
+
+	case multi_arg_float
+		version 310 es
+		values
+		{
+			input vec4 in0		= [ vec4(0.0, 1.0, -2.0, 0.5) | vec4(2.0, 2.5, 4.0, -7.0) ];
+			output float out0	= [ 0.5 | -1.5 ]; # -sum(in0)
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			float sum(vec4 v) { return (v.x + v.y + v.z + v.w); }
+
+			float func (float a, vec3 b, vec2 c, vec2 d, vec4 e)
+			{
+				return -sum(vec4(a, b) + vec4(c, d)) + sum(e);
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0.y, in0.xzw, in0.wz, in0.yx, in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case multi_arg_int
+		version 310 es
+		values
+		{
+			input ivec4 in0		= [ ivec4(-1, 0, 2, 2) | ivec4(1, 4, -8, 2) ];
+			output int out0		= [ -3 | 1 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			precision mediump int;
+			${DECLARATIONS}
+
+			int sum(ivec4 v) { return (v.x + v.y + v.z + v.w); }
+
+			int func (int a, ivec3 b, ivec2 c, ivec2 d, ivec4 e)
+			{
+				return -sum(ivec4(a, b) + ivec4(c, d)) + sum(e);
+			}
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0.y, in0.xzw, in0.wz, in0.yx, in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case argument_eval_order_1
+		version 310 es
+		values
+		{
+			input int in0	= [  0 | 1 | 3 | 5 ];
+			output int out0	= [ -1 | 5 | 11 | 17 ];
+		}
+
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			int func (float a, int b, bool c, int d)
+			{
+				if (c)
+					return b + int(a) + d;
+				else
+					return -1;
+			}
+
+			void main ()
+			{
+				${SETUP}
+				float v0 = float(in0);
+				int v1 = in0;
+				out0 = func((v0 += 1.0), v1++, (v0 > 1.5), v1);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case argument_eval_order_2
+		version 310 es
+		values
+		{
+			input int in0	= [ 0 | -1 | 3 | 5 ];
+			output int out0	= [ 3 | -1 | 9 | 13 ];
+		}
+
+		both ""
+			#version 310 es
+			precision highp float;
+			${DECLARATIONS}
+
+			int g;
+
+			int modG (int v)
+			{
+				g += v;
+				return v;
+			}
+
+			int func (float a, int b, bool c, int d)
+			{
+				if (c)
+					return b + int(a) + d;
+				else
+					return -1;
+			}
+
+			void main ()
+			{
+				${SETUP}
+				out0 = func(float(g = in0), modG(2), --g > 0, g);
+				${OUTPUT}
+			}
+		""
+	end
+
+end # misc
diff --git a/external/vulkancts/data/vulkan/glsl/es310/linkage.test b/external/vulkancts/data/vulkan/glsl/es310/linkage.test
new file mode 100644
index 0000000..21c62b0
--- /dev/null
+++ b/external/vulkancts/data/vulkan/glsl/es310/linkage.test
@@ -0,0 +1,1885 @@
+
+# Varying tests
+group varying "Varying linkage"
+
+	# Linking rules
+	group rules "Varying linking rules"
+		# not declared in vertex shader, declared in fragment shader
+		case fragment_declare
+			version 310 es
+			desc "varying declared in fragment shader, no reference in vertex shader"
+			values { output float out0 = 1.0; }
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				void main()
+				{
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				layout(location = 0) in mediump float var;
+				${FRAGMENT_DECLARATIONS}
+				void main()
+				{
+					out0 = 1.0;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		# declared in vertex shader, no reference in frag shader
+		case vertex_declare
+			version 310 es
+			desc "varying declared in vertex shader, no reference in fragment shader"
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump float var;
+				void main()
+				{
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				void main()
+				{
+					${FRAG_COLOR} = vec4(1.0);
+				}
+			""
+		end
+
+		# declared in vertex shader, declared in frag shader
+		case both_declare
+			version 310 es
+			desc "varying declared in both vertex and fragment shader, but not used"
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump float var;
+				void main()
+				{
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				layout(location = 0) in mediump float var;
+				${FRAGMENT_DECLARATIONS}
+				void main()
+				{
+					${FRAG_COLOR} = vec4(1.0);
+				}
+			""
+		end
+
+		# declared in vertex shader, static use in frag shader
+		case vertex_declare_fragment_use
+			version 310 es
+			desc "varying declared in both shaders, statically used in fragment shader"
+			values { uniform bool u_false = false; }
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump float var;
+				void main()
+				{
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				layout(location = 0) in mediump float var;
+				${FRAGMENT_DECLARATIONS}
+				void main()
+				{
+					if (u_false)
+						${FRAG_COLOR} = vec4(var);
+					else
+						${FRAG_COLOR} = vec4(1.0);
+				}
+			""
+		end
+
+		# static use in vertex shader, no reference in fragment shader
+		case vertex_use_fragment_declare
+			version 310 es
+			desc "varying declared and statically used in vertex shader, no reference in fragment shader"
+			values { uniform bool u_false = false; }
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump float var;
+				void main()
+				{
+					if (u_false)
+						var = 1.0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				void main()
+				{
+					${FRAG_COLOR} = vec4(1.0);
+				}
+			""
+		end
+
+		# static use in vertex shader, declared in fragment shader
+		case vertex_use_declare_fragment
+			version 310 es
+			desc "varying declared and statically used in vertex shader, only declared in fragment shader"
+			values { uniform bool u_false = false; }
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump float var;
+				void main()
+				{
+					if (u_false)
+						var = 1.0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				layout(location = 0) in mediump float var;
+				${FRAGMENT_DECLARATIONS}
+				void main()
+				{
+					${FRAG_COLOR} = vec4(1.0);
+				}
+			""
+		end
+
+		# static use in vertex shader, used in fragment shader
+		case vertex_use_fragment_use
+			version 310 es
+			desc "varying statically used in both vertex and fragment shader"
+			values { uniform bool u_false = false; }
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump float var;
+				void main()
+				{
+					if (u_false)
+						var = 1.0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in mediump float var;
+				void main()
+				{
+					if (u_false)
+						${FRAG_COLOR} = vec4(var);
+					else
+						${FRAG_COLOR} = vec4(1.0);
+				}
+			""
+		end
+
+		# differing precision tests
+		case differing_precision_1
+			version 310 es
+			desc "varying declared as highp in vertex shader, but mediump in fragment shader"
+			values
+			{
+				input float in0		= [ -1.25 | -25.55 | 1.0 | 2.25 | 3.4 | 16.0 ];
+				output float out0	= [ -1.25 | -25.55 | 1.0 | 2.25 | 3.4 | 16.0 ];
+			}
+
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out highp float var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in mediump float var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		# differing precision tests
+		case differing_precision_2
+			version 310 es
+			desc "varying declared as highp in vertex shader, but lowp in fragment shader"
+			values
+			{
+				input float in0		= [ -1.25 | -25.56 | 1.0 | 2.25 | 3.4 | 16.0 ];
+				output float out0	= [ -1.25 | -25.56 | 1.0 | 2.25 | 3.4 | 16.0 ];
+			}
+
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out highp vec2 var;
+				void main()
+				{
+					var = vec2(in0, 2.0*in0);
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in lowp vec2 var;
+				void main()
+				{
+					out0 = var.y - var.x;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		# differing precision tests
+		case differing_precision_3
+			version 310 es
+			desc "varying declared as lowp in vertex shader, but mediump in fragment shader"
+			values
+			{
+				input float in0		= [ -1.25 | -25.0 | 1.0 | 2.25 | 3.4 | 16.0 ];
+				output float out0	= [ -1.25 | -25.0 | 1.0 | 2.25 | 3.4 | 16.0 ];
+			}
+
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out lowp vec4 var;
+				void main()
+				{
+					var = vec4(in0, 2.0*in0, -in0, -in0);
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in mediump vec4 var;
+				void main()
+				{
+					out0 = var.x + var.y + var.z + var.w;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		# different interpolation
+		case differing_interpolation_2
+			version 310 es
+			desc "varying interpolation different (smooth vs. centroid)"
+			values
+			{
+				input float in0		= [ -1.25 | -25.0 | 1.0 | 2.25 | 3.4 | 16.0 ];
+				output float out0	= [ -1.25 | -25.0 | 1.0 | 2.25 | 3.4 | 16.0 ];
+			}
+
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) smooth out mediump float var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) centroid in mediump float var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+	end
+
+	group basic_types "Basic varying types"
+		case float
+			version 310 es
+			desc "varying of type float"
+			values
+			{
+				input float in0		= [ -1.25 | -25.65 | 1.0 | 2.25 | 3.4 | 16.0 ];
+				output float out0	= [ -1.25 | -25.65 | 1.0 | 2.25 | 3.4 | 16.0 ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump float var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in float var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case vec2
+			version 310 es
+			desc "varying of type vec2"
+			values
+			{
+				input vec2 in0		= [ vec2(-1.25, 1.25) | vec2(-25.65, -7.25) | vec2(0.0, 1.0) | vec2(2.25, 2.25) | vec2(3.4, 9.5) | vec2(16.0, 32.0) ];
+				output vec2 out0	= [ vec2(-1.25, 1.25) | vec2(-25.65, -7.25) | vec2(0.0, 1.0) | vec2(2.25, 2.25) | vec2(3.4, 9.5) | vec2(16.0, 32.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump vec2 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in vec2 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case vec3
+			version 310 es
+			desc "varying of type vec3"
+			values
+			{
+				input vec3 in0		= [ vec3(-1.25, 1.25, -9.5) | vec3(-25.65, -7.25, 14.21) | vec3(0.0, 1.0, -1.0) | vec3(2.25, 2.25, 22.5) | vec3(3.4, 9.5, 19.5) | vec3(16.0, 32.0, -64.0) ];
+				output vec3 out0	= [ vec3(-1.25, 1.25, -9.5) | vec3(-25.65, -7.25, 14.21) | vec3(0.0, 1.0, -1.0) | vec3(2.25, 2.25, 22.5) | vec3(3.4, 9.5, 19.5) | vec3(16.0, 32.0, -64.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump vec3 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in vec3 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case vec4
+			version 310 es
+			desc "varying of type vec4"
+			values
+			{
+				input vec4 in0		= [ vec4(-1.25, 1.25, -9.5, -12.2) | vec4(-25.65, -7.25, 14.21, -77.7) | vec4(0.0, 1.0, -1.0, 2.0) | vec4(2.25, 2.25, 22.5, 225.0) | vec4(3.4, 9.5, 19.5, 29.5) | vec4(16.0, 32.0, -64.0, -128.0) ];
+				output vec4 out0	= [ vec4(-1.25, 1.25, -9.5, -12.2) | vec4(-25.65, -7.25, 14.21, -77.7) | vec4(0.0, 1.0, -1.0, 2.0) | vec4(2.25, 2.25, 22.5, 225.0) | vec4(3.4, 9.5, 19.5, 29.5) | vec4(16.0, 32.0, -64.0, -128.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump vec4 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in vec4 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat2
+			version 310 es
+			desc "varying of type mat2"
+			values
+			{
+				input mat2 in0		= [ mat2(1.0, 1.0, 1.0, 1.0) | mat2(-1.25, 1.25, -9.5, -12.2) | mat2(-25.65, -7.25, 14.21, -77.7) | mat2(0.0, 1.0, -1.0, 2.0) | mat2(2.25, 2.25, 22.5, 225.0) | mat2(3.4, 9.5, 19.5, 29.5) | mat2(16.0, 32.0, -64.0, -128.0) ];
+				output mat2 out0	= [ mat2(1.0, 1.0, 1.0, 1.0) | mat2(-1.25, 1.25, -9.5, -12.2) | mat2(-25.65, -7.25, 14.21, -77.7) | mat2(0.0, 1.0, -1.0, 2.0) | mat2(2.25, 2.25, 22.5, 225.0) | mat2(3.4, 9.5, 19.5, 29.5) | mat2(16.0, 32.0, -64.0, -128.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump mat2 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in mat2 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat2x3
+			version 310 es
+			desc "varying of type mat2x3"
+			values
+			{
+				input mat2x3 in0	= [ mat2x3(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25) ];
+				output mat2x3 out0	= [ mat2x3(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump mat2x3 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in mat2x3 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat2x4
+			version 310 es
+			desc "varying of type mat2x4"
+			values
+			{
+				input mat2x4 in0	= [ mat2x4(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7) ];
+				output mat2x4 out0	= [ mat2x4(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump mat2x4 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in mat2x4 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat3x2
+			version 310 es
+			desc "varying of type mat3x2"
+			values
+			{
+				input mat3x2 in0	= [ mat3x2(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25) ];
+				output mat3x2 out0	= [ mat3x2(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump mat3x2 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in mat3x2 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat3
+			version 310 es
+			desc "varying of type mat3"
+			values
+			{
+				input mat3 in0		= [ mat3(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 9.9) | mat3(0.0, 1.0, -1.0, 2.0, 2.25, 2.25, 22.5, 225.0, -9.9) | mat3(3.4, 9.5, 19.5, 29.5, 16.0, 32.0, -64.0, -128.0, 256.0) ];
+				output mat3 out0	= [ mat3(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 9.9) | mat3(0.0, 1.0, -1.0, 2.0, 2.25, 2.25, 22.5, 225.0, -9.9) | mat3(3.4, 9.5, 19.5, 29.5, 16.0, 32.0, -64.0, -128.0, 256.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump mat3 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in mat3 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat3x4
+			version 310 es
+			desc "varying of type mat3x4"
+			values
+			{
+				input mat3x4 in0	= [ mat3x4(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 0.0, 1.0, -1.0, 2.0) ];
+				output mat3x4 out0	= [ mat3x4(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 0.0, 1.0, -1.0, 2.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump mat3x4 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in mat3x4 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat4x2
+			version 310 es
+			desc "varying of type mat4x2"
+			values
+			{
+				input mat4x2 in0	= [ mat4x2(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7) ];
+				output mat4x2 out0	= [ mat4x2(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump mat4x2 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in mat4x2 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat4x3
+			version 310 es
+			desc "varying of type mat4x3"
+			values
+			{
+				input mat4x3 in0	= [ mat4x3(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 0.0, 1.0, -1.0, 2.0) ];
+				output mat4x3 out0	= [ mat4x3(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 0.0, 1.0, -1.0, 2.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump mat4x3 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in mat4x3 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat4
+			version 310 es
+			desc "varying of type mat4"
+			values
+			{
+				input mat4 in0		= [ mat4(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 0.0, 1.0, -1.0, 2.0, 2.25, 2.25, 22.5, 225.0) ];
+				output mat4 out0	= [ mat4(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 0.0, 1.0, -1.0, 2.0, 2.25, 2.25, 22.5, 225.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out mediump mat4 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in mat4 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case int
+			version 310 es
+			desc "varying of type int"
+			values
+			{
+				input int in0		= [ -1 | -25 | 1 | 2 | 3 | 16 ];
+				output int out0		= [ -1 | -25 | 1 | 2 | 3 | 16 ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) flat out mediump int var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) flat in int var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case ivec2
+			version 310 es
+			desc "varying of type ivec2"
+			values
+			{
+				input ivec2 in0		= [ ivec2(-1, 1) | ivec2(-25, 25) | ivec2(1, 1) | ivec2(2, 3) | ivec2(16, 17) ];
+				output ivec2 out0	= [ ivec2(-1, 1) | ivec2(-25, 25) | ivec2(1, 1) | ivec2(2, 3) | ivec2(16, 17) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) flat out mediump ivec2 var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) flat in ivec2 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case ivec3
+			version 310 es
+			desc "varying of type ivec3"
+			values
+			{
+				input ivec3 in0		= [ ivec3(-1, 1, -2) | ivec3(-25, 25, -3) | ivec3(1, 1, 1) | ivec3(2, 3, 4) | ivec3(16, 17, 18) ];
+				output ivec3 out0	= [ ivec3(-1, 1, -2) | ivec3(-25, 25, -3) | ivec3(1, 1, 1) | ivec3(2, 3, 4) | ivec3(16, 17, 18) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) flat out mediump ivec3 var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) flat in ivec3 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case ivec4
+			version 310 es
+			desc "varying of type ivec4"
+			values
+			{
+				input ivec4 in0		= [ ivec4(-1, 1, -2, 2) | ivec4(-25, 25, -3, 3) | ivec4(1, 1, 1, 1) | ivec4(2, 3, 4, 5) | ivec4(16, 17, 18, 19) ];
+				output ivec4 out0	= [ ivec4(-1, 1, -2, 2) | ivec4(-25, 25, -3, 3) | ivec4(1, 1, 1, 1) | ivec4(2, 3, 4, 5) | ivec4(16, 17, 18, 19) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) flat out mediump ivec4 var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) flat in ivec4 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case uint
+			version 310 es
+			desc "varying of type int"
+			values
+			{
+				input uint in0			= [ 1 | 2 | 3 | 16 ];
+				output uint out0		= [ 1 | 2 | 3 | 16 ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) flat out mediump uint var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) flat in uint var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case uvec2
+			version 310 es
+			desc "varying of type uvec2"
+			values
+			{
+				input uvec2 in0		= [ uvec2(1, 1) | uvec2(25, 25) | uvec2(1, 1) | uvec2(2, 3) | uvec2(16, 17) ];
+				output uvec2 out0	= [ uvec2(1, 1) | uvec2(25, 25) | uvec2(1, 1) | uvec2(2, 3) | uvec2(16, 17) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) flat out mediump uvec2 var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) flat in uvec2 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case uvec3
+			version 310 es
+			desc "varying of type uvec3"
+			values
+			{
+				input uvec3 in0		= [ uvec3(1, 1, 2) | uvec3(25, 25, 3) | uvec3(1, 1, 1) | uvec3(2, 3, 4) | uvec3(16, 17, 18) ];
+				output uvec3 out0	= [ uvec3(1, 1, 2) | uvec3(25, 25, 3) | uvec3(1, 1, 1) | uvec3(2, 3, 4) | uvec3(16, 17, 18) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) flat out mediump uvec3 var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) flat in uvec3 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case uvec4
+			version 310 es
+			desc "varying of type uvec4"
+			values
+			{
+				input uvec4 in0		= [ uvec4(1, 1, 2, 2) | uvec4(25, 25, 3, 3) | uvec4(1, 1, 1, 1) | uvec4(2, 3, 4, 5) | uvec4(16, 17, 18, 19) ];
+				output uvec4 out0	= [ uvec4(1, 1, 2, 2) | uvec4(25, 25, 3, 3) | uvec4(1, 1, 1, 1) | uvec4(2, 3, 4, 5) | uvec4(16, 17, 18, 19) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) flat out mediump uvec4 var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) flat in uvec4 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+	end
+
+	group struct "Structure varyings"
+		case float
+			version 310 es
+			desc "varying of type float inside struct"
+			values
+			{
+				input float in0		= [ -1.25 | -25.65 | 1.0 | 2.25 | 3.4 | 16.0 ];
+				output float out0	= [ -1.25 | -25.65 | 1.0 | 2.25 | 3.4 | 16.0 ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump float a; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump float a; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case vec2
+			version 310 es
+			desc "varying of type vec2 inside struct"
+			values
+			{
+				input vec2 in0		= [ vec2(-1.25, 1.25) | vec2(-25.65, -7.25) | vec2(0.0, 1.0) | vec2(2.25, 2.25) | vec2(3.4, 9.5) | vec2(16.0, 32.0) ];
+				output vec2 out0	= [ vec2(-1.25, 1.25) | vec2(-25.65, -7.25) | vec2(0.0, 1.0) | vec2(2.25, 2.25) | vec2(3.4, 9.5) | vec2(16.0, 32.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump vec2 a; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump vec2 a; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case vec3
+			version 310 es
+			desc "varying of type vec3 inside struct"
+			values
+			{
+				input vec3 in0		= [ vec3(-1.25, 1.25, -9.5) | vec3(-25.65, -7.25, 14.21) | vec3(0.0, 1.0, -1.0) | vec3(2.25, 2.25, 22.5) | vec3(3.4, 9.5, 19.5) | vec3(16.0, 32.0, -64.0) ];
+				output vec3 out0	= [ vec3(-1.25, 1.25, -9.5) | vec3(-25.65, -7.25, 14.21) | vec3(0.0, 1.0, -1.0) | vec3(2.25, 2.25, 22.5) | vec3(3.4, 9.5, 19.5) | vec3(16.0, 32.0, -64.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump vec3 a; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump vec3 a; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case vec4
+			version 310 es
+			desc "varying of type vec4 inside struct"
+			values
+			{
+				input vec4 in0		= [ vec4(-1.25, 1.25, -9.5, -12.2) | vec4(-25.65, -7.25, 14.21, -77.7) | vec4(0.0, 1.0, -1.0, 2.0) | vec4(2.25, 2.25, 22.5, 225.0) | vec4(3.4, 9.5, 19.5, 29.5) | vec4(16.0, 32.0, -64.0, -128.0) ];
+				output vec4 out0	= [ vec4(-1.25, 1.25, -9.5, -12.2) | vec4(-25.65, -7.25, 14.21, -77.7) | vec4(0.0, 1.0, -1.0, 2.0) | vec4(2.25, 2.25, 22.5, 225.0) | vec4(3.4, 9.5, 19.5, 29.5) | vec4(16.0, 32.0, -64.0, -128.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump vec4 a; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump vec4 a; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat2
+			version 310 es
+			desc "varying of type mat2 inside struct"
+			values
+			{
+				input mat2 in0		= [ mat2(1.0, 1.0, 1.0, 1.0) | mat2(-1.25, 1.25, -9.5, -12.2) | mat2(-25.65, -7.25, 14.21, -77.7) | mat2(0.0, 1.0, -1.0, 2.0) | mat2(2.25, 2.25, 22.5, 225.0) | mat2(3.4, 9.5, 19.5, 29.5) | mat2(16.0, 32.0, -64.0, -128.0) ];
+				output mat2 out0	= [ mat2(1.0, 1.0, 1.0, 1.0) | mat2(-1.25, 1.25, -9.5, -12.2) | mat2(-25.65, -7.25, 14.21, -77.7) | mat2(0.0, 1.0, -1.0, 2.0) | mat2(2.25, 2.25, 22.5, 225.0) | mat2(3.4, 9.5, 19.5, 29.5) | mat2(16.0, 32.0, -64.0, -128.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump mat2 a; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump mat2 a; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat2x3
+			version 310 es
+			desc "varying of type mat2x3 inside struct"
+			values
+			{
+				input mat2x3 in0	= [ mat2x3(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25) ];
+				output mat2x3 out0	= [ mat2x3(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump mat2x3 a; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump mat2x3 a; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat2x4
+			version 310 es
+			desc "varying of type mat2x4 inside struct"
+			values
+			{
+				input mat2x4 in0	= [ mat2x4(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7) ];
+				output mat2x4 out0	= [ mat2x4(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump mat2x4 a; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump mat2x4 a; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat3x2
+			version 310 es
+			desc "varying of type mat3x2 inside struct"
+			values
+			{
+				input mat3x2 in0	= [ mat3x2(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25) ];
+				output mat3x2 out0	= [ mat3x2(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump mat3x2 a; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump mat3x2 a; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat3
+			version 310 es
+			desc "varying of type mat3 inside struct"
+			values
+			{
+				input mat3 in0		= [ mat3(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 9.9) | mat3(0.0, 1.0, -1.0, 2.0, 2.25, 2.25, 22.5, 225.0, -9.9) | mat3(3.4, 9.5, 19.5, 29.5, 16.0, 32.0, -64.0, -128.0, 256.0) ];
+				output mat3 out0	= [ mat3(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 9.9) | mat3(0.0, 1.0, -1.0, 2.0, 2.25, 2.25, 22.5, 225.0, -9.9) | mat3(3.4, 9.5, 19.5, 29.5, 16.0, 32.0, -64.0, -128.0, 256.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump mat3 a; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump mat3 a; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat3x4
+			version 310 es
+			desc "varying of type mat3x4 inside struct"
+			values
+			{
+				input mat3x4 in0	= [ mat3x4(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 0.0, 1.0, -1.0, 2.0) ];
+				output mat3x4 out0	= [ mat3x4(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 0.0, 1.0, -1.0, 2.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump mat3x4 a; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump mat3x4 a; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat4x2
+			version 310 es
+			desc "varying of type mat4x2 inside struct"
+			values
+			{
+				input mat4x2 in0	= [ mat4x2(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7) ];
+				output mat4x2 out0	= [ mat4x2(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump mat4x2 a; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump mat4x2 a; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat4x3
+			version 310 es
+			desc "varying of type mat4x3 inside struct"
+			values
+			{
+				input mat4x3 in0	= [ mat4x3(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 0.0, 1.0, -1.0, 2.0) ];
+				output mat4x3 out0	= [ mat4x3(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 0.0, 1.0, -1.0, 2.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump mat4x3 a; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump mat4x3 a; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case mat4
+			version 310 es
+			desc "varying of type mat4 inside struct"
+			values
+			{
+				input mat4 in0		= [ mat4(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 0.0, 1.0, -1.0, 2.0, 2.25, 2.25, 22.5, 225.0) ];
+				output mat4 out0	= [ mat4(-1.25, 1.25, -9.5, -12.2, -25.65, -7.25, 14.21, -77.7, 0.0, 1.0, -1.0, 2.0, 2.25, 2.25, 22.5, 225.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump mat4 a; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump mat4 a; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case int
+			version 310 es
+			desc "varying of type int inside struct"
+			values
+			{
+				input int in0		= [ -1 | -25 | 1 | 2 | 3 | 16 ];
+				output int out0		= [ -1 | -25 | 1 | 2 | 3 | 16 ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump int a; };
+				layout(location = 0) flat out S var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump int a; };
+				layout(location = 0) flat in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case ivec2
+			version 310 es
+			desc "varying of type ivec2 inside struct"
+			values
+			{
+				input ivec2 in0		= [ ivec2(-1, 1) | ivec2(-25, 25) | ivec2(1, 1) | ivec2(2, 3) | ivec2(16, 17) ];
+				output ivec2 out0	= [ ivec2(-1, 1) | ivec2(-25, 25) | ivec2(1, 1) | ivec2(2, 3) | ivec2(16, 17) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump ivec2 a; };
+				layout(location = 0) flat out S var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump ivec2 a; };
+				layout(location = 0) flat in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case ivec3
+			version 310 es
+			desc "varying of type ivec3 inside struct"
+			values
+			{
+				input ivec3 in0		= [ ivec3(-1, 1, -2) | ivec3(-25, 25, -3) | ivec3(1, 1, 1) | ivec3(2, 3, 4) | ivec3(16, 17, 18) ];
+				output ivec3 out0	= [ ivec3(-1, 1, -2) | ivec3(-25, 25, -3) | ivec3(1, 1, 1) | ivec3(2, 3, 4) | ivec3(16, 17, 18) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump ivec3 a; };
+				layout(location = 0) flat out S var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump ivec3 a; };
+				layout(location = 0) flat in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case ivec4
+			version 310 es
+			desc "varying of type ivec4 inside struct"
+			values
+			{
+				input ivec4 in0		= [ ivec4(-1, 1, -2, 2) | ivec4(-25, 25, -3, 3) | ivec4(1, 1, 1, 1) | ivec4(2, 3, 4, 5) | ivec4(16, 17, 18, 19) ];
+				output ivec4 out0	= [ ivec4(-1, 1, -2, 2) | ivec4(-25, 25, -3, 3) | ivec4(1, 1, 1, 1) | ivec4(2, 3, 4, 5) | ivec4(16, 17, 18, 19) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump ivec4 a; };
+				layout(location = 0) flat out S var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump ivec4 a; };
+				layout(location = 0) flat in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case uint
+			version 310 es
+			desc "varying of type uint in struct"
+			values
+			{
+				input uint in0			= [ 1 | 2 | 3 | 16 ];
+				output uint out0		= [ 1 | 2 | 3 | 16 ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump uint a; };
+				layout(location = 0) flat out S var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump uint a; };
+				layout(location = 0) flat in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case uvec2
+			version 310 es
+			desc "varying of type uvec2 inside struct"
+			values
+			{
+				input uvec2 in0		= [ uvec2(1, 1) | uvec2(25, 25) | uvec2(1, 1) | uvec2(2, 3) | uvec2(16, 17) ];
+				output uvec2 out0	= [ uvec2(1, 1) | uvec2(25, 25) | uvec2(1, 1) | uvec2(2, 3) | uvec2(16, 17) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump uvec2 a; };
+				layout(location = 0) flat out S var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump uvec2 a; };
+				layout(location = 0) flat in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case uvec3
+			version 310 es
+			desc "varying of type uvec3 inside struct"
+			values
+			{
+				input uvec3 in0		= [ uvec3(1, 1, 2) | uvec3(25, 25, 3) | uvec3(1, 1, 1) | uvec3(2, 3, 4) | uvec3(16, 17, 18) ];
+				output uvec3 out0	= [ uvec3(1, 1, 2) | uvec3(25, 25, 3) | uvec3(1, 1, 1) | uvec3(2, 3, 4) | uvec3(16, 17, 18) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump uvec3 a; };
+				layout(location = 0) flat out S var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump uvec3 a; };
+				layout(location = 0) flat in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case uvec4
+			version 310 es
+			desc "varying of type uvec4 inside struct"
+			values
+			{
+				input uvec4 in0		= [ uvec4(1, 1, 2, 2) | uvec4(25, 25, 3, 3) | uvec4(1, 1, 1, 1) | uvec4(2, 3, 4, 5) | uvec4(16, 17, 18, 19) ];
+				output uvec4 out0	= [ uvec4(1, 1, 2, 2) | uvec4(25, 25, 3, 3) | uvec4(1, 1, 1, 1) | uvec4(2, 3, 4, 5) | uvec4(16, 17, 18, 19) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump uvec4 a; };
+				layout(location = 0) flat out S var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var.a = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump uvec4 a; };
+				layout(location = 0) flat in S var;
+				void main()
+				{
+					out0 = var.a;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case float_vec3
+			version 310 es
+			desc "varyings of type float and vec3 inside struct"
+			values
+			{
+				input float in0		= [ -1.25 | -25.65 | 1.0 | 2.25 | 3.4 | 16.0 ];
+				output float out0	= [ -1.25 | -25.65 | 1.0 | 2.25 | 3.4 | 16.0 ];
+				input vec3 in1		= [ vec3(-1.25, 1.25, -9.5) | vec3(-25.65, -7.25, 14.21) | vec3(0.0, 1.0, -1.0) | vec3(2.25, 2.25, 22.5) | vec3(3.4, 9.5, 19.5) | vec3(16.0, 32.0, -64.0) ];
+				output vec3 out1	= [ vec3(-1.25, 1.25, -9.5) | vec3(-25.65, -7.25, 14.21) | vec3(0.0, 1.0, -1.0) | vec3(2.25, 2.25, 22.5) | vec3(3.4, 9.5, 19.5) | vec3(16.0, 32.0, -64.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump float a; highp vec3 b; };
+				layout(location = 0) out S var;
+				void main()
+				{
+					var.a = in0;
+					var.b = in1;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump float a; highp vec3 b; };
+				layout(location = 0) in S var;
+				void main()
+				{
+					out0 = var.a;
+					out1 = var.b;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case float_uvec2_vec3
+			version 310 es
+			desc "varyings of type float and vec3 inside struct"
+			values
+			{
+				input float in0		= [ -1.25 | -25.65 | 1.0 | 2.25 | 3.4 | 16.0 ];
+				output float out0	= [ -1.25 | -25.65 | 1.0 | 2.25 | 3.4 | 16.0 ];
+				input uvec2 in1		= [ uvec2(1, 1) | uvec2(25, 25) | uvec2(1, 1) | uvec2(2, 3) | uvec2(16, 17) | uvec2(8, 7) ];
+				output uvec2 out1	= [ uvec2(1, 1) | uvec2(25, 25) | uvec2(1, 1) | uvec2(2, 3) | uvec2(16, 17) | uvec2(8, 7) ];
+				input vec3 in2		= [ vec3(-1.25, 1.25, -9.5) | vec3(-25.65, -7.25, 14.21) | vec3(0.0, 1.0, -1.0) | vec3(2.25, 2.25, 22.5) | vec3(3.4, 9.5, 19.5) | vec3(16.0, 32.0, -64.0) ];
+				output vec3 out2	= [ vec3(-1.25, 1.25, -9.5) | vec3(-25.65, -7.25, 14.21) | vec3(0.0, 1.0, -1.0) | vec3(2.25, 2.25, 22.5) | vec3(3.4, 9.5, 19.5) | vec3(16.0, 32.0, -64.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				struct S { mediump float a; highp uvec2 b; highp vec3 c; };
+				layout(location = 0) flat out S var;
+				void main()
+				{
+					${VERTEX_SETUP}
+					var.a = in0;
+					var.b = in1;
+					var.c = in2;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				struct S { mediump float a; highp uvec2 b; highp vec3 c; };
+				layout(location = 0) flat in S var;
+				void main()
+				{
+					out0 = var.a;
+					out1 = var.b;
+					out2 = var.c;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+	end
+
+	group interpolation "Varying interpolation modes"
+		case smooth
+			version 310 es
+			desc "varying of type vec4"
+			values
+			{
+				input vec4 in0		= [ vec4(-1.25, 1.25, -9.5, -12.2) | vec4(-25.65, -7.25, 14.21, -77.7) | vec4(0.0, 1.0, -1.0, 2.0) | vec4(2.25, 2.25, 22.5, 225.0) | vec4(3.4, 9.5, 19.5, 29.5) | vec4(16.0, 32.0, -64.0, -128.0) ];
+				output vec4 out0	= [ vec4(-1.25, 1.25, -9.5, -12.2) | vec4(-25.65, -7.25, 14.21, -77.7) | vec4(0.0, 1.0, -1.0, 2.0) | vec4(2.25, 2.25, 22.5, 225.0) | vec4(3.4, 9.5, 19.5, 29.5) | vec4(16.0, 32.0, -64.0, -128.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) smooth out mediump vec4 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) smooth in vec4 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case centroid
+			version 310 es
+			desc "varying of type vec4"
+			values
+			{
+				input vec4 in0		= [ vec4(-1.25, 1.25, -9.5, -12.2) | vec4(-25.65, -7.25, 14.21, -77.7) | vec4(0.0, 1.0, -1.0, 2.0) | vec4(2.25, 2.25, 22.5, 225.0) | vec4(3.4, 9.5, 19.5, 29.5) | vec4(16.0, 32.0, -64.0, -128.0) ];
+				output vec4 out0	= [ vec4(-1.25, 1.25, -9.5, -12.2) | vec4(-25.65, -7.25, 14.21, -77.7) | vec4(0.0, 1.0, -1.0, 2.0) | vec4(2.25, 2.25, 22.5, 225.0) | vec4(3.4, 9.5, 19.5, 29.5) | vec4(16.0, 32.0, -64.0, -128.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) centroid out mediump vec4 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) centroid in vec4 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+
+		case flat
+			version 310 es
+			desc "varying of type vec4"
+			values
+			{
+				input vec4 in0		= [ vec4(-1.25, 1.25, -9.5, -12.2) | vec4(-25.65, -7.25, 14.21, -77.7) | vec4(0.0, 1.0, -1.0, 2.0) | vec4(2.25, 2.25, 22.5, 225.0) | vec4(3.4, 9.5, 19.5, 29.5) | vec4(16.0, 32.0, -64.0, -128.0) ];
+				output vec4 out0	= [ vec4(-1.25, 1.25, -9.5, -12.2) | vec4(-25.65, -7.25, 14.21, -77.7) | vec4(0.0, 1.0, -1.0, 2.0) | vec4(2.25, 2.25, 22.5, 225.0) | vec4(3.4, 9.5, 19.5, 29.5) | vec4(16.0, 32.0, -64.0, -128.0) ];
+			}
+			vertex ""
+				#version 310 es
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) flat out mediump vec4 var;
+				void main()
+				{
+					var = in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) flat in vec4 var;
+				void main()
+				{
+					out0 = var;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+	end
+
+	group usage "Varying usage in shaders"
+		case readback_1
+			version 310 es
+			desc "read back (an already written) varying in the vertex shader"
+			values
+			{
+				input float in0		= [ 1.0 | 0.0 | -2.0 | 10.0 ];
+				output float out0	= [ 3.0 | 0.0 | -6.0 | 30.0 ];
+			}
+			vertex ""
+				#version 310 es
+				precision mediump float;
+				${VERTEX_DECLARATIONS}
+				layout(location = 0) out float var1;
+				layout(location = 1) out float var2;
+
+				void main()
+				{
+					var1 = in0;
+					var2 = var1 + in0;
+					${VERTEX_OUTPUT}
+				}
+			""
+			fragment ""
+				#version 310 es
+				precision mediump float;
+				${FRAGMENT_DECLARATIONS}
+				layout(location = 0) in float var1;
+				layout(location = 1) in float var2;
+
+				void main()
+				{
+					out0 = var1 + var2;
+					${FRAGMENT_OUTPUT}
+				}
+			""
+		end
+	end
+end
diff --git a/external/vulkancts/data/vulkan/glsl/es310/scoping.test b/external/vulkancts/data/vulkan/glsl/es310/scoping.test
new file mode 100644
index 0000000..9fc18a1
--- /dev/null
+++ b/external/vulkancts/data/vulkan/glsl/es310/scoping.test
@@ -0,0 +1,443 @@
+group valid "Valid scoping and name redeclaration cases"
+
+	case local_variable_hides_global_variable
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			int a = -1;
+
+			void main()
+			{
+				${SETUP}
+				int a = in0;
+
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case block_variable_hides_local_variable
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				${SETUP}
+				int a = in0;
+				{
+					int a = -1;
+				}
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case block_variable_hides_global_variable
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			int a = -1;
+
+			void main()
+			{
+				${SETUP}
+				{
+					int a = in0;
+
+					out0 = a;
+				}
+				${OUTPUT}
+			}
+		""
+	end
+
+	case for_init_statement_variable_hides_local_variable
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				${SETUP}
+				int a = in0;
+				for (int a = 0; a < 10; a++)
+				{
+				}
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case while_condition_variable_hides_local_variable
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				${SETUP}
+				int a = in0;
+				int i = 0;
+				while (bool a = (i < 1))
+				{
+					i++;
+				}
+				out0 = a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case for_init_statement_variable_hides_global_variable
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			int a = 5;
+
+			void main()
+			{
+				${SETUP}
+				for (int a = 0; a < 10; a++)
+				{
+				}
+				out0 = in0 + a - 5;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case while_condition_variable_hides_global_variable
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			int a = 5;
+
+			void main()
+			{
+				${SETUP}
+				int i = 0;
+				while (bool a = (i < 1))
+				{
+					i++;
+				}
+				out0 = in0 + a - 5;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case variable_in_if_hides_global_variable
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			int a = 1;
+
+			void main()
+			{
+				${SETUP}
+				if (true)
+					int a = 42;
+				out0 = a*in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case variable_from_outer_scope_visible_in_initializer
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			void main()
+			{
+				${SETUP}
+				int a = in0;
+				{
+					int a = a+5, b = a-5;
+					out0 = b;
+					a = 42;
+				}
+				out0 = out0 + a - in0;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case local_int_variable_hides_struct_type
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			struct S { int val; };
+
+			void main()
+			{
+				${SETUP}
+				int S = S(in0).val;
+				out0 = S;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case local_struct_variable_hides_struct_type
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			struct S { int val; };
+
+			void main()
+			{
+				${SETUP}
+				S S = S(in0);
+				out0 = S.val;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case local_variable_hides_function
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			int foo (int x) { return x; }
+
+			void main()
+			{
+				${SETUP}
+				int foo = in0;
+				out0 = foo;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case function_parameter_hides_global_variable
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			int a = -1;
+
+			int func (int a) { return a; }
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case function_parameter_hides_struct_type
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			struct S { int x; };
+
+			int func (int S) { return S; }
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case function_parameter_hides_function
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			int func (int func) { return func; }
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case local_variable_in_inner_scope_hides_function_parameter
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+			int func (int inp, int x) { { int x = 5; return inp + x - 5; } }
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0, 42);
+				${OUTPUT}
+			}
+		""
+	end
+
+	case redeclare_function
+		version 310 es
+		values
+		{
+			input int in0 = [ 1 | 2 | 3 ];
+			output int out0 = [ 1 | 2 | 3 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+			${DECLARATIONS}
+
+			int func (int x);
+			int func (int);
+			int func (int inp) { return inp; }
+
+			void main()
+			{
+				${SETUP}
+				out0 = func(in0);
+				${OUTPUT}
+			}
+		""
+	end
+
+end
diff --git a/external/vulkancts/data/vulkan/glsl/es310/swizzles.test b/external/vulkancts/data/vulkan/glsl/es310/swizzles.test
new file mode 100644
index 0000000..5a853a6
--- /dev/null
+++ b/external/vulkancts/data/vulkan/glsl/es310/swizzles.test
@@ -0,0 +1,7459 @@
+# WARNING: This file is auto-generated. Do NOT modify it manually, but rather
+# modify the generating script file. Otherwise changes will be lost!
+
+group vector_swizzles "Vector Swizzles"
+
+	case mediump_vec2_x
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output float out0 = [ 0.0 | 1.0 | -0.5 | -32.0 | -0.75 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.x;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_xx
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec2 out0 = [ vec2(0.0, 0.0) | vec2(1.0, 1.0) | vec2(-0.5, -0.5) | vec2(-32.0, -32.0) | vec2(-0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_xy
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec2 out0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_yx
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec2 out0 = [ vec2(0.5, 0.0) | vec2(1.25, 1.0) | vec2(-2.25, -0.5) | vec2(64.0, -32.0) | vec2(-0.0322580645161, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_yxy
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec3 out0 = [ vec3(0.5, 0.0, 0.5) | vec3(1.25, 1.0, 1.25) | vec3(-2.25, -0.5, -2.25) | vec3(64.0, -32.0, 64.0) | vec3(-0.0322580645161, -0.75, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yxy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_xyxx
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.0, 0.0) | vec4(1.0, 1.25, 1.0, 1.0) | vec4(-0.5, -2.25, -0.5, -0.5) | vec4(-32.0, 64.0, -32.0, -32.0) | vec4(-0.75, -0.0322580645161, -0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyxx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_yyyy
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec4 out0 = [ vec4(0.5, 0.5, 0.5, 0.5) | vec4(1.25, 1.25, 1.25, 1.25) | vec4(-2.25, -2.25, -2.25, -2.25) | vec4(64.0, 64.0, 64.0, 64.0) | vec4(-0.0322580645161, -0.0322580645161, -0.0322580645161, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yyyy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_s
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output float out0 = [ 0.0 | 1.0 | -0.5 | -32.0 | -0.75 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.s;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_ss
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec2 out0 = [ vec2(0.0, 0.0) | vec2(1.0, 1.0) | vec2(-0.5, -0.5) | vec2(-32.0, -32.0) | vec2(-0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_st
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec2 out0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.st;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_ts
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec2 out0 = [ vec2(0.5, 0.0) | vec2(1.25, 1.0) | vec2(-2.25, -0.5) | vec2(64.0, -32.0) | vec2(-0.0322580645161, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ts;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_tst
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec3 out0 = [ vec3(0.5, 0.0, 0.5) | vec3(1.25, 1.0, 1.25) | vec3(-2.25, -0.5, -2.25) | vec3(64.0, -32.0, 64.0) | vec3(-0.0322580645161, -0.75, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.tst;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_stss
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.0, 0.0) | vec4(1.0, 1.25, 1.0, 1.0) | vec4(-0.5, -2.25, -0.5, -0.5) | vec4(-32.0, 64.0, -32.0, -32.0) | vec4(-0.75, -0.0322580645161, -0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_tttt
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec4 out0 = [ vec4(0.5, 0.5, 0.5, 0.5) | vec4(1.25, 1.25, 1.25, 1.25) | vec4(-2.25, -2.25, -2.25, -2.25) | vec4(64.0, 64.0, 64.0, 64.0) | vec4(-0.0322580645161, -0.0322580645161, -0.0322580645161, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.tttt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_r
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output float out0 = [ 0.0 | 1.0 | -0.5 | -32.0 | -0.75 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.r;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_rr
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec2 out0 = [ vec2(0.0, 0.0) | vec2(1.0, 1.0) | vec2(-0.5, -0.5) | vec2(-32.0, -32.0) | vec2(-0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_rg
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec2 out0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_gr
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec2 out0 = [ vec2(0.5, 0.0) | vec2(1.25, 1.0) | vec2(-2.25, -0.5) | vec2(64.0, -32.0) | vec2(-0.0322580645161, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.gr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_grg
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec3 out0 = [ vec3(0.5, 0.0, 0.5) | vec3(1.25, 1.0, 1.25) | vec3(-2.25, -0.5, -2.25) | vec3(64.0, -32.0, 64.0) | vec3(-0.0322580645161, -0.75, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.grg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_rgrr
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.0, 0.0) | vec4(1.0, 1.25, 1.0, 1.0) | vec4(-0.5, -2.25, -0.5, -0.5) | vec4(-32.0, 64.0, -32.0, -32.0) | vec4(-0.75, -0.0322580645161, -0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rgrr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec2_gggg
+		version 310 es
+		values
+		{
+			input vec2 in0 = [ vec2(0.0, 0.5) | vec2(1.0, 1.25) | vec2(-0.5, -2.25) | vec2(-32.0, 64.0) | vec2(-0.75, -0.0322580645161) ];
+			output vec4 out0 = [ vec4(0.5, 0.5, 0.5, 0.5) | vec4(1.25, 1.25, 1.25, 1.25) | vec4(-2.25, -2.25, -2.25, -2.25) | vec4(64.0, 64.0, 64.0, 64.0) | vec4(-0.0322580645161, -0.0322580645161, -0.0322580645161, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.gggg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_x
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output float out0 = [ 0.0 | 1.0 | -0.5 | -32.0 | -0.75 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.x;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_z
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output float out0 = [ 0.75 | 1.125 | -4.875 | -51.0 | 0.0526315789474 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.z;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_xz
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec2 out0 = [ vec2(0.0, 0.75) | vec2(1.0, 1.125) | vec2(-0.5, -4.875) | vec2(-32.0, -51.0) | vec2(-0.75, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_zz
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec2 out0 = [ vec2(0.75, 0.75) | vec2(1.125, 1.125) | vec2(-4.875, -4.875) | vec2(-51.0, -51.0) | vec2(0.0526315789474, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_xyz
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_zyx
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.75, 0.5, 0.0) | vec3(1.125, 1.25, 1.0) | vec3(-4.875, -2.25, -0.5) | vec3(-51.0, 64.0, -32.0) | vec3(0.0526315789474, -0.0322580645161, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zyx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_xxx
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.0, 0.0, 0.0) | vec3(1.0, 1.0, 1.0) | vec3(-0.5, -0.5, -0.5) | vec3(-32.0, -32.0, -32.0) | vec3(-0.75, -0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xxx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_zzz
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.75, 0.75, 0.75) | vec3(1.125, 1.125, 1.125) | vec3(-4.875, -4.875, -4.875) | vec3(-51.0, -51.0, -51.0) | vec3(0.0526315789474, 0.0526315789474, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zzz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_zzy
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.75, 0.75, 0.5) | vec3(1.125, 1.125, 1.25) | vec3(-4.875, -4.875, -2.25) | vec3(-51.0, -51.0, 64.0) | vec3(0.0526315789474, 0.0526315789474, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zzy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_yxy
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.5, 0.0, 0.5) | vec3(1.25, 1.0, 1.25) | vec3(-2.25, -0.5, -2.25) | vec3(64.0, -32.0, 64.0) | vec3(-0.0322580645161, -0.75, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yxy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_xzx
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.0, 0.75, 0.0) | vec3(1.0, 1.125, 1.0) | vec3(-0.5, -4.875, -0.5) | vec3(-32.0, -51.0, -32.0) | vec3(-0.75, 0.0526315789474, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xzx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_xyyx
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.5, 0.0) | vec4(1.0, 1.25, 1.25, 1.0) | vec4(-0.5, -2.25, -2.25, -0.5) | vec4(-32.0, 64.0, 64.0, -32.0) | vec4(-0.75, -0.0322580645161, -0.0322580645161, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyyx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_zzzz
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec4 out0 = [ vec4(0.75, 0.75, 0.75, 0.75) | vec4(1.125, 1.125, 1.125, 1.125) | vec4(-4.875, -4.875, -4.875, -4.875) | vec4(-51.0, -51.0, -51.0, -51.0) | vec4(0.0526315789474, 0.0526315789474, 0.0526315789474, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zzzz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_s
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output float out0 = [ 0.0 | 1.0 | -0.5 | -32.0 | -0.75 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.s;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_p
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output float out0 = [ 0.75 | 1.125 | -4.875 | -51.0 | 0.0526315789474 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.p;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_sp
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec2 out0 = [ vec2(0.0, 0.75) | vec2(1.0, 1.125) | vec2(-0.5, -4.875) | vec2(-32.0, -51.0) | vec2(-0.75, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.sp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_pp
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec2 out0 = [ vec2(0.75, 0.75) | vec2(1.125, 1.125) | vec2(-4.875, -4.875) | vec2(-51.0, -51.0) | vec2(0.0526315789474, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.pp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_stp
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_pts
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.75, 0.5, 0.0) | vec3(1.125, 1.25, 1.0) | vec3(-4.875, -2.25, -0.5) | vec3(-51.0, 64.0, -32.0) | vec3(0.0526315789474, -0.0322580645161, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.pts;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_sss
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.0, 0.0, 0.0) | vec3(1.0, 1.0, 1.0) | vec3(-0.5, -0.5, -0.5) | vec3(-32.0, -32.0, -32.0) | vec3(-0.75, -0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.sss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_ppp
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.75, 0.75, 0.75) | vec3(1.125, 1.125, 1.125) | vec3(-4.875, -4.875, -4.875) | vec3(-51.0, -51.0, -51.0) | vec3(0.0526315789474, 0.0526315789474, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ppp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_ppt
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.75, 0.75, 0.5) | vec3(1.125, 1.125, 1.25) | vec3(-4.875, -4.875, -2.25) | vec3(-51.0, -51.0, 64.0) | vec3(0.0526315789474, 0.0526315789474, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ppt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_tst
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.5, 0.0, 0.5) | vec3(1.25, 1.0, 1.25) | vec3(-2.25, -0.5, -2.25) | vec3(64.0, -32.0, 64.0) | vec3(-0.0322580645161, -0.75, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.tst;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_sps
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.0, 0.75, 0.0) | vec3(1.0, 1.125, 1.0) | vec3(-0.5, -4.875, -0.5) | vec3(-32.0, -51.0, -32.0) | vec3(-0.75, 0.0526315789474, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.sps;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_stts
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.5, 0.0) | vec4(1.0, 1.25, 1.25, 1.0) | vec4(-0.5, -2.25, -2.25, -0.5) | vec4(-32.0, 64.0, 64.0, -32.0) | vec4(-0.75, -0.0322580645161, -0.0322580645161, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stts;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_pppp
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec4 out0 = [ vec4(0.75, 0.75, 0.75, 0.75) | vec4(1.125, 1.125, 1.125, 1.125) | vec4(-4.875, -4.875, -4.875, -4.875) | vec4(-51.0, -51.0, -51.0, -51.0) | vec4(0.0526315789474, 0.0526315789474, 0.0526315789474, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.pppp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_r
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output float out0 = [ 0.0 | 1.0 | -0.5 | -32.0 | -0.75 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.r;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_b
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output float out0 = [ 0.75 | 1.125 | -4.875 | -51.0 | 0.0526315789474 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.b;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_rb
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec2 out0 = [ vec2(0.0, 0.75) | vec2(1.0, 1.125) | vec2(-0.5, -4.875) | vec2(-32.0, -51.0) | vec2(-0.75, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_bb
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec2 out0 = [ vec2(0.75, 0.75) | vec2(1.125, 1.125) | vec2(-4.875, -4.875) | vec2(-51.0, -51.0) | vec2(0.0526315789474, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_rgb
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rgb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_bgr
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.75, 0.5, 0.0) | vec3(1.125, 1.25, 1.0) | vec3(-4.875, -2.25, -0.5) | vec3(-51.0, 64.0, -32.0) | vec3(0.0526315789474, -0.0322580645161, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bgr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_rrr
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.0, 0.0, 0.0) | vec3(1.0, 1.0, 1.0) | vec3(-0.5, -0.5, -0.5) | vec3(-32.0, -32.0, -32.0) | vec3(-0.75, -0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rrr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_bbb
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.75, 0.75, 0.75) | vec3(1.125, 1.125, 1.125) | vec3(-4.875, -4.875, -4.875) | vec3(-51.0, -51.0, -51.0) | vec3(0.0526315789474, 0.0526315789474, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bbb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_bbg
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.75, 0.75, 0.5) | vec3(1.125, 1.125, 1.25) | vec3(-4.875, -4.875, -2.25) | vec3(-51.0, -51.0, 64.0) | vec3(0.0526315789474, 0.0526315789474, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bbg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_grg
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.5, 0.0, 0.5) | vec3(1.25, 1.0, 1.25) | vec3(-2.25, -0.5, -2.25) | vec3(64.0, -32.0, 64.0) | vec3(-0.0322580645161, -0.75, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.grg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_rbr
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec3 out0 = [ vec3(0.0, 0.75, 0.0) | vec3(1.0, 1.125, 1.0) | vec3(-0.5, -4.875, -0.5) | vec3(-32.0, -51.0, -32.0) | vec3(-0.75, 0.0526315789474, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rbr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_rggr
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.5, 0.0) | vec4(1.0, 1.25, 1.25, 1.0) | vec4(-0.5, -2.25, -2.25, -0.5) | vec4(-32.0, 64.0, 64.0, -32.0) | vec4(-0.75, -0.0322580645161, -0.0322580645161, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rggr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec3_bbbb
+		version 310 es
+		values
+		{
+			input vec3 in0 = [ vec3(0.0, 0.5, 0.75) | vec3(1.0, 1.25, 1.125) | vec3(-0.5, -2.25, -4.875) | vec3(-32.0, 64.0, -51.0) | vec3(-0.75, -0.0322580645161, 0.0526315789474) ];
+			output vec4 out0 = [ vec4(0.75, 0.75, 0.75, 0.75) | vec4(1.125, 1.125, 1.125, 1.125) | vec4(-4.875, -4.875, -4.875, -4.875) | vec4(-51.0, -51.0, -51.0, -51.0) | vec4(0.0526315789474, 0.0526315789474, 0.0526315789474, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bbbb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_x
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output float out0 = [ 0.0 | 1.0 | -0.5 | -32.0 | -0.75 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.x;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_w
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output float out0 = [ 0.825 | 1.75 | 9.0 | 24.0 | 0.25 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.w;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_wx
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec2 out0 = [ vec2(0.825, 0.0) | vec2(1.75, 1.0) | vec2(9.0, -0.5) | vec2(24.0, -32.0) | vec2(0.25, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_wz
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec2 out0 = [ vec2(0.825, 0.75) | vec2(1.75, 1.125) | vec2(9.0, -4.875) | vec2(24.0, -51.0) | vec2(0.25, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_www
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec3 out0 = [ vec3(0.825, 0.825, 0.825) | vec3(1.75, 1.75, 1.75) | vec3(9.0, 9.0, 9.0) | vec3(24.0, 24.0, 24.0) | vec3(0.25, 0.25, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.www;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_yyw
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec3 out0 = [ vec3(0.5, 0.5, 0.825) | vec3(1.25, 1.25, 1.75) | vec3(-2.25, -2.25, 9.0) | vec3(64.0, 64.0, 24.0) | vec3(-0.0322580645161, -0.0322580645161, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yyw;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_wzy
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec3 out0 = [ vec3(0.825, 0.75, 0.5) | vec3(1.75, 1.125, 1.25) | vec3(9.0, -4.875, -2.25) | vec3(24.0, -51.0, 64.0) | vec3(0.25, 0.0526315789474, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wzy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_xyzw
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyzw;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_wzyx
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.825, 0.75, 0.5, 0.0) | vec4(1.75, 1.125, 1.25, 1.0) | vec4(9.0, -4.875, -2.25, -0.5) | vec4(24.0, -51.0, 64.0, -32.0) | vec4(0.25, 0.0526315789474, -0.0322580645161, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wzyx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_xxxx
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.0, 0.0, 0.0, 0.0) | vec4(1.0, 1.0, 1.0, 1.0) | vec4(-0.5, -0.5, -0.5, -0.5) | vec4(-32.0, -32.0, -32.0, -32.0) | vec4(-0.75, -0.75, -0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xxxx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_yyyy
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.5, 0.5, 0.5, 0.5) | vec4(1.25, 1.25, 1.25, 1.25) | vec4(-2.25, -2.25, -2.25, -2.25) | vec4(64.0, 64.0, 64.0, 64.0) | vec4(-0.0322580645161, -0.0322580645161, -0.0322580645161, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yyyy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_wwww
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.825, 0.825, 0.825, 0.825) | vec4(1.75, 1.75, 1.75, 1.75) | vec4(9.0, 9.0, 9.0, 9.0) | vec4(24.0, 24.0, 24.0, 24.0) | vec4(0.25, 0.25, 0.25, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wwww;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_wzzw
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.825, 0.75, 0.75, 0.825) | vec4(1.75, 1.125, 1.125, 1.75) | vec4(9.0, -4.875, -4.875, 9.0) | vec4(24.0, -51.0, -51.0, 24.0) | vec4(0.25, 0.0526315789474, 0.0526315789474, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wzzw;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_wwwy
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.825, 0.825, 0.825, 0.5) | vec4(1.75, 1.75, 1.75, 1.25) | vec4(9.0, 9.0, 9.0, -2.25) | vec4(24.0, 24.0, 24.0, 64.0) | vec4(0.25, 0.25, 0.25, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wwwy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_xyxx
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.0, 0.0) | vec4(1.0, 1.25, 1.0, 1.0) | vec4(-0.5, -2.25, -0.5, -0.5) | vec4(-32.0, 64.0, -32.0, -32.0) | vec4(-0.75, -0.0322580645161, -0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyxx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_zzwz
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.75, 0.75, 0.825, 0.75) | vec4(1.125, 1.125, 1.75, 1.125) | vec4(-4.875, -4.875, 9.0, -4.875) | vec4(-51.0, -51.0, 24.0, -51.0) | vec4(0.0526315789474, 0.0526315789474, 0.25, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zzwz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_s
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output float out0 = [ 0.0 | 1.0 | -0.5 | -32.0 | -0.75 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.s;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_q
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output float out0 = [ 0.825 | 1.75 | 9.0 | 24.0 | 0.25 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.q;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_qs
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec2 out0 = [ vec2(0.825, 0.0) | vec2(1.75, 1.0) | vec2(9.0, -0.5) | vec2(24.0, -32.0) | vec2(0.25, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qs;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_qp
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec2 out0 = [ vec2(0.825, 0.75) | vec2(1.75, 1.125) | vec2(9.0, -4.875) | vec2(24.0, -51.0) | vec2(0.25, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_qqq
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec3 out0 = [ vec3(0.825, 0.825, 0.825) | vec3(1.75, 1.75, 1.75) | vec3(9.0, 9.0, 9.0) | vec3(24.0, 24.0, 24.0) | vec3(0.25, 0.25, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qqq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_ttq
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec3 out0 = [ vec3(0.5, 0.5, 0.825) | vec3(1.25, 1.25, 1.75) | vec3(-2.25, -2.25, 9.0) | vec3(64.0, 64.0, 24.0) | vec3(-0.0322580645161, -0.0322580645161, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ttq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_qpt
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec3 out0 = [ vec3(0.825, 0.75, 0.5) | vec3(1.75, 1.125, 1.25) | vec3(9.0, -4.875, -2.25) | vec3(24.0, -51.0, 64.0) | vec3(0.25, 0.0526315789474, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qpt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_stpq
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stpq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_qpts
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.825, 0.75, 0.5, 0.0) | vec4(1.75, 1.125, 1.25, 1.0) | vec4(9.0, -4.875, -2.25, -0.5) | vec4(24.0, -51.0, 64.0, -32.0) | vec4(0.25, 0.0526315789474, -0.0322580645161, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qpts;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_ssss
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.0, 0.0, 0.0, 0.0) | vec4(1.0, 1.0, 1.0, 1.0) | vec4(-0.5, -0.5, -0.5, -0.5) | vec4(-32.0, -32.0, -32.0, -32.0) | vec4(-0.75, -0.75, -0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ssss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_tttt
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.5, 0.5, 0.5, 0.5) | vec4(1.25, 1.25, 1.25, 1.25) | vec4(-2.25, -2.25, -2.25, -2.25) | vec4(64.0, 64.0, 64.0, 64.0) | vec4(-0.0322580645161, -0.0322580645161, -0.0322580645161, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.tttt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_qqqq
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.825, 0.825, 0.825, 0.825) | vec4(1.75, 1.75, 1.75, 1.75) | vec4(9.0, 9.0, 9.0, 9.0) | vec4(24.0, 24.0, 24.0, 24.0) | vec4(0.25, 0.25, 0.25, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qqqq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_qppq
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.825, 0.75, 0.75, 0.825) | vec4(1.75, 1.125, 1.125, 1.75) | vec4(9.0, -4.875, -4.875, 9.0) | vec4(24.0, -51.0, -51.0, 24.0) | vec4(0.25, 0.0526315789474, 0.0526315789474, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qppq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_qqqt
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.825, 0.825, 0.825, 0.5) | vec4(1.75, 1.75, 1.75, 1.25) | vec4(9.0, 9.0, 9.0, -2.25) | vec4(24.0, 24.0, 24.0, 64.0) | vec4(0.25, 0.25, 0.25, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qqqt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_stss
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.0, 0.0) | vec4(1.0, 1.25, 1.0, 1.0) | vec4(-0.5, -2.25, -0.5, -0.5) | vec4(-32.0, 64.0, -32.0, -32.0) | vec4(-0.75, -0.0322580645161, -0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_ppqp
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.75, 0.75, 0.825, 0.75) | vec4(1.125, 1.125, 1.75, 1.125) | vec4(-4.875, -4.875, 9.0, -4.875) | vec4(-51.0, -51.0, 24.0, -51.0) | vec4(0.0526315789474, 0.0526315789474, 0.25, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ppqp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_r
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output float out0 = [ 0.0 | 1.0 | -0.5 | -32.0 | -0.75 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.r;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_a
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output float out0 = [ 0.825 | 1.75 | 9.0 | 24.0 | 0.25 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_ar
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec2 out0 = [ vec2(0.825, 0.0) | vec2(1.75, 1.0) | vec2(9.0, -0.5) | vec2(24.0, -32.0) | vec2(0.25, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ar;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_ab
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec2 out0 = [ vec2(0.825, 0.75) | vec2(1.75, 1.125) | vec2(9.0, -4.875) | vec2(24.0, -51.0) | vec2(0.25, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ab;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_aaa
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec3 out0 = [ vec3(0.825, 0.825, 0.825) | vec3(1.75, 1.75, 1.75) | vec3(9.0, 9.0, 9.0) | vec3(24.0, 24.0, 24.0) | vec3(0.25, 0.25, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.aaa;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_gga
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec3 out0 = [ vec3(0.5, 0.5, 0.825) | vec3(1.25, 1.25, 1.75) | vec3(-2.25, -2.25, 9.0) | vec3(64.0, 64.0, 24.0) | vec3(-0.0322580645161, -0.0322580645161, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.gga;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_abg
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec3 out0 = [ vec3(0.825, 0.75, 0.5) | vec3(1.75, 1.125, 1.25) | vec3(9.0, -4.875, -2.25) | vec3(24.0, -51.0, 64.0) | vec3(0.25, 0.0526315789474, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.abg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_rgba
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rgba;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_abgr
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.825, 0.75, 0.5, 0.0) | vec4(1.75, 1.125, 1.25, 1.0) | vec4(9.0, -4.875, -2.25, -0.5) | vec4(24.0, -51.0, 64.0, -32.0) | vec4(0.25, 0.0526315789474, -0.0322580645161, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.abgr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_rrrr
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.0, 0.0, 0.0, 0.0) | vec4(1.0, 1.0, 1.0, 1.0) | vec4(-0.5, -0.5, -0.5, -0.5) | vec4(-32.0, -32.0, -32.0, -32.0) | vec4(-0.75, -0.75, -0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rrrr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_gggg
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.5, 0.5, 0.5, 0.5) | vec4(1.25, 1.25, 1.25, 1.25) | vec4(-2.25, -2.25, -2.25, -2.25) | vec4(64.0, 64.0, 64.0, 64.0) | vec4(-0.0322580645161, -0.0322580645161, -0.0322580645161, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.gggg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_aaaa
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.825, 0.825, 0.825, 0.825) | vec4(1.75, 1.75, 1.75, 1.75) | vec4(9.0, 9.0, 9.0, 9.0) | vec4(24.0, 24.0, 24.0, 24.0) | vec4(0.25, 0.25, 0.25, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.aaaa;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_abba
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.825, 0.75, 0.75, 0.825) | vec4(1.75, 1.125, 1.125, 1.75) | vec4(9.0, -4.875, -4.875, 9.0) | vec4(24.0, -51.0, -51.0, 24.0) | vec4(0.25, 0.0526315789474, 0.0526315789474, 0.25) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.abba;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_aaag
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.825, 0.825, 0.825, 0.5) | vec4(1.75, 1.75, 1.75, 1.25) | vec4(9.0, 9.0, 9.0, -2.25) | vec4(24.0, 24.0, 24.0, 64.0) | vec4(0.25, 0.25, 0.25, -0.0322580645161) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.aaag;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_rgrr
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.0, 0.5, 0.0, 0.0) | vec4(1.0, 1.25, 1.0, 1.0) | vec4(-0.5, -2.25, -0.5, -0.5) | vec4(-32.0, 64.0, -32.0, -32.0) | vec4(-0.75, -0.0322580645161, -0.75, -0.75) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rgrr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_vec4_bbab
+		version 310 es
+		values
+		{
+			input vec4 in0 = [ vec4(0.0, 0.5, 0.75, 0.825) | vec4(1.0, 1.25, 1.125, 1.75) | vec4(-0.5, -2.25, -4.875, 9.0) | vec4(-32.0, 64.0, -51.0, 24.0) | vec4(-0.75, -0.0322580645161, 0.0526315789474, 0.25) ];
+			output vec4 out0 = [ vec4(0.75, 0.75, 0.825, 0.75) | vec4(1.125, 1.125, 1.75, 1.125) | vec4(-4.875, -4.875, 9.0, -4.875) | vec4(-51.0, -51.0, 24.0, -51.0) | vec4(0.0526315789474, 0.0526315789474, 0.25, 0.0526315789474) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bbab;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_x
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.x;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_xx
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, 0) | ivec2(-32, -32) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_xy
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_yx
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(-2, 0) | ivec2(64, -32) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_yxy
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-2, 0, -2) | ivec3(64, -32, 64) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yxy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_xyxx
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, 0, 0) | ivec4(-32, 64, -32, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyxx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_yyyy
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(-2, -2, -2, -2) | ivec4(64, 64, 64, 64) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yyyy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_s
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.s;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_ss
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, 0) | ivec2(-32, -32) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_st
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.st;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_ts
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(-2, 0) | ivec2(64, -32) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ts;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_tst
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-2, 0, -2) | ivec3(64, -32, 64) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.tst;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_stss
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, 0, 0) | ivec4(-32, 64, -32, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_tttt
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(-2, -2, -2, -2) | ivec4(64, 64, 64, 64) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.tttt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_r
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.r;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_rr
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, 0) | ivec2(-32, -32) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_rg
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_gr
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(-2, 0) | ivec2(64, -32) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.gr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_grg
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-2, 0, -2) | ivec3(64, -32, 64) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.grg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_rgrr
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, 0, 0) | ivec4(-32, 64, -32, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rgrr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec2_gggg
+		version 310 es
+		values
+		{
+			input ivec2 in0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -2) | ivec2(-32, 64) | ivec2(0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(-2, -2, -2, -2) | ivec4(64, 64, 64, 64) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.gggg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_x
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.x;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_z
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output int out0 = [ 0 | 1 | -4 | -51 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.z;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_xz
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -4) | ivec2(-32, -51) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_zz
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(-4, -4) | ivec2(-51, -51) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_xyz
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_zyx
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-4, -2, 0) | ivec3(-51, 64, -32) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zyx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_xxx
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, 0, 0) | ivec3(-32, -32, -32) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xxx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_zzz
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-4, -4, -4) | ivec3(-51, -51, -51) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zzz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_zzy
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-4, -4, -2) | ivec3(-51, -51, 64) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zzy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_yxy
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-2, 0, -2) | ivec3(64, -32, 64) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yxy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_xzx
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -4, 0) | ivec3(-32, -51, -32) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xzx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_xyyx
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -2, 0) | ivec4(-32, 64, 64, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyyx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_zzzz
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(-4, -4, -4, -4) | ivec4(-51, -51, -51, -51) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zzzz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_s
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.s;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_p
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output int out0 = [ 0 | 1 | -4 | -51 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.p;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_sp
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -4) | ivec2(-32, -51) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.sp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_pp
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(-4, -4) | ivec2(-51, -51) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.pp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_stp
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_pts
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-4, -2, 0) | ivec3(-51, 64, -32) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.pts;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_sss
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, 0, 0) | ivec3(-32, -32, -32) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.sss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_ppp
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-4, -4, -4) | ivec3(-51, -51, -51) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ppp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_ppt
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-4, -4, -2) | ivec3(-51, -51, 64) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ppt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_tst
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-2, 0, -2) | ivec3(64, -32, 64) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.tst;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_sps
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -4, 0) | ivec3(-32, -51, -32) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.sps;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_stts
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -2, 0) | ivec4(-32, 64, 64, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stts;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_pppp
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(-4, -4, -4, -4) | ivec4(-51, -51, -51, -51) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.pppp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_r
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.r;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_b
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output int out0 = [ 0 | 1 | -4 | -51 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.b;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_rb
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(0, -4) | ivec2(-32, -51) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_bb
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(-4, -4) | ivec2(-51, -51) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_rgb
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rgb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_bgr
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-4, -2, 0) | ivec3(-51, 64, -32) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bgr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_rrr
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, 0, 0) | ivec3(-32, -32, -32) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rrr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_bbb
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-4, -4, -4) | ivec3(-51, -51, -51) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bbb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_bbg
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-4, -4, -2) | ivec3(-51, -51, 64) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bbg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_grg
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-2, 0, -2) | ivec3(64, -32, 64) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.grg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_rbr
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -4, 0) | ivec3(-32, -51, -32) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rbr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_rggr
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -2, 0) | ivec4(-32, 64, 64, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rggr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec3_bbbb
+		version 310 es
+		values
+		{
+			input ivec3 in0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(0, -2, -4) | ivec3(-32, 64, -51) | ivec3(0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(-4, -4, -4, -4) | ivec4(-51, -51, -51, -51) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bbbb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_x
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.x;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_w
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output int out0 = [ 0 | 1 | 9 | 24 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.w;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_wx
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(9, 0) | ivec2(24, -32) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_wz
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(9, -4) | ivec2(24, -51) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_www
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(9, 9, 9) | ivec3(24, 24, 24) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.www;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_yyw
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-2, -2, 9) | ivec3(64, 64, 24) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yyw;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_wzy
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(9, -4, -2) | ivec3(24, -51, 64) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wzy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_xyzw
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyzw;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_wzyx
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(9, -4, -2, 0) | ivec4(24, -51, 64, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wzyx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_xxxx
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, 0, 0, 0) | ivec4(-32, -32, -32, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xxxx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_yyyy
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(-2, -2, -2, -2) | ivec4(64, 64, 64, 64) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yyyy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_wwww
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(9, 9, 9, 9) | ivec4(24, 24, 24, 24) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wwww;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_wzzw
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(9, -4, -4, 9) | ivec4(24, -51, -51, 24) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wzzw;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_wwwy
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(9, 9, 9, -2) | ivec4(24, 24, 24, 64) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wwwy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_xyxx
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, 0, 0) | ivec4(-32, 64, -32, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyxx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_zzwz
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(-4, -4, 9, -4) | ivec4(-51, -51, 24, -51) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zzwz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_s
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.s;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_q
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output int out0 = [ 0 | 1 | 9 | 24 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.q;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_qs
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(9, 0) | ivec2(24, -32) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qs;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_qp
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(9, -4) | ivec2(24, -51) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_qqq
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(9, 9, 9) | ivec3(24, 24, 24) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qqq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_ttq
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-2, -2, 9) | ivec3(64, 64, 24) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ttq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_qpt
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(9, -4, -2) | ivec3(24, -51, 64) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qpt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_stpq
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stpq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_qpts
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(9, -4, -2, 0) | ivec4(24, -51, 64, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qpts;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_ssss
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, 0, 0, 0) | ivec4(-32, -32, -32, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ssss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_tttt
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(-2, -2, -2, -2) | ivec4(64, 64, 64, 64) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.tttt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_qqqq
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(9, 9, 9, 9) | ivec4(24, 24, 24, 24) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qqqq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_qppq
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(9, -4, -4, 9) | ivec4(24, -51, -51, 24) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qppq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_qqqt
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(9, 9, 9, -2) | ivec4(24, 24, 24, 64) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qqqt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_stss
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, 0, 0) | ivec4(-32, 64, -32, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_ppqp
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(-4, -4, 9, -4) | ivec4(-51, -51, 24, -51) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ppqp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_r
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output int out0 = [ 0 | 1 | 0 | -32 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.r;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_a
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output int out0 = [ 0 | 1 | 9 | 24 | 0 ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_ar
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(9, 0) | ivec2(24, -32) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ar;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_ab
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec2 out0 = [ ivec2(0, 0) | ivec2(1, 1) | ivec2(9, -4) | ivec2(24, -51) | ivec2(0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ab;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_aaa
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(9, 9, 9) | ivec3(24, 24, 24) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.aaa;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_gga
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(-2, -2, 9) | ivec3(64, 64, 24) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.gga;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_abg
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec3 out0 = [ ivec3(0, 0, 0) | ivec3(1, 1, 1) | ivec3(9, -4, -2) | ivec3(24, -51, 64) | ivec3(0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.abg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_rgba
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rgba;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_abgr
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(9, -4, -2, 0) | ivec4(24, -51, 64, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.abgr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_rrrr
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, 0, 0, 0) | ivec4(-32, -32, -32, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rrrr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_gggg
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(-2, -2, -2, -2) | ivec4(64, 64, 64, 64) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.gggg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_aaaa
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(9, 9, 9, 9) | ivec4(24, 24, 24, 24) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.aaaa;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_abba
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(9, -4, -4, 9) | ivec4(24, -51, -51, 24) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.abba;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_aaag
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(9, 9, 9, -2) | ivec4(24, 24, 24, 64) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.aaag;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_rgrr
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, 0, 0) | ivec4(-32, 64, -32, -32) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rgrr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_ivec4_bbab
+		version 310 es
+		values
+		{
+			input ivec4 in0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(0, -2, -4, 9) | ivec4(-32, 64, -51, 24) | ivec4(0, 0, 0, 0) ];
+			output ivec4 out0 = [ ivec4(0, 0, 0, 0) | ivec4(1, 1, 1, 1) | ivec4(-4, -4, 9, -4) | ivec4(-51, -51, 24, -51) | ivec4(0, 0, 0, 0) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bbab;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_x
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bool out0 = [ true | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.x;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_xx
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_xy
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec2 out0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_yx
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec2 out0 = [ bvec2(false, true) | bvec2(false, false) | bvec2(true, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_yxy
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec3 out0 = [ bvec3(false, true, false) | bvec3(false, false, false) | bvec3(true, false, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yxy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_xyxx
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, true, true) | bvec4(false, false, false, false) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyxx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_yyyy
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yyyy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_s
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bool out0 = [ true | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.s;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_ss
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_st
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec2 out0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.st;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_ts
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec2 out0 = [ bvec2(false, true) | bvec2(false, false) | bvec2(true, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ts;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_tst
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec3 out0 = [ bvec3(false, true, false) | bvec3(false, false, false) | bvec3(true, false, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.tst;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_stss
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, true, true) | bvec4(false, false, false, false) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_tttt
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.tttt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_r
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bool out0 = [ true | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.r;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_rr
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_rg
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec2 out0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_gr
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec2 out0 = [ bvec2(false, true) | bvec2(false, false) | bvec2(true, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.gr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_grg
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec3 out0 = [ bvec3(false, true, false) | bvec3(false, false, false) | bvec3(true, false, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.grg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_rgrr
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, true, true) | bvec4(false, false, false, false) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rgrr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec2_gggg
+		version 310 es
+		values
+		{
+			input bvec2 in0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, true) | bvec2(true, true) | bvec2(false, false) ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.gggg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_x
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bool out0 = [ true | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.x;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_z
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bool out0 = [ false | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.z;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_xz
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec2 out0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_zz
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec2 out0 = [ bvec2(false, false) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_xyz
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_zyx
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, false, true) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zyx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_xxx
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, true, true) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xxx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_zzz
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, false, false) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zzz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_zzy
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, false, false) | bvec3(false, false, false) | bvec3(false, false, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zzy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_yxy
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, true, false) | bvec3(false, false, false) | bvec3(true, false, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yxy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_xzx
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, false, true) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xzx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_xyyx
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, false) | bvec4(false, true, true, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyyx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_zzzz
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zzzz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_s
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bool out0 = [ true | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.s;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_p
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bool out0 = [ false | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.p;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_sp
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec2 out0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.sp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_pp
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec2 out0 = [ bvec2(false, false) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.pp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_stp
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_pts
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, false, true) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.pts;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_sss
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, true, true) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.sss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_ppp
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, false, false) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ppp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_ppt
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, false, false) | bvec3(false, false, false) | bvec3(false, false, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ppt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_tst
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, true, false) | bvec3(false, false, false) | bvec3(true, false, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.tst;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_sps
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, false, true) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.sps;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_stts
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, false) | bvec4(false, true, true, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stts;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_pppp
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.pppp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_r
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bool out0 = [ true | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.r;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_b
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bool out0 = [ false | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.b;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_rb
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec2 out0 = [ bvec2(true, false) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_bb
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec2 out0 = [ bvec2(false, false) | bvec2(false, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_rgb
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rgb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_bgr
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, false, true) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bgr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_rrr
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, true, true) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rrr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_bbb
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, false, false) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bbb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_bbg
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, false, false) | bvec3(false, false, false) | bvec3(false, false, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bbg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_grg
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, true, false) | bvec3(false, false, false) | bvec3(true, false, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.grg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_rbr
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, false, true) | bvec3(false, false, false) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rbr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_rggr
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, false) | bvec4(false, true, true, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rggr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec3_bbbb
+		version 310 es
+		values
+		{
+			input bvec3 in0 = [ bvec3(true, false, false) | bvec3(false, false, false) | bvec3(false, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bbbb;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_x
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bool out0 = [ true | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.x;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_w
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bool out0 = [ true | true | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.w;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_wx
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(true, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_wz
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec2 out0 = [ bvec2(true, false) | bvec2(true, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_www
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, true, true) | bvec3(true, true, true) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.www;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_yyw
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, false, true) | bvec3(false, false, true) | bvec3(true, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yyw;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_wzy
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, false, false) | bvec3(true, false, false) | bvec3(false, false, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wzy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_xyzw
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyzw;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_wzyx
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(true, false, false, false) | bvec4(false, false, true, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wzyx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_xxxx
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, true, true, true) | bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xxxx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_yyyy
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.yyyy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_wwww
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wwww;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_wzzw
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(true, false, false, true) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wzzw;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_wwwy
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, true, true, false) | bvec4(true, true, true, false) | bvec4(false, false, false, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.wwwy;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_xyxx
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, true, true) | bvec4(false, false, false, false) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.xyxx;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_zzwz
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(false, false, true, false) | bvec4(false, false, true, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.zzwz;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_s
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bool out0 = [ true | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.s;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_q
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bool out0 = [ true | true | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.q;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_qs
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(true, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qs;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_qp
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec2 out0 = [ bvec2(true, false) | bvec2(true, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_qqq
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, true, true) | bvec3(true, true, true) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qqq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_ttq
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, false, true) | bvec3(false, false, true) | bvec3(true, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ttq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_qpt
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, false, false) | bvec3(true, false, false) | bvec3(false, false, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qpt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_stpq
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stpq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_qpts
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(true, false, false, false) | bvec4(false, false, true, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qpts;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_ssss
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, true, true, true) | bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ssss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_tttt
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.tttt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_qqqq
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qqqq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_qppq
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(true, false, false, true) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qppq;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_qqqt
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, true, true, false) | bvec4(true, true, true, false) | bvec4(false, false, false, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.qqqt;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_stss
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, true, true) | bvec4(false, false, false, false) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.stss;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_ppqp
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(false, false, true, false) | bvec4(false, false, true, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ppqp;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_r
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bool out0 = [ true | false | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.r;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_a
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bool out0 = [ true | true | false | true | false ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.a;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_ar
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec2 out0 = [ bvec2(true, true) | bvec2(true, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ar;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_ab
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec2 out0 = [ bvec2(true, false) | bvec2(true, false) | bvec2(false, false) | bvec2(true, true) | bvec2(false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.ab;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_aaa
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, true, true) | bvec3(true, true, true) | bvec3(false, false, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.aaa;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_gga
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec3 out0 = [ bvec3(false, false, true) | bvec3(false, false, true) | bvec3(true, true, false) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.gga;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_abg
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec3 out0 = [ bvec3(true, false, false) | bvec3(true, false, false) | bvec3(false, false, true) | bvec3(true, true, true) | bvec3(false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.abg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_rgba
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rgba;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_abgr
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(true, false, false, false) | bvec4(false, false, true, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.abgr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_rrrr
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, true, true, true) | bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rrrr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_gggg
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(false, false, false, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.gggg;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_aaaa
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, true, true, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.aaaa;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_abba
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, false, true) | bvec4(true, false, false, true) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.abba;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_aaag
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, true, true, false) | bvec4(true, true, true, false) | bvec4(false, false, false, true) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.aaag;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_rgrr
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(true, false, true, true) | bvec4(false, false, false, false) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.rgrr;
+				${OUTPUT}
+			}
+		""
+	end
+
+	case mediump_bvec4_bbab
+		version 310 es
+		values
+		{
+			input bvec4 in0 = [ bvec4(true, false, false, true) | bvec4(false, false, false, true) | bvec4(false, true, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+			output bvec4 out0 = [ bvec4(false, false, true, false) | bvec4(false, false, true, false) | bvec4(false, false, false, false) | bvec4(true, true, true, true) | bvec4(false, false, false, false) ];
+		}
+
+		both ""
+			#version 310 es
+			precision mediump float;
+
+			${DECLARATIONS}
+
+			void main()
+			{
+				${SETUP}
+				out0 = in0.bbab;
+				${OUTPUT}
+			}
+		""
+	end
+
+
+end # vector_swizzles
diff --git a/external/vulkancts/framework/vulkan/CMakeLists.txt b/external/vulkancts/framework/vulkan/CMakeLists.txt
new file mode 100644
index 0000000..9f76ec3
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/CMakeLists.txt
@@ -0,0 +1,73 @@
+# vk - Vulkan utilities
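+#
+# Builds the vkutil static helper library from the sources below. The library
+# always links against tcutil and glutil; when glslang and/or SPIRV-Tools are
+# available it additionally links against them and enables run-time GLSL to
+# SPIR-V compilation and SPIR-V assembly for the Vulkan tests.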
+
+set(VKUTIL_SRCS
+	vkApiVersion.cpp
+	vkApiVersion.hpp
+	vkBuilderUtil.cpp
+	vkBuilderUtil.hpp
+	vkDefs.cpp
+	vkDefs.hpp
+	vkRef.cpp
+	vkRef.hpp
+	vkRefUtil.cpp
+	vkRefUtil.hpp
+	vkPlatform.cpp
+	vkPlatform.hpp
+	vkPrograms.cpp
+	vkPrograms.hpp
+	vkStrUtil.cpp
+	vkStrUtil.hpp
+	vkQueryUtil.cpp
+	vkQueryUtil.hpp
+	vkMemUtil.cpp
+	vkMemUtil.hpp
+	vkDeviceUtil.cpp
+	vkDeviceUtil.hpp
+	vkGlslToSpirV.cpp
+	vkGlslToSpirV.hpp
+	vkSpirVAsm.hpp
+	vkSpirVAsm.cpp
+	vkSpirVProgram.hpp
+	vkSpirVProgram.cpp
+	vkBinaryRegistry.cpp
+	vkBinaryRegistry.hpp
+	vkNullDriver.cpp
+	vkNullDriver.hpp
+	vkImageUtil.cpp
+	vkImageUtil.hpp
+	vkTypeUtil.cpp
+	vkTypeUtil.hpp
+	vkAllocationCallbackUtil.cpp
+	vkAllocationCallbackUtil.hpp
+	)
+
+set(VKUTIL_LIBS
+	glutil
+	tcutil
+	)
+
+if (DEQP_HAVE_GLSLANG)
+	include_directories(${GLSLANG_INCLUDE_PATH})
+	add_definitions(-DDEQP_HAVE_GLSLANG=1)
+
+	# \note Code interfacing with glslang needs to include third-party headers
+	#       that trigger a large number of compiler warnings, so vkGlslToSpirV.cpp
+	#       is compiled with the relaxed third-party warning flags.
+	if (DE_COMPILER_IS_GCC OR DE_COMPILER_IS_CLANG)
+		set_source_files_properties(
+			FILES vkGlslToSpirV.cpp
+			PROPERTIES COMPILE_FLAGS "${DE_3RD_PARTY_CXX_FLAGS} -std=c++11")
+	endif ()
+
+	set(VKUTIL_LIBS ${VKUTIL_LIBS} ${GLSLANG_LIBRARY})
+endif ()
+
+if(DEQP_HAVE_SPIRV_TOOLS)
+	include_directories(${spirv-tools_SOURCE_DIR}/include)
+	include_directories(${spirv-tools_SOURCE_DIR}/external/include)
+
+	add_definitions(-DDEQP_HAVE_SPIRV_TOOLS=1)
+	set(VKUTIL_LIBS ${VKUTIL_LIBS} SPIRV-Tools)
+endif()
+
+add_library(vkutil STATIC ${VKUTIL_SRCS})
+target_link_libraries(vkutil ${VKUTIL_LIBS})
diff --git a/external/vulkancts/framework/vulkan/vkAllocationCallbackUtil.cpp b/external/vulkancts/framework/vulkan/vkAllocationCallbackUtil.cpp
new file mode 100644
index 0000000..1c992d4
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkAllocationCallbackUtil.cpp
@@ -0,0 +1,750 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Memory allocation callback utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkAllocationCallbackUtil.hpp"
+#include "tcuFormatUtil.hpp"
+#include "tcuTestLog.hpp"
+#include "deSTLUtil.hpp"
+#include "deMemory.h"
+
+#include <map>
+
+namespace vk
+{
+
+// System default allocator
+
+static VKAPI_ATTR void* VKAPI_CALL systemAllocate (void*, size_t size, size_t alignment, VkSystemAllocationScope)
+{
+	if (size > 0)
+		return deAlignedMalloc(size, (deUint32)alignment);
+	else
+		return DE_NULL;
+}
+
+static VKAPI_ATTR void VKAPI_CALL systemFree (void*, void* pMem)
+{
+	deAlignedFree(pMem);
+}
+
+static VKAPI_ATTR void* VKAPI_CALL systemReallocate (void*, void* pOriginal, size_t size, size_t alignment, VkSystemAllocationScope)
+{
+	return deAlignedRealloc(pOriginal, size, alignment);
+}
+
+static VKAPI_ATTR void VKAPI_CALL systemInternalAllocationNotification (void*, size_t, VkInternalAllocationType, VkSystemAllocationScope)
+{
+}
+
+static VKAPI_ATTR void VKAPI_CALL systemInternalFreeNotification (void*, size_t, VkInternalAllocationType, VkSystemAllocationScope)
+{
+}
+
+static const VkAllocationCallbacks s_systemAllocator =
+{
+	DE_NULL,		// pUserData
+	systemAllocate,
+	systemReallocate,
+	systemFree,
+	systemInternalAllocationNotification,
+	systemInternalFreeNotification,
+};
+
+const VkAllocationCallbacks* getSystemAllocator (void)
+{
+	return &s_systemAllocator;
+}
+
+// AllocationCallbacks
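+//
+// The static trampoline functions below recover the AllocationCallbacks object
+// from the pUserData pointer installed by makeCallbacks() and forward each raw
+// Vulkan callback to the corresponding virtual member function.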
+
+static VKAPI_ATTR void* VKAPI_CALL allocationCallback (void* pUserData, size_t size, size_t alignment, VkSystemAllocationScope allocationScope)
+{
+	return reinterpret_cast<AllocationCallbacks*>(pUserData)->allocate(size, alignment, allocationScope);
+}
+
+static VKAPI_ATTR void* VKAPI_CALL reallocationCallback (void* pUserData, void* pOriginal, size_t size, size_t alignment, VkSystemAllocationScope allocationScope)
+{
+	return reinterpret_cast<AllocationCallbacks*>(pUserData)->reallocate(pOriginal, size, alignment, allocationScope);
+}
+
+static VKAPI_ATTR void VKAPI_CALL freeCallback (void* pUserData, void* pMem)
+{
+	reinterpret_cast<AllocationCallbacks*>(pUserData)->free(pMem);
+}
+
+static VKAPI_ATTR void VKAPI_CALL internalAllocationNotificationCallback (void* pUserData, size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope)
+{
+	reinterpret_cast<AllocationCallbacks*>(pUserData)->notifyInternalAllocation(size, allocationType, allocationScope);
+}
+
+static VKAPI_ATTR void VKAPI_CALL internalFreeNotificationCallback (void* pUserData, size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope)
+{
+	reinterpret_cast<AllocationCallbacks*>(pUserData)->notifyInternalFree(size, allocationType, allocationScope);
+}
+
+static VkAllocationCallbacks makeCallbacks (AllocationCallbacks* object)
+{
+	const VkAllocationCallbacks callbacks =
+	{
+		reinterpret_cast<void*>(object),
+		allocationCallback,
+		reallocationCallback,
+		freeCallback,
+		internalAllocationNotificationCallback,
+		internalFreeNotificationCallback
+	};
+	return callbacks;
+}
+
+AllocationCallbacks::AllocationCallbacks (void)
+	: m_callbacks(makeCallbacks(this))
+{
+}
+
+AllocationCallbacks::~AllocationCallbacks (void)
+{
+}
+
+// AllocationCallbackRecord
+
+AllocationCallbackRecord AllocationCallbackRecord::allocation (size_t size, size_t alignment, VkSystemAllocationScope scope, void* returnedPtr)
+{
+	AllocationCallbackRecord record;
+
+	record.type							= TYPE_ALLOCATION;
+	record.data.allocation.size			= size;
+	record.data.allocation.alignment	= alignment;
+	record.data.allocation.scope		= scope;
+	record.data.allocation.returnedPtr	= returnedPtr;
+
+	return record;
+}
+
+AllocationCallbackRecord AllocationCallbackRecord::reallocation (void* original, size_t size, size_t alignment, VkSystemAllocationScope scope, void* returnedPtr)
+{
+	AllocationCallbackRecord record;
+
+	record.type								= TYPE_REALLOCATION;
+	record.data.reallocation.original		= original;
+	record.data.reallocation.size			= size;
+	record.data.reallocation.alignment		= alignment;
+	record.data.reallocation.scope			= scope;
+	record.data.reallocation.returnedPtr	= returnedPtr;
+
+	return record;
+}
+
+AllocationCallbackRecord AllocationCallbackRecord::free (void* mem)
+{
+	AllocationCallbackRecord record;
+
+	record.type				= TYPE_FREE;
+	record.data.free.mem	= mem;
+
+	return record;
+}
+
+AllocationCallbackRecord AllocationCallbackRecord::internalAllocation (size_t size, VkInternalAllocationType type, VkSystemAllocationScope scope)
+{
+	AllocationCallbackRecord record;
+
+	record.type								= TYPE_INTERNAL_ALLOCATION;
+	record.data.internalAllocation.size		= size;
+	record.data.internalAllocation.type		= type;
+	record.data.internalAllocation.scope	= scope;
+
+	return record;
+}
+
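+// \note Internal free records store their payload in the same
+//       data.internalAllocation member that internal allocation records use.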
+AllocationCallbackRecord AllocationCallbackRecord::internalFree (size_t size, VkInternalAllocationType type, VkSystemAllocationScope scope)
+{
+	AllocationCallbackRecord record;
+
+	record.type								= TYPE_INTERNAL_FREE;
+	record.data.internalAllocation.size		= size;
+	record.data.internalAllocation.type		= type;
+	record.data.internalAllocation.scope	= scope;
+
+	return record;
+}
+
+// ChainedAllocator
+
+ChainedAllocator::ChainedAllocator (const VkAllocationCallbacks* nextAllocator)
+	: m_nextAllocator(nextAllocator)
+{
+}
+
+ChainedAllocator::~ChainedAllocator (void)
+{
+}
+
+void* ChainedAllocator::allocate (size_t size, size_t alignment, VkSystemAllocationScope allocationScope)
+{
+	return m_nextAllocator->pfnAllocation(m_nextAllocator->pUserData, size, alignment, allocationScope);
+}
+
+void* ChainedAllocator::reallocate (void* original, size_t size, size_t alignment, VkSystemAllocationScope allocationScope)
+{
+	return m_nextAllocator->pfnReallocation(m_nextAllocator->pUserData, original, size, alignment, allocationScope);
+}
+
+void ChainedAllocator::free (void* mem)
+{
+	m_nextAllocator->pfnFree(m_nextAllocator->pUserData, mem);
+}
+
+void ChainedAllocator::notifyInternalAllocation (size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope)
+{
+	m_nextAllocator->pfnInternalAllocation(m_nextAllocator->pUserData, size, allocationType, allocationScope);
+}
+
+void ChainedAllocator::notifyInternalFree (size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope)
+{
+	m_nextAllocator->pfnInternalFree(m_nextAllocator->pUserData, size, allocationType, allocationScope);
+}
+
+// AllocationCallbackRecorder
+
+AllocationCallbackRecorder::AllocationCallbackRecorder (const VkAllocationCallbacks* allocator, deUint32 callCountHint)
+	: ChainedAllocator	(allocator)
+	, m_records			(callCountHint)
+{
+}
+
+AllocationCallbackRecorder::~AllocationCallbackRecorder (void)
+{
+}
+
+void* AllocationCallbackRecorder::allocate (size_t size, size_t alignment, VkSystemAllocationScope allocationScope)
+{
+	void* const	ptr	= ChainedAllocator::allocate(size, alignment, allocationScope);
+
+	m_records.append(AllocationCallbackRecord::allocation(size, alignment, allocationScope, ptr));
+
+	return ptr;
+}
+
+void* AllocationCallbackRecorder::reallocate (void* original, size_t size, size_t alignment, VkSystemAllocationScope allocationScope)
+{
+	void* const	ptr	= ChainedAllocator::reallocate(original, size, alignment, allocationScope);
+
+	m_records.append(AllocationCallbackRecord::reallocation(original, size, alignment, allocationScope, ptr));
+
+	return ptr;
+}
+
+void AllocationCallbackRecorder::free (void* mem)
+{
+	ChainedAllocator::free(mem);
+
+	m_records.append(AllocationCallbackRecord::free(mem));
+}
+
+void AllocationCallbackRecorder::notifyInternalAllocation (size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope)
+{
+	ChainedAllocator::notifyInternalAllocation(size, allocationType, allocationScope);
+
+	m_records.append(AllocationCallbackRecord::internalAllocation(size, allocationType, allocationScope));
+}
+
+void AllocationCallbackRecorder::notifyInternalFree (size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope)
+{
+	ChainedAllocator::notifyInternalFree(size, allocationType, allocationScope);
+
+	m_records.append(AllocationCallbackRecord::internalFree(size, allocationType, allocationScope));
+}
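+
+/* Illustrative usage sketch (not exercised in this file): wrap the system
+ * allocator in a recorder, pass its callback table to the API calls under test
+ * and validate the recorded stream afterwards. getCallbacks() is assumed to be
+ * the accessor declared for m_callbacks in vkAllocationCallbackUtil.hpp.
+ *
+ *   AllocationCallbackRecorder           recorder (getSystemAllocator(), 1024);
+ *   AllocationCallbackValidationResults  results;
+ *
+ *   // ... pass recorder.getCallbacks() as the pAllocator argument of the
+ *   //     object creation and destruction calls under test ...
+ *
+ *   validateAllocationCallbacks(recorder, &results);
+ *   // results.violations now lists any scope, alignment or alloc/free errors.
+ */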
+
+// DeterministicFailAllocator
+
+DeterministicFailAllocator::DeterministicFailAllocator (const VkAllocationCallbacks* allocator, deUint32 numPassingAllocs)
+	: ChainedAllocator	(allocator)
+	, m_numPassingAllocs(numPassingAllocs)
+	, m_allocationNdx	(0)
+{
+}
+
+DeterministicFailAllocator::~DeterministicFailAllocator (void)
+{
+}
+
+void* DeterministicFailAllocator::allocate (size_t size, size_t alignment, VkSystemAllocationScope allocationScope)
+{
+	if (deAtomicIncrementUint32(&m_allocationNdx) <= m_numPassingAllocs)
+		return ChainedAllocator::allocate(size, alignment, allocationScope);
+	else
+		return DE_NULL;
+}
+
+void* DeterministicFailAllocator::reallocate (void* original, size_t size, size_t alignment, VkSystemAllocationScope allocationScope)
+{
+	if (deAtomicIncrementUint32(&m_allocationNdx) <= m_numPassingAllocs)
+		return ChainedAllocator::reallocate(original, size, alignment, allocationScope);
+	else
+		return DE_NULL;
+}
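+
+/* \note Only allocate() and reallocate() are intercepted: once numPassingAllocs
+ *       calls have succeeded every further request returns DE_NULL, while free()
+ *       and the internal allocation notifications still pass straight through to
+ *       the chained allocator. Layering this on top of a recorder gives a
+ *       deterministic way to exercise out-of-host-memory paths, e.g. (sketch,
+ *       getCallbacks() assumed as above):
+ *
+ *         DeterministicFailAllocator failingAllocator (getSystemAllocator(), 3);
+ *         // pass failingAllocator.getCallbacks() to the call under test and
+ *         // expect either success or VK_ERROR_OUT_OF_HOST_MEMORY.
+ */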
+
+// Utils
+
+AllocationCallbackValidationResults::AllocationCallbackValidationResults (void)
+{
+	deMemset(internalAllocationTotal, 0, sizeof(internalAllocationTotal));
+}
+
+void AllocationCallbackValidationResults::clear (void)
+{
+	liveAllocations.clear();
+	violations.clear();
+	deMemset(internalAllocationTotal, 0, sizeof(internalAllocationTotal));
+}
+
+namespace
+{
+
+struct AllocationSlot
+{
+	AllocationCallbackRecord	record;
+	bool						isLive;
+
+	AllocationSlot (void)
+		: isLive	(false)
+	{}
+
+	AllocationSlot (const AllocationCallbackRecord& record_, bool isLive_)
+		: record	(record_)
+		, isLive	(isLive_)
+	{}
+};
+
+size_t getAlignment (const AllocationCallbackRecord& record)
+{
+	if (record.type == AllocationCallbackRecord::TYPE_ALLOCATION)
+		return record.data.allocation.alignment;
+	else if (record.type == AllocationCallbackRecord::TYPE_REALLOCATION)
+		return record.data.reallocation.alignment;
+	else
+	{
+		DE_ASSERT(false);
+		return 0;
+	}
+}
+
+} // anonymous
+
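+// Validates a recorded callback stream: each record must carry a valid
+// allocation scope and a power-of-two alignment, and reallocations are checked
+// against the set of live allocations built up from the earlier records.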
+void validateAllocationCallbacks (const AllocationCallbackRecorder& recorder, AllocationCallbackValidationResults* results)
+{
+	std::vector<AllocationSlot>		allocations;
+	std::map<void*, size_t>			ptrToSlotIndex;
+
+	DE_ASSERT(results->liveAllocations.empty() && results->violations.empty());
+
+	for (AllocationCallbackRecorder::RecordIterator callbackIter = recorder.getRecordsBegin();
+		 callbackIter != recorder.getRecordsEnd();
+		 ++callbackIter)
+	{
+		const AllocationCallbackRecord&		record	= *callbackIter;
+
+		// Validate scope
+		{
+			const VkSystemAllocationScope* const	scopePtr	= record.type == AllocationCallbackRecord::TYPE_ALLOCATION			? &record.data.allocation.scope
+																: record.type == AllocationCallbackRecord::TYPE_REALLOCATION		? &record.data.reallocation.scope
+																: record.type == AllocationCallbackRecord::TYPE_INTERNAL_ALLOCATION	? &record.data.internalAllocation.scope
+																: record.type == AllocationCallbackRecord::TYPE_INTERNAL_FREE		? &record.data.internalAllocation.scope
+																: DE_NULL;
+
+			if (scopePtr && !de::inBounds(*scopePtr, (VkSystemAllocationScope)0, VK_SYSTEM_ALLOCATION_SCOPE_LAST))
+				results->violations.push_back(AllocationCallbackViolation(record, AllocationCallbackViolation::REASON_INVALID_ALLOCATION_SCOPE));
+		}
+
+		// Validate alignment
+		if (record.type == AllocationCallbackRecord::TYPE_ALLOCATION ||
+			record.type == AllocationCallbackRecord::TYPE_REALLOCATION)
+		{
+			if (!deIsPowerOfTwoSize(getAlignment(record)))
+				results->violations.push_back(AllocationCallbackViolation(record, AllocationCallbackViolation::REASON_INVALID_ALIGNMENT));
+		}
+
+		// Validate actual allocation behavior
+		switch (record.type)
+		{
+			case AllocationCallbackRecord::TYPE_ALLOCATION:
+			{
+				if (record.data.allocation.returnedPtr)
+				{
+					if (!de::contains(ptrToSlotIndex, record.data.allocation.returnedPtr))
+					{
+						ptrToSlotIndex[record.data.allocation.returnedPtr] = allocations.size();
+						allocations.push_back(AllocationSlot(record, true));
+					}
+					else
+					{
+						const size_t		slotNdx		= ptrToSlotIndex[record.data.allocation.returnedPtr];
+						if (!allocations[slotNdx].isLive)
+						{
+							allocations[slotNdx].isLive = true;
+							allocations[slotNdx].record = record;
+						}
+						else
+						{
+							// we should not have multiple live allocations with the same pointer
+							DE_ASSERT(false);
+						}
+					}
+				}
+
+				break;
+			}
+
+			case AllocationCallbackRecord::TYPE_REALLOCATION:
+			{
+				if (de::contains(ptrToSlotIndex, record.data.reallocation.original))
+				{
+					const size_t		origSlotNdx		= ptrToSlotIndex[record.data.reallocation.original];
+					AllocationSlot&		origSlot		= allocations[origSlotNdx];
+
+					DE_ASSERT(record.data.reallocation.original != DE_NULL);
+
+					if (record.data.reallocation.size > 0)
+					{
+						if (getAlignment(origSlot.record) != record.data.reallocation.alignment)
+							results->violations.push_back(AllocationCallbackViolation(record, AllocationCallbackViolation::REASON_REALLOC_DIFFERENT_ALIGNMENT));
+
+						if (record.data.reallocation.original == record.data.reallocation.returnedPtr)
+						{
+							if (!origSlot.isLive)
+							{
+								results->violations.push_back(AllocationCallbackViolation(record, AllocationCallbackViolation::REASON_REALLOC_FREED_PTR));
+								origSlot.isLive	= true; // Mark live to suppress further errors
+							}
+
+							// Just update slot record
+							allocations[origSlotNdx].record = record;
+						}
+						else
+						{
+							if (record.data.reallocation.returnedPtr)
+							{
+								allocations[origSlotNdx].isLive = false;
+								if (!de::contains(ptrToSlotIndex, record.data.reallocation.returnedPtr))
+								{
+									ptrToSlotIndex[record.data.reallocation.returnedPtr] = allocations.size();
+									allocations.push_back(AllocationSlot(record, true));
+								}
+								else
+								{
+									const size_t slotNdx = ptrToSlotIndex[record.data.reallocation.returnedPtr];
+									if (!allocations[slotNdx].isLive)
+									{
+										allocations[slotNdx].isLive = true;
+										allocations[slotNdx].record = record;
+									}
+									else
+									{
+										// we should not have multiple live allocations with the same pointer
+										DE_ASSERT(false);
+									}
+								}
+							}
+							// else original ptr remains valid and live
+						}
+					}
+					else
+					{
+						DE_ASSERT(!record.data.reallocation.returnedPtr);
+
+						origSlot.isLive = false;
+					}
+				}
+				else
+				{
+					if (record.data.reallocation.original)
+						results->violations.push_back(AllocationCallbackViolation(record, AllocationCallbackViolation::REASON_REALLOC_NOT_ALLOCATED_PTR));
+
+					if (record.data.reallocation.returnedPtr)
+					{
+						DE_ASSERT(!de::contains(ptrToSlotIndex, record.data.reallocation.returnedPtr));
+						ptrToSlotIndex[record.data.reallocation.returnedPtr] = allocations.size();
+						allocations.push_back(AllocationSlot(record, true));
+					}
+				}
+
+				break;
+			}
+
+			case AllocationCallbackRecord::TYPE_FREE:
+			{
+				if (record.data.free.mem != DE_NULL) // Freeing null pointer is valid and ignored
+				{
+					if (de::contains(ptrToSlotIndex, record.data.free.mem))
+					{
+						const size_t	slotNdx		= ptrToSlotIndex[record.data.free.mem];
+
+						if (allocations[slotNdx].isLive)
+							allocations[slotNdx].isLive = false;
+						else
+							results->violations.push_back(AllocationCallbackViolation(record, AllocationCallbackViolation::REASON_DOUBLE_FREE));
+					}
+					else
+						results->violations.push_back(AllocationCallbackViolation(record, AllocationCallbackViolation::REASON_FREE_NOT_ALLOCATED_PTR));
+				}
+
+				break;
+			}
+
+			case AllocationCallbackRecord::TYPE_INTERNAL_ALLOCATION:
+			case AllocationCallbackRecord::TYPE_INTERNAL_FREE:
+			{
+				if (de::inBounds(record.data.internalAllocation.type, (VkInternalAllocationType)0, VK_INTERNAL_ALLOCATION_TYPE_LAST))
+				{
+					size_t* const		totalAllocSizePtr	= &results->internalAllocationTotal[record.data.internalAllocation.type][record.data.internalAllocation.scope];
+					const size_t		size				= record.data.internalAllocation.size;
+
+					if (record.type == AllocationCallbackRecord::TYPE_INTERNAL_FREE)
+					{
+						if (*totalAllocSizePtr < size)
+						{
+							results->violations.push_back(AllocationCallbackViolation(record, AllocationCallbackViolation::REASON_NEGATIVE_INTERNAL_ALLOCATION_TOTAL));
+							*totalAllocSizePtr = 0; // Reset to 0 to suppress compound errors
+						}
+						else
+							*totalAllocSizePtr -= size;
+					}
+					else
+						*totalAllocSizePtr += size;
+				}
+				else
+					results->violations.push_back(AllocationCallbackViolation(record, AllocationCallbackViolation::REASON_INVALID_INTERNAL_ALLOCATION_TYPE));
+
+				break;
+			}
+
+			default:
+				DE_ASSERT(false);
+		}
+	}
+
+	DE_ASSERT(!de::contains(ptrToSlotIndex, DE_NULL));
+
+	// Collect live allocations
+	for (std::vector<AllocationSlot>::const_iterator slotIter = allocations.begin();
+		 slotIter != allocations.end();
+		 ++slotIter)
+	{
+		if (slotIter->isLive)
+			results->liveAllocations.push_back(slotIter->record);
+	}
+}
+
+bool checkAndLog (tcu::TestLog& log, const AllocationCallbackValidationResults& results, deUint32 allowedLiveAllocScopeBits)
+{
+	using tcu::TestLog;
+
+	size_t	numLeaks	= 0;
+
+	if (!results.violations.empty())
+	{
+		for (size_t violationNdx = 0; violationNdx < results.violations.size(); ++violationNdx)
+		{
+			log << TestLog::Message << "VIOLATION " << (violationNdx+1)
+													<< ": " << results.violations[violationNdx]
+													<< " (" << results.violations[violationNdx].record << ")"
+				<< TestLog::EndMessage;
+		}
+
+		log << TestLog::Message << "ERROR: Found " << results.violations.size() << " invalid allocation callbacks!" << TestLog::EndMessage;
+	}
+
+	// Verify live allocations
+	for (size_t liveNdx = 0; liveNdx < results.liveAllocations.size(); ++liveNdx)
+	{
+		const AllocationCallbackRecord&		record	= results.liveAllocations[liveNdx];
+		const VkSystemAllocationScope		scope	= record.type == AllocationCallbackRecord::TYPE_ALLOCATION		? record.data.allocation.scope
+													: record.type == AllocationCallbackRecord::TYPE_REALLOCATION	? record.data.reallocation.scope
+													: VK_SYSTEM_ALLOCATION_SCOPE_LAST;
+
+		DE_ASSERT(de::inBounds(scope, (VkSystemAllocationScope)0, VK_SYSTEM_ALLOCATION_SCOPE_LAST));
+
+		if ((allowedLiveAllocScopeBits & (1u << scope)) == 0)
+		{
+			log << TestLog::Message << "LEAK " << (numLeaks+1) << ": " << record << TestLog::EndMessage;
+			numLeaks += 1;
+		}
+	}
+
+	// Verify internal allocations
+	for (int internalAllocTypeNdx = 0; internalAllocTypeNdx < VK_INTERNAL_ALLOCATION_TYPE_LAST; ++internalAllocTypeNdx)
+	{
+		for (int scopeNdx = 0; scopeNdx < VK_SYSTEM_ALLOCATION_SCOPE_LAST; ++scopeNdx)
+		{
+			const VkInternalAllocationType	type			= (VkInternalAllocationType)internalAllocTypeNdx;
+			const VkSystemAllocationScope	scope			= (VkSystemAllocationScope)scopeNdx;
+			const size_t					totalAllocated	= results.internalAllocationTotal[type][scope];
+
+			if ((allowedLiveAllocScopeBits & (1u << scopeNdx)) == 0 &&
+				totalAllocated > 0)
+			{
+				log << TestLog::Message << "LEAK " << (numLeaks+1) << ": " << totalAllocated
+										<< " bytes of (" << type << ", " << scope << ") internal memory is still allocated"
+					<< TestLog::EndMessage;
+				numLeaks += 1;
+			}
+		}
+	}
+
+	if (numLeaks > 0)
+		log << TestLog::Message << "ERROR: Found " << numLeaks << " memory leaks!" << TestLog::EndMessage;
+
+	return results.violations.empty() && numLeaks == 0;
+}
+
+bool validateAndLog (tcu::TestLog& log, const AllocationCallbackRecorder& recorder, deUint32 allowedLiveAllocScopeBits)
+{
+	AllocationCallbackValidationResults	validationResults;
+
+	validateAllocationCallbacks(recorder, &validationResults);
+
+	return checkAndLog(log, validationResults, allowedLiveAllocScopeBits);
+}
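+
+// Note: allowedLiveAllocScopeBits is a bitmask indexed by
+// VkSystemAllocationScope, i.e. bit (1u << scope) permits live allocations of
+// that scope to remain without being reported as leaks. For example
+// (illustrative only):
+//
+//	validateAndLog(log, recorder, (1u << VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE)
+//								| (1u << VK_SYSTEM_ALLOCATION_SCOPE_DEVICE));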
+
+std::ostream& operator<< (std::ostream& str, const AllocationCallbackRecord& record)
+{
+	switch (record.type)
+	{
+		case AllocationCallbackRecord::TYPE_ALLOCATION:
+			str << "ALLOCATION: size=" << record.data.allocation.size
+				<< ", alignment=" << record.data.allocation.alignment
+				<< ", scope=" << record.data.allocation.scope
+				<< ", returnedPtr=" << tcu::toHex(record.data.allocation.returnedPtr);
+			break;
+
+		case AllocationCallbackRecord::TYPE_REALLOCATION:
+			str << "REALLOCATION: original=" << tcu::toHex(record.data.reallocation.original)
+				<< ", size=" << record.data.reallocation.size
+				<< ", alignment=" << record.data.reallocation.alignment
+				<< ", scope=" << record.data.reallocation.scope
+				<< ", returnedPtr=" << tcu::toHex(record.data.reallocation.returnedPtr);
+			break;
+
+		case AllocationCallbackRecord::TYPE_FREE:
+			str << "FREE: mem=" << tcu::toHex(record.data.free.mem);
+			break;
+
+		case AllocationCallbackRecord::TYPE_INTERNAL_ALLOCATION:
+		case AllocationCallbackRecord::TYPE_INTERNAL_FREE:
+			str << "INTERNAL_" << (record.type == AllocationCallbackRecord::TYPE_INTERNAL_ALLOCATION ? "ALLOCATION" : "FREE")
+				<< ": size=" << record.data.internalAllocation.size
+				<< ", type=" << record.data.internalAllocation.type
+				<< ", scope=" << record.data.internalAllocation.scope;
+			break;
+
+		default:
+			DE_ASSERT(false);
+	}
+
+	return str;
+}
+
+std::ostream& operator<< (std::ostream& str, const AllocationCallbackViolation& violation)
+{
+	switch (violation.reason)
+	{
+		case AllocationCallbackViolation::REASON_DOUBLE_FREE:
+		{
+			DE_ASSERT(violation.record.type == AllocationCallbackRecord::TYPE_FREE);
+			str << "Double free of " << tcu::toHex(violation.record.data.free.mem);
+			break;
+		}
+
+		case AllocationCallbackViolation::REASON_FREE_NOT_ALLOCATED_PTR:
+		{
+			DE_ASSERT(violation.record.type == AllocationCallbackRecord::TYPE_FREE);
+			str << "Attempt to free " << tcu::toHex(violation.record.data.free.mem) << " which has not been allocated";
+			break;
+		}
+
+		case AllocationCallbackViolation::REASON_REALLOC_NOT_ALLOCATED_PTR:
+		{
+			DE_ASSERT(violation.record.type == AllocationCallbackRecord::TYPE_REALLOCATION);
+			str << "Attempt to reallocate " << tcu::toHex(violation.record.data.reallocation.original) << " which has not been allocated";
+			break;
+		}
+
+		case AllocationCallbackViolation::REASON_REALLOC_FREED_PTR:
+		{
+			DE_ASSERT(violation.record.type == AllocationCallbackRecord::TYPE_REALLOCATION);
+			str << "Attempt to reallocate " << tcu::toHex(violation.record.data.reallocation.original) << " which has been freed";
+			break;
+		}
+
+		case AllocationCallbackViolation::REASON_NEGATIVE_INTERNAL_ALLOCATION_TOTAL:
+		{
+			DE_ASSERT(violation.record.type == AllocationCallbackRecord::TYPE_INTERNAL_FREE);
+			str << "Internal allocation total for (" << violation.record.data.internalAllocation.type << ", " << violation.record.data.internalAllocation.scope << ") is negative";
+			break;
+		}
+
+		case AllocationCallbackViolation::REASON_INVALID_INTERNAL_ALLOCATION_TYPE:
+		{
+			DE_ASSERT(violation.record.type == AllocationCallbackRecord::TYPE_INTERNAL_ALLOCATION ||
+					  violation.record.type == AllocationCallbackRecord::TYPE_INTERNAL_FREE);
+			str << "Invalid internal allocation type " << tcu::toHex(violation.record.data.internalAllocation.type);
+			break;
+		}
+
+		case AllocationCallbackViolation::REASON_INVALID_ALLOCATION_SCOPE:
+		{
+			str << "Invalid allocation scope";
+			break;
+		}
+
+		case AllocationCallbackViolation::REASON_INVALID_ALIGNMENT:
+		{
+			str << "Invalid alignment";
+			break;
+		}
+
+		case AllocationCallbackViolation::REASON_REALLOC_DIFFERENT_ALIGNMENT:
+		{
+			str << "Reallocation with different alignment";
+			break;
+		}
+
+		default:
+			DE_ASSERT(false);
+	}
+
+	return str;
+}
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkAllocationCallbackUtil.hpp b/external/vulkancts/framework/vulkan/vkAllocationCallbackUtil.hpp
new file mode 100644
index 0000000..057dc7e
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkAllocationCallbackUtil.hpp
@@ -0,0 +1,235 @@
+#ifndef _VKALLOCATIONCALLBACKUTIL_HPP
+#define _VKALLOCATIONCALLBACKUTIL_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Memory allocation callback utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "deAppendList.hpp"
+
+#include <vector>
+#include <ostream>
+
+namespace tcu
+{
+class TestLog;
+}
+
+namespace vk
+{
+
+class AllocationCallbacks
+{
+public:
+									AllocationCallbacks		(void);
+	virtual							~AllocationCallbacks	(void);
+
+	virtual void*					allocate				(size_t size, size_t alignment, VkSystemAllocationScope allocationScope) = 0;
+	virtual void*					reallocate				(void* original, size_t size, size_t alignment, VkSystemAllocationScope allocationScope) = 0;
+	virtual void					free					(void* mem) = 0;
+
+	virtual void					notifyInternalAllocation(size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope) = 0;
+	virtual void					notifyInternalFree		(size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope) = 0;
+
+	const VkAllocationCallbacks*	getCallbacks			(void) const { return &m_callbacks;	}
+
+private:
+	const VkAllocationCallbacks		m_callbacks;
+};
+
+struct AllocationCallbackRecord
+{
+	enum Type
+	{
+		TYPE_ALLOCATION		= 0,		//!< Call to pfnAllocation
+		TYPE_REALLOCATION,				//!< Call to pfnReallocation
+		TYPE_FREE,						//!< Call to pfnFree
+		TYPE_INTERNAL_ALLOCATION,		//!< Call to pfnInternalAllocation
+		TYPE_INTERNAL_FREE,				//!< Call to pfnInternalFree
+
+		TYPE_LAST
+	};
+
+	Type	type;
+
+	union
+	{
+		struct
+		{
+			size_t						size;
+			size_t						alignment;
+			VkSystemAllocationScope		scope;
+			void*						returnedPtr;
+		} allocation;
+
+		struct
+		{
+			void*						original;
+			size_t						size;
+			size_t						alignment;
+			VkSystemAllocationScope		scope;
+			void*						returnedPtr;
+		} reallocation;
+
+		struct
+		{
+			void*						mem;
+		} free;
+
+		// \note Used for both INTERNAL_ALLOCATION and INTERNAL_FREE
+		struct
+		{
+			size_t						size;
+			VkInternalAllocationType	type;
+			VkSystemAllocationScope		scope;
+		} internalAllocation;
+	} data;
+
+									AllocationCallbackRecord	(void) : type(TYPE_LAST) {}
+
+	static AllocationCallbackRecord	allocation					(size_t size, size_t alignment, VkSystemAllocationScope scope, void* returnedPtr);
+	static AllocationCallbackRecord	reallocation				(void* original, size_t size, size_t alignment, VkSystemAllocationScope scope, void* returnedPtr);
+	static AllocationCallbackRecord	free						(void* mem);
+	static AllocationCallbackRecord	internalAllocation			(size_t size, VkInternalAllocationType type, VkSystemAllocationScope scope);
+	static AllocationCallbackRecord	internalFree				(size_t size, VkInternalAllocationType type, VkSystemAllocationScope scope);
+};
+
+class ChainedAllocator : public AllocationCallbacks
+{
+public:
+									ChainedAllocator		(const VkAllocationCallbacks* nextAllocator);
+									~ChainedAllocator		(void);
+
+	void*							allocate				(size_t size, size_t alignment, VkSystemAllocationScope allocationScope);
+	void*							reallocate				(void* original, size_t size, size_t alignment, VkSystemAllocationScope allocationScope);
+	void							free					(void* mem);
+
+	void							notifyInternalAllocation(size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope);
+	void							notifyInternalFree		(size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope);
+
+private:
+	const VkAllocationCallbacks*	m_nextAllocator;
+};
+
+class AllocationCallbackRecorder : public ChainedAllocator
+{
+public:
+							AllocationCallbackRecorder	(const VkAllocationCallbacks* allocator, deUint32 callCountHint = 1024);
+							~AllocationCallbackRecorder	(void);
+
+	void*					allocate					(size_t size, size_t alignment, VkSystemAllocationScope allocationScope);
+	void*					reallocate					(void* original, size_t size, size_t alignment, VkSystemAllocationScope allocationScope);
+	void					free						(void* mem);
+
+	void					notifyInternalAllocation	(size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope);
+	void					notifyInternalFree			(size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope);
+
+	typedef de::AppendList<AllocationCallbackRecord>::const_iterator	RecordIterator;
+
+	RecordIterator			getRecordsBegin				(void) const { return m_records.begin();	}
+	RecordIterator			getRecordsEnd				(void) const { return m_records.end();		}
+
+private:
+	typedef de::AppendList<AllocationCallbackRecord> Records;
+
+	Records					m_records;
+};
+
+//! Allocator that starts returning null after N allocs
+class DeterministicFailAllocator : public ChainedAllocator
+{
+public:
+							DeterministicFailAllocator	(const VkAllocationCallbacks* allocator, deUint32 numPassingAllocs);
+							~DeterministicFailAllocator	(void);
+
+	void*					allocate					(size_t size, size_t alignment, VkSystemAllocationScope allocationScope);
+	void*					reallocate					(void* original, size_t size, size_t alignment, VkSystemAllocationScope allocationScope);
+
+private:
+	const deUint32			m_numPassingAllocs;
+	volatile deUint32		m_allocationNdx;
+};
+
+struct AllocationCallbackViolation
+{
+	enum Reason
+	{
+		REASON_DOUBLE_FREE = 0,
+		REASON_FREE_NOT_ALLOCATED_PTR,
+		REASON_REALLOC_NOT_ALLOCATED_PTR,
+		REASON_REALLOC_FREED_PTR,
+		REASON_NEGATIVE_INTERNAL_ALLOCATION_TOTAL,
+		REASON_INVALID_ALLOCATION_SCOPE,
+		REASON_INVALID_INTERNAL_ALLOCATION_TYPE,
+		REASON_INVALID_ALIGNMENT,
+		REASON_REALLOC_DIFFERENT_ALIGNMENT,
+
+		REASON_LAST
+	};
+
+	AllocationCallbackRecord	record;
+	Reason						reason;
+
+	AllocationCallbackViolation (void)
+		: reason(REASON_LAST)
+	{}
+
+	AllocationCallbackViolation (const AllocationCallbackRecord& record_, Reason reason_)
+		: record(record_)
+		, reason(reason_)
+	{}
+};
+
+struct AllocationCallbackValidationResults
+{
+	std::vector<AllocationCallbackRecord>		liveAllocations;
+	size_t										internalAllocationTotal[VK_INTERNAL_ALLOCATION_TYPE_LAST][VK_SYSTEM_ALLOCATION_SCOPE_LAST];
+	std::vector<AllocationCallbackViolation>	violations;
+
+												AllocationCallbackValidationResults	(void);
+
+	void										clear								(void);
+};
+
+void							validateAllocationCallbacks	(const AllocationCallbackRecorder& recorder, AllocationCallbackValidationResults* results);
+bool							checkAndLog					(tcu::TestLog& log, const AllocationCallbackValidationResults& results, deUint32 allowedLiveAllocScopeBits);
+bool							validateAndLog				(tcu::TestLog& log, const AllocationCallbackRecorder& recorder, deUint32 allowedLiveAllocScopeBits);
+
+std::ostream&					operator<<					(std::ostream& str, const AllocationCallbackRecord& record);
+std::ostream&					operator<<					(std::ostream& str, const AllocationCallbackViolation& violation);
+
+const VkAllocationCallbacks*	getSystemAllocator			(void);
+
+} // vk
+
+#endif // _VKALLOCATIONCALLBACKUTIL_HPP
diff --git a/external/vulkancts/framework/vulkan/vkApiVersion.cpp b/external/vulkancts/framework/vulkan/vkApiVersion.cpp
new file mode 100644
index 0000000..2eae4ba
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkApiVersion.cpp
@@ -0,0 +1,56 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan api version.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkApiVersion.hpp"
+
+namespace vk
+{
+
+ApiVersion unpackVersion (deUint32 version)
+{
+	return ApiVersion((version & 0xFFC00000) >> 22,
+					  (version & 0x003FF000) >> 12,
+					   version & 0x00000FFF);
+}
+
+deUint32 pack (const ApiVersion& version)
+{
+	DE_ASSERT((version.majorNum & ~0x3FF) == 0);
+	DE_ASSERT((version.minorNum & ~0x3FF) == 0);
+	DE_ASSERT((version.patchNum & ~0xFFF) == 0);
+
+	return (version.majorNum << 22) | (version.minorNum << 12) | version.patchNum;
+}
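+
+// Packing layout implied by the masks above: major occupies bits 31..22
+// (10 bits), minor bits 21..12 (10 bits) and patch bits 11..0 (12 bits).
+// Worked example (illustrative only):
+//
+//	pack(ApiVersion(1, 0, 0))  == 0x00400000 == VK_MAKE_VERSION(1, 0, 0)
+//	unpackVersion(0x00400003)  == ApiVersion(1, 0, 3)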
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkApiVersion.hpp b/external/vulkancts/framework/vulkan/vkApiVersion.hpp
new file mode 100644
index 0000000..68d1f7b
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkApiVersion.hpp
@@ -0,0 +1,70 @@
+#ifndef _VKAPIVERSION_HPP
+#define _VKAPIVERSION_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan api version.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+
+#include <ostream>
+
+namespace vk
+{
+
+struct ApiVersion
+{
+	deUint32	majorNum;
+	deUint32	minorNum;
+	deUint32	patchNum;
+
+	ApiVersion (deUint32	majorNum_,
+				deUint32	minorNum_,
+				deUint32	patchNum_)
+		: majorNum	(majorNum_)
+		, minorNum	(minorNum_)
+		, patchNum	(patchNum_)
+	{
+	}
+};
+
+ApiVersion		unpackVersion		(deUint32 version);
+deUint32		pack				(const ApiVersion& version);
+
+inline std::ostream& operator<< (std::ostream& s, const ApiVersion& version)
+{
+	return s << version.majorNum << "." << version.minorNum << "." << version.patchNum;
+}
+
+} // vk
+
+#endif // _VKAPIVERSION_HPP
diff --git a/external/vulkancts/framework/vulkan/vkBasicTypes.inl b/external/vulkancts/framework/vulkan/vkBasicTypes.inl
new file mode 100644
index 0000000..dd8ecf6
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkBasicTypes.inl
@@ -0,0 +1,1099 @@
+/* WARNING: This is auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+enum { VK_API_VERSION					= VK_MAKE_VERSION(1, 0, 0)	};
+enum { VK_MAX_PHYSICAL_DEVICE_NAME_SIZE	= 256						};
+enum { VK_MAX_EXTENSION_NAME_SIZE		= 256						};
+enum { VK_UUID_SIZE						= 16						};
+enum { VK_MAX_MEMORY_TYPES				= 32						};
+enum { VK_MAX_MEMORY_HEAPS				= 16						};
+enum { VK_MAX_DESCRIPTION_SIZE			= 256						};
+enum { VK_ATTACHMENT_UNUSED				= (~0U)						};
+
+VK_DEFINE_HANDLE					(VkInstance,			HANDLE_TYPE_INSTANCE);
+VK_DEFINE_HANDLE					(VkPhysicalDevice,		HANDLE_TYPE_PHYSICAL_DEVICE);
+VK_DEFINE_HANDLE					(VkDevice,				HANDLE_TYPE_DEVICE);
+VK_DEFINE_HANDLE					(VkQueue,				HANDLE_TYPE_QUEUE);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkSemaphore,			HANDLE_TYPE_SEMAPHORE);
+VK_DEFINE_HANDLE					(VkCommandBuffer,		HANDLE_TYPE_COMMAND_BUFFER);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkFence,				HANDLE_TYPE_FENCE);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkDeviceMemory,		HANDLE_TYPE_DEVICE_MEMORY);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkBuffer,				HANDLE_TYPE_BUFFER);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkImage,				HANDLE_TYPE_IMAGE);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkEvent,				HANDLE_TYPE_EVENT);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkQueryPool,			HANDLE_TYPE_QUERY_POOL);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkBufferView,			HANDLE_TYPE_BUFFER_VIEW);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkImageView,			HANDLE_TYPE_IMAGE_VIEW);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkShaderModule,		HANDLE_TYPE_SHADER_MODULE);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkPipelineCache,		HANDLE_TYPE_PIPELINE_CACHE);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkPipelineLayout,		HANDLE_TYPE_PIPELINE_LAYOUT);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkRenderPass,			HANDLE_TYPE_RENDER_PASS);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkPipeline,			HANDLE_TYPE_PIPELINE);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkDescriptorSetLayout,	HANDLE_TYPE_DESCRIPTOR_SET_LAYOUT);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkSampler,				HANDLE_TYPE_SAMPLER);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkDescriptorPool,		HANDLE_TYPE_DESCRIPTOR_POOL);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkDescriptorSet,		HANDLE_TYPE_DESCRIPTOR_SET);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkFramebuffer,			HANDLE_TYPE_FRAMEBUFFER);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkCommandPool,			HANDLE_TYPE_COMMAND_POOL);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkSurfaceKHR,			HANDLE_TYPE_SURFACE_KHR);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkSwapchainKHR,		HANDLE_TYPE_SWAPCHAIN_KHR);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkDisplayKHR,			HANDLE_TYPE_DISPLAY_KHR);
+VK_DEFINE_NON_DISPATCHABLE_HANDLE	(VkDisplayModeKHR,		HANDLE_TYPE_DISPLAY_MODE_KHR);
+
+enum VkPipelineCacheHeaderVersion
+{
+	VK_PIPELINE_CACHE_HEADER_VERSION_ONE	= 1,
+};
+
+enum VkResult
+{
+	VK_SUCCESS							= 0,
+	VK_NOT_READY						= 1,
+	VK_TIMEOUT							= 2,
+	VK_EVENT_SET						= 3,
+	VK_EVENT_RESET						= 4,
+	VK_INCOMPLETE						= 5,
+	VK_ERROR_OUT_OF_HOST_MEMORY			= -1,
+	VK_ERROR_OUT_OF_DEVICE_MEMORY		= -2,
+	VK_ERROR_INITIALIZATION_FAILED		= -3,
+	VK_ERROR_DEVICE_LOST				= -4,
+	VK_ERROR_MEMORY_MAP_FAILED			= -5,
+	VK_ERROR_LAYER_NOT_PRESENT			= -6,
+	VK_ERROR_EXTENSION_NOT_PRESENT		= -7,
+	VK_ERROR_FEATURE_NOT_PRESENT		= -8,
+	VK_ERROR_INCOMPATIBLE_DRIVER		= -9,
+	VK_ERROR_TOO_MANY_OBJECTS			= -10,
+	VK_ERROR_FORMAT_NOT_SUPPORTED		= -11,
+	VK_ERROR_SURFACE_LOST_KHR			= -1000000000,
+	VK_SUBOPTIMAL_KHR					= 1000001003,
+	VK_ERROR_OUT_OF_DATE_KHR			= -1000001004,
+	VK_ERROR_INCOMPATIBLE_DISPLAY_KHR	= -1000003001,
+	VK_ERROR_NATIVE_WINDOW_IN_USE_KHR	= -1000008000,
+	VK_ERROR_VALIDATION_FAILED_EXT		= -1000011001,
+};
+
+enum VkStructureType
+{
+	VK_STRUCTURE_TYPE_APPLICATION_INFO							= 0,
+	VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO						= 1,
+	VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO					= 2,
+	VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO						= 3,
+	VK_STRUCTURE_TYPE_SUBMIT_INFO								= 4,
+	VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO						= 5,
+	VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE						= 6,
+	VK_STRUCTURE_TYPE_BIND_SPARSE_INFO							= 7,
+	VK_STRUCTURE_TYPE_FENCE_CREATE_INFO							= 8,
+	VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO						= 9,
+	VK_STRUCTURE_TYPE_EVENT_CREATE_INFO							= 10,
+	VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO					= 11,
+	VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO						= 12,
+	VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO					= 13,
+	VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO							= 14,
+	VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO					= 15,
+	VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO					= 16,
+	VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO				= 17,
+	VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO			= 18,
+	VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO	= 19,
+	VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO	= 20,
+	VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO	= 21,
+	VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO		= 22,
+	VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO	= 23,
+	VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO	= 24,
+	VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO	= 25,
+	VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO	= 26,
+	VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO		= 27,
+	VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO				= 28,
+	VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO				= 29,
+	VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO				= 30,
+	VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO						= 31,
+	VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO			= 32,
+	VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO				= 33,
+	VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO				= 34,
+	VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET						= 35,
+	VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET						= 36,
+	VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO					= 37,
+	VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO					= 38,
+	VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO					= 39,
+	VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO				= 40,
+	VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO			= 41,
+	VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO					= 42,
+	VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO					= 43,
+	VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER						= 44,
+	VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER						= 45,
+	VK_STRUCTURE_TYPE_MEMORY_BARRIER							= 46,
+	VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO				= 47,
+	VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO					= 48,
+	VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR					= 1000001000,
+	VK_STRUCTURE_TYPE_PRESENT_INFO_KHR							= 1000001001,
+	VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR				= 1000002000,
+	VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR			= 1000002001,
+	VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR					= 1000003000,
+	VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR				= 1000004000,
+	VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR				= 1000005000,
+	VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR			= 1000006000,
+	VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR				= 1000007000,
+	VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR			= 1000008000,
+	VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR				= 1000009000,
+	VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT				= 1000011000,
+};
+
+enum VkSystemAllocationScope
+{
+	VK_SYSTEM_ALLOCATION_SCOPE_COMMAND	= 0,
+	VK_SYSTEM_ALLOCATION_SCOPE_OBJECT	= 1,
+	VK_SYSTEM_ALLOCATION_SCOPE_CACHE	= 2,
+	VK_SYSTEM_ALLOCATION_SCOPE_DEVICE	= 3,
+	VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE	= 4,
+
+	VK_SYSTEM_ALLOCATION_SCOPE_LAST
+};
+
+enum VkInternalAllocationType
+{
+	VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE	= 0,
+
+	VK_INTERNAL_ALLOCATION_TYPE_LAST
+};
+
+enum VkFormat
+{
+	VK_FORMAT_UNDEFINED						= 0,
+	VK_FORMAT_R4G4_UNORM_PACK8				= 1,
+	VK_FORMAT_R4G4B4A4_UNORM_PACK16			= 2,
+	VK_FORMAT_B4G4R4A4_UNORM_PACK16			= 3,
+	VK_FORMAT_R5G6B5_UNORM_PACK16			= 4,
+	VK_FORMAT_B5G6R5_UNORM_PACK16			= 5,
+	VK_FORMAT_R5G5B5A1_UNORM_PACK16			= 6,
+	VK_FORMAT_B5G5R5A1_UNORM_PACK16			= 7,
+	VK_FORMAT_A1R5G5B5_UNORM_PACK16			= 8,
+	VK_FORMAT_R8_UNORM						= 9,
+	VK_FORMAT_R8_SNORM						= 10,
+	VK_FORMAT_R8_USCALED					= 11,
+	VK_FORMAT_R8_SSCALED					= 12,
+	VK_FORMAT_R8_UINT						= 13,
+	VK_FORMAT_R8_SINT						= 14,
+	VK_FORMAT_R8_SRGB						= 15,
+	VK_FORMAT_R8G8_UNORM					= 16,
+	VK_FORMAT_R8G8_SNORM					= 17,
+	VK_FORMAT_R8G8_USCALED					= 18,
+	VK_FORMAT_R8G8_SSCALED					= 19,
+	VK_FORMAT_R8G8_UINT						= 20,
+	VK_FORMAT_R8G8_SINT						= 21,
+	VK_FORMAT_R8G8_SRGB						= 22,
+	VK_FORMAT_R8G8B8_UNORM					= 23,
+	VK_FORMAT_R8G8B8_SNORM					= 24,
+	VK_FORMAT_R8G8B8_USCALED				= 25,
+	VK_FORMAT_R8G8B8_SSCALED				= 26,
+	VK_FORMAT_R8G8B8_UINT					= 27,
+	VK_FORMAT_R8G8B8_SINT					= 28,
+	VK_FORMAT_R8G8B8_SRGB					= 29,
+	VK_FORMAT_B8G8R8_UNORM					= 30,
+	VK_FORMAT_B8G8R8_SNORM					= 31,
+	VK_FORMAT_B8G8R8_USCALED				= 32,
+	VK_FORMAT_B8G8R8_SSCALED				= 33,
+	VK_FORMAT_B8G8R8_UINT					= 34,
+	VK_FORMAT_B8G8R8_SINT					= 35,
+	VK_FORMAT_B8G8R8_SRGB					= 36,
+	VK_FORMAT_R8G8B8A8_UNORM				= 37,
+	VK_FORMAT_R8G8B8A8_SNORM				= 38,
+	VK_FORMAT_R8G8B8A8_USCALED				= 39,
+	VK_FORMAT_R8G8B8A8_SSCALED				= 40,
+	VK_FORMAT_R8G8B8A8_UINT					= 41,
+	VK_FORMAT_R8G8B8A8_SINT					= 42,
+	VK_FORMAT_R8G8B8A8_SRGB					= 43,
+	VK_FORMAT_B8G8R8A8_UNORM				= 44,
+	VK_FORMAT_B8G8R8A8_SNORM				= 45,
+	VK_FORMAT_B8G8R8A8_USCALED				= 46,
+	VK_FORMAT_B8G8R8A8_SSCALED				= 47,
+	VK_FORMAT_B8G8R8A8_UINT					= 48,
+	VK_FORMAT_B8G8R8A8_SINT					= 49,
+	VK_FORMAT_B8G8R8A8_SRGB					= 50,
+	VK_FORMAT_A8B8G8R8_UNORM_PACK32			= 51,
+	VK_FORMAT_A8B8G8R8_SNORM_PACK32			= 52,
+	VK_FORMAT_A8B8G8R8_USCALED_PACK32		= 53,
+	VK_FORMAT_A8B8G8R8_SSCALED_PACK32		= 54,
+	VK_FORMAT_A8B8G8R8_UINT_PACK32			= 55,
+	VK_FORMAT_A8B8G8R8_SINT_PACK32			= 56,
+	VK_FORMAT_A8B8G8R8_SRGB_PACK32			= 57,
+	VK_FORMAT_A2R10G10B10_UNORM_PACK32		= 58,
+	VK_FORMAT_A2R10G10B10_SNORM_PACK32		= 59,
+	VK_FORMAT_A2R10G10B10_USCALED_PACK32	= 60,
+	VK_FORMAT_A2R10G10B10_SSCALED_PACK32	= 61,
+	VK_FORMAT_A2R10G10B10_UINT_PACK32		= 62,
+	VK_FORMAT_A2R10G10B10_SINT_PACK32		= 63,
+	VK_FORMAT_A2B10G10R10_UNORM_PACK32		= 64,
+	VK_FORMAT_A2B10G10R10_SNORM_PACK32		= 65,
+	VK_FORMAT_A2B10G10R10_USCALED_PACK32	= 66,
+	VK_FORMAT_A2B10G10R10_SSCALED_PACK32	= 67,
+	VK_FORMAT_A2B10G10R10_UINT_PACK32		= 68,
+	VK_FORMAT_A2B10G10R10_SINT_PACK32		= 69,
+	VK_FORMAT_R16_UNORM						= 70,
+	VK_FORMAT_R16_SNORM						= 71,
+	VK_FORMAT_R16_USCALED					= 72,
+	VK_FORMAT_R16_SSCALED					= 73,
+	VK_FORMAT_R16_UINT						= 74,
+	VK_FORMAT_R16_SINT						= 75,
+	VK_FORMAT_R16_SFLOAT					= 76,
+	VK_FORMAT_R16G16_UNORM					= 77,
+	VK_FORMAT_R16G16_SNORM					= 78,
+	VK_FORMAT_R16G16_USCALED				= 79,
+	VK_FORMAT_R16G16_SSCALED				= 80,
+	VK_FORMAT_R16G16_UINT					= 81,
+	VK_FORMAT_R16G16_SINT					= 82,
+	VK_FORMAT_R16G16_SFLOAT					= 83,
+	VK_FORMAT_R16G16B16_UNORM				= 84,
+	VK_FORMAT_R16G16B16_SNORM				= 85,
+	VK_FORMAT_R16G16B16_USCALED				= 86,
+	VK_FORMAT_R16G16B16_SSCALED				= 87,
+	VK_FORMAT_R16G16B16_UINT				= 88,
+	VK_FORMAT_R16G16B16_SINT				= 89,
+	VK_FORMAT_R16G16B16_SFLOAT				= 90,
+	VK_FORMAT_R16G16B16A16_UNORM			= 91,
+	VK_FORMAT_R16G16B16A16_SNORM			= 92,
+	VK_FORMAT_R16G16B16A16_USCALED			= 93,
+	VK_FORMAT_R16G16B16A16_SSCALED			= 94,
+	VK_FORMAT_R16G16B16A16_UINT				= 95,
+	VK_FORMAT_R16G16B16A16_SINT				= 96,
+	VK_FORMAT_R16G16B16A16_SFLOAT			= 97,
+	VK_FORMAT_R32_UINT						= 98,
+	VK_FORMAT_R32_SINT						= 99,
+	VK_FORMAT_R32_SFLOAT					= 100,
+	VK_FORMAT_R32G32_UINT					= 101,
+	VK_FORMAT_R32G32_SINT					= 102,
+	VK_FORMAT_R32G32_SFLOAT					= 103,
+	VK_FORMAT_R32G32B32_UINT				= 104,
+	VK_FORMAT_R32G32B32_SINT				= 105,
+	VK_FORMAT_R32G32B32_SFLOAT				= 106,
+	VK_FORMAT_R32G32B32A32_UINT				= 107,
+	VK_FORMAT_R32G32B32A32_SINT				= 108,
+	VK_FORMAT_R32G32B32A32_SFLOAT			= 109,
+	VK_FORMAT_R64_UINT						= 110,
+	VK_FORMAT_R64_SINT						= 111,
+	VK_FORMAT_R64_SFLOAT					= 112,
+	VK_FORMAT_R64G64_UINT					= 113,
+	VK_FORMAT_R64G64_SINT					= 114,
+	VK_FORMAT_R64G64_SFLOAT					= 115,
+	VK_FORMAT_R64G64B64_UINT				= 116,
+	VK_FORMAT_R64G64B64_SINT				= 117,
+	VK_FORMAT_R64G64B64_SFLOAT				= 118,
+	VK_FORMAT_R64G64B64A64_UINT				= 119,
+	VK_FORMAT_R64G64B64A64_SINT				= 120,
+	VK_FORMAT_R64G64B64A64_SFLOAT			= 121,
+	VK_FORMAT_B10G11R11_UFLOAT_PACK32		= 122,
+	VK_FORMAT_E5B9G9R9_UFLOAT_PACK32		= 123,
+	VK_FORMAT_D16_UNORM						= 124,
+	VK_FORMAT_X8_D24_UNORM_PACK32			= 125,
+	VK_FORMAT_D32_SFLOAT					= 126,
+	VK_FORMAT_S8_UINT						= 127,
+	VK_FORMAT_D16_UNORM_S8_UINT				= 128,
+	VK_FORMAT_D24_UNORM_S8_UINT				= 129,
+	VK_FORMAT_D32_SFLOAT_S8_UINT			= 130,
+	VK_FORMAT_BC1_RGB_UNORM_BLOCK			= 131,
+	VK_FORMAT_BC1_RGB_SRGB_BLOCK			= 132,
+	VK_FORMAT_BC1_RGBA_UNORM_BLOCK			= 133,
+	VK_FORMAT_BC1_RGBA_SRGB_BLOCK			= 134,
+	VK_FORMAT_BC2_UNORM_BLOCK				= 135,
+	VK_FORMAT_BC2_SRGB_BLOCK				= 136,
+	VK_FORMAT_BC3_UNORM_BLOCK				= 137,
+	VK_FORMAT_BC3_SRGB_BLOCK				= 138,
+	VK_FORMAT_BC4_UNORM_BLOCK				= 139,
+	VK_FORMAT_BC4_SNORM_BLOCK				= 140,
+	VK_FORMAT_BC5_UNORM_BLOCK				= 141,
+	VK_FORMAT_BC5_SNORM_BLOCK				= 142,
+	VK_FORMAT_BC6H_UFLOAT_BLOCK				= 143,
+	VK_FORMAT_BC6H_SFLOAT_BLOCK				= 144,
+	VK_FORMAT_BC7_UNORM_BLOCK				= 145,
+	VK_FORMAT_BC7_SRGB_BLOCK				= 146,
+	VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK		= 147,
+	VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK		= 148,
+	VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK		= 149,
+	VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK		= 150,
+	VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK		= 151,
+	VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK		= 152,
+	VK_FORMAT_EAC_R11_UNORM_BLOCK			= 153,
+	VK_FORMAT_EAC_R11_SNORM_BLOCK			= 154,
+	VK_FORMAT_EAC_R11G11_UNORM_BLOCK		= 155,
+	VK_FORMAT_EAC_R11G11_SNORM_BLOCK		= 156,
+	VK_FORMAT_ASTC_4x4_UNORM_BLOCK			= 157,
+	VK_FORMAT_ASTC_4x4_SRGB_BLOCK			= 158,
+	VK_FORMAT_ASTC_5x4_UNORM_BLOCK			= 159,
+	VK_FORMAT_ASTC_5x4_SRGB_BLOCK			= 160,
+	VK_FORMAT_ASTC_5x5_UNORM_BLOCK			= 161,
+	VK_FORMAT_ASTC_5x5_SRGB_BLOCK			= 162,
+	VK_FORMAT_ASTC_6x5_UNORM_BLOCK			= 163,
+	VK_FORMAT_ASTC_6x5_SRGB_BLOCK			= 164,
+	VK_FORMAT_ASTC_6x6_UNORM_BLOCK			= 165,
+	VK_FORMAT_ASTC_6x6_SRGB_BLOCK			= 166,
+	VK_FORMAT_ASTC_8x5_UNORM_BLOCK			= 167,
+	VK_FORMAT_ASTC_8x5_SRGB_BLOCK			= 168,
+	VK_FORMAT_ASTC_8x6_UNORM_BLOCK			= 169,
+	VK_FORMAT_ASTC_8x6_SRGB_BLOCK			= 170,
+	VK_FORMAT_ASTC_8x8_UNORM_BLOCK			= 171,
+	VK_FORMAT_ASTC_8x8_SRGB_BLOCK			= 172,
+	VK_FORMAT_ASTC_10x5_UNORM_BLOCK			= 173,
+	VK_FORMAT_ASTC_10x5_SRGB_BLOCK			= 174,
+	VK_FORMAT_ASTC_10x6_UNORM_BLOCK			= 175,
+	VK_FORMAT_ASTC_10x6_SRGB_BLOCK			= 176,
+	VK_FORMAT_ASTC_10x8_UNORM_BLOCK			= 177,
+	VK_FORMAT_ASTC_10x8_SRGB_BLOCK			= 178,
+	VK_FORMAT_ASTC_10x10_UNORM_BLOCK		= 179,
+	VK_FORMAT_ASTC_10x10_SRGB_BLOCK			= 180,
+	VK_FORMAT_ASTC_12x10_UNORM_BLOCK		= 181,
+	VK_FORMAT_ASTC_12x10_SRGB_BLOCK			= 182,
+	VK_FORMAT_ASTC_12x12_UNORM_BLOCK		= 183,
+	VK_FORMAT_ASTC_12x12_SRGB_BLOCK			= 184,
+
+	VK_FORMAT_LAST
+};
+
+enum VkImageType
+{
+	VK_IMAGE_TYPE_1D	= 0,
+	VK_IMAGE_TYPE_2D	= 1,
+	VK_IMAGE_TYPE_3D	= 2,
+
+	VK_IMAGE_TYPE_LAST
+};
+
+enum VkImageTiling
+{
+	VK_IMAGE_TILING_OPTIMAL	= 0,
+	VK_IMAGE_TILING_LINEAR	= 1,
+
+	VK_IMAGE_TILING_LAST
+};
+
+enum VkPhysicalDeviceType
+{
+	VK_PHYSICAL_DEVICE_TYPE_OTHER			= 0,
+	VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU	= 1,
+	VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU	= 2,
+	VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU		= 3,
+	VK_PHYSICAL_DEVICE_TYPE_CPU				= 4,
+
+	VK_PHYSICAL_DEVICE_TYPE_LAST
+};
+
+enum VkQueryType
+{
+	VK_QUERY_TYPE_OCCLUSION				= 0,
+	VK_QUERY_TYPE_PIPELINE_STATISTICS	= 1,
+	VK_QUERY_TYPE_TIMESTAMP				= 2,
+
+	VK_QUERY_TYPE_LAST
+};
+
+enum VkSharingMode
+{
+	VK_SHARING_MODE_EXCLUSIVE	= 0,
+	VK_SHARING_MODE_CONCURRENT	= 1,
+
+	VK_SHARING_MODE_LAST
+};
+
+enum VkImageLayout
+{
+	VK_IMAGE_LAYOUT_UNDEFINED							= 0,
+	VK_IMAGE_LAYOUT_GENERAL								= 1,
+	VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			= 2,
+	VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL	= 3,
+	VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL		= 4,
+	VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL			= 5,
+	VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL				= 6,
+	VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL				= 7,
+	VK_IMAGE_LAYOUT_PREINITIALIZED						= 8,
+	VK_IMAGE_LAYOUT_PRESENT_SRC_KHR						= 1000001002,
+};
+
+enum VkImageViewType
+{
+	VK_IMAGE_VIEW_TYPE_1D			= 0,
+	VK_IMAGE_VIEW_TYPE_2D			= 1,
+	VK_IMAGE_VIEW_TYPE_3D			= 2,
+	VK_IMAGE_VIEW_TYPE_CUBE			= 3,
+	VK_IMAGE_VIEW_TYPE_1D_ARRAY		= 4,
+	VK_IMAGE_VIEW_TYPE_2D_ARRAY		= 5,
+	VK_IMAGE_VIEW_TYPE_CUBE_ARRAY	= 6,
+
+	VK_IMAGE_VIEW_TYPE_LAST
+};
+
+enum VkComponentSwizzle
+{
+	VK_COMPONENT_SWIZZLE_IDENTITY	= 0,
+	VK_COMPONENT_SWIZZLE_ZERO		= 1,
+	VK_COMPONENT_SWIZZLE_ONE		= 2,
+	VK_COMPONENT_SWIZZLE_R			= 3,
+	VK_COMPONENT_SWIZZLE_G			= 4,
+	VK_COMPONENT_SWIZZLE_B			= 5,
+	VK_COMPONENT_SWIZZLE_A			= 6,
+
+	VK_COMPONENT_SWIZZLE_LAST
+};
+
+enum VkVertexInputRate
+{
+	VK_VERTEX_INPUT_RATE_VERTEX		= 0,
+	VK_VERTEX_INPUT_RATE_INSTANCE	= 1,
+
+	VK_VERTEX_INPUT_RATE_LAST
+};
+
+enum VkPrimitiveTopology
+{
+	VK_PRIMITIVE_TOPOLOGY_POINT_LIST					= 0,
+	VK_PRIMITIVE_TOPOLOGY_LINE_LIST						= 1,
+	VK_PRIMITIVE_TOPOLOGY_LINE_STRIP					= 2,
+	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST					= 3,
+	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP				= 4,
+	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN					= 5,
+	VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY		= 6,
+	VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY		= 7,
+	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY	= 8,
+	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY	= 9,
+	VK_PRIMITIVE_TOPOLOGY_PATCH_LIST					= 10,
+
+	VK_PRIMITIVE_TOPOLOGY_LAST
+};
+
+enum VkPolygonMode
+{
+	VK_POLYGON_MODE_FILL	= 0,
+	VK_POLYGON_MODE_LINE	= 1,
+	VK_POLYGON_MODE_POINT	= 2,
+
+	VK_POLYGON_MODE_LAST
+};
+
+enum VkFrontFace
+{
+	VK_FRONT_FACE_COUNTER_CLOCKWISE	= 0,
+	VK_FRONT_FACE_CLOCKWISE			= 1,
+
+	VK_FRONT_FACE_LAST
+};
+
+enum VkCompareOp
+{
+	VK_COMPARE_OP_NEVER				= 0,
+	VK_COMPARE_OP_LESS				= 1,
+	VK_COMPARE_OP_EQUAL				= 2,
+	VK_COMPARE_OP_LESS_OR_EQUAL		= 3,
+	VK_COMPARE_OP_GREATER			= 4,
+	VK_COMPARE_OP_NOT_EQUAL			= 5,
+	VK_COMPARE_OP_GREATER_OR_EQUAL	= 6,
+	VK_COMPARE_OP_ALWAYS			= 7,
+
+	VK_COMPARE_OP_LAST
+};
+
+enum VkStencilOp
+{
+	VK_STENCIL_OP_KEEP					= 0,
+	VK_STENCIL_OP_ZERO					= 1,
+	VK_STENCIL_OP_REPLACE				= 2,
+	VK_STENCIL_OP_INCREMENT_AND_CLAMP	= 3,
+	VK_STENCIL_OP_DECREMENT_AND_CLAMP	= 4,
+	VK_STENCIL_OP_INVERT				= 5,
+	VK_STENCIL_OP_INCREMENT_AND_WRAP	= 6,
+	VK_STENCIL_OP_DECREMENT_AND_WRAP	= 7,
+
+	VK_STENCIL_OP_LAST
+};
+
+enum VkLogicOp
+{
+	VK_LOGIC_OP_CLEAR			= 0,
+	VK_LOGIC_OP_AND				= 1,
+	VK_LOGIC_OP_AND_REVERSE		= 2,
+	VK_LOGIC_OP_COPY			= 3,
+	VK_LOGIC_OP_AND_INVERTED	= 4,
+	VK_LOGIC_OP_NO_OP			= 5,
+	VK_LOGIC_OP_XOR				= 6,
+	VK_LOGIC_OP_OR				= 7,
+	VK_LOGIC_OP_NOR				= 8,
+	VK_LOGIC_OP_EQUIVALENT		= 9,
+	VK_LOGIC_OP_INVERT			= 10,
+	VK_LOGIC_OP_OR_REVERSE		= 11,
+	VK_LOGIC_OP_COPY_INVERTED	= 12,
+	VK_LOGIC_OP_OR_INVERTED		= 13,
+	VK_LOGIC_OP_NAND			= 14,
+	VK_LOGIC_OP_SET				= 15,
+
+	VK_LOGIC_OP_LAST
+};
+
+enum VkBlendFactor
+{
+	VK_BLEND_FACTOR_ZERO						= 0,
+	VK_BLEND_FACTOR_ONE							= 1,
+	VK_BLEND_FACTOR_SRC_COLOR					= 2,
+	VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR			= 3,
+	VK_BLEND_FACTOR_DST_COLOR					= 4,
+	VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR			= 5,
+	VK_BLEND_FACTOR_SRC_ALPHA					= 6,
+	VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA			= 7,
+	VK_BLEND_FACTOR_DST_ALPHA					= 8,
+	VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA			= 9,
+	VK_BLEND_FACTOR_CONSTANT_COLOR				= 10,
+	VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR	= 11,
+	VK_BLEND_FACTOR_CONSTANT_ALPHA				= 12,
+	VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA	= 13,
+	VK_BLEND_FACTOR_SRC_ALPHA_SATURATE			= 14,
+	VK_BLEND_FACTOR_SRC1_COLOR					= 15,
+	VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR		= 16,
+	VK_BLEND_FACTOR_SRC1_ALPHA					= 17,
+	VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA		= 18,
+
+	VK_BLEND_FACTOR_LAST
+};
+
+enum VkBlendOp
+{
+	VK_BLEND_OP_ADD					= 0,
+	VK_BLEND_OP_SUBTRACT			= 1,
+	VK_BLEND_OP_REVERSE_SUBTRACT	= 2,
+	VK_BLEND_OP_MIN					= 3,
+	VK_BLEND_OP_MAX					= 4,
+
+	VK_BLEND_OP_LAST
+};
+
+enum VkDynamicState
+{
+	VK_DYNAMIC_STATE_VIEWPORT				= 0,
+	VK_DYNAMIC_STATE_SCISSOR				= 1,
+	VK_DYNAMIC_STATE_LINE_WIDTH				= 2,
+	VK_DYNAMIC_STATE_DEPTH_BIAS				= 3,
+	VK_DYNAMIC_STATE_BLEND_CONSTANTS		= 4,
+	VK_DYNAMIC_STATE_DEPTH_BOUNDS			= 5,
+	VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK	= 6,
+	VK_DYNAMIC_STATE_STENCIL_WRITE_MASK		= 7,
+	VK_DYNAMIC_STATE_STENCIL_REFERENCE		= 8,
+
+	VK_DYNAMIC_STATE_LAST
+};
+
+enum VkFilter
+{
+	VK_FILTER_NEAREST	= 0,
+	VK_FILTER_LINEAR	= 1,
+
+	VK_FILTER_LAST
+};
+
+enum VkSamplerMipmapMode
+{
+	VK_SAMPLER_MIPMAP_MODE_NEAREST	= 0,
+	VK_SAMPLER_MIPMAP_MODE_LINEAR	= 1,
+
+	VK_SAMPLER_MIPMAP_MODE_LAST
+};
+
+enum VkSamplerAddressMode
+{
+	VK_SAMPLER_ADDRESS_MODE_REPEAT					= 0,
+	VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT			= 1,
+	VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE			= 2,
+	VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER			= 3,
+	VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE	= 4,
+
+	VK_SAMPLER_ADDRESS_MODE_LAST
+};
+
+enum VkBorderColor
+{
+	VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK	= 0,
+	VK_BORDER_COLOR_INT_TRANSPARENT_BLACK	= 1,
+	VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK		= 2,
+	VK_BORDER_COLOR_INT_OPAQUE_BLACK		= 3,
+	VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE		= 4,
+	VK_BORDER_COLOR_INT_OPAQUE_WHITE		= 5,
+
+	VK_BORDER_COLOR_LAST
+};
+
+enum VkDescriptorType
+{
+	VK_DESCRIPTOR_TYPE_SAMPLER					= 0,
+	VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER	= 1,
+	VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE			= 2,
+	VK_DESCRIPTOR_TYPE_STORAGE_IMAGE			= 3,
+	VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER		= 4,
+	VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER		= 5,
+	VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER			= 6,
+	VK_DESCRIPTOR_TYPE_STORAGE_BUFFER			= 7,
+	VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC	= 8,
+	VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC	= 9,
+	VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT			= 10,
+
+	VK_DESCRIPTOR_TYPE_LAST
+};
+
+enum VkAttachmentLoadOp
+{
+	VK_ATTACHMENT_LOAD_OP_LOAD		= 0,
+	VK_ATTACHMENT_LOAD_OP_CLEAR		= 1,
+	VK_ATTACHMENT_LOAD_OP_DONT_CARE	= 2,
+
+	VK_ATTACHMENT_LOAD_OP_LAST
+};
+
+enum VkAttachmentStoreOp
+{
+	VK_ATTACHMENT_STORE_OP_STORE		= 0,
+	VK_ATTACHMENT_STORE_OP_DONT_CARE	= 1,
+
+	VK_ATTACHMENT_STORE_OP_LAST
+};
+
+enum VkPipelineBindPoint
+{
+	VK_PIPELINE_BIND_POINT_GRAPHICS	= 0,
+	VK_PIPELINE_BIND_POINT_COMPUTE	= 1,
+
+	VK_PIPELINE_BIND_POINT_LAST
+};
+
+enum VkCommandBufferLevel
+{
+	VK_COMMAND_BUFFER_LEVEL_PRIMARY		= 0,
+	VK_COMMAND_BUFFER_LEVEL_SECONDARY	= 1,
+
+	VK_COMMAND_BUFFER_LEVEL_LAST
+};
+
+enum VkIndexType
+{
+	VK_INDEX_TYPE_UINT16	= 0,
+	VK_INDEX_TYPE_UINT32	= 1,
+
+	VK_INDEX_TYPE_LAST
+};
+
+enum VkSubpassContents
+{
+	VK_SUBPASS_CONTENTS_INLINE						= 0,
+	VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS	= 1,
+
+	VK_SUBPASS_CONTENTS_LAST
+};
+
+enum VkColorSpaceKHR
+{
+	VK_COLORSPACE_SRGB_NONLINEAR_KHR	= 0,
+
+	VK_COLOR_SPACE_K_H_R_LAST
+};
+
+enum VkPresentModeKHR
+{
+	VK_PRESENT_MODE_IMMEDIATE_KHR		= 0,
+	VK_PRESENT_MODE_MAILBOX_KHR			= 1,
+	VK_PRESENT_MODE_FIFO_KHR			= 2,
+	VK_PRESENT_MODE_FIFO_RELAXED_KHR	= 3,
+
+	VK_PRESENT_MODE_K_H_R_LAST
+};
+
+enum VkFormatFeatureFlagBits
+{
+	VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT					= 0x00000001,
+	VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT					= 0x00000002,
+	VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT			= 0x00000004,
+	VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT			= 0x00000008,
+	VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT			= 0x00000010,
+	VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT	= 0x00000020,
+	VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT					= 0x00000040,
+	VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT				= 0x00000080,
+	VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT		= 0x00000100,
+	VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT		= 0x00000200,
+	VK_FORMAT_FEATURE_BLIT_SRC_BIT						= 0x00000400,
+	VK_FORMAT_FEATURE_BLIT_DST_BIT						= 0x00000800,
+};
+typedef deUint32 VkFormatFeatureFlags;
+
+enum VkImageUsageFlagBits
+{
+	VK_IMAGE_USAGE_TRANSFER_SRC_BIT				= 0x00000001,
+	VK_IMAGE_USAGE_TRANSFER_DST_BIT				= 0x00000002,
+	VK_IMAGE_USAGE_SAMPLED_BIT					= 0x00000004,
+	VK_IMAGE_USAGE_STORAGE_BIT					= 0x00000008,
+	VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT			= 0x00000010,
+	VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT	= 0x00000020,
+	VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT		= 0x00000040,
+	VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT			= 0x00000080,
+};
+typedef deUint32 VkImageUsageFlags;
+
+enum VkImageCreateFlagBits
+{
+	VK_IMAGE_CREATE_SPARSE_BINDING_BIT		= 0x00000001,
+	VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT	= 0x00000002,
+	VK_IMAGE_CREATE_SPARSE_ALIASED_BIT		= 0x00000004,
+	VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT		= 0x00000008,
+	VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT		= 0x00000010,
+};
+typedef deUint32 VkImageCreateFlags;
+
+enum VkSampleCountFlagBits
+{
+	VK_SAMPLE_COUNT_1_BIT	= 0x00000001,
+	VK_SAMPLE_COUNT_2_BIT	= 0x00000002,
+	VK_SAMPLE_COUNT_4_BIT	= 0x00000004,
+	VK_SAMPLE_COUNT_8_BIT	= 0x00000008,
+	VK_SAMPLE_COUNT_16_BIT	= 0x00000010,
+	VK_SAMPLE_COUNT_32_BIT	= 0x00000020,
+	VK_SAMPLE_COUNT_64_BIT	= 0x00000040,
+};
+typedef deUint32 VkSampleCountFlags;
+
+enum VkQueueFlagBits
+{
+	VK_QUEUE_GRAPHICS_BIT		= 0x00000001,
+	VK_QUEUE_COMPUTE_BIT		= 0x00000002,
+	VK_QUEUE_TRANSFER_BIT		= 0x00000004,
+	VK_QUEUE_SPARSE_BINDING_BIT	= 0x00000008,
+};
+typedef deUint32 VkQueueFlags;
+
+enum VkMemoryPropertyFlagBits
+{
+	VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT		= 0x00000001,
+	VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT		= 0x00000002,
+	VK_MEMORY_PROPERTY_HOST_COHERENT_BIT	= 0x00000004,
+	VK_MEMORY_PROPERTY_HOST_CACHED_BIT		= 0x00000008,
+	VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT	= 0x00000010,
+};
+typedef deUint32 VkMemoryPropertyFlags;
+
+enum VkMemoryHeapFlagBits
+{
+	VK_MEMORY_HEAP_DEVICE_LOCAL_BIT	= 0x00000001,
+};
+typedef deUint32 VkMemoryHeapFlags;
+
+enum VkPipelineStageFlagBits
+{
+	VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT						= 0x00000001,
+	VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT						= 0x00000002,
+	VK_PIPELINE_STAGE_VERTEX_INPUT_BIT						= 0x00000004,
+	VK_PIPELINE_STAGE_VERTEX_SHADER_BIT						= 0x00000008,
+	VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT		= 0x00000010,
+	VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT	= 0x00000020,
+	VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT					= 0x00000040,
+	VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT					= 0x00000080,
+	VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT				= 0x00000100,
+	VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT				= 0x00000200,
+	VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT			= 0x00000400,
+	VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT					= 0x00000800,
+	VK_PIPELINE_STAGE_TRANSFER_BIT							= 0x00001000,
+	VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT					= 0x00002000,
+	VK_PIPELINE_STAGE_HOST_BIT								= 0x00004000,
+	VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT						= 0x00008000,
+	VK_PIPELINE_STAGE_ALL_COMMANDS_BIT						= 0x00010000,
+};
+typedef deUint32 VkPipelineStageFlags;
+
+enum VkImageAspectFlagBits
+{
+	VK_IMAGE_ASPECT_COLOR_BIT		= 0x00000001,
+	VK_IMAGE_ASPECT_DEPTH_BIT		= 0x00000002,
+	VK_IMAGE_ASPECT_STENCIL_BIT		= 0x00000004,
+	VK_IMAGE_ASPECT_METADATA_BIT	= 0x00000008,
+};
+typedef deUint32 VkImageAspectFlags;
+
+enum VkSparseImageFormatFlagBits
+{
+	VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT			= 0x00000001,
+	VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT			= 0x00000002,
+	VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT	= 0x00000004,
+};
+typedef deUint32 VkSparseImageFormatFlags;
+
+enum VkSparseMemoryBindFlagBits
+{
+	VK_SPARSE_MEMORY_BIND_METADATA_BIT	= 0x00000001,
+};
+typedef deUint32 VkSparseMemoryBindFlags;
+
+enum VkFenceCreateFlagBits
+{
+	VK_FENCE_CREATE_SIGNALED_BIT	= 0x00000001,
+};
+typedef deUint32 VkFenceCreateFlags;
+
+enum VkQueryPipelineStatisticFlagBits
+{
+	VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT						= 0x00000001,
+	VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT					= 0x00000002,
+	VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT					= 0x00000004,
+	VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT					= 0x00000008,
+	VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT					= 0x00000010,
+	VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT						= 0x00000020,
+	VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT							= 0x00000040,
+	VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT					= 0x00000080,
+	VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT			= 0x00000100,
+	VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT	= 0x00000200,
+	VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT					= 0x00000400,
+};
+typedef deUint32 VkQueryPipelineStatisticFlags;
+
+enum VkQueryResultFlagBits
+{
+	VK_QUERY_RESULT_64_BIT					= 0x00000001,
+	VK_QUERY_RESULT_WAIT_BIT				= 0x00000002,
+	VK_QUERY_RESULT_WITH_AVAILABILITY_BIT	= 0x00000004,
+	VK_QUERY_RESULT_PARTIAL_BIT				= 0x00000008,
+};
+typedef deUint32 VkQueryResultFlags;
+
+enum VkBufferCreateFlagBits
+{
+	VK_BUFFER_CREATE_SPARSE_BINDING_BIT		= 0x00000001,
+	VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT	= 0x00000002,
+	VK_BUFFER_CREATE_SPARSE_ALIASED_BIT		= 0x00000004,
+};
+typedef deUint32 VkBufferCreateFlags;
+
+enum VkBufferUsageFlagBits
+{
+	VK_BUFFER_USAGE_TRANSFER_SRC_BIT			= 0x00000001,
+	VK_BUFFER_USAGE_TRANSFER_DST_BIT			= 0x00000002,
+	VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT	= 0x00000004,
+	VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT	= 0x00000008,
+	VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT			= 0x00000010,
+	VK_BUFFER_USAGE_STORAGE_BUFFER_BIT			= 0x00000020,
+	VK_BUFFER_USAGE_INDEX_BUFFER_BIT			= 0x00000040,
+	VK_BUFFER_USAGE_VERTEX_BUFFER_BIT			= 0x00000080,
+	VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT			= 0x00000100,
+};
+typedef deUint32 VkBufferUsageFlags;
+
+enum VkPipelineCreateFlagBits
+{
+	VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT	= 0x00000001,
+	VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT	= 0x00000002,
+	VK_PIPELINE_CREATE_DERIVATIVE_BIT			= 0x00000004,
+};
+typedef deUint32 VkPipelineCreateFlags;
+
+enum VkShaderStageFlagBits
+{
+	VK_SHADER_STAGE_VERTEX_BIT					= 0x00000001,
+	VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT	= 0x00000002,
+	VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT	= 0x00000004,
+	VK_SHADER_STAGE_GEOMETRY_BIT				= 0x00000008,
+	VK_SHADER_STAGE_FRAGMENT_BIT				= 0x00000010,
+	VK_SHADER_STAGE_COMPUTE_BIT					= 0x00000020,
+	VK_SHADER_STAGE_ALL_GRAPHICS				= 0x1F,
+	VK_SHADER_STAGE_ALL							= 0x7FFFFFFF,
+};
+typedef deUint32 VkShaderStageFlags;
+
+enum VkCullModeFlagBits
+{
+	VK_CULL_MODE_NONE			= 0,
+	VK_CULL_MODE_FRONT_BIT		= 0x00000001,
+	VK_CULL_MODE_BACK_BIT		= 0x00000002,
+	VK_CULL_MODE_FRONT_AND_BACK	= 0x3,
+};
+typedef deUint32 VkCullModeFlags;
+
+enum VkColorComponentFlagBits
+{
+	VK_COLOR_COMPONENT_R_BIT	= 0x00000001,
+	VK_COLOR_COMPONENT_G_BIT	= 0x00000002,
+	VK_COLOR_COMPONENT_B_BIT	= 0x00000004,
+	VK_COLOR_COMPONENT_A_BIT	= 0x00000008,
+};
+typedef deUint32 VkColorComponentFlags;
+
+enum VkDescriptorPoolCreateFlagBits
+{
+	VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT	= 0x00000001,
+};
+typedef deUint32 VkDescriptorPoolCreateFlags;
+
+enum VkAttachmentDescriptionFlagBits
+{
+	VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT	= 0x00000001,
+};
+typedef deUint32 VkAttachmentDescriptionFlags;
+
+enum VkAccessFlagBits
+{
+	VK_ACCESS_INDIRECT_COMMAND_READ_BIT				= 0x00000001,
+	VK_ACCESS_INDEX_READ_BIT						= 0x00000002,
+	VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT				= 0x00000004,
+	VK_ACCESS_UNIFORM_READ_BIT						= 0x00000008,
+	VK_ACCESS_INPUT_ATTACHMENT_READ_BIT				= 0x00000010,
+	VK_ACCESS_SHADER_READ_BIT						= 0x00000020,
+	VK_ACCESS_SHADER_WRITE_BIT						= 0x00000040,
+	VK_ACCESS_COLOR_ATTACHMENT_READ_BIT				= 0x00000080,
+	VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT			= 0x00000100,
+	VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT		= 0x00000200,
+	VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT	= 0x00000400,
+	VK_ACCESS_TRANSFER_READ_BIT						= 0x00000800,
+	VK_ACCESS_TRANSFER_WRITE_BIT					= 0x00001000,
+	VK_ACCESS_HOST_READ_BIT							= 0x00002000,
+	VK_ACCESS_HOST_WRITE_BIT						= 0x00004000,
+	VK_ACCESS_MEMORY_READ_BIT						= 0x00008000,
+	VK_ACCESS_MEMORY_WRITE_BIT						= 0x00010000,
+};
+typedef deUint32 VkAccessFlags;
+
+enum VkDependencyFlagBits
+{
+	VK_DEPENDENCY_BY_REGION_BIT	= 0x00000001,
+};
+typedef deUint32 VkDependencyFlags;
+
+enum VkCommandPoolCreateFlagBits
+{
+	VK_COMMAND_POOL_CREATE_TRANSIENT_BIT			= 0x00000001,
+	VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT	= 0x00000002,
+};
+typedef deUint32 VkCommandPoolCreateFlags;
+
+enum VkCommandPoolResetFlagBits
+{
+	VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT	= 0x00000001,
+};
+typedef deUint32 VkCommandPoolResetFlags;
+
+enum VkCommandBufferUsageFlagBits
+{
+	VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT			= 0x00000001,
+	VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT	= 0x00000002,
+	VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT		= 0x00000004,
+};
+typedef deUint32 VkCommandBufferUsageFlags;
+
+enum VkQueryControlFlagBits
+{
+	VK_QUERY_CONTROL_PRECISE_BIT	= 0x00000001,
+};
+typedef deUint32 VkQueryControlFlags;
+
+enum VkCommandBufferResetFlagBits
+{
+	VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT	= 0x00000001,
+};
+typedef deUint32 VkCommandBufferResetFlags;
+
+enum VkStencilFaceFlagBits
+{
+	VK_STENCIL_FACE_FRONT_BIT	= 0x00000001,
+	VK_STENCIL_FACE_BACK_BIT	= 0x00000002,
+	VK_STENCIL_FRONT_AND_BACK	= 0x3,
+};
+typedef deUint32 VkStencilFaceFlags;
+
+enum VkSurfaceTransformFlagBitsKHR
+{
+	VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR						= 0x00000001,
+	VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR						= 0x00000002,
+	VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR						= 0x00000004,
+	VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR						= 0x00000008,
+	VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR				= 0x00000010,
+	VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR	= 0x00000020,
+	VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR	= 0x00000040,
+	VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR	= 0x00000080,
+	VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR						= 0x00000100,
+};
+typedef deUint32 VkSurfaceTransformFlagsKHR;
+
+enum VkCompositeAlphaFlagBitsKHR
+{
+	VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR			= 0x00000001,
+	VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR	= 0x00000002,
+	VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR	= 0x00000004,
+	VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR			= 0x00000008,
+};
+typedef deUint32 VkCompositeAlphaFlagsKHR;
+
+enum VkDisplayPlaneAlphaFlagBitsKHR
+{
+	VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR					= 0x00000001,
+	VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR					= 0x00000002,
+	VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR				= 0x00000004,
+	VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR	= 0x00000008,
+};
+typedef deUint32 VkDisplayPlaneAlphaFlagsKHR;
+
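+// The flag types below have no individual bits defined in this header; they are
+// plain deUint32 bitmask typedefs reserved for future use.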
+typedef deUint32 VkInstanceCreateFlags;
+
+typedef deUint32 VkDeviceCreateFlags;
+
+typedef deUint32 VkDeviceQueueCreateFlags;
+
+typedef deUint32 VkMemoryMapFlags;
+
+typedef deUint32 VkSemaphoreCreateFlags;
+
+typedef deUint32 VkEventCreateFlags;
+
+typedef deUint32 VkQueryPoolCreateFlags;
+
+typedef deUint32 VkBufferViewCreateFlags;
+
+typedef deUint32 VkImageViewCreateFlags;
+
+typedef deUint32 VkShaderModuleCreateFlags;
+
+typedef deUint32 VkPipelineCacheCreateFlags;
+
+typedef deUint32 VkPipelineShaderStageCreateFlags;
+
+typedef deUint32 VkPipelineVertexInputStateCreateFlags;
+
+typedef deUint32 VkPipelineInputAssemblyStateCreateFlags;
+
+typedef deUint32 VkPipelineTessellationStateCreateFlags;
+
+typedef deUint32 VkPipelineViewportStateCreateFlags;
+
+typedef deUint32 VkPipelineRasterizationStateCreateFlags;
+
+typedef deUint32 VkPipelineMultisampleStateCreateFlags;
+
+typedef deUint32 VkPipelineDepthStencilStateCreateFlags;
+
+typedef deUint32 VkPipelineColorBlendStateCreateFlags;
+
+typedef deUint32 VkPipelineDynamicStateCreateFlags;
+
+typedef deUint32 VkPipelineLayoutCreateFlags;
+
+typedef deUint32 VkSamplerCreateFlags;
+
+typedef deUint32 VkDescriptorSetLayoutCreateFlags;
+
+typedef deUint32 VkDescriptorPoolResetFlags;
+
+typedef deUint32 VkFramebufferCreateFlags;
+
+typedef deUint32 VkRenderPassCreateFlags;
+
+typedef deUint32 VkSubpassDescriptionFlags;
+
+typedef deUint32 VkSwapchainCreateFlagsKHR;
+
+typedef deUint32 VkDisplayModeCreateFlagsKHR;
+
+typedef deUint32 VkDisplaySurfaceCreateFlagsKHR;
+
+typedef deUint32 VkXlibSurfaceCreateFlagsKHR;
+
+typedef deUint32 VkXcbSurfaceCreateFlagsKHR;
+
+typedef deUint32 VkWaylandSurfaceCreateFlagsKHR;
+
+typedef deUint32 VkMirSurfaceCreateFlagsKHR;
+
+typedef deUint32 VkAndroidSurfaceCreateFlagsKHR;
+
+typedef deUint32 VkWin32SurfaceCreateFlagsKHR;
+
diff --git a/external/vulkancts/framework/vulkan/vkBinaryRegistry.cpp b/external/vulkancts/framework/vulkan/vkBinaryRegistry.cpp
new file mode 100644
index 0000000..99efa56
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkBinaryRegistry.cpp
@@ -0,0 +1,444 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Program binary registry.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkBinaryRegistry.hpp"
+#include "tcuResource.hpp"
+#include "tcuFormatUtil.hpp"
+#include "deFilePath.hpp"
+#include "deStringUtil.hpp"
+#include "deString.h"
+#include "deInt32.h"
+
+#include <sstream>
+#include <fstream>
+#include <stdexcept>
+#include <limits>
+
+namespace vk
+{
+namespace BinaryRegistryDetail
+{
+
+using std::string;
+using std::vector;
+
+namespace
+{
+
+string getProgramPath (const std::string& dirName, deUint32 index)
+{
+	return de::FilePath::join(dirName, de::toString(tcu::toHex(index)) + ".spv").getPath();
+}
+
+string getIndexPath (const std::string& dirName)
+{
+	return de::FilePath::join(dirName, "index.bin").getPath();
+}
+
+void writeBinary (const std::string& dstDir, deUint32 index, const ProgramBinary& binary)
+{
+	const de::FilePath	fullPath	= getProgramPath(dstDir, index);
+
+	if (!de::FilePath(fullPath.getDirName()).exists())
+		de::createDirectoryAndParents(fullPath.getDirName().c_str());
+
+	{
+		std::ofstream	out		(fullPath.getPath(), std::ios_base::binary);
+
+		if (!out.is_open() || !out.good())
+			throw tcu::Exception("Failed to open " + string(fullPath.getPath()));
+
+		out.write((const char*)binary.getBinary(), binary.getSize());
+		out.close();
+	}
+}
+
+deUint32 binaryHash (const ProgramBinary* binary)
+{
+	return deMemoryHash(binary->getBinary(), binary->getSize());
+}
+
+deBool binaryEqual (const ProgramBinary* a, const ProgramBinary* b)
+{
+	if (a->getSize() == b->getSize())
+		return deMemoryEqual(a->getBinary(), b->getBinary(), a->getSize());
+	else
+		return DE_FALSE;
+}
+
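+//! Flattens a ProgramIdentifier into the 0-terminated sequence of 4-byte words
+//! ("<testCasePath>#<programName>") used as the search key in the binary index.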
+std::vector<deUint32> getSearchPath (const ProgramIdentifier& id)
+{
+	const std::string	combinedStr		= id.testCasePath + '#' + id.programName;
+	const size_t		strLen			= combinedStr.size();
+	const size_t		numWords		= strLen/4 + 1;		// Must always end up with at least one 0 byte
+	vector<deUint32>	words			(numWords, 0u);
+
+	deMemcpy(&words[0], combinedStr.c_str(), strLen);
+
+	return words;
+}
+
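+//! Returns a pointer to the binary index stored for 'id' in the loaded index trie,
+//! or DE_NULL if the program is not present.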
+const deUint32* findBinaryIndex (BinaryIndexAccess* index, const ProgramIdentifier& id)
+{
+	const vector<deUint32>	words	= getSearchPath(id);
+	size_t					nodeNdx	= 0;
+	size_t					wordNdx	= 0;
+
+	for (;;)
+	{
+		const BinaryIndexNode&	curNode	= (*index)[nodeNdx];
+
+		if (curNode.word == words[wordNdx])
+		{
+			if (wordNdx+1 < words.size())
+			{
+				TCU_CHECK_INTERNAL((size_t)curNode.index < index->size());
+
+				nodeNdx  = curNode.index;
+				wordNdx	+= 1;
+			}
+			else if (wordNdx+1 == words.size())
+				return &curNode.index;
+			else
+				return DE_NULL;
+		}
+		else if (curNode.word != 0)
+		{
+			nodeNdx += 1;
+
+			// Index should always be null-terminated
+			TCU_CHECK_INTERNAL(nodeNdx < index->size());
+		}
+		else
+			return DE_NULL;
+	}
+
+	return DE_NULL;
+}
+
+//! Sparse index node used for final binary index construction
+struct SparseIndexNode
+{
+	deUint32						word;
+	deUint32						index;
+	std::vector<SparseIndexNode*>	children;
+
+	SparseIndexNode (deUint32 word_, deUint32 index_)
+		: word	(word_)
+		, index	(index_)
+	{}
+
+	SparseIndexNode (void)
+		: word	(0)
+		, index	(0)
+	{}
+
+	~SparseIndexNode (void)
+	{
+		for (size_t ndx = 0; ndx < children.size(); ndx++)
+			delete children[ndx];
+	}
+};
+
+#if defined(DE_DEBUG)
+bool isNullByteTerminated (deUint32 word)
+{
+	deUint8 bytes[4];
+	deMemcpy(bytes, &word, sizeof(word));
+	return bytes[3] == 0;
+}
+#endif
+
+void addToSparseIndex (SparseIndexNode* group, const deUint32* words, size_t numWords, deUint32 index)
+{
+	const deUint32		curWord	= words[0];
+	SparseIndexNode*	child	= DE_NULL;
+
+	for (size_t childNdx = 0; childNdx < group->children.size(); childNdx++)
+	{
+		if (group->children[childNdx]->word == curWord)
+		{
+			child = group->children[childNdx];
+			break;
+		}
+	}
+
+	DE_ASSERT(numWords > 1 || !child);
+
+	if (!child)
+	{
+		group->children.reserve(group->children.size()+1);
+		group->children.push_back(new SparseIndexNode(curWord, numWords == 1 ? index : 0));
+
+		child = group->children.back();
+	}
+
+	if (numWords > 1)
+		addToSparseIndex(child, words+1, numWords-1, index);
+	else
+		DE_ASSERT(isNullByteTerminated(curWord));
+}
+
+// Prepares the sparse index for finalization. Ensures that the child with word = 0
+// is moved to the end, or that one is added if there is no such child already.
+void normalizeSparseIndex (SparseIndexNode* group)
+{
+	int		zeroChildPos	= -1;
+
+	for (size_t childNdx = 0; childNdx < group->children.size(); childNdx++)
+	{
+		normalizeSparseIndex(group->children[childNdx]);
+
+		if (group->children[childNdx]->word == 0)
+		{
+			DE_ASSERT(zeroChildPos < 0);
+			zeroChildPos = (int)childNdx;
+		}
+	}
+
+	if (zeroChildPos >= 0)
+	{
+		// Move child with word = 0 to last
+		while (zeroChildPos != (int)group->children.size()-1)
+		{
+			std::swap(group->children[zeroChildPos], group->children[zeroChildPos+1]);
+			zeroChildPos += 1;
+		}
+	}
+	else if (!group->children.empty())
+	{
+		group->children.reserve(group->children.size()+1);
+		group->children.push_back(new SparseIndexNode(0, 0));
+	}
+}
+
+deUint32 getIndexSize (const SparseIndexNode* group)
+{
+	size_t	numNodes	= group->children.size();
+
+	for (size_t childNdx = 0; childNdx < group->children.size(); childNdx++)
+		numNodes += getIndexSize(group->children[childNdx]);
+
+	DE_ASSERT(numNodes <= std::numeric_limits<deUint32>::max());
+
+	return (deUint32)numNodes;
+}
+
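+//! Flattens a (normalized) sparse index subtree into the final node array: the
+//! children of 'group' are written first, each child's subtree is appended after
+//! them, and the total number of nodes written is returned.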
+deUint32 addAndCountNodes (BinaryIndexNode* index, deUint32 baseOffset, const SparseIndexNode* group)
+{
+	const deUint32	numLocalNodes	= (deUint32)group->children.size();
+	deUint32		curOffset		= numLocalNodes;
+
+	// Must be normalized prior to construction of final index
+	DE_ASSERT(group->children.empty() || group->children.back()->word == 0);
+
+	for (size_t childNdx = 0; childNdx < numLocalNodes; childNdx++)
+	{
+		const SparseIndexNode*	child		= group->children[childNdx];
+		const deUint32			subtreeSize	= addAndCountNodes(index+curOffset, baseOffset+curOffset, child);
+
+		index[childNdx].word = child->word;
+
+		if (subtreeSize == 0)
+			index[childNdx].index = child->index;
+		else
+		{
+			DE_ASSERT(child->index == 0);
+			index[childNdx].index = baseOffset+curOffset;
+		}
+
+		curOffset += subtreeSize;
+	}
+
+	return curOffset;
+}
+
+void buildFinalIndex (std::vector<BinaryIndexNode>* dst, const SparseIndexNode* root)
+{
+	const deUint32	indexSize	= getIndexSize(root);
+
+	DE_ASSERT(indexSize > 0);
+
+	dst->resize(indexSize);
+	addAndCountNodes(&(*dst)[0], 0, root);
+}
+
+} // anonymous
+
+// BinaryRegistryWriter
+
+DE_IMPLEMENT_POOL_HASH(BinaryHash, const ProgramBinary*, deUint32, binaryHash, binaryEqual);
+
+BinaryRegistryWriter::BinaryRegistryWriter (const std::string& dstPath)
+	: m_dstPath			(dstPath)
+	, m_binaryIndexMap	(DE_NULL)
+{
+	m_binaryIndexMap = BinaryHash_create(m_memPool.getRawPool());
+
+	if (!m_binaryIndexMap)
+		throw std::bad_alloc();
+}
+
+BinaryRegistryWriter::~BinaryRegistryWriter (void)
+{
+	for (BinaryVector::const_iterator binaryIter = m_compactedBinaries.begin();
+		 binaryIter != m_compactedBinaries.end();
+		 ++binaryIter)
+		delete *binaryIter;
+}
+
+void BinaryRegistryWriter::storeProgram (const ProgramIdentifier& id, const ProgramBinary& binary)
+{
+	const deUint32* const	indexPtr	= BinaryHash_find(m_binaryIndexMap, &binary);
+	deUint32				index		= indexPtr ? *indexPtr : ~0u;
+
+	DE_ASSERT(binary.getFormat() == vk::PROGRAM_FORMAT_SPIRV);
+
+	if (!indexPtr)
+	{
+		ProgramBinary* const	binaryClone		= new ProgramBinary(binary);
+
+		try
+		{
+			index = (deUint32)m_compactedBinaries.size();
+			m_compactedBinaries.push_back(binaryClone);
+		}
+		catch (...)
+		{
+			delete binaryClone;
+			throw;
+		}
+
+		writeBinary(m_dstPath, index, binary);
+
+		if (!BinaryHash_insert(m_binaryIndexMap, binaryClone, index))
+			throw std::bad_alloc();
+	}
+
+	DE_ASSERT((size_t)index < m_compactedBinaries.size());
+
+	m_binaryIndices.push_back(BinaryIndex(id, index));
+}
+
+void BinaryRegistryWriter::writeIndex (void) const
+{
+	const de::FilePath				indexPath	= getIndexPath(m_dstPath);
+	std::vector<BinaryIndexNode>	index;
+
+	{
+		de::UniquePtr<SparseIndexNode>	sparseIndex	(new SparseIndexNode());
+
+		for (size_t progNdx = 0; progNdx < m_binaryIndices.size(); progNdx++)
+		{
+			const std::vector<deUint32>	searchPath	= getSearchPath(m_binaryIndices[progNdx].id);
+			addToSparseIndex(sparseIndex.get(), &searchPath[0], searchPath.size(), m_binaryIndices[progNdx].index);
+		}
+
+		normalizeSparseIndex(sparseIndex.get());
+		buildFinalIndex(&index, sparseIndex.get());
+	}
+
+	// Even an empty index always contains a terminating node for the root group
+	DE_ASSERT(!index.empty());
+
+	if (!de::FilePath(indexPath.getDirName()).exists())
+		de::createDirectoryAndParents(indexPath.getDirName().c_str());
+
+	{
+		std::ofstream indexOut(indexPath.getPath(), std::ios_base::binary);
+
+		if (!indexOut.is_open() || !indexOut.good())
+			throw tcu::InternalError(string("Failed to open program binary index file ") + indexPath.getPath());
+
+		indexOut.write((const char*)&index[0], index.size()*sizeof(BinaryIndexNode));
+	}
+}
+
+// BinaryRegistryReader
+
+BinaryRegistryReader::BinaryRegistryReader (const tcu::Archive& archive, const std::string& srcPath)
+	: m_archive	(archive)
+	, m_srcPath	(srcPath)
+{
+}
+
+BinaryRegistryReader::~BinaryRegistryReader (void)
+{
+}
+
+ProgramBinary* BinaryRegistryReader::loadProgram (const ProgramIdentifier& id) const
+{
+	if (!m_binaryIndex)
+	{
+		try
+		{
+			m_binaryIndex = BinaryIndexPtr(new BinaryIndexAccess(de::MovePtr<tcu::Resource>(m_archive.getResource(getIndexPath(m_srcPath).c_str()))));
+		}
+		catch (const tcu::ResourceError& e)
+		{
+			throw ProgramNotFoundException(id, string("Failed to open binary index (") + e.what() + ")");
+		}
+	}
+
+	{
+		const deUint32*	indexPos	= findBinaryIndex(m_binaryIndex.get(), id);
+
+		if (indexPos)
+		{
+			const string	fullPath	= getProgramPath(m_srcPath, *indexPos);
+
+			try
+			{
+				de::UniquePtr<tcu::Resource>	progRes		(m_archive.getResource(fullPath.c_str()));
+				const int						progSize	= progRes->getSize();
+				vector<deUint8>					bytes		(progSize);
+
+				TCU_CHECK_INTERNAL(!bytes.empty());
+
+				progRes->read(&bytes[0], progSize);
+
+				return new ProgramBinary(vk::PROGRAM_FORMAT_SPIRV, bytes.size(), &bytes[0]);
+			}
+			catch (const tcu::ResourceError& e)
+			{
+				throw ProgramNotFoundException(id, e.what());
+			}
+		}
+		else
+			throw ProgramNotFoundException(id, "Program not found in index");
+	}
+}
+
+} // BinaryRegistryDetail
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkBinaryRegistry.hpp b/external/vulkancts/framework/vulkan/vkBinaryRegistry.hpp
new file mode 100644
index 0000000..714e6f6
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkBinaryRegistry.hpp
@@ -0,0 +1,245 @@
+#ifndef _VKBINARYREGISTRY_HPP
+#define _VKBINARYREGISTRY_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Program binary registry.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vkPrograms.hpp"
+#include "tcuResource.hpp"
+#include "deMemPool.hpp"
+#include "dePoolHash.h"
+#include "deUniquePtr.hpp"
+
+#include <map>
+#include <vector>
+#include <stdexcept>
+
+namespace vk
+{
+namespace BinaryRegistryDetail
+{
+
+struct ProgramIdentifier
+{
+	std::string		testCasePath;
+	std::string		programName;
+
+	ProgramIdentifier (const std::string& testCasePath_, const std::string& programName_)
+		: testCasePath	(testCasePath_)
+		, programName	(programName_)
+	{
+	}
+};
+
+inline bool operator< (const ProgramIdentifier& a, const ProgramIdentifier& b)
+{
+	return (a.testCasePath < b.testCasePath) || ((a.testCasePath == b.testCasePath) && (a.programName < b.programName));
+}
+
+class ProgramNotFoundException : public tcu::ResourceError
+{
+public:
+	ProgramNotFoundException (const ProgramIdentifier& id, const std::string& reason)
+		: tcu::ResourceError("Program " + id.testCasePath + " / '" + id.programName + "' not found: " + reason)
+	{
+	}
+};
+
+// Program Binary Index
+// --------------------
+//
+// When SPIR-V binaries are stored on disk, duplicate binaries are eliminated
+// to save a significant amount of space. Many tests use identical binaries and
+// just storing each compiled binary without de-duplication would be incredibly
+// wasteful.
+//
+// To locate the binary that corresponds to a given ProgramIdentifier, a program
+// binary index is needed. Since that index is accessed every time a test requests
+// a shader binary, it must be fast to load (to reduce startup cost) and fast to
+// access.
+//
+// A simple trie is used to store the binary indices. It is laid out as an array of
+// BinaryIndexNodes. Nodes store 4-byte pieces (words) of the search string, rather
+// than just a single character. This gives a more regular memory layout in exchange
+// for a little wasted storage.
+//
+// Search strings are created by splitting the original string into 4-byte words and
+// appending one or more terminating 0 bytes.
+//
+// For each node whose word has no trailing 0 bytes (i.e. is not terminated), the
+// index points to the offset of its child list. The children of each node are stored
+// consecutively, and the list is terminated by a child with word = 0.
+//
+// If the word contains one or more trailing 0 bytes, the index denotes the binary
+// index instead of the offset of the child list.
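+//
+// As an illustration, a hypothetical identifier { testCasePath = "a.b",
+// programName = "main" } is flattened and looked up as follows:
+//
+//   Search string:  "a.b#main"              (testCasePath + '#' + programName)
+//   Search words:   [ "a.b#", "main", 0 ]   (4-byte pieces plus a terminating 0 word)
+//
+// The lookup (see findBinaryIndex() in vkBinaryRegistry.cpp) walks the node array
+// starting at node 0:
+//   - scan the current child list until a node's word matches the current search
+//     word (a node with word = 0 terminates the list, i.e. program not found);
+//   - if the matched word is not the last one, the node's index is the offset of
+//     the next child list; move there and advance to the next search word;
+//   - if the matched word is the last (0-terminated) one, the node's index is the
+//     binary index used to name the stored .spv file.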
+
+struct BinaryIndexNode
+{
+	deUint32	word;		//!< 4 bytes of search string.
+	deUint32	index;		//!< Binary index if word ends with 0 bytes, or index of first child node otherwise.
+};
+
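+//! Lazily loaded array-like view of a tcu::Resource. Elements are read from the
+//! resource in fixed-size pages (1 << ELEMENTS_PER_PAGE_LOG2 elements) the first
+//! time an element on a page is accessed.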
+template<typename Element>
+class LazyResource
+{
+public:
+									LazyResource		(de::MovePtr<tcu::Resource> resource);
+
+	const Element&					operator[]			(size_t ndx);
+	size_t							size				(void) const { return m_elements.size();	}
+
+private:
+	enum
+	{
+		ELEMENTS_PER_PAGE_LOG2	= 10
+	};
+
+	inline size_t					getPageForElement	(size_t elemNdx) const { return elemNdx >> ELEMENTS_PER_PAGE_LOG2;	}
+	inline bool						isPageResident		(size_t pageNdx) const { return m_isPageResident[pageNdx];			}
+
+	void							makePageResident	(size_t pageNdx);
+
+	de::UniquePtr<tcu::Resource>	m_resource;
+
+	std::vector<Element>			m_elements;
+	std::vector<bool>				m_isPageResident;
+};
+
+template<typename Element>
+LazyResource<Element>::LazyResource (de::MovePtr<tcu::Resource> resource)
+	: m_resource(resource)
+{
+	const size_t	resSize		= m_resource->getSize();
+	const size_t	numElements	= resSize/sizeof(Element);
+	const size_t	numPages	= (numElements >> ELEMENTS_PER_PAGE_LOG2) + ((numElements & ((1u<<ELEMENTS_PER_PAGE_LOG2)-1u)) == 0 ? 0 : 1);
+
+	TCU_CHECK_INTERNAL(numElements*sizeof(Element) == resSize);
+
+	m_elements.resize(numElements);
+	m_isPageResident.resize(numPages, false);
+}
+
+template<typename Element>
+const Element& LazyResource<Element>::operator[] (size_t ndx)
+{
+	const size_t pageNdx = getPageForElement(ndx);
+
+	if (ndx >= m_elements.size())
+		throw std::out_of_range("");
+
+	if (!isPageResident(pageNdx))
+		makePageResident(pageNdx);
+
+	return m_elements[ndx];
+}
+
+template<typename Element>
+void LazyResource<Element>::makePageResident (size_t pageNdx)
+{
+	const size_t	pageSize		= (size_t)(1<<ELEMENTS_PER_PAGE_LOG2)*sizeof(Element);
+	const size_t	pageOffset		= pageNdx*pageSize;
+	const size_t	numBytesToRead	= de::min(m_elements.size()*sizeof(Element) - pageOffset, pageSize);
+
+	DE_ASSERT(!isPageResident(pageNdx));
+
+	if ((size_t)m_resource->getPosition() != pageOffset)
+		m_resource->setPosition((int)pageOffset);
+
+	m_resource->read((deUint8*)&m_elements[pageNdx << ELEMENTS_PER_PAGE_LOG2], (int)numBytesToRead);
+	m_isPageResident[pageNdx] = true;
+}
+
+typedef LazyResource<BinaryIndexNode> BinaryIndexAccess;
+
+DE_DECLARE_POOL_HASH(BinaryHash, const ProgramBinary*, deUint32);
+
+class BinaryRegistryReader
+{
+public:
+							BinaryRegistryReader	(const tcu::Archive& archive, const std::string& srcPath);
+							~BinaryRegistryReader	(void);
+
+	ProgramBinary*			loadProgram				(const ProgramIdentifier& id) const;
+
+private:
+	typedef de::MovePtr<BinaryIndexAccess> BinaryIndexPtr;
+
+	const tcu::Archive&		m_archive;
+	const std::string		m_srcPath;
+
+	mutable BinaryIndexPtr	m_binaryIndex;
+};
+
+class BinaryRegistryWriter
+{
+public:
+						BinaryRegistryWriter	(const std::string& dstPath);
+						~BinaryRegistryWriter	(void);
+
+	void				storeProgram			(const ProgramIdentifier& id, const ProgramBinary& binary);
+	void				writeIndex				(void) const;
+
+private:
+	struct BinaryIndex
+	{
+		ProgramIdentifier	id;
+		deUint32			index;
+
+		BinaryIndex (const ProgramIdentifier&	id_,
+					 deUint32					index_)
+			: id	(id_)
+			, index	(index_)
+		{}
+	};
+
+	typedef std::vector<ProgramBinary*>	BinaryVector;
+	typedef std::vector<BinaryIndex>	BinaryIndexVector;
+
+	const std::string	m_dstPath;
+
+	de::MemPool			m_memPool;
+	BinaryHash*			m_binaryIndexMap;		//!< ProgramBinary -> slot in m_compactedBinaries
+	BinaryVector		m_compactedBinaries;
+	BinaryIndexVector	m_binaryIndices;		//!< ProgramIdentifier -> slot in m_compactedBinaries
+};
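+
+// Typical usage (illustrative sketch; the paths, the program identifier and the
+// ProgramBinary 'binary' below are hypothetical):
+//
+//   Writer side (offline cache build):
+//     BinaryRegistryWriter writer ("<dstPath>");
+//     writer.storeProgram(ProgramIdentifier("some.test.case", "vert"), binary);
+//     ...
+//     writer.writeIndex();   // writes index.bin next to the stored .spv files
+//
+//   Reader side (at test run time):
+//     BinaryRegistryReader         reader (archive, "<srcPath>");
+//     de::UniquePtr<ProgramBinary> prog   (reader.loadProgram(ProgramIdentifier("some.test.case", "vert")));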
+
+} // BinaryRegistryDetail
+
+using BinaryRegistryDetail::BinaryRegistryReader;
+using BinaryRegistryDetail::BinaryRegistryWriter;
+using BinaryRegistryDetail::ProgramIdentifier;
+using BinaryRegistryDetail::ProgramNotFoundException;
+
+} // vk
+
+#endif // _VKBINARYREGISTRY_HPP
diff --git a/external/vulkancts/framework/vulkan/vkBuilderUtil.cpp b/external/vulkancts/framework/vulkan/vkBuilderUtil.cpp
new file mode 100644
index 0000000..44dde43
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkBuilderUtil.cpp
@@ -0,0 +1,257 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan object builder utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkBuilderUtil.hpp"
+
+#include "vkRefUtil.hpp"
+
+namespace vk
+{
+
+// DescriptorSetLayoutBuilder
+
+DescriptorSetLayoutBuilder::DescriptorSetLayoutBuilder (void)
+{
+}
+
+DescriptorSetLayoutBuilder& DescriptorSetLayoutBuilder::addBinding (VkDescriptorType	descriptorType,
+																	deUint32			descriptorCount,
+																	VkShaderStageFlags	stageFlags,
+																	const VkSampler*	pImmutableSamplers)
+{
+	if (pImmutableSamplers)
+	{
+		const ImmutableSamplerInfo immutableSamplerInfo =
+		{
+			(deUint32)m_bindings.size(),
+			(deUint32)m_immutableSamplers.size()
+		};
+
+		m_immutableSamplerInfos.push_back(immutableSamplerInfo);
+
+		for (size_t descriptorNdx = 0; descriptorNdx < descriptorCount; descriptorNdx++)
+			m_immutableSamplers.push_back(pImmutableSamplers[descriptorNdx]);
+	}
+
+	// pImmutableSamplers will be updated at build time
+	const VkDescriptorSetLayoutBinding binding =
+	{
+		(deUint32)m_bindings.size(),	// binding
+		descriptorType,					// descriptorType
+		descriptorCount,				// descriptorCount
+		stageFlags,						// stageFlags
+		DE_NULL,						// pImmutableSamplers
+	};
+	m_bindings.push_back(binding);
+	return *this;
+}
+
+Move<VkDescriptorSetLayout> DescriptorSetLayoutBuilder::build (const DeviceInterface& vk, VkDevice device) const
+{
+	// Create new layout bindings with pImmutableSamplers updated
+	std::vector<VkDescriptorSetLayoutBinding>	bindings	= m_bindings;
+
+	for (size_t samplerInfoNdx = 0; samplerInfoNdx < m_immutableSamplerInfos.size(); samplerInfoNdx++)
+	{
+		const ImmutableSamplerInfo&	samplerInfo	= m_immutableSamplerInfos[samplerInfoNdx];
+
+		bindings[samplerInfo.bindingIndex].pImmutableSamplers	= &m_immutableSamplers[samplerInfo.samplerBaseIndex];
+	}
+
+	const VkDescriptorSetLayoutCreateInfo		createInfo	=
+	{
+		VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
+		DE_NULL,
+		(VkDescriptorSetLayoutCreateFlags)0,					// flags
+		(deUint32)bindings.size(),								// bindingCount
+		(bindings.empty()) ? (DE_NULL) : (bindings.data()),		// pBinding
+	};
+
+	return createDescriptorSetLayout(vk, device, &createInfo);
+}
+
+// DescriptorPoolBuilder
+
+DescriptorPoolBuilder::DescriptorPoolBuilder (void)
+{
+}
+
+DescriptorPoolBuilder& DescriptorPoolBuilder::addType (VkDescriptorType type, deUint32 numDescriptors)
+{
+	if (numDescriptors == 0u)
+	{
+		// nothing to do
+		return *this;
+	}
+	else
+	{
+		for (size_t ndx = 0; ndx < m_counts.size(); ++ndx)
+		{
+			if (m_counts[ndx].type == type)
+			{
+				// augment existing requirement
+				m_counts[ndx].descriptorCount += numDescriptors;
+				return *this;
+			}
+		}
+
+		{
+			// new requirement
+			const VkDescriptorPoolSize typeCount =
+			{
+				type,			// type
+				numDescriptors,	// numDescriptors
+			};
+
+			m_counts.push_back(typeCount);
+			return *this;
+		}
+	}
+}
+
+Move<VkDescriptorPool> DescriptorPoolBuilder::build (const DeviceInterface& vk, VkDevice device, VkDescriptorPoolCreateFlags flags, deUint32 maxSets) const
+{
+	const VkDescriptorPoolSize* const	typeCountPtr	= (m_counts.empty()) ? (DE_NULL) : (&m_counts[0]);
+	const VkDescriptorPoolCreateInfo	createInfo		=
+	{
+		VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+		DE_NULL,
+		flags,
+		maxSets,
+		(deUint32)m_counts.size(),		// poolSizeCount
+		typeCountPtr,					// pPoolSizes
+	};
+
+	return createDescriptorPool(vk, device, &createInfo);
+}
+
+// DescriptorSetUpdateBuilder
+
+DescriptorSetUpdateBuilder::DescriptorSetUpdateBuilder (void)
+{
+}
+
+DescriptorSetUpdateBuilder& DescriptorSetUpdateBuilder::write (VkDescriptorSet					destSet,
+															   deUint32							destBinding,
+															   deUint32							destArrayElement,
+															   deUint32							count,
+															   VkDescriptorType					descriptorType,
+															   const VkDescriptorImageInfo*		pImageInfo,
+															   const VkDescriptorBufferInfo*	pBufferInfo,
+															   const VkBufferView*				pTexelBufferView)
+{
+	// pImageInfo, pBufferInfo and pTexelBufferView will be updated when calling update()
+	const VkWriteDescriptorSet writeParams =
+	{
+		VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
+		DE_NULL,
+		destSet,			//!< destSet
+		destBinding,		//!< destBinding
+		destArrayElement,	//!< destArrayElement
+		count,				//!< count
+		descriptorType,		//!< descriptorType
+		DE_NULL,
+		DE_NULL,
+		DE_NULL
+	};
+
+	m_writes.push_back(writeParams);
+
+	// Store a copy of pImageInfo, pBufferInfo and pTexelBufferView
+	WriteDescriptorInfo	writeInfo;
+
+	if (pImageInfo)
+		writeInfo.imageInfos.insert(writeInfo.imageInfos.end(), pImageInfo, pImageInfo + count);
+
+	if (pBufferInfo)
+		writeInfo.bufferInfos.insert(writeInfo.bufferInfos.end(), pBufferInfo, pBufferInfo + count);
+
+	if (pTexelBufferView)
+		writeInfo.texelBufferViews.insert(writeInfo.texelBufferViews.end(), pTexelBufferView, pTexelBufferView + count);
+
+	m_writeDescriptorInfos.push_back(writeInfo);
+
+	return *this;
+}
+
+DescriptorSetUpdateBuilder& DescriptorSetUpdateBuilder::copy (VkDescriptorSet	srcSet,
+															  deUint32			srcBinding,
+															  deUint32			srcArrayElement,
+															  VkDescriptorSet	destSet,
+															  deUint32			destBinding,
+															  deUint32			destArrayElement,
+															  deUint32			count)
+{
+	const VkCopyDescriptorSet copyParams =
+	{
+		VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
+		DE_NULL,
+		srcSet,				//!< srcSet
+		srcBinding,			//!< srcBinding
+		srcArrayElement,	//!< srcArrayElement
+		destSet,			//!< destSet
+		destBinding,		//!< destBinding
+		destArrayElement,	//!< destArrayElement
+		count,				//!< count
+	};
+	m_copies.push_back(copyParams);
+	return *this;
+}
+
+void DescriptorSetUpdateBuilder::update (const DeviceInterface& vk, VkDevice device) const
+{
+	// Update VkWriteDescriptorSet structures with stored info
+	std::vector<VkWriteDescriptorSet> writes	= m_writes;
+
+	for (size_t writeNdx = 0; writeNdx < m_writes.size(); writeNdx++)
+	{
+		const WriteDescriptorInfo& writeInfo = m_writeDescriptorInfos[writeNdx];
+
+		if (!writeInfo.imageInfos.empty())
+			writes[writeNdx].pImageInfo			= &writeInfo.imageInfos[0];
+
+		if (!writeInfo.bufferInfos.empty())
+			writes[writeNdx].pBufferInfo		= &writeInfo.bufferInfos[0];
+
+		if (!writeInfo.texelBufferViews.empty())
+			writes[writeNdx].pTexelBufferView	= &writeInfo.texelBufferViews[0];
+	}
+
+	const VkWriteDescriptorSet* const	writePtr	= (m_writes.empty()) ? (DE_NULL) : (&writes[0]);
+	const VkCopyDescriptorSet* const	copyPtr		= (m_copies.empty()) ? (DE_NULL) : (&m_copies[0]);
+
+	vk.updateDescriptorSets(device, (deUint32)writes.size(), writePtr, (deUint32)m_copies.size(), copyPtr);
+}
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkBuilderUtil.hpp b/external/vulkancts/framework/vulkan/vkBuilderUtil.hpp
new file mode 100644
index 0000000..1a30b3a
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkBuilderUtil.hpp
@@ -0,0 +1,256 @@
+#ifndef _VKBUILDERUTIL_HPP
+#define _VKBUILDERUTIL_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan object builder utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vkRef.hpp"
+
+#include <vector>
+
+namespace vk
+{
+
+class DescriptorSetLayoutBuilder
+{
+public:
+												DescriptorSetLayoutBuilder	(void);
+
+	DescriptorSetLayoutBuilder&					addBinding					(VkDescriptorType	descriptorType,
+																			 deUint32			descriptorCount,
+																			 VkShaderStageFlags	stageFlags,
+																			 const VkSampler*	pImmutableSamplers);
+
+	Move<VkDescriptorSetLayout>					build						(const DeviceInterface& vk, VkDevice device) const;
+
+	// helpers
+
+	inline DescriptorSetLayoutBuilder&			addSingleBinding			(VkDescriptorType	descriptorType,
+																			 VkShaderStageFlags	stageFlags)
+	{
+		return addBinding(descriptorType, 1u, stageFlags, (VkSampler*)DE_NULL);
+	}
+	inline DescriptorSetLayoutBuilder&			addArrayBinding				(VkDescriptorType	descriptorType,
+																			 deUint32			descriptorCount,
+																			 VkShaderStageFlags	stageFlags)
+	{
+		return addBinding(descriptorType, descriptorCount, stageFlags, (VkSampler*)DE_NULL);
+	}
+	inline DescriptorSetLayoutBuilder&			addSingleSamplerBinding		(VkDescriptorType	descriptorType,
+																			 VkShaderStageFlags	stageFlags,
+																			 const VkSampler*	immutableSampler)	//!< \note: Using pointer to sampler to clarify that handle is not
+																													//!<        copied and argument lifetime is expected to cover build()
+																													//!<        call.
+	{
+		return addBinding(descriptorType, 1u, stageFlags, immutableSampler);
+	}
+	inline DescriptorSetLayoutBuilder&			addArraySamplerBinding		(VkDescriptorType	descriptorType,
+																			 deUint32			descriptorCount,
+																			 VkShaderStageFlags	stageFlags,
+																			 const VkSampler*	pImmutableSamplers)
+	{
+		return addBinding(descriptorType, descriptorCount, stageFlags, pImmutableSamplers);
+	}
+
+private:
+												DescriptorSetLayoutBuilder	(const DescriptorSetLayoutBuilder&); // delete
+	DescriptorSetLayoutBuilder&					operator=					(const DescriptorSetLayoutBuilder&); // delete
+
+	std::vector<VkDescriptorSetLayoutBinding>	m_bindings;
+
+	struct ImmutableSamplerInfo
+	{
+		deUint32 bindingIndex;
+		deUint32 samplerBaseIndex;
+	};
+
+	std::vector<ImmutableSamplerInfo>			m_immutableSamplerInfos;
+	std::vector<VkSampler>						m_immutableSamplers;
+};
+
+class DescriptorPoolBuilder
+{
+public:
+										DescriptorPoolBuilder	(void);
+
+	DescriptorPoolBuilder&				addType					(VkDescriptorType type, deUint32 numDescriptors = 1u);
+	Move<VkDescriptorPool>				build					(const DeviceInterface& vk, VkDevice device, VkDescriptorPoolCreateFlags flags, deUint32 maxSets) const;
+
+private:
+										DescriptorPoolBuilder	(const DescriptorPoolBuilder&); // delete
+	DescriptorPoolBuilder&				operator=				(const DescriptorPoolBuilder&); // delete
+
+	std::vector<VkDescriptorPoolSize>	m_counts;
+};
+
+class DescriptorSetUpdateBuilder
+{
+public:
+	class Location
+	{
+	public:
+		static inline Location	binding				(deUint32 binding_)
+		{
+			return Location(binding_, 0u);
+		}
+		static inline Location	bindingArrayElement	(deUint32 binding_, deUint32 arrayElement)
+		{
+			return Location(binding_, arrayElement);
+		}
+
+	private:
+		// \note private to force use of factory methods that have more descriptive names
+		inline					Location			(deUint32 binding_, deUint32 arrayElement)
+			: m_binding			(binding_)
+			, m_arrayElement	(arrayElement)
+		{
+		}
+
+		friend class DescriptorSetUpdateBuilder;
+
+		const deUint32			m_binding;
+		const deUint32			m_arrayElement;
+	};
+
+										DescriptorSetUpdateBuilder	(void);
+
+	DescriptorSetUpdateBuilder&			write						(VkDescriptorSet				destSet,
+																	 deUint32						destBinding,
+																	 deUint32						destArrayElement,
+																	 deUint32						count,
+																	 VkDescriptorType				descriptorType,
+																	 const VkDescriptorImageInfo*	pImageInfo,
+																	 const VkDescriptorBufferInfo*	pBufferInfo,
+																	 const VkBufferView*			pTexelBufferView);
+
+	DescriptorSetUpdateBuilder&			copy						(VkDescriptorSet	srcSet,
+																	 deUint32			srcBinding,
+																	 deUint32			srcArrayElement,
+																	 VkDescriptorSet	destSet,
+																	 deUint32			destBinding,
+																	 deUint32			destArrayElement,
+																	 deUint32			count);
+
+	void								update						(const DeviceInterface& vk, VkDevice device) const;
+
+	// helpers
+
+	inline DescriptorSetUpdateBuilder&	writeSingle					(VkDescriptorSet				destSet,
+																	 const Location&				destLocation,
+																	 VkDescriptorType				descriptorType,
+																	 const VkDescriptorImageInfo*	pImageInfo)
+	{
+		return write(destSet, destLocation.m_binding, destLocation.m_arrayElement, 1u, descriptorType, pImageInfo, DE_NULL, DE_NULL);
+	}
+
+	inline DescriptorSetUpdateBuilder&	writeSingle					(VkDescriptorSet				destSet,
+																	 const Location&				destLocation,
+																	 VkDescriptorType				descriptorType,
+																	 const VkDescriptorBufferInfo*	pBufferInfo)
+	{
+		return write(destSet, destLocation.m_binding, destLocation.m_arrayElement, 1u, descriptorType, DE_NULL, pBufferInfo, DE_NULL);
+	}
+
+	inline DescriptorSetUpdateBuilder&	writeSingle					(VkDescriptorSet				destSet,
+																	 const Location&				destLocation,
+																	 VkDescriptorType				descriptorType,
+																	 const VkBufferView*			pTexelBufferView)
+	{
+		return write(destSet, destLocation.m_binding, destLocation.m_arrayElement, 1u, descriptorType, DE_NULL, DE_NULL, pTexelBufferView);
+	}
+
+	inline DescriptorSetUpdateBuilder&	writeArray					(VkDescriptorSet				destSet,
+																	 const Location&				destLocation,
+																	 VkDescriptorType				descriptorType,
+																	 deUint32						numDescriptors,
+																	 const VkDescriptorImageInfo*	pImageInfo)
+	{
+		return write(destSet, destLocation.m_binding, destLocation.m_arrayElement, numDescriptors, descriptorType, pImageInfo, DE_NULL, DE_NULL);
+	}
+
+	inline DescriptorSetUpdateBuilder&	writeArray					(VkDescriptorSet				destSet,
+																	 const Location&				destLocation,
+																	 VkDescriptorType				descriptorType,
+																	 deUint32						numDescriptors,
+																	 const VkDescriptorBufferInfo*	pBufferInfo)
+	{
+		return write(destSet, destLocation.m_binding, destLocation.m_arrayElement, numDescriptors, descriptorType, DE_NULL, pBufferInfo, DE_NULL);
+	}
+
+	inline DescriptorSetUpdateBuilder&	writeArray					(VkDescriptorSet				destSet,
+																	 const Location&				destLocation,
+																	 VkDescriptorType				descriptorType,
+																	 deUint32						numDescriptors,
+																	 const VkBufferView*			pTexelBufferView)
+	{
+		return write(destSet, destLocation.m_binding, destLocation.m_arrayElement, numDescriptors, descriptorType, DE_NULL, DE_NULL, pTexelBufferView);
+	}
+
+	inline DescriptorSetUpdateBuilder&	copySingle					(VkDescriptorSet	srcSet,
+																	 const Location&	srcLocation,
+																	 VkDescriptorSet	destSet,
+																	 const Location&	destLocation)
+	{
+		return copy(srcSet, srcLocation.m_binding, srcLocation.m_arrayElement, destSet, destLocation.m_binding, destLocation.m_arrayElement, 1u);
+	}
+
+	inline DescriptorSetUpdateBuilder&	copyArray					(VkDescriptorSet	srcSet,
+																	 const Location&	srcLocation,
+																	 VkDescriptorSet	destSet,
+																	 const Location&	destLocation,
+																	 deUint32			count)
+	{
+		return copy(srcSet, srcLocation.m_binding, srcLocation.m_arrayElement, destSet, destLocation.m_binding, destLocation.m_arrayElement, count);
+	}
+
+private:
+										DescriptorSetUpdateBuilder	(const DescriptorSetUpdateBuilder&); // delete
+	DescriptorSetUpdateBuilder&			operator=					(const DescriptorSetUpdateBuilder&); // delete
+
+	struct WriteDescriptorInfo
+	{
+		std::vector<VkDescriptorImageInfo>	imageInfos;
+		std::vector<VkDescriptorBufferInfo>	bufferInfos;
+		std::vector<VkBufferView>			texelBufferViews;
+	};
+
+	std::vector<WriteDescriptorInfo>	m_writeDescriptorInfos;
+
+	std::vector<VkWriteDescriptorSet>	m_writes;
+	std::vector<VkCopyDescriptorSet>	m_copies;
+};
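+
+// Typical usage (illustrative sketch; 'vkd', 'device', 'bufferInfo' and a
+// 'descriptorSet' allocated from 'pool' are assumed to exist in the calling code):
+//
+//   const Unique<VkDescriptorSetLayout> layout (DescriptorSetLayoutBuilder()
+//       .addSingleBinding(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+//       .build(vkd, device));
+//
+//   const Unique<VkDescriptorPool> pool (DescriptorPoolBuilder()
+//       .addType(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER)
+//       .build(vkd, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+//
+//   DescriptorSetUpdateBuilder()
+//       .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u),
+//                    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, &bufferInfo)
+//       .update(vkd, device);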
+
+} // vk
+
+#endif // _VKBUILDERUTIL_HPP
diff --git a/external/vulkancts/framework/vulkan/vkConcreteDeviceInterface.inl b/external/vulkancts/framework/vulkan/vkConcreteDeviceInterface.inl
new file mode 100644
index 0000000..3bcc88b
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkConcreteDeviceInterface.inl
@@ -0,0 +1,124 @@
+/* WARNING: This is auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+virtual void		destroyDevice									(VkDevice device, const VkAllocationCallbacks* pAllocator) const;
+virtual void		getDeviceQueue									(VkDevice device, deUint32 queueFamilyIndex, deUint32 queueIndex, VkQueue* pQueue) const;
+virtual VkResult	queueSubmit										(VkQueue queue, deUint32 submitCount, const VkSubmitInfo* pSubmits, VkFence fence) const;
+virtual VkResult	queueWaitIdle									(VkQueue queue) const;
+virtual VkResult	deviceWaitIdle									(VkDevice device) const;
+virtual VkResult	allocateMemory									(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) const;
+virtual void		freeMemory										(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	mapMemory										(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) const;
+virtual void		unmapMemory										(VkDevice device, VkDeviceMemory memory) const;
+virtual VkResult	flushMappedMemoryRanges							(VkDevice device, deUint32 memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) const;
+virtual VkResult	invalidateMappedMemoryRanges					(VkDevice device, deUint32 memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) const;
+virtual void		getDeviceMemoryCommitment						(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) const;
+virtual VkResult	bindBufferMemory								(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) const;
+virtual VkResult	bindImageMemory									(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) const;
+virtual void		getBufferMemoryRequirements						(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements) const;
+virtual void		getImageMemoryRequirements						(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements) const;
+virtual void		getImageSparseMemoryRequirements				(VkDevice device, VkImage image, deUint32* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) const;
+virtual void		getPhysicalDeviceSparseImageFormatProperties	(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, deUint32* pPropertyCount, VkSparseImageFormatProperties* pProperties) const;
+virtual VkResult	queueBindSparse									(VkQueue queue, deUint32 bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) const;
+virtual VkResult	createFence										(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) const;
+virtual void		destroyFence									(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	resetFences										(VkDevice device, deUint32 fenceCount, const VkFence* pFences) const;
+virtual VkResult	getFenceStatus									(VkDevice device, VkFence fence) const;
+virtual VkResult	waitForFences									(VkDevice device, deUint32 fenceCount, const VkFence* pFences, VkBool32 waitAll, deUint64 timeout) const;
+virtual VkResult	createSemaphore									(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore) const;
+virtual void		destroySemaphore								(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	createEvent										(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent) const;
+virtual void		destroyEvent									(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	getEventStatus									(VkDevice device, VkEvent event) const;
+virtual VkResult	setEvent										(VkDevice device, VkEvent event) const;
+virtual VkResult	resetEvent										(VkDevice device, VkEvent event) const;
+virtual VkResult	createQueryPool									(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool) const;
+virtual void		destroyQueryPool								(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	getQueryPoolResults								(VkDevice device, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount, deUintptr dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) const;
+virtual VkResult	createBuffer									(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) const;
+virtual void		destroyBuffer									(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	createBufferView								(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView) const;
+virtual void		destroyBufferView								(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	createImage										(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage) const;
+virtual void		destroyImage									(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator) const;
+virtual void		getImageSubresourceLayout						(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) const;
+virtual VkResult	createImageView									(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView) const;
+virtual void		destroyImageView								(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	createShaderModule								(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule) const;
+virtual void		destroyShaderModule								(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	createPipelineCache								(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache) const;
+virtual void		destroyPipelineCache							(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	getPipelineCacheData							(VkDevice device, VkPipelineCache pipelineCache, deUintptr* pDataSize, void* pData) const;
+virtual VkResult	mergePipelineCaches								(VkDevice device, VkPipelineCache dstCache, deUint32 srcCacheCount, const VkPipelineCache* pSrcCaches) const;
+virtual VkResult	createGraphicsPipelines							(VkDevice device, VkPipelineCache pipelineCache, deUint32 createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const;
+virtual VkResult	createComputePipelines							(VkDevice device, VkPipelineCache pipelineCache, deUint32 createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const;
+virtual void		destroyPipeline									(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	createPipelineLayout							(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout) const;
+virtual void		destroyPipelineLayout							(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	createSampler									(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) const;
+virtual void		destroySampler									(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	createDescriptorSetLayout						(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout) const;
+virtual void		destroyDescriptorSetLayout						(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	createDescriptorPool							(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) const;
+virtual void		destroyDescriptorPool							(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	resetDescriptorPool								(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) const;
+virtual VkResult	allocateDescriptorSets							(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) const;
+virtual VkResult	freeDescriptorSets								(VkDevice device, VkDescriptorPool descriptorPool, deUint32 descriptorSetCount, const VkDescriptorSet* pDescriptorSets) const;
+virtual void		updateDescriptorSets							(VkDevice device, deUint32 descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, deUint32 descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies) const;
+virtual VkResult	createFramebuffer								(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer) const;
+virtual void		destroyFramebuffer								(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	createRenderPass								(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const;
+virtual void		destroyRenderPass								(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) const;
+virtual void		getRenderAreaGranularity						(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) const;
+virtual VkResult	createCommandPool								(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool) const;
+virtual void		destroyCommandPool								(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult	resetCommandPool								(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) const;
+virtual VkResult	allocateCommandBuffers							(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) const;
+virtual void		freeCommandBuffers								(VkDevice device, VkCommandPool commandPool, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers) const;
+virtual VkResult	beginCommandBuffer								(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo) const;
+virtual VkResult	endCommandBuffer								(VkCommandBuffer commandBuffer) const;
+virtual VkResult	resetCommandBuffer								(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) const;
+virtual void		cmdBindPipeline									(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) const;
+virtual void		cmdSetViewport									(VkCommandBuffer commandBuffer, deUint32 firstViewport, deUint32 viewportCount, const VkViewport* pViewports) const;
+virtual void		cmdSetScissor									(VkCommandBuffer commandBuffer, deUint32 firstScissor, deUint32 scissorCount, const VkRect2D* pScissors) const;
+virtual void		cmdSetLineWidth									(VkCommandBuffer commandBuffer, float lineWidth) const;
+virtual void		cmdSetDepthBias									(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor) const;
+virtual void		cmdSetBlendConstants							(VkCommandBuffer commandBuffer, const float blendConstants[4]) const;
+virtual void		cmdSetDepthBounds								(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) const;
+virtual void		cmdSetStencilCompareMask						(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 compareMask) const;
+virtual void		cmdSetStencilWriteMask							(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 writeMask) const;
+virtual void		cmdSetStencilReference							(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 reference) const;
+virtual void		cmdBindDescriptorSets							(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, deUint32 firstSet, deUint32 descriptorSetCount, const VkDescriptorSet* pDescriptorSets, deUint32 dynamicOffsetCount, const deUint32* pDynamicOffsets) const;
+virtual void		cmdBindIndexBuffer								(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) const;
+virtual void		cmdBindVertexBuffers							(VkCommandBuffer commandBuffer, deUint32 firstBinding, deUint32 bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) const;
+virtual void		cmdDraw											(VkCommandBuffer commandBuffer, deUint32 vertexCount, deUint32 instanceCount, deUint32 firstVertex, deUint32 firstInstance) const;
+virtual void		cmdDrawIndexed									(VkCommandBuffer commandBuffer, deUint32 indexCount, deUint32 instanceCount, deUint32 firstIndex, deInt32 vertexOffset, deUint32 firstInstance) const;
+virtual void		cmdDrawIndirect									(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, deUint32 drawCount, deUint32 stride) const;
+virtual void		cmdDrawIndexedIndirect							(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, deUint32 drawCount, deUint32 stride) const;
+virtual void		cmdDispatch										(VkCommandBuffer commandBuffer, deUint32 x, deUint32 y, deUint32 z) const;
+virtual void		cmdDispatchIndirect								(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const;
+virtual void		cmdCopyBuffer									(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, deUint32 regionCount, const VkBufferCopy* pRegions) const;
+virtual void		cmdCopyImage									(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageCopy* pRegions) const;
+virtual void		cmdBlitImage									(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageBlit* pRegions, VkFilter filter) const;
+virtual void		cmdCopyBufferToImage							(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkBufferImageCopy* pRegions) const;
+virtual void		cmdCopyImageToBuffer							(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, deUint32 regionCount, const VkBufferImageCopy* pRegions) const;
+virtual void		cmdUpdateBuffer									(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const deUint32* pData) const;
+virtual void		cmdFillBuffer									(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, deUint32 data) const;
+virtual void		cmdClearColorImage								(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, deUint32 rangeCount, const VkImageSubresourceRange* pRanges) const;
+virtual void		cmdClearDepthStencilImage						(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, deUint32 rangeCount, const VkImageSubresourceRange* pRanges) const;
+virtual void		cmdClearAttachments								(VkCommandBuffer commandBuffer, deUint32 attachmentCount, const VkClearAttachment* pAttachments, deUint32 rectCount, const VkClearRect* pRects) const;
+virtual void		cmdResolveImage									(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageResolve* pRegions) const;
+virtual void		cmdSetEvent										(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const;
+virtual void		cmdResetEvent									(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const;
+virtual void		cmdWaitEvents									(VkCommandBuffer commandBuffer, deUint32 eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, deUint32 memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, deUint32 bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, deUint32 imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const;
+virtual void		cmdPipelineBarrier								(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, deUint32 memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, deUint32 bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, deUint32 imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const;
+virtual void		cmdBeginQuery									(VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 query, VkQueryControlFlags flags) const;
+virtual void		cmdEndQuery										(VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 query) const;
+virtual void		cmdResetQueryPool								(VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount) const;
+virtual void		cmdWriteTimestamp								(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, deUint32 query) const;
+virtual void		cmdCopyQueryPoolResults							(VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) const;
+virtual void		cmdPushConstants								(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, deUint32 offset, deUint32 size, const void* pValues) const;
+virtual void		cmdBeginRenderPass								(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) const;
+virtual void		cmdNextSubpass									(VkCommandBuffer commandBuffer, VkSubpassContents contents) const;
+virtual void		cmdEndRenderPass								(VkCommandBuffer commandBuffer) const;
+virtual void		cmdExecuteCommands								(VkCommandBuffer commandBuffer, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers) const;
diff --git a/external/vulkancts/framework/vulkan/vkConcreteInstanceInterface.inl b/external/vulkancts/framework/vulkan/vkConcreteInstanceInterface.inl
new file mode 100644
index 0000000..2c04d28
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkConcreteInstanceInterface.inl
@@ -0,0 +1,15 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+virtual void				destroyInstance							(VkInstance instance, const VkAllocationCallbacks* pAllocator) const;
+virtual VkResult			enumeratePhysicalDevices				(VkInstance instance, deUint32* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices) const;
+virtual void				getPhysicalDeviceFeatures				(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) const;
+virtual void				getPhysicalDeviceFormatProperties		(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) const;
+virtual VkResult			getPhysicalDeviceImageFormatProperties	(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) const;
+virtual void				getPhysicalDeviceProperties				(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties) const;
+virtual void				getPhysicalDeviceQueueFamilyProperties	(VkPhysicalDevice physicalDevice, deUint32* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) const;
+virtual void				getPhysicalDeviceMemoryProperties		(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties) const;
+virtual PFN_vkVoidFunction	getDeviceProcAddr						(VkDevice device, const char* pName) const;
+virtual VkResult			createDevice							(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) const;
+virtual VkResult			enumerateDeviceExtensionProperties		(VkPhysicalDevice physicalDevice, const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties) const;
+virtual VkResult			enumerateDeviceLayerProperties			(VkPhysicalDevice physicalDevice, deUint32* pPropertyCount, VkLayerProperties* pProperties) const;
diff --git a/external/vulkancts/framework/vulkan/vkConcretePlatformInterface.inl b/external/vulkancts/framework/vulkan/vkConcretePlatformInterface.inl
new file mode 100644
index 0000000..2331bcb
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkConcretePlatformInterface.inl
@@ -0,0 +1,7 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+virtual VkResult			createInstance							(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance) const;
+virtual PFN_vkVoidFunction	getInstanceProcAddr						(VkInstance instance, const char* pName) const;
+virtual VkResult			enumerateInstanceExtensionProperties	(const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties) const;
+virtual VkResult			enumerateInstanceLayerProperties		(deUint32* pPropertyCount, VkLayerProperties* pProperties) const;
diff --git a/external/vulkancts/framework/vulkan/vkDefs.cpp b/external/vulkancts/framework/vulkan/vkDefs.cpp
new file mode 100644
index 0000000..8ac22d7
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkDefs.cpp
@@ -0,0 +1,105 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vkStrUtil.hpp"
+
+#include <sstream>
+
+DE_STATIC_ASSERT(sizeof(vk::VkImageType)	== sizeof(deUint32));
+DE_STATIC_ASSERT(sizeof(vk::VkResult)		== sizeof(deUint32));
+DE_STATIC_ASSERT(sizeof(vk::VkDevice)		== sizeof(void*));
+DE_STATIC_ASSERT(sizeof(vk::VkBuffer)		== sizeof(deUint64));
+
+namespace vk
+{
+
+static bool isOutOfMemoryError (VkResult result)
+{
+	return result == VK_ERROR_OUT_OF_DEVICE_MEMORY	||
+		   result == VK_ERROR_OUT_OF_HOST_MEMORY;
+}
+
+Error::Error (VkResult error, const char* message, const char* expr, const char* file, int line)
+	: tcu::TestError	(message, expr, file, line)
+	, m_error			(error)
+{
+}
+
+Error::Error (VkResult error, const std::string& message)
+	: tcu::TestError	(message)
+	, m_error			(error)
+{
+}
+
+Error::~Error (void) throw()
+{
+}
+
+OutOfMemoryError::OutOfMemoryError (VkResult error, const char* message, const char* expr, const char* file, int line)
+	: tcu::ResourceError(message, expr, file, line)
+	, m_error			(error)
+{
+	DE_ASSERT(isOutOfMemoryError(error));
+}
+
+OutOfMemoryError::OutOfMemoryError (VkResult error, const std::string& message)
+	: tcu::ResourceError(message)
+	, m_error			(error)
+{
+	DE_ASSERT(isOutOfMemoryError(error));
+}
+
+OutOfMemoryError::~OutOfMemoryError (void) throw()
+{
+}
+
+void checkResult (VkResult result, const char* msg, const char* file, int line)
+{
+	if (result != VK_SUCCESS)
+	{
+		std::ostringstream msgStr;
+		if (msg)
+			msgStr << msg << ": ";
+
+		msgStr << getResultStr(result);
+
+		if (isOutOfMemoryError(result))
+			throw OutOfMemoryError(result, msgStr.str().c_str(), DE_NULL, file, line);
+		else
+			throw Error(result, msgStr.str().c_str(), DE_NULL, file, line);
+	}
+}
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkDefs.hpp b/external/vulkancts/framework/vulkan/vkDefs.hpp
new file mode 100644
index 0000000..2bf29fa
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkDefs.hpp
@@ -0,0 +1,192 @@
+#ifndef _VKDEFS_HPP
+#define _VKDEFS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+
+#if (DE_OS == DE_OS_ANDROID) && defined(__ARM_ARCH_7A__)
+#	define VKAPI_ATTR __attribute__((pcs("aapcs-vfp")))
+#else
+#	define VKAPI_ATTR
+#endif
+
+#if (DE_OS == DE_OS_WIN32) && ((_MSC_VER >= 800) || defined(_STDCALL_SUPPORTED))
+#	define VKAPI_CALL __stdcall
+#else
+#	define VKAPI_CALL
+#endif
+
+#define VK_DEFINE_HANDLE(NAME, TYPE)					typedef struct NAME##_s* NAME
+#define VK_DEFINE_NON_DISPATCHABLE_HANDLE(NAME, TYPE)	typedef Handle<TYPE> NAME
+
+#define VK_MAKE_VERSION(MAJOR, MINOR, PATCH)	((MAJOR << 22) | (MINOR << 12) | PATCH)
+#define VK_BIT(NUM)								(1<<NUM)
+
+#define VK_CHECK(EXPR)							vk::checkResult((EXPR), #EXPR, __FILE__, __LINE__)
+#define VK_CHECK_MSG(EXPR, MSG)					vk::checkResult((EXPR), MSG, __FILE__, __LINE__)
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Vulkan utilities
+ *//*--------------------------------------------------------------------*/
+namespace vk
+{
+
+typedef deUint64	VkDeviceSize;
+typedef deUint32	VkSampleMask;
+typedef deUint32	VkBool32;
+typedef deUint32	VkFlags;
+
+// enum HandleType { HANDLE_TYPE_INSTANCE, ... };
+#include "vkHandleType.inl"
+
+template<HandleType Type>
+class Handle
+{
+public:
+				Handle		(void) {} // \note Left uninitialized on purpose
+				Handle		(deUint64 internal) : m_internal(internal) {}
+
+	Handle&		operator=	(deUint64 internal)					{ m_internal = internal; return *this;			}
+
+	bool		operator==	(const Handle<Type>& other) const	{ return this->m_internal == other.m_internal;	}
+	bool		operator!=	(const Handle<Type>& other) const	{ return this->m_internal != other.m_internal;	}
+
+	bool		operator!	(void) const						{ return !m_internal;							}
+
+	deUint64	getInternal	(void) const						{ return m_internal;							}
+
+	enum { HANDLE_TYPE = Type };
+
+private:
+	deUint64	m_internal;
+};
+
+#include "vkBasicTypes.inl"
+
+enum { VK_QUEUE_FAMILY_IGNORED		= 0xffffffff	};
+enum { VK_NO_ATTACHMENT				= 0xffffffff	};
+
+enum
+{
+	VK_FALSE		= 0,
+	VK_TRUE			= 1,
+	VK_WHOLE_SIZE	= (~0ULL),
+};
+
+typedef VKAPI_ATTR void		(VKAPI_CALL* PFN_vkVoidFunction)					(void);
+
+typedef VKAPI_ATTR void*	(VKAPI_CALL* PFN_vkAllocationFunction)				(void* pUserData, size_t size, size_t alignment, VkSystemAllocationScope allocationScope);
+typedef VKAPI_ATTR void*	(VKAPI_CALL* PFN_vkReallocationFunction)			(void* pUserData, void* pOriginal, size_t size, size_t alignment, VkSystemAllocationScope allocationScope);
+typedef VKAPI_ATTR void		(VKAPI_CALL* PFN_vkFreeFunction)					(void* pUserData, void* pMem);
+typedef VKAPI_ATTR void		(VKAPI_CALL* PFN_vkInternalAllocationNotification)	(void* pUserData, size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope);
+typedef VKAPI_ATTR void		(VKAPI_CALL* PFN_vkInternalFreeNotification)		(void* pUserData, size_t size, VkInternalAllocationType allocationType, VkSystemAllocationScope allocationScope);
+
+#include "vkStructTypes.inl"
+
+extern "C"
+{
+#include "vkFunctionPointerTypes.inl"
+}
+
+class PlatformInterface
+{
+public:
+#include "vkVirtualPlatformInterface.inl"
+
+protected:
+						PlatformInterface	(void) {}
+
+private:
+						PlatformInterface	(const PlatformInterface&);
+	PlatformInterface&	operator=			(const PlatformInterface&);
+};
+
+class InstanceInterface
+{
+public:
+#include "vkVirtualInstanceInterface.inl"
+
+protected:
+						InstanceInterface	(void) {}
+
+private:
+						InstanceInterface	(const InstanceInterface&);
+	InstanceInterface&	operator=			(const InstanceInterface&);
+};
+
+class DeviceInterface
+{
+public:
+#include "vkVirtualDeviceInterface.inl"
+
+protected:
+						DeviceInterface		(void) {}
+
+private:
+						DeviceInterface		(const DeviceInterface&);
+	DeviceInterface&	operator=			(const DeviceInterface&);
+};
+
+class Error : public tcu::TestError
+{
+public:
+					Error				(VkResult error, const char* message, const char* expr, const char* file, int line);
+					Error				(VkResult error, const std::string& message);
+	virtual			~Error				(void) throw();
+
+	VkResult		getError			(void) const { return m_error; }
+
+private:
+	const VkResult	m_error;
+};
+
+class OutOfMemoryError : public tcu::ResourceError
+{
+public:
+					OutOfMemoryError	(VkResult error, const char* message, const char* expr, const char* file, int line);
+					OutOfMemoryError	(VkResult error, const std::string& message);
+	virtual			~OutOfMemoryError	(void) throw();
+
+	VkResult		getError			(void) const { return m_error; }
+
+private:
+	const VkResult	m_error;
+};
+
+void			checkResult			(VkResult result, const char* message, const char* file, int line);
+
+} // vk
+
+#endif // _VKDEFS_HPP
diff --git a/external/vulkancts/framework/vulkan/vkDeviceDriverImpl.inl b/external/vulkancts/framework/vulkan/vkDeviceDriverImpl.inl
new file mode 100644
index 0000000..ff65bf2
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkDeviceDriverImpl.inl
@@ -0,0 +1,608 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+
+void DeviceDriver::destroyDevice (VkDevice device, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyDevice(device, pAllocator);
+}
+
+void DeviceDriver::getDeviceQueue (VkDevice device, deUint32 queueFamilyIndex, deUint32 queueIndex, VkQueue* pQueue) const
+{
+	m_vk.getDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
+}
+
+VkResult DeviceDriver::queueSubmit (VkQueue queue, deUint32 submitCount, const VkSubmitInfo* pSubmits, VkFence fence) const
+{
+	return m_vk.queueSubmit(queue, submitCount, pSubmits, fence);
+}
+
+VkResult DeviceDriver::queueWaitIdle (VkQueue queue) const
+{
+	return m_vk.queueWaitIdle(queue);
+}
+
+VkResult DeviceDriver::deviceWaitIdle (VkDevice device) const
+{
+	return m_vk.deviceWaitIdle(device);
+}
+
+VkResult DeviceDriver::allocateMemory (VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) const
+{
+	return m_vk.allocateMemory(device, pAllocateInfo, pAllocator, pMemory);
+}
+
+void DeviceDriver::freeMemory (VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.freeMemory(device, memory, pAllocator);
+}
+
+VkResult DeviceDriver::mapMemory (VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) const
+{
+	return m_vk.mapMemory(device, memory, offset, size, flags, ppData);
+}
+
+void DeviceDriver::unmapMemory (VkDevice device, VkDeviceMemory memory) const
+{
+	m_vk.unmapMemory(device, memory);
+}
+
+VkResult DeviceDriver::flushMappedMemoryRanges (VkDevice device, deUint32 memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) const
+{
+	return m_vk.flushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+}
+
+VkResult DeviceDriver::invalidateMappedMemoryRanges (VkDevice device, deUint32 memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) const
+{
+	return m_vk.invalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
+}
+
+void DeviceDriver::getDeviceMemoryCommitment (VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) const
+{
+	m_vk.getDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
+}
+
+VkResult DeviceDriver::bindBufferMemory (VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) const
+{
+	return m_vk.bindBufferMemory(device, buffer, memory, memoryOffset);
+}
+
+VkResult DeviceDriver::bindImageMemory (VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) const
+{
+	return m_vk.bindImageMemory(device, image, memory, memoryOffset);
+}
+
+void DeviceDriver::getBufferMemoryRequirements (VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements) const
+{
+	m_vk.getBufferMemoryRequirements(device, buffer, pMemoryRequirements);
+}
+
+void DeviceDriver::getImageMemoryRequirements (VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements) const
+{
+	m_vk.getImageMemoryRequirements(device, image, pMemoryRequirements);
+}
+
+void DeviceDriver::getImageSparseMemoryRequirements (VkDevice device, VkImage image, deUint32* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) const
+{
+	m_vk.getImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
+}
+
+void DeviceDriver::getPhysicalDeviceSparseImageFormatProperties (VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, deUint32* pPropertyCount, VkSparseImageFormatProperties* pProperties) const
+{
+	m_vk.getPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
+}
+
+VkResult DeviceDriver::queueBindSparse (VkQueue queue, deUint32 bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) const
+{
+	return m_vk.queueBindSparse(queue, bindInfoCount, pBindInfo, fence);
+}
+
+VkResult DeviceDriver::createFence (VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) const
+{
+	return m_vk.createFence(device, pCreateInfo, pAllocator, pFence);
+}
+
+void DeviceDriver::destroyFence (VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyFence(device, fence, pAllocator);
+}
+
+VkResult DeviceDriver::resetFences (VkDevice device, deUint32 fenceCount, const VkFence* pFences) const
+{
+	return m_vk.resetFences(device, fenceCount, pFences);
+}
+
+VkResult DeviceDriver::getFenceStatus (VkDevice device, VkFence fence) const
+{
+	return m_vk.getFenceStatus(device, fence);
+}
+
+VkResult DeviceDriver::waitForFences (VkDevice device, deUint32 fenceCount, const VkFence* pFences, VkBool32 waitAll, deUint64 timeout) const
+{
+	return m_vk.waitForFences(device, fenceCount, pFences, waitAll, timeout);
+}
+
+VkResult DeviceDriver::createSemaphore (VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore) const
+{
+	return m_vk.createSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
+}
+
+void DeviceDriver::destroySemaphore (VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroySemaphore(device, semaphore, pAllocator);
+}
+
+VkResult DeviceDriver::createEvent (VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent) const
+{
+	return m_vk.createEvent(device, pCreateInfo, pAllocator, pEvent);
+}
+
+void DeviceDriver::destroyEvent (VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyEvent(device, event, pAllocator);
+}
+
+VkResult DeviceDriver::getEventStatus (VkDevice device, VkEvent event) const
+{
+	return m_vk.getEventStatus(device, event);
+}
+
+VkResult DeviceDriver::setEvent (VkDevice device, VkEvent event) const
+{
+	return m_vk.setEvent(device, event);
+}
+
+VkResult DeviceDriver::resetEvent (VkDevice device, VkEvent event) const
+{
+	return m_vk.resetEvent(device, event);
+}
+
+VkResult DeviceDriver::createQueryPool (VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool) const
+{
+	return m_vk.createQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
+}
+
+void DeviceDriver::destroyQueryPool (VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyQueryPool(device, queryPool, pAllocator);
+}
+
+VkResult DeviceDriver::getQueryPoolResults (VkDevice device, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount, deUintptr dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) const
+{
+	return m_vk.getQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
+}
+
+VkResult DeviceDriver::createBuffer (VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) const
+{
+	return m_vk.createBuffer(device, pCreateInfo, pAllocator, pBuffer);
+}
+
+void DeviceDriver::destroyBuffer (VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyBuffer(device, buffer, pAllocator);
+}
+
+VkResult DeviceDriver::createBufferView (VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView) const
+{
+	return m_vk.createBufferView(device, pCreateInfo, pAllocator, pView);
+}
+
+void DeviceDriver::destroyBufferView (VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyBufferView(device, bufferView, pAllocator);
+}
+
+VkResult DeviceDriver::createImage (VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage) const
+{
+	return m_vk.createImage(device, pCreateInfo, pAllocator, pImage);
+}
+
+void DeviceDriver::destroyImage (VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyImage(device, image, pAllocator);
+}
+
+void DeviceDriver::getImageSubresourceLayout (VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) const
+{
+	m_vk.getImageSubresourceLayout(device, image, pSubresource, pLayout);
+}
+
+VkResult DeviceDriver::createImageView (VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView) const
+{
+	return m_vk.createImageView(device, pCreateInfo, pAllocator, pView);
+}
+
+void DeviceDriver::destroyImageView (VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyImageView(device, imageView, pAllocator);
+}
+
+VkResult DeviceDriver::createShaderModule (VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule) const
+{
+	return m_vk.createShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
+}
+
+void DeviceDriver::destroyShaderModule (VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyShaderModule(device, shaderModule, pAllocator);
+}
+
+VkResult DeviceDriver::createPipelineCache (VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache) const
+{
+	return m_vk.createPipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
+}
+
+void DeviceDriver::destroyPipelineCache (VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyPipelineCache(device, pipelineCache, pAllocator);
+}
+
+VkResult DeviceDriver::getPipelineCacheData (VkDevice device, VkPipelineCache pipelineCache, deUintptr* pDataSize, void* pData) const
+{
+	return m_vk.getPipelineCacheData(device, pipelineCache, pDataSize, pData);
+}
+
+VkResult DeviceDriver::mergePipelineCaches (VkDevice device, VkPipelineCache dstCache, deUint32 srcCacheCount, const VkPipelineCache* pSrcCaches) const
+{
+	return m_vk.mergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
+}
+
+VkResult DeviceDriver::createGraphicsPipelines (VkDevice device, VkPipelineCache pipelineCache, deUint32 createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const
+{
+	return m_vk.createGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+}
+
+VkResult DeviceDriver::createComputePipelines (VkDevice device, VkPipelineCache pipelineCache, deUint32 createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const
+{
+	return m_vk.createComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
+}
+
+void DeviceDriver::destroyPipeline (VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyPipeline(device, pipeline, pAllocator);
+}
+
+VkResult DeviceDriver::createPipelineLayout (VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout) const
+{
+	return m_vk.createPipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
+}
+
+void DeviceDriver::destroyPipelineLayout (VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyPipelineLayout(device, pipelineLayout, pAllocator);
+}
+
+VkResult DeviceDriver::createSampler (VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) const
+{
+	return m_vk.createSampler(device, pCreateInfo, pAllocator, pSampler);
+}
+
+void DeviceDriver::destroySampler (VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroySampler(device, sampler, pAllocator);
+}
+
+VkResult DeviceDriver::createDescriptorSetLayout (VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout) const
+{
+	return m_vk.createDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
+}
+
+void DeviceDriver::destroyDescriptorSetLayout (VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
+}
+
+VkResult DeviceDriver::createDescriptorPool (VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) const
+{
+	return m_vk.createDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
+}
+
+void DeviceDriver::destroyDescriptorPool (VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyDescriptorPool(device, descriptorPool, pAllocator);
+}
+
+VkResult DeviceDriver::resetDescriptorPool (VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) const
+{
+	return m_vk.resetDescriptorPool(device, descriptorPool, flags);
+}
+
+VkResult DeviceDriver::allocateDescriptorSets (VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) const
+{
+	return m_vk.allocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
+}
+
+VkResult DeviceDriver::freeDescriptorSets (VkDevice device, VkDescriptorPool descriptorPool, deUint32 descriptorSetCount, const VkDescriptorSet* pDescriptorSets) const
+{
+	return m_vk.freeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
+}
+
+void DeviceDriver::updateDescriptorSets (VkDevice device, deUint32 descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, deUint32 descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies) const
+{
+	m_vk.updateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
+}
+
+VkResult DeviceDriver::createFramebuffer (VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer) const
+{
+	return m_vk.createFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
+}
+
+void DeviceDriver::destroyFramebuffer (VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyFramebuffer(device, framebuffer, pAllocator);
+}
+
+VkResult DeviceDriver::createRenderPass (VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const
+{
+	return m_vk.createRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
+}
+
+void DeviceDriver::destroyRenderPass (VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyRenderPass(device, renderPass, pAllocator);
+}
+
+void DeviceDriver::getRenderAreaGranularity (VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) const
+{
+	m_vk.getRenderAreaGranularity(device, renderPass, pGranularity);
+}
+
+VkResult DeviceDriver::createCommandPool (VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool) const
+{
+	return m_vk.createCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
+}
+
+void DeviceDriver::destroyCommandPool (VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyCommandPool(device, commandPool, pAllocator);
+}
+
+VkResult DeviceDriver::resetCommandPool (VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) const
+{
+	return m_vk.resetCommandPool(device, commandPool, flags);
+}
+
+VkResult DeviceDriver::allocateCommandBuffers (VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) const
+{
+	return m_vk.allocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
+}
+
+void DeviceDriver::freeCommandBuffers (VkDevice device, VkCommandPool commandPool, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers) const
+{
+	m_vk.freeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
+}
+
+VkResult DeviceDriver::beginCommandBuffer (VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo) const
+{
+	return m_vk.beginCommandBuffer(commandBuffer, pBeginInfo);
+}
+
+VkResult DeviceDriver::endCommandBuffer (VkCommandBuffer commandBuffer) const
+{
+	return m_vk.endCommandBuffer(commandBuffer);
+}
+
+VkResult DeviceDriver::resetCommandBuffer (VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) const
+{
+	return m_vk.resetCommandBuffer(commandBuffer, flags);
+}
+
+void DeviceDriver::cmdBindPipeline (VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) const
+{
+	m_vk.cmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
+}
+
+void DeviceDriver::cmdSetViewport (VkCommandBuffer commandBuffer, deUint32 firstViewport, deUint32 viewportCount, const VkViewport* pViewports) const
+{
+	m_vk.cmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
+}
+
+void DeviceDriver::cmdSetScissor (VkCommandBuffer commandBuffer, deUint32 firstScissor, deUint32 scissorCount, const VkRect2D* pScissors) const
+{
+	m_vk.cmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
+}
+
+void DeviceDriver::cmdSetLineWidth (VkCommandBuffer commandBuffer, float lineWidth) const
+{
+	m_vk.cmdSetLineWidth(commandBuffer, lineWidth);
+}
+
+void DeviceDriver::cmdSetDepthBias (VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor) const
+{
+	m_vk.cmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+}
+
+void DeviceDriver::cmdSetBlendConstants (VkCommandBuffer commandBuffer, const float blendConstants[4]) const
+{
+	m_vk.cmdSetBlendConstants(commandBuffer, blendConstants);
+}
+
+void DeviceDriver::cmdSetDepthBounds (VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) const
+{
+	m_vk.cmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
+}
+
+void DeviceDriver::cmdSetStencilCompareMask (VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 compareMask) const
+{
+	m_vk.cmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
+}
+
+void DeviceDriver::cmdSetStencilWriteMask (VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 writeMask) const
+{
+	m_vk.cmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
+}
+
+void DeviceDriver::cmdSetStencilReference (VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 reference) const
+{
+	m_vk.cmdSetStencilReference(commandBuffer, faceMask, reference);
+}
+
+void DeviceDriver::cmdBindDescriptorSets (VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, deUint32 firstSet, deUint32 descriptorSetCount, const VkDescriptorSet* pDescriptorSets, deUint32 dynamicOffsetCount, const deUint32* pDynamicOffsets) const
+{
+	m_vk.cmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
+}
+
+void DeviceDriver::cmdBindIndexBuffer (VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) const
+{
+	m_vk.cmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
+}
+
+void DeviceDriver::cmdBindVertexBuffers (VkCommandBuffer commandBuffer, deUint32 firstBinding, deUint32 bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) const
+{
+	m_vk.cmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
+}
+
+void DeviceDriver::cmdDraw (VkCommandBuffer commandBuffer, deUint32 vertexCount, deUint32 instanceCount, deUint32 firstVertex, deUint32 firstInstance) const
+{
+	m_vk.cmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
+}
+
+void DeviceDriver::cmdDrawIndexed (VkCommandBuffer commandBuffer, deUint32 indexCount, deUint32 instanceCount, deUint32 firstIndex, deInt32 vertexOffset, deUint32 firstInstance) const
+{
+	m_vk.cmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
+}
+
+void DeviceDriver::cmdDrawIndirect (VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, deUint32 drawCount, deUint32 stride) const
+{
+	m_vk.cmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
+}
+
+void DeviceDriver::cmdDrawIndexedIndirect (VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, deUint32 drawCount, deUint32 stride) const
+{
+	m_vk.cmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
+}
+
+void DeviceDriver::cmdDispatch (VkCommandBuffer commandBuffer, deUint32 x, deUint32 y, deUint32 z) const
+{
+	m_vk.cmdDispatch(commandBuffer, x, y, z);
+}
+
+void DeviceDriver::cmdDispatchIndirect (VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const
+{
+	m_vk.cmdDispatchIndirect(commandBuffer, buffer, offset);
+}
+
+void DeviceDriver::cmdCopyBuffer (VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, deUint32 regionCount, const VkBufferCopy* pRegions) const
+{
+	m_vk.cmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
+}
+
+void DeviceDriver::cmdCopyImage (VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageCopy* pRegions) const
+{
+	m_vk.cmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+}
+
+void DeviceDriver::cmdBlitImage (VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageBlit* pRegions, VkFilter filter) const
+{
+	m_vk.cmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
+}
+
+void DeviceDriver::cmdCopyBufferToImage (VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkBufferImageCopy* pRegions) const
+{
+	m_vk.cmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
+}
+
+void DeviceDriver::cmdCopyImageToBuffer (VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, deUint32 regionCount, const VkBufferImageCopy* pRegions) const
+{
+	m_vk.cmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
+}
+
+void DeviceDriver::cmdUpdateBuffer (VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const deUint32* pData) const
+{
+	m_vk.cmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
+}
+
+void DeviceDriver::cmdFillBuffer (VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, deUint32 data) const
+{
+	m_vk.cmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
+}
+
+void DeviceDriver::cmdClearColorImage (VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, deUint32 rangeCount, const VkImageSubresourceRange* pRanges) const
+{
+	m_vk.cmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
+}
+
+void DeviceDriver::cmdClearDepthStencilImage (VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, deUint32 rangeCount, const VkImageSubresourceRange* pRanges) const
+{
+	m_vk.cmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
+}
+
+void DeviceDriver::cmdClearAttachments (VkCommandBuffer commandBuffer, deUint32 attachmentCount, const VkClearAttachment* pAttachments, deUint32 rectCount, const VkClearRect* pRects) const
+{
+	m_vk.cmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
+}
+
+void DeviceDriver::cmdResolveImage (VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageResolve* pRegions) const
+{
+	m_vk.cmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
+}
+
+void DeviceDriver::cmdSetEvent (VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const
+{
+	m_vk.cmdSetEvent(commandBuffer, event, stageMask);
+}
+
+void DeviceDriver::cmdResetEvent (VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const
+{
+	m_vk.cmdResetEvent(commandBuffer, event, stageMask);
+}
+
+void DeviceDriver::cmdWaitEvents (VkCommandBuffer commandBuffer, deUint32 eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, deUint32 memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, deUint32 bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, deUint32 imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const
+{
+	m_vk.cmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+}
+
+void DeviceDriver::cmdPipelineBarrier (VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, deUint32 memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, deUint32 bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, deUint32 imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const
+{
+	m_vk.cmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
+}
+
+void DeviceDriver::cmdBeginQuery (VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 query, VkQueryControlFlags flags) const
+{
+	m_vk.cmdBeginQuery(commandBuffer, queryPool, query, flags);
+}
+
+void DeviceDriver::cmdEndQuery (VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 query) const
+{
+	m_vk.cmdEndQuery(commandBuffer, queryPool, query);
+}
+
+void DeviceDriver::cmdResetQueryPool (VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount) const
+{
+	m_vk.cmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
+}
+
+void DeviceDriver::cmdWriteTimestamp (VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, deUint32 query) const
+{
+	m_vk.cmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
+}
+
+void DeviceDriver::cmdCopyQueryPoolResults (VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) const
+{
+	m_vk.cmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
+}
+
+void DeviceDriver::cmdPushConstants (VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, deUint32 offset, deUint32 size, const void* pValues) const
+{
+	m_vk.cmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
+}
+
+void DeviceDriver::cmdBeginRenderPass (VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) const
+{
+	m_vk.cmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
+}
+
+void DeviceDriver::cmdNextSubpass (VkCommandBuffer commandBuffer, VkSubpassContents contents) const
+{
+	m_vk.cmdNextSubpass(commandBuffer, contents);
+}
+
+void DeviceDriver::cmdEndRenderPass (VkCommandBuffer commandBuffer) const
+{
+	m_vk.cmdEndRenderPass(commandBuffer);
+}
+
+void DeviceDriver::cmdExecuteCommands (VkCommandBuffer commandBuffer, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers) const
+{
+	m_vk.cmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
+}
diff --git a/external/vulkancts/framework/vulkan/vkDeviceFunctionPointers.inl b/external/vulkancts/framework/vulkan/vkDeviceFunctionPointers.inl
new file mode 100644
index 0000000..acf07f2
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkDeviceFunctionPointers.inl
@@ -0,0 +1,124 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+DestroyDeviceFunc									destroyDevice;
+GetDeviceQueueFunc									getDeviceQueue;
+QueueSubmitFunc										queueSubmit;
+QueueWaitIdleFunc									queueWaitIdle;
+DeviceWaitIdleFunc									deviceWaitIdle;
+AllocateMemoryFunc									allocateMemory;
+FreeMemoryFunc										freeMemory;
+MapMemoryFunc										mapMemory;
+UnmapMemoryFunc										unmapMemory;
+FlushMappedMemoryRangesFunc							flushMappedMemoryRanges;
+InvalidateMappedMemoryRangesFunc					invalidateMappedMemoryRanges;
+GetDeviceMemoryCommitmentFunc						getDeviceMemoryCommitment;
+BindBufferMemoryFunc								bindBufferMemory;
+BindImageMemoryFunc									bindImageMemory;
+GetBufferMemoryRequirementsFunc						getBufferMemoryRequirements;
+GetImageMemoryRequirementsFunc						getImageMemoryRequirements;
+GetImageSparseMemoryRequirementsFunc				getImageSparseMemoryRequirements;
+GetPhysicalDeviceSparseImageFormatPropertiesFunc	getPhysicalDeviceSparseImageFormatProperties;
+QueueBindSparseFunc									queueBindSparse;
+CreateFenceFunc										createFence;
+DestroyFenceFunc									destroyFence;
+ResetFencesFunc										resetFences;
+GetFenceStatusFunc									getFenceStatus;
+WaitForFencesFunc									waitForFences;
+CreateSemaphoreFunc									createSemaphore;
+DestroySemaphoreFunc								destroySemaphore;
+CreateEventFunc										createEvent;
+DestroyEventFunc									destroyEvent;
+GetEventStatusFunc									getEventStatus;
+SetEventFunc										setEvent;
+ResetEventFunc										resetEvent;
+CreateQueryPoolFunc									createQueryPool;
+DestroyQueryPoolFunc								destroyQueryPool;
+GetQueryPoolResultsFunc								getQueryPoolResults;
+CreateBufferFunc									createBuffer;
+DestroyBufferFunc									destroyBuffer;
+CreateBufferViewFunc								createBufferView;
+DestroyBufferViewFunc								destroyBufferView;
+CreateImageFunc										createImage;
+DestroyImageFunc									destroyImage;
+GetImageSubresourceLayoutFunc						getImageSubresourceLayout;
+CreateImageViewFunc									createImageView;
+DestroyImageViewFunc								destroyImageView;
+CreateShaderModuleFunc								createShaderModule;
+DestroyShaderModuleFunc								destroyShaderModule;
+CreatePipelineCacheFunc								createPipelineCache;
+DestroyPipelineCacheFunc							destroyPipelineCache;
+GetPipelineCacheDataFunc							getPipelineCacheData;
+MergePipelineCachesFunc								mergePipelineCaches;
+CreateGraphicsPipelinesFunc							createGraphicsPipelines;
+CreateComputePipelinesFunc							createComputePipelines;
+DestroyPipelineFunc									destroyPipeline;
+CreatePipelineLayoutFunc							createPipelineLayout;
+DestroyPipelineLayoutFunc							destroyPipelineLayout;
+CreateSamplerFunc									createSampler;
+DestroySamplerFunc									destroySampler;
+CreateDescriptorSetLayoutFunc						createDescriptorSetLayout;
+DestroyDescriptorSetLayoutFunc						destroyDescriptorSetLayout;
+CreateDescriptorPoolFunc							createDescriptorPool;
+DestroyDescriptorPoolFunc							destroyDescriptorPool;
+ResetDescriptorPoolFunc								resetDescriptorPool;
+AllocateDescriptorSetsFunc							allocateDescriptorSets;
+FreeDescriptorSetsFunc								freeDescriptorSets;
+UpdateDescriptorSetsFunc							updateDescriptorSets;
+CreateFramebufferFunc								createFramebuffer;
+DestroyFramebufferFunc								destroyFramebuffer;
+CreateRenderPassFunc								createRenderPass;
+DestroyRenderPassFunc								destroyRenderPass;
+GetRenderAreaGranularityFunc						getRenderAreaGranularity;
+CreateCommandPoolFunc								createCommandPool;
+DestroyCommandPoolFunc								destroyCommandPool;
+ResetCommandPoolFunc								resetCommandPool;
+AllocateCommandBuffersFunc							allocateCommandBuffers;
+FreeCommandBuffersFunc								freeCommandBuffers;
+BeginCommandBufferFunc								beginCommandBuffer;
+EndCommandBufferFunc								endCommandBuffer;
+ResetCommandBufferFunc								resetCommandBuffer;
+CmdBindPipelineFunc									cmdBindPipeline;
+CmdSetViewportFunc									cmdSetViewport;
+CmdSetScissorFunc									cmdSetScissor;
+CmdSetLineWidthFunc									cmdSetLineWidth;
+CmdSetDepthBiasFunc									cmdSetDepthBias;
+CmdSetBlendConstantsFunc							cmdSetBlendConstants;
+CmdSetDepthBoundsFunc								cmdSetDepthBounds;
+CmdSetStencilCompareMaskFunc						cmdSetStencilCompareMask;
+CmdSetStencilWriteMaskFunc							cmdSetStencilWriteMask;
+CmdSetStencilReferenceFunc							cmdSetStencilReference;
+CmdBindDescriptorSetsFunc							cmdBindDescriptorSets;
+CmdBindIndexBufferFunc								cmdBindIndexBuffer;
+CmdBindVertexBuffersFunc							cmdBindVertexBuffers;
+CmdDrawFunc											cmdDraw;
+CmdDrawIndexedFunc									cmdDrawIndexed;
+CmdDrawIndirectFunc									cmdDrawIndirect;
+CmdDrawIndexedIndirectFunc							cmdDrawIndexedIndirect;
+CmdDispatchFunc										cmdDispatch;
+CmdDispatchIndirectFunc								cmdDispatchIndirect;
+CmdCopyBufferFunc									cmdCopyBuffer;
+CmdCopyImageFunc									cmdCopyImage;
+CmdBlitImageFunc									cmdBlitImage;
+CmdCopyBufferToImageFunc							cmdCopyBufferToImage;
+CmdCopyImageToBufferFunc							cmdCopyImageToBuffer;
+CmdUpdateBufferFunc									cmdUpdateBuffer;
+CmdFillBufferFunc									cmdFillBuffer;
+CmdClearColorImageFunc								cmdClearColorImage;
+CmdClearDepthStencilImageFunc						cmdClearDepthStencilImage;
+CmdClearAttachmentsFunc								cmdClearAttachments;
+CmdResolveImageFunc									cmdResolveImage;
+CmdSetEventFunc										cmdSetEvent;
+CmdResetEventFunc									cmdResetEvent;
+CmdWaitEventsFunc									cmdWaitEvents;
+CmdPipelineBarrierFunc								cmdPipelineBarrier;
+CmdBeginQueryFunc									cmdBeginQuery;
+CmdEndQueryFunc										cmdEndQuery;
+CmdResetQueryPoolFunc								cmdResetQueryPool;
+CmdWriteTimestampFunc								cmdWriteTimestamp;
+CmdCopyQueryPoolResultsFunc							cmdCopyQueryPoolResults;
+CmdPushConstantsFunc								cmdPushConstants;
+CmdBeginRenderPassFunc								cmdBeginRenderPass;
+CmdNextSubpassFunc									cmdNextSubpass;
+CmdEndRenderPassFunc								cmdEndRenderPass;
+CmdExecuteCommandsFunc								cmdExecuteCommands;
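+// Illustrative sketch only: each entry above is a raw entry point resolved at
+// run time, along the lines of
+//
+//   ptrs.cmdDraw = (CmdDrawFunc)getDeviceProcAddr(device, "vkCmdDraw");
+//
+// where 'ptrs' is a hypothetical instance of the enclosing function table and
+// 'getDeviceProcAddr' a wrapper around vkGetDeviceProcAddr (assumed names).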
diff --git a/external/vulkancts/framework/vulkan/vkDeviceUtil.cpp b/external/vulkancts/framework/vulkan/vkDeviceUtil.cpp
new file mode 100644
index 0000000..1a1caa3
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkDeviceUtil.cpp
@@ -0,0 +1,90 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Instance and device initialization utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDeviceUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRefUtil.hpp"
+
+#include "tcuCommandLine.hpp"
+
+#include "qpInfo.h"
+
+#include <vector>
+
+namespace vk
+{
+
+using std::vector;
+
+Move<VkInstance> createDefaultInstance (const PlatformInterface& vkPlatform)
+{
+	const struct VkApplicationInfo		appInfo			=
+	{
+		VK_STRUCTURE_TYPE_APPLICATION_INFO,
+		DE_NULL,
+		"deqp",									// pAppName
+		qpGetReleaseId(),						// appVersion
+		"deqp",									// pEngineName
+		qpGetReleaseId(),						// engineVersion
+		VK_API_VERSION							// apiVersion
+	};
+	const struct VkInstanceCreateInfo	instanceInfo	=
+	{
+		VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+		DE_NULL,
+		(VkInstanceCreateFlags)0,
+		&appInfo,
+		0u,										// enabledLayerNameCount
+		DE_NULL,								// ppEnabledLayerNames
+		0u,										// enabledExtensionNameCount;
+		DE_NULL									// ppEnabledExtensionNames
+	};
+
+	return createInstance(vkPlatform, &instanceInfo);
+}
+
+VkPhysicalDevice chooseDevice (const InstanceInterface& vkInstance, VkInstance instance, const tcu::CommandLine& cmdLine)
+{
+	const vector<VkPhysicalDevice>	devices	= enumeratePhysicalDevices(vkInstance, instance);
+
+	if (devices.empty())
+		TCU_THROW(NotSupportedError, "No Vulkan devices available");
+
+	if (!de::inBounds(cmdLine.getVKDeviceId(), 1, (int)devices.size()+1))
+		TCU_THROW(InternalError, "Invalid --deqp-vk-device-id");
+
+	return devices[(size_t)(cmdLine.getVKDeviceId()-1)];
+}
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkDeviceUtil.hpp b/external/vulkancts/framework/vulkan/vkDeviceUtil.hpp
new file mode 100644
index 0000000..f57d819
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkDeviceUtil.hpp
@@ -0,0 +1,53 @@
+#ifndef _VKDEVICEUTIL_HPP
+#define _VKDEVICEUTIL_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Instance and device initialization utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vkRef.hpp"
+
+namespace tcu
+{
+class CommandLine;
+}
+
+namespace vk
+{
+
+Move<VkInstance>	createDefaultInstance	(const PlatformInterface& vkPlatform);
+VkPhysicalDevice	chooseDevice			(const InstanceInterface& vkInstance, VkInstance instance, const tcu::CommandLine& cmdLine);
+
+} // vk
+
+#endif // _VKDEVICEUTIL_HPP
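+// Usage sketch (illustrative only; 'vkPlatform', 'vki' and 'cmdLine' are assumed
+// to be provided by the surrounding test context):
+//
+//   const Move<VkInstance>  instance   = createDefaultInstance(vkPlatform);
+//   const VkPhysicalDevice  physDevice = chooseDevice(vki, *instance, cmdLine);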
diff --git a/external/vulkancts/framework/vulkan/vkFunctionPointerTypes.inl b/external/vulkancts/framework/vulkan/vkFunctionPointerTypes.inl
new file mode 100644
index 0000000..8630c6d
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkFunctionPointerTypes.inl
@@ -0,0 +1,140 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateInstanceFunc)								(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyInstanceFunc)								(VkInstance instance, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* EnumeratePhysicalDevicesFunc)						(VkInstance instance, deUint32* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
+typedef VKAPI_ATTR void					(VKAPI_CALL* GetPhysicalDeviceFeaturesFunc)						(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures);
+typedef VKAPI_ATTR void					(VKAPI_CALL* GetPhysicalDeviceFormatPropertiesFunc)				(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* GetPhysicalDeviceImageFormatPropertiesFunc)		(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties);
+typedef VKAPI_ATTR void					(VKAPI_CALL* GetPhysicalDevicePropertiesFunc)					(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties);
+typedef VKAPI_ATTR void					(VKAPI_CALL* GetPhysicalDeviceQueueFamilyPropertiesFunc)		(VkPhysicalDevice physicalDevice, deUint32* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties);
+typedef VKAPI_ATTR void					(VKAPI_CALL* GetPhysicalDeviceMemoryPropertiesFunc)				(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties);
+typedef VKAPI_ATTR PFN_vkVoidFunction	(VKAPI_CALL* GetInstanceProcAddrFunc)							(VkInstance instance, const char* pName);
+typedef VKAPI_ATTR PFN_vkVoidFunction	(VKAPI_CALL* GetDeviceProcAddrFunc)								(VkDevice device, const char* pName);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateDeviceFunc)									(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyDeviceFunc)									(VkDevice device, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* EnumerateInstanceExtensionPropertiesFunc)			(const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* EnumerateDeviceExtensionPropertiesFunc)			(VkPhysicalDevice physicalDevice, const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* EnumerateInstanceLayerPropertiesFunc)				(deUint32* pPropertyCount, VkLayerProperties* pProperties);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* EnumerateDeviceLayerPropertiesFunc)				(VkPhysicalDevice physicalDevice, deUint32* pPropertyCount, VkLayerProperties* pProperties);
+typedef VKAPI_ATTR void					(VKAPI_CALL* GetDeviceQueueFunc)								(VkDevice device, deUint32 queueFamilyIndex, deUint32 queueIndex, VkQueue* pQueue);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* QueueSubmitFunc)									(VkQueue queue, deUint32 submitCount, const VkSubmitInfo* pSubmits, VkFence fence);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* QueueWaitIdleFunc)									(VkQueue queue);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* DeviceWaitIdleFunc)								(VkDevice device);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* AllocateMemoryFunc)								(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory);
+typedef VKAPI_ATTR void					(VKAPI_CALL* FreeMemoryFunc)									(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* MapMemoryFunc)										(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData);
+typedef VKAPI_ATTR void					(VKAPI_CALL* UnmapMemoryFunc)									(VkDevice device, VkDeviceMemory memory);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* FlushMappedMemoryRangesFunc)						(VkDevice device, deUint32 memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* InvalidateMappedMemoryRangesFunc)					(VkDevice device, deUint32 memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges);
+typedef VKAPI_ATTR void					(VKAPI_CALL* GetDeviceMemoryCommitmentFunc)						(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* BindBufferMemoryFunc)								(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* BindImageMemoryFunc)								(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset);
+typedef VKAPI_ATTR void					(VKAPI_CALL* GetBufferMemoryRequirementsFunc)					(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements);
+typedef VKAPI_ATTR void					(VKAPI_CALL* GetImageMemoryRequirementsFunc)					(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements);
+typedef VKAPI_ATTR void					(VKAPI_CALL* GetImageSparseMemoryRequirementsFunc)				(VkDevice device, VkImage image, deUint32* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
+typedef VKAPI_ATTR void					(VKAPI_CALL* GetPhysicalDeviceSparseImageFormatPropertiesFunc)	(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, deUint32* pPropertyCount, VkSparseImageFormatProperties* pProperties);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* QueueBindSparseFunc)								(VkQueue queue, deUint32 bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateFenceFunc)									(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyFenceFunc)									(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* ResetFencesFunc)									(VkDevice device, deUint32 fenceCount, const VkFence* pFences);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* GetFenceStatusFunc)								(VkDevice device, VkFence fence);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* WaitForFencesFunc)									(VkDevice device, deUint32 fenceCount, const VkFence* pFences, VkBool32 waitAll, deUint64 timeout);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateSemaphoreFunc)								(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroySemaphoreFunc)								(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateEventFunc)									(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyEventFunc)									(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* GetEventStatusFunc)								(VkDevice device, VkEvent event);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* SetEventFunc)										(VkDevice device, VkEvent event);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* ResetEventFunc)									(VkDevice device, VkEvent event);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateQueryPoolFunc)								(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyQueryPoolFunc)								(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* GetQueryPoolResultsFunc)							(VkDevice device, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount, deUintptr dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateBufferFunc)									(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyBufferFunc)									(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateBufferViewFunc)								(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyBufferViewFunc)								(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateImageFunc)									(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyImageFunc)									(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR void					(VKAPI_CALL* GetImageSubresourceLayoutFunc)						(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateImageViewFunc)								(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyImageViewFunc)								(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateShaderModuleFunc)							(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyShaderModuleFunc)							(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreatePipelineCacheFunc)							(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyPipelineCacheFunc)							(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* GetPipelineCacheDataFunc)							(VkDevice device, VkPipelineCache pipelineCache, deUintptr* pDataSize, void* pData);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* MergePipelineCachesFunc)							(VkDevice device, VkPipelineCache dstCache, deUint32 srcCacheCount, const VkPipelineCache* pSrcCaches);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateGraphicsPipelinesFunc)						(VkDevice device, VkPipelineCache pipelineCache, deUint32 createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateComputePipelinesFunc)						(VkDevice device, VkPipelineCache pipelineCache, deUint32 createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyPipelineFunc)								(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreatePipelineLayoutFunc)							(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyPipelineLayoutFunc)							(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateSamplerFunc)									(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroySamplerFunc)								(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateDescriptorSetLayoutFunc)						(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyDescriptorSetLayoutFunc)					(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateDescriptorPoolFunc)							(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyDescriptorPoolFunc)							(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* ResetDescriptorPoolFunc)							(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* AllocateDescriptorSetsFunc)						(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* FreeDescriptorSetsFunc)							(VkDevice device, VkDescriptorPool descriptorPool, deUint32 descriptorSetCount, const VkDescriptorSet* pDescriptorSets);
+typedef VKAPI_ATTR void					(VKAPI_CALL* UpdateDescriptorSetsFunc)							(VkDevice device, deUint32 descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, deUint32 descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateFramebufferFunc)								(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyFramebufferFunc)							(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateRenderPassFunc)								(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyRenderPassFunc)								(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR void					(VKAPI_CALL* GetRenderAreaGranularityFunc)						(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* CreateCommandPoolFunc)								(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool);
+typedef VKAPI_ATTR void					(VKAPI_CALL* DestroyCommandPoolFunc)							(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* ResetCommandPoolFunc)								(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* AllocateCommandBuffersFunc)						(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers);
+typedef VKAPI_ATTR void					(VKAPI_CALL* FreeCommandBuffersFunc)							(VkDevice device, VkCommandPool commandPool, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* BeginCommandBufferFunc)							(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* EndCommandBufferFunc)								(VkCommandBuffer commandBuffer);
+typedef VKAPI_ATTR VkResult				(VKAPI_CALL* ResetCommandBufferFunc)							(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdBindPipelineFunc)								(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdSetViewportFunc)								(VkCommandBuffer commandBuffer, deUint32 firstViewport, deUint32 viewportCount, const VkViewport* pViewports);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdSetScissorFunc)									(VkCommandBuffer commandBuffer, deUint32 firstScissor, deUint32 scissorCount, const VkRect2D* pScissors);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdSetLineWidthFunc)								(VkCommandBuffer commandBuffer, float lineWidth);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdSetDepthBiasFunc)								(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdSetBlendConstantsFunc)							(VkCommandBuffer commandBuffer, const float blendConstants[4]);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdSetDepthBoundsFunc)								(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdSetStencilCompareMaskFunc)						(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 compareMask);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdSetStencilWriteMaskFunc)						(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 writeMask);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdSetStencilReferenceFunc)						(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 reference);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdBindDescriptorSetsFunc)							(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, deUint32 firstSet, deUint32 descriptorSetCount, const VkDescriptorSet* pDescriptorSets, deUint32 dynamicOffsetCount, const deUint32* pDynamicOffsets);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdBindIndexBufferFunc)							(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdBindVertexBuffersFunc)							(VkCommandBuffer commandBuffer, deUint32 firstBinding, deUint32 bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdDrawFunc)										(VkCommandBuffer commandBuffer, deUint32 vertexCount, deUint32 instanceCount, deUint32 firstVertex, deUint32 firstInstance);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdDrawIndexedFunc)								(VkCommandBuffer commandBuffer, deUint32 indexCount, deUint32 instanceCount, deUint32 firstIndex, deInt32 vertexOffset, deUint32 firstInstance);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdDrawIndirectFunc)								(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, deUint32 drawCount, deUint32 stride);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdDrawIndexedIndirectFunc)						(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, deUint32 drawCount, deUint32 stride);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdDispatchFunc)									(VkCommandBuffer commandBuffer, deUint32 x, deUint32 y, deUint32 z);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdDispatchIndirectFunc)							(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdCopyBufferFunc)									(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, deUint32 regionCount, const VkBufferCopy* pRegions);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdCopyImageFunc)									(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageCopy* pRegions);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdBlitImageFunc)									(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageBlit* pRegions, VkFilter filter);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdCopyBufferToImageFunc)							(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkBufferImageCopy* pRegions);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdCopyImageToBufferFunc)							(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, deUint32 regionCount, const VkBufferImageCopy* pRegions);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdUpdateBufferFunc)								(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const deUint32* pData);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdFillBufferFunc)									(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, deUint32 data);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdClearColorImageFunc)							(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, deUint32 rangeCount, const VkImageSubresourceRange* pRanges);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdClearDepthStencilImageFunc)						(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, deUint32 rangeCount, const VkImageSubresourceRange* pRanges);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdClearAttachmentsFunc)							(VkCommandBuffer commandBuffer, deUint32 attachmentCount, const VkClearAttachment* pAttachments, deUint32 rectCount, const VkClearRect* pRects);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdResolveImageFunc)								(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageResolve* pRegions);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdSetEventFunc)									(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdResetEventFunc)									(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdWaitEventsFunc)									(VkCommandBuffer commandBuffer, deUint32 eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, deUint32 memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, deUint32 bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, deUint32 imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdPipelineBarrierFunc)							(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, deUint32 memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, deUint32 bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, deUint32 imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdBeginQueryFunc)									(VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 query, VkQueryControlFlags flags);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdEndQueryFunc)									(VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 query);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdResetQueryPoolFunc)								(VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdWriteTimestampFunc)								(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, deUint32 query);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdCopyQueryPoolResultsFunc)						(VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdPushConstantsFunc)								(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, deUint32 offset, deUint32 size, const void* pValues);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdBeginRenderPassFunc)							(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdNextSubpassFunc)								(VkCommandBuffer commandBuffer, VkSubpassContents contents);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdEndRenderPassFunc)								(VkCommandBuffer commandBuffer);
+typedef VKAPI_ATTR void					(VKAPI_CALL* CmdExecuteCommandsFunc)							(VkCommandBuffer commandBuffer, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers);
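+// Illustrative only: these typedefs exist for casting the result of the
+// loader's proc-address queries, e.g.
+//
+//   GetDeviceQueueFunc getDeviceQueue =
+//       (GetDeviceQueueFunc)getInstanceProcAddr(instance, "vkGetDeviceQueue");
+//
+// where 'getInstanceProcAddr' stands for a GetInstanceProcAddrFunc obtained
+// from the platform's Vulkan library (hypothetical name).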
diff --git a/external/vulkancts/framework/vulkan/vkGlslToSpirV.cpp b/external/vulkancts/framework/vulkan/vkGlslToSpirV.cpp
new file mode 100644
index 0000000..c98d50f
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkGlslToSpirV.cpp
@@ -0,0 +1,311 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief GLSL to SPIR-V.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkGlslToSpirV.hpp"
+#include "deArrayUtil.hpp"
+#include "deMemory.h"
+#include "deClock.h"
+#include "qpDebugOut.h"
+
+#if defined(DEQP_HAVE_GLSLANG)
+#	include "deSingleton.h"
+#	include "deMutex.hpp"
+
+#	include "SPIRV/GlslangToSpv.h"
+#	include "SPIRV/disassemble.h"
+#	include "SPIRV/doc.h"
+#	include "glslang/Include/InfoSink.h"
+#	include "glslang/Include/ShHandle.h"
+#	include "glslang/MachineIndependent/localintermediate.h"
+#	include "glslang/Public/ShaderLang.h"
+
+#endif
+
+namespace vk
+{
+
+using std::string;
+using std::vector;
+
+#if defined(DEQP_HAVE_GLSLANG)
+
+namespace
+{
+
+EShLanguage getGlslangStage (glu::ShaderType type)
+{
+	static const EShLanguage stageMap[] =
+	{
+		EShLangVertex,
+		EShLangFragment,
+		EShLangGeometry,
+		EShLangTessControl,
+		EShLangTessEvaluation,
+		EShLangCompute,
+	};
+	return de::getSizedArrayElement<glu::SHADERTYPE_LAST>(stageMap, type);
+}
+
+static volatile deSingletonState	s_glslangInitState	= DE_SINGLETON_STATE_NOT_INITIALIZED;
+static de::Mutex					s_glslangLock;
+
+void initGlslang (void*)
+{
+	// Main compiler
+	glslang::InitializeProcess();
+
+	// SPIR-V disassembly
+	spv::Parameterize();
+}
+
+void prepareGlslang (void)
+{
+	deInitSingleton(&s_glslangInitState, initGlslang, DE_NULL);
+}
+
+// \todo [2015-06-19 pyry] Specialize these per GLSL version
+
+// Fail compilation if more members are added to TLimits or TBuiltInResource
+struct LimitsSizeHelper_s			{ bool m0, m1, m2, m3, m4, m5, m6, m7, m8; };
+struct BuiltInResourceSizeHelper_s	{ int m[83]; LimitsSizeHelper_s l; };
+
+DE_STATIC_ASSERT(sizeof(TLimits)			== sizeof(LimitsSizeHelper_s));
+DE_STATIC_ASSERT(sizeof(TBuiltInResource)	== sizeof(BuiltInResourceSizeHelper_s));
+
+void getDefaultLimits (TLimits* limits)
+{
+	limits->nonInductiveForLoops					= true;
+	limits->whileLoops								= true;
+	limits->doWhileLoops							= true;
+	limits->generalUniformIndexing					= true;
+	limits->generalAttributeMatrixVectorIndexing	= true;
+	limits->generalVaryingIndexing					= true;
+	limits->generalSamplerIndexing					= true;
+	limits->generalVariableIndexing					= true;
+	limits->generalConstantMatrixVectorIndexing		= true;
+}
+
+void getDefaultBuiltInResources (TBuiltInResource* builtin)
+{
+	getDefaultLimits(&builtin->limits);
+
+	builtin->maxLights									= 32;
+	builtin->maxClipPlanes								= 6;
+	builtin->maxTextureUnits							= 32;
+	builtin->maxTextureCoords							= 32;
+	builtin->maxVertexAttribs							= 64;
+	builtin->maxVertexUniformComponents					= 4096;
+	builtin->maxVaryingFloats							= 64;
+	builtin->maxVertexTextureImageUnits					= 32;
+	builtin->maxCombinedTextureImageUnits				= 80;
+	builtin->maxTextureImageUnits						= 32;
+	builtin->maxFragmentUniformComponents				= 4096;
+	builtin->maxDrawBuffers								= 32;
+	builtin->maxVertexUniformVectors					= 128;
+	builtin->maxVaryingVectors							= 8;
+	builtin->maxFragmentUniformVectors					= 16;
+	builtin->maxVertexOutputVectors						= 16;
+	builtin->maxFragmentInputVectors					= 15;
+	builtin->minProgramTexelOffset						= -8;
+	builtin->maxProgramTexelOffset						= 7;
+	builtin->maxClipDistances							= 8;
+	builtin->maxComputeWorkGroupCountX					= 65535;
+	builtin->maxComputeWorkGroupCountY					= 65535;
+	builtin->maxComputeWorkGroupCountZ					= 65535;
+	builtin->maxComputeWorkGroupSizeX					= 1024;
+	builtin->maxComputeWorkGroupSizeY					= 1024;
+	builtin->maxComputeWorkGroupSizeZ					= 64;
+	builtin->maxComputeUniformComponents				= 1024;
+	builtin->maxComputeTextureImageUnits				= 16;
+	builtin->maxComputeImageUniforms					= 8;
+	builtin->maxComputeAtomicCounters					= 8;
+	builtin->maxComputeAtomicCounterBuffers				= 1;
+	builtin->maxVaryingComponents						= 60;
+	builtin->maxVertexOutputComponents					= 64;
+	builtin->maxGeometryInputComponents					= 64;
+	builtin->maxGeometryOutputComponents				= 128;
+	builtin->maxFragmentInputComponents					= 128;
+	builtin->maxImageUnits								= 8;
+	builtin->maxCombinedImageUnitsAndFragmentOutputs	= 8;
+	builtin->maxCombinedShaderOutputResources			= 8;
+	builtin->maxImageSamples							= 0;
+	builtin->maxVertexImageUniforms						= 0;
+	builtin->maxTessControlImageUniforms				= 0;
+	builtin->maxTessEvaluationImageUniforms				= 0;
+	builtin->maxGeometryImageUniforms					= 0;
+	builtin->maxFragmentImageUniforms					= 8;
+	builtin->maxCombinedImageUniforms					= 8;
+	builtin->maxGeometryTextureImageUnits				= 16;
+	builtin->maxGeometryOutputVertices					= 256;
+	builtin->maxGeometryTotalOutputComponents			= 1024;
+	builtin->maxGeometryUniformComponents				= 1024;
+	builtin->maxGeometryVaryingComponents				= 64;
+	builtin->maxTessControlInputComponents				= 128;
+	builtin->maxTessControlOutputComponents				= 128;
+	builtin->maxTessControlTextureImageUnits			= 16;
+	builtin->maxTessControlUniformComponents			= 1024;
+	builtin->maxTessControlTotalOutputComponents		= 4096;
+	builtin->maxTessEvaluationInputComponents			= 128;
+	builtin->maxTessEvaluationOutputComponents			= 128;
+	builtin->maxTessEvaluationTextureImageUnits			= 16;
+	builtin->maxTessEvaluationUniformComponents			= 1024;
+	builtin->maxTessPatchComponents						= 120;
+	builtin->maxPatchVertices							= 32;
+	builtin->maxTessGenLevel							= 64;
+	builtin->maxViewports								= 16;
+	builtin->maxVertexAtomicCounters					= 0;
+	builtin->maxTessControlAtomicCounters				= 0;
+	builtin->maxTessEvaluationAtomicCounters			= 0;
+	builtin->maxGeometryAtomicCounters					= 0;
+	builtin->maxFragmentAtomicCounters					= 8;
+	builtin->maxCombinedAtomicCounters					= 8;
+	builtin->maxAtomicCounterBindings					= 1;
+	builtin->maxVertexAtomicCounterBuffers				= 0;
+	builtin->maxTessControlAtomicCounterBuffers			= 0;
+	builtin->maxTessEvaluationAtomicCounterBuffers		= 0;
+	builtin->maxGeometryAtomicCounterBuffers			= 0;
+	builtin->maxFragmentAtomicCounterBuffers			= 1;
+	builtin->maxCombinedAtomicCounterBuffers			= 1;
+	builtin->maxAtomicCounterBufferSize					= 16384;
+	builtin->maxTransformFeedbackBuffers				= 4;
+	builtin->maxTransformFeedbackInterleavedComponents	= 64;
+	builtin->maxCullDistances							= 8;
+	builtin->maxCombinedClipAndCullDistances			= 8;
+	builtin->maxSamples									= 4;
+}
+
+} // anonymous
+
+void glslToSpirV (const glu::ProgramSources& program, std::vector<deUint8>* dst, glu::ShaderProgramInfo* buildInfo)
+{
+	TBuiltInResource	builtinRes;
+
+	prepareGlslang();
+	getDefaultBuiltInResources(&builtinRes);
+
+	// \note Compiles only the first shader found
+	for (int shaderType = 0; shaderType < glu::SHADERTYPE_LAST; shaderType++)
+	{
+		if (!program.sources[shaderType].empty())
+		{
+			const de::ScopedLock	compileLock			(s_glslangLock);
+			const std::string&		srcText				= program.sources[shaderType][0];
+			const char*				srcPtrs[]			= { srcText.c_str() };
+			const int				srcLengths[]		= { (int)srcText.size() };
+			vector<deUint32>		spvBlob;
+			const EShLanguage		shaderStage			= getGlslangStage(glu::ShaderType(shaderType));
+			glslang::TShader		shader				(shaderStage);
+			glslang::TProgram		glslangProgram;
+
+			shader.setStrings(srcPtrs, DE_LENGTH_OF_ARRAY(srcPtrs));
+			glslangProgram.addShader(&shader);
+
+			{
+				const deUint64	compileStartTime	= deGetMicroseconds();
+				const int		compileRes			= shader.parse(&builtinRes, 110, false, (EShMessages)(EShMsgSpvRules | EShMsgVulkanRules));
+				glu::ShaderInfo	shaderBuildInfo;
+
+				shaderBuildInfo.type			= (glu::ShaderType)shaderType;
+				shaderBuildInfo.source			= srcText;
+				shaderBuildInfo.infoLog			= shader.getInfoLog(); // \todo [2015-07-13 pyry] Include debug log?
+				shaderBuildInfo.compileTimeUs	= deGetMicroseconds()-compileStartTime;
+				shaderBuildInfo.compileOk		= (compileRes != 0);
+
+				buildInfo->shaders.push_back(shaderBuildInfo);
+
+				if (compileRes == 0)
+					TCU_FAIL("Failed to compile shader");
+			}
+
+			{
+				const deUint64	linkStartTime	= deGetMicroseconds();
+				const int		linkRes			= glslangProgram.link((EShMessages)(EShMsgSpvRules | EShMsgVulkanRules));
+
+				buildInfo->program.infoLog		= glslangProgram.getInfoLog(); // \todo [2015-11-05 scygan] Include debug log?
+				buildInfo->program.linkOk		= (linkRes != 0);
+				buildInfo->program.linkTimeUs	= deGetMicroseconds()-linkStartTime;
+
+				if (linkRes == 0)
+					TCU_FAIL("Failed to link shader");
+			}
+
+			{
+				const glslang::TIntermediate* const	intermediate	= glslangProgram.getIntermediate(shaderStage);
+				glslang::GlslangToSpv(*intermediate, spvBlob);
+			}
+
+			dst->resize(spvBlob.size() * sizeof(deUint32));
+#if (DE_ENDIANNESS == DE_LITTLE_ENDIAN)
+			deMemcpy(&(*dst)[0], &spvBlob[0], dst->size());
+#else
+#	error "Big-endian not supported"
+#endif
+
+			return;
+		}
+	}
+
+	TCU_THROW(InternalError, "Can't compile empty program");
+}
+
+void disassembleSpirV (size_t binarySize, const deUint8* binary, std::ostream* dst)
+{
+	std::vector<deUint32>	binForDisasm	(binarySize/4);
+
+	DE_ASSERT(binarySize%4 == 0);
+
+#if (DE_ENDIANNESS == DE_LITTLE_ENDIAN)
+	deMemcpy(&binForDisasm[0], binary, binarySize);
+#else
+#	error "Big-endian not supported"
+#endif
+
+	spv::Disassemble(*dst, binForDisasm);
+}
+
+#else // defined(DEQP_HAVE_GLSLANG)
+
+void glslToSpirV (const glu::ProgramSources&, std::vector<deUint8>*, glu::ShaderProgramInfo*)
+{
+	TCU_THROW(NotSupportedError, "GLSL to SPIR-V compilation not supported (DEQP_HAVE_GLSLANG not defined)");
+}
+
+void disassembleSpirV (size_t, const deUint8*, std::ostream*)
+{
+	TCU_THROW(NotSupportedError, "SPIR-V disassembling not supported (DEQP_HAVE_GLSLANG not defined)");
+}
+
+#endif
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkGlslToSpirV.hpp b/external/vulkancts/framework/vulkan/vkGlslToSpirV.hpp
new file mode 100644
index 0000000..df4513b
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkGlslToSpirV.hpp
@@ -0,0 +1,54 @@
+#ifndef _VKGLSLTOSPIRV_HPP
+#define _VKGLSLTOSPIRV_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief GLSL to SPIR-V.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vkPrograms.hpp"
+#include "gluShaderProgram.hpp"
+
+#include <ostream>
+
+namespace vk
+{
+
+//! Compile GLSL program to SPIR-V. Will fail with NotSupportedError if compiler is not available.
+void	glslToSpirV			(const glu::ProgramSources& src, std::vector<deUint8>* dst, glu::ShaderProgramInfo* buildInfo);
+
+//! Disassemble SPIR-V binary
+void	disassembleSpirV	(size_t binarySize, const deUint8* binary, std::ostream* dst);
+
+} // vk
+
+#endif // _VKGLSLTOSPIRV_HPP
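+// Usage sketch (illustrative only; glu::makeVtxFragSources is assumed to be
+// available from the GL shader utilities):
+//
+//   std::vector<deUint8>   binary;
+//   glu::ShaderProgramInfo buildInfo;
+//   glslToSpirV(glu::makeVtxFragSources(vertSrc, fragSrc), &binary, &buildInfo);
+//   disassembleSpirV(binary.size(), &binary[0], &std::cout);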
diff --git a/external/vulkancts/framework/vulkan/vkHandleType.inl b/external/vulkancts/framework/vulkan/vkHandleType.inl
new file mode 100644
index 0000000..d82a730
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkHandleType.inl
@@ -0,0 +1,37 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+enum HandleType
+{
+	HANDLE_TYPE_INSTANCE = 0,
+	HANDLE_TYPE_PHYSICAL_DEVICE,
+	HANDLE_TYPE_DEVICE,
+	HANDLE_TYPE_QUEUE,
+	HANDLE_TYPE_SEMAPHORE,
+	HANDLE_TYPE_COMMAND_BUFFER,
+	HANDLE_TYPE_FENCE,
+	HANDLE_TYPE_DEVICE_MEMORY,
+	HANDLE_TYPE_BUFFER,
+	HANDLE_TYPE_IMAGE,
+	HANDLE_TYPE_EVENT,
+	HANDLE_TYPE_QUERY_POOL,
+	HANDLE_TYPE_BUFFER_VIEW,
+	HANDLE_TYPE_IMAGE_VIEW,
+	HANDLE_TYPE_SHADER_MODULE,
+	HANDLE_TYPE_PIPELINE_CACHE,
+	HANDLE_TYPE_PIPELINE_LAYOUT,
+	HANDLE_TYPE_RENDER_PASS,
+	HANDLE_TYPE_PIPELINE,
+	HANDLE_TYPE_DESCRIPTOR_SET_LAYOUT,
+	HANDLE_TYPE_SAMPLER,
+	HANDLE_TYPE_DESCRIPTOR_POOL,
+	HANDLE_TYPE_DESCRIPTOR_SET,
+	HANDLE_TYPE_FRAMEBUFFER,
+	HANDLE_TYPE_COMMAND_POOL,
+	HANDLE_TYPE_SURFACE_KHR,
+	HANDLE_TYPE_SWAPCHAIN_KHR,
+	HANDLE_TYPE_DISPLAY_KHR,
+	HANDLE_TYPE_DISPLAY_MODE_KHR,
+	HANDLE_TYPE_LAST
+};
+
diff --git a/external/vulkancts/framework/vulkan/vkImageUtil.cpp b/external/vulkancts/framework/vulkan/vkImageUtil.cpp
new file mode 100644
index 0000000..3f42d06
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkImageUtil.cpp
@@ -0,0 +1,1009 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Utilities for images.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkImageUtil.hpp"
+#include "tcuTextureUtil.hpp"
+
+namespace vk
+{
+
+bool isFloatFormat (VkFormat format)
+{
+	return tcu::getTextureChannelClass(mapVkFormat(format).type) == tcu::TEXTURECHANNELCLASS_FLOATING_POINT;
+}
+
+bool isUnormFormat (VkFormat format)
+{
+	return tcu::getTextureChannelClass(mapVkFormat(format).type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT;
+}
+
+bool isSnormFormat (VkFormat format)
+{
+	return tcu::getTextureChannelClass(mapVkFormat(format).type) == tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT;
+}
+
+bool isIntFormat (VkFormat format)
+{
+	return tcu::getTextureChannelClass(mapVkFormat(format).type) == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER;
+}
+
+bool isUintFormat (VkFormat format)
+{
+	return tcu::getTextureChannelClass(mapVkFormat(format).type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER;
+}
+
+bool isDepthStencilFormat (VkFormat format)
+{
+	if (isCompressedFormat(format))
+		return false;
+
+	const tcu::TextureFormat tcuFormat = mapVkFormat(format);
+	return tcuFormat.order == tcu::TextureFormat::D || tcuFormat.order == tcu::TextureFormat::S || tcuFormat.order == tcu::TextureFormat::DS;
+}
+
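+// Illustrative only: the predicates above classify a VkFormat through the
+// channel class or order of its mapped tcu::TextureFormat, e.g.
+//
+//   DE_ASSERT(isFloatFormat(VK_FORMAT_R32_SFLOAT));
+//   DE_ASSERT(isUnormFormat(VK_FORMAT_R8G8B8A8_UNORM));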
+bool isCompressedFormat (VkFormat format)
+{
+	// update this mapping if VkFormat changes
+	DE_STATIC_ASSERT(VK_FORMAT_LAST == 185);
+
+	switch (format)
+	{
+		case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
+		case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
+		case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
+		case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
+		case VK_FORMAT_BC2_UNORM_BLOCK:
+		case VK_FORMAT_BC2_SRGB_BLOCK:
+		case VK_FORMAT_BC3_UNORM_BLOCK:
+		case VK_FORMAT_BC3_SRGB_BLOCK:
+		case VK_FORMAT_BC4_UNORM_BLOCK:
+		case VK_FORMAT_BC4_SNORM_BLOCK:
+		case VK_FORMAT_BC5_UNORM_BLOCK:
+		case VK_FORMAT_BC5_SNORM_BLOCK:
+		case VK_FORMAT_BC6H_UFLOAT_BLOCK:
+		case VK_FORMAT_BC6H_SFLOAT_BLOCK:
+		case VK_FORMAT_BC7_UNORM_BLOCK:
+		case VK_FORMAT_BC7_SRGB_BLOCK:
+		case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+		case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+		case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+		case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+		case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+		case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+		case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+		case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+		case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+		case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+		case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+		case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+		case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+		case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+		case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+		case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+		case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+		case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+		case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+		case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+		case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+		case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+		case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+		case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+		case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
+			return true;
+
+		default:
+			return false;
+	}
+}
+
+VkFormat mapTextureFormat (const tcu::TextureFormat& format)
+{
+	DE_STATIC_ASSERT(tcu::TextureFormat::CHANNELORDER_LAST < (1<<16));
+	DE_STATIC_ASSERT(tcu::TextureFormat::CHANNELTYPE_LAST < (1<<16));
+
+#define PACK_FMT(ORDER, TYPE) ((int(ORDER) << 16) | int(TYPE))
+#define FMT_CASE(ORDER, TYPE) PACK_FMT(tcu::TextureFormat::ORDER, tcu::TextureFormat::TYPE)
+
+	// update this mapping if VkFormat changes
+	DE_STATIC_ASSERT(VK_FORMAT_LAST == 185);
+
+	switch (PACK_FMT(format.order, format.type))
+	{
+		case FMT_CASE(RG, UNORM_BYTE_44):					return VK_FORMAT_R4G4_UNORM_PACK8;
+		case FMT_CASE(RGB, UNORM_SHORT_565):				return VK_FORMAT_R5G6B5_UNORM_PACK16;
+		case FMT_CASE(RGBA, UNORM_SHORT_4444):				return VK_FORMAT_R4G4B4A4_UNORM_PACK16;
+		case FMT_CASE(RGBA, UNORM_SHORT_5551):				return VK_FORMAT_R5G5B5A1_UNORM_PACK16;
+
+		case FMT_CASE(BGR, UNORM_SHORT_565):				return VK_FORMAT_B5G6R5_UNORM_PACK16;
+		case FMT_CASE(BGRA, UNORM_SHORT_4444):				return VK_FORMAT_B4G4R4A4_UNORM_PACK16;
+		case FMT_CASE(BGRA, UNORM_SHORT_5551):				return VK_FORMAT_B5G5R5A1_UNORM_PACK16;
+
+		case FMT_CASE(ARGB, UNORM_SHORT_1555):				return VK_FORMAT_A1R5G5B5_UNORM_PACK16;
+
+		case FMT_CASE(R, UNORM_INT8):						return VK_FORMAT_R8_UNORM;
+		case FMT_CASE(R, SNORM_INT8):						return VK_FORMAT_R8_SNORM;
+		case FMT_CASE(R, UNSIGNED_INT8):					return VK_FORMAT_R8_UINT;
+		case FMT_CASE(R, SIGNED_INT8):						return VK_FORMAT_R8_SINT;
+		case FMT_CASE(sR, UNORM_INT8):						return VK_FORMAT_R8_SRGB;
+
+		case FMT_CASE(RG, UNORM_INT8):						return VK_FORMAT_R8G8_UNORM;
+		case FMT_CASE(RG, SNORM_INT8):						return VK_FORMAT_R8G8_SNORM;
+		case FMT_CASE(RG, UNSIGNED_INT8):					return VK_FORMAT_R8G8_UINT;
+		case FMT_CASE(RG, SIGNED_INT8):						return VK_FORMAT_R8G8_SINT;
+		case FMT_CASE(sRG, UNORM_INT8):						return VK_FORMAT_R8G8_SRGB;
+
+		case FMT_CASE(RGB, UNORM_INT8):						return VK_FORMAT_R8G8B8_UNORM;
+		case FMT_CASE(RGB, SNORM_INT8):						return VK_FORMAT_R8G8B8_SNORM;
+		case FMT_CASE(RGB, UNSIGNED_INT8):					return VK_FORMAT_R8G8B8_UINT;
+		case FMT_CASE(RGB, SIGNED_INT8):					return VK_FORMAT_R8G8B8_SINT;
+		case FMT_CASE(sRGB, UNORM_INT8):					return VK_FORMAT_R8G8B8_SRGB;
+
+		case FMT_CASE(RGBA, UNORM_INT8):					return VK_FORMAT_R8G8B8A8_UNORM;
+		case FMT_CASE(RGBA, SNORM_INT8):					return VK_FORMAT_R8G8B8A8_SNORM;
+		case FMT_CASE(RGBA, UNSIGNED_INT8):					return VK_FORMAT_R8G8B8A8_UINT;
+		case FMT_CASE(RGBA, SIGNED_INT8):					return VK_FORMAT_R8G8B8A8_SINT;
+		case FMT_CASE(sRGBA, UNORM_INT8):					return VK_FORMAT_R8G8B8A8_SRGB;
+
+		case FMT_CASE(RGBA, UNORM_INT_1010102_REV):			return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
+		case FMT_CASE(RGBA, SNORM_INT_1010102_REV):			return VK_FORMAT_A2B10G10R10_SNORM_PACK32;
+		case FMT_CASE(RGBA, UNSIGNED_INT_1010102_REV):		return VK_FORMAT_A2B10G10R10_UINT_PACK32;
+		case FMT_CASE(RGBA, SIGNED_INT_1010102_REV):		return VK_FORMAT_A2B10G10R10_SINT_PACK32;
+
+		case FMT_CASE(R, UNORM_INT16):						return VK_FORMAT_R16_UNORM;
+		case FMT_CASE(R, SNORM_INT16):						return VK_FORMAT_R16_SNORM;
+		case FMT_CASE(R, UNSIGNED_INT16):					return VK_FORMAT_R16_UINT;
+		case FMT_CASE(R, SIGNED_INT16):						return VK_FORMAT_R16_SINT;
+		case FMT_CASE(R, HALF_FLOAT):						return VK_FORMAT_R16_SFLOAT;
+
+		case FMT_CASE(RG, UNORM_INT16):						return VK_FORMAT_R16G16_UNORM;
+		case FMT_CASE(RG, SNORM_INT16):						return VK_FORMAT_R16G16_SNORM;
+		case FMT_CASE(RG, UNSIGNED_INT16):					return VK_FORMAT_R16G16_UINT;
+		case FMT_CASE(RG, SIGNED_INT16):					return VK_FORMAT_R16G16_SINT;
+		case FMT_CASE(RG, HALF_FLOAT):						return VK_FORMAT_R16G16_SFLOAT;
+
+		case FMT_CASE(RGB, UNORM_INT16):					return VK_FORMAT_R16G16B16_UNORM;
+		case FMT_CASE(RGB, SNORM_INT16):					return VK_FORMAT_R16G16B16_SNORM;
+		case FMT_CASE(RGB, UNSIGNED_INT16):					return VK_FORMAT_R16G16B16_UINT;
+		case FMT_CASE(RGB, SIGNED_INT16):					return VK_FORMAT_R16G16B16_SINT;
+		case FMT_CASE(RGB, HALF_FLOAT):						return VK_FORMAT_R16G16B16_SFLOAT;
+
+		case FMT_CASE(RGBA, UNORM_INT16):					return VK_FORMAT_R16G16B16A16_UNORM;
+		case FMT_CASE(RGBA, SNORM_INT16):					return VK_FORMAT_R16G16B16A16_SNORM;
+		case FMT_CASE(RGBA, UNSIGNED_INT16):				return VK_FORMAT_R16G16B16A16_UINT;
+		case FMT_CASE(RGBA, SIGNED_INT16):					return VK_FORMAT_R16G16B16A16_SINT;
+		case FMT_CASE(RGBA, HALF_FLOAT):					return VK_FORMAT_R16G16B16A16_SFLOAT;
+
+		case FMT_CASE(R, UNSIGNED_INT32):					return VK_FORMAT_R32_UINT;
+		case FMT_CASE(R, SIGNED_INT32):						return VK_FORMAT_R32_SINT;
+		case FMT_CASE(R, FLOAT):							return VK_FORMAT_R32_SFLOAT;
+
+		case FMT_CASE(RG, UNSIGNED_INT32):					return VK_FORMAT_R32G32_UINT;
+		case FMT_CASE(RG, SIGNED_INT32):					return VK_FORMAT_R32G32_SINT;
+		case FMT_CASE(RG, FLOAT):							return VK_FORMAT_R32G32_SFLOAT;
+
+		case FMT_CASE(RGB, UNSIGNED_INT32):					return VK_FORMAT_R32G32B32_UINT;
+		case FMT_CASE(RGB, SIGNED_INT32):					return VK_FORMAT_R32G32B32_SINT;
+		case FMT_CASE(RGB, FLOAT):							return VK_FORMAT_R32G32B32_SFLOAT;
+
+		case FMT_CASE(RGBA, UNSIGNED_INT32):				return VK_FORMAT_R32G32B32A32_UINT;
+		case FMT_CASE(RGBA, SIGNED_INT32):					return VK_FORMAT_R32G32B32A32_SINT;
+		case FMT_CASE(RGBA, FLOAT):							return VK_FORMAT_R32G32B32A32_SFLOAT;
+
+		case FMT_CASE(R, FLOAT64):							return VK_FORMAT_R64_SFLOAT;
+		case FMT_CASE(RG, FLOAT64):							return VK_FORMAT_R64G64_SFLOAT;
+		case FMT_CASE(RGB, FLOAT64):						return VK_FORMAT_R64G64B64_SFLOAT;
+		case FMT_CASE(RGBA, FLOAT64):						return VK_FORMAT_R64G64B64A64_SFLOAT;
+
+		case FMT_CASE(RGB, UNSIGNED_INT_11F_11F_10F_REV):	return VK_FORMAT_B10G11R11_UFLOAT_PACK32;
+		case FMT_CASE(RGB, UNSIGNED_INT_999_E5_REV):		return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32;
+
+		case FMT_CASE(BGR, UNORM_INT8):						return VK_FORMAT_B8G8R8_UNORM;
+		case FMT_CASE(BGR, SNORM_INT8):						return VK_FORMAT_B8G8R8_SNORM;
+		case FMT_CASE(BGR, UNSIGNED_INT8):					return VK_FORMAT_B8G8R8_UINT;
+		case FMT_CASE(BGR, SIGNED_INT8):					return VK_FORMAT_B8G8R8_SINT;
+		case FMT_CASE(sBGR, UNORM_INT8):					return VK_FORMAT_B8G8R8_SRGB;
+
+		case FMT_CASE(BGRA, UNORM_INT8):					return VK_FORMAT_B8G8R8A8_UNORM;
+		case FMT_CASE(BGRA, SNORM_INT8):					return VK_FORMAT_B8G8R8A8_SNORM;
+		case FMT_CASE(BGRA, UNSIGNED_INT8):					return VK_FORMAT_B8G8R8A8_UINT;
+		case FMT_CASE(BGRA, SIGNED_INT8):					return VK_FORMAT_B8G8R8A8_SINT;
+		case FMT_CASE(sBGRA, UNORM_INT8):					return VK_FORMAT_B8G8R8A8_SRGB;
+
+		case FMT_CASE(BGRA, UNORM_INT_1010102_REV):			return VK_FORMAT_A2R10G10B10_UNORM_PACK32;
+		case FMT_CASE(BGRA, SNORM_INT_1010102_REV):			return VK_FORMAT_A2R10G10B10_SNORM_PACK32;
+		case FMT_CASE(BGRA, UNSIGNED_INT_1010102_REV):		return VK_FORMAT_A2R10G10B10_UINT_PACK32;
+		case FMT_CASE(BGRA, SIGNED_INT_1010102_REV):		return VK_FORMAT_A2R10G10B10_SINT_PACK32;
+
+		case FMT_CASE(D, UNORM_INT16):						return VK_FORMAT_D16_UNORM;
+		case FMT_CASE(D, UNSIGNED_INT_24_8_REV):			return VK_FORMAT_X8_D24_UNORM_PACK32;
+		case FMT_CASE(D, FLOAT):							return VK_FORMAT_D32_SFLOAT;
+
+		case FMT_CASE(S, UNSIGNED_INT8):					return VK_FORMAT_S8_UINT;
+
+		case FMT_CASE(DS, UNSIGNED_INT_16_8_8):				return VK_FORMAT_D16_UNORM_S8_UINT;
+		case FMT_CASE(DS, UNSIGNED_INT_24_8_REV):			return VK_FORMAT_D24_UNORM_S8_UINT;
+		case FMT_CASE(DS, FLOAT_UNSIGNED_INT_24_8_REV):		return VK_FORMAT_D32_SFLOAT_S8_UINT;
+
+		default:
+			TCU_THROW(InternalError, "Unknown texture format");
+	}
+
+#undef PACK_FMT
+#undef FMT_CASE
+}
+
+tcu::TextureFormat mapVkFormat (VkFormat format)
+{
+	using tcu::TextureFormat;
+
+	// update this mapping if VkFormat changes
+	DE_STATIC_ASSERT(VK_FORMAT_LAST == 185);
+
+	switch (format)
+	{
+		case VK_FORMAT_R4G4_UNORM_PACK8:		return TextureFormat(TextureFormat::RG,		TextureFormat::UNORM_BYTE_44);
+		case VK_FORMAT_R5G6B5_UNORM_PACK16:		return TextureFormat(TextureFormat::RGB,	TextureFormat::UNORM_SHORT_565);
+		case VK_FORMAT_R4G4B4A4_UNORM_PACK16:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNORM_SHORT_4444);
+		case VK_FORMAT_R5G5B5A1_UNORM_PACK16:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNORM_SHORT_5551);
+
+		case VK_FORMAT_B5G6R5_UNORM_PACK16:		return TextureFormat(TextureFormat::BGR,	TextureFormat::UNORM_SHORT_565);
+		case VK_FORMAT_B4G4R4A4_UNORM_PACK16:	return TextureFormat(TextureFormat::BGRA,	TextureFormat::UNORM_SHORT_4444);
+		case VK_FORMAT_B5G5R5A1_UNORM_PACK16:	return TextureFormat(TextureFormat::BGRA,	TextureFormat::UNORM_SHORT_5551);
+
+		case VK_FORMAT_A1R5G5B5_UNORM_PACK16:	return TextureFormat(TextureFormat::ARGB,	TextureFormat::UNORM_SHORT_1555);
+
+		case VK_FORMAT_R8_UNORM:				return TextureFormat(TextureFormat::R,		TextureFormat::UNORM_INT8);
+		case VK_FORMAT_R8_SNORM:				return TextureFormat(TextureFormat::R,		TextureFormat::SNORM_INT8);
+		case VK_FORMAT_R8_USCALED:				return TextureFormat(TextureFormat::R,		TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_R8_SSCALED:				return TextureFormat(TextureFormat::R,		TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_R8_UINT:					return TextureFormat(TextureFormat::R,		TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_R8_SINT:					return TextureFormat(TextureFormat::R,		TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_R8_SRGB:					return TextureFormat(TextureFormat::sR,		TextureFormat::UNORM_INT8);
+
+		case VK_FORMAT_R8G8_UNORM:				return TextureFormat(TextureFormat::RG,		TextureFormat::UNORM_INT8);
+		case VK_FORMAT_R8G8_SNORM:				return TextureFormat(TextureFormat::RG,		TextureFormat::SNORM_INT8);
+		case VK_FORMAT_R8G8_USCALED:			return TextureFormat(TextureFormat::RG,		TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_R8G8_SSCALED:			return TextureFormat(TextureFormat::RG,		TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_R8G8_UINT:				return TextureFormat(TextureFormat::RG,		TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_R8G8_SINT:				return TextureFormat(TextureFormat::RG,		TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_R8G8_SRGB:				return TextureFormat(TextureFormat::sRG,	TextureFormat::UNORM_INT8);
+
+		case VK_FORMAT_R8G8B8_UNORM:			return TextureFormat(TextureFormat::RGB,	TextureFormat::UNORM_INT8);
+		case VK_FORMAT_R8G8B8_SNORM:			return TextureFormat(TextureFormat::RGB,	TextureFormat::SNORM_INT8);
+		case VK_FORMAT_R8G8B8_USCALED:			return TextureFormat(TextureFormat::RGB,	TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_R8G8B8_SSCALED:			return TextureFormat(TextureFormat::RGB,	TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_R8G8B8_UINT:				return TextureFormat(TextureFormat::RGB,	TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_R8G8B8_SINT:				return TextureFormat(TextureFormat::RGB,	TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_R8G8B8_SRGB:				return TextureFormat(TextureFormat::sRGB,	TextureFormat::UNORM_INT8);
+
+		case VK_FORMAT_R8G8B8A8_UNORM:			return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNORM_INT8);
+		case VK_FORMAT_R8G8B8A8_SNORM:			return TextureFormat(TextureFormat::RGBA,	TextureFormat::SNORM_INT8);
+		case VK_FORMAT_R8G8B8A8_USCALED:		return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_R8G8B8A8_SSCALED:		return TextureFormat(TextureFormat::RGBA,	TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_R8G8B8A8_UINT:			return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_R8G8B8A8_SINT:			return TextureFormat(TextureFormat::RGBA,	TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_R8G8B8A8_SRGB:			return TextureFormat(TextureFormat::sRGBA,	TextureFormat::UNORM_INT8);
+
+		case VK_FORMAT_R16_UNORM:				return TextureFormat(TextureFormat::R,		TextureFormat::UNORM_INT16);
+		case VK_FORMAT_R16_SNORM:				return TextureFormat(TextureFormat::R,		TextureFormat::SNORM_INT16);
+		case VK_FORMAT_R16_USCALED:				return TextureFormat(TextureFormat::R,		TextureFormat::UNSIGNED_INT16);
+		case VK_FORMAT_R16_SSCALED:				return TextureFormat(TextureFormat::R,		TextureFormat::SIGNED_INT16);
+		case VK_FORMAT_R16_UINT:				return TextureFormat(TextureFormat::R,		TextureFormat::UNSIGNED_INT16);
+		case VK_FORMAT_R16_SINT:				return TextureFormat(TextureFormat::R,		TextureFormat::SIGNED_INT16);
+		case VK_FORMAT_R16_SFLOAT:				return TextureFormat(TextureFormat::R,		TextureFormat::HALF_FLOAT);
+
+		case VK_FORMAT_R16G16_UNORM:			return TextureFormat(TextureFormat::RG,		TextureFormat::UNORM_INT16);
+		case VK_FORMAT_R16G16_SNORM:			return TextureFormat(TextureFormat::RG,		TextureFormat::SNORM_INT16);
+		case VK_FORMAT_R16G16_USCALED:			return TextureFormat(TextureFormat::RG,		TextureFormat::UNSIGNED_INT16);
+		case VK_FORMAT_R16G16_SSCALED:			return TextureFormat(TextureFormat::RG,		TextureFormat::SIGNED_INT16);
+		case VK_FORMAT_R16G16_UINT:				return TextureFormat(TextureFormat::RG,		TextureFormat::UNSIGNED_INT16);
+		case VK_FORMAT_R16G16_SINT:				return TextureFormat(TextureFormat::RG,		TextureFormat::SIGNED_INT16);
+		case VK_FORMAT_R16G16_SFLOAT:			return TextureFormat(TextureFormat::RG,		TextureFormat::HALF_FLOAT);
+
+		case VK_FORMAT_R16G16B16_UNORM:			return TextureFormat(TextureFormat::RGB,	TextureFormat::UNORM_INT16);
+		case VK_FORMAT_R16G16B16_SNORM:			return TextureFormat(TextureFormat::RGB,	TextureFormat::SNORM_INT16);
+		case VK_FORMAT_R16G16B16_USCALED:		return TextureFormat(TextureFormat::RGB,	TextureFormat::UNSIGNED_INT16);
+		case VK_FORMAT_R16G16B16_SSCALED:		return TextureFormat(TextureFormat::RGB,	TextureFormat::SIGNED_INT16);
+		case VK_FORMAT_R16G16B16_UINT:			return TextureFormat(TextureFormat::RGB,	TextureFormat::UNSIGNED_INT16);
+		case VK_FORMAT_R16G16B16_SINT:			return TextureFormat(TextureFormat::RGB,	TextureFormat::SIGNED_INT16);
+		case VK_FORMAT_R16G16B16_SFLOAT:		return TextureFormat(TextureFormat::RGB,	TextureFormat::HALF_FLOAT);
+
+		case VK_FORMAT_R16G16B16A16_UNORM:		return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNORM_INT16);
+		case VK_FORMAT_R16G16B16A16_SNORM:		return TextureFormat(TextureFormat::RGBA,	TextureFormat::SNORM_INT16);
+		case VK_FORMAT_R16G16B16A16_USCALED:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNSIGNED_INT16);
+		case VK_FORMAT_R16G16B16A16_SSCALED:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::SIGNED_INT16);
+		case VK_FORMAT_R16G16B16A16_UINT:		return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNSIGNED_INT16);
+		case VK_FORMAT_R16G16B16A16_SINT:		return TextureFormat(TextureFormat::RGBA,	TextureFormat::SIGNED_INT16);
+		case VK_FORMAT_R16G16B16A16_SFLOAT:		return TextureFormat(TextureFormat::RGBA,	TextureFormat::HALF_FLOAT);
+
+		case VK_FORMAT_R32_UINT:				return TextureFormat(TextureFormat::R,		TextureFormat::UNSIGNED_INT32);
+		case VK_FORMAT_R32_SINT:				return TextureFormat(TextureFormat::R,		TextureFormat::SIGNED_INT32);
+		case VK_FORMAT_R32_SFLOAT:				return TextureFormat(TextureFormat::R,		TextureFormat::FLOAT);
+
+		case VK_FORMAT_R32G32_UINT:				return TextureFormat(TextureFormat::RG,		TextureFormat::UNSIGNED_INT32);
+		case VK_FORMAT_R32G32_SINT:				return TextureFormat(TextureFormat::RG,		TextureFormat::SIGNED_INT32);
+		case VK_FORMAT_R32G32_SFLOAT:			return TextureFormat(TextureFormat::RG,		TextureFormat::FLOAT);
+
+		case VK_FORMAT_R32G32B32_UINT:			return TextureFormat(TextureFormat::RGB,	TextureFormat::UNSIGNED_INT32);
+		case VK_FORMAT_R32G32B32_SINT:			return TextureFormat(TextureFormat::RGB,	TextureFormat::SIGNED_INT32);
+		case VK_FORMAT_R32G32B32_SFLOAT:		return TextureFormat(TextureFormat::RGB,	TextureFormat::FLOAT);
+
+		case VK_FORMAT_R32G32B32A32_UINT:		return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNSIGNED_INT32);
+		case VK_FORMAT_R32G32B32A32_SINT:		return TextureFormat(TextureFormat::RGBA,	TextureFormat::SIGNED_INT32);
+		case VK_FORMAT_R32G32B32A32_SFLOAT:		return TextureFormat(TextureFormat::RGBA,	TextureFormat::FLOAT);
+
+		case VK_FORMAT_R64_SFLOAT:				return TextureFormat(TextureFormat::R,		TextureFormat::FLOAT64);
+		case VK_FORMAT_R64G64_SFLOAT:			return TextureFormat(TextureFormat::RG,		TextureFormat::FLOAT64);
+		case VK_FORMAT_R64G64B64_SFLOAT:		return TextureFormat(TextureFormat::RGB,	TextureFormat::FLOAT64);
+		case VK_FORMAT_R64G64B64A64_SFLOAT:		return TextureFormat(TextureFormat::RGBA,	TextureFormat::FLOAT64);
+
+		case VK_FORMAT_B10G11R11_UFLOAT_PACK32:	return TextureFormat(TextureFormat::RGB,	TextureFormat::UNSIGNED_INT_11F_11F_10F_REV);
+		case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:	return TextureFormat(TextureFormat::RGB,	TextureFormat::UNSIGNED_INT_999_E5_REV);
+
+		case VK_FORMAT_B8G8R8_UNORM:			return TextureFormat(TextureFormat::BGR,	TextureFormat::UNORM_INT8);
+		case VK_FORMAT_B8G8R8_SNORM:			return TextureFormat(TextureFormat::BGR,	TextureFormat::SNORM_INT8);
+		case VK_FORMAT_B8G8R8_USCALED:			return TextureFormat(TextureFormat::BGR,	TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_B8G8R8_SSCALED:			return TextureFormat(TextureFormat::BGR,	TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_B8G8R8_UINT:				return TextureFormat(TextureFormat::BGR,	TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_B8G8R8_SINT:				return TextureFormat(TextureFormat::BGR,	TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_B8G8R8_SRGB:				return TextureFormat(TextureFormat::sBGR,	TextureFormat::UNORM_INT8);
+
+		case VK_FORMAT_B8G8R8A8_UNORM:			return TextureFormat(TextureFormat::BGRA,	TextureFormat::UNORM_INT8);
+		case VK_FORMAT_B8G8R8A8_SNORM:			return TextureFormat(TextureFormat::BGRA,	TextureFormat::SNORM_INT8);
+		case VK_FORMAT_B8G8R8A8_USCALED:		return TextureFormat(TextureFormat::BGRA,	TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_B8G8R8A8_SSCALED:		return TextureFormat(TextureFormat::BGRA,	TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_B8G8R8A8_UINT:			return TextureFormat(TextureFormat::BGRA,	TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_B8G8R8A8_SINT:			return TextureFormat(TextureFormat::BGRA,	TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_B8G8R8A8_SRGB:			return TextureFormat(TextureFormat::sBGRA,	TextureFormat::UNORM_INT8);
+
+		case VK_FORMAT_D16_UNORM:				return TextureFormat(TextureFormat::D,		TextureFormat::UNORM_INT16);
+		case VK_FORMAT_X8_D24_UNORM_PACK32:		return TextureFormat(TextureFormat::D,		TextureFormat::UNSIGNED_INT_24_8_REV);
+		case VK_FORMAT_D32_SFLOAT:				return TextureFormat(TextureFormat::D,		TextureFormat::FLOAT);
+
+		case VK_FORMAT_S8_UINT:					return TextureFormat(TextureFormat::S,		TextureFormat::UNSIGNED_INT8);
+
+		// \note There is no standard interleaved memory layout for DS formats; buffer-image copies
+		//		 will always operate on only the D or the S aspect. See Khronos bug 12998.
+		case VK_FORMAT_D16_UNORM_S8_UINT:		return TextureFormat(TextureFormat::DS,		TextureFormat::UNSIGNED_INT_16_8_8);
+		case VK_FORMAT_D24_UNORM_S8_UINT:		return TextureFormat(TextureFormat::DS,		TextureFormat::UNSIGNED_INT_24_8_REV);
+		case VK_FORMAT_D32_SFLOAT_S8_UINT:		return TextureFormat(TextureFormat::DS,		TextureFormat::FLOAT_UNSIGNED_INT_24_8_REV);
+
+#if (DE_ENDIANNESS == DE_LITTLE_ENDIAN)
+		case VK_FORMAT_A8B8G8R8_UNORM_PACK32:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNORM_INT8);
+		case VK_FORMAT_A8B8G8R8_SNORM_PACK32:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::SNORM_INT8);
+		case VK_FORMAT_A8B8G8R8_USCALED_PACK32:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_A8B8G8R8_UINT_PACK32:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNSIGNED_INT8);
+		case VK_FORMAT_A8B8G8R8_SINT_PACK32:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::SIGNED_INT8);
+		case VK_FORMAT_A8B8G8R8_SRGB_PACK32:	return TextureFormat(TextureFormat::sRGBA,	TextureFormat::UNORM_INT8);
+#else
+#	error "Big-endian not supported"
+#endif
+
+		case VK_FORMAT_A2R10G10B10_UNORM_PACK32:	return TextureFormat(TextureFormat::BGRA,	TextureFormat::UNORM_INT_1010102_REV);
+		case VK_FORMAT_A2R10G10B10_SNORM_PACK32:	return TextureFormat(TextureFormat::BGRA,	TextureFormat::SNORM_INT_1010102_REV);
+		case VK_FORMAT_A2R10G10B10_USCALED_PACK32:	return TextureFormat(TextureFormat::BGRA,	TextureFormat::UNSIGNED_INT_1010102_REV);
+		case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:	return TextureFormat(TextureFormat::BGRA,	TextureFormat::SIGNED_INT_1010102_REV);
+		case VK_FORMAT_A2R10G10B10_UINT_PACK32:		return TextureFormat(TextureFormat::BGRA,	TextureFormat::UNSIGNED_INT_1010102_REV);
+		case VK_FORMAT_A2R10G10B10_SINT_PACK32:		return TextureFormat(TextureFormat::BGRA,	TextureFormat::SIGNED_INT_1010102_REV);
+
+		case VK_FORMAT_A2B10G10R10_UNORM_PACK32:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNORM_INT_1010102_REV);
+		case VK_FORMAT_A2B10G10R10_SNORM_PACK32:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::SNORM_INT_1010102_REV);
+		case VK_FORMAT_A2B10G10R10_USCALED_PACK32:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNSIGNED_INT_1010102_REV);
+		case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:	return TextureFormat(TextureFormat::RGBA,	TextureFormat::SIGNED_INT_1010102_REV);
+		case VK_FORMAT_A2B10G10R10_UINT_PACK32:		return TextureFormat(TextureFormat::RGBA,	TextureFormat::UNSIGNED_INT_1010102_REV);
+		case VK_FORMAT_A2B10G10R10_SINT_PACK32:		return TextureFormat(TextureFormat::RGBA,	TextureFormat::SIGNED_INT_1010102_REV);
+
+		default:
+			TCU_THROW(InternalError, "Unknown image format");
+	}
+}
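+
+// Example usage (illustrative sketch): for plain color formats the two mappings are
+// expected to round-trip, e.g.
+//
+//   const tcu::TextureFormat fmt (tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8);
+//   DE_ASSERT(mapTextureFormat(fmt) == VK_FORMAT_R8G8B8A8_UNORM);
+//   DE_ASSERT(mapVkFormat(VK_FORMAT_R8G8B8A8_UNORM) == fmt);
+//
+// imageUtilSelfTest() later in this file exercises the round-trip for all non-compressed formats.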
+
+tcu::CompressedTexFormat mapVkCompressedFormat (VkFormat format)
+{
+	// update this mapping if VkFormat changes
+	DE_STATIC_ASSERT(VK_FORMAT_LAST == 185);
+
+	switch (format)
+	{
+		case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ETC2_RGB8;
+		case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ETC2_SRGB8;
+		case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:	return tcu::COMPRESSEDTEXFORMAT_ETC2_RGB8_PUNCHTHROUGH_ALPHA1;
+		case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:	return tcu::COMPRESSEDTEXFORMAT_ETC2_SRGB8_PUNCHTHROUGH_ALPHA1;
+		case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:	return tcu::COMPRESSEDTEXFORMAT_ETC2_EAC_RGBA8;
+		case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:	return tcu::COMPRESSEDTEXFORMAT_ETC2_EAC_SRGB8_ALPHA8;
+		case VK_FORMAT_EAC_R11_UNORM_BLOCK:			return tcu::COMPRESSEDTEXFORMAT_EAC_R11;
+		case VK_FORMAT_EAC_R11_SNORM_BLOCK:			return tcu::COMPRESSEDTEXFORMAT_EAC_SIGNED_R11;
+		case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_EAC_RG11;
+		case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_EAC_SIGNED_RG11;
+		case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_4x4_RGBA;
+		case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:			return tcu::COMPRESSEDTEXFORMAT_ASTC_4x4_SRGB8_ALPHA8;
+		case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_5x4_RGBA;
+		case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:			return tcu::COMPRESSEDTEXFORMAT_ASTC_5x4_SRGB8_ALPHA8;
+		case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_5x5_RGBA;
+		case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:			return tcu::COMPRESSEDTEXFORMAT_ASTC_5x5_SRGB8_ALPHA8;
+		case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_6x5_RGBA;
+		case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:			return tcu::COMPRESSEDTEXFORMAT_ASTC_6x5_SRGB8_ALPHA8;
+		case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_6x6_RGBA;
+		case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:			return tcu::COMPRESSEDTEXFORMAT_ASTC_6x6_SRGB8_ALPHA8;
+		case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_8x5_RGBA;
+		case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:			return tcu::COMPRESSEDTEXFORMAT_ASTC_8x5_SRGB8_ALPHA8;
+		case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_8x6_RGBA;
+		case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:			return tcu::COMPRESSEDTEXFORMAT_ASTC_8x6_SRGB8_ALPHA8;
+		case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_8x8_RGBA;
+		case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:			return tcu::COMPRESSEDTEXFORMAT_ASTC_8x8_SRGB8_ALPHA8;
+		case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_10x5_RGBA;
+		case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_10x5_SRGB8_ALPHA8;
+		case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_10x6_RGBA;
+		case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_10x6_SRGB8_ALPHA8;
+		case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_10x8_RGBA;
+		case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_10x8_SRGB8_ALPHA8;
+		case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_10x10_RGBA;
+		case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_10x10_SRGB8_ALPHA8;
+		case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_12x10_RGBA;
+		case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_12x10_SRGB8_ALPHA8;
+		case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_12x12_RGBA;
+		case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:		return tcu::COMPRESSEDTEXFORMAT_ASTC_12x12_SRGB8_ALPHA8;
+		default:
+			break;
+	}
+
+	return tcu::COMPRESSEDTEXFORMAT_LAST;
+}
+
+VkComponentMapping getFormatComponentMapping (VkFormat format)
+{
+	using tcu::TextureFormat;
+
+	static const VkComponentMapping	R		= {	VK_COMPONENT_SWIZZLE_R,		VK_COMPONENT_SWIZZLE_ZERO,	VK_COMPONENT_SWIZZLE_ZERO,	VK_COMPONENT_SWIZZLE_ONE	};
+	static const VkComponentMapping	RG		= {	VK_COMPONENT_SWIZZLE_R,		VK_COMPONENT_SWIZZLE_G,		VK_COMPONENT_SWIZZLE_ZERO,	VK_COMPONENT_SWIZZLE_ONE	};
+	static const VkComponentMapping	RGB		= {	VK_COMPONENT_SWIZZLE_R,		VK_COMPONENT_SWIZZLE_G,		VK_COMPONENT_SWIZZLE_B,		VK_COMPONENT_SWIZZLE_ONE	};
+	static const VkComponentMapping	RGBA	= {	VK_COMPONENT_SWIZZLE_R,		VK_COMPONENT_SWIZZLE_G,		VK_COMPONENT_SWIZZLE_B,		VK_COMPONENT_SWIZZLE_A		};
+	static const VkComponentMapping	S		= { VK_COMPONENT_SWIZZLE_ZERO,	VK_COMPONENT_SWIZZLE_ZERO,	VK_COMPONENT_SWIZZLE_ZERO,	VK_COMPONENT_SWIZZLE_A		};
+	static const VkComponentMapping	DS		= {	VK_COMPONENT_SWIZZLE_R,		VK_COMPONENT_SWIZZLE_ZERO,	VK_COMPONENT_SWIZZLE_ZERO,	VK_COMPONENT_SWIZZLE_A		};
+	static const VkComponentMapping	BGRA	= {	VK_COMPONENT_SWIZZLE_B,		VK_COMPONENT_SWIZZLE_G,		VK_COMPONENT_SWIZZLE_R,		VK_COMPONENT_SWIZZLE_A		};
+	static const VkComponentMapping	BGR		= {	VK_COMPONENT_SWIZZLE_B,		VK_COMPONENT_SWIZZLE_G,		VK_COMPONENT_SWIZZLE_R,		VK_COMPONENT_SWIZZLE_ONE	};
+
+	if (format == VK_FORMAT_UNDEFINED)
+		return RGBA;
+
+	const tcu::TextureFormat tcuFormat = (isCompressedFormat(format)) ? tcu::getUncompressedFormat(mapVkCompressedFormat(format))
+																	  : mapVkFormat(format);
+
+	switch (tcuFormat.order)
+	{
+		case TextureFormat::R:		return R;
+		case TextureFormat::RG:		return RG;
+		case TextureFormat::RGB:	return RGB;
+		case TextureFormat::RGBA:	return RGBA;
+		case TextureFormat::BGRA:	return BGRA;
+		case TextureFormat::BGR:	return BGR;
+		case TextureFormat::sR:		return R;
+		case TextureFormat::sRG:	return RG;
+		case TextureFormat::sRGB:	return RGB;
+		case TextureFormat::sRGBA:	return RGBA;
+		case TextureFormat::sBGR:	return BGR;
+		case TextureFormat::sBGRA:	return BGRA;
+		case TextureFormat::D:		return R;
+		case TextureFormat::S:		return S;
+		case TextureFormat::DS:		return DS;
+		default:
+			break;
+	}
+
+	DE_ASSERT(false);
+	return RGBA;
+}
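+
+// For example (sketch), single-channel and depth formats use the R-only swizzle:
+//
+//   getFormatComponentMapping(VK_FORMAT_D16_UNORM)      -> { R, ZERO, ZERO, ONE }
+//   getFormatComponentMapping(VK_FORMAT_B8G8R8A8_UNORM) -> { B, G, R, A }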
+
+static bool isScaledFormat (VkFormat format)
+{
+	// update this mapping if VkFormat changes
+	DE_STATIC_ASSERT(VK_FORMAT_LAST == 185);
+
+	switch (format)
+	{
+		case VK_FORMAT_R8_USCALED:
+		case VK_FORMAT_R8_SSCALED:
+		case VK_FORMAT_R8G8_USCALED:
+		case VK_FORMAT_R8G8_SSCALED:
+		case VK_FORMAT_R8G8B8_USCALED:
+		case VK_FORMAT_R8G8B8_SSCALED:
+		case VK_FORMAT_R8G8B8A8_USCALED:
+		case VK_FORMAT_R8G8B8A8_SSCALED:
+		case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
+		case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
+		case VK_FORMAT_R16_USCALED:
+		case VK_FORMAT_R16_SSCALED:
+		case VK_FORMAT_R16G16_USCALED:
+		case VK_FORMAT_R16G16_SSCALED:
+		case VK_FORMAT_R16G16B16_USCALED:
+		case VK_FORMAT_R16G16B16_SSCALED:
+		case VK_FORMAT_R16G16B16A16_USCALED:
+		case VK_FORMAT_R16G16B16A16_SSCALED:
+		case VK_FORMAT_B8G8R8_USCALED:
+		case VK_FORMAT_B8G8R8_SSCALED:
+		case VK_FORMAT_B8G8R8A8_USCALED:
+		case VK_FORMAT_B8G8R8A8_SSCALED:
+		case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
+		case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
+			return true;
+
+		default:
+			return false;
+	}
+}
+
+static bool fullTextureFormatRoundTripSupported (VkFormat format)
+{
+	if (isScaledFormat(format))
+	{
+		// *SCALED formats get mapped to the corresponding (u)int formats, since
+		// accessing them through (float) getPixel/setPixel has the same behavior
+		// as shader access in Vulkan.
+		// Unfortunately, a full round-trip between tcu::TextureFormat and VkFormat
+		// is not supported for most SCALED formats.
+
+		const tcu::TextureFormat	tcuFormat	= mapVkFormat(format);
+
+		switch (tcuFormat.type)
+		{
+			case tcu::TextureFormat::UNSIGNED_INT8:
+			case tcu::TextureFormat::UNSIGNED_INT16:
+			case tcu::TextureFormat::UNSIGNED_INT32:
+			case tcu::TextureFormat::SIGNED_INT8:
+			case tcu::TextureFormat::SIGNED_INT16:
+			case tcu::TextureFormat::SIGNED_INT32:
+			case tcu::TextureFormat::UNSIGNED_INT_1010102_REV:
+			case tcu::TextureFormat::SIGNED_INT_1010102_REV:
+				return false;
+
+			default:
+				return true;
+		}
+	}
+	else
+	{
+		switch (format)
+		{
+			case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+			case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+			case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
+			case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
+			case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+			case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+			case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
+				return false; // These map to regular byte array formats
+
+			default:
+				break;
+		}
+
+		return (format != VK_FORMAT_UNDEFINED);
+	}
+}
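+
+// For instance (sketch), VK_FORMAT_R8_USCALED maps to (R, UNSIGNED_INT8), which maps
+// back to VK_FORMAT_R8_UINT rather than to the original *SCALED format, so the full
+// round-trip check is skipped for it in imageUtilSelfTest() below.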
+
+void imageUtilSelfTest (void)
+{
+	for (int formatNdx = 0; formatNdx < VK_FORMAT_LAST; formatNdx++)
+	{
+		const VkFormat	format	= (VkFormat)formatNdx;
+
+		if (format == VK_FORMAT_R64_UINT			||
+			format == VK_FORMAT_R64_SINT			||
+			format == VK_FORMAT_R64G64_UINT			||
+			format == VK_FORMAT_R64G64_SINT			||
+			format == VK_FORMAT_R64G64B64_UINT		||
+			format == VK_FORMAT_R64G64B64_SINT		||
+			format == VK_FORMAT_R64G64B64A64_UINT	||
+			format == VK_FORMAT_R64G64B64A64_SINT)
+			continue; // \todo [2015-12-05 pyry] Add framework support for (u)int64 channel type
+
+		if (format != VK_FORMAT_UNDEFINED && !isCompressedFormat(format))
+		{
+			const tcu::TextureFormat	tcuFormat		= mapVkFormat(format);
+			const VkFormat				remappedFormat	= mapTextureFormat(tcuFormat);
+
+			DE_TEST_ASSERT(isValid(tcuFormat));
+
+			if (fullTextureFormatRoundTripSupported(format))
+				DE_TEST_ASSERT(format == remappedFormat);
+		}
+	}
+}
+
+VkFilter mapFilterMode (tcu::Sampler::FilterMode filterMode)
+{
+	DE_STATIC_ASSERT(tcu::Sampler::FILTERMODE_LAST == 6);
+
+	switch (filterMode)
+	{
+		case tcu::Sampler::NEAREST:					return VK_FILTER_NEAREST;
+		case tcu::Sampler::LINEAR:					return VK_FILTER_LINEAR;
+		case tcu::Sampler::NEAREST_MIPMAP_NEAREST:	return VK_FILTER_NEAREST;
+		case tcu::Sampler::NEAREST_MIPMAP_LINEAR:	return VK_FILTER_NEAREST;
+		case tcu::Sampler::LINEAR_MIPMAP_NEAREST:	return VK_FILTER_LINEAR;
+		case tcu::Sampler::LINEAR_MIPMAP_LINEAR:	return VK_FILTER_LINEAR;
+		default:
+			DE_FATAL("Illegal filter mode");
+			return (VkFilter)0;
+	}
+}
+
+VkSamplerMipmapMode mapMipmapMode (tcu::Sampler::FilterMode filterMode)
+{
+	DE_STATIC_ASSERT(tcu::Sampler::FILTERMODE_LAST == 6);
+
+	// \note VkSamplerCreateInfo doesn't have a flag for disabling mipmapping. Instead,
+	//		 minLod = 0 and maxLod = 0.25 should be used to match the OpenGL NEAREST and
+	//		 LINEAR filtering mode behavior.
+
+	switch (filterMode)
+	{
+		case tcu::Sampler::NEAREST:					return VK_SAMPLER_MIPMAP_MODE_NEAREST;
+		case tcu::Sampler::LINEAR:					return VK_SAMPLER_MIPMAP_MODE_NEAREST;
+		case tcu::Sampler::NEAREST_MIPMAP_NEAREST:	return VK_SAMPLER_MIPMAP_MODE_NEAREST;
+		case tcu::Sampler::NEAREST_MIPMAP_LINEAR:	return VK_SAMPLER_MIPMAP_MODE_LINEAR;
+		case tcu::Sampler::LINEAR_MIPMAP_NEAREST:	return VK_SAMPLER_MIPMAP_MODE_NEAREST;
+		case tcu::Sampler::LINEAR_MIPMAP_LINEAR:	return VK_SAMPLER_MIPMAP_MODE_LINEAR;
+		default:
+			DE_FATAL("Illegal filter mode");
+			return (VkSamplerMipmapMode)0;
+	}
+}
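+
+// Example (sketch): a single tcu filter mode splits into the two Vulkan fields, e.g.
+// tcu::Sampler::LINEAR_MIPMAP_NEAREST maps to VK_FILTER_LINEAR + VK_SAMPLER_MIPMAP_MODE_NEAREST,
+// while plain tcu::Sampler::LINEAR also maps to VK_FILTER_LINEAR + VK_SAMPLER_MIPMAP_MODE_NEAREST
+// with mipmapping effectively disabled via the minLod/maxLod values chosen in mapSampler() below.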
+
+VkSamplerAddressMode mapWrapMode (tcu::Sampler::WrapMode wrapMode)
+{
+	switch (wrapMode)
+	{
+		case tcu::Sampler::CLAMP_TO_EDGE:		return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
+		case tcu::Sampler::CLAMP_TO_BORDER:		return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
+		case tcu::Sampler::REPEAT_GL:			return VK_SAMPLER_ADDRESS_MODE_REPEAT;
+		case tcu::Sampler::MIRRORED_REPEAT_GL:	return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
+		case tcu::Sampler::MIRRORED_ONCE:		return VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE;
+		default:
+			DE_FATAL("Wrap mode can't be mapped to Vulkan");
+			return (vk::VkSamplerAddressMode)0;
+	}
+}
+
+vk::VkCompareOp mapCompareMode (tcu::Sampler::CompareMode mode)
+{
+	switch (mode)
+	{
+		case tcu::Sampler::COMPAREMODE_NONE:				return vk::VK_COMPARE_OP_NEVER;
+		case tcu::Sampler::COMPAREMODE_LESS:				return vk::VK_COMPARE_OP_LESS;
+		case tcu::Sampler::COMPAREMODE_LESS_OR_EQUAL:		return vk::VK_COMPARE_OP_LESS_OR_EQUAL;
+		case tcu::Sampler::COMPAREMODE_GREATER:				return vk::VK_COMPARE_OP_GREATER;
+		case tcu::Sampler::COMPAREMODE_GREATER_OR_EQUAL:	return vk::VK_COMPARE_OP_GREATER_OR_EQUAL;
+		case tcu::Sampler::COMPAREMODE_EQUAL:				return vk::VK_COMPARE_OP_EQUAL;
+		case tcu::Sampler::COMPAREMODE_NOT_EQUAL:			return vk::VK_COMPARE_OP_NOT_EQUAL;
+		case tcu::Sampler::COMPAREMODE_ALWAYS:				return vk::VK_COMPARE_OP_ALWAYS;
+		case tcu::Sampler::COMPAREMODE_NEVER:				return vk::VK_COMPARE_OP_NEVER;
+		default:
+			DE_FATAL("Illegal compare mode");
+			return (vk::VkCompareOp)0;
+	}
+}
+
+static VkBorderColor mapBorderColor (tcu::TextureChannelClass channelClass, const rr::GenericVec4& color)
+{
+	if (channelClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)
+	{
+		const tcu::UVec4	uColor	= color.get<deUint32>();
+
+		if (uColor		== tcu::UVec4(0, 0, 0, 0)) return VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
+		else if (uColor	== tcu::UVec4(0, 0, 0, 1)) return VK_BORDER_COLOR_INT_OPAQUE_BLACK;
+		else if (uColor == tcu::UVec4(1, 1, 1, 1)) return VK_BORDER_COLOR_INT_OPAQUE_WHITE;
+	}
+	else if (channelClass == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER)
+	{
+		const tcu::IVec4	sColor	= color.get<deInt32>();
+
+		if (sColor		== tcu::IVec4(0, 0, 0, 0)) return VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
+		else if (sColor	== tcu::IVec4(0, 0, 0, 1)) return VK_BORDER_COLOR_INT_OPAQUE_BLACK;
+		else if (sColor == tcu::IVec4(1, 1, 1, 1)) return VK_BORDER_COLOR_INT_OPAQUE_WHITE;
+	}
+	else
+	{
+		const tcu::Vec4		fColor	= color.get<float>();
+
+		if (fColor		== tcu::Vec4(0.0f, 0.0f, 0.0f, 0.0f)) return VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK;
+		else if (fColor == tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f)) return VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK;
+		else if (fColor == tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f)) return VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
+	}
+
+	DE_FATAL("Unsupported border color");
+	return VK_BORDER_COLOR_LAST;
+}
+
+VkSamplerCreateInfo mapSampler (const tcu::Sampler& sampler, const tcu::TextureFormat& format)
+{
+	const bool					compareEnabled	= (sampler.compare != tcu::Sampler::COMPAREMODE_NONE);
+	const VkCompareOp			compareOp		= (compareEnabled) ? (mapCompareMode(sampler.compare)) : (VK_COMPARE_OP_ALWAYS);
+	const VkBorderColor			borderColor		= mapBorderColor(getTextureChannelClass(format.type), sampler.borderColor);
+	const bool					isMipmapEnabled	= (sampler.minFilter != tcu::Sampler::NEAREST && sampler.minFilter != tcu::Sampler::LINEAR);
+
+	const VkSamplerCreateInfo	createInfo		=
+	{
+		VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
+		DE_NULL,
+		(VkSamplerCreateFlags)0,
+		mapFilterMode(sampler.magFilter),							// magFilter
+		mapFilterMode(sampler.minFilter),							// minFilter
+		mapMipmapMode(sampler.minFilter),							// mipmapMode
+		mapWrapMode(sampler.wrapS),									// addressModeU
+		mapWrapMode(sampler.wrapT),									// addressModeV
+		mapWrapMode(sampler.wrapR),									// addressModeW
+		0.0f,														// mipLodBias
+		VK_FALSE,													// anisotropyEnable
+		1.0f,														// maxAnisotropy
+		(VkBool32)(compareEnabled ? VK_TRUE : VK_FALSE),			// compareEnable
+		compareOp,													// compareOp
+		0.0f,														// minLod
+		(isMipmapEnabled ? 1000.0f : 0.25f),						// maxLod
+		borderColor,												// borderColor
+		(VkBool32)(sampler.normalizedCoords ? VK_FALSE : VK_TRUE),	// unnormalizedCoordinates
+	};
+
+	return createInfo;
+}
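+
+// Example (sketch): a GL-style sampler with minFilter = LINEAR_MIPMAP_LINEAR, magFilter = LINEAR
+// and REPEAT_GL wrap modes yields minFilter = magFilter = VK_FILTER_LINEAR,
+// mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR, addressMode* = VK_SAMPLER_ADDRESS_MODE_REPEAT
+// and maxLod = 1000.0f (mipmapping enabled).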
+
+tcu::Sampler mapVkSampler (const VkSamplerCreateInfo& samplerCreateInfo)
+{
+	// \note minLod & maxLod are not supported by tcu::Sampler. LOD must be clamped
+	//       before passing it to tcu::Texture*::sample*()
+
+	tcu::Sampler sampler(mapVkSamplerAddressMode(samplerCreateInfo.addressModeU),
+						 mapVkSamplerAddressMode(samplerCreateInfo.addressModeV),
+						 mapVkSamplerAddressMode(samplerCreateInfo.addressModeW),
+						 mapVkMinTexFilter(samplerCreateInfo.minFilter, samplerCreateInfo.mipmapMode),
+						 mapVkMagTexFilter(samplerCreateInfo.magFilter),
+						 0.0f,
+						 !samplerCreateInfo.unnormalizedCoordinates,
+						 samplerCreateInfo.compareEnable ? mapVkSamplerCompareOp(samplerCreateInfo.compareOp)
+														 : tcu::Sampler::COMPAREMODE_NONE,
+						 0,
+						 tcu::Vec4(0.0f, 0.0f, 0.0f, 0.0f),
+						 true);
+
+	if (samplerCreateInfo.anisotropyEnable)
+		TCU_THROW(InternalError, "Anisotropic filtering is not supported by tcu::Sampler");
+
+	switch (samplerCreateInfo.borderColor)
+	{
+		case VK_BORDER_COLOR_INT_OPAQUE_BLACK:
+			sampler.borderColor = tcu::UVec4(0,0,0,1);
+			break;
+		case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK:
+			sampler.borderColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
+			break;
+		case VK_BORDER_COLOR_INT_OPAQUE_WHITE:
+			sampler.borderColor = tcu::UVec4(1, 1, 1, 1);
+			break;
+		case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE:
+			sampler.borderColor = tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f);
+			break;
+		case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
+			sampler.borderColor = tcu::UVec4(0,0,0,0);
+			break;
+		case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
+			sampler.borderColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 0.0f);
+			break;
+
+		default:
+			DE_ASSERT(false);
+			break;
+	}
+
+	return sampler;
+}
+
+tcu::Sampler::CompareMode mapVkSamplerCompareOp (VkCompareOp compareOp)
+{
+	switch (compareOp)
+	{
+		case VK_COMPARE_OP_NEVER:				return tcu::Sampler::COMPAREMODE_NEVER;
+		case VK_COMPARE_OP_LESS:				return tcu::Sampler::COMPAREMODE_LESS;
+		case VK_COMPARE_OP_EQUAL:				return tcu::Sampler::COMPAREMODE_EQUAL;
+		case VK_COMPARE_OP_LESS_OR_EQUAL:		return tcu::Sampler::COMPAREMODE_LESS_OR_EQUAL;
+		case VK_COMPARE_OP_GREATER:				return tcu::Sampler::COMPAREMODE_GREATER;
+		case VK_COMPARE_OP_NOT_EQUAL:			return tcu::Sampler::COMPAREMODE_NOT_EQUAL;
+		case VK_COMPARE_OP_GREATER_OR_EQUAL:	return tcu::Sampler::COMPAREMODE_GREATER_OR_EQUAL;
+		case VK_COMPARE_OP_ALWAYS:				return tcu::Sampler::COMPAREMODE_ALWAYS;
+		default:
+			break;
+	}
+
+	DE_ASSERT(false);
+	return tcu::Sampler::COMPAREMODE_LAST;
+}
+
+tcu::Sampler::WrapMode mapVkSamplerAddressMode (VkSamplerAddressMode addressMode)
+{
+	switch (addressMode)
+	{
+		case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE:			return tcu::Sampler::CLAMP_TO_EDGE;
+		case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER:		return tcu::Sampler::CLAMP_TO_BORDER;
+		case VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT:		return tcu::Sampler::MIRRORED_REPEAT_GL;
+		case VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE:	return tcu::Sampler::MIRRORED_ONCE;
+		case VK_SAMPLER_ADDRESS_MODE_REPEAT:				return tcu::Sampler::REPEAT_GL;
+		default:
+			break;
+	}
+
+	DE_ASSERT(false);
+	return tcu::Sampler::WRAPMODE_LAST;
+}
+
+tcu::Sampler::FilterMode mapVkMinTexFilter (VkFilter filter, VkSamplerMipmapMode mipMode)
+{
+	switch (filter)
+	{
+		case VK_FILTER_LINEAR:
+			switch (mipMode)
+			{
+				case VK_SAMPLER_MIPMAP_MODE_LINEAR:		return tcu::Sampler::LINEAR_MIPMAP_LINEAR;
+				case VK_SAMPLER_MIPMAP_MODE_NEAREST:	return tcu::Sampler::LINEAR_MIPMAP_NEAREST;
+				default:
+					break;
+			}
+			break;
+
+		case VK_FILTER_NEAREST:
+			switch (mipMode)
+			{
+				case VK_SAMPLER_MIPMAP_MODE_LINEAR:		return tcu::Sampler::NEAREST_MIPMAP_LINEAR;
+				case VK_SAMPLER_MIPMAP_MODE_NEAREST:	return tcu::Sampler::NEAREST_MIPMAP_NEAREST;
+				default:
+					break;
+			}
+			break;
+
+		default:
+			break;
+	}
+
+	DE_ASSERT(false);
+	return tcu::Sampler::FILTERMODE_LAST;
+}
+
+tcu::Sampler::FilterMode mapVkMagTexFilter (VkFilter filter)
+{
+	switch (filter)
+	{
+		case VK_FILTER_LINEAR:		return tcu::Sampler::LINEAR;
+		case VK_FILTER_NEAREST:		return tcu::Sampler::NEAREST;
+		default:
+			break;
+	}
+
+	DE_ASSERT(false);
+	return tcu::Sampler::FILTERMODE_LAST;
+}
+
+int mapVkComponentSwizzle (const vk::VkComponentSwizzle& channelSwizzle)
+{
+	switch (channelSwizzle)
+	{
+		case vk::VK_COMPONENT_SWIZZLE_ZERO:	return 0;
+		case vk::VK_COMPONENT_SWIZZLE_ONE:	return 1;
+		case vk::VK_COMPONENT_SWIZZLE_R:	return 2;
+		case vk::VK_COMPONENT_SWIZZLE_G:	return 3;
+		case vk::VK_COMPONENT_SWIZZLE_B:	return 4;
+		case vk::VK_COMPONENT_SWIZZLE_A:	return 5;
+		default:
+			break;
+	}
+
+	DE_ASSERT(false);
+	return 0;
+}
+
+tcu::UVec4 mapVkComponentMapping (const vk::VkComponentMapping& mapping)
+{
+	tcu::UVec4 swizzle;
+
+	swizzle.x() = mapVkComponentSwizzle(mapping.r);
+	swizzle.y() = mapVkComponentSwizzle(mapping.g);
+	swizzle.z() = mapVkComponentSwizzle(mapping.b);
+	swizzle.w() = mapVkComponentSwizzle(mapping.a);
+
+	return swizzle;
+}
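+
+// Example (sketch): the identity mapping { R, G, B, A } yields the index vector (2, 3, 4, 5)
+// and { B, G, R, A } yields (4, 3, 2, 5), following the encoding used by
+// mapVkComponentSwizzle() above.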
+
+//! Get a format that matches the layout in buffer memory used for a
+//! buffer<->image copy of the depth aspect of a depth/stencil format.
+tcu::TextureFormat getDepthCopyFormat (VkFormat combinedFormat)
+{
+	switch (combinedFormat)
+	{
+		case VK_FORMAT_D16_UNORM:
+		case VK_FORMAT_X8_D24_UNORM_PACK32:
+		case VK_FORMAT_D32_SFLOAT:
+			return mapVkFormat(combinedFormat);
+
+		case VK_FORMAT_D16_UNORM_S8_UINT:
+			return mapVkFormat(VK_FORMAT_D16_UNORM);
+		case VK_FORMAT_D24_UNORM_S8_UINT:
+			return mapVkFormat(VK_FORMAT_X8_D24_UNORM_PACK32);
+		case VK_FORMAT_D32_SFLOAT_S8_UINT:
+			return mapVkFormat(VK_FORMAT_D32_SFLOAT);
+
+		case VK_FORMAT_S8_UINT:
+		default:
+			DE_FATAL("Unexpected depth/stencil format");
+			return tcu::TextureFormat();
+	}
+}
+
+//! Get a format that matches the layout in buffer memory used for a
+//! buffer<->image copy of the stencil aspect of a depth/stencil format.
+tcu::TextureFormat getStencilCopyFormat (VkFormat combinedFormat)
+{
+	switch (combinedFormat)
+	{
+		case VK_FORMAT_D16_UNORM_S8_UINT:
+		case VK_FORMAT_D24_UNORM_S8_UINT:
+		case VK_FORMAT_D32_SFLOAT_S8_UINT:
+		case VK_FORMAT_S8_UINT:
+			return mapVkFormat(VK_FORMAT_S8_UINT);
+
+		case VK_FORMAT_D16_UNORM:
+		case VK_FORMAT_X8_D24_UNORM_PACK32:
+		case VK_FORMAT_D32_SFLOAT:
+		default:
+			DE_FATAL("Unexpected depth/stencil format");
+			return tcu::TextureFormat();
+	}
+}
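+
+// Example (sketch): for VK_FORMAT_D24_UNORM_S8_UINT the per-aspect copy formats are
+//   getDepthCopyFormat()   -> mapVkFormat(VK_FORMAT_X8_D24_UNORM_PACK32), i.e. (D, UNSIGNED_INT_24_8_REV)
+//   getStencilCopyFormat() -> mapVkFormat(VK_FORMAT_S8_UINT), i.e. (S, UNSIGNED_INT8)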
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkImageUtil.hpp b/external/vulkancts/framework/vulkan/vkImageUtil.hpp
new file mode 100644
index 0000000..6ab0b53
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkImageUtil.hpp
@@ -0,0 +1,79 @@
+#ifndef _VKIMAGEUTIL_HPP
+#define _VKIMAGEUTIL_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Utilities for images.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "tcuTexture.hpp"
+#include "tcuCompressedTexture.hpp"
+
+namespace vk
+{
+
+bool						isFloatFormat				(VkFormat format);
+bool						isUnormFormat				(VkFormat format);
+bool						isSnormFormat				(VkFormat format);
+bool						isIntFormat					(VkFormat format);
+bool						isUintFormat				(VkFormat format);
+bool						isDepthStencilFormat		(VkFormat format);
+bool						isCompressedFormat			(VkFormat format);
+
+tcu::TextureFormat			mapVkFormat					(VkFormat format);
+tcu::CompressedTexFormat	mapVkCompressedFormat		(VkFormat format);
+tcu::TextureFormat			getDepthCopyFormat			(VkFormat combinedFormat);
+tcu::TextureFormat			getStencilCopyFormat		(VkFormat combinedFormat);
+
+tcu::Sampler				mapVkSampler				(const VkSamplerCreateInfo& samplerCreateInfo);
+tcu::Sampler::CompareMode	mapVkSamplerCompareOp		(VkCompareOp compareOp);
+tcu::Sampler::WrapMode		mapVkSamplerAddressMode		(VkSamplerAddressMode addressMode);
+tcu::Sampler::FilterMode	mapVkMinTexFilter			(VkFilter filter, VkSamplerMipmapMode mipMode);
+tcu::Sampler::FilterMode	mapVkMagTexFilter			(VkFilter filter);
+int							mapVkComponentSwizzle		(const VkComponentSwizzle& channelSwizzle);
+tcu::UVec4					mapVkComponentMapping		(const vk::VkComponentMapping& mapping);
+
+VkComponentMapping			getFormatComponentMapping	(VkFormat format);
+VkFilter					mapFilterMode				(tcu::Sampler::FilterMode filterMode);
+VkSamplerMipmapMode			mapMipmapMode				(tcu::Sampler::FilterMode filterMode);
+VkSamplerAddressMode		mapWrapMode					(tcu::Sampler::WrapMode wrapMode);
+VkCompareOp					mapCompareMode				(tcu::Sampler::CompareMode mode);
+VkFormat					mapTextureFormat			(const tcu::TextureFormat& format);
+VkSamplerCreateInfo			mapSampler					(const tcu::Sampler& sampler, const tcu::TextureFormat& format);
+
+void						imageUtilSelfTest			(void);
+
+} // vk
+
+#endif // _VKIMAGEUTIL_HPP
diff --git a/external/vulkancts/framework/vulkan/vkInitDeviceFunctionPointers.inl b/external/vulkancts/framework/vulkan/vkInitDeviceFunctionPointers.inl
new file mode 100644
index 0000000..4e4c44b
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkInitDeviceFunctionPointers.inl
@@ -0,0 +1,124 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+m_vk.destroyDevice									= (DestroyDeviceFunc)									GET_PROC_ADDR("vkDestroyDevice");
+m_vk.getDeviceQueue									= (GetDeviceQueueFunc)									GET_PROC_ADDR("vkGetDeviceQueue");
+m_vk.queueSubmit									= (QueueSubmitFunc)										GET_PROC_ADDR("vkQueueSubmit");
+m_vk.queueWaitIdle									= (QueueWaitIdleFunc)									GET_PROC_ADDR("vkQueueWaitIdle");
+m_vk.deviceWaitIdle									= (DeviceWaitIdleFunc)									GET_PROC_ADDR("vkDeviceWaitIdle");
+m_vk.allocateMemory									= (AllocateMemoryFunc)									GET_PROC_ADDR("vkAllocateMemory");
+m_vk.freeMemory										= (FreeMemoryFunc)										GET_PROC_ADDR("vkFreeMemory");
+m_vk.mapMemory										= (MapMemoryFunc)										GET_PROC_ADDR("vkMapMemory");
+m_vk.unmapMemory									= (UnmapMemoryFunc)										GET_PROC_ADDR("vkUnmapMemory");
+m_vk.flushMappedMemoryRanges						= (FlushMappedMemoryRangesFunc)							GET_PROC_ADDR("vkFlushMappedMemoryRanges");
+m_vk.invalidateMappedMemoryRanges					= (InvalidateMappedMemoryRangesFunc)					GET_PROC_ADDR("vkInvalidateMappedMemoryRanges");
+m_vk.getDeviceMemoryCommitment						= (GetDeviceMemoryCommitmentFunc)						GET_PROC_ADDR("vkGetDeviceMemoryCommitment");
+m_vk.bindBufferMemory								= (BindBufferMemoryFunc)								GET_PROC_ADDR("vkBindBufferMemory");
+m_vk.bindImageMemory								= (BindImageMemoryFunc)									GET_PROC_ADDR("vkBindImageMemory");
+m_vk.getBufferMemoryRequirements					= (GetBufferMemoryRequirementsFunc)						GET_PROC_ADDR("vkGetBufferMemoryRequirements");
+m_vk.getImageMemoryRequirements						= (GetImageMemoryRequirementsFunc)						GET_PROC_ADDR("vkGetImageMemoryRequirements");
+m_vk.getImageSparseMemoryRequirements				= (GetImageSparseMemoryRequirementsFunc)				GET_PROC_ADDR("vkGetImageSparseMemoryRequirements");
+m_vk.getPhysicalDeviceSparseImageFormatProperties	= (GetPhysicalDeviceSparseImageFormatPropertiesFunc)	GET_PROC_ADDR("vkGetPhysicalDeviceSparseImageFormatProperties");
+m_vk.queueBindSparse								= (QueueBindSparseFunc)									GET_PROC_ADDR("vkQueueBindSparse");
+m_vk.createFence									= (CreateFenceFunc)										GET_PROC_ADDR("vkCreateFence");
+m_vk.destroyFence									= (DestroyFenceFunc)									GET_PROC_ADDR("vkDestroyFence");
+m_vk.resetFences									= (ResetFencesFunc)										GET_PROC_ADDR("vkResetFences");
+m_vk.getFenceStatus									= (GetFenceStatusFunc)									GET_PROC_ADDR("vkGetFenceStatus");
+m_vk.waitForFences									= (WaitForFencesFunc)									GET_PROC_ADDR("vkWaitForFences");
+m_vk.createSemaphore								= (CreateSemaphoreFunc)									GET_PROC_ADDR("vkCreateSemaphore");
+m_vk.destroySemaphore								= (DestroySemaphoreFunc)								GET_PROC_ADDR("vkDestroySemaphore");
+m_vk.createEvent									= (CreateEventFunc)										GET_PROC_ADDR("vkCreateEvent");
+m_vk.destroyEvent									= (DestroyEventFunc)									GET_PROC_ADDR("vkDestroyEvent");
+m_vk.getEventStatus									= (GetEventStatusFunc)									GET_PROC_ADDR("vkGetEventStatus");
+m_vk.setEvent										= (SetEventFunc)										GET_PROC_ADDR("vkSetEvent");
+m_vk.resetEvent										= (ResetEventFunc)										GET_PROC_ADDR("vkResetEvent");
+m_vk.createQueryPool								= (CreateQueryPoolFunc)									GET_PROC_ADDR("vkCreateQueryPool");
+m_vk.destroyQueryPool								= (DestroyQueryPoolFunc)								GET_PROC_ADDR("vkDestroyQueryPool");
+m_vk.getQueryPoolResults							= (GetQueryPoolResultsFunc)								GET_PROC_ADDR("vkGetQueryPoolResults");
+m_vk.createBuffer									= (CreateBufferFunc)									GET_PROC_ADDR("vkCreateBuffer");
+m_vk.destroyBuffer									= (DestroyBufferFunc)									GET_PROC_ADDR("vkDestroyBuffer");
+m_vk.createBufferView								= (CreateBufferViewFunc)								GET_PROC_ADDR("vkCreateBufferView");
+m_vk.destroyBufferView								= (DestroyBufferViewFunc)								GET_PROC_ADDR("vkDestroyBufferView");
+m_vk.createImage									= (CreateImageFunc)										GET_PROC_ADDR("vkCreateImage");
+m_vk.destroyImage									= (DestroyImageFunc)									GET_PROC_ADDR("vkDestroyImage");
+m_vk.getImageSubresourceLayout						= (GetImageSubresourceLayoutFunc)						GET_PROC_ADDR("vkGetImageSubresourceLayout");
+m_vk.createImageView								= (CreateImageViewFunc)									GET_PROC_ADDR("vkCreateImageView");
+m_vk.destroyImageView								= (DestroyImageViewFunc)								GET_PROC_ADDR("vkDestroyImageView");
+m_vk.createShaderModule								= (CreateShaderModuleFunc)								GET_PROC_ADDR("vkCreateShaderModule");
+m_vk.destroyShaderModule							= (DestroyShaderModuleFunc)								GET_PROC_ADDR("vkDestroyShaderModule");
+m_vk.createPipelineCache							= (CreatePipelineCacheFunc)								GET_PROC_ADDR("vkCreatePipelineCache");
+m_vk.destroyPipelineCache							= (DestroyPipelineCacheFunc)							GET_PROC_ADDR("vkDestroyPipelineCache");
+m_vk.getPipelineCacheData							= (GetPipelineCacheDataFunc)							GET_PROC_ADDR("vkGetPipelineCacheData");
+m_vk.mergePipelineCaches							= (MergePipelineCachesFunc)								GET_PROC_ADDR("vkMergePipelineCaches");
+m_vk.createGraphicsPipelines						= (CreateGraphicsPipelinesFunc)							GET_PROC_ADDR("vkCreateGraphicsPipelines");
+m_vk.createComputePipelines							= (CreateComputePipelinesFunc)							GET_PROC_ADDR("vkCreateComputePipelines");
+m_vk.destroyPipeline								= (DestroyPipelineFunc)									GET_PROC_ADDR("vkDestroyPipeline");
+m_vk.createPipelineLayout							= (CreatePipelineLayoutFunc)							GET_PROC_ADDR("vkCreatePipelineLayout");
+m_vk.destroyPipelineLayout							= (DestroyPipelineLayoutFunc)							GET_PROC_ADDR("vkDestroyPipelineLayout");
+m_vk.createSampler									= (CreateSamplerFunc)									GET_PROC_ADDR("vkCreateSampler");
+m_vk.destroySampler									= (DestroySamplerFunc)									GET_PROC_ADDR("vkDestroySampler");
+m_vk.createDescriptorSetLayout						= (CreateDescriptorSetLayoutFunc)						GET_PROC_ADDR("vkCreateDescriptorSetLayout");
+m_vk.destroyDescriptorSetLayout						= (DestroyDescriptorSetLayoutFunc)						GET_PROC_ADDR("vkDestroyDescriptorSetLayout");
+m_vk.createDescriptorPool							= (CreateDescriptorPoolFunc)							GET_PROC_ADDR("vkCreateDescriptorPool");
+m_vk.destroyDescriptorPool							= (DestroyDescriptorPoolFunc)							GET_PROC_ADDR("vkDestroyDescriptorPool");
+m_vk.resetDescriptorPool							= (ResetDescriptorPoolFunc)								GET_PROC_ADDR("vkResetDescriptorPool");
+m_vk.allocateDescriptorSets							= (AllocateDescriptorSetsFunc)							GET_PROC_ADDR("vkAllocateDescriptorSets");
+m_vk.freeDescriptorSets								= (FreeDescriptorSetsFunc)								GET_PROC_ADDR("vkFreeDescriptorSets");
+m_vk.updateDescriptorSets							= (UpdateDescriptorSetsFunc)							GET_PROC_ADDR("vkUpdateDescriptorSets");
+m_vk.createFramebuffer								= (CreateFramebufferFunc)								GET_PROC_ADDR("vkCreateFramebuffer");
+m_vk.destroyFramebuffer								= (DestroyFramebufferFunc)								GET_PROC_ADDR("vkDestroyFramebuffer");
+m_vk.createRenderPass								= (CreateRenderPassFunc)								GET_PROC_ADDR("vkCreateRenderPass");
+m_vk.destroyRenderPass								= (DestroyRenderPassFunc)								GET_PROC_ADDR("vkDestroyRenderPass");
+m_vk.getRenderAreaGranularity						= (GetRenderAreaGranularityFunc)						GET_PROC_ADDR("vkGetRenderAreaGranularity");
+m_vk.createCommandPool								= (CreateCommandPoolFunc)								GET_PROC_ADDR("vkCreateCommandPool");
+m_vk.destroyCommandPool								= (DestroyCommandPoolFunc)								GET_PROC_ADDR("vkDestroyCommandPool");
+m_vk.resetCommandPool								= (ResetCommandPoolFunc)								GET_PROC_ADDR("vkResetCommandPool");
+m_vk.allocateCommandBuffers							= (AllocateCommandBuffersFunc)							GET_PROC_ADDR("vkAllocateCommandBuffers");
+m_vk.freeCommandBuffers								= (FreeCommandBuffersFunc)								GET_PROC_ADDR("vkFreeCommandBuffers");
+m_vk.beginCommandBuffer								= (BeginCommandBufferFunc)								GET_PROC_ADDR("vkBeginCommandBuffer");
+m_vk.endCommandBuffer								= (EndCommandBufferFunc)								GET_PROC_ADDR("vkEndCommandBuffer");
+m_vk.resetCommandBuffer								= (ResetCommandBufferFunc)								GET_PROC_ADDR("vkResetCommandBuffer");
+m_vk.cmdBindPipeline								= (CmdBindPipelineFunc)									GET_PROC_ADDR("vkCmdBindPipeline");
+m_vk.cmdSetViewport									= (CmdSetViewportFunc)									GET_PROC_ADDR("vkCmdSetViewport");
+m_vk.cmdSetScissor									= (CmdSetScissorFunc)									GET_PROC_ADDR("vkCmdSetScissor");
+m_vk.cmdSetLineWidth								= (CmdSetLineWidthFunc)									GET_PROC_ADDR("vkCmdSetLineWidth");
+m_vk.cmdSetDepthBias								= (CmdSetDepthBiasFunc)									GET_PROC_ADDR("vkCmdSetDepthBias");
+m_vk.cmdSetBlendConstants							= (CmdSetBlendConstantsFunc)							GET_PROC_ADDR("vkCmdSetBlendConstants");
+m_vk.cmdSetDepthBounds								= (CmdSetDepthBoundsFunc)								GET_PROC_ADDR("vkCmdSetDepthBounds");
+m_vk.cmdSetStencilCompareMask						= (CmdSetStencilCompareMaskFunc)						GET_PROC_ADDR("vkCmdSetStencilCompareMask");
+m_vk.cmdSetStencilWriteMask							= (CmdSetStencilWriteMaskFunc)							GET_PROC_ADDR("vkCmdSetStencilWriteMask");
+m_vk.cmdSetStencilReference							= (CmdSetStencilReferenceFunc)							GET_PROC_ADDR("vkCmdSetStencilReference");
+m_vk.cmdBindDescriptorSets							= (CmdBindDescriptorSetsFunc)							GET_PROC_ADDR("vkCmdBindDescriptorSets");
+m_vk.cmdBindIndexBuffer								= (CmdBindIndexBufferFunc)								GET_PROC_ADDR("vkCmdBindIndexBuffer");
+m_vk.cmdBindVertexBuffers							= (CmdBindVertexBuffersFunc)							GET_PROC_ADDR("vkCmdBindVertexBuffers");
+m_vk.cmdDraw										= (CmdDrawFunc)											GET_PROC_ADDR("vkCmdDraw");
+m_vk.cmdDrawIndexed									= (CmdDrawIndexedFunc)									GET_PROC_ADDR("vkCmdDrawIndexed");
+m_vk.cmdDrawIndirect								= (CmdDrawIndirectFunc)									GET_PROC_ADDR("vkCmdDrawIndirect");
+m_vk.cmdDrawIndexedIndirect							= (CmdDrawIndexedIndirectFunc)							GET_PROC_ADDR("vkCmdDrawIndexedIndirect");
+m_vk.cmdDispatch									= (CmdDispatchFunc)										GET_PROC_ADDR("vkCmdDispatch");
+m_vk.cmdDispatchIndirect							= (CmdDispatchIndirectFunc)								GET_PROC_ADDR("vkCmdDispatchIndirect");
+m_vk.cmdCopyBuffer									= (CmdCopyBufferFunc)									GET_PROC_ADDR("vkCmdCopyBuffer");
+m_vk.cmdCopyImage									= (CmdCopyImageFunc)									GET_PROC_ADDR("vkCmdCopyImage");
+m_vk.cmdBlitImage									= (CmdBlitImageFunc)									GET_PROC_ADDR("vkCmdBlitImage");
+m_vk.cmdCopyBufferToImage							= (CmdCopyBufferToImageFunc)							GET_PROC_ADDR("vkCmdCopyBufferToImage");
+m_vk.cmdCopyImageToBuffer							= (CmdCopyImageToBufferFunc)							GET_PROC_ADDR("vkCmdCopyImageToBuffer");
+m_vk.cmdUpdateBuffer								= (CmdUpdateBufferFunc)									GET_PROC_ADDR("vkCmdUpdateBuffer");
+m_vk.cmdFillBuffer									= (CmdFillBufferFunc)									GET_PROC_ADDR("vkCmdFillBuffer");
+m_vk.cmdClearColorImage								= (CmdClearColorImageFunc)								GET_PROC_ADDR("vkCmdClearColorImage");
+m_vk.cmdClearDepthStencilImage						= (CmdClearDepthStencilImageFunc)						GET_PROC_ADDR("vkCmdClearDepthStencilImage");
+m_vk.cmdClearAttachments							= (CmdClearAttachmentsFunc)								GET_PROC_ADDR("vkCmdClearAttachments");
+m_vk.cmdResolveImage								= (CmdResolveImageFunc)									GET_PROC_ADDR("vkCmdResolveImage");
+m_vk.cmdSetEvent									= (CmdSetEventFunc)										GET_PROC_ADDR("vkCmdSetEvent");
+m_vk.cmdResetEvent									= (CmdResetEventFunc)									GET_PROC_ADDR("vkCmdResetEvent");
+m_vk.cmdWaitEvents									= (CmdWaitEventsFunc)									GET_PROC_ADDR("vkCmdWaitEvents");
+m_vk.cmdPipelineBarrier								= (CmdPipelineBarrierFunc)								GET_PROC_ADDR("vkCmdPipelineBarrier");
+m_vk.cmdBeginQuery									= (CmdBeginQueryFunc)									GET_PROC_ADDR("vkCmdBeginQuery");
+m_vk.cmdEndQuery									= (CmdEndQueryFunc)										GET_PROC_ADDR("vkCmdEndQuery");
+m_vk.cmdResetQueryPool								= (CmdResetQueryPoolFunc)								GET_PROC_ADDR("vkCmdResetQueryPool");
+m_vk.cmdWriteTimestamp								= (CmdWriteTimestampFunc)								GET_PROC_ADDR("vkCmdWriteTimestamp");
+m_vk.cmdCopyQueryPoolResults						= (CmdCopyQueryPoolResultsFunc)							GET_PROC_ADDR("vkCmdCopyQueryPoolResults");
+m_vk.cmdPushConstants								= (CmdPushConstantsFunc)								GET_PROC_ADDR("vkCmdPushConstants");
+m_vk.cmdBeginRenderPass								= (CmdBeginRenderPassFunc)								GET_PROC_ADDR("vkCmdBeginRenderPass");
+m_vk.cmdNextSubpass									= (CmdNextSubpassFunc)									GET_PROC_ADDR("vkCmdNextSubpass");
+m_vk.cmdEndRenderPass								= (CmdEndRenderPassFunc)								GET_PROC_ADDR("vkCmdEndRenderPass");
+m_vk.cmdExecuteCommands								= (CmdExecuteCommandsFunc)								GET_PROC_ADDR("vkCmdExecuteCommands");
diff --git a/external/vulkancts/framework/vulkan/vkInitInstanceFunctionPointers.inl b/external/vulkancts/framework/vulkan/vkInitInstanceFunctionPointers.inl
new file mode 100644
index 0000000..1bd339c
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkInitInstanceFunctionPointers.inl
@@ -0,0 +1,15 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+m_vk.destroyInstance						= (DestroyInstanceFunc)							GET_PROC_ADDR("vkDestroyInstance");
+m_vk.enumeratePhysicalDevices				= (EnumeratePhysicalDevicesFunc)				GET_PROC_ADDR("vkEnumeratePhysicalDevices");
+m_vk.getPhysicalDeviceFeatures				= (GetPhysicalDeviceFeaturesFunc)				GET_PROC_ADDR("vkGetPhysicalDeviceFeatures");
+m_vk.getPhysicalDeviceFormatProperties		= (GetPhysicalDeviceFormatPropertiesFunc)		GET_PROC_ADDR("vkGetPhysicalDeviceFormatProperties");
+m_vk.getPhysicalDeviceImageFormatProperties	= (GetPhysicalDeviceImageFormatPropertiesFunc)	GET_PROC_ADDR("vkGetPhysicalDeviceImageFormatProperties");
+m_vk.getPhysicalDeviceProperties			= (GetPhysicalDevicePropertiesFunc)				GET_PROC_ADDR("vkGetPhysicalDeviceProperties");
+m_vk.getPhysicalDeviceQueueFamilyProperties	= (GetPhysicalDeviceQueueFamilyPropertiesFunc)	GET_PROC_ADDR("vkGetPhysicalDeviceQueueFamilyProperties");
+m_vk.getPhysicalDeviceMemoryProperties		= (GetPhysicalDeviceMemoryPropertiesFunc)		GET_PROC_ADDR("vkGetPhysicalDeviceMemoryProperties");
+m_vk.getDeviceProcAddr						= (GetDeviceProcAddrFunc)						GET_PROC_ADDR("vkGetDeviceProcAddr");
+m_vk.createDevice							= (CreateDeviceFunc)							GET_PROC_ADDR("vkCreateDevice");
+m_vk.enumerateDeviceExtensionProperties		= (EnumerateDeviceExtensionPropertiesFunc)		GET_PROC_ADDR("vkEnumerateDeviceExtensionProperties");
+m_vk.enumerateDeviceLayerProperties			= (EnumerateDeviceLayerPropertiesFunc)			GET_PROC_ADDR("vkEnumerateDeviceLayerProperties");
diff --git a/external/vulkancts/framework/vulkan/vkInitPlatformFunctionPointers.inl b/external/vulkancts/framework/vulkan/vkInitPlatformFunctionPointers.inl
new file mode 100644
index 0000000..480eaac
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkInitPlatformFunctionPointers.inl
@@ -0,0 +1,7 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+m_vk.createInstance							= (CreateInstanceFunc)							GET_PROC_ADDR("vkCreateInstance");
+m_vk.getInstanceProcAddr					= (GetInstanceProcAddrFunc)						GET_PROC_ADDR("vkGetInstanceProcAddr");
+m_vk.enumerateInstanceExtensionProperties	= (EnumerateInstanceExtensionPropertiesFunc)	GET_PROC_ADDR("vkEnumerateInstanceExtensionProperties");
+m_vk.enumerateInstanceLayerProperties		= (EnumerateInstanceLayerPropertiesFunc)		GET_PROC_ADDR("vkEnumerateInstanceLayerProperties");
diff --git a/external/vulkancts/framework/vulkan/vkInstanceDriverImpl.inl b/external/vulkancts/framework/vulkan/vkInstanceDriverImpl.inl
new file mode 100644
index 0000000..12dc4fd
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkInstanceDriverImpl.inl
@@ -0,0 +1,63 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+
+void InstanceDriver::destroyInstance (VkInstance instance, const VkAllocationCallbacks* pAllocator) const
+{
+	m_vk.destroyInstance(instance, pAllocator);
+}
+
+VkResult InstanceDriver::enumeratePhysicalDevices (VkInstance instance, deUint32* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices) const
+{
+	return m_vk.enumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
+}
+
+void InstanceDriver::getPhysicalDeviceFeatures (VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) const
+{
+	m_vk.getPhysicalDeviceFeatures(physicalDevice, pFeatures);
+}
+
+void InstanceDriver::getPhysicalDeviceFormatProperties (VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) const
+{
+	m_vk.getPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
+}
+
+VkResult InstanceDriver::getPhysicalDeviceImageFormatProperties (VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) const
+{
+	return m_vk.getPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
+}
+
+void InstanceDriver::getPhysicalDeviceProperties (VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties) const
+{
+	m_vk.getPhysicalDeviceProperties(physicalDevice, pProperties);
+}
+
+void InstanceDriver::getPhysicalDeviceQueueFamilyProperties (VkPhysicalDevice physicalDevice, deUint32* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) const
+{
+	m_vk.getPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
+}
+
+void InstanceDriver::getPhysicalDeviceMemoryProperties (VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties) const
+{
+	m_vk.getPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
+}
+
+PFN_vkVoidFunction InstanceDriver::getDeviceProcAddr (VkDevice device, const char* pName) const
+{
+	return m_vk.getDeviceProcAddr(device, pName);
+}
+
+VkResult InstanceDriver::createDevice (VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) const
+{
+	return m_vk.createDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
+}
+
+VkResult InstanceDriver::enumerateDeviceExtensionProperties (VkPhysicalDevice physicalDevice, const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties) const
+{
+	return m_vk.enumerateDeviceExtensionProperties(physicalDevice, pLayerName, pPropertyCount, pProperties);
+}
+
+VkResult InstanceDriver::enumerateDeviceLayerProperties (VkPhysicalDevice physicalDevice, deUint32* pPropertyCount, VkLayerProperties* pProperties) const
+{
+	return m_vk.enumerateDeviceLayerProperties(physicalDevice, pPropertyCount, pProperties);
+}
diff --git a/external/vulkancts/framework/vulkan/vkInstanceFunctionPointers.inl b/external/vulkancts/framework/vulkan/vkInstanceFunctionPointers.inl
new file mode 100644
index 0000000..e4e072f
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkInstanceFunctionPointers.inl
@@ -0,0 +1,15 @@
+/* WARNING: This is auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+DestroyInstanceFunc							destroyInstance;
+EnumeratePhysicalDevicesFunc				enumeratePhysicalDevices;
+GetPhysicalDeviceFeaturesFunc				getPhysicalDeviceFeatures;
+GetPhysicalDeviceFormatPropertiesFunc		getPhysicalDeviceFormatProperties;
+GetPhysicalDeviceImageFormatPropertiesFunc	getPhysicalDeviceImageFormatProperties;
+GetPhysicalDevicePropertiesFunc				getPhysicalDeviceProperties;
+GetPhysicalDeviceQueueFamilyPropertiesFunc	getPhysicalDeviceQueueFamilyProperties;
+GetPhysicalDeviceMemoryPropertiesFunc		getPhysicalDeviceMemoryProperties;
+GetDeviceProcAddrFunc						getDeviceProcAddr;
+CreateDeviceFunc							createDevice;
+EnumerateDeviceExtensionPropertiesFunc		enumerateDeviceExtensionProperties;
+EnumerateDeviceLayerPropertiesFunc			enumerateDeviceLayerProperties;
diff --git a/external/vulkancts/framework/vulkan/vkMemUtil.cpp b/external/vulkancts/framework/vulkan/vkMemUtil.cpp
new file mode 100644
index 0000000..f253a18
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkMemUtil.cpp
@@ -0,0 +1,252 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Memory management utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkMemUtil.hpp"
+#include "vkStrUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "deInt32.h"
+
+#include <sstream>
+
+namespace vk
+{
+
+using de::UniquePtr;
+using de::MovePtr;
+
+namespace
+{
+
+class HostPtr
+{
+public:
+								HostPtr		(const DeviceInterface& vkd, VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags);
+								~HostPtr	(void);
+
+	void*						get			(void) const { return m_ptr; }
+
+private:
+	const DeviceInterface&		m_vkd;
+	const VkDevice				m_device;
+	const VkDeviceMemory		m_memory;
+	void* const					m_ptr;
+};
+
+void* mapMemory (const DeviceInterface& vkd, VkDevice device, VkDeviceMemory mem, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags)
+{
+	void* hostPtr = DE_NULL;
+	VK_CHECK(vkd.mapMemory(device, mem, offset, size, flags, &hostPtr));
+	TCU_CHECK(hostPtr);
+	return hostPtr;
+}
+
+HostPtr::HostPtr (const DeviceInterface& vkd, VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags)
+	: m_vkd		(vkd)
+	, m_device	(device)
+	, m_memory	(memory)
+	, m_ptr		(mapMemory(vkd, device, memory, offset, size, flags))
+{
+}
+
+HostPtr::~HostPtr (void)
+{
+	m_vkd.unmapMemory(m_device, m_memory);
+}
+
+deUint32 selectMatchingMemoryType (const VkPhysicalDeviceMemoryProperties& deviceMemProps, deUint32 allowedMemTypeBits, MemoryRequirement requirement)
+{
+	for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < deviceMemProps.memoryTypeCount; memoryTypeNdx++)
+	{
+		if ((allowedMemTypeBits & (1u << memoryTypeNdx)) != 0 &&
+			requirement.matchesHeap(deviceMemProps.memoryTypes[memoryTypeNdx].propertyFlags))
+			return memoryTypeNdx;
+	}
+
+	TCU_THROW(NotSupportedError, "No compatible memory type found");
+}
+
+bool isHostVisibleMemory (const VkPhysicalDeviceMemoryProperties& deviceMemProps, deUint32 memoryTypeNdx)
+{
+	DE_ASSERT(memoryTypeNdx < deviceMemProps.memoryTypeCount);
+	return (deviceMemProps.memoryTypes[memoryTypeNdx].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0u;
+}
+
+} // anonymous
+
+// Allocation
+
+Allocation::Allocation (VkDeviceMemory memory, VkDeviceSize offset, void* hostPtr)
+	: m_memory	(memory)
+	, m_offset	(offset)
+	, m_hostPtr	(hostPtr)
+{
+}
+
+Allocation::~Allocation (void)
+{
+}
+
+// MemoryRequirement
+
+const MemoryRequirement MemoryRequirement::Any				= MemoryRequirement(0x0u);
+const MemoryRequirement MemoryRequirement::HostVisible		= MemoryRequirement(MemoryRequirement::FLAG_HOST_VISIBLE);
+const MemoryRequirement MemoryRequirement::Coherent			= MemoryRequirement(MemoryRequirement::FLAG_COHERENT);
+const MemoryRequirement MemoryRequirement::LazilyAllocated	= MemoryRequirement(MemoryRequirement::FLAG_LAZY_ALLOCATION);
+
+bool MemoryRequirement::matchesHeap (VkMemoryPropertyFlags heapFlags) const
+{
+	// sanity check
+	if ((m_flags & FLAG_COHERENT) && !(m_flags & FLAG_HOST_VISIBLE))
+		DE_FATAL("Coherent memory must be host-visible");
+	if ((m_flags & FLAG_HOST_VISIBLE) && (m_flags & FLAG_LAZY_ALLOCATION))
+		DE_FATAL("Lazily allocated memory cannot be mappable");
+
+	// host-visible
+	if ((m_flags & FLAG_HOST_VISIBLE) && !(heapFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
+		return false;
+
+	// coherent
+	if ((m_flags & FLAG_COHERENT) && !(heapFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT))
+		return false;
+
+	// lazy
+	if ((m_flags & FLAG_LAZY_ALLOCATION) && !(heapFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT))
+		return false;
+
+	return true;
+}
+
+MemoryRequirement::MemoryRequirement (deUint32 flags)
+	: m_flags(flags)
+{
+}
+
+// SimpleAllocator
+
+class SimpleAllocation : public Allocation
+{
+public:
+									SimpleAllocation	(Move<VkDeviceMemory> mem, MovePtr<HostPtr> hostPtr);
+	virtual							~SimpleAllocation	(void);
+
+private:
+	const Unique<VkDeviceMemory>	m_memHolder;
+	const UniquePtr<HostPtr>		m_hostPtr;
+};
+
+SimpleAllocation::SimpleAllocation (Move<VkDeviceMemory> mem, MovePtr<HostPtr> hostPtr)
+	: Allocation	(*mem, (VkDeviceSize)0, hostPtr ? hostPtr->get() : DE_NULL)
+	, m_memHolder	(mem)
+	, m_hostPtr		(hostPtr)
+{
+}
+
+SimpleAllocation::~SimpleAllocation (void)
+{
+}
+
+SimpleAllocator::SimpleAllocator (const DeviceInterface& vk, VkDevice device, const VkPhysicalDeviceMemoryProperties& deviceMemProps)
+	: m_vk		(vk)
+	, m_device	(device)
+	, m_memProps(deviceMemProps)
+{
+}
+
+MovePtr<Allocation> SimpleAllocator::allocate (const VkMemoryAllocateInfo& allocInfo, VkDeviceSize alignment)
+{
+	DE_UNREF(alignment);
+
+	Move<VkDeviceMemory>	mem		= allocateMemory(m_vk, m_device, &allocInfo);
+	MovePtr<HostPtr>		hostPtr;
+
+	if (isHostVisibleMemory(m_memProps, allocInfo.memoryTypeIndex))
+		hostPtr = MovePtr<HostPtr>(new HostPtr(m_vk, m_device, *mem, 0u, allocInfo.allocationSize, 0u));
+
+	return MovePtr<Allocation>(new SimpleAllocation(mem, hostPtr));
+}
+
+MovePtr<Allocation> SimpleAllocator::allocate (const VkMemoryRequirements& memReqs, MemoryRequirement requirement)
+{
+	const deUint32				memoryTypeNdx	= selectMatchingMemoryType(m_memProps, memReqs.memoryTypeBits, requirement);
+	const VkMemoryAllocateInfo	allocInfo		=
+	{
+		VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,	//	VkStructureType			sType;
+		DE_NULL,								//	const void*				pNext;
+		memReqs.size,							//	VkDeviceSize			allocationSize;
+		memoryTypeNdx,							//	deUint32				memoryTypeIndex;
+	};
+
+	Move<VkDeviceMemory>		mem				= allocateMemory(m_vk, m_device, &allocInfo);
+	MovePtr<HostPtr>			hostPtr;
+
+	if (requirement & MemoryRequirement::HostVisible)
+	{
+		DE_ASSERT(isHostVisibleMemory(m_memProps, allocInfo.memoryTypeIndex));
+		hostPtr = MovePtr<HostPtr>(new HostPtr(m_vk, m_device, *mem, 0u, allocInfo.allocationSize, 0u));
+	}
+
+	return MovePtr<Allocation>(new SimpleAllocation(mem, hostPtr));
+}
+
+void flushMappedMemoryRange (const DeviceInterface& vkd, VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size)
+{
+	const VkMappedMemoryRange	range	=
+	{
+		VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+		DE_NULL,
+		memory,
+		offset,
+		size
+	};
+
+	VK_CHECK(vkd.flushMappedMemoryRanges(device, 1u, &range));
+}
+
+void invalidateMappedMemoryRange (const DeviceInterface& vkd, VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size)
+{
+	const VkMappedMemoryRange	range	=
+	{
+		VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+		DE_NULL,
+		memory,
+		offset,
+		size
+	};
+
+	VK_CHECK(vkd.invalidateMappedMemoryRanges(device, 1u, &range));
+}
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkMemUtil.hpp b/external/vulkancts/framework/vulkan/vkMemUtil.hpp
new file mode 100644
index 0000000..f73ff2b
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkMemUtil.hpp
@@ -0,0 +1,152 @@
+#ifndef _VKMEMUTIL_HPP
+#define _VKMEMUTIL_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Memory management utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "deUniquePtr.hpp"
+
+namespace vk
+{
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Memory allocation interface
+ *
+ * An Allocation represents a block of device memory and is created by an
+ * Allocator implementation. Test code should use an Allocator for allocating
+ * memory, unless there is a reason not to (for example when testing vkAllocateMemory).
+ *
+ * An Allocation doesn't necessarily correspond to a whole VkDeviceMemory;
+ * instead it may represent a sub-allocation. Thus, whenever the VkDeviceMemory
+ * (getMemory()) managed by an Allocation is passed to Vulkan API calls, the
+ * offset given by getOffset() must be used.
+ *
+ * If host-visible memory was requested, a host pointer to the memory can
+ * be queried with getHostPtr(). No offset is needed when accessing the host
+ * pointer, i.e. the pointer is already adjusted in the case of a sub-allocation.
+ *
+ * Memory mappings are managed solely by the Allocation, i.e. unmapping or
+ * re-mapping a VkDeviceMemory owned by an Allocation is not allowed.
+ *//*--------------------------------------------------------------------*/
+class Allocation
+{
+public:
+	virtual					~Allocation	(void);
+
+	//! Get VkDeviceMemory backing this allocation
+	VkDeviceMemory			getMemory	(void) const { return m_memory;							}
+
+	//! Get offset in VkDeviceMemory for this allocation
+	VkDeviceSize			getOffset	(void) const { return m_offset;							}
+
+	//! Get host pointer for this allocation. Only available for host-visible allocations
+	void*					getHostPtr	(void) const { DE_ASSERT(m_hostPtr); return m_hostPtr;	}
+
+protected:
+							Allocation	(VkDeviceMemory memory, VkDeviceSize offset, void* hostPtr);
+
+private:
+	const VkDeviceMemory	m_memory;
+	const VkDeviceSize		m_offset;
+	void* const				m_hostPtr;
+};
+
+//! Memory allocation requirements
+class MemoryRequirement
+{
+public:
+	static const MemoryRequirement	Any;
+	static const MemoryRequirement	HostVisible;
+	static const MemoryRequirement	Coherent;
+	static const MemoryRequirement	LazilyAllocated;
+
+	inline MemoryRequirement		operator|			(MemoryRequirement requirement) const
+	{
+		return MemoryRequirement(m_flags | requirement.m_flags);
+	}
+
+	inline MemoryRequirement		operator&			(MemoryRequirement requirement) const
+	{
+		return MemoryRequirement(m_flags & requirement.m_flags);
+	}
+
+	bool							matchesHeap			(VkMemoryPropertyFlags heapFlags) const;
+
+	inline operator					bool				(void) const { return m_flags != 0u; }
+
+private:
+	explicit						MemoryRequirement	(deUint32 flags);
+
+	const deUint32					m_flags;
+
+	enum Flags
+	{
+		FLAG_HOST_VISIBLE		= 1u << 0u,
+		FLAG_COHERENT			= 1u << 1u,
+		FLAG_LAZY_ALLOCATION	= 1u << 2u,
+	};
+};
+
+//! Memory allocator interface
+class Allocator
+{
+public:
+									Allocator	(void) {}
+	virtual							~Allocator	(void) {}
+
+	virtual de::MovePtr<Allocation>	allocate	(const VkMemoryAllocateInfo& allocInfo, VkDeviceSize alignment) = 0;
+	virtual de::MovePtr<Allocation>	allocate	(const VkMemoryRequirements& memRequirements, MemoryRequirement requirement) = 0;
+};
+
+//! Allocator that backs every allocation with its own VkDeviceMemory
+class SimpleAllocator : public Allocator
+{
+public:
+											SimpleAllocator	(const DeviceInterface& vk, VkDevice device, const VkPhysicalDeviceMemoryProperties& deviceMemProps);
+
+	de::MovePtr<Allocation>					allocate		(const VkMemoryAllocateInfo& allocInfo, VkDeviceSize alignment);
+	de::MovePtr<Allocation>					allocate		(const VkMemoryRequirements& memRequirements, MemoryRequirement requirement);
+
+private:
+	const DeviceInterface&					m_vk;
+	const VkDevice							m_device;
+	const VkPhysicalDeviceMemoryProperties	m_memProps;
+};
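+
+/* Usage sketch (illustrative only): assuming a DeviceInterface 'vkd', a
+ * VkDevice 'device', a VkBuffer 'buffer' and an Allocator 'allocator'
+ * (for example the SimpleAllocator declared above), a host-visible
+ * allocation could be created, bound and written roughly like this:
+ *
+ *   VkMemoryRequirements reqs;
+ *   vkd.getBufferMemoryRequirements(device, buffer, &reqs);
+ *   de::MovePtr<Allocation> alloc = allocator.allocate(reqs, MemoryRequirement::HostVisible);
+ *
+ *   // getMemory() may refer to a sub-allocation, so the offset must be passed.
+ *   VK_CHECK(vkd.bindBufferMemory(device, buffer, alloc->getMemory(), alloc->getOffset()));
+ *
+ *   // getHostPtr() is already offset-adjusted; no extra offset is needed here.
+ *   deMemset(alloc->getHostPtr(), 0, (size_t)reqs.size);
+ *   flushMappedMemoryRange(vkd, device, alloc->getMemory(), alloc->getOffset(), reqs.size);
+ */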
+
+void	flushMappedMemoryRange		(const DeviceInterface& vkd, VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size);
+void	invalidateMappedMemoryRange	(const DeviceInterface& vkd, VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size);
+
+} // vk
+
+#endif // _VKMEMUTIL_HPP
diff --git a/external/vulkancts/framework/vulkan/vkNullDriver.cpp b/external/vulkancts/framework/vulkan/vkNullDriver.cpp
new file mode 100644
index 0000000..9ccfb2c
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkNullDriver.cpp
@@ -0,0 +1,673 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Null (dummy) Vulkan implementation.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkNullDriver.hpp"
+#include "vkPlatform.hpp"
+#include "vkImageUtil.hpp"
+#include "tcuFunctionLibrary.hpp"
+#include "deMemory.h"
+
+#include <stdexcept>
+#include <algorithm>
+
+namespace vk
+{
+
+namespace
+{
+
+using std::vector;
+
+// Memory management
+
+template<typename T>
+void* allocateSystemMem (const VkAllocationCallbacks* pAllocator, VkSystemAllocationScope scope)
+{
+	void* ptr = pAllocator->pfnAllocation(pAllocator->pUserData, sizeof(T), sizeof(void*), scope);
+	if (!ptr)
+		throw std::bad_alloc();
+	return ptr;
+}
+
+void freeSystemMem (const VkAllocationCallbacks* pAllocator, void* mem)
+{
+	pAllocator->pfnFree(pAllocator->pUserData, mem);
+}
+
+template<typename Object, typename Handle, typename Parent, typename CreateInfo>
+Handle allocateHandle (Parent parent, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	Object* obj = DE_NULL;
+
+	if (pAllocator)
+	{
+		void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+		try
+		{
+			obj = new (mem) Object(parent, pCreateInfo);
+			DE_ASSERT(obj == mem);
+		}
+		catch (...)
+		{
+			pAllocator->pfnFree(pAllocator->pUserData, mem);
+			throw;
+		}
+	}
+	else
+		obj = new Object(parent, pCreateInfo);
+
+	return reinterpret_cast<Handle>(obj);
+}
+
+template<typename Object, typename Handle, typename CreateInfo>
+Handle allocateHandle (const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	Object* obj = DE_NULL;
+
+	if (pAllocator)
+	{
+		void* mem = allocateSystemMem<Object>(pAllocator, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+		try
+		{
+			obj = new (mem) Object(pCreateInfo);
+			DE_ASSERT(obj == mem);
+		}
+		catch (...)
+		{
+			pAllocator->pfnFree(pAllocator->pUserData, mem);
+			throw;
+		}
+	}
+	else
+		obj = new Object(pCreateInfo);
+
+	return reinterpret_cast<Handle>(obj);
+}
+
+template<typename Object, typename Handle>
+void freeHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
+{
+	Object* obj = reinterpret_cast<Object*>(handle);
+
+	if (pAllocator)
+	{
+		obj->~Object();
+		freeSystemMem(pAllocator, reinterpret_cast<void*>(obj));
+	}
+	else
+		delete obj;
+}
+
+template<typename Object, typename Handle, typename CreateInfo>
+Handle allocateNonDispHandle (VkDevice device, const CreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	Object* const	obj		= allocateHandle<Object, Object*>(device, pCreateInfo, pAllocator);
+	return Handle((deUint64)(deUintptr)obj);
+}
+
+template<typename Object, typename Handle>
+void freeNonDispHandle (Handle handle, const VkAllocationCallbacks* pAllocator)
+{
+	freeHandle<Object>(reinterpret_cast<Object*>((deUintptr)handle.getInternal()), pAllocator);
+}
+
+// Object definitions
+
+#define VK_NULL_RETURN(STMT)					\
+	do {										\
+		try {									\
+			STMT;								\
+			return VK_SUCCESS;					\
+		} catch (const std::bad_alloc&) {		\
+			return VK_ERROR_OUT_OF_HOST_MEMORY;	\
+		} catch (VkResult res) {				\
+			return res;							\
+		}										\
+	} while (deGetFalse())
+
+// \todo [2015-07-14 pyry] Check FUNC type by checkedCastToPtr<T>() or similar
+#define VK_NULL_FUNC_ENTRY(NAME, FUNC)	{ #NAME, (deFunctionPtr)FUNC }
+
+#define VK_NULL_DEFINE_DEVICE_OBJ(NAME)				\
+struct NAME											\
+{													\
+	NAME (VkDevice, const Vk##NAME##CreateInfo*) {}	\
+}
+
+VK_NULL_DEFINE_DEVICE_OBJ(Fence);
+VK_NULL_DEFINE_DEVICE_OBJ(Semaphore);
+VK_NULL_DEFINE_DEVICE_OBJ(Event);
+VK_NULL_DEFINE_DEVICE_OBJ(QueryPool);
+VK_NULL_DEFINE_DEVICE_OBJ(BufferView);
+VK_NULL_DEFINE_DEVICE_OBJ(ImageView);
+VK_NULL_DEFINE_DEVICE_OBJ(ShaderModule);
+VK_NULL_DEFINE_DEVICE_OBJ(PipelineCache);
+VK_NULL_DEFINE_DEVICE_OBJ(PipelineLayout);
+VK_NULL_DEFINE_DEVICE_OBJ(RenderPass);
+VK_NULL_DEFINE_DEVICE_OBJ(DescriptorSetLayout);
+VK_NULL_DEFINE_DEVICE_OBJ(Sampler);
+VK_NULL_DEFINE_DEVICE_OBJ(Framebuffer);
+VK_NULL_DEFINE_DEVICE_OBJ(CommandPool);
+
+class Instance
+{
+public:
+										Instance		(const VkInstanceCreateInfo* instanceInfo);
+										~Instance		(void) {}
+
+	PFN_vkVoidFunction					getProcAddr		(const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }
+
+private:
+	const tcu::StaticFunctionLibrary	m_functions;
+};
+
+class Device
+{
+public:
+										Device			(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* deviceInfo);
+										~Device			(void) {}
+
+	PFN_vkVoidFunction					getProcAddr		(const char* name) const { return (PFN_vkVoidFunction)m_functions.getFunction(name); }
+
+private:
+	const tcu::StaticFunctionLibrary	m_functions;
+};
+
+class Pipeline
+{
+public:
+	Pipeline (VkDevice, const VkGraphicsPipelineCreateInfo*) {}
+	Pipeline (VkDevice, const VkComputePipelineCreateInfo*) {}
+};
+
+void* allocateHeap (const VkMemoryAllocateInfo* pAllocInfo)
+{
+	// \todo [2015-12-03 pyry] Alignment requirements?
+	// \todo [2015-12-03 pyry] Empty allocations okay?
+	if (pAllocInfo->allocationSize > 0)
+	{
+		void* const heapPtr = deMalloc((size_t)pAllocInfo->allocationSize);
+		if (!heapPtr)
+			throw std::bad_alloc();
+		return heapPtr;
+	}
+	else
+		return DE_NULL;
+}
+
+void freeHeap (void* ptr)
+{
+	deFree(ptr);
+}
+
+class DeviceMemory
+{
+public:
+						DeviceMemory	(VkDevice, const VkMemoryAllocateInfo* pAllocInfo)
+							: m_memory(allocateHeap(pAllocInfo))
+						{
+						}
+						~DeviceMemory	(void)
+						{
+							freeHeap(m_memory);
+						}
+
+	void*				getPtr			(void) const { return m_memory; }
+
+private:
+	void* const			m_memory;
+};
+
+class Buffer
+{
+public:
+						Buffer		(VkDevice, const VkBufferCreateInfo* pCreateInfo)
+							: m_size(pCreateInfo->size)
+						{}
+
+	VkDeviceSize		getSize		(void) const { return m_size;	}
+
+private:
+	const VkDeviceSize	m_size;
+};
+
+class Image
+{
+public:
+								Image			(VkDevice, const VkImageCreateInfo* pCreateInfo)
+									: m_imageType	(pCreateInfo->imageType)
+									, m_format		(pCreateInfo->format)
+									, m_extent		(pCreateInfo->extent)
+									, m_samples		(pCreateInfo->samples)
+								{}
+
+	VkImageType					getImageType	(void) const { return m_imageType;	}
+	VkFormat					getFormat		(void) const { return m_format;		}
+	VkExtent3D					getExtent		(void) const { return m_extent;		}
+	VkSampleCountFlagBits		getSamples		(void) const { return m_samples;	}
+
+private:
+	const VkImageType			m_imageType;
+	const VkFormat				m_format;
+	const VkExtent3D			m_extent;
+	const VkSampleCountFlagBits	m_samples;
+};
+
+class CommandBuffer
+{
+public:
+						CommandBuffer(VkDevice, VkCommandPool, VkCommandBufferLevel)
+						{}
+};
+
+class DescriptorSet
+{
+public:
+	DescriptorSet (VkDevice, VkDescriptorPool, VkDescriptorSetLayout) {}
+};
+
+class DescriptorPool
+{
+public:
+										DescriptorPool	(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo)
+											: m_device	(device)
+											, m_flags	(pCreateInfo->flags)
+										{}
+										~DescriptorPool	(void)
+										{
+											reset();
+										}
+
+	VkDescriptorSet						allocate		(VkDescriptorSetLayout setLayout);
+	void								free			(VkDescriptorSet set);
+
+	void								reset			(void);
+
+private:
+	const VkDevice						m_device;
+	const VkDescriptorPoolCreateFlags	m_flags;
+
+	vector<DescriptorSet*>				m_managedSets;
+};
+
+VkDescriptorSet DescriptorPool::allocate (VkDescriptorSetLayout setLayout)
+{
+	DescriptorSet* const	impl	= new DescriptorSet(m_device, VkDescriptorPool(reinterpret_cast<deUintptr>(this)), setLayout);
+
+	try
+	{
+		m_managedSets.push_back(impl);
+	}
+	catch (...)
+	{
+		delete impl;
+		throw;
+	}
+
+	return VkDescriptorSet(reinterpret_cast<deUintptr>(impl));
+}
+
+void DescriptorPool::free (VkDescriptorSet set)
+{
+	DescriptorSet* const	impl	= reinterpret_cast<DescriptorSet*>((deUintptr)set.getInternal());
+
+	DE_ASSERT(m_flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT);
+
+	delete impl;
+
+	for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
+	{
+		if (m_managedSets[ndx] == impl)
+		{
+			std::swap(m_managedSets[ndx], m_managedSets.back());
+			m_managedSets.pop_back();
+			return;
+		}
+	}
+
+	DE_FATAL("VkDescriptorSet not owned by VkDescriptorPool");
+}
+
+void DescriptorPool::reset (void)
+{
+	for (size_t ndx = 0; ndx < m_managedSets.size(); ++ndx)
+		delete m_managedSets[ndx];
+	m_managedSets.clear();
+}
+
+// API implementation
+
+extern "C"
+{
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getInstanceProcAddr (VkInstance instance, const char* pName)
+{
+	return reinterpret_cast<Instance*>(instance)->getProcAddr(pName);
+}
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL getDeviceProcAddr (VkDevice device, const char* pName)
+{
+	return reinterpret_cast<Device*>(device)->getProcAddr(pName);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createGraphicsPipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
+{
+	deUint32 allocNdx;
+	try
+	{
+		for (allocNdx = 0; allocNdx < count; allocNdx++)
+			pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
+
+		return VK_SUCCESS;
+	}
+	catch (const std::bad_alloc&)
+	{
+		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
+			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
+
+		return VK_ERROR_OUT_OF_HOST_MEMORY;
+	}
+	catch (VkResult err)
+	{
+		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
+			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
+
+		return err;
+	}
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createComputePipelines (VkDevice device, VkPipelineCache, deUint32 count, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
+{
+	deUint32 allocNdx;
+	try
+	{
+		for (allocNdx = 0; allocNdx < count; allocNdx++)
+			pPipelines[allocNdx] = allocateNonDispHandle<Pipeline, VkPipeline>(device, pCreateInfos+allocNdx, pAllocator);
+
+		return VK_SUCCESS;
+	}
+	catch (const std::bad_alloc&)
+	{
+		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
+			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
+
+		return VK_ERROR_OUT_OF_HOST_MEMORY;
+	}
+	catch (VkResult err)
+	{
+		for (deUint32 freeNdx = 0; freeNdx < allocNdx; freeNdx++)
+			freeNonDispHandle<Pipeline, VkPipeline>(pPipelines[freeNdx], pAllocator);
+
+		return err;
+	}
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL enumeratePhysicalDevices (VkInstance, deUint32* pPhysicalDeviceCount, VkPhysicalDevice* pDevices)
+{
+	if (pDevices && *pPhysicalDeviceCount >= 1u)
+		*pDevices = reinterpret_cast<VkPhysicalDevice>((void*)(deUintptr)1u);
+
+	*pPhysicalDeviceCount = 1;
+
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceProperties (VkPhysicalDevice, VkPhysicalDeviceProperties* props)
+{
+	deMemset(props, 0, sizeof(VkPhysicalDeviceProperties));
+
+	props->apiVersion		= VK_API_VERSION;
+	props->driverVersion	= 1u;
+	props->deviceType		= VK_PHYSICAL_DEVICE_TYPE_OTHER;
+
+	deMemcpy(props->deviceName, "null", 5);
+
+	// \todo [2015-09-25 pyry] Fill in reasonable limits
+	props->limits.maxTexelBufferElements	= 8096;
+}
+
+VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceQueueFamilyProperties (VkPhysicalDevice, deUint32* count, VkQueueFamilyProperties* props)
+{
+	if (props && *count >= 1u)
+	{
+		deMemset(props, 0, sizeof(VkQueueFamilyProperties));
+
+		props->queueCount			= 1u;
+		props->queueFlags			= VK_QUEUE_GRAPHICS_BIT|VK_QUEUE_COMPUTE_BIT;
+		props->timestampValidBits	= 64;
+	}
+
+	*count = 1u;
+}
+
+VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceMemoryProperties (VkPhysicalDevice, VkPhysicalDeviceMemoryProperties* props)
+{
+	deMemset(props, 0, sizeof(VkPhysicalDeviceMemoryProperties));
+
+	props->memoryTypeCount				= 1u;
+	props->memoryTypes[0].heapIndex		= 0u;
+	props->memoryTypes[0].propertyFlags	= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
+
+	props->memoryHeapCount				= 1u;
+	props->memoryHeaps[0].size			= 1ull << 31;
+	props->memoryHeaps[0].flags			= 0u;
+}
+
+VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFormatProperties (VkPhysicalDevice, VkFormat, VkFormatProperties* pFormatProperties)
+{
+	const VkFormatFeatureFlags	allFeatures	= VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT
+											| VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT
+											| VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT
+											| VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT
+											| VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT
+											| VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT
+											| VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT
+											| VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT
+											| VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT
+											| VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT
+											| VK_FORMAT_FEATURE_BLIT_SRC_BIT
+											| VK_FORMAT_FEATURE_BLIT_DST_BIT;
+
+	pFormatProperties->linearTilingFeatures		= allFeatures;
+	pFormatProperties->optimalTilingFeatures	= allFeatures;
+	pFormatProperties->bufferFeatures			= allFeatures;
+}
+
+VKAPI_ATTR void VKAPI_CALL getBufferMemoryRequirements (VkDevice, VkBuffer bufferHandle, VkMemoryRequirements* requirements)
+{
+	const Buffer*	buffer	= reinterpret_cast<const Buffer*>(bufferHandle.getInternal());
+
+	requirements->memoryTypeBits	= 1u;
+	requirements->size				= buffer->getSize();
+	requirements->alignment			= (VkDeviceSize)1u;
+}
+
+VkDeviceSize getPackedImageDataSize (VkFormat format, VkExtent3D extent, VkSampleCountFlagBits samples)
+{
+	return (VkDeviceSize)getPixelSize(mapVkFormat(format))
+			* (VkDeviceSize)extent.width
+			* (VkDeviceSize)extent.height
+			* (VkDeviceSize)extent.depth
+			* (VkDeviceSize)samples;
+}
+
+VkDeviceSize getCompressedImageDataSize (VkFormat format, VkExtent3D extent)
+{
+	try
+	{
+		const tcu::CompressedTexFormat	tcuFormat		= mapVkCompressedFormat(format);
+		const size_t					blockSize		= tcu::getBlockSize(tcuFormat);
+		const tcu::IVec3				blockPixelSize	= tcu::getBlockPixelSize(tcuFormat);
+		const int						numBlocksX		= deDivRoundUp32((int)extent.width, blockPixelSize.x());
+		const int						numBlocksY		= deDivRoundUp32((int)extent.height, blockPixelSize.y());
+		const int						numBlocksZ		= deDivRoundUp32((int)extent.depth, blockPixelSize.z());
+
+		return blockSize*numBlocksX*numBlocksY*numBlocksZ;
+	}
+	catch (...)
+	{
+		return 0; // Unsupported compressed format
+	}
+}
+
+VKAPI_ATTR void VKAPI_CALL getImageMemoryRequirements (VkDevice, VkImage imageHandle, VkMemoryRequirements* requirements)
+{
+	const Image*	image	= reinterpret_cast<const Image*>(imageHandle.getInternal());
+
+	requirements->memoryTypeBits	= 1u;
+	requirements->alignment			= 16u;
+
+	if (isCompressedFormat(image->getFormat()))
+		requirements->size = getCompressedImageDataSize(image->getFormat(), image->getExtent());
+	else
+		requirements->size = getPackedImageDataSize(image->getFormat(), image->getExtent(), image->getSamples());
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL mapMemory (VkDevice, VkDeviceMemory memHandle, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData)
+{
+	const DeviceMemory*	memory	= reinterpret_cast<DeviceMemory*>(memHandle.getInternal());
+
+	DE_UNREF(size);
+	DE_UNREF(flags);
+
+	*ppData = (deUint8*)memory->getPtr() + offset;
+
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL allocateDescriptorSets (VkDevice, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets)
+{
+	DescriptorPool* const	poolImpl	= reinterpret_cast<DescriptorPool*>((deUintptr)pAllocateInfo->descriptorPool.getInternal());
+
+	for (deUint32 ndx = 0; ndx < pAllocateInfo->descriptorSetCount; ++ndx)
+	{
+		try
+		{
+			pDescriptorSets[ndx] = poolImpl->allocate(pAllocateInfo->pSetLayouts[ndx]);
+		}
+		catch (const std::bad_alloc&)
+		{
+			for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
+				delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());
+
+			return VK_ERROR_OUT_OF_HOST_MEMORY;
+		}
+		catch (VkResult res)
+		{
+			for (deUint32 freeNdx = 0; freeNdx < ndx; freeNdx++)
+				delete reinterpret_cast<DescriptorSet*>((deUintptr)pDescriptorSets[freeNdx].getInternal());
+
+			return res;
+		}
+	}
+
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR void VKAPI_CALL freeDescriptorSets (VkDevice, VkDescriptorPool descriptorPool, deUint32 count, const VkDescriptorSet* pDescriptorSets)
+{
+	DescriptorPool* const	poolImpl	= reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());
+
+	for (deUint32 ndx = 0; ndx < count; ++ndx)
+		poolImpl->free(pDescriptorSets[ndx]);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL resetDescriptorPool (VkDevice, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags)
+{
+	DescriptorPool* const	poolImpl	= reinterpret_cast<DescriptorPool*>((deUintptr)descriptorPool.getInternal());
+
+	poolImpl->reset();
+
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL allocateCommandBuffers (VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers)
+{
+	if (pAllocateInfo && pCommandBuffers)
+	{
+		for (deUint32 ndx = 0; ndx < pAllocateInfo->commandBufferCount; ++ndx)
+		{
+			pCommandBuffers[ndx] = reinterpret_cast<VkCommandBuffer>(new CommandBuffer(device, pAllocateInfo->commandPool, pAllocateInfo->level));
+		}
+	}
+
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR void VKAPI_CALL freeCommandBuffers (VkDevice device, VkCommandPool commandPool, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers)
+{
+	DE_UNREF(device);
+	DE_UNREF(commandPool);
+
+	for (deUint32 ndx = 0; ndx < commandBufferCount; ++ndx)
+		delete reinterpret_cast<CommandBuffer*>(pCommandBuffers[ndx]);
+}
+
+#include "vkNullDriverImpl.inl"
+
+} // extern "C"
+
+Instance::Instance (const VkInstanceCreateInfo*)
+	: m_functions(s_instanceFunctions, DE_LENGTH_OF_ARRAY(s_instanceFunctions))
+{
+}
+
+Device::Device (VkPhysicalDevice, const VkDeviceCreateInfo*)
+	: m_functions(s_deviceFunctions, DE_LENGTH_OF_ARRAY(s_deviceFunctions))
+{
+}
+
+class NullDriverLibrary : public Library
+{
+public:
+										NullDriverLibrary (void)
+											: m_library	(s_platformFunctions, DE_LENGTH_OF_ARRAY(s_platformFunctions))
+											, m_driver	(m_library)
+										{}
+
+	const PlatformInterface&			getPlatformInterface	(void) const { return m_driver;	}
+
+private:
+	const tcu::StaticFunctionLibrary	m_library;
+	const PlatformDriver				m_driver;
+};
+
+} // anonymous
+
+Library* createNullDriver (void)
+{
+	return new NullDriverLibrary();
+}
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkNullDriver.hpp b/external/vulkancts/framework/vulkan/vkNullDriver.hpp
new file mode 100644
index 0000000..3d6460d
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkNullDriver.hpp
@@ -0,0 +1,48 @@
+#ifndef _VKNULLDRIVER_HPP
+#define _VKNULLDRIVER_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Null (dummy) Vulkan implementation.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+
+namespace vk
+{
+
+class Library;
+
+Library*	createNullDriver	(void);
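+
+/* Illustrative usage sketch (assumes the Library and PlatformInterface types
+ * from vkPlatform.hpp and de::UniquePtr; only createNullDriver is declared
+ * in this header):
+ *
+ *   de::UniquePtr<Library>   library (createNullDriver());
+ *   const PlatformInterface& vkp     = library->getPlatformInterface();
+ *   // vkp can now create dummy instances/devices without a real Vulkan driver.
+ */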
+
+} // vk
+
+#endif // _VKNULLDRIVER_HPP
diff --git a/external/vulkancts/framework/vulkan/vkNullDriverImpl.inl b/external/vulkancts/framework/vulkan/vkNullDriverImpl.inl
new file mode 100644
index 0000000..722feea
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkNullDriverImpl.inl
@@ -0,0 +1,1063 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+VKAPI_ATTR VkResult VKAPI_CALL createInstance (const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pInstance = allocateHandle<Instance, VkInstance>(pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createDevice (VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pDevice = allocateHandle<Device, VkDevice>(physicalDevice, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL allocateMemory (VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pMemory = allocateNonDispHandle<DeviceMemory, VkDeviceMemory>(device, pAllocateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createFence (VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pFence = allocateNonDispHandle<Fence, VkFence>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createSemaphore (VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pSemaphore = allocateNonDispHandle<Semaphore, VkSemaphore>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createEvent (VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pEvent = allocateNonDispHandle<Event, VkEvent>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createQueryPool (VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pQueryPool = allocateNonDispHandle<QueryPool, VkQueryPool>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createBuffer (VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pBuffer = allocateNonDispHandle<Buffer, VkBuffer>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createBufferView (VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pView = allocateNonDispHandle<BufferView, VkBufferView>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createImage (VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pImage = allocateNonDispHandle<Image, VkImage>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createImageView (VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pView = allocateNonDispHandle<ImageView, VkImageView>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createShaderModule (VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pShaderModule = allocateNonDispHandle<ShaderModule, VkShaderModule>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createPipelineCache (VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pPipelineCache = allocateNonDispHandle<PipelineCache, VkPipelineCache>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createPipelineLayout (VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pPipelineLayout = allocateNonDispHandle<PipelineLayout, VkPipelineLayout>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createSampler (VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pSampler = allocateNonDispHandle<Sampler, VkSampler>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createDescriptorSetLayout (VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pSetLayout = allocateNonDispHandle<DescriptorSetLayout, VkDescriptorSetLayout>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createDescriptorPool (VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pDescriptorPool = allocateNonDispHandle<DescriptorPool, VkDescriptorPool>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createFramebuffer (VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pFramebuffer = allocateNonDispHandle<Framebuffer, VkFramebuffer>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createRenderPass (VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pRenderPass = allocateNonDispHandle<RenderPass, VkRenderPass>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL createCommandPool (VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool)
+{
+	DE_UNREF(pAllocator);
+	VK_NULL_RETURN((*pCommandPool = allocateNonDispHandle<CommandPool, VkCommandPool>(device, pCreateInfo, pAllocator)));
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyInstance (VkInstance instance, const VkAllocationCallbacks* pAllocator)
+{
+	freeHandle<Instance, VkInstance>(instance, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyDevice (VkDevice device, const VkAllocationCallbacks* pAllocator)
+{
+	freeHandle<Device, VkDevice>(device, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL freeMemory (VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<DeviceMemory, VkDeviceMemory>(memory, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyFence (VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<Fence, VkFence>(fence, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroySemaphore (VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<Semaphore, VkSemaphore>(semaphore, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyEvent (VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<Event, VkEvent>(event, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyQueryPool (VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<QueryPool, VkQueryPool>(queryPool, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyBuffer (VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<Buffer, VkBuffer>(buffer, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyBufferView (VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<BufferView, VkBufferView>(bufferView, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyImage (VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<Image, VkImage>(image, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyImageView (VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<ImageView, VkImageView>(imageView, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyShaderModule (VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<ShaderModule, VkShaderModule>(shaderModule, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyPipelineCache (VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<PipelineCache, VkPipelineCache>(pipelineCache, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyPipeline (VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<Pipeline, VkPipeline>(pipeline, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyPipelineLayout (VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<PipelineLayout, VkPipelineLayout>(pipelineLayout, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroySampler (VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<Sampler, VkSampler>(sampler, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyDescriptorSetLayout (VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<DescriptorSetLayout, VkDescriptorSetLayout>(descriptorSetLayout, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyDescriptorPool (VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<DescriptorPool, VkDescriptorPool>(descriptorPool, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyFramebuffer (VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<Framebuffer, VkFramebuffer>(framebuffer, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyRenderPass (VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<RenderPass, VkRenderPass>(renderPass, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL destroyCommandPool (VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator)
+{
+	DE_UNREF(device);
+	freeNonDispHandle<CommandPool, VkCommandPool>(commandPool, pAllocator);
+}
+
+VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceFeatures (VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures)
+{
+	DE_UNREF(physicalDevice);
+	DE_UNREF(pFeatures);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL getPhysicalDeviceImageFormatProperties (VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties)
+{
+	DE_UNREF(physicalDevice);
+	DE_UNREF(format);
+	DE_UNREF(type);
+	DE_UNREF(tiling);
+	DE_UNREF(usage);
+	DE_UNREF(flags);
+	DE_UNREF(pImageFormatProperties);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL enumerateInstanceExtensionProperties (const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
+{
+	DE_UNREF(pLayerName);
+	DE_UNREF(pPropertyCount);
+	DE_UNREF(pProperties);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL enumerateDeviceExtensionProperties (VkPhysicalDevice physicalDevice, const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties)
+{
+	DE_UNREF(physicalDevice);
+	DE_UNREF(pLayerName);
+	DE_UNREF(pPropertyCount);
+	DE_UNREF(pProperties);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL enumerateInstanceLayerProperties (deUint32* pPropertyCount, VkLayerProperties* pProperties)
+{
+	DE_UNREF(pPropertyCount);
+	DE_UNREF(pProperties);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL enumerateDeviceLayerProperties (VkPhysicalDevice physicalDevice, deUint32* pPropertyCount, VkLayerProperties* pProperties)
+{
+	DE_UNREF(physicalDevice);
+	DE_UNREF(pPropertyCount);
+	DE_UNREF(pProperties);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR void VKAPI_CALL getDeviceQueue (VkDevice device, deUint32 queueFamilyIndex, deUint32 queueIndex, VkQueue* pQueue)
+{
+	DE_UNREF(device);
+	DE_UNREF(queueFamilyIndex);
+	DE_UNREF(queueIndex);
+	DE_UNREF(pQueue);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL queueSubmit (VkQueue queue, deUint32 submitCount, const VkSubmitInfo* pSubmits, VkFence fence)
+{
+	DE_UNREF(queue);
+	DE_UNREF(submitCount);
+	DE_UNREF(pSubmits);
+	DE_UNREF(fence);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL queueWaitIdle (VkQueue queue)
+{
+	DE_UNREF(queue);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL deviceWaitIdle (VkDevice device)
+{
+	DE_UNREF(device);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR void VKAPI_CALL unmapMemory (VkDevice device, VkDeviceMemory memory)
+{
+	DE_UNREF(device);
+	DE_UNREF(memory);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL flushMappedMemoryRanges (VkDevice device, deUint32 memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges)
+{
+	DE_UNREF(device);
+	DE_UNREF(memoryRangeCount);
+	DE_UNREF(pMemoryRanges);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL invalidateMappedMemoryRanges (VkDevice device, deUint32 memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges)
+{
+	DE_UNREF(device);
+	DE_UNREF(memoryRangeCount);
+	DE_UNREF(pMemoryRanges);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR void VKAPI_CALL getDeviceMemoryCommitment (VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes)
+{
+	DE_UNREF(device);
+	DE_UNREF(memory);
+	DE_UNREF(pCommittedMemoryInBytes);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL bindBufferMemory (VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset)
+{
+	DE_UNREF(device);
+	DE_UNREF(buffer);
+	DE_UNREF(memory);
+	DE_UNREF(memoryOffset);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL bindImageMemory (VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset)
+{
+	DE_UNREF(device);
+	DE_UNREF(image);
+	DE_UNREF(memory);
+	DE_UNREF(memoryOffset);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR void VKAPI_CALL getImageSparseMemoryRequirements (VkDevice device, VkImage image, deUint32* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements)
+{
+	DE_UNREF(device);
+	DE_UNREF(image);
+	DE_UNREF(pSparseMemoryRequirementCount);
+	DE_UNREF(pSparseMemoryRequirements);
+}
+
+VKAPI_ATTR void VKAPI_CALL getPhysicalDeviceSparseImageFormatProperties (VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, deUint32* pPropertyCount, VkSparseImageFormatProperties* pProperties)
+{
+	DE_UNREF(physicalDevice);
+	DE_UNREF(format);
+	DE_UNREF(type);
+	DE_UNREF(samples);
+	DE_UNREF(usage);
+	DE_UNREF(tiling);
+	DE_UNREF(pPropertyCount);
+	DE_UNREF(pProperties);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL queueBindSparse (VkQueue queue, deUint32 bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence)
+{
+	DE_UNREF(queue);
+	DE_UNREF(bindInfoCount);
+	DE_UNREF(pBindInfo);
+	DE_UNREF(fence);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL resetFences (VkDevice device, deUint32 fenceCount, const VkFence* pFences)
+{
+	DE_UNREF(device);
+	DE_UNREF(fenceCount);
+	DE_UNREF(pFences);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL getFenceStatus (VkDevice device, VkFence fence)
+{
+	DE_UNREF(device);
+	DE_UNREF(fence);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL waitForFences (VkDevice device, deUint32 fenceCount, const VkFence* pFences, VkBool32 waitAll, deUint64 timeout)
+{
+	DE_UNREF(device);
+	DE_UNREF(fenceCount);
+	DE_UNREF(pFences);
+	DE_UNREF(waitAll);
+	DE_UNREF(timeout);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL getEventStatus (VkDevice device, VkEvent event)
+{
+	DE_UNREF(device);
+	DE_UNREF(event);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL setEvent (VkDevice device, VkEvent event)
+{
+	DE_UNREF(device);
+	DE_UNREF(event);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL resetEvent (VkDevice device, VkEvent event)
+{
+	DE_UNREF(device);
+	DE_UNREF(event);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL getQueryPoolResults (VkDevice device, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount, deUintptr dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags)
+{
+	DE_UNREF(device);
+	DE_UNREF(queryPool);
+	DE_UNREF(firstQuery);
+	DE_UNREF(queryCount);
+	DE_UNREF(dataSize);
+	DE_UNREF(pData);
+	DE_UNREF(stride);
+	DE_UNREF(flags);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR void VKAPI_CALL getImageSubresourceLayout (VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout)
+{
+	DE_UNREF(device);
+	DE_UNREF(image);
+	DE_UNREF(pSubresource);
+	DE_UNREF(pLayout);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL getPipelineCacheData (VkDevice device, VkPipelineCache pipelineCache, deUintptr* pDataSize, void* pData)
+{
+	DE_UNREF(device);
+	DE_UNREF(pipelineCache);
+	DE_UNREF(pDataSize);
+	DE_UNREF(pData);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL mergePipelineCaches (VkDevice device, VkPipelineCache dstCache, deUint32 srcCacheCount, const VkPipelineCache* pSrcCaches)
+{
+	DE_UNREF(device);
+	DE_UNREF(dstCache);
+	DE_UNREF(srcCacheCount);
+	DE_UNREF(pSrcCaches);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR void VKAPI_CALL updateDescriptorSets (VkDevice device, deUint32 descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, deUint32 descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies)
+{
+	DE_UNREF(device);
+	DE_UNREF(descriptorWriteCount);
+	DE_UNREF(pDescriptorWrites);
+	DE_UNREF(descriptorCopyCount);
+	DE_UNREF(pDescriptorCopies);
+}
+
+VKAPI_ATTR void VKAPI_CALL getRenderAreaGranularity (VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity)
+{
+	DE_UNREF(device);
+	DE_UNREF(renderPass);
+	DE_UNREF(pGranularity);
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL resetCommandPool (VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags)
+{
+	DE_UNREF(device);
+	DE_UNREF(commandPool);
+	DE_UNREF(flags);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL beginCommandBuffer (VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(pBeginInfo);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL endCommandBuffer (VkCommandBuffer commandBuffer)
+{
+	DE_UNREF(commandBuffer);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR VkResult VKAPI_CALL resetCommandBuffer (VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(flags);
+	return VK_SUCCESS;
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdBindPipeline (VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(pipelineBindPoint);
+	DE_UNREF(pipeline);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdSetViewport (VkCommandBuffer commandBuffer, deUint32 firstViewport, deUint32 viewportCount, const VkViewport* pViewports)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(firstViewport);
+	DE_UNREF(viewportCount);
+	DE_UNREF(pViewports);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdSetScissor (VkCommandBuffer commandBuffer, deUint32 firstScissor, deUint32 scissorCount, const VkRect2D* pScissors)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(firstScissor);
+	DE_UNREF(scissorCount);
+	DE_UNREF(pScissors);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdSetLineWidth (VkCommandBuffer commandBuffer, float lineWidth)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(lineWidth);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdSetDepthBias (VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(depthBiasConstantFactor);
+	DE_UNREF(depthBiasClamp);
+	DE_UNREF(depthBiasSlopeFactor);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdSetBlendConstants (VkCommandBuffer commandBuffer, const float blendConstants[4])
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(blendConstants);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdSetDepthBounds (VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(minDepthBounds);
+	DE_UNREF(maxDepthBounds);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdSetStencilCompareMask (VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 compareMask)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(faceMask);
+	DE_UNREF(compareMask);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdSetStencilWriteMask (VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 writeMask)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(faceMask);
+	DE_UNREF(writeMask);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdSetStencilReference (VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 reference)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(faceMask);
+	DE_UNREF(reference);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdBindDescriptorSets (VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, deUint32 firstSet, deUint32 descriptorSetCount, const VkDescriptorSet* pDescriptorSets, deUint32 dynamicOffsetCount, const deUint32* pDynamicOffsets)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(pipelineBindPoint);
+	DE_UNREF(layout);
+	DE_UNREF(firstSet);
+	DE_UNREF(descriptorSetCount);
+	DE_UNREF(pDescriptorSets);
+	DE_UNREF(dynamicOffsetCount);
+	DE_UNREF(pDynamicOffsets);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdBindIndexBuffer (VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(buffer);
+	DE_UNREF(offset);
+	DE_UNREF(indexType);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdBindVertexBuffers (VkCommandBuffer commandBuffer, deUint32 firstBinding, deUint32 bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(firstBinding);
+	DE_UNREF(bindingCount);
+	DE_UNREF(pBuffers);
+	DE_UNREF(pOffsets);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdDraw (VkCommandBuffer commandBuffer, deUint32 vertexCount, deUint32 instanceCount, deUint32 firstVertex, deUint32 firstInstance)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(vertexCount);
+	DE_UNREF(instanceCount);
+	DE_UNREF(firstVertex);
+	DE_UNREF(firstInstance);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdDrawIndexed (VkCommandBuffer commandBuffer, deUint32 indexCount, deUint32 instanceCount, deUint32 firstIndex, deInt32 vertexOffset, deUint32 firstInstance)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(indexCount);
+	DE_UNREF(instanceCount);
+	DE_UNREF(firstIndex);
+	DE_UNREF(vertexOffset);
+	DE_UNREF(firstInstance);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdDrawIndirect (VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, deUint32 drawCount, deUint32 stride)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(buffer);
+	DE_UNREF(offset);
+	DE_UNREF(drawCount);
+	DE_UNREF(stride);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdDrawIndexedIndirect (VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, deUint32 drawCount, deUint32 stride)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(buffer);
+	DE_UNREF(offset);
+	DE_UNREF(drawCount);
+	DE_UNREF(stride);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdDispatch (VkCommandBuffer commandBuffer, deUint32 x, deUint32 y, deUint32 z)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(x);
+	DE_UNREF(y);
+	DE_UNREF(z);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdDispatchIndirect (VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(buffer);
+	DE_UNREF(offset);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdCopyBuffer (VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, deUint32 regionCount, const VkBufferCopy* pRegions)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(srcBuffer);
+	DE_UNREF(dstBuffer);
+	DE_UNREF(regionCount);
+	DE_UNREF(pRegions);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdCopyImage (VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageCopy* pRegions)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(srcImage);
+	DE_UNREF(srcImageLayout);
+	DE_UNREF(dstImage);
+	DE_UNREF(dstImageLayout);
+	DE_UNREF(regionCount);
+	DE_UNREF(pRegions);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdBlitImage (VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageBlit* pRegions, VkFilter filter)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(srcImage);
+	DE_UNREF(srcImageLayout);
+	DE_UNREF(dstImage);
+	DE_UNREF(dstImageLayout);
+	DE_UNREF(regionCount);
+	DE_UNREF(pRegions);
+	DE_UNREF(filter);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdCopyBufferToImage (VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkBufferImageCopy* pRegions)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(srcBuffer);
+	DE_UNREF(dstImage);
+	DE_UNREF(dstImageLayout);
+	DE_UNREF(regionCount);
+	DE_UNREF(pRegions);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdCopyImageToBuffer (VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, deUint32 regionCount, const VkBufferImageCopy* pRegions)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(srcImage);
+	DE_UNREF(srcImageLayout);
+	DE_UNREF(dstBuffer);
+	DE_UNREF(regionCount);
+	DE_UNREF(pRegions);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdUpdateBuffer (VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const deUint32* pData)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(dstBuffer);
+	DE_UNREF(dstOffset);
+	DE_UNREF(dataSize);
+	DE_UNREF(pData);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdFillBuffer (VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, deUint32 data)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(dstBuffer);
+	DE_UNREF(dstOffset);
+	DE_UNREF(size);
+	DE_UNREF(data);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdClearColorImage (VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, deUint32 rangeCount, const VkImageSubresourceRange* pRanges)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(image);
+	DE_UNREF(imageLayout);
+	DE_UNREF(pColor);
+	DE_UNREF(rangeCount);
+	DE_UNREF(pRanges);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdClearDepthStencilImage (VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, deUint32 rangeCount, const VkImageSubresourceRange* pRanges)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(image);
+	DE_UNREF(imageLayout);
+	DE_UNREF(pDepthStencil);
+	DE_UNREF(rangeCount);
+	DE_UNREF(pRanges);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdClearAttachments (VkCommandBuffer commandBuffer, deUint32 attachmentCount, const VkClearAttachment* pAttachments, deUint32 rectCount, const VkClearRect* pRects)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(attachmentCount);
+	DE_UNREF(pAttachments);
+	DE_UNREF(rectCount);
+	DE_UNREF(pRects);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdResolveImage (VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageResolve* pRegions)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(srcImage);
+	DE_UNREF(srcImageLayout);
+	DE_UNREF(dstImage);
+	DE_UNREF(dstImageLayout);
+	DE_UNREF(regionCount);
+	DE_UNREF(pRegions);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdSetEvent (VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(event);
+	DE_UNREF(stageMask);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdResetEvent (VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(event);
+	DE_UNREF(stageMask);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdWaitEvents (VkCommandBuffer commandBuffer, deUint32 eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, deUint32 memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, deUint32 bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, deUint32 imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(eventCount);
+	DE_UNREF(pEvents);
+	DE_UNREF(srcStageMask);
+	DE_UNREF(dstStageMask);
+	DE_UNREF(memoryBarrierCount);
+	DE_UNREF(pMemoryBarriers);
+	DE_UNREF(bufferMemoryBarrierCount);
+	DE_UNREF(pBufferMemoryBarriers);
+	DE_UNREF(imageMemoryBarrierCount);
+	DE_UNREF(pImageMemoryBarriers);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdPipelineBarrier (VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, deUint32 memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, deUint32 bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, deUint32 imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(srcStageMask);
+	DE_UNREF(dstStageMask);
+	DE_UNREF(dependencyFlags);
+	DE_UNREF(memoryBarrierCount);
+	DE_UNREF(pMemoryBarriers);
+	DE_UNREF(bufferMemoryBarrierCount);
+	DE_UNREF(pBufferMemoryBarriers);
+	DE_UNREF(imageMemoryBarrierCount);
+	DE_UNREF(pImageMemoryBarriers);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdBeginQuery (VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 query, VkQueryControlFlags flags)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(queryPool);
+	DE_UNREF(query);
+	DE_UNREF(flags);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdEndQuery (VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 query)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(queryPool);
+	DE_UNREF(query);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdResetQueryPool (VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(queryPool);
+	DE_UNREF(firstQuery);
+	DE_UNREF(queryCount);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdWriteTimestamp (VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, deUint32 query)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(pipelineStage);
+	DE_UNREF(queryPool);
+	DE_UNREF(query);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdCopyQueryPoolResults (VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(queryPool);
+	DE_UNREF(firstQuery);
+	DE_UNREF(queryCount);
+	DE_UNREF(dstBuffer);
+	DE_UNREF(dstOffset);
+	DE_UNREF(stride);
+	DE_UNREF(flags);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdPushConstants (VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, deUint32 offset, deUint32 size, const void* pValues)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(layout);
+	DE_UNREF(stageFlags);
+	DE_UNREF(offset);
+	DE_UNREF(size);
+	DE_UNREF(pValues);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdBeginRenderPass (VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(pRenderPassBegin);
+	DE_UNREF(contents);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdNextSubpass (VkCommandBuffer commandBuffer, VkSubpassContents contents)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(contents);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdEndRenderPass (VkCommandBuffer commandBuffer)
+{
+	DE_UNREF(commandBuffer);
+}
+
+VKAPI_ATTR void VKAPI_CALL cmdExecuteCommands (VkCommandBuffer commandBuffer, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers)
+{
+	DE_UNREF(commandBuffer);
+	DE_UNREF(commandBufferCount);
+	DE_UNREF(pCommandBuffers);
+}
+
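+// Function tables mapping core Vulkan entry point names to the stub implementations above, split by platform-, instance- and device-level scope.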
+static const tcu::StaticFunctionLibrary::Entry s_platformFunctions[] =
+{
+	VK_NULL_FUNC_ENTRY(vkCreateInstance,						createInstance),
+	VK_NULL_FUNC_ENTRY(vkGetInstanceProcAddr,					getInstanceProcAddr),
+	VK_NULL_FUNC_ENTRY(vkEnumerateInstanceExtensionProperties,	enumerateInstanceExtensionProperties),
+	VK_NULL_FUNC_ENTRY(vkEnumerateInstanceLayerProperties,		enumerateInstanceLayerProperties),
+};
+
+static const tcu::StaticFunctionLibrary::Entry s_instanceFunctions[] =
+{
+	VK_NULL_FUNC_ENTRY(vkDestroyInstance,							destroyInstance),
+	VK_NULL_FUNC_ENTRY(vkEnumeratePhysicalDevices,					enumeratePhysicalDevices),
+	VK_NULL_FUNC_ENTRY(vkGetPhysicalDeviceFeatures,					getPhysicalDeviceFeatures),
+	VK_NULL_FUNC_ENTRY(vkGetPhysicalDeviceFormatProperties,			getPhysicalDeviceFormatProperties),
+	VK_NULL_FUNC_ENTRY(vkGetPhysicalDeviceImageFormatProperties,	getPhysicalDeviceImageFormatProperties),
+	VK_NULL_FUNC_ENTRY(vkGetPhysicalDeviceProperties,				getPhysicalDeviceProperties),
+	VK_NULL_FUNC_ENTRY(vkGetPhysicalDeviceQueueFamilyProperties,	getPhysicalDeviceQueueFamilyProperties),
+	VK_NULL_FUNC_ENTRY(vkGetPhysicalDeviceMemoryProperties,			getPhysicalDeviceMemoryProperties),
+	VK_NULL_FUNC_ENTRY(vkGetDeviceProcAddr,							getDeviceProcAddr),
+	VK_NULL_FUNC_ENTRY(vkCreateDevice,								createDevice),
+	VK_NULL_FUNC_ENTRY(vkEnumerateDeviceExtensionProperties,		enumerateDeviceExtensionProperties),
+	VK_NULL_FUNC_ENTRY(vkEnumerateDeviceLayerProperties,			enumerateDeviceLayerProperties),
+};
+
+static const tcu::StaticFunctionLibrary::Entry s_deviceFunctions[] =
+{
+	VK_NULL_FUNC_ENTRY(vkDestroyDevice,									destroyDevice),
+	VK_NULL_FUNC_ENTRY(vkGetDeviceQueue,								getDeviceQueue),
+	VK_NULL_FUNC_ENTRY(vkQueueSubmit,									queueSubmit),
+	VK_NULL_FUNC_ENTRY(vkQueueWaitIdle,									queueWaitIdle),
+	VK_NULL_FUNC_ENTRY(vkDeviceWaitIdle,								deviceWaitIdle),
+	VK_NULL_FUNC_ENTRY(vkAllocateMemory,								allocateMemory),
+	VK_NULL_FUNC_ENTRY(vkFreeMemory,									freeMemory),
+	VK_NULL_FUNC_ENTRY(vkMapMemory,										mapMemory),
+	VK_NULL_FUNC_ENTRY(vkUnmapMemory,									unmapMemory),
+	VK_NULL_FUNC_ENTRY(vkFlushMappedMemoryRanges,						flushMappedMemoryRanges),
+	VK_NULL_FUNC_ENTRY(vkInvalidateMappedMemoryRanges,					invalidateMappedMemoryRanges),
+	VK_NULL_FUNC_ENTRY(vkGetDeviceMemoryCommitment,						getDeviceMemoryCommitment),
+	VK_NULL_FUNC_ENTRY(vkBindBufferMemory,								bindBufferMemory),
+	VK_NULL_FUNC_ENTRY(vkBindImageMemory,								bindImageMemory),
+	VK_NULL_FUNC_ENTRY(vkGetBufferMemoryRequirements,					getBufferMemoryRequirements),
+	VK_NULL_FUNC_ENTRY(vkGetImageMemoryRequirements,					getImageMemoryRequirements),
+	VK_NULL_FUNC_ENTRY(vkGetImageSparseMemoryRequirements,				getImageSparseMemoryRequirements),
+	VK_NULL_FUNC_ENTRY(vkGetPhysicalDeviceSparseImageFormatProperties,	getPhysicalDeviceSparseImageFormatProperties),
+	VK_NULL_FUNC_ENTRY(vkQueueBindSparse,								queueBindSparse),
+	VK_NULL_FUNC_ENTRY(vkCreateFence,									createFence),
+	VK_NULL_FUNC_ENTRY(vkDestroyFence,									destroyFence),
+	VK_NULL_FUNC_ENTRY(vkResetFences,									resetFences),
+	VK_NULL_FUNC_ENTRY(vkGetFenceStatus,								getFenceStatus),
+	VK_NULL_FUNC_ENTRY(vkWaitForFences,									waitForFences),
+	VK_NULL_FUNC_ENTRY(vkCreateSemaphore,								createSemaphore),
+	VK_NULL_FUNC_ENTRY(vkDestroySemaphore,								destroySemaphore),
+	VK_NULL_FUNC_ENTRY(vkCreateEvent,									createEvent),
+	VK_NULL_FUNC_ENTRY(vkDestroyEvent,									destroyEvent),
+	VK_NULL_FUNC_ENTRY(vkGetEventStatus,								getEventStatus),
+	VK_NULL_FUNC_ENTRY(vkSetEvent,										setEvent),
+	VK_NULL_FUNC_ENTRY(vkResetEvent,									resetEvent),
+	VK_NULL_FUNC_ENTRY(vkCreateQueryPool,								createQueryPool),
+	VK_NULL_FUNC_ENTRY(vkDestroyQueryPool,								destroyQueryPool),
+	VK_NULL_FUNC_ENTRY(vkGetQueryPoolResults,							getQueryPoolResults),
+	VK_NULL_FUNC_ENTRY(vkCreateBuffer,									createBuffer),
+	VK_NULL_FUNC_ENTRY(vkDestroyBuffer,									destroyBuffer),
+	VK_NULL_FUNC_ENTRY(vkCreateBufferView,								createBufferView),
+	VK_NULL_FUNC_ENTRY(vkDestroyBufferView,								destroyBufferView),
+	VK_NULL_FUNC_ENTRY(vkCreateImage,									createImage),
+	VK_NULL_FUNC_ENTRY(vkDestroyImage,									destroyImage),
+	VK_NULL_FUNC_ENTRY(vkGetImageSubresourceLayout,						getImageSubresourceLayout),
+	VK_NULL_FUNC_ENTRY(vkCreateImageView,								createImageView),
+	VK_NULL_FUNC_ENTRY(vkDestroyImageView,								destroyImageView),
+	VK_NULL_FUNC_ENTRY(vkCreateShaderModule,							createShaderModule),
+	VK_NULL_FUNC_ENTRY(vkDestroyShaderModule,							destroyShaderModule),
+	VK_NULL_FUNC_ENTRY(vkCreatePipelineCache,							createPipelineCache),
+	VK_NULL_FUNC_ENTRY(vkDestroyPipelineCache,							destroyPipelineCache),
+	VK_NULL_FUNC_ENTRY(vkGetPipelineCacheData,							getPipelineCacheData),
+	VK_NULL_FUNC_ENTRY(vkMergePipelineCaches,							mergePipelineCaches),
+	VK_NULL_FUNC_ENTRY(vkCreateGraphicsPipelines,						createGraphicsPipelines),
+	VK_NULL_FUNC_ENTRY(vkCreateComputePipelines,						createComputePipelines),
+	VK_NULL_FUNC_ENTRY(vkDestroyPipeline,								destroyPipeline),
+	VK_NULL_FUNC_ENTRY(vkCreatePipelineLayout,							createPipelineLayout),
+	VK_NULL_FUNC_ENTRY(vkDestroyPipelineLayout,							destroyPipelineLayout),
+	VK_NULL_FUNC_ENTRY(vkCreateSampler,									createSampler),
+	VK_NULL_FUNC_ENTRY(vkDestroySampler,								destroySampler),
+	VK_NULL_FUNC_ENTRY(vkCreateDescriptorSetLayout,						createDescriptorSetLayout),
+	VK_NULL_FUNC_ENTRY(vkDestroyDescriptorSetLayout,					destroyDescriptorSetLayout),
+	VK_NULL_FUNC_ENTRY(vkCreateDescriptorPool,							createDescriptorPool),
+	VK_NULL_FUNC_ENTRY(vkDestroyDescriptorPool,							destroyDescriptorPool),
+	VK_NULL_FUNC_ENTRY(vkResetDescriptorPool,							resetDescriptorPool),
+	VK_NULL_FUNC_ENTRY(vkAllocateDescriptorSets,						allocateDescriptorSets),
+	VK_NULL_FUNC_ENTRY(vkFreeDescriptorSets,							freeDescriptorSets),
+	VK_NULL_FUNC_ENTRY(vkUpdateDescriptorSets,							updateDescriptorSets),
+	VK_NULL_FUNC_ENTRY(vkCreateFramebuffer,								createFramebuffer),
+	VK_NULL_FUNC_ENTRY(vkDestroyFramebuffer,							destroyFramebuffer),
+	VK_NULL_FUNC_ENTRY(vkCreateRenderPass,								createRenderPass),
+	VK_NULL_FUNC_ENTRY(vkDestroyRenderPass,								destroyRenderPass),
+	VK_NULL_FUNC_ENTRY(vkGetRenderAreaGranularity,						getRenderAreaGranularity),
+	VK_NULL_FUNC_ENTRY(vkCreateCommandPool,								createCommandPool),
+	VK_NULL_FUNC_ENTRY(vkDestroyCommandPool,							destroyCommandPool),
+	VK_NULL_FUNC_ENTRY(vkResetCommandPool,								resetCommandPool),
+	VK_NULL_FUNC_ENTRY(vkAllocateCommandBuffers,						allocateCommandBuffers),
+	VK_NULL_FUNC_ENTRY(vkFreeCommandBuffers,							freeCommandBuffers),
+	VK_NULL_FUNC_ENTRY(vkBeginCommandBuffer,							beginCommandBuffer),
+	VK_NULL_FUNC_ENTRY(vkEndCommandBuffer,								endCommandBuffer),
+	VK_NULL_FUNC_ENTRY(vkResetCommandBuffer,							resetCommandBuffer),
+	VK_NULL_FUNC_ENTRY(vkCmdBindPipeline,								cmdBindPipeline),
+	VK_NULL_FUNC_ENTRY(vkCmdSetViewport,								cmdSetViewport),
+	VK_NULL_FUNC_ENTRY(vkCmdSetScissor,									cmdSetScissor),
+	VK_NULL_FUNC_ENTRY(vkCmdSetLineWidth,								cmdSetLineWidth),
+	VK_NULL_FUNC_ENTRY(vkCmdSetDepthBias,								cmdSetDepthBias),
+	VK_NULL_FUNC_ENTRY(vkCmdSetBlendConstants,							cmdSetBlendConstants),
+	VK_NULL_FUNC_ENTRY(vkCmdSetDepthBounds,								cmdSetDepthBounds),
+	VK_NULL_FUNC_ENTRY(vkCmdSetStencilCompareMask,						cmdSetStencilCompareMask),
+	VK_NULL_FUNC_ENTRY(vkCmdSetStencilWriteMask,						cmdSetStencilWriteMask),
+	VK_NULL_FUNC_ENTRY(vkCmdSetStencilReference,						cmdSetStencilReference),
+	VK_NULL_FUNC_ENTRY(vkCmdBindDescriptorSets,							cmdBindDescriptorSets),
+	VK_NULL_FUNC_ENTRY(vkCmdBindIndexBuffer,							cmdBindIndexBuffer),
+	VK_NULL_FUNC_ENTRY(vkCmdBindVertexBuffers,							cmdBindVertexBuffers),
+	VK_NULL_FUNC_ENTRY(vkCmdDraw,										cmdDraw),
+	VK_NULL_FUNC_ENTRY(vkCmdDrawIndexed,								cmdDrawIndexed),
+	VK_NULL_FUNC_ENTRY(vkCmdDrawIndirect,								cmdDrawIndirect),
+	VK_NULL_FUNC_ENTRY(vkCmdDrawIndexedIndirect,						cmdDrawIndexedIndirect),
+	VK_NULL_FUNC_ENTRY(vkCmdDispatch,									cmdDispatch),
+	VK_NULL_FUNC_ENTRY(vkCmdDispatchIndirect,							cmdDispatchIndirect),
+	VK_NULL_FUNC_ENTRY(vkCmdCopyBuffer,									cmdCopyBuffer),
+	VK_NULL_FUNC_ENTRY(vkCmdCopyImage,									cmdCopyImage),
+	VK_NULL_FUNC_ENTRY(vkCmdBlitImage,									cmdBlitImage),
+	VK_NULL_FUNC_ENTRY(vkCmdCopyBufferToImage,							cmdCopyBufferToImage),
+	VK_NULL_FUNC_ENTRY(vkCmdCopyImageToBuffer,							cmdCopyImageToBuffer),
+	VK_NULL_FUNC_ENTRY(vkCmdUpdateBuffer,								cmdUpdateBuffer),
+	VK_NULL_FUNC_ENTRY(vkCmdFillBuffer,									cmdFillBuffer),
+	VK_NULL_FUNC_ENTRY(vkCmdClearColorImage,							cmdClearColorImage),
+	VK_NULL_FUNC_ENTRY(vkCmdClearDepthStencilImage,						cmdClearDepthStencilImage),
+	VK_NULL_FUNC_ENTRY(vkCmdClearAttachments,							cmdClearAttachments),
+	VK_NULL_FUNC_ENTRY(vkCmdResolveImage,								cmdResolveImage),
+	VK_NULL_FUNC_ENTRY(vkCmdSetEvent,									cmdSetEvent),
+	VK_NULL_FUNC_ENTRY(vkCmdResetEvent,									cmdResetEvent),
+	VK_NULL_FUNC_ENTRY(vkCmdWaitEvents,									cmdWaitEvents),
+	VK_NULL_FUNC_ENTRY(vkCmdPipelineBarrier,							cmdPipelineBarrier),
+	VK_NULL_FUNC_ENTRY(vkCmdBeginQuery,									cmdBeginQuery),
+	VK_NULL_FUNC_ENTRY(vkCmdEndQuery,									cmdEndQuery),
+	VK_NULL_FUNC_ENTRY(vkCmdResetQueryPool,								cmdResetQueryPool),
+	VK_NULL_FUNC_ENTRY(vkCmdWriteTimestamp,								cmdWriteTimestamp),
+	VK_NULL_FUNC_ENTRY(vkCmdCopyQueryPoolResults,						cmdCopyQueryPoolResults),
+	VK_NULL_FUNC_ENTRY(vkCmdPushConstants,								cmdPushConstants),
+	VK_NULL_FUNC_ENTRY(vkCmdBeginRenderPass,							cmdBeginRenderPass),
+	VK_NULL_FUNC_ENTRY(vkCmdNextSubpass,								cmdNextSubpass),
+	VK_NULL_FUNC_ENTRY(vkCmdEndRenderPass,								cmdEndRenderPass),
+	VK_NULL_FUNC_ENTRY(vkCmdExecuteCommands,							cmdExecuteCommands),
+};
+
diff --git a/external/vulkancts/framework/vulkan/vkPlatform.cpp b/external/vulkancts/framework/vulkan/vkPlatform.cpp
new file mode 100644
index 0000000..6e958a7
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkPlatform.cpp
@@ -0,0 +1,83 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan platform abstraction.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkPlatform.hpp"
+#include "tcuFunctionLibrary.hpp"
+
+namespace vk
+{
+
+PlatformDriver::PlatformDriver (const tcu::FunctionLibrary& library)
+{
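+	// The generated .inl below resolves each platform-level entry point via GET_PROC_ADDR and stores it in the m_vk function table.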
+#define GET_PROC_ADDR(NAME) library.getFunction(NAME)
+#include "vkInitPlatformFunctionPointers.inl"
+#undef GET_PROC_ADDR
+}
+
+PlatformDriver::~PlatformDriver (void)
+{
+}
+
+InstanceDriver::InstanceDriver (const PlatformInterface& platformInterface, VkInstance instance)
+{
+#define GET_PROC_ADDR(NAME) platformInterface.getInstanceProcAddr(instance, NAME)
+#include "vkInitInstanceFunctionPointers.inl"
+#undef GET_PROC_ADDR
+}
+
+InstanceDriver::~InstanceDriver (void)
+{
+}
+
+DeviceDriver::DeviceDriver (const InstanceInterface& instanceInterface, VkDevice device)
+{
+#define GET_PROC_ADDR(NAME) instanceInterface.getDeviceProcAddr(device, NAME)
+#include "vkInitDeviceFunctionPointers.inl"
+#undef GET_PROC_ADDR
+}
+
+DeviceDriver::~DeviceDriver (void)
+{
+}
+
+#include "vkPlatformDriverImpl.inl"
+#include "vkInstanceDriverImpl.inl"
+#include "vkDeviceDriverImpl.inl"
+
+void Platform::describePlatform (std::ostream& dst) const
+{
+	dst << "vk::Platform::describePlatform() not implemented";
+}
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkPlatform.hpp b/external/vulkancts/framework/vulkan/vkPlatform.hpp
new file mode 100644
index 0000000..fcbfaf5
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkPlatform.hpp
@@ -0,0 +1,126 @@
+#ifndef _VKPLATFORM_HPP
+#define _VKPLATFORM_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan platform abstraction.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+
+#include <ostream>
+
+namespace tcu
+{
+class FunctionLibrary;
+}
+
+namespace vk
+{
+
+class Library
+{
+public:
+										Library					(void) {}
+	virtual								~Library				(void) {}
+
+	virtual const PlatformInterface&	getPlatformInterface	(void) const = 0;
+};
+
+class PlatformDriver : public PlatformInterface
+{
+public:
+				PlatformDriver	(const tcu::FunctionLibrary& library);
+				~PlatformDriver	(void);
+
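+// Generated declarations of the PlatformInterface methods; the matching definitions live in vkPlatformDriverImpl.inl.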
+#include "vkConcretePlatformInterface.inl"
+
+protected:
+	struct Functions
+	{
+#include "vkPlatformFunctionPointers.inl"
+	};
+
+	Functions	m_vk;
+};
+
+class InstanceDriver : public InstanceInterface
+{
+public:
+				InstanceDriver	(const PlatformInterface& platformInterface, VkInstance instance);
+				~InstanceDriver	(void);
+
+#include "vkConcreteInstanceInterface.inl"
+
+protected:
+	struct Functions
+	{
+#include "vkInstanceFunctionPointers.inl"
+	};
+
+	Functions	m_vk;
+};
+
+class DeviceDriver : public DeviceInterface
+{
+public:
+				DeviceDriver	(const InstanceInterface& instanceInterface, VkDevice device);
+				~DeviceDriver	(void);
+
+#include "vkConcreteDeviceInterface.inl"
+
+protected:
+	struct Functions
+	{
+#include "vkDeviceFunctionPointers.inl"
+	};
+
+	Functions	m_vk;
+};
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Vulkan platform interface
+ *//*--------------------------------------------------------------------*/
+class Platform
+{
+public:
+						Platform			(void) {}
+						~Platform			(void) {}
+
+	// \todo [2015-01-05 pyry] Parametrize this to select, for example, a debug library / interface?
+	virtual Library*	createLibrary		(void) const = 0;
+
+	virtual void		describePlatform	(std::ostream& dst) const;
+};
+
+} // vk
+
+#endif // _VKPLATFORM_HPP
diff --git a/external/vulkancts/framework/vulkan/vkPlatformDriverImpl.inl b/external/vulkancts/framework/vulkan/vkPlatformDriverImpl.inl
new file mode 100644
index 0000000..6a4db55
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkPlatformDriverImpl.inl
@@ -0,0 +1,23 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+
+VkResult PlatformDriver::createInstance (const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance) const
+{
+	return m_vk.createInstance(pCreateInfo, pAllocator, pInstance);
+}
+
+PFN_vkVoidFunction PlatformDriver::getInstanceProcAddr (VkInstance instance, const char* pName) const
+{
+	return m_vk.getInstanceProcAddr(instance, pName);
+}
+
+VkResult PlatformDriver::enumerateInstanceExtensionProperties (const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties) const
+{
+	return m_vk.enumerateInstanceExtensionProperties(pLayerName, pPropertyCount, pProperties);
+}
+
+VkResult PlatformDriver::enumerateInstanceLayerProperties (deUint32* pPropertyCount, VkLayerProperties* pProperties) const
+{
+	return m_vk.enumerateInstanceLayerProperties(pPropertyCount, pProperties);
+}
diff --git a/external/vulkancts/framework/vulkan/vkPlatformFunctionPointers.inl b/external/vulkancts/framework/vulkan/vkPlatformFunctionPointers.inl
new file mode 100644
index 0000000..60cea42
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkPlatformFunctionPointers.inl
@@ -0,0 +1,7 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+CreateInstanceFunc							createInstance;
+GetInstanceProcAddrFunc						getInstanceProcAddr;
+EnumerateInstanceExtensionPropertiesFunc	enumerateInstanceExtensionProperties;
+EnumerateInstanceLayerPropertiesFunc		enumerateInstanceLayerProperties;
diff --git a/external/vulkancts/framework/vulkan/vkPrograms.cpp b/external/vulkancts/framework/vulkan/vkPrograms.cpp
new file mode 100644
index 0000000..5dc3dd6
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkPrograms.cpp
@@ -0,0 +1,131 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Program utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkPrograms.hpp"
+#include "vkGlslToSpirV.hpp"
+#include "vkSpirVAsm.hpp"
+#include "vkRefUtil.hpp"
+
+#include "tcuTestLog.hpp"
+
+#include "deArrayUtil.hpp"
+#include "deMemory.h"
+
+namespace vk
+{
+
+using std::string;
+using std::vector;
+using tcu::TestLog;
+
+// ProgramBinary
+
+ProgramBinary::ProgramBinary (ProgramFormat format, size_t binarySize, const deUint8* binary)
+	: m_format	(format)
+	, m_binary	(binary, binary+binarySize)
+{
+}
+
+// Utils
+
+ProgramBinary* buildProgram (const glu::ProgramSources& program, ProgramFormat binaryFormat, glu::ShaderProgramInfo* buildInfo)
+{
+	if (binaryFormat == PROGRAM_FORMAT_SPIRV)
+	{
+		vector<deUint8> binary;
+		glslToSpirV(program, &binary, buildInfo);
+		return new ProgramBinary(binaryFormat, binary.size(), &binary[0]);
+	}
+	else
+		TCU_THROW(NotSupportedError, "Unsupported program format");
+}
+
+ProgramBinary* assembleProgram (const SpirVAsmSource& program, SpirVProgramInfo* buildInfo)
+{
+	vector<deUint8> binary;
+	assembleSpirV(&program, &binary, buildInfo);
+	return new ProgramBinary(PROGRAM_FORMAT_SPIRV, binary.size(), &binary[0]);
+}
+
+Move<VkShaderModule> createShaderModule (const DeviceInterface& deviceInterface, VkDevice device, const ProgramBinary& binary, VkShaderModuleCreateFlags flags)
+{
+	if (binary.getFormat() == PROGRAM_FORMAT_SPIRV)
+	{
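+		// Note: codeSize is expressed in bytes, while pCode must point to 32-bit aligned SPIR-V words.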
+		const struct VkShaderModuleCreateInfo		shaderModuleInfo	=
+		{
+			VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
+			DE_NULL,
+			flags,
+			(deUintptr)binary.getSize(),
+			(const deUint32*)binary.getBinary(),
+		};
+
+		return createShaderModule(deviceInterface, device, &shaderModuleInfo);
+	}
+	else
+		TCU_THROW(NotSupportedError, "Unsupported program format");
+}
+
+glu::ShaderType getGluShaderType (VkShaderStageFlagBits shaderStage)
+{
+	switch (shaderStage)
+	{
+		case VK_SHADER_STAGE_VERTEX_BIT:					return glu::SHADERTYPE_VERTEX;
+		case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:		return glu::SHADERTYPE_TESSELLATION_CONTROL;
+		case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:	return glu::SHADERTYPE_TESSELLATION_EVALUATION;
+		case VK_SHADER_STAGE_GEOMETRY_BIT:					return glu::SHADERTYPE_GEOMETRY;
+		case VK_SHADER_STAGE_FRAGMENT_BIT:					return glu::SHADERTYPE_FRAGMENT;
+		case VK_SHADER_STAGE_COMPUTE_BIT:					return glu::SHADERTYPE_COMPUTE;
+		default:
+			DE_FATAL("Unknown shader stage");
+			return glu::SHADERTYPE_LAST;
+	}
+}
+
+VkShaderStageFlagBits getVkShaderStage (glu::ShaderType shaderType)
+{
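+	// Lookup table indexed by glu::ShaderType; the entry order must match the glu::SHADERTYPE_* enum.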
+	static const VkShaderStageFlagBits s_shaderStages[] =
+	{
+		VK_SHADER_STAGE_VERTEX_BIT,
+		VK_SHADER_STAGE_FRAGMENT_BIT,
+		VK_SHADER_STAGE_GEOMETRY_BIT,
+		VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
+		VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
+		VK_SHADER_STAGE_COMPUTE_BIT
+	};
+
+	return de::getSizedArrayElement<glu::SHADERTYPE_LAST>(s_shaderStages, shaderType);
+}
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkPrograms.hpp b/external/vulkancts/framework/vulkan/vkPrograms.hpp
new file mode 100644
index 0000000..3efdf18
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkPrograms.hpp
@@ -0,0 +1,193 @@
+#ifndef _VKPROGRAMS_HPP
+#define _VKPROGRAMS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Program utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vkRef.hpp"
+#include "vkSpirVProgram.hpp"
+#include "gluShaderProgram.hpp"
+#include "deUniquePtr.hpp"
+#include "deSTLUtil.hpp"
+
+#include <vector>
+#include <map>
+
+namespace tcu
+{
+class TestLog;
+} // tcu
+
+namespace vk
+{
+
+enum ProgramFormat
+{
+	PROGRAM_FORMAT_SPIRV = 0,
+
+	PROGRAM_FORMAT_LAST
+};
+
+class ProgramBinary
+{
+public:
+								ProgramBinary	(ProgramFormat format, size_t binarySize, const deUint8* binary);
+
+	ProgramFormat				getFormat		(void) const { return m_format;										}
+	size_t						getSize			(void) const { return m_binary.size();								}
+	const deUint8*				getBinary		(void) const { return m_binary.empty() ? DE_NULL : &m_binary[0];	}
+
+private:
+	const ProgramFormat			m_format;
+	const std::vector<deUint8>	m_binary;
+};
+
+template<typename Program>
+class ProgramCollection
+{
+public:
+								ProgramCollection	(void);
+								~ProgramCollection	(void);
+
+	void						clear				(void);
+
+	Program&					add					(const std::string& name);
+	void						add					(const std::string& name, de::MovePtr<Program>& program);
+
+	bool						contains			(const std::string& name) const;
+	const Program&				get					(const std::string& name) const;
+
+	class Iterator
+	{
+	private:
+		typedef typename std::map<std::string, Program*>::const_iterator	IteratorImpl;
+
+	public:
+		explicit			Iterator	(const IteratorImpl& i) : m_impl(i) {}
+
+		Iterator&			operator++	(void)			{ ++m_impl; return *this;	}
+		const Program&		operator*	(void) const	{ return getProgram();		}
+
+		const std::string&	getName		(void) const	{ return m_impl->first;		}
+		const Program&		getProgram	(void) const	{ return *m_impl->second;	}
+
+		bool				operator==	(const Iterator& other) const	{ return m_impl == other.m_impl;	}
+		bool				operator!=	(const Iterator& other) const	{ return m_impl != other.m_impl;	}
+
+	private:
+
+		IteratorImpl	m_impl;
+	};
+
+	Iterator					begin				(void) const { return Iterator(m_programs.begin());	}
+	Iterator					end					(void) const { return Iterator(m_programs.end());	}
+
+private:
+	typedef std::map<std::string, Program*>	ProgramMap;
+
+	ProgramMap					m_programs;
+};
+
+template<typename Program>
+ProgramCollection<Program>::ProgramCollection (void)
+{
+}
+
+template<typename Program>
+ProgramCollection<Program>::~ProgramCollection (void)
+{
+	clear();
+}
+
+template<typename Program>
+void ProgramCollection<Program>::clear (void)
+{
+	for (typename ProgramMap::const_iterator i = m_programs.begin(); i != m_programs.end(); ++i)
+		delete i->second;
+	m_programs.clear();
+}
+
+template<typename Program>
+Program& ProgramCollection<Program>::add (const std::string& name)
+{
+	DE_ASSERT(!contains(name));
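+	// Allocate via MovePtr so the program is freed if insertion throws, then release ownership to the raw-pointer map.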
+	de::MovePtr<Program> prog = de::newMovePtr<Program>();
+	m_programs[name] = prog.get();
+	prog.release();
+	return *m_programs[name];
+}
+
+template<typename Program>
+void ProgramCollection<Program>::add (const std::string& name, de::MovePtr<Program>& program)
+{
+	DE_ASSERT(!contains(name));
+	m_programs[name] = program.get();
+	program.release();
+}
+
+template<typename Program>
+bool ProgramCollection<Program>::contains (const std::string& name) const
+{
+	return de::contains(m_programs, name);
+}
+
+template<typename Program>
+const Program& ProgramCollection<Program>::get (const std::string& name) const
+{
+	DE_ASSERT(contains(name));
+	return *m_programs.find(name)->second;
+}
+
+typedef vk::ProgramCollection<glu::ProgramSources>	GlslSourceCollection;
+typedef vk::ProgramCollection<vk::SpirVAsmSource>	SpirVAsmCollection;
+
+struct SourceCollections
+{
+	GlslSourceCollection	glslSources;
+	SpirVAsmCollection		spirvAsmSources;
+};
+
+typedef ProgramCollection<ProgramBinary>		BinaryCollection;
+
+// \todo [2015-03-13 pyry] Likely need BinaryBuilder abstraction for this
+ProgramBinary*			buildProgram		(const glu::ProgramSources& program, ProgramFormat binaryFormat, glu::ShaderProgramInfo* buildInfo);
+ProgramBinary*			assembleProgram		(const vk::SpirVAsmSource& program, SpirVProgramInfo* buildInfo);
+Move<VkShaderModule>	createShaderModule	(const DeviceInterface& deviceInterface, VkDevice device, const ProgramBinary& binary, VkShaderModuleCreateFlags flags);
+
+glu::ShaderType			getGluShaderType	(VkShaderStageFlagBits shaderStage);
+VkShaderStageFlagBits	getVkShaderStage	(glu::ShaderType shaderType);
+
+} // vk
+
+#endif // _VKPROGRAMS_HPP
diff --git a/external/vulkancts/framework/vulkan/vkQueryUtil.cpp b/external/vulkancts/framework/vulkan/vkQueryUtil.cpp
new file mode 100644
index 0000000..f8128e4
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkQueryUtil.cpp
@@ -0,0 +1,223 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan query utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkQueryUtil.hpp"
+#include "deMemory.h"
+
+namespace vk
+{
+
+using std::vector;
+
+vector<VkPhysicalDevice> enumeratePhysicalDevices (const InstanceInterface& vk, VkInstance instance)
+{
+	deUint32					numDevices	= 0;
+	vector<VkPhysicalDevice>	devices;
+
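+	// Standard two-call pattern: first query only the count, then fetch that many handles.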
+	VK_CHECK(vk.enumeratePhysicalDevices(instance, &numDevices, DE_NULL));
+
+	if (numDevices > 0)
+	{
+		devices.resize(numDevices);
+		VK_CHECK(vk.enumeratePhysicalDevices(instance, &numDevices, &devices[0]));
+
+		if ((size_t)numDevices != devices.size())
+			TCU_FAIL("Returned device count changed between queries");
+	}
+
+	return devices;
+}
+
+vector<VkQueueFamilyProperties> getPhysicalDeviceQueueFamilyProperties (const InstanceInterface& vk, VkPhysicalDevice physicalDevice)
+{
+	deUint32						numQueues	= 0;
+	vector<VkQueueFamilyProperties>	properties;
+
+	vk.getPhysicalDeviceQueueFamilyProperties(physicalDevice, &numQueues, DE_NULL);
+
+	if (numQueues > 0)
+	{
+		properties.resize(numQueues);
+		vk.getPhysicalDeviceQueueFamilyProperties(physicalDevice, &numQueues, &properties[0]);
+
+		if ((size_t)numQueues != properties.size())
+			TCU_FAIL("Returned queue family count changes between queries");
+	}
+
+	return properties;
+}
+
+VkPhysicalDeviceFeatures getPhysicalDeviceFeatures (const InstanceInterface& vk, VkPhysicalDevice physicalDevice)
+{
+	VkPhysicalDeviceFeatures	features;
+
+	deMemset(&features, 0, sizeof(features));
+
+	vk.getPhysicalDeviceFeatures(physicalDevice, &features);
+	return features;
+}
+
+VkPhysicalDeviceProperties getPhysicalDeviceProperties (const InstanceInterface& vk, VkPhysicalDevice physicalDevice)
+{
+	VkPhysicalDeviceProperties	properties;
+
+	deMemset(&properties, 0, sizeof(properties));
+
+	vk.getPhysicalDeviceProperties(physicalDevice, &properties);
+	return properties;
+}
+
+VkPhysicalDeviceMemoryProperties getPhysicalDeviceMemoryProperties (const InstanceInterface& vk, VkPhysicalDevice physicalDevice)
+{
+	VkPhysicalDeviceMemoryProperties	properties;
+
+	deMemset(&properties, 0, sizeof(properties));
+
+	vk.getPhysicalDeviceMemoryProperties(physicalDevice, &properties);
+	return properties;
+}
+
+VkFormatProperties getPhysicalDeviceFormatProperties (const InstanceInterface& vk, VkPhysicalDevice physicalDevice, VkFormat format)
+{
+	VkFormatProperties	properties;
+
+	deMemset(&properties, 0, sizeof(properties));
+
+	vk.getPhysicalDeviceFormatProperties(physicalDevice, format, &properties);
+	return properties;
+}
+
+VkImageFormatProperties getPhysicalDeviceImageFormatProperties (const InstanceInterface& vk, VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags)
+{
+	VkImageFormatProperties	properties;
+
+	deMemset(&properties, 0, sizeof(properties));
+
+	VK_CHECK(vk.getPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, &properties));
+	return properties;
+}
+
+VkMemoryRequirements getBufferMemoryRequirements (const DeviceInterface& vk, VkDevice device, VkBuffer buffer)
+{
+	VkMemoryRequirements req;
+	vk.getBufferMemoryRequirements(device, buffer, &req);
+	return req;
+}
+
+VkMemoryRequirements getImageMemoryRequirements (const DeviceInterface& vk, VkDevice device, VkImage image)
+{
+	VkMemoryRequirements req;
+	vk.getImageMemoryRequirements(device, image, &req);
+	return req;
+}
+
+vector<VkLayerProperties> enumerateInstanceLayerProperties (const PlatformInterface& vkp)
+{
+	vector<VkLayerProperties>	properties;
+	deUint32					numLayers	= 0;
+
+	VK_CHECK(vkp.enumerateInstanceLayerProperties(&numLayers, DE_NULL));
+
+	if (numLayers > 0)
+	{
+		properties.resize(numLayers);
+		VK_CHECK(vkp.enumerateInstanceLayerProperties(&numLayers, &properties[0]));
+		TCU_CHECK((size_t)numLayers == properties.size());
+	}
+
+	return properties;
+}
+
+vector<VkExtensionProperties> enumerateInstanceExtensionProperties (const PlatformInterface& vkp, const char* layerName)
+{
+	vector<VkExtensionProperties>	properties;
+	deUint32						numExtensions	= 0;
+
+	VK_CHECK(vkp.enumerateInstanceExtensionProperties(layerName, &numExtensions, DE_NULL));
+
+	if (numExtensions > 0)
+	{
+		properties.resize(numExtensions);
+		VK_CHECK(vkp.enumerateInstanceExtensionProperties(layerName, &numExtensions, &properties[0]));
+		TCU_CHECK((size_t)numExtensions == properties.size());
+	}
+
+	return properties;
+}
+
+vector<VkLayerProperties> enumerateDeviceLayerProperties (const InstanceInterface& vki, VkPhysicalDevice physicalDevice)
+{
+	vector<VkLayerProperties>	properties;
+	deUint32					numLayers	= 0;
+
+	VK_CHECK(vki.enumerateDeviceLayerProperties(physicalDevice, &numLayers, DE_NULL));
+
+	if (numLayers > 0)
+	{
+		properties.resize(numLayers);
+		VK_CHECK(vki.enumerateDeviceLayerProperties(physicalDevice, &numLayers, &properties[0]));
+		TCU_CHECK((size_t)numLayers == properties.size());
+	}
+
+	return properties;
+}
+
+vector<VkExtensionProperties> enumerateDeviceExtensionProperties (const InstanceInterface& vki, VkPhysicalDevice physicalDevice, const char* layerName)
+{
+	vector<VkExtensionProperties>	properties;
+	deUint32						numExtensions	= 0;
+
+	VK_CHECK(vki.enumerateDeviceExtensionProperties(physicalDevice, layerName, &numExtensions, DE_NULL));
+
+	if (numExtensions > 0)
+	{
+		properties.resize(numExtensions);
+		VK_CHECK(vki.enumerateDeviceExtensionProperties(physicalDevice, layerName, &numExtensions, &properties[0]));
+		TCU_CHECK((size_t)numExtensions == properties.size());
+	}
+
+	return properties;
+}
+
+bool isShaderStageSupported (const VkPhysicalDeviceFeatures& deviceFeatures, VkShaderStageFlagBits stage)
+{
+	if (stage == VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || stage == VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
+		return deviceFeatures.tessellationShader == VK_TRUE;
+	else if (stage == VK_SHADER_STAGE_GEOMETRY_BIT)
+		return deviceFeatures.geometryShader == VK_TRUE;
+	else
+		return true;
+}
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkQueryUtil.hpp b/external/vulkancts/framework/vulkan/vkQueryUtil.hpp
new file mode 100644
index 0000000..a203b8e
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkQueryUtil.hpp
@@ -0,0 +1,84 @@
+#ifndef _VKQUERYUTIL_HPP
+#define _VKQUERYUTIL_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan query utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "deMemory.h"
+
+#include <vector>
+
+namespace vk
+{
+
+std::vector<VkPhysicalDevice>			enumeratePhysicalDevices				(const InstanceInterface& vk, VkInstance instance);
+std::vector<VkQueueFamilyProperties>	getPhysicalDeviceQueueFamilyProperties	(const InstanceInterface& vk, VkPhysicalDevice physicalDevice);
+VkPhysicalDeviceFeatures				getPhysicalDeviceFeatures				(const InstanceInterface& vk, VkPhysicalDevice physicalDevice);
+VkPhysicalDeviceProperties				getPhysicalDeviceProperties				(const InstanceInterface& vk, VkPhysicalDevice physicalDevice);
+VkPhysicalDeviceMemoryProperties		getPhysicalDeviceMemoryProperties		(const InstanceInterface& vk, VkPhysicalDevice physicalDevice);
+VkFormatProperties						getPhysicalDeviceFormatProperties		(const InstanceInterface& vk, VkPhysicalDevice physicalDevice, VkFormat format);
+VkImageFormatProperties					getPhysicalDeviceImageFormatProperties	(const InstanceInterface& vk, VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags);
+
+VkMemoryRequirements					getBufferMemoryRequirements				(const DeviceInterface& vk, VkDevice device, VkBuffer buffer);
+VkMemoryRequirements					getImageMemoryRequirements				(const DeviceInterface& vk, VkDevice device, VkImage image);
+
+std::vector<VkLayerProperties>			enumerateInstanceLayerProperties		(const PlatformInterface& vkp);
+std::vector<VkExtensionProperties>		enumerateInstanceExtensionProperties	(const PlatformInterface& vkp, const char* layerName);
+std::vector<VkLayerProperties>			enumerateDeviceLayerProperties			(const InstanceInterface& vki, VkPhysicalDevice physicalDevice);
+std::vector<VkExtensionProperties>		enumerateDeviceExtensionProperties		(const InstanceInterface& vki, VkPhysicalDevice physicalDevice, const char* layerName);
+
+bool									isShaderStageSupported					(const VkPhysicalDeviceFeatures& deviceFeatures, VkShaderStageFlagBits stage);
+
+template <typename Context, typename Interface, typename Type>
+bool validateInitComplete(Context context, void (Interface::*Function)(Context, Type*)const, const Interface& interface)
+{
+	Type vec[2];
+	deMemset(&vec[0], 0x00, sizeof(Type));
+	deMemset(&vec[1], 0xFF, sizeof(Type));
+
+	(interface.*Function)(context, &vec[0]);
+	(interface.*Function)(context, &vec[1]);
+
+	for (size_t ndx = 0; ndx < sizeof(Type); ndx++)
+	{
+		if (reinterpret_cast<deUint8*>(&vec[0])[ndx] != reinterpret_cast<deUint8*>(&vec[1])[ndx])
+			return false;
+	}
+
+	return true;
+}
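+// Usage sketch (editorial illustration; 'vki' and 'physicalDevice' are hypothetical
+// objects provided by the calling test):
+//
+//   if (!validateInitComplete(physicalDevice, &InstanceInterface::getPhysicalDeviceProperties, vki))
+//       TCU_FAIL("Query left part of VkPhysicalDeviceProperties uninitialized");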
+
+} // vk
+
+#endif // _VKQUERYUTIL_HPP
diff --git a/external/vulkancts/framework/vulkan/vkRef.cpp b/external/vulkancts/framework/vulkan/vkRef.cpp
new file mode 100644
index 0000000..5eb2764
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkRef.cpp
@@ -0,0 +1,37 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan object reference holder.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkRef.hpp"
+
+DE_EMPTY_CPP_FILE
diff --git a/external/vulkancts/framework/vulkan/vkRef.hpp b/external/vulkancts/framework/vulkan/vkRef.hpp
new file mode 100644
index 0000000..19fdc66
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkRef.hpp
@@ -0,0 +1,340 @@
+#ifndef _VKREF_HPP
+#define _VKREF_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan object reference holder.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vkStrUtil.hpp"
+#include "deMeta.hpp"
+
+#include <algorithm>
+
+namespace vk
+{
+
+namespace refdetails
+{
+
+using std::swap;
+
+template<typename T>
+struct Checked
+{
+	explicit inline		Checked		(T object_) : object(object_) {}
+
+	T					object;
+};
+
+//! Check that object is not null
+template<typename T>
+inline Checked<T> check (T object)
+{
+	if (!object)
+		throw tcu::TestError("Object check() failed", (std::string(getTypeName<T>()) + " = 0").c_str(), __FILE__, __LINE__);
+	return Checked<T>(object);
+}
+
+//! Declare object as checked earlier
+template<typename T>
+inline Checked<T> notNull (T object)
+{
+	if (!object)
+		throw tcu::InternalError("Null object was given to notNull()", (std::string(getTypeName<T>()) + " = 0").c_str(), __FILE__, __LINE__);
+	return Checked<T>(object);
+}
+
+//! Allow null object
+template<typename T>
+inline Checked<T> allowNull (T object)
+{
+	return Checked<T>(object);
+}
+
+template<typename T>
+class Deleter
+{
+public:
+									Deleter		(const DeviceInterface& deviceIface, VkDevice device, const VkAllocationCallbacks* allocator)
+										: m_deviceIface	(&deviceIface)
+										, m_device		(device)
+										, m_allocator	(allocator)
+									{}
+									Deleter		(void)
+										: m_deviceIface	(DE_NULL)
+										, m_device		(DE_NULL)
+										, m_allocator	(DE_NULL)
+									{}
+
+	void							operator()	(T obj) const;
+
+private:
+	const DeviceInterface*			m_deviceIface;
+	VkDevice						m_device;
+	const VkAllocationCallbacks*	m_allocator;
+};
+
+template<>
+class Deleter<VkInstance>
+{
+public:
+									Deleter		(const PlatformInterface& platformIface, VkInstance instance, const VkAllocationCallbacks* allocator)
+										: m_destroyInstance	((DestroyInstanceFunc)platformIface.getInstanceProcAddr(instance, "vkDestroyInstance"))
+										, m_allocator		(allocator)
+									{}
+									Deleter		(void)
+										: m_destroyInstance	((DestroyInstanceFunc)DE_NULL)
+										, m_allocator		(DE_NULL)
+									{}
+
+	void							operator()	(VkInstance obj) const { m_destroyInstance(obj, m_allocator); }
+
+private:
+	DestroyInstanceFunc				m_destroyInstance;
+	const VkAllocationCallbacks*	m_allocator;
+};
+
+template<>
+class Deleter<VkDevice>
+{
+public:
+									Deleter		(const InstanceInterface& instanceIface, VkDevice device, const VkAllocationCallbacks* allocator)
+										: m_destroyDevice	((DestroyDeviceFunc)instanceIface.getDeviceProcAddr(device, "vkDestroyDevice"))
+										, m_allocator		(allocator)
+									{}
+									Deleter		(void)
+										: m_destroyDevice	((DestroyDeviceFunc)DE_NULL)
+										, m_allocator		(DE_NULL)
+									{}
+
+	void							operator()	(VkDevice obj) const { m_destroyDevice(obj, m_allocator); }
+
+private:
+	DestroyDeviceFunc				m_destroyDevice;
+	const VkAllocationCallbacks*	m_allocator;
+};
+
+template<>
+class Deleter<VkDescriptorSet>
+{
+public:
+							Deleter		(const DeviceInterface& deviceIface, VkDevice device, VkDescriptorPool pool)
+								: m_deviceIface	(&deviceIface)
+								, m_device		(device)
+								, m_pool		(pool)
+							{}
+							Deleter		(void)
+								: m_deviceIface	(DE_NULL)
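+// CharPtr wraps a C-string pointer so that logging code prints it quoted, or as
+// "(null)" when the pointer is null, instead of printing the raw address.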
+								, m_device		(DE_NULL)
+								, m_pool		(DE_NULL)
+							{}
+
+	void					operator()	(VkDescriptorSet obj) const { m_deviceIface->freeDescriptorSets(m_device, m_pool, 1, &obj); }
+
+private:
+	const DeviceInterface*	m_deviceIface;
+	VkDevice				m_device;
+	VkDescriptorPool		m_pool;
+};
+
+template<>
+class Deleter<VkCommandBuffer>
+{
+public:
+							Deleter		(const DeviceInterface& deviceIface, VkDevice device, VkCommandPool pool)
+								: m_deviceIface	(&deviceIface)
+								, m_device		(device)
+								, m_pool		(pool)
+							{}
+							Deleter		(void)
+								: m_deviceIface	(DE_NULL)
+								, m_device		(DE_NULL)
+								, m_pool		(DE_NULL)
+							{}
+
+	void					operator()	(VkCommandBuffer obj) const { m_deviceIface->freeCommandBuffers(m_device, m_pool, 1, &obj); }
+
+private:
+	const DeviceInterface*	m_deviceIface;
+	VkDevice				m_device;
+	VkCommandPool			m_pool;
+};
+
+template<typename T>
+struct RefData
+{
+				RefData		(T object_, Deleter<T> deleter_)
+								: object	(object_)
+								, deleter	(deleter_)
+				{}
+				RefData		(void)
+								: object	(0)
+				{}
+
+	T			object;
+	Deleter<T>	deleter;
+};
+
+template<typename T>
+class RefBase
+{
+public:
+						~RefBase	(void);
+
+	inline const T&		get			(void) const throw() { return m_data.object;	}
+	inline const T&		operator*	(void) const throw() { return get();			}
+	inline operator		bool		(void) const throw() { return !!get();			}
+
+protected:
+						RefBase		(RefData<T> data) : m_data(data)	{}
+
+	void				reset		(void);				//!< Release previous object, set to null.
+	RefData<T>			disown		(void) throw();		//!< Disown and return object (ownership transferred to caller).
+	void				assign		(RefData<T> data);	//!< Set new pointer, release previous pointer.
+
+private:
+	RefData<T>			m_data;
+};
+
+template<typename T>
+inline RefBase<T>::~RefBase (void)
+{
+	this->reset();
+}
+
+template<typename T>
+inline void RefBase<T>::reset (void)
+{
+	if (!!m_data.object)
+		m_data.deleter(m_data.object);
+
+	m_data = RefData<T>();
+}
+
+template<typename T>
+inline RefData<T> RefBase<T>::disown (void) throw()
+{
+	RefData<T> tmp;
+	swap(m_data, tmp);
+	return tmp;
+}
+
+template<typename T>
+inline void RefBase<T>::assign (RefData<T> data)
+{
+	this->reset();
+	m_data = data;
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Movable Vulkan object reference.
+ *
+ * Similar to de::MovePtr.
+ *//*--------------------------------------------------------------------*/
+template<typename T>
+class Move : public RefBase<T>
+{
+public:
+	template<typename U>
+				Move		(Checked<U> object, Deleter<U> deleter)
+								: RefBase<T>(RefData<T>(object.object, deleter))
+				{}
+
+				Move		(RefData<T> data)
+								: RefBase<T>(data)
+				{}
+				Move		(Move<T>& other)
+								: RefBase<T>(other.RefBase<T>::disown())
+				{}
+				Move		(void)
+								: RefBase<T>(RefData<T>())
+				{}
+
+	T			disown		(void) { return this->RefBase<T>::disown().object; }
+	Move<T>&	operator=	(Move<T>& other);
+	Move<T>&	operator=	(RefData<T> data);
+
+	operator	RefData<T>	(void) { return this->RefBase<T>::disown(); }
+};
+
+template<typename T>
+inline Move<T>& Move<T>::operator= (Move<T>& other)
+{
+	if (this != &other)
+		this->assign(other.RefBase<T>::disown());
+
+	return *this;
+}
+
+template<typename T>
+inline Move<T>& Move<T>::operator= (RefData<T> data)
+{
+	this->assign(data);
+	return *this;
+}
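+// Usage sketch (editorial illustration): createBuffer() is the vkRefUtil wrapper, and
+// 'vkd', 'device' and 'bufferCreateInfo' are hypothetical caller-owned objects.
+//
+//   Move<VkBuffer> buffer = createBuffer(vkd, device, &bufferCreateInfo); // owns the buffer
+//   VkBuffer       raw    = *buffer;                                      // borrow without releasing ownership
+//   Move<VkBuffer> other  (buffer);                                       // ownership transfers; 'buffer' is now null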
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Unique Vulkan object reference.
+ *
+ * Similar to de::UniquePtr.
+ *//*--------------------------------------------------------------------*/
+template<typename T>
+class Unique : public RefBase<T>
+{
+public:
+	template<typename U>
+				Unique		(Checked<U> object, Deleter<U> deleter)
+								: RefBase<T>(RefData<T>(object.object, deleter))
+				{}
+
+				Unique		(RefData<T> data)
+								: RefBase<T>(data)
+				{}
+
+private:
+				Unique		(const Unique<T>&);
+	Unique<T>&	operator=	(const Unique<T>&);
+};
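+// Usage sketch (editorial illustration): createFence() is the vkRefUtil wrapper; 'vkd',
+// 'device', 'queue', 'fenceCreateInfo' and 'submitInfo' are hypothetical caller-owned objects.
+//
+//   const Unique<VkFence> fence (createFence(vkd, device, &fenceCreateInfo));
+//   VK_CHECK(vkd.queueSubmit(queue, 1u, &submitInfo, *fence));
+//   // The fence is destroyed automatically when 'fence' goes out of scope.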
+
+} // refdetails
+
+using refdetails::Move;
+using refdetails::Unique;
+using refdetails::Deleter;
+using refdetails::check;
+using refdetails::notNull;
+using refdetails::allowNull;
+
+} // vk
+
+#endif // _VKREF_HPP
diff --git a/external/vulkancts/framework/vulkan/vkRefUtil.cpp b/external/vulkancts/framework/vulkan/vkRefUtil.cpp
new file mode 100644
index 0000000..d6b6abf
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkRefUtil.cpp
@@ -0,0 +1,80 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan object reference holder utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkRefUtil.hpp"
+
+namespace vk
+{
+
+#include "vkRefUtilImpl.inl"
+
+Move<VkPipeline> createGraphicsPipeline (const DeviceInterface&					vk,
+										 VkDevice								device,
+										 VkPipelineCache						pipelineCache,
+										 const VkGraphicsPipelineCreateInfo*	pCreateInfo,
+										 const VkAllocationCallbacks*			pAllocator)
+{
+	VkPipeline object = 0;
+	VK_CHECK(vk.createGraphicsPipelines(device, pipelineCache, 1u, pCreateInfo, pAllocator, &object));
+	return Move<VkPipeline>(check<VkPipeline>(object), Deleter<VkPipeline>(vk, device, pAllocator));
+}
+
+Move<VkPipeline> createComputePipeline (const DeviceInterface&				vk,
+										VkDevice							device,
+										VkPipelineCache						pipelineCache,
+										const VkComputePipelineCreateInfo*	pCreateInfo,
+										const VkAllocationCallbacks*		pAllocator)
+{
+	VkPipeline object = 0;
+	VK_CHECK(vk.createComputePipelines(device, pipelineCache, 1u, pCreateInfo, pAllocator, &object));
+	return Move<VkPipeline>(check<VkPipeline>(object), Deleter<VkPipeline>(vk, device, pAllocator));
+}
+
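+// The allocate*() wrappers below manage exactly one object per call; the pool given in
+// the allocate-info is also what the returned Move<> uses to free the object again.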
+Move<VkCommandBuffer> allocateCommandBuffer (const DeviceInterface& vk, VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo)
+{
+	VkCommandBuffer object = 0;
+	DE_ASSERT(pAllocateInfo->commandBufferCount == 1u);
+	VK_CHECK(vk.allocateCommandBuffers(device, pAllocateInfo, &object));
+	return Move<VkCommandBuffer>(check<VkCommandBuffer>(object), Deleter<VkCommandBuffer>(vk, device, pAllocateInfo->commandPool));
+}
+
+Move<VkDescriptorSet> allocateDescriptorSet (const DeviceInterface& vk, VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo)
+{
+	VkDescriptorSet object = 0;
+	DE_ASSERT(pAllocateInfo->descriptorSetCount == 1u);
+	VK_CHECK(vk.allocateDescriptorSets(device, pAllocateInfo, &object));
+	return Move<VkDescriptorSet>(check<VkDescriptorSet>(object), Deleter<VkDescriptorSet>(vk, device, pAllocateInfo->descriptorPool));
+}
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkRefUtil.hpp b/external/vulkancts/framework/vulkan/vkRefUtil.hpp
new file mode 100644
index 0000000..fc2e46f
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkRefUtil.hpp
@@ -0,0 +1,60 @@
+#ifndef _VKREFUTIL_HPP
+#define _VKREFUTIL_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan object reference holder utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vkRef.hpp"
+
+namespace vk
+{
+
+#include "vkRefUtil.inl"
+
+Move<VkPipeline>		createGraphicsPipeline	(const DeviceInterface&					vk,
+												 VkDevice								device,
+												 VkPipelineCache						pipelineCache,
+												 const VkGraphicsPipelineCreateInfo*	pCreateInfo,
+												 const VkAllocationCallbacks*			pAllocator = DE_NULL);
+Move<VkPipeline>		createComputePipeline	(const DeviceInterface&					vk,
+												 VkDevice								device,
+												 VkPipelineCache						pipelineCache,
+												 const VkComputePipelineCreateInfo*		pCreateInfo,
+												 const VkAllocationCallbacks*			pAllocator = DE_NULL);
+Move<VkCommandBuffer>	allocateCommandBuffer	(const DeviceInterface& vk, VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo);
+Move<VkDescriptorSet>	allocateDescriptorSet	(const DeviceInterface& vk, VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo);
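+// Usage sketch (editorial illustration; 'vkd', 'device', 'pipelineCache' and
+// 'pipelineCreateInfo' are hypothetical caller-owned objects):
+//
+//   const Unique<VkPipeline> pipeline (createGraphicsPipeline(vkd, device, *pipelineCache, &pipelineCreateInfo));
+//   // The pipeline is destroyed via vkDestroyPipeline when 'pipeline' goes out of scope.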
+
+} // vk
+
+#endif // _VKREFUTIL_HPP
diff --git a/external/vulkancts/framework/vulkan/vkRefUtil.inl b/external/vulkancts/framework/vulkan/vkRefUtil.inl
new file mode 100644
index 0000000..2d2a5fd
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkRefUtil.inl
@@ -0,0 +1,25 @@
+/* WARNING: This is auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+Move<VkInstance>			createInstance				(const PlatformInterface& vk, const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkDevice>				createDevice				(const InstanceInterface& vk, VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkDeviceMemory>		allocateMemory				(const DeviceInterface& vk, VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkFence>				createFence					(const DeviceInterface& vk, VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkSemaphore>			createSemaphore				(const DeviceInterface& vk, VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkEvent>				createEvent					(const DeviceInterface& vk, VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkQueryPool>			createQueryPool				(const DeviceInterface& vk, VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkBuffer>				createBuffer				(const DeviceInterface& vk, VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkBufferView>			createBufferView			(const DeviceInterface& vk, VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkImage>				createImage					(const DeviceInterface& vk, VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkImageView>			createImageView				(const DeviceInterface& vk, VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkShaderModule>		createShaderModule			(const DeviceInterface& vk, VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkPipelineCache>		createPipelineCache			(const DeviceInterface& vk, VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkPipeline>			createGraphicsPipelines		(const DeviceInterface& vk, VkDevice device, VkPipelineCache pipelineCache, deUint32 createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkPipeline>			createComputePipelines		(const DeviceInterface& vk, VkDevice device, VkPipelineCache pipelineCache, deUint32 createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkPipelineLayout>		createPipelineLayout		(const DeviceInterface& vk, VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkSampler>				createSampler				(const DeviceInterface& vk, VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkDescriptorSetLayout>	createDescriptorSetLayout	(const DeviceInterface& vk, VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkDescriptorPool>		createDescriptorPool		(const DeviceInterface& vk, VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkFramebuffer>			createFramebuffer			(const DeviceInterface& vk, VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkRenderPass>			createRenderPass			(const DeviceInterface& vk, VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
+Move<VkCommandPool>			createCommandPool			(const DeviceInterface& vk, VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator = DE_NULL);
diff --git a/external/vulkancts/framework/vulkan/vkRefUtilImpl.inl b/external/vulkancts/framework/vulkan/vkRefUtilImpl.inl
new file mode 100644
index 0000000..021e48b
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkRefUtilImpl.inl
@@ -0,0 +1,276 @@
+/* WARNING: This is auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+namespace refdetails
+{
+
+template<>
+void Deleter<VkDeviceMemory>::operator() (VkDeviceMemory obj) const
+{
+	m_deviceIface->freeMemory(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkFence>::operator() (VkFence obj) const
+{
+	m_deviceIface->destroyFence(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkSemaphore>::operator() (VkSemaphore obj) const
+{
+	m_deviceIface->destroySemaphore(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkEvent>::operator() (VkEvent obj) const
+{
+	m_deviceIface->destroyEvent(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkQueryPool>::operator() (VkQueryPool obj) const
+{
+	m_deviceIface->destroyQueryPool(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkBuffer>::operator() (VkBuffer obj) const
+{
+	m_deviceIface->destroyBuffer(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkBufferView>::operator() (VkBufferView obj) const
+{
+	m_deviceIface->destroyBufferView(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkImage>::operator() (VkImage obj) const
+{
+	m_deviceIface->destroyImage(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkImageView>::operator() (VkImageView obj) const
+{
+	m_deviceIface->destroyImageView(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkShaderModule>::operator() (VkShaderModule obj) const
+{
+	m_deviceIface->destroyShaderModule(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkPipelineCache>::operator() (VkPipelineCache obj) const
+{
+	m_deviceIface->destroyPipelineCache(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkPipeline>::operator() (VkPipeline obj) const
+{
+	m_deviceIface->destroyPipeline(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkPipelineLayout>::operator() (VkPipelineLayout obj) const
+{
+	m_deviceIface->destroyPipelineLayout(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkSampler>::operator() (VkSampler obj) const
+{
+	m_deviceIface->destroySampler(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkDescriptorSetLayout>::operator() (VkDescriptorSetLayout obj) const
+{
+	m_deviceIface->destroyDescriptorSetLayout(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkDescriptorPool>::operator() (VkDescriptorPool obj) const
+{
+	m_deviceIface->destroyDescriptorPool(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkFramebuffer>::operator() (VkFramebuffer obj) const
+{
+	m_deviceIface->destroyFramebuffer(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkRenderPass>::operator() (VkRenderPass obj) const
+{
+	m_deviceIface->destroyRenderPass(m_device, obj, m_allocator);
+}
+
+template<>
+void Deleter<VkCommandPool>::operator() (VkCommandPool obj) const
+{
+	m_deviceIface->destroyCommandPool(m_device, obj, m_allocator);
+}
+
+} // refdetails
+
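+// Each wrapper below creates the object through the corresponding Vulkan entry point,
+// validates the returned handle with check() and hands ownership to a Move<> that
+// destroys the object again through the matching Deleter<> specialization above.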
+Move<VkInstance> createInstance (const PlatformInterface& vk, const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkInstance object = 0;
+	VK_CHECK(vk.createInstance(pCreateInfo, pAllocator, &object));
+	return Move<VkInstance>(check<VkInstance>(object), Deleter<VkInstance>(vk, object, pAllocator));
+}
+
+Move<VkDevice> createDevice (const InstanceInterface& vk, VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkDevice object = 0;
+	VK_CHECK(vk.createDevice(physicalDevice, pCreateInfo, pAllocator, &object));
+	return Move<VkDevice>(check<VkDevice>(object), Deleter<VkDevice>(vk, object, pAllocator));
+}
+
+Move<VkDeviceMemory> allocateMemory (const DeviceInterface& vk, VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkDeviceMemory object = 0;
+	VK_CHECK(vk.allocateMemory(device, pAllocateInfo, pAllocator, &object));
+	return Move<VkDeviceMemory>(check<VkDeviceMemory>(object), Deleter<VkDeviceMemory>(vk, device, pAllocator));
+}
+
+Move<VkFence> createFence (const DeviceInterface& vk, VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkFence object = 0;
+	VK_CHECK(vk.createFence(device, pCreateInfo, pAllocator, &object));
+	return Move<VkFence>(check<VkFence>(object), Deleter<VkFence>(vk, device, pAllocator));
+}
+
+Move<VkSemaphore> createSemaphore (const DeviceInterface& vk, VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkSemaphore object = 0;
+	VK_CHECK(vk.createSemaphore(device, pCreateInfo, pAllocator, &object));
+	return Move<VkSemaphore>(check<VkSemaphore>(object), Deleter<VkSemaphore>(vk, device, pAllocator));
+}
+
+Move<VkEvent> createEvent (const DeviceInterface& vk, VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkEvent object = 0;
+	VK_CHECK(vk.createEvent(device, pCreateInfo, pAllocator, &object));
+	return Move<VkEvent>(check<VkEvent>(object), Deleter<VkEvent>(vk, device, pAllocator));
+}
+
+Move<VkQueryPool> createQueryPool (const DeviceInterface& vk, VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkQueryPool object = 0;
+	VK_CHECK(vk.createQueryPool(device, pCreateInfo, pAllocator, &object));
+	return Move<VkQueryPool>(check<VkQueryPool>(object), Deleter<VkQueryPool>(vk, device, pAllocator));
+}
+
+Move<VkBuffer> createBuffer (const DeviceInterface& vk, VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkBuffer object = 0;
+	VK_CHECK(vk.createBuffer(device, pCreateInfo, pAllocator, &object));
+	return Move<VkBuffer>(check<VkBuffer>(object), Deleter<VkBuffer>(vk, device, pAllocator));
+}
+
+Move<VkBufferView> createBufferView (const DeviceInterface& vk, VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkBufferView object = 0;
+	VK_CHECK(vk.createBufferView(device, pCreateInfo, pAllocator, &object));
+	return Move<VkBufferView>(check<VkBufferView>(object), Deleter<VkBufferView>(vk, device, pAllocator));
+}
+
+Move<VkImage> createImage (const DeviceInterface& vk, VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkImage object = 0;
+	VK_CHECK(vk.createImage(device, pCreateInfo, pAllocator, &object));
+	return Move<VkImage>(check<VkImage>(object), Deleter<VkImage>(vk, device, pAllocator));
+}
+
+Move<VkImageView> createImageView (const DeviceInterface& vk, VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkImageView object = 0;
+	VK_CHECK(vk.createImageView(device, pCreateInfo, pAllocator, &object));
+	return Move<VkImageView>(check<VkImageView>(object), Deleter<VkImageView>(vk, device, pAllocator));
+}
+
+Move<VkShaderModule> createShaderModule (const DeviceInterface& vk, VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkShaderModule object = 0;
+	VK_CHECK(vk.createShaderModule(device, pCreateInfo, pAllocator, &object));
+	return Move<VkShaderModule>(check<VkShaderModule>(object), Deleter<VkShaderModule>(vk, device, pAllocator));
+}
+
+Move<VkPipelineCache> createPipelineCache (const DeviceInterface& vk, VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkPipelineCache object = 0;
+	VK_CHECK(vk.createPipelineCache(device, pCreateInfo, pAllocator, &object));
+	return Move<VkPipelineCache>(check<VkPipelineCache>(object), Deleter<VkPipelineCache>(vk, device, pAllocator));
+}
+
+Move<VkPipeline> createGraphicsPipelines (const DeviceInterface& vk, VkDevice device, VkPipelineCache pipelineCache, deUint32 createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator)
+{
+	VkPipeline object = 0;
+	VK_CHECK(vk.createGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, &object));
+	return Move<VkPipeline>(check<VkPipeline>(object), Deleter<VkPipeline>(vk, device, pAllocator));
+}
+
+Move<VkPipeline> createComputePipelines (const DeviceInterface& vk, VkDevice device, VkPipelineCache pipelineCache, deUint32 createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator)
+{
+	VkPipeline object = 0;
+	VK_CHECK(vk.createComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, &object));
+	return Move<VkPipeline>(check<VkPipeline>(object), Deleter<VkPipeline>(vk, device, pAllocator));
+}
+
+Move<VkPipelineLayout> createPipelineLayout (const DeviceInterface& vk, VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkPipelineLayout object = 0;
+	VK_CHECK(vk.createPipelineLayout(device, pCreateInfo, pAllocator, &object));
+	return Move<VkPipelineLayout>(check<VkPipelineLayout>(object), Deleter<VkPipelineLayout>(vk, device, pAllocator));
+}
+
+Move<VkSampler> createSampler (const DeviceInterface& vk, VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkSampler object = 0;
+	VK_CHECK(vk.createSampler(device, pCreateInfo, pAllocator, &object));
+	return Move<VkSampler>(check<VkSampler>(object), Deleter<VkSampler>(vk, device, pAllocator));
+}
+
+Move<VkDescriptorSetLayout> createDescriptorSetLayout (const DeviceInterface& vk, VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkDescriptorSetLayout object = 0;
+	VK_CHECK(vk.createDescriptorSetLayout(device, pCreateInfo, pAllocator, &object));
+	return Move<VkDescriptorSetLayout>(check<VkDescriptorSetLayout>(object), Deleter<VkDescriptorSetLayout>(vk, device, pAllocator));
+}
+
+Move<VkDescriptorPool> createDescriptorPool (const DeviceInterface& vk, VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkDescriptorPool object = 0;
+	VK_CHECK(vk.createDescriptorPool(device, pCreateInfo, pAllocator, &object));
+	return Move<VkDescriptorPool>(check<VkDescriptorPool>(object), Deleter<VkDescriptorPool>(vk, device, pAllocator));
+}
+
+Move<VkFramebuffer> createFramebuffer (const DeviceInterface& vk, VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkFramebuffer object = 0;
+	VK_CHECK(vk.createFramebuffer(device, pCreateInfo, pAllocator, &object));
+	return Move<VkFramebuffer>(check<VkFramebuffer>(object), Deleter<VkFramebuffer>(vk, device, pAllocator));
+}
+
+Move<VkRenderPass> createRenderPass (const DeviceInterface& vk, VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkRenderPass object = 0;
+	VK_CHECK(vk.createRenderPass(device, pCreateInfo, pAllocator, &object));
+	return Move<VkRenderPass>(check<VkRenderPass>(object), Deleter<VkRenderPass>(vk, device, pAllocator));
+}
+
+Move<VkCommandPool> createCommandPool (const DeviceInterface& vk, VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator)
+{
+	VkCommandPool object = 0;
+	VK_CHECK(vk.createCommandPool(device, pCreateInfo, pAllocator, &object));
+	return Move<VkCommandPool>(check<VkCommandPool>(object), Deleter<VkCommandPool>(vk, device, pAllocator));
+}
+
diff --git a/external/vulkancts/framework/vulkan/vkSpirVAsm.cpp b/external/vulkancts/framework/vulkan/vkSpirVAsm.cpp
new file mode 100644
index 0000000..549de3d
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkSpirVAsm.cpp
@@ -0,0 +1,98 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief SPIR-V assembly to binary.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkSpirVAsm.hpp"
+#include "vkSpirVProgram.hpp"
+#include "deArrayUtil.hpp"
+#include "deMemory.h"
+#include "deClock.h"
+#include "qpDebugOut.h"
+
+#if defined(DEQP_HAVE_SPIRV_TOOLS)
+#	include "deSingleton.h"
+
+#	include "libspirv/libspirv.h"
+#endif
+
+namespace vk
+{
+
+using std::string;
+using std::vector;
+
+#if defined(DEQP_HAVE_SPIRV_TOOLS)
+
+
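+// Assembles the SPIR-V text held in 'program' into a binary word stream in 'dst',
+// recording assembly time, log and status in 'buildInfo'. Throws a test failure if
+// the assembler rejects the source.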
+void assembleSpirV (const SpirVAsmSource* program, std::vector<deUint8>* dst, SpirVProgramInfo* buildInfo)
+{
+	spv_context context = spvContextCreate();
+
+	const std::string&	spvSource			= program->program.str();
+	spv_binary			binary				= DE_NULL;
+	spv_diagnostic		diagnostic			= DE_NULL;
+	const deUint64		compileStartTime	= deGetMicroseconds();
+	const spv_result_t	compileOk			= spvTextToBinary(context, spvSource.c_str(), spvSource.size(), &binary, &diagnostic);
+
+	{
+		buildInfo->source			= program;
+		buildInfo->infoLog			= diagnostic? diagnostic->error : ""; // \todo [2015-07-13 pyry] Include debug log?
+		buildInfo->compileTimeUs	= deGetMicroseconds() - compileStartTime;
+		buildInfo->compileOk		= (compileOk == SPV_SUCCESS);
+	}
+
+	if (compileOk != SPV_SUCCESS)
+	{
+		// Release assembler resources before reporting the failure so they are not leaked.
+		spvBinaryDestroy(binary);
+		spvDiagnosticDestroy(diagnostic);
+		spvContextDestroy(context);
+		TCU_FAIL("Failed to compile shader");
+	}
+
+	dst->resize(binary->wordCount * sizeof(deUint32));
+#if (DE_ENDIANNESS == DE_LITTLE_ENDIAN)
+	deMemcpy(&(*dst)[0], &binary->code[0], dst->size());
+#else
+#	error "Big-endian not supported"
+#endif
+	spvBinaryDestroy(binary);
+	spvDiagnosticDestroy(diagnostic);
+	spvContextDestroy(context);
+}
+
+#else // defined(DEQP_HAVE_SPIRV_TOOLS)
+
+void assembleSpirV (const SpirVAsmSource*, std::vector<deUint8>*, SpirVProgramInfo*)
+{
+	TCU_THROW(NotSupportedError, "SPIR-V assembly not supported (DEQP_HAVE_SPIRV_TOOLS not defined)");
+}
+
+#endif
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkSpirVAsm.hpp b/external/vulkancts/framework/vulkan/vkSpirVAsm.hpp
new file mode 100644
index 0000000..2897ecd
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkSpirVAsm.hpp
@@ -0,0 +1,48 @@
+#ifndef _VKSPIRVASM_HPP
+#define _VKSPIRVASM_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief SPIR-V assembly to binary.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vkPrograms.hpp"
+
+namespace vk
+{
+
+//! Assemble SPIR-V program. Will fail with NotSupportedError if compiler is not available.
+void assembleSpirV (const SpirVAsmSource* program, std::vector<deUint8>* dst, SpirVProgramInfo* buildInfo);
+
+} // vk
+
+#endif // _VKSPIRVASM_HPP
diff --git a/external/vulkancts/framework/vulkan/vkSpirVProgram.cpp b/external/vulkancts/framework/vulkan/vkSpirVProgram.cpp
new file mode 100644
index 0000000..cd73668
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkSpirVProgram.cpp
@@ -0,0 +1,60 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief SPIR-V program and binary info.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkSpirVProgram.hpp"
+
+#include "tcuTestLog.hpp"
+
+namespace vk
+{
+
+tcu::TestLog& operator<< (tcu::TestLog& log, const SpirVProgramInfo& shaderInfo)
+{
+	log << tcu::TestLog::ShaderProgram(shaderInfo.compileOk, shaderInfo.infoLog) << tcu::TestLog::EndShaderProgram;
+
+	// Write statistics
+	log << tcu::TestLog::Float(	"SpirVAssemblyTime",
+								"SpirV assembly time",
+								"ms", QP_KEY_TAG_TIME, (float)shaderInfo.compileTimeUs / 1000.0f);
+	return log;
+}
+
+tcu::TestLog& operator<< (tcu::TestLog& log, const SpirVAsmSource& source)
+{
+	log << tcu::TestLog::KernelSource(source.program.str());
+
+	return log;
+}
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkSpirVProgram.hpp b/external/vulkancts/framework/vulkan/vkSpirVProgram.hpp
new file mode 100644
index 0000000..c5c0d93
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkSpirVProgram.hpp
@@ -0,0 +1,77 @@
+#ifndef _VKSPIRVPROGRAM_HPP
+#define _VKSPIRVPROGRAM_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief SPIR-V program and binary info.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "tcuTestLog.hpp"
+
+#include <string>
+#include <sstream>
+
+namespace vk
+{
+
+struct SpirVAsmSource
+{
+	template<typename T>
+	SpirVAsmSource& operator<<(const T& val)
+	{
+		program << val;
+		return *this;
+	}
+	std::ostringstream program;
+};
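+// Usage sketch (editorial illustration): assembly text is streamed into the source
+// object and later passed to assembleSpirV().
+//
+//   SpirVAsmSource src;
+//   src << "OpCapability Shader\n"
+//       << "OpMemoryModel Logical GLSL450\n";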
+
+struct SpirVProgramInfo
+{
+	SpirVProgramInfo()
+		: source		(DE_NULL)
+		, compileTimeUs	(0)
+		, compileOk		(false)
+	{
+	}
+
+	const SpirVAsmSource*	source;
+	std::string				infoLog;
+	deUint64				compileTimeUs;
+	bool					compileOk;
+};
+
+tcu::TestLog&	operator<<			(tcu::TestLog& log, const SpirVProgramInfo& shaderInfo);
+tcu::TestLog&	operator<<			(tcu::TestLog& log, const SpirVAsmSource& program);
+
+} // vk
+
+#endif // _VKSPIRVPROGRAM_HPP
diff --git a/external/vulkancts/framework/vulkan/vkStrUtil.cpp b/external/vulkancts/framework/vulkan/vkStrUtil.cpp
new file mode 100644
index 0000000..6e63994
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkStrUtil.cpp
@@ -0,0 +1,62 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Pretty-printing and logging utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkStrUtil.hpp"
+
+namespace vk
+{
+
+struct CharPtr
+{
+	const char*	ptr;
+
+	CharPtr (const char* ptr_) : ptr(ptr_) {}
+};
+
+std::ostream& operator<< (std::ostream& str, const CharPtr& ptr)
+{
+	if (!ptr.ptr)
+		return str << "(null)";
+	else
+		return str << '"' << ptr.ptr << '"';
+}
+
+inline CharPtr getCharPtrStr (const char* ptr)
+{
+	return CharPtr(ptr);
+}
+
+#include "vkStrUtilImpl.inl"
+
+} // vk
diff --git a/external/vulkancts/framework/vulkan/vkStrUtil.hpp b/external/vulkancts/framework/vulkan/vkStrUtil.hpp
new file mode 100644
index 0000000..b4f676d
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkStrUtil.hpp
@@ -0,0 +1,56 @@
+#ifndef _VKSTRUTIL_HPP
+#define _VKSTRUTIL_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Pretty-printing and logging utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "tcuFormatUtil.hpp"
+
+namespace vk
+{
+
+#include "vkStrUtil.inl"
+
+template<typename T>
+const char*	getTypeName	(void);
+
+template<HandleType Type>
+inline std::ostream& operator<< (std::ostream& s, const Handle<Type>& handle)
+{
+	return s << tcu::toHex(handle.getInternal());
+}
+
+} // vk
+
+#endif // _VKSTRUTIL_HPP
diff --git a/external/vulkancts/framework/vulkan/vkStrUtil.inl b/external/vulkancts/framework/vulkan/vkStrUtil.inl
new file mode 100644
index 0000000..92bf240
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkStrUtil.inl
@@ -0,0 +1,295 @@
+/* WARNING: This is auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+const char*	getPipelineCacheHeaderVersionName	(VkPipelineCacheHeaderVersion value);
+const char*	getResultName						(VkResult value);
+const char*	getStructureTypeName				(VkStructureType value);
+const char*	getSystemAllocationScopeName		(VkSystemAllocationScope value);
+const char*	getInternalAllocationTypeName		(VkInternalAllocationType value);
+const char*	getFormatName						(VkFormat value);
+const char*	getImageTypeName					(VkImageType value);
+const char*	getImageTilingName					(VkImageTiling value);
+const char*	getPhysicalDeviceTypeName			(VkPhysicalDeviceType value);
+const char*	getQueryTypeName					(VkQueryType value);
+const char*	getSharingModeName					(VkSharingMode value);
+const char*	getImageLayoutName					(VkImageLayout value);
+const char*	getImageViewTypeName				(VkImageViewType value);
+const char*	getComponentSwizzleName				(VkComponentSwizzle value);
+const char*	getVertexInputRateName				(VkVertexInputRate value);
+const char*	getPrimitiveTopologyName			(VkPrimitiveTopology value);
+const char*	getPolygonModeName					(VkPolygonMode value);
+const char*	getFrontFaceName					(VkFrontFace value);
+const char*	getCompareOpName					(VkCompareOp value);
+const char*	getStencilOpName					(VkStencilOp value);
+const char*	getLogicOpName						(VkLogicOp value);
+const char*	getBlendFactorName					(VkBlendFactor value);
+const char*	getBlendOpName						(VkBlendOp value);
+const char*	getDynamicStateName					(VkDynamicState value);
+const char*	getFilterName						(VkFilter value);
+const char*	getSamplerMipmapModeName			(VkSamplerMipmapMode value);
+const char*	getSamplerAddressModeName			(VkSamplerAddressMode value);
+const char*	getBorderColorName					(VkBorderColor value);
+const char*	getDescriptorTypeName				(VkDescriptorType value);
+const char*	getAttachmentLoadOpName				(VkAttachmentLoadOp value);
+const char*	getAttachmentStoreOpName			(VkAttachmentStoreOp value);
+const char*	getPipelineBindPointName			(VkPipelineBindPoint value);
+const char*	getCommandBufferLevelName			(VkCommandBufferLevel value);
+const char*	getIndexTypeName					(VkIndexType value);
+const char*	getSubpassContentsName				(VkSubpassContents value);
+const char*	getColorSpaceKHRName				(VkColorSpaceKHR value);
+const char*	getPresentModeKHRName				(VkPresentModeKHR value);
+
+inline tcu::Format::Enum<VkPipelineCacheHeaderVersion>	getPipelineCacheHeaderVersionStr	(VkPipelineCacheHeaderVersion value)	{ return tcu::Format::Enum<VkPipelineCacheHeaderVersion>(getPipelineCacheHeaderVersionName, value);	}
+inline tcu::Format::Enum<VkResult>						getResultStr						(VkResult value)						{ return tcu::Format::Enum<VkResult>(getResultName, value);											}
+inline tcu::Format::Enum<VkStructureType>				getStructureTypeStr					(VkStructureType value)					{ return tcu::Format::Enum<VkStructureType>(getStructureTypeName, value);							}
+inline tcu::Format::Enum<VkSystemAllocationScope>		getSystemAllocationScopeStr			(VkSystemAllocationScope value)			{ return tcu::Format::Enum<VkSystemAllocationScope>(getSystemAllocationScopeName, value);			}
+inline tcu::Format::Enum<VkInternalAllocationType>		getInternalAllocationTypeStr		(VkInternalAllocationType value)		{ return tcu::Format::Enum<VkInternalAllocationType>(getInternalAllocationTypeName, value);			}
+inline tcu::Format::Enum<VkFormat>						getFormatStr						(VkFormat value)						{ return tcu::Format::Enum<VkFormat>(getFormatName, value);											}
+inline tcu::Format::Enum<VkImageType>					getImageTypeStr						(VkImageType value)						{ return tcu::Format::Enum<VkImageType>(getImageTypeName, value);									}
+inline tcu::Format::Enum<VkImageTiling>					getImageTilingStr					(VkImageTiling value)					{ return tcu::Format::Enum<VkImageTiling>(getImageTilingName, value);								}
+inline tcu::Format::Enum<VkPhysicalDeviceType>			getPhysicalDeviceTypeStr			(VkPhysicalDeviceType value)			{ return tcu::Format::Enum<VkPhysicalDeviceType>(getPhysicalDeviceTypeName, value);					}
+inline tcu::Format::Enum<VkQueryType>					getQueryTypeStr						(VkQueryType value)						{ return tcu::Format::Enum<VkQueryType>(getQueryTypeName, value);									}
+inline tcu::Format::Enum<VkSharingMode>					getSharingModeStr					(VkSharingMode value)					{ return tcu::Format::Enum<VkSharingMode>(getSharingModeName, value);								}
+inline tcu::Format::Enum<VkImageLayout>					getImageLayoutStr					(VkImageLayout value)					{ return tcu::Format::Enum<VkImageLayout>(getImageLayoutName, value);								}
+inline tcu::Format::Enum<VkImageViewType>				getImageViewTypeStr					(VkImageViewType value)					{ return tcu::Format::Enum<VkImageViewType>(getImageViewTypeName, value);							}
+inline tcu::Format::Enum<VkComponentSwizzle>			getComponentSwizzleStr				(VkComponentSwizzle value)				{ return tcu::Format::Enum<VkComponentSwizzle>(getComponentSwizzleName, value);						}
+inline tcu::Format::Enum<VkVertexInputRate>				getVertexInputRateStr				(VkVertexInputRate value)				{ return tcu::Format::Enum<VkVertexInputRate>(getVertexInputRateName, value);						}
+inline tcu::Format::Enum<VkPrimitiveTopology>			getPrimitiveTopologyStr				(VkPrimitiveTopology value)				{ return tcu::Format::Enum<VkPrimitiveTopology>(getPrimitiveTopologyName, value);					}
+inline tcu::Format::Enum<VkPolygonMode>					getPolygonModeStr					(VkPolygonMode value)					{ return tcu::Format::Enum<VkPolygonMode>(getPolygonModeName, value);								}
+inline tcu::Format::Enum<VkFrontFace>					getFrontFaceStr						(VkFrontFace value)						{ return tcu::Format::Enum<VkFrontFace>(getFrontFaceName, value);									}
+inline tcu::Format::Enum<VkCompareOp>					getCompareOpStr						(VkCompareOp value)						{ return tcu::Format::Enum<VkCompareOp>(getCompareOpName, value);									}
+inline tcu::Format::Enum<VkStencilOp>					getStencilOpStr						(VkStencilOp value)						{ return tcu::Format::Enum<VkStencilOp>(getStencilOpName, value);									}
+inline tcu::Format::Enum<VkLogicOp>						getLogicOpStr						(VkLogicOp value)						{ return tcu::Format::Enum<VkLogicOp>(getLogicOpName, value);										}
+inline tcu::Format::Enum<VkBlendFactor>					getBlendFactorStr					(VkBlendFactor value)					{ return tcu::Format::Enum<VkBlendFactor>(getBlendFactorName, value);								}
+inline tcu::Format::Enum<VkBlendOp>						getBlendOpStr						(VkBlendOp value)						{ return tcu::Format::Enum<VkBlendOp>(getBlendOpName, value);										}
+inline tcu::Format::Enum<VkDynamicState>				getDynamicStateStr					(VkDynamicState value)					{ return tcu::Format::Enum<VkDynamicState>(getDynamicStateName, value);								}
+inline tcu::Format::Enum<VkFilter>						getFilterStr						(VkFilter value)						{ return tcu::Format::Enum<VkFilter>(getFilterName, value);											}
+inline tcu::Format::Enum<VkSamplerMipmapMode>			getSamplerMipmapModeStr				(VkSamplerMipmapMode value)				{ return tcu::Format::Enum<VkSamplerMipmapMode>(getSamplerMipmapModeName, value);					}
+inline tcu::Format::Enum<VkSamplerAddressMode>			getSamplerAddressModeStr			(VkSamplerAddressMode value)			{ return tcu::Format::Enum<VkSamplerAddressMode>(getSamplerAddressModeName, value);					}
+inline tcu::Format::Enum<VkBorderColor>					getBorderColorStr					(VkBorderColor value)					{ return tcu::Format::Enum<VkBorderColor>(getBorderColorName, value);								}
+inline tcu::Format::Enum<VkDescriptorType>				getDescriptorTypeStr				(VkDescriptorType value)				{ return tcu::Format::Enum<VkDescriptorType>(getDescriptorTypeName, value);							}
+inline tcu::Format::Enum<VkAttachmentLoadOp>			getAttachmentLoadOpStr				(VkAttachmentLoadOp value)				{ return tcu::Format::Enum<VkAttachmentLoadOp>(getAttachmentLoadOpName, value);						}
+inline tcu::Format::Enum<VkAttachmentStoreOp>			getAttachmentStoreOpStr				(VkAttachmentStoreOp value)				{ return tcu::Format::Enum<VkAttachmentStoreOp>(getAttachmentStoreOpName, value);					}
+inline tcu::Format::Enum<VkPipelineBindPoint>			getPipelineBindPointStr				(VkPipelineBindPoint value)				{ return tcu::Format::Enum<VkPipelineBindPoint>(getPipelineBindPointName, value);					}
+inline tcu::Format::Enum<VkCommandBufferLevel>			getCommandBufferLevelStr			(VkCommandBufferLevel value)			{ return tcu::Format::Enum<VkCommandBufferLevel>(getCommandBufferLevelName, value);					}
+inline tcu::Format::Enum<VkIndexType>					getIndexTypeStr						(VkIndexType value)						{ return tcu::Format::Enum<VkIndexType>(getIndexTypeName, value);									}
+inline tcu::Format::Enum<VkSubpassContents>				getSubpassContentsStr				(VkSubpassContents value)				{ return tcu::Format::Enum<VkSubpassContents>(getSubpassContentsName, value);						}
+inline tcu::Format::Enum<VkColorSpaceKHR>				getColorSpaceKHRStr					(VkColorSpaceKHR value)					{ return tcu::Format::Enum<VkColorSpaceKHR>(getColorSpaceKHRName, value);							}
+inline tcu::Format::Enum<VkPresentModeKHR>				getPresentModeKHRStr				(VkPresentModeKHR value)				{ return tcu::Format::Enum<VkPresentModeKHR>(getPresentModeKHRName, value);							}
+
+inline std::ostream&	operator<<	(std::ostream& s, VkPipelineCacheHeaderVersion value)	{ return s << getPipelineCacheHeaderVersionStr(value);	}
+inline std::ostream&	operator<<	(std::ostream& s, VkResult value)						{ return s << getResultStr(value);						}
+inline std::ostream&	operator<<	(std::ostream& s, VkStructureType value)				{ return s << getStructureTypeStr(value);				}
+inline std::ostream&	operator<<	(std::ostream& s, VkSystemAllocationScope value)		{ return s << getSystemAllocationScopeStr(value);		}
+inline std::ostream&	operator<<	(std::ostream& s, VkInternalAllocationType value)		{ return s << getInternalAllocationTypeStr(value);		}
+inline std::ostream&	operator<<	(std::ostream& s, VkFormat value)						{ return s << getFormatStr(value);						}
+inline std::ostream&	operator<<	(std::ostream& s, VkImageType value)					{ return s << getImageTypeStr(value);					}
+inline std::ostream&	operator<<	(std::ostream& s, VkImageTiling value)					{ return s << getImageTilingStr(value);					}
+inline std::ostream&	operator<<	(std::ostream& s, VkPhysicalDeviceType value)			{ return s << getPhysicalDeviceTypeStr(value);			}
+inline std::ostream&	operator<<	(std::ostream& s, VkQueryType value)					{ return s << getQueryTypeStr(value);					}
+inline std::ostream&	operator<<	(std::ostream& s, VkSharingMode value)					{ return s << getSharingModeStr(value);					}
+inline std::ostream&	operator<<	(std::ostream& s, VkImageLayout value)					{ return s << getImageLayoutStr(value);					}
+inline std::ostream&	operator<<	(std::ostream& s, VkImageViewType value)				{ return s << getImageViewTypeStr(value);				}
+inline std::ostream&	operator<<	(std::ostream& s, VkComponentSwizzle value)				{ return s << getComponentSwizzleStr(value);			}
+inline std::ostream&	operator<<	(std::ostream& s, VkVertexInputRate value)				{ return s << getVertexInputRateStr(value);				}
+inline std::ostream&	operator<<	(std::ostream& s, VkPrimitiveTopology value)			{ return s << getPrimitiveTopologyStr(value);			}
+inline std::ostream&	operator<<	(std::ostream& s, VkPolygonMode value)					{ return s << getPolygonModeStr(value);					}
+inline std::ostream&	operator<<	(std::ostream& s, VkFrontFace value)					{ return s << getFrontFaceStr(value);					}
+inline std::ostream&	operator<<	(std::ostream& s, VkCompareOp value)					{ return s << getCompareOpStr(value);					}
+inline std::ostream&	operator<<	(std::ostream& s, VkStencilOp value)					{ return s << getStencilOpStr(value);					}
+inline std::ostream&	operator<<	(std::ostream& s, VkLogicOp value)						{ return s << getLogicOpStr(value);						}
+inline std::ostream&	operator<<	(std::ostream& s, VkBlendFactor value)					{ return s << getBlendFactorStr(value);					}
+inline std::ostream&	operator<<	(std::ostream& s, VkBlendOp value)						{ return s << getBlendOpStr(value);						}
+inline std::ostream&	operator<<	(std::ostream& s, VkDynamicState value)					{ return s << getDynamicStateStr(value);				}
+inline std::ostream&	operator<<	(std::ostream& s, VkFilter value)						{ return s << getFilterStr(value);						}
+inline std::ostream&	operator<<	(std::ostream& s, VkSamplerMipmapMode value)			{ return s << getSamplerMipmapModeStr(value);			}
+inline std::ostream&	operator<<	(std::ostream& s, VkSamplerAddressMode value)			{ return s << getSamplerAddressModeStr(value);			}
+inline std::ostream&	operator<<	(std::ostream& s, VkBorderColor value)					{ return s << getBorderColorStr(value);					}
+inline std::ostream&	operator<<	(std::ostream& s, VkDescriptorType value)				{ return s << getDescriptorTypeStr(value);				}
+inline std::ostream&	operator<<	(std::ostream& s, VkAttachmentLoadOp value)				{ return s << getAttachmentLoadOpStr(value);			}
+inline std::ostream&	operator<<	(std::ostream& s, VkAttachmentStoreOp value)			{ return s << getAttachmentStoreOpStr(value);			}
+inline std::ostream&	operator<<	(std::ostream& s, VkPipelineBindPoint value)			{ return s << getPipelineBindPointStr(value);			}
+inline std::ostream&	operator<<	(std::ostream& s, VkCommandBufferLevel value)			{ return s << getCommandBufferLevelStr(value);			}
+inline std::ostream&	operator<<	(std::ostream& s, VkIndexType value)					{ return s << getIndexTypeStr(value);					}
+inline std::ostream&	operator<<	(std::ostream& s, VkSubpassContents value)				{ return s << getSubpassContentsStr(value);				}
+inline std::ostream&	operator<<	(std::ostream& s, VkColorSpaceKHR value)				{ return s << getColorSpaceKHRStr(value);				}
+inline std::ostream&	operator<<	(std::ostream& s, VkPresentModeKHR value)				{ return s << getPresentModeKHRStr(value);				}
+
+tcu::Format::Bitfield<32>	getFormatFeatureFlagsStr					(VkFormatFeatureFlags value);
+tcu::Format::Bitfield<32>	getImageUsageFlagsStr						(VkImageUsageFlags value);
+tcu::Format::Bitfield<32>	getImageCreateFlagsStr						(VkImageCreateFlags value);
+tcu::Format::Bitfield<32>	getSampleCountFlagsStr						(VkSampleCountFlags value);
+tcu::Format::Bitfield<32>	getQueueFlagsStr							(VkQueueFlags value);
+tcu::Format::Bitfield<32>	getMemoryPropertyFlagsStr					(VkMemoryPropertyFlags value);
+tcu::Format::Bitfield<32>	getMemoryHeapFlagsStr						(VkMemoryHeapFlags value);
+tcu::Format::Bitfield<32>	getPipelineStageFlagsStr					(VkPipelineStageFlags value);
+tcu::Format::Bitfield<32>	getImageAspectFlagsStr						(VkImageAspectFlags value);
+tcu::Format::Bitfield<32>	getSparseImageFormatFlagsStr				(VkSparseImageFormatFlags value);
+tcu::Format::Bitfield<32>	getSparseMemoryBindFlagsStr					(VkSparseMemoryBindFlags value);
+tcu::Format::Bitfield<32>	getFenceCreateFlagsStr						(VkFenceCreateFlags value);
+tcu::Format::Bitfield<32>	getQueryPipelineStatisticFlagsStr			(VkQueryPipelineStatisticFlags value);
+tcu::Format::Bitfield<32>	getQueryResultFlagsStr						(VkQueryResultFlags value);
+tcu::Format::Bitfield<32>	getBufferCreateFlagsStr						(VkBufferCreateFlags value);
+tcu::Format::Bitfield<32>	getBufferUsageFlagsStr						(VkBufferUsageFlags value);
+tcu::Format::Bitfield<32>	getPipelineCreateFlagsStr					(VkPipelineCreateFlags value);
+tcu::Format::Bitfield<32>	getShaderStageFlagsStr						(VkShaderStageFlags value);
+tcu::Format::Bitfield<32>	getCullModeFlagsStr							(VkCullModeFlags value);
+tcu::Format::Bitfield<32>	getColorComponentFlagsStr					(VkColorComponentFlags value);
+tcu::Format::Bitfield<32>	getDescriptorPoolCreateFlagsStr				(VkDescriptorPoolCreateFlags value);
+tcu::Format::Bitfield<32>	getAttachmentDescriptionFlagsStr			(VkAttachmentDescriptionFlags value);
+tcu::Format::Bitfield<32>	getAccessFlagsStr							(VkAccessFlags value);
+tcu::Format::Bitfield<32>	getDependencyFlagsStr						(VkDependencyFlags value);
+tcu::Format::Bitfield<32>	getCommandPoolCreateFlagsStr				(VkCommandPoolCreateFlags value);
+tcu::Format::Bitfield<32>	getCommandPoolResetFlagsStr					(VkCommandPoolResetFlags value);
+tcu::Format::Bitfield<32>	getCommandBufferUsageFlagsStr				(VkCommandBufferUsageFlags value);
+tcu::Format::Bitfield<32>	getQueryControlFlagsStr						(VkQueryControlFlags value);
+tcu::Format::Bitfield<32>	getCommandBufferResetFlagsStr				(VkCommandBufferResetFlags value);
+tcu::Format::Bitfield<32>	getStencilFaceFlagsStr						(VkStencilFaceFlags value);
+tcu::Format::Bitfield<32>	getSurfaceTransformFlagsKHRStr				(VkSurfaceTransformFlagsKHR value);
+tcu::Format::Bitfield<32>	getCompositeAlphaFlagsKHRStr				(VkCompositeAlphaFlagsKHR value);
+tcu::Format::Bitfield<32>	getDisplayPlaneAlphaFlagsKHRStr				(VkDisplayPlaneAlphaFlagsKHR value);
+tcu::Format::Bitfield<32>	getInstanceCreateFlagsStr					(VkInstanceCreateFlags value);
+tcu::Format::Bitfield<32>	getDeviceCreateFlagsStr						(VkDeviceCreateFlags value);
+tcu::Format::Bitfield<32>	getDeviceQueueCreateFlagsStr				(VkDeviceQueueCreateFlags value);
+tcu::Format::Bitfield<32>	getMemoryMapFlagsStr						(VkMemoryMapFlags value);
+tcu::Format::Bitfield<32>	getSemaphoreCreateFlagsStr					(VkSemaphoreCreateFlags value);
+tcu::Format::Bitfield<32>	getEventCreateFlagsStr						(VkEventCreateFlags value);
+tcu::Format::Bitfield<32>	getQueryPoolCreateFlagsStr					(VkQueryPoolCreateFlags value);
+tcu::Format::Bitfield<32>	getBufferViewCreateFlagsStr					(VkBufferViewCreateFlags value);
+tcu::Format::Bitfield<32>	getImageViewCreateFlagsStr					(VkImageViewCreateFlags value);
+tcu::Format::Bitfield<32>	getShaderModuleCreateFlagsStr				(VkShaderModuleCreateFlags value);
+tcu::Format::Bitfield<32>	getPipelineCacheCreateFlagsStr				(VkPipelineCacheCreateFlags value);
+tcu::Format::Bitfield<32>	getPipelineShaderStageCreateFlagsStr		(VkPipelineShaderStageCreateFlags value);
+tcu::Format::Bitfield<32>	getPipelineVertexInputStateCreateFlagsStr	(VkPipelineVertexInputStateCreateFlags value);
+tcu::Format::Bitfield<32>	getPipelineInputAssemblyStateCreateFlagsStr	(VkPipelineInputAssemblyStateCreateFlags value);
+tcu::Format::Bitfield<32>	getPipelineTessellationStateCreateFlagsStr	(VkPipelineTessellationStateCreateFlags value);
+tcu::Format::Bitfield<32>	getPipelineViewportStateCreateFlagsStr		(VkPipelineViewportStateCreateFlags value);
+tcu::Format::Bitfield<32>	getPipelineRasterizationStateCreateFlagsStr	(VkPipelineRasterizationStateCreateFlags value);
+tcu::Format::Bitfield<32>	getPipelineMultisampleStateCreateFlagsStr	(VkPipelineMultisampleStateCreateFlags value);
+tcu::Format::Bitfield<32>	getPipelineDepthStencilStateCreateFlagsStr	(VkPipelineDepthStencilStateCreateFlags value);
+tcu::Format::Bitfield<32>	getPipelineColorBlendStateCreateFlagsStr	(VkPipelineColorBlendStateCreateFlags value);
+tcu::Format::Bitfield<32>	getPipelineDynamicStateCreateFlagsStr		(VkPipelineDynamicStateCreateFlags value);
+tcu::Format::Bitfield<32>	getPipelineLayoutCreateFlagsStr				(VkPipelineLayoutCreateFlags value);
+tcu::Format::Bitfield<32>	getSamplerCreateFlagsStr					(VkSamplerCreateFlags value);
+tcu::Format::Bitfield<32>	getDescriptorSetLayoutCreateFlagsStr		(VkDescriptorSetLayoutCreateFlags value);
+tcu::Format::Bitfield<32>	getDescriptorPoolResetFlagsStr				(VkDescriptorPoolResetFlags value);
+tcu::Format::Bitfield<32>	getFramebufferCreateFlagsStr				(VkFramebufferCreateFlags value);
+tcu::Format::Bitfield<32>	getRenderPassCreateFlagsStr					(VkRenderPassCreateFlags value);
+tcu::Format::Bitfield<32>	getSubpassDescriptionFlagsStr				(VkSubpassDescriptionFlags value);
+tcu::Format::Bitfield<32>	getSwapchainCreateFlagsKHRStr				(VkSwapchainCreateFlagsKHR value);
+tcu::Format::Bitfield<32>	getDisplayModeCreateFlagsKHRStr				(VkDisplayModeCreateFlagsKHR value);
+tcu::Format::Bitfield<32>	getDisplaySurfaceCreateFlagsKHRStr			(VkDisplaySurfaceCreateFlagsKHR value);
+tcu::Format::Bitfield<32>	getXlibSurfaceCreateFlagsKHRStr				(VkXlibSurfaceCreateFlagsKHR value);
+tcu::Format::Bitfield<32>	getXcbSurfaceCreateFlagsKHRStr				(VkXcbSurfaceCreateFlagsKHR value);
+tcu::Format::Bitfield<32>	getWaylandSurfaceCreateFlagsKHRStr			(VkWaylandSurfaceCreateFlagsKHR value);
+tcu::Format::Bitfield<32>	getMirSurfaceCreateFlagsKHRStr				(VkMirSurfaceCreateFlagsKHR value);
+tcu::Format::Bitfield<32>	getAndroidSurfaceCreateFlagsKHRStr			(VkAndroidSurfaceCreateFlagsKHR value);
+tcu::Format::Bitfield<32>	getWin32SurfaceCreateFlagsKHRStr			(VkWin32SurfaceCreateFlagsKHR value);
+
+std::ostream&	operator<<	(std::ostream& s, const VkApplicationInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkInstanceCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkAllocationCallbacks& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPhysicalDeviceFeatures& value);
+std::ostream&	operator<<	(std::ostream& s, const VkFormatProperties& value);
+std::ostream&	operator<<	(std::ostream& s, const VkExtent3D& value);
+std::ostream&	operator<<	(std::ostream& s, const VkImageFormatProperties& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPhysicalDeviceLimits& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPhysicalDeviceSparseProperties& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPhysicalDeviceProperties& value);
+std::ostream&	operator<<	(std::ostream& s, const VkQueueFamilyProperties& value);
+std::ostream&	operator<<	(std::ostream& s, const VkMemoryType& value);
+std::ostream&	operator<<	(std::ostream& s, const VkMemoryHeap& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPhysicalDeviceMemoryProperties& value);
+std::ostream&	operator<<	(std::ostream& s, const VkDeviceQueueCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkDeviceCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkExtensionProperties& value);
+std::ostream&	operator<<	(std::ostream& s, const VkLayerProperties& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSubmitInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkMemoryAllocateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkMappedMemoryRange& value);
+std::ostream&	operator<<	(std::ostream& s, const VkMemoryRequirements& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSparseImageFormatProperties& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSparseImageMemoryRequirements& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSparseMemoryBind& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSparseBufferMemoryBindInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSparseImageOpaqueMemoryBindInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkImageSubresource& value);
+std::ostream&	operator<<	(std::ostream& s, const VkOffset3D& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSparseImageMemoryBind& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSparseImageMemoryBindInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkBindSparseInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkFenceCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSemaphoreCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkEventCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkQueryPoolCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkBufferCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkBufferViewCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkImageCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSubresourceLayout& value);
+std::ostream&	operator<<	(std::ostream& s, const VkComponentMapping& value);
+std::ostream&	operator<<	(std::ostream& s, const VkImageSubresourceRange& value);
+std::ostream&	operator<<	(std::ostream& s, const VkImageViewCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkShaderModuleCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPipelineCacheCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSpecializationMapEntry& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSpecializationInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPipelineShaderStageCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkVertexInputBindingDescription& value);
+std::ostream&	operator<<	(std::ostream& s, const VkVertexInputAttributeDescription& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPipelineVertexInputStateCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPipelineInputAssemblyStateCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPipelineTessellationStateCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkViewport& value);
+std::ostream&	operator<<	(std::ostream& s, const VkOffset2D& value);
+std::ostream&	operator<<	(std::ostream& s, const VkExtent2D& value);
+std::ostream&	operator<<	(std::ostream& s, const VkRect2D& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPipelineViewportStateCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPipelineRasterizationStateCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPipelineMultisampleStateCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkStencilOpState& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPipelineDepthStencilStateCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPipelineColorBlendAttachmentState& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPipelineColorBlendStateCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPipelineDynamicStateCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkGraphicsPipelineCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkComputePipelineCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPushConstantRange& value);
+std::ostream&	operator<<	(std::ostream& s, const VkPipelineLayoutCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSamplerCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkDescriptorSetLayoutBinding& value);
+std::ostream&	operator<<	(std::ostream& s, const VkDescriptorSetLayoutCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkDescriptorPoolSize& value);
+std::ostream&	operator<<	(std::ostream& s, const VkDescriptorPoolCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkDescriptorSetAllocateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkDescriptorImageInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkDescriptorBufferInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkWriteDescriptorSet& value);
+std::ostream&	operator<<	(std::ostream& s, const VkCopyDescriptorSet& value);
+std::ostream&	operator<<	(std::ostream& s, const VkFramebufferCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkAttachmentDescription& value);
+std::ostream&	operator<<	(std::ostream& s, const VkAttachmentReference& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSubpassDescription& value);
+std::ostream&	operator<<	(std::ostream& s, const VkSubpassDependency& value);
+std::ostream&	operator<<	(std::ostream& s, const VkRenderPassCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkCommandPoolCreateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkCommandBufferAllocateInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkCommandBufferInheritanceInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkCommandBufferBeginInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkBufferCopy& value);
+std::ostream&	operator<<	(std::ostream& s, const VkImageSubresourceLayers& value);
+std::ostream&	operator<<	(std::ostream& s, const VkImageCopy& value);
+std::ostream&	operator<<	(std::ostream& s, const VkImageBlit& value);
+std::ostream&	operator<<	(std::ostream& s, const VkBufferImageCopy& value);
+std::ostream&	operator<<	(std::ostream& s, const VkClearColorValue& value);
+std::ostream&	operator<<	(std::ostream& s, const VkClearDepthStencilValue& value);
+std::ostream&	operator<<	(std::ostream& s, const VkClearValue& value);
+std::ostream&	operator<<	(std::ostream& s, const VkClearAttachment& value);
+std::ostream&	operator<<	(std::ostream& s, const VkClearRect& value);
+std::ostream&	operator<<	(std::ostream& s, const VkImageResolve& value);
+std::ostream&	operator<<	(std::ostream& s, const VkMemoryBarrier& value);
+std::ostream&	operator<<	(std::ostream& s, const VkBufferMemoryBarrier& value);
+std::ostream&	operator<<	(std::ostream& s, const VkImageMemoryBarrier& value);
+std::ostream&	operator<<	(std::ostream& s, const VkRenderPassBeginInfo& value);
+std::ostream&	operator<<	(std::ostream& s, const VkDispatchIndirectCommand& value);
+std::ostream&	operator<<	(std::ostream& s, const VkDrawIndexedIndirectCommand& value);
+std::ostream&	operator<<	(std::ostream& s, const VkDrawIndirectCommand& value);
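The declarations above are what test code actually touches: the operator<< overloads print the symbolic enum name, and the get*Str() wrappers pair a value with its name-lookup function for tcu's formatting helpers. A minimal, hypothetical usage sketch follows (it assumes the dEQP vk namespace and framework include paths; everything outside the vkStrUtil declarations is illustrative only):

#include "vkStrUtil.hpp"

#include <iostream>
#include <sstream>

int main (void)
{
	std::ostringstream msg;

	// The operator<< overloads declared above are found via ADL in namespace vk
	// and print the symbolic name, e.g. "VK_SUCCESS", "VK_FORMAT_R8G8B8A8_UNORM".
	msg << "vkCreateBuffer returned " << vk::VK_SUCCESS
	    << " for format " << vk::VK_FORMAT_R8G8B8A8_UNORM;

	// The get*Str() wrappers return a tcu::Format::Enum that bundles the value
	// with its name-lookup function for use in log messages.
	msg << " (" << vk::getResultStr(vk::VK_SUCCESS) << ")";

	std::cout << msg.str() << std::endl;
	return 0;
}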
diff --git a/external/vulkancts/framework/vulkan/vkStrUtilImpl.inl b/external/vulkancts/framework/vulkan/vkStrUtilImpl.inl
new file mode 100644
index 0000000..c5aa62c
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkStrUtilImpl.inl
@@ -0,0 +1,2884 @@
+/* WARNING: This is an auto-generated file. Do not modify it, since changes
+ * will be lost! Modify the generating script instead.
+ */
+template<> const char*	getTypeName<VkInstance>				(void) { return "VkInstance";				}
+template<> const char*	getTypeName<VkPhysicalDevice>		(void) { return "VkPhysicalDevice";			}
+template<> const char*	getTypeName<VkDevice>				(void) { return "VkDevice";					}
+template<> const char*	getTypeName<VkQueue>				(void) { return "VkQueue";					}
+template<> const char*	getTypeName<VkSemaphore>			(void) { return "VkSemaphore";				}
+template<> const char*	getTypeName<VkCommandBuffer>		(void) { return "VkCommandBuffer";			}
+template<> const char*	getTypeName<VkFence>				(void) { return "VkFence";					}
+template<> const char*	getTypeName<VkDeviceMemory>			(void) { return "VkDeviceMemory";			}
+template<> const char*	getTypeName<VkBuffer>				(void) { return "VkBuffer";					}
+template<> const char*	getTypeName<VkImage>				(void) { return "VkImage";					}
+template<> const char*	getTypeName<VkEvent>				(void) { return "VkEvent";					}
+template<> const char*	getTypeName<VkQueryPool>			(void) { return "VkQueryPool";				}
+template<> const char*	getTypeName<VkBufferView>			(void) { return "VkBufferView";				}
+template<> const char*	getTypeName<VkImageView>			(void) { return "VkImageView";				}
+template<> const char*	getTypeName<VkShaderModule>			(void) { return "VkShaderModule";			}
+template<> const char*	getTypeName<VkPipelineCache>		(void) { return "VkPipelineCache";			}
+template<> const char*	getTypeName<VkPipelineLayout>		(void) { return "VkPipelineLayout";			}
+template<> const char*	getTypeName<VkRenderPass>			(void) { return "VkRenderPass";				}
+template<> const char*	getTypeName<VkPipeline>				(void) { return "VkPipeline";				}
+template<> const char*	getTypeName<VkDescriptorSetLayout>	(void) { return "VkDescriptorSetLayout";	}
+template<> const char*	getTypeName<VkSampler>				(void) { return "VkSampler";				}
+template<> const char*	getTypeName<VkDescriptorPool>		(void) { return "VkDescriptorPool";			}
+template<> const char*	getTypeName<VkDescriptorSet>		(void) { return "VkDescriptorSet";			}
+template<> const char*	getTypeName<VkFramebuffer>			(void) { return "VkFramebuffer";			}
+template<> const char*	getTypeName<VkCommandPool>			(void) { return "VkCommandPool";			}
+template<> const char*	getTypeName<VkSurfaceKHR>			(void) { return "VkSurfaceKHR";				}
+template<> const char*	getTypeName<VkSwapchainKHR>			(void) { return "VkSwapchainKHR";			}
+template<> const char*	getTypeName<VkDisplayKHR>			(void) { return "VkDisplayKHR";				}
+template<> const char*	getTypeName<VkDisplayModeKHR>		(void) { return "VkDisplayModeKHR";			}
+
+const char* getPipelineCacheHeaderVersionName (VkPipelineCacheHeaderVersion value)
+{
+	switch (value)
+	{
+		case VK_PIPELINE_CACHE_HEADER_VERSION_ONE:	return "VK_PIPELINE_CACHE_HEADER_VERSION_ONE";
+		default:									return DE_NULL;
+	}
+}
+
+const char* getResultName (VkResult value)
+{
+	switch (value)
+	{
+		case VK_SUCCESS:						return "VK_SUCCESS";
+		case VK_NOT_READY:						return "VK_NOT_READY";
+		case VK_TIMEOUT:						return "VK_TIMEOUT";
+		case VK_EVENT_SET:						return "VK_EVENT_SET";
+		case VK_EVENT_RESET:					return "VK_EVENT_RESET";
+		case VK_INCOMPLETE:						return "VK_INCOMPLETE";
+		case VK_ERROR_OUT_OF_HOST_MEMORY:		return "VK_ERROR_OUT_OF_HOST_MEMORY";
+		case VK_ERROR_OUT_OF_DEVICE_MEMORY:		return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
+		case VK_ERROR_INITIALIZATION_FAILED:	return "VK_ERROR_INITIALIZATION_FAILED";
+		case VK_ERROR_DEVICE_LOST:				return "VK_ERROR_DEVICE_LOST";
+		case VK_ERROR_MEMORY_MAP_FAILED:		return "VK_ERROR_MEMORY_MAP_FAILED";
+		case VK_ERROR_LAYER_NOT_PRESENT:		return "VK_ERROR_LAYER_NOT_PRESENT";
+		case VK_ERROR_EXTENSION_NOT_PRESENT:	return "VK_ERROR_EXTENSION_NOT_PRESENT";
+		case VK_ERROR_FEATURE_NOT_PRESENT:		return "VK_ERROR_FEATURE_NOT_PRESENT";
+		case VK_ERROR_INCOMPATIBLE_DRIVER:		return "VK_ERROR_INCOMPATIBLE_DRIVER";
+		case VK_ERROR_TOO_MANY_OBJECTS:			return "VK_ERROR_TOO_MANY_OBJECTS";
+		case VK_ERROR_FORMAT_NOT_SUPPORTED:		return "VK_ERROR_FORMAT_NOT_SUPPORTED";
+		case VK_ERROR_SURFACE_LOST_KHR:			return "VK_ERROR_SURFACE_LOST_KHR";
+		case VK_SUBOPTIMAL_KHR:					return "VK_SUBOPTIMAL_KHR";
+		case VK_ERROR_OUT_OF_DATE_KHR:			return "VK_ERROR_OUT_OF_DATE_KHR";
+		case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR:	return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
+		case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR:	return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
+		case VK_ERROR_VALIDATION_FAILED_EXT:	return "VK_ERROR_VALIDATION_FAILED_EXT";
+		default:								return DE_NULL;
+	}
+}
+
+const char* getStructureTypeName (VkStructureType value)
+{
+	switch (value)
+	{
+		case VK_STRUCTURE_TYPE_APPLICATION_INFO:							return "VK_STRUCTURE_TYPE_APPLICATION_INFO";
+		case VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO:						return "VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO:					return "VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO:							return "VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_SUBMIT_INFO:									return "VK_STRUCTURE_TYPE_SUBMIT_INFO";
+		case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO:						return "VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO";
+		case VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE:							return "VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE";
+		case VK_STRUCTURE_TYPE_BIND_SPARSE_INFO:							return "VK_STRUCTURE_TYPE_BIND_SPARSE_INFO";
+		case VK_STRUCTURE_TYPE_FENCE_CREATE_INFO:							return "VK_STRUCTURE_TYPE_FENCE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO:						return "VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_EVENT_CREATE_INFO:							return "VK_STRUCTURE_TYPE_EVENT_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO:						return "VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO:							return "VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO:						return "VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO:							return "VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO:						return "VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO:					return "VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO:					return "VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO:			return "VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO:		return "VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO:	return "VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO:		return "VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO:			return "VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO:	return "VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO:		return "VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO:	return "VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO:		return "VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO:			return "VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO:				return "VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO:				return "VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO:					return "VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO:							return "VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO:			return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO:					return "VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO:				return "VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO";
+		case VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET:						return "VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET";
+		case VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET:							return "VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET";
+		case VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO:						return "VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO:						return "VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO:					return "VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO:				return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO";
+		case VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO:				return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO";
+		case VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO:					return "VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO";
+		case VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO:						return "VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO";
+		case VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER:						return "VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER";
+		case VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER:						return "VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER";
+		case VK_STRUCTURE_TYPE_MEMORY_BARRIER:								return "VK_STRUCTURE_TYPE_MEMORY_BARRIER";
+		case VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO:					return "VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO:					return "VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO";
+		case VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR:					return "VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR";
+		case VK_STRUCTURE_TYPE_PRESENT_INFO_KHR:							return "VK_STRUCTURE_TYPE_PRESENT_INFO_KHR";
+		case VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR:				return "VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR";
+		case VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR:				return "VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR";
+		case VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR:					return "VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR";
+		case VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR:				return "VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR";
+		case VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR:					return "VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR";
+		case VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR:				return "VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR";
+		case VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR:					return "VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR";
+		case VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR:				return "VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR";
+		case VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR:				return "VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR";
+		case VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT:				return "VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT";
+		default:															return DE_NULL;
+	}
+}
+
+const char* getSystemAllocationScopeName (VkSystemAllocationScope value)
+{
+	switch (value)
+	{
+		case VK_SYSTEM_ALLOCATION_SCOPE_COMMAND:	return "VK_SYSTEM_ALLOCATION_SCOPE_COMMAND";
+		case VK_SYSTEM_ALLOCATION_SCOPE_OBJECT:		return "VK_SYSTEM_ALLOCATION_SCOPE_OBJECT";
+		case VK_SYSTEM_ALLOCATION_SCOPE_CACHE:		return "VK_SYSTEM_ALLOCATION_SCOPE_CACHE";
+		case VK_SYSTEM_ALLOCATION_SCOPE_DEVICE:		return "VK_SYSTEM_ALLOCATION_SCOPE_DEVICE";
+		case VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE:	return "VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE";
+		default:									return DE_NULL;
+	}
+}
+
+const char* getInternalAllocationTypeName (VkInternalAllocationType value)
+{
+	switch (value)
+	{
+		case VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE:	return "VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE";
+		default:										return DE_NULL;
+	}
+}
+
+const char* getFormatName (VkFormat value)
+{
+	switch (value)
+	{
+		case VK_FORMAT_UNDEFINED:					return "VK_FORMAT_UNDEFINED";
+		case VK_FORMAT_R4G4_UNORM_PACK8:			return "VK_FORMAT_R4G4_UNORM_PACK8";
+		case VK_FORMAT_R4G4B4A4_UNORM_PACK16:		return "VK_FORMAT_R4G4B4A4_UNORM_PACK16";
+		case VK_FORMAT_B4G4R4A4_UNORM_PACK16:		return "VK_FORMAT_B4G4R4A4_UNORM_PACK16";
+		case VK_FORMAT_R5G6B5_UNORM_PACK16:			return "VK_FORMAT_R5G6B5_UNORM_PACK16";
+		case VK_FORMAT_B5G6R5_UNORM_PACK16:			return "VK_FORMAT_B5G6R5_UNORM_PACK16";
+		case VK_FORMAT_R5G5B5A1_UNORM_PACK16:		return "VK_FORMAT_R5G5B5A1_UNORM_PACK16";
+		case VK_FORMAT_B5G5R5A1_UNORM_PACK16:		return "VK_FORMAT_B5G5R5A1_UNORM_PACK16";
+		case VK_FORMAT_A1R5G5B5_UNORM_PACK16:		return "VK_FORMAT_A1R5G5B5_UNORM_PACK16";
+		case VK_FORMAT_R8_UNORM:					return "VK_FORMAT_R8_UNORM";
+		case VK_FORMAT_R8_SNORM:					return "VK_FORMAT_R8_SNORM";
+		case VK_FORMAT_R8_USCALED:					return "VK_FORMAT_R8_USCALED";
+		case VK_FORMAT_R8_SSCALED:					return "VK_FORMAT_R8_SSCALED";
+		case VK_FORMAT_R8_UINT:						return "VK_FORMAT_R8_UINT";
+		case VK_FORMAT_R8_SINT:						return "VK_FORMAT_R8_SINT";
+		case VK_FORMAT_R8_SRGB:						return "VK_FORMAT_R8_SRGB";
+		case VK_FORMAT_R8G8_UNORM:					return "VK_FORMAT_R8G8_UNORM";
+		case VK_FORMAT_R8G8_SNORM:					return "VK_FORMAT_R8G8_SNORM";
+		case VK_FORMAT_R8G8_USCALED:				return "VK_FORMAT_R8G8_USCALED";
+		case VK_FORMAT_R8G8_SSCALED:				return "VK_FORMAT_R8G8_SSCALED";
+		case VK_FORMAT_R8G8_UINT:					return "VK_FORMAT_R8G8_UINT";
+		case VK_FORMAT_R8G8_SINT:					return "VK_FORMAT_R8G8_SINT";
+		case VK_FORMAT_R8G8_SRGB:					return "VK_FORMAT_R8G8_SRGB";
+		case VK_FORMAT_R8G8B8_UNORM:				return "VK_FORMAT_R8G8B8_UNORM";
+		case VK_FORMAT_R8G8B8_SNORM:				return "VK_FORMAT_R8G8B8_SNORM";
+		case VK_FORMAT_R8G8B8_USCALED:				return "VK_FORMAT_R8G8B8_USCALED";
+		case VK_FORMAT_R8G8B8_SSCALED:				return "VK_FORMAT_R8G8B8_SSCALED";
+		case VK_FORMAT_R8G8B8_UINT:					return "VK_FORMAT_R8G8B8_UINT";
+		case VK_FORMAT_R8G8B8_SINT:					return "VK_FORMAT_R8G8B8_SINT";
+		case VK_FORMAT_R8G8B8_SRGB:					return "VK_FORMAT_R8G8B8_SRGB";
+		case VK_FORMAT_B8G8R8_UNORM:				return "VK_FORMAT_B8G8R8_UNORM";
+		case VK_FORMAT_B8G8R8_SNORM:				return "VK_FORMAT_B8G8R8_SNORM";
+		case VK_FORMAT_B8G8R8_USCALED:				return "VK_FORMAT_B8G8R8_USCALED";
+		case VK_FORMAT_B8G8R8_SSCALED:				return "VK_FORMAT_B8G8R8_SSCALED";
+		case VK_FORMAT_B8G8R8_UINT:					return "VK_FORMAT_B8G8R8_UINT";
+		case VK_FORMAT_B8G8R8_SINT:					return "VK_FORMAT_B8G8R8_SINT";
+		case VK_FORMAT_B8G8R8_SRGB:					return "VK_FORMAT_B8G8R8_SRGB";
+		case VK_FORMAT_R8G8B8A8_UNORM:				return "VK_FORMAT_R8G8B8A8_UNORM";
+		case VK_FORMAT_R8G8B8A8_SNORM:				return "VK_FORMAT_R8G8B8A8_SNORM";
+		case VK_FORMAT_R8G8B8A8_USCALED:			return "VK_FORMAT_R8G8B8A8_USCALED";
+		case VK_FORMAT_R8G8B8A8_SSCALED:			return "VK_FORMAT_R8G8B8A8_SSCALED";
+		case VK_FORMAT_R8G8B8A8_UINT:				return "VK_FORMAT_R8G8B8A8_UINT";
+		case VK_FORMAT_R8G8B8A8_SINT:				return "VK_FORMAT_R8G8B8A8_SINT";
+		case VK_FORMAT_R8G8B8A8_SRGB:				return "VK_FORMAT_R8G8B8A8_SRGB";
+		case VK_FORMAT_B8G8R8A8_UNORM:				return "VK_FORMAT_B8G8R8A8_UNORM";
+		case VK_FORMAT_B8G8R8A8_SNORM:				return "VK_FORMAT_B8G8R8A8_SNORM";
+		case VK_FORMAT_B8G8R8A8_USCALED:			return "VK_FORMAT_B8G8R8A8_USCALED";
+		case VK_FORMAT_B8G8R8A8_SSCALED:			return "VK_FORMAT_B8G8R8A8_SSCALED";
+		case VK_FORMAT_B8G8R8A8_UINT:				return "VK_FORMAT_B8G8R8A8_UINT";
+		case VK_FORMAT_B8G8R8A8_SINT:				return "VK_FORMAT_B8G8R8A8_SINT";
+		case VK_FORMAT_B8G8R8A8_SRGB:				return "VK_FORMAT_B8G8R8A8_SRGB";
+		case VK_FORMAT_A8B8G8R8_UNORM_PACK32:		return "VK_FORMAT_A8B8G8R8_UNORM_PACK32";
+		case VK_FORMAT_A8B8G8R8_SNORM_PACK32:		return "VK_FORMAT_A8B8G8R8_SNORM_PACK32";
+		case VK_FORMAT_A8B8G8R8_USCALED_PACK32:		return "VK_FORMAT_A8B8G8R8_USCALED_PACK32";
+		case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:		return "VK_FORMAT_A8B8G8R8_SSCALED_PACK32";
+		case VK_FORMAT_A8B8G8R8_UINT_PACK32:		return "VK_FORMAT_A8B8G8R8_UINT_PACK32";
+		case VK_FORMAT_A8B8G8R8_SINT_PACK32:		return "VK_FORMAT_A8B8G8R8_SINT_PACK32";
+		case VK_FORMAT_A8B8G8R8_SRGB_PACK32:		return "VK_FORMAT_A8B8G8R8_SRGB_PACK32";
+		case VK_FORMAT_A2R10G10B10_UNORM_PACK32:	return "VK_FORMAT_A2R10G10B10_UNORM_PACK32";
+		case VK_FORMAT_A2R10G10B10_SNORM_PACK32:	return "VK_FORMAT_A2R10G10B10_SNORM_PACK32";
+		case VK_FORMAT_A2R10G10B10_USCALED_PACK32:	return "VK_FORMAT_A2R10G10B10_USCALED_PACK32";
+		case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:	return "VK_FORMAT_A2R10G10B10_SSCALED_PACK32";
+		case VK_FORMAT_A2R10G10B10_UINT_PACK32:		return "VK_FORMAT_A2R10G10B10_UINT_PACK32";
+		case VK_FORMAT_A2R10G10B10_SINT_PACK32:		return "VK_FORMAT_A2R10G10B10_SINT_PACK32";
+		case VK_FORMAT_A2B10G10R10_UNORM_PACK32:	return "VK_FORMAT_A2B10G10R10_UNORM_PACK32";
+		case VK_FORMAT_A2B10G10R10_SNORM_PACK32:	return "VK_FORMAT_A2B10G10R10_SNORM_PACK32";
+		case VK_FORMAT_A2B10G10R10_USCALED_PACK32:	return "VK_FORMAT_A2B10G10R10_USCALED_PACK32";
+		case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:	return "VK_FORMAT_A2B10G10R10_SSCALED_PACK32";
+		case VK_FORMAT_A2B10G10R10_UINT_PACK32:		return "VK_FORMAT_A2B10G10R10_UINT_PACK32";
+		case VK_FORMAT_A2B10G10R10_SINT_PACK32:		return "VK_FORMAT_A2B10G10R10_SINT_PACK32";
+		case VK_FORMAT_R16_UNORM:					return "VK_FORMAT_R16_UNORM";
+		case VK_FORMAT_R16_SNORM:					return "VK_FORMAT_R16_SNORM";
+		case VK_FORMAT_R16_USCALED:					return "VK_FORMAT_R16_USCALED";
+		case VK_FORMAT_R16_SSCALED:					return "VK_FORMAT_R16_SSCALED";
+		case VK_FORMAT_R16_UINT:					return "VK_FORMAT_R16_UINT";
+		case VK_FORMAT_R16_SINT:					return "VK_FORMAT_R16_SINT";
+		case VK_FORMAT_R16_SFLOAT:					return "VK_FORMAT_R16_SFLOAT";
+		case VK_FORMAT_R16G16_UNORM:				return "VK_FORMAT_R16G16_UNORM";
+		case VK_FORMAT_R16G16_SNORM:				return "VK_FORMAT_R16G16_SNORM";
+		case VK_FORMAT_R16G16_USCALED:				return "VK_FORMAT_R16G16_USCALED";
+		case VK_FORMAT_R16G16_SSCALED:				return "VK_FORMAT_R16G16_SSCALED";
+		case VK_FORMAT_R16G16_UINT:					return "VK_FORMAT_R16G16_UINT";
+		case VK_FORMAT_R16G16_SINT:					return "VK_FORMAT_R16G16_SINT";
+		case VK_FORMAT_R16G16_SFLOAT:				return "VK_FORMAT_R16G16_SFLOAT";
+		case VK_FORMAT_R16G16B16_UNORM:				return "VK_FORMAT_R16G16B16_UNORM";
+		case VK_FORMAT_R16G16B16_SNORM:				return "VK_FORMAT_R16G16B16_SNORM";
+		case VK_FORMAT_R16G16B16_USCALED:			return "VK_FORMAT_R16G16B16_USCALED";
+		case VK_FORMAT_R16G16B16_SSCALED:			return "VK_FORMAT_R16G16B16_SSCALED";
+		case VK_FORMAT_R16G16B16_UINT:				return "VK_FORMAT_R16G16B16_UINT";
+		case VK_FORMAT_R16G16B16_SINT:				return "VK_FORMAT_R16G16B16_SINT";
+		case VK_FORMAT_R16G16B16_SFLOAT:			return "VK_FORMAT_R16G16B16_SFLOAT";
+		case VK_FORMAT_R16G16B16A16_UNORM:			return "VK_FORMAT_R16G16B16A16_UNORM";
+		case VK_FORMAT_R16G16B16A16_SNORM:			return "VK_FORMAT_R16G16B16A16_SNORM";
+		case VK_FORMAT_R16G16B16A16_USCALED:		return "VK_FORMAT_R16G16B16A16_USCALED";
+		case VK_FORMAT_R16G16B16A16_SSCALED:		return "VK_FORMAT_R16G16B16A16_SSCALED";
+		case VK_FORMAT_R16G16B16A16_UINT:			return "VK_FORMAT_R16G16B16A16_UINT";
+		case VK_FORMAT_R16G16B16A16_SINT:			return "VK_FORMAT_R16G16B16A16_SINT";
+		case VK_FORMAT_R16G16B16A16_SFLOAT:			return "VK_FORMAT_R16G16B16A16_SFLOAT";
+		case VK_FORMAT_R32_UINT:					return "VK_FORMAT_R32_UINT";
+		case VK_FORMAT_R32_SINT:					return "VK_FORMAT_R32_SINT";
+		case VK_FORMAT_R32_SFLOAT:					return "VK_FORMAT_R32_SFLOAT";
+		case VK_FORMAT_R32G32_UINT:					return "VK_FORMAT_R32G32_UINT";
+		case VK_FORMAT_R32G32_SINT:					return "VK_FORMAT_R32G32_SINT";
+		case VK_FORMAT_R32G32_SFLOAT:				return "VK_FORMAT_R32G32_SFLOAT";
+		case VK_FORMAT_R32G32B32_UINT:				return "VK_FORMAT_R32G32B32_UINT";
+		case VK_FORMAT_R32G32B32_SINT:				return "VK_FORMAT_R32G32B32_SINT";
+		case VK_FORMAT_R32G32B32_SFLOAT:			return "VK_FORMAT_R32G32B32_SFLOAT";
+		case VK_FORMAT_R32G32B32A32_UINT:			return "VK_FORMAT_R32G32B32A32_UINT";
+		case VK_FORMAT_R32G32B32A32_SINT:			return "VK_FORMAT_R32G32B32A32_SINT";
+		case VK_FORMAT_R32G32B32A32_SFLOAT:			return "VK_FORMAT_R32G32B32A32_SFLOAT";
+		case VK_FORMAT_R64_UINT:					return "VK_FORMAT_R64_UINT";
+		case VK_FORMAT_R64_SINT:					return "VK_FORMAT_R64_SINT";
+		case VK_FORMAT_R64_SFLOAT:					return "VK_FORMAT_R64_SFLOAT";
+		case VK_FORMAT_R64G64_UINT:					return "VK_FORMAT_R64G64_UINT";
+		case VK_FORMAT_R64G64_SINT:					return "VK_FORMAT_R64G64_SINT";
+		case VK_FORMAT_R64G64_SFLOAT:				return "VK_FORMAT_R64G64_SFLOAT";
+		case VK_FORMAT_R64G64B64_UINT:				return "VK_FORMAT_R64G64B64_UINT";
+		case VK_FORMAT_R64G64B64_SINT:				return "VK_FORMAT_R64G64B64_SINT";
+		case VK_FORMAT_R64G64B64_SFLOAT:			return "VK_FORMAT_R64G64B64_SFLOAT";
+		case VK_FORMAT_R64G64B64A64_UINT:			return "VK_FORMAT_R64G64B64A64_UINT";
+		case VK_FORMAT_R64G64B64A64_SINT:			return "VK_FORMAT_R64G64B64A64_SINT";
+		case VK_FORMAT_R64G64B64A64_SFLOAT:			return "VK_FORMAT_R64G64B64A64_SFLOAT";
+		case VK_FORMAT_B10G11R11_UFLOAT_PACK32:		return "VK_FORMAT_B10G11R11_UFLOAT_PACK32";
+		case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:		return "VK_FORMAT_E5B9G9R9_UFLOAT_PACK32";
+		case VK_FORMAT_D16_UNORM:					return "VK_FORMAT_D16_UNORM";
+		case VK_FORMAT_X8_D24_UNORM_PACK32:			return "VK_FORMAT_X8_D24_UNORM_PACK32";
+		case VK_FORMAT_D32_SFLOAT:					return "VK_FORMAT_D32_SFLOAT";
+		case VK_FORMAT_S8_UINT:						return "VK_FORMAT_S8_UINT";
+		case VK_FORMAT_D16_UNORM_S8_UINT:			return "VK_FORMAT_D16_UNORM_S8_UINT";
+		case VK_FORMAT_D24_UNORM_S8_UINT:			return "VK_FORMAT_D24_UNORM_S8_UINT";
+		case VK_FORMAT_D32_SFLOAT_S8_UINT:			return "VK_FORMAT_D32_SFLOAT_S8_UINT";
+		case VK_FORMAT_BC1_RGB_UNORM_BLOCK:			return "VK_FORMAT_BC1_RGB_UNORM_BLOCK";
+		case VK_FORMAT_BC1_RGB_SRGB_BLOCK:			return "VK_FORMAT_BC1_RGB_SRGB_BLOCK";
+		case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:		return "VK_FORMAT_BC1_RGBA_UNORM_BLOCK";
+		case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:			return "VK_FORMAT_BC1_RGBA_SRGB_BLOCK";
+		case VK_FORMAT_BC2_UNORM_BLOCK:				return "VK_FORMAT_BC2_UNORM_BLOCK";
+		case VK_FORMAT_BC2_SRGB_BLOCK:				return "VK_FORMAT_BC2_SRGB_BLOCK";
+		case VK_FORMAT_BC3_UNORM_BLOCK:				return "VK_FORMAT_BC3_UNORM_BLOCK";
+		case VK_FORMAT_BC3_SRGB_BLOCK:				return "VK_FORMAT_BC3_SRGB_BLOCK";
+		case VK_FORMAT_BC4_UNORM_BLOCK:				return "VK_FORMAT_BC4_UNORM_BLOCK";
+		case VK_FORMAT_BC4_SNORM_BLOCK:				return "VK_FORMAT_BC4_SNORM_BLOCK";
+		case VK_FORMAT_BC5_UNORM_BLOCK:				return "VK_FORMAT_BC5_UNORM_BLOCK";
+		case VK_FORMAT_BC5_SNORM_BLOCK:				return "VK_FORMAT_BC5_SNORM_BLOCK";
+		case VK_FORMAT_BC6H_UFLOAT_BLOCK:			return "VK_FORMAT_BC6H_UFLOAT_BLOCK";
+		case VK_FORMAT_BC6H_SFLOAT_BLOCK:			return "VK_FORMAT_BC6H_SFLOAT_BLOCK";
+		case VK_FORMAT_BC7_UNORM_BLOCK:				return "VK_FORMAT_BC7_UNORM_BLOCK";
+		case VK_FORMAT_BC7_SRGB_BLOCK:				return "VK_FORMAT_BC7_SRGB_BLOCK";
+		case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:		return "VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK";
+		case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:		return "VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK";
+		case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:	return "VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK";
+		case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:	return "VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK";
+		case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:	return "VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK";
+		case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:	return "VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK";
+		case VK_FORMAT_EAC_R11_UNORM_BLOCK:			return "VK_FORMAT_EAC_R11_UNORM_BLOCK";
+		case VK_FORMAT_EAC_R11_SNORM_BLOCK:			return "VK_FORMAT_EAC_R11_SNORM_BLOCK";
+		case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:		return "VK_FORMAT_EAC_R11G11_UNORM_BLOCK";
+		case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:		return "VK_FORMAT_EAC_R11G11_SNORM_BLOCK";
+		case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:		return "VK_FORMAT_ASTC_4x4_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:			return "VK_FORMAT_ASTC_4x4_SRGB_BLOCK";
+		case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:		return "VK_FORMAT_ASTC_5x4_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:			return "VK_FORMAT_ASTC_5x4_SRGB_BLOCK";
+		case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:		return "VK_FORMAT_ASTC_5x5_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:			return "VK_FORMAT_ASTC_5x5_SRGB_BLOCK";
+		case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:		return "VK_FORMAT_ASTC_6x5_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:			return "VK_FORMAT_ASTC_6x5_SRGB_BLOCK";
+		case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:		return "VK_FORMAT_ASTC_6x6_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:			return "VK_FORMAT_ASTC_6x6_SRGB_BLOCK";
+		case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:		return "VK_FORMAT_ASTC_8x5_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:			return "VK_FORMAT_ASTC_8x5_SRGB_BLOCK";
+		case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:		return "VK_FORMAT_ASTC_8x6_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:			return "VK_FORMAT_ASTC_8x6_SRGB_BLOCK";
+		case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:		return "VK_FORMAT_ASTC_8x8_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:			return "VK_FORMAT_ASTC_8x8_SRGB_BLOCK";
+		case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:		return "VK_FORMAT_ASTC_10x5_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:		return "VK_FORMAT_ASTC_10x5_SRGB_BLOCK";
+		case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:		return "VK_FORMAT_ASTC_10x6_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:		return "VK_FORMAT_ASTC_10x6_SRGB_BLOCK";
+		case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:		return "VK_FORMAT_ASTC_10x8_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:		return "VK_FORMAT_ASTC_10x8_SRGB_BLOCK";
+		case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:		return "VK_FORMAT_ASTC_10x10_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:		return "VK_FORMAT_ASTC_10x10_SRGB_BLOCK";
+		case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:		return "VK_FORMAT_ASTC_12x10_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:		return "VK_FORMAT_ASTC_12x10_SRGB_BLOCK";
+		case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:		return "VK_FORMAT_ASTC_12x12_UNORM_BLOCK";
+		case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:		return "VK_FORMAT_ASTC_12x12_SRGB_BLOCK";
+		default:									return DE_NULL;
+	}
+}
+
+const char* getImageTypeName (VkImageType value)
+{
+	switch (value)
+	{
+		case VK_IMAGE_TYPE_1D:	return "VK_IMAGE_TYPE_1D";
+		case VK_IMAGE_TYPE_2D:	return "VK_IMAGE_TYPE_2D";
+		case VK_IMAGE_TYPE_3D:	return "VK_IMAGE_TYPE_3D";
+		default:				return DE_NULL;
+	}
+}
+
+const char* getImageTilingName (VkImageTiling value)
+{
+	switch (value)
+	{
+		case VK_IMAGE_TILING_OPTIMAL:	return "VK_IMAGE_TILING_OPTIMAL";
+		case VK_IMAGE_TILING_LINEAR:	return "VK_IMAGE_TILING_LINEAR";
+		default:						return DE_NULL;
+	}
+}
+
+const char* getPhysicalDeviceTypeName (VkPhysicalDeviceType value)
+{
+	switch (value)
+	{
+		case VK_PHYSICAL_DEVICE_TYPE_OTHER:				return "VK_PHYSICAL_DEVICE_TYPE_OTHER";
+		case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:	return "VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU";
+		case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:		return "VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU";
+		case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:		return "VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU";
+		case VK_PHYSICAL_DEVICE_TYPE_CPU:				return "VK_PHYSICAL_DEVICE_TYPE_CPU";
+		default:										return DE_NULL;
+	}
+}
+
+const char* getQueryTypeName (VkQueryType value)
+{
+	switch (value)
+	{
+		case VK_QUERY_TYPE_OCCLUSION:			return "VK_QUERY_TYPE_OCCLUSION";
+		case VK_QUERY_TYPE_PIPELINE_STATISTICS:	return "VK_QUERY_TYPE_PIPELINE_STATISTICS";
+		case VK_QUERY_TYPE_TIMESTAMP:			return "VK_QUERY_TYPE_TIMESTAMP";
+		default:								return DE_NULL;
+	}
+}
+
+const char* getSharingModeName (VkSharingMode value)
+{
+	switch (value)
+	{
+		case VK_SHARING_MODE_EXCLUSIVE:		return "VK_SHARING_MODE_EXCLUSIVE";
+		case VK_SHARING_MODE_CONCURRENT:	return "VK_SHARING_MODE_CONCURRENT";
+		default:							return DE_NULL;
+	}
+}
+
+const char* getImageLayoutName (VkImageLayout value)
+{
+	switch (value)
+	{
+		case VK_IMAGE_LAYOUT_UNDEFINED:							return "VK_IMAGE_LAYOUT_UNDEFINED";
+		case VK_IMAGE_LAYOUT_GENERAL:							return "VK_IMAGE_LAYOUT_GENERAL";
+		case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:			return "VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL";
+		case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:	return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL";
+		case VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:	return "VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL";
+		case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:			return "VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL";
+		case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:				return "VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL";
+		case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:				return "VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL";
+		case VK_IMAGE_LAYOUT_PREINITIALIZED:					return "VK_IMAGE_LAYOUT_PREINITIALIZED";
+		case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR:					return "VK_IMAGE_LAYOUT_PRESENT_SRC_KHR";
+		default:												return DE_NULL;
+	}
+}
+
+const char* getImageViewTypeName (VkImageViewType value)
+{
+	switch (value)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D:			return "VK_IMAGE_VIEW_TYPE_1D";
+		case VK_IMAGE_VIEW_TYPE_2D:			return "VK_IMAGE_VIEW_TYPE_2D";
+		case VK_IMAGE_VIEW_TYPE_3D:			return "VK_IMAGE_VIEW_TYPE_3D";
+		case VK_IMAGE_VIEW_TYPE_CUBE:		return "VK_IMAGE_VIEW_TYPE_CUBE";
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:	return "VK_IMAGE_VIEW_TYPE_1D_ARRAY";
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:	return "VK_IMAGE_VIEW_TYPE_2D_ARRAY";
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:	return "VK_IMAGE_VIEW_TYPE_CUBE_ARRAY";
+		default:							return DE_NULL;
+	}
+}
+
+const char* getComponentSwizzleName (VkComponentSwizzle value)
+{
+	switch (value)
+	{
+		case VK_COMPONENT_SWIZZLE_IDENTITY:	return "VK_COMPONENT_SWIZZLE_IDENTITY";
+		case VK_COMPONENT_SWIZZLE_ZERO:		return "VK_COMPONENT_SWIZZLE_ZERO";
+		case VK_COMPONENT_SWIZZLE_ONE:		return "VK_COMPONENT_SWIZZLE_ONE";
+		case VK_COMPONENT_SWIZZLE_R:		return "VK_COMPONENT_SWIZZLE_R";
+		case VK_COMPONENT_SWIZZLE_G:		return "VK_COMPONENT_SWIZZLE_G";
+		case VK_COMPONENT_SWIZZLE_B:		return "VK_COMPONENT_SWIZZLE_B";
+		case VK_COMPONENT_SWIZZLE_A:		return "VK_COMPONENT_SWIZZLE_A";
+		default:							return DE_NULL;
+	}
+}
+
+const char* getVertexInputRateName (VkVertexInputRate value)
+{
+	switch (value)
+	{
+		case VK_VERTEX_INPUT_RATE_VERTEX:	return "VK_VERTEX_INPUT_RATE_VERTEX";
+		case VK_VERTEX_INPUT_RATE_INSTANCE:	return "VK_VERTEX_INPUT_RATE_INSTANCE";
+		default:							return DE_NULL;
+	}
+}
+
+const char* getPrimitiveTopologyName (VkPrimitiveTopology value)
+{
+	switch (value)
+	{
+		case VK_PRIMITIVE_TOPOLOGY_POINT_LIST:						return "VK_PRIMITIVE_TOPOLOGY_POINT_LIST";
+		case VK_PRIMITIVE_TOPOLOGY_LINE_LIST:						return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST";
+		case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:						return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP";
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:					return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST";
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:					return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP";
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:					return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN";
+		case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:		return "VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY";
+		case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:		return "VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY";
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:	return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY";
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:	return "VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY";
+		case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:						return "VK_PRIMITIVE_TOPOLOGY_PATCH_LIST";
+		default:													return DE_NULL;
+	}
+}
+
+const char* getPolygonModeName (VkPolygonMode value)
+{
+	switch (value)
+	{
+		case VK_POLYGON_MODE_FILL:	return "VK_POLYGON_MODE_FILL";
+		case VK_POLYGON_MODE_LINE:	return "VK_POLYGON_MODE_LINE";
+		case VK_POLYGON_MODE_POINT:	return "VK_POLYGON_MODE_POINT";
+		default:					return DE_NULL;
+	}
+}
+
+const char* getFrontFaceName (VkFrontFace value)
+{
+	switch (value)
+	{
+		case VK_FRONT_FACE_COUNTER_CLOCKWISE:	return "VK_FRONT_FACE_COUNTER_CLOCKWISE";
+		case VK_FRONT_FACE_CLOCKWISE:			return "VK_FRONT_FACE_CLOCKWISE";
+		default:								return DE_NULL;
+	}
+}
+
+const char* getCompareOpName (VkCompareOp value)
+{
+	switch (value)
+	{
+		case VK_COMPARE_OP_NEVER:				return "VK_COMPARE_OP_NEVER";
+		case VK_COMPARE_OP_LESS:				return "VK_COMPARE_OP_LESS";
+		case VK_COMPARE_OP_EQUAL:				return "VK_COMPARE_OP_EQUAL";
+		case VK_COMPARE_OP_LESS_OR_EQUAL:		return "VK_COMPARE_OP_LESS_OR_EQUAL";
+		case VK_COMPARE_OP_GREATER:				return "VK_COMPARE_OP_GREATER";
+		case VK_COMPARE_OP_NOT_EQUAL:			return "VK_COMPARE_OP_NOT_EQUAL";
+		case VK_COMPARE_OP_GREATER_OR_EQUAL:	return "VK_COMPARE_OP_GREATER_OR_EQUAL";
+		case VK_COMPARE_OP_ALWAYS:				return "VK_COMPARE_OP_ALWAYS";
+		default:								return DE_NULL;
+	}
+}
+
+const char* getStencilOpName (VkStencilOp value)
+{
+	switch (value)
+	{
+		case VK_STENCIL_OP_KEEP:				return "VK_STENCIL_OP_KEEP";
+		case VK_STENCIL_OP_ZERO:				return "VK_STENCIL_OP_ZERO";
+		case VK_STENCIL_OP_REPLACE:				return "VK_STENCIL_OP_REPLACE";
+		case VK_STENCIL_OP_INCREMENT_AND_CLAMP:	return "VK_STENCIL_OP_INCREMENT_AND_CLAMP";
+		case VK_STENCIL_OP_DECREMENT_AND_CLAMP:	return "VK_STENCIL_OP_DECREMENT_AND_CLAMP";
+		case VK_STENCIL_OP_INVERT:				return "VK_STENCIL_OP_INVERT";
+		case VK_STENCIL_OP_INCREMENT_AND_WRAP:	return "VK_STENCIL_OP_INCREMENT_AND_WRAP";
+		case VK_STENCIL_OP_DECREMENT_AND_WRAP:	return "VK_STENCIL_OP_DECREMENT_AND_WRAP";
+		default:								return DE_NULL;
+	}
+}
+
+const char* getLogicOpName (VkLogicOp value)
+{
+	switch (value)
+	{
+		case VK_LOGIC_OP_CLEAR:			return "VK_LOGIC_OP_CLEAR";
+		case VK_LOGIC_OP_AND:			return "VK_LOGIC_OP_AND";
+		case VK_LOGIC_OP_AND_REVERSE:	return "VK_LOGIC_OP_AND_REVERSE";
+		case VK_LOGIC_OP_COPY:			return "VK_LOGIC_OP_COPY";
+		case VK_LOGIC_OP_AND_INVERTED:	return "VK_LOGIC_OP_AND_INVERTED";
+		case VK_LOGIC_OP_NO_OP:			return "VK_LOGIC_OP_NO_OP";
+		case VK_LOGIC_OP_XOR:			return "VK_LOGIC_OP_XOR";
+		case VK_LOGIC_OP_OR:			return "VK_LOGIC_OP_OR";
+		case VK_LOGIC_OP_NOR:			return "VK_LOGIC_OP_NOR";
+		case VK_LOGIC_OP_EQUIVALENT:	return "VK_LOGIC_OP_EQUIVALENT";
+		case VK_LOGIC_OP_INVERT:		return "VK_LOGIC_OP_INVERT";
+		case VK_LOGIC_OP_OR_REVERSE:	return "VK_LOGIC_OP_OR_REVERSE";
+		case VK_LOGIC_OP_COPY_INVERTED:	return "VK_LOGIC_OP_COPY_INVERTED";
+		case VK_LOGIC_OP_OR_INVERTED:	return "VK_LOGIC_OP_OR_INVERTED";
+		case VK_LOGIC_OP_NAND:			return "VK_LOGIC_OP_NAND";
+		case VK_LOGIC_OP_SET:			return "VK_LOGIC_OP_SET";
+		default:						return DE_NULL;
+	}
+}
+
+const char* getBlendFactorName (VkBlendFactor value)
+{
+	switch (value)
+	{
+		case VK_BLEND_FACTOR_ZERO:						return "VK_BLEND_FACTOR_ZERO";
+		case VK_BLEND_FACTOR_ONE:						return "VK_BLEND_FACTOR_ONE";
+		case VK_BLEND_FACTOR_SRC_COLOR:					return "VK_BLEND_FACTOR_SRC_COLOR";
+		case VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR:		return "VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR";
+		case VK_BLEND_FACTOR_DST_COLOR:					return "VK_BLEND_FACTOR_DST_COLOR";
+		case VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR:		return "VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR";
+		case VK_BLEND_FACTOR_SRC_ALPHA:					return "VK_BLEND_FACTOR_SRC_ALPHA";
+		case VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA:		return "VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA";
+		case VK_BLEND_FACTOR_DST_ALPHA:					return "VK_BLEND_FACTOR_DST_ALPHA";
+		case VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA:		return "VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA";
+		case VK_BLEND_FACTOR_CONSTANT_COLOR:			return "VK_BLEND_FACTOR_CONSTANT_COLOR";
+		case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR:	return "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR";
+		case VK_BLEND_FACTOR_CONSTANT_ALPHA:			return "VK_BLEND_FACTOR_CONSTANT_ALPHA";
+		case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA:	return "VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA";
+		case VK_BLEND_FACTOR_SRC_ALPHA_SATURATE:		return "VK_BLEND_FACTOR_SRC_ALPHA_SATURATE";
+		case VK_BLEND_FACTOR_SRC1_COLOR:				return "VK_BLEND_FACTOR_SRC1_COLOR";
+		case VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR:		return "VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR";
+		case VK_BLEND_FACTOR_SRC1_ALPHA:				return "VK_BLEND_FACTOR_SRC1_ALPHA";
+		case VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA:		return "VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA";
+		default:										return DE_NULL;
+	}
+}
+
+const char* getBlendOpName (VkBlendOp value)
+{
+	switch (value)
+	{
+		case VK_BLEND_OP_ADD:				return "VK_BLEND_OP_ADD";
+		case VK_BLEND_OP_SUBTRACT:			return "VK_BLEND_OP_SUBTRACT";
+		case VK_BLEND_OP_REVERSE_SUBTRACT:	return "VK_BLEND_OP_REVERSE_SUBTRACT";
+		case VK_BLEND_OP_MIN:				return "VK_BLEND_OP_MIN";
+		case VK_BLEND_OP_MAX:				return "VK_BLEND_OP_MAX";
+		default:							return DE_NULL;
+	}
+}
+
+const char* getDynamicStateName (VkDynamicState value)
+{
+	switch (value)
+	{
+		case VK_DYNAMIC_STATE_VIEWPORT:				return "VK_DYNAMIC_STATE_VIEWPORT";
+		case VK_DYNAMIC_STATE_SCISSOR:				return "VK_DYNAMIC_STATE_SCISSOR";
+		case VK_DYNAMIC_STATE_LINE_WIDTH:			return "VK_DYNAMIC_STATE_LINE_WIDTH";
+		case VK_DYNAMIC_STATE_DEPTH_BIAS:			return "VK_DYNAMIC_STATE_DEPTH_BIAS";
+		case VK_DYNAMIC_STATE_BLEND_CONSTANTS:		return "VK_DYNAMIC_STATE_BLEND_CONSTANTS";
+		case VK_DYNAMIC_STATE_DEPTH_BOUNDS:			return "VK_DYNAMIC_STATE_DEPTH_BOUNDS";
+		case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:	return "VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK";
+		case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:	return "VK_DYNAMIC_STATE_STENCIL_WRITE_MASK";
+		case VK_DYNAMIC_STATE_STENCIL_REFERENCE:	return "VK_DYNAMIC_STATE_STENCIL_REFERENCE";
+		default:									return DE_NULL;
+	}
+}
+
+const char* getFilterName (VkFilter value)
+{
+	switch (value)
+	{
+		case VK_FILTER_NEAREST:	return "VK_FILTER_NEAREST";
+		case VK_FILTER_LINEAR:	return "VK_FILTER_LINEAR";
+		default:				return DE_NULL;
+	}
+}
+
+const char* getSamplerMipmapModeName (VkSamplerMipmapMode value)
+{
+	switch (value)
+	{
+		case VK_SAMPLER_MIPMAP_MODE_NEAREST:	return "VK_SAMPLER_MIPMAP_MODE_NEAREST";
+		case VK_SAMPLER_MIPMAP_MODE_LINEAR:		return "VK_SAMPLER_MIPMAP_MODE_LINEAR";
+		default:								return DE_NULL;
+	}
+}
+
+const char* getSamplerAddressModeName (VkSamplerAddressMode value)
+{
+	switch (value)
+	{
+		case VK_SAMPLER_ADDRESS_MODE_REPEAT:				return "VK_SAMPLER_ADDRESS_MODE_REPEAT";
+		case VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT:		return "VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT";
+		case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE:			return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE";
+		case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER:		return "VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER";
+		case VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE:	return "VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE";
+		default:											return DE_NULL;
+	}
+}
+
+const char* getBorderColorName (VkBorderColor value)
+{
+	switch (value)
+	{
+		case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:	return "VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK";
+		case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:		return "VK_BORDER_COLOR_INT_TRANSPARENT_BLACK";
+		case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK:		return "VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK";
+		case VK_BORDER_COLOR_INT_OPAQUE_BLACK:			return "VK_BORDER_COLOR_INT_OPAQUE_BLACK";
+		case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE:		return "VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE";
+		case VK_BORDER_COLOR_INT_OPAQUE_WHITE:			return "VK_BORDER_COLOR_INT_OPAQUE_WHITE";
+		default:										return DE_NULL;
+	}
+}
+
+const char* getDescriptorTypeName (VkDescriptorType value)
+{
+	switch (value)
+	{
+		case VK_DESCRIPTOR_TYPE_SAMPLER:				return "VK_DESCRIPTOR_TYPE_SAMPLER";
+		case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:	return "VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER";
+		case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:			return "VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE";
+		case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:			return "VK_DESCRIPTOR_TYPE_STORAGE_IMAGE";
+		case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:	return "VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER";
+		case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:	return "VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER";
+		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:			return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER";
+		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:			return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER";
+		case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:	return "VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC";
+		case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:	return "VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC";
+		case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:		return "VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT";
+		default:										return DE_NULL;
+	}
+}
+
+const char* getAttachmentLoadOpName (VkAttachmentLoadOp value)
+{
+	switch (value)
+	{
+		case VK_ATTACHMENT_LOAD_OP_LOAD:		return "VK_ATTACHMENT_LOAD_OP_LOAD";
+		case VK_ATTACHMENT_LOAD_OP_CLEAR:		return "VK_ATTACHMENT_LOAD_OP_CLEAR";
+		case VK_ATTACHMENT_LOAD_OP_DONT_CARE:	return "VK_ATTACHMENT_LOAD_OP_DONT_CARE";
+		default:								return DE_NULL;
+	}
+}
+
+const char* getAttachmentStoreOpName (VkAttachmentStoreOp value)
+{
+	switch (value)
+	{
+		case VK_ATTACHMENT_STORE_OP_STORE:		return "VK_ATTACHMENT_STORE_OP_STORE";
+		case VK_ATTACHMENT_STORE_OP_DONT_CARE:	return "VK_ATTACHMENT_STORE_OP_DONT_CARE";
+		default:								return DE_NULL;
+	}
+}
+
+const char* getPipelineBindPointName (VkPipelineBindPoint value)
+{
+	switch (value)
+	{
+		case VK_PIPELINE_BIND_POINT_GRAPHICS:	return "VK_PIPELINE_BIND_POINT_GRAPHICS";
+		case VK_PIPELINE_BIND_POINT_COMPUTE:	return "VK_PIPELINE_BIND_POINT_COMPUTE";
+		default:								return DE_NULL;
+	}
+}
+
+const char* getCommandBufferLevelName (VkCommandBufferLevel value)
+{
+	switch (value)
+	{
+		case VK_COMMAND_BUFFER_LEVEL_PRIMARY:	return "VK_COMMAND_BUFFER_LEVEL_PRIMARY";
+		case VK_COMMAND_BUFFER_LEVEL_SECONDARY:	return "VK_COMMAND_BUFFER_LEVEL_SECONDARY";
+		default:								return DE_NULL;
+	}
+}
+
+const char* getIndexTypeName (VkIndexType value)
+{
+	switch (value)
+	{
+		case VK_INDEX_TYPE_UINT16:	return "VK_INDEX_TYPE_UINT16";
+		case VK_INDEX_TYPE_UINT32:	return "VK_INDEX_TYPE_UINT32";
+		default:					return DE_NULL;
+	}
+}
+
+const char* getSubpassContentsName (VkSubpassContents value)
+{
+	switch (value)
+	{
+		case VK_SUBPASS_CONTENTS_INLINE:					return "VK_SUBPASS_CONTENTS_INLINE";
+		case VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS:	return "VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS";
+		default:											return DE_NULL;
+	}
+}
+
+const char* getColorSpaceKHRName (VkColorSpaceKHR value)
+{
+	switch (value)
+	{
+		case VK_COLORSPACE_SRGB_NONLINEAR_KHR:	return "VK_COLORSPACE_SRGB_NONLINEAR_KHR";
+		default:								return DE_NULL;
+	}
+}
+
+const char* getPresentModeKHRName (VkPresentModeKHR value)
+{
+	switch (value)
+	{
+		case VK_PRESENT_MODE_IMMEDIATE_KHR:		return "VK_PRESENT_MODE_IMMEDIATE_KHR";
+		case VK_PRESENT_MODE_MAILBOX_KHR:		return "VK_PRESENT_MODE_MAILBOX_KHR";
+		case VK_PRESENT_MODE_FIFO_KHR:			return "VK_PRESENT_MODE_FIFO_KHR";
+		case VK_PRESENT_MODE_FIFO_RELAXED_KHR:	return "VK_PRESENT_MODE_FIFO_RELAXED_KHR";
+		default:								return DE_NULL;
+	}
+}
+
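+// Bitfield helpers: each function below builds a descriptor table mapping flag bits
+// to their enum names and hands it to tcu::Format::Bitfield<32>, which formats the
+// value as the set of named bits for test-log output.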
+tcu::Format::Bitfield<32> getFormatFeatureFlagsStr (VkFormatFeatureFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,				"VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT"),
+		tcu::Format::BitDesc(VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,				"VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT"),
+		tcu::Format::BitDesc(VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,		"VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT"),
+		tcu::Format::BitDesc(VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,		"VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT"),
+		tcu::Format::BitDesc(VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,		"VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT"),
+		tcu::Format::BitDesc(VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,	"VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT"),
+		tcu::Format::BitDesc(VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,				"VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT"),
+		tcu::Format::BitDesc(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,			"VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT"),
+		tcu::Format::BitDesc(VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,		"VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT"),
+		tcu::Format::BitDesc(VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,	"VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT"),
+		tcu::Format::BitDesc(VK_FORMAT_FEATURE_BLIT_SRC_BIT,					"VK_FORMAT_FEATURE_BLIT_SRC_BIT"),
+		tcu::Format::BitDesc(VK_FORMAT_FEATURE_BLIT_DST_BIT,					"VK_FORMAT_FEATURE_BLIT_DST_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getImageUsageFlagsStr (VkImageUsageFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_IMAGE_USAGE_TRANSFER_SRC_BIT,				"VK_IMAGE_USAGE_TRANSFER_SRC_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_USAGE_TRANSFER_DST_BIT,				"VK_IMAGE_USAGE_TRANSFER_DST_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_USAGE_SAMPLED_BIT,					"VK_IMAGE_USAGE_SAMPLED_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_USAGE_STORAGE_BIT,					"VK_IMAGE_USAGE_STORAGE_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,			"VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,	"VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,		"VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,			"VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getImageCreateFlagsStr (VkImageCreateFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_IMAGE_CREATE_SPARSE_BINDING_BIT,	"VK_IMAGE_CREATE_SPARSE_BINDING_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,	"VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,	"VK_IMAGE_CREATE_SPARSE_ALIASED_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,	"VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,	"VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getSampleCountFlagsStr (VkSampleCountFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_SAMPLE_COUNT_1_BIT,		"VK_SAMPLE_COUNT_1_BIT"),
+		tcu::Format::BitDesc(VK_SAMPLE_COUNT_2_BIT,		"VK_SAMPLE_COUNT_2_BIT"),
+		tcu::Format::BitDesc(VK_SAMPLE_COUNT_4_BIT,		"VK_SAMPLE_COUNT_4_BIT"),
+		tcu::Format::BitDesc(VK_SAMPLE_COUNT_8_BIT,		"VK_SAMPLE_COUNT_8_BIT"),
+		tcu::Format::BitDesc(VK_SAMPLE_COUNT_16_BIT,	"VK_SAMPLE_COUNT_16_BIT"),
+		tcu::Format::BitDesc(VK_SAMPLE_COUNT_32_BIT,	"VK_SAMPLE_COUNT_32_BIT"),
+		tcu::Format::BitDesc(VK_SAMPLE_COUNT_64_BIT,	"VK_SAMPLE_COUNT_64_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getQueueFlagsStr (VkQueueFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_QUEUE_GRAPHICS_BIT,			"VK_QUEUE_GRAPHICS_BIT"),
+		tcu::Format::BitDesc(VK_QUEUE_COMPUTE_BIT,			"VK_QUEUE_COMPUTE_BIT"),
+		tcu::Format::BitDesc(VK_QUEUE_TRANSFER_BIT,			"VK_QUEUE_TRANSFER_BIT"),
+		tcu::Format::BitDesc(VK_QUEUE_SPARSE_BINDING_BIT,	"VK_QUEUE_SPARSE_BINDING_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getMemoryPropertyFlagsStr (VkMemoryPropertyFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,		"VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT"),
+		tcu::Format::BitDesc(VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,		"VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT"),
+		tcu::Format::BitDesc(VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,		"VK_MEMORY_PROPERTY_HOST_COHERENT_BIT"),
+		tcu::Format::BitDesc(VK_MEMORY_PROPERTY_HOST_CACHED_BIT,		"VK_MEMORY_PROPERTY_HOST_CACHED_BIT"),
+		tcu::Format::BitDesc(VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,	"VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getMemoryHeapFlagsStr (VkMemoryHeapFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,	"VK_MEMORY_HEAP_DEVICE_LOCAL_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getPipelineStageFlagsStr (VkPipelineStageFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,						"VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,					"VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,					"VK_PIPELINE_STAGE_VERTEX_INPUT_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,					"VK_PIPELINE_STAGE_VERTEX_SHADER_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,		"VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,	"VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,					"VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,					"VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,			"VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,				"VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,			"VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,					"VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_TRANSFER_BIT,						"VK_PIPELINE_STAGE_TRANSFER_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,					"VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_HOST_BIT,							"VK_PIPELINE_STAGE_HOST_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,					"VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,					"VK_PIPELINE_STAGE_ALL_COMMANDS_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getImageAspectFlagsStr (VkImageAspectFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_IMAGE_ASPECT_COLOR_BIT,		"VK_IMAGE_ASPECT_COLOR_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_ASPECT_DEPTH_BIT,		"VK_IMAGE_ASPECT_DEPTH_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_ASPECT_STENCIL_BIT,	"VK_IMAGE_ASPECT_STENCIL_BIT"),
+		tcu::Format::BitDesc(VK_IMAGE_ASPECT_METADATA_BIT,	"VK_IMAGE_ASPECT_METADATA_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getSparseImageFormatFlagsStr (VkSparseImageFormatFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,			"VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT"),
+		tcu::Format::BitDesc(VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,		"VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT"),
+		tcu::Format::BitDesc(VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT,	"VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getSparseMemoryBindFlagsStr (VkSparseMemoryBindFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_SPARSE_MEMORY_BIND_METADATA_BIT,	"VK_SPARSE_MEMORY_BIND_METADATA_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getFenceCreateFlagsStr (VkFenceCreateFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_FENCE_CREATE_SIGNALED_BIT,	"VK_FENCE_CREATE_SIGNALED_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getQueryPipelineStatisticFlagsStr (VkQueryPipelineStatisticFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,						"VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT"),
+		tcu::Format::BitDesc(VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,						"VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT"),
+		tcu::Format::BitDesc(VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,						"VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT"),
+		tcu::Format::BitDesc(VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,					"VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT"),
+		tcu::Format::BitDesc(VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,					"VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT"),
+		tcu::Format::BitDesc(VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,							"VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT"),
+		tcu::Format::BitDesc(VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,							"VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT"),
+		tcu::Format::BitDesc(VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,					"VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT"),
+		tcu::Format::BitDesc(VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,			"VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT"),
+		tcu::Format::BitDesc(VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,	"VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT"),
+		tcu::Format::BitDesc(VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT,					"VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getQueryResultFlagsStr (VkQueryResultFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_QUERY_RESULT_64_BIT,				"VK_QUERY_RESULT_64_BIT"),
+		tcu::Format::BitDesc(VK_QUERY_RESULT_WAIT_BIT,				"VK_QUERY_RESULT_WAIT_BIT"),
+		tcu::Format::BitDesc(VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,	"VK_QUERY_RESULT_WITH_AVAILABILITY_BIT"),
+		tcu::Format::BitDesc(VK_QUERY_RESULT_PARTIAL_BIT,			"VK_QUERY_RESULT_PARTIAL_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getBufferCreateFlagsStr (VkBufferCreateFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_BUFFER_CREATE_SPARSE_BINDING_BIT,	"VK_BUFFER_CREATE_SPARSE_BINDING_BIT"),
+		tcu::Format::BitDesc(VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,	"VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT"),
+		tcu::Format::BitDesc(VK_BUFFER_CREATE_SPARSE_ALIASED_BIT,	"VK_BUFFER_CREATE_SPARSE_ALIASED_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getBufferUsageFlagsStr (VkBufferUsageFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_BUFFER_USAGE_TRANSFER_SRC_BIT,			"VK_BUFFER_USAGE_TRANSFER_SRC_BIT"),
+		tcu::Format::BitDesc(VK_BUFFER_USAGE_TRANSFER_DST_BIT,			"VK_BUFFER_USAGE_TRANSFER_DST_BIT"),
+		tcu::Format::BitDesc(VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,	"VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT"),
+		tcu::Format::BitDesc(VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,	"VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT"),
+		tcu::Format::BitDesc(VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,		"VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT"),
+		tcu::Format::BitDesc(VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,		"VK_BUFFER_USAGE_STORAGE_BUFFER_BIT"),
+		tcu::Format::BitDesc(VK_BUFFER_USAGE_INDEX_BUFFER_BIT,			"VK_BUFFER_USAGE_INDEX_BUFFER_BIT"),
+		tcu::Format::BitDesc(VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,			"VK_BUFFER_USAGE_VERTEX_BUFFER_BIT"),
+		tcu::Format::BitDesc(VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,		"VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getPipelineCreateFlagsStr (VkPipelineCreateFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,	"VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,		"VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT"),
+		tcu::Format::BitDesc(VK_PIPELINE_CREATE_DERIVATIVE_BIT,				"VK_PIPELINE_CREATE_DERIVATIVE_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getShaderStageFlagsStr (VkShaderStageFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_SHADER_STAGE_VERTEX_BIT,					"VK_SHADER_STAGE_VERTEX_BIT"),
+		tcu::Format::BitDesc(VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,		"VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT"),
+		tcu::Format::BitDesc(VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,	"VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT"),
+		tcu::Format::BitDesc(VK_SHADER_STAGE_GEOMETRY_BIT,					"VK_SHADER_STAGE_GEOMETRY_BIT"),
+		tcu::Format::BitDesc(VK_SHADER_STAGE_FRAGMENT_BIT,					"VK_SHADER_STAGE_FRAGMENT_BIT"),
+		tcu::Format::BitDesc(VK_SHADER_STAGE_COMPUTE_BIT,					"VK_SHADER_STAGE_COMPUTE_BIT"),
+		tcu::Format::BitDesc(VK_SHADER_STAGE_ALL_GRAPHICS,					"VK_SHADER_STAGE_ALL_GRAPHICS"),
+		tcu::Format::BitDesc(VK_SHADER_STAGE_ALL,							"VK_SHADER_STAGE_ALL"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getCullModeFlagsStr (VkCullModeFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_CULL_MODE_NONE,				"VK_CULL_MODE_NONE"),
+		tcu::Format::BitDesc(VK_CULL_MODE_FRONT_BIT,		"VK_CULL_MODE_FRONT_BIT"),
+		tcu::Format::BitDesc(VK_CULL_MODE_BACK_BIT,			"VK_CULL_MODE_BACK_BIT"),
+		tcu::Format::BitDesc(VK_CULL_MODE_FRONT_AND_BACK,	"VK_CULL_MODE_FRONT_AND_BACK"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getColorComponentFlagsStr (VkColorComponentFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_COLOR_COMPONENT_R_BIT,	"VK_COLOR_COMPONENT_R_BIT"),
+		tcu::Format::BitDesc(VK_COLOR_COMPONENT_G_BIT,	"VK_COLOR_COMPONENT_G_BIT"),
+		tcu::Format::BitDesc(VK_COLOR_COMPONENT_B_BIT,	"VK_COLOR_COMPONENT_B_BIT"),
+		tcu::Format::BitDesc(VK_COLOR_COMPONENT_A_BIT,	"VK_COLOR_COMPONENT_A_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getDescriptorPoolCreateFlagsStr (VkDescriptorPoolCreateFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,	"VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getAttachmentDescriptionFlagsStr (VkAttachmentDescriptionFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT,	"VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getAccessFlagsStr (VkAccessFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_ACCESS_INDIRECT_COMMAND_READ_BIT,			"VK_ACCESS_INDIRECT_COMMAND_READ_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_INDEX_READ_BIT,						"VK_ACCESS_INDEX_READ_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,			"VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_UNIFORM_READ_BIT,					"VK_ACCESS_UNIFORM_READ_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,			"VK_ACCESS_INPUT_ATTACHMENT_READ_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_SHADER_READ_BIT,						"VK_ACCESS_SHADER_READ_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_SHADER_WRITE_BIT,					"VK_ACCESS_SHADER_WRITE_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,			"VK_ACCESS_COLOR_ATTACHMENT_READ_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,			"VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,	"VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,	"VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_TRANSFER_READ_BIT,					"VK_ACCESS_TRANSFER_READ_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_TRANSFER_WRITE_BIT,					"VK_ACCESS_TRANSFER_WRITE_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_HOST_READ_BIT,						"VK_ACCESS_HOST_READ_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_HOST_WRITE_BIT,						"VK_ACCESS_HOST_WRITE_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_MEMORY_READ_BIT,						"VK_ACCESS_MEMORY_READ_BIT"),
+		tcu::Format::BitDesc(VK_ACCESS_MEMORY_WRITE_BIT,					"VK_ACCESS_MEMORY_WRITE_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getDependencyFlagsStr (VkDependencyFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_DEPENDENCY_BY_REGION_BIT,	"VK_DEPENDENCY_BY_REGION_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getCommandPoolCreateFlagsStr (VkCommandPoolCreateFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,				"VK_COMMAND_POOL_CREATE_TRANSIENT_BIT"),
+		tcu::Format::BitDesc(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,	"VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getCommandPoolResetFlagsStr (VkCommandPoolResetFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT,	"VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getCommandBufferUsageFlagsStr (VkCommandBufferUsageFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,		"VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT"),
+		tcu::Format::BitDesc(VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,	"VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT"),
+		tcu::Format::BitDesc(VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,		"VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getQueryControlFlagsStr (VkQueryControlFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_QUERY_CONTROL_PRECISE_BIT,	"VK_QUERY_CONTROL_PRECISE_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getCommandBufferResetFlagsStr (VkCommandBufferResetFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT,	"VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getStencilFaceFlagsStr (VkStencilFaceFlags value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_STENCIL_FACE_FRONT_BIT,	"VK_STENCIL_FACE_FRONT_BIT"),
+		tcu::Format::BitDesc(VK_STENCIL_FACE_BACK_BIT,	"VK_STENCIL_FACE_BACK_BIT"),
+		tcu::Format::BitDesc(VK_STENCIL_FRONT_AND_BACK,	"VK_STENCIL_FRONT_AND_BACK"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getSurfaceTransformFlagsKHRStr (VkSurfaceTransformFlagsKHR value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,						"VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR"),
+		tcu::Format::BitDesc(VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,					"VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR"),
+		tcu::Format::BitDesc(VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,					"VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR"),
+		tcu::Format::BitDesc(VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,					"VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR"),
+		tcu::Format::BitDesc(VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,			"VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR"),
+		tcu::Format::BitDesc(VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,	"VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR"),
+		tcu::Format::BitDesc(VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,	"VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR"),
+		tcu::Format::BitDesc(VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,	"VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR"),
+		tcu::Format::BitDesc(VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR,						"VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getCompositeAlphaFlagsKHRStr (VkCompositeAlphaFlagsKHR value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,				"VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR"),
+		tcu::Format::BitDesc(VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,		"VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR"),
+		tcu::Format::BitDesc(VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,	"VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR"),
+		tcu::Format::BitDesc(VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR,			"VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
+tcu::Format::Bitfield<32> getDisplayPlaneAlphaFlagsKHRStr (VkDisplayPlaneAlphaFlagsKHR value)
+{
+	static const tcu::Format::BitDesc s_desc[] =
+	{
+		tcu::Format::BitDesc(VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,						"VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR"),
+		tcu::Format::BitDesc(VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,						"VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR"),
+		tcu::Format::BitDesc(VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,					"VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR"),
+		tcu::Format::BitDesc(VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR,	"VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR"),
+	};
+	return tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));
+}
+
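+// The remaining flags types have no bits defined in the core Vulkan 1.0 API
+// (they are reserved for future use), so no descriptor table is supplied and
+// only the raw value is printed.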
+tcu::Format::Bitfield<32> getInstanceCreateFlagsStr (VkInstanceCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getDeviceCreateFlagsStr (VkDeviceCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getDeviceQueueCreateFlagsStr (VkDeviceQueueCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getMemoryMapFlagsStr (VkMemoryMapFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getSemaphoreCreateFlagsStr (VkSemaphoreCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getEventCreateFlagsStr (VkEventCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getQueryPoolCreateFlagsStr (VkQueryPoolCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getBufferViewCreateFlagsStr (VkBufferViewCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getImageViewCreateFlagsStr (VkImageViewCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getShaderModuleCreateFlagsStr (VkShaderModuleCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getPipelineCacheCreateFlagsStr (VkPipelineCacheCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getPipelineShaderStageCreateFlagsStr (VkPipelineShaderStageCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getPipelineVertexInputStateCreateFlagsStr (VkPipelineVertexInputStateCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getPipelineInputAssemblyStateCreateFlagsStr (VkPipelineInputAssemblyStateCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getPipelineTessellationStateCreateFlagsStr (VkPipelineTessellationStateCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getPipelineViewportStateCreateFlagsStr (VkPipelineViewportStateCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getPipelineRasterizationStateCreateFlagsStr (VkPipelineRasterizationStateCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getPipelineMultisampleStateCreateFlagsStr (VkPipelineMultisampleStateCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getPipelineDepthStencilStateCreateFlagsStr (VkPipelineDepthStencilStateCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getPipelineColorBlendStateCreateFlagsStr (VkPipelineColorBlendStateCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getPipelineDynamicStateCreateFlagsStr (VkPipelineDynamicStateCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getPipelineLayoutCreateFlagsStr (VkPipelineLayoutCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getSamplerCreateFlagsStr (VkSamplerCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getDescriptorSetLayoutCreateFlagsStr (VkDescriptorSetLayoutCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getDescriptorPoolResetFlagsStr (VkDescriptorPoolResetFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getFramebufferCreateFlagsStr (VkFramebufferCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getRenderPassCreateFlagsStr (VkRenderPassCreateFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getSubpassDescriptionFlagsStr (VkSubpassDescriptionFlags value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getSwapchainCreateFlagsKHRStr (VkSwapchainCreateFlagsKHR value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getDisplayModeCreateFlagsKHRStr (VkDisplayModeCreateFlagsKHR value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getDisplaySurfaceCreateFlagsKHRStr (VkDisplaySurfaceCreateFlagsKHR value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getXlibSurfaceCreateFlagsKHRStr (VkXlibSurfaceCreateFlagsKHR value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getXcbSurfaceCreateFlagsKHRStr (VkXcbSurfaceCreateFlagsKHR value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getWaylandSurfaceCreateFlagsKHRStr (VkWaylandSurfaceCreateFlagsKHR value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getMirSurfaceCreateFlagsKHRStr (VkMirSurfaceCreateFlagsKHR value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getAndroidSurfaceCreateFlagsKHRStr (VkAndroidSurfaceCreateFlagsKHR value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
+tcu::Format::Bitfield<32> getWin32SurfaceCreateFlagsKHRStr (VkWin32SurfaceCreateFlagsKHR value)
+{
+	return tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);
+}
+
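+// Stream operators for Vulkan structures: these print a structure field-by-field so
+// that test code can log it directly. Illustrative (not part of this change) usage:
+//   log << tcu::TestLog::Message << deviceFeatures << tcu::TestLog::EndMessage;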
+std::ostream& operator<< (std::ostream& s, const VkApplicationInfo& value)
+{
+	s << "VkApplicationInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tpApplicationName = " << getCharPtrStr(value.pApplicationName) << '\n';
+	s << "\tapplicationVersion = " << value.applicationVersion << '\n';
+	s << "\tpEngineName = " << getCharPtrStr(value.pEngineName) << '\n';
+	s << "\tengineVersion = " << value.engineVersion << '\n';
+	s << "\tapiVersion = " << value.apiVersion << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkInstanceCreateInfo& value)
+{
+	s << "VkInstanceCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getInstanceCreateFlagsStr(value.flags) << '\n';
+	s << "\tpApplicationInfo = " << value.pApplicationInfo << '\n';
+	s << "\tenabledLayerCount = " << value.enabledLayerCount << '\n';
+	s << "\tppEnabledLayerNames = " << value.ppEnabledLayerNames << '\n';
+	s << "\tenabledExtensionCount = " << value.enabledExtensionCount << '\n';
+	s << "\tppEnabledExtensionNames = " << value.ppEnabledExtensionNames << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkAllocationCallbacks& value)
+{
+	s << "VkAllocationCallbacks = {\n";
+	s << "\tpUserData = " << value.pUserData << '\n';
+	s << "\tpfnAllocation = " << value.pfnAllocation << '\n';
+	s << "\tpfnReallocation = " << value.pfnReallocation << '\n';
+	s << "\tpfnFree = " << value.pfnFree << '\n';
+	s << "\tpfnInternalAllocation = " << value.pfnInternalAllocation << '\n';
+	s << "\tpfnInternalFree = " << value.pfnInternalFree << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPhysicalDeviceFeatures& value)
+{
+	s << "VkPhysicalDeviceFeatures = {\n";
+	s << "\trobustBufferAccess = " << value.robustBufferAccess << '\n';
+	s << "\tfullDrawIndexUint32 = " << value.fullDrawIndexUint32 << '\n';
+	s << "\timageCubeArray = " << value.imageCubeArray << '\n';
+	s << "\tindependentBlend = " << value.independentBlend << '\n';
+	s << "\tgeometryShader = " << value.geometryShader << '\n';
+	s << "\ttessellationShader = " << value.tessellationShader << '\n';
+	s << "\tsampleRateShading = " << value.sampleRateShading << '\n';
+	s << "\tdualSrcBlend = " << value.dualSrcBlend << '\n';
+	s << "\tlogicOp = " << value.logicOp << '\n';
+	s << "\tmultiDrawIndirect = " << value.multiDrawIndirect << '\n';
+	s << "\tdrawIndirectFirstInstance = " << value.drawIndirectFirstInstance << '\n';
+	s << "\tdepthClamp = " << value.depthClamp << '\n';
+	s << "\tdepthBiasClamp = " << value.depthBiasClamp << '\n';
+	s << "\tfillModeNonSolid = " << value.fillModeNonSolid << '\n';
+	s << "\tdepthBounds = " << value.depthBounds << '\n';
+	s << "\twideLines = " << value.wideLines << '\n';
+	s << "\tlargePoints = " << value.largePoints << '\n';
+	s << "\talphaToOne = " << value.alphaToOne << '\n';
+	s << "\tmultiViewport = " << value.multiViewport << '\n';
+	s << "\tsamplerAnisotropy = " << value.samplerAnisotropy << '\n';
+	s << "\ttextureCompressionETC2 = " << value.textureCompressionETC2 << '\n';
+	s << "\ttextureCompressionASTC_LDR = " << value.textureCompressionASTC_LDR << '\n';
+	s << "\ttextureCompressionBC = " << value.textureCompressionBC << '\n';
+	s << "\tocclusionQueryPrecise = " << value.occlusionQueryPrecise << '\n';
+	s << "\tpipelineStatisticsQuery = " << value.pipelineStatisticsQuery << '\n';
+	s << "\tvertexPipelineStoresAndAtomics = " << value.vertexPipelineStoresAndAtomics << '\n';
+	s << "\tfragmentStoresAndAtomics = " << value.fragmentStoresAndAtomics << '\n';
+	s << "\tshaderTessellationAndGeometryPointSize = " << value.shaderTessellationAndGeometryPointSize << '\n';
+	s << "\tshaderImageGatherExtended = " << value.shaderImageGatherExtended << '\n';
+	s << "\tshaderStorageImageExtendedFormats = " << value.shaderStorageImageExtendedFormats << '\n';
+	s << "\tshaderStorageImageMultisample = " << value.shaderStorageImageMultisample << '\n';
+	s << "\tshaderStorageImageReadWithoutFormat = " << value.shaderStorageImageReadWithoutFormat << '\n';
+	s << "\tshaderStorageImageWriteWithoutFormat = " << value.shaderStorageImageWriteWithoutFormat << '\n';
+	s << "\tshaderUniformBufferArrayDynamicIndexing = " << value.shaderUniformBufferArrayDynamicIndexing << '\n';
+	s << "\tshaderSampledImageArrayDynamicIndexing = " << value.shaderSampledImageArrayDynamicIndexing << '\n';
+	s << "\tshaderStorageBufferArrayDynamicIndexing = " << value.shaderStorageBufferArrayDynamicIndexing << '\n';
+	s << "\tshaderStorageImageArrayDynamicIndexing = " << value.shaderStorageImageArrayDynamicIndexing << '\n';
+	s << "\tshaderClipDistance = " << value.shaderClipDistance << '\n';
+	s << "\tshaderCullDistance = " << value.shaderCullDistance << '\n';
+	s << "\tshaderFloat64 = " << value.shaderFloat64 << '\n';
+	s << "\tshaderInt64 = " << value.shaderInt64 << '\n';
+	s << "\tshaderInt16 = " << value.shaderInt16 << '\n';
+	s << "\tshaderResourceResidency = " << value.shaderResourceResidency << '\n';
+	s << "\tshaderResourceMinLod = " << value.shaderResourceMinLod << '\n';
+	s << "\tsparseBinding = " << value.sparseBinding << '\n';
+	s << "\tsparseResidencyBuffer = " << value.sparseResidencyBuffer << '\n';
+	s << "\tsparseResidencyImage2D = " << value.sparseResidencyImage2D << '\n';
+	s << "\tsparseResidencyImage3D = " << value.sparseResidencyImage3D << '\n';
+	s << "\tsparseResidency2Samples = " << value.sparseResidency2Samples << '\n';
+	s << "\tsparseResidency4Samples = " << value.sparseResidency4Samples << '\n';
+	s << "\tsparseResidency8Samples = " << value.sparseResidency8Samples << '\n';
+	s << "\tsparseResidency16Samples = " << value.sparseResidency16Samples << '\n';
+	s << "\tsparseResidencyAliased = " << value.sparseResidencyAliased << '\n';
+	s << "\tvariableMultisampleRate = " << value.variableMultisampleRate << '\n';
+	s << "\tinheritedQueries = " << value.inheritedQueries << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkFormatProperties& value)
+{
+	s << "VkFormatProperties = {\n";
+	s << "\tlinearTilingFeatures = " << getFormatFeatureFlagsStr(value.linearTilingFeatures) << '\n';
+	s << "\toptimalTilingFeatures = " << getFormatFeatureFlagsStr(value.optimalTilingFeatures) << '\n';
+	s << "\tbufferFeatures = " << getFormatFeatureFlagsStr(value.bufferFeatures) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkExtent3D& value)
+{
+	s << "VkExtent3D = {\n";
+	s << "\twidth = " << value.width << '\n';
+	s << "\theight = " << value.height << '\n';
+	s << "\tdepth = " << value.depth << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkImageFormatProperties& value)
+{
+	s << "VkImageFormatProperties = {\n";
+	s << "\tmaxExtent = " << value.maxExtent << '\n';
+	s << "\tmaxMipLevels = " << value.maxMipLevels << '\n';
+	s << "\tmaxArrayLayers = " << value.maxArrayLayers << '\n';
+	s << "\tsampleCounts = " << getSampleCountFlagsStr(value.sampleCounts) << '\n';
+	s << "\tmaxResourceSize = " << value.maxResourceSize << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPhysicalDeviceLimits& value)
+{
+	s << "VkPhysicalDeviceLimits = {\n";
+	s << "\tmaxImageDimension1D = " << value.maxImageDimension1D << '\n';
+	s << "\tmaxImageDimension2D = " << value.maxImageDimension2D << '\n';
+	s << "\tmaxImageDimension3D = " << value.maxImageDimension3D << '\n';
+	s << "\tmaxImageDimensionCube = " << value.maxImageDimensionCube << '\n';
+	s << "\tmaxImageArrayLayers = " << value.maxImageArrayLayers << '\n';
+	s << "\tmaxTexelBufferElements = " << value.maxTexelBufferElements << '\n';
+	s << "\tmaxUniformBufferRange = " << value.maxUniformBufferRange << '\n';
+	s << "\tmaxStorageBufferRange = " << value.maxStorageBufferRange << '\n';
+	s << "\tmaxPushConstantsSize = " << value.maxPushConstantsSize << '\n';
+	s << "\tmaxMemoryAllocationCount = " << value.maxMemoryAllocationCount << '\n';
+	s << "\tmaxSamplerAllocationCount = " << value.maxSamplerAllocationCount << '\n';
+	s << "\tbufferImageGranularity = " << value.bufferImageGranularity << '\n';
+	s << "\tsparseAddressSpaceSize = " << value.sparseAddressSpaceSize << '\n';
+	s << "\tmaxBoundDescriptorSets = " << value.maxBoundDescriptorSets << '\n';
+	s << "\tmaxPerStageDescriptorSamplers = " << value.maxPerStageDescriptorSamplers << '\n';
+	s << "\tmaxPerStageDescriptorUniformBuffers = " << value.maxPerStageDescriptorUniformBuffers << '\n';
+	s << "\tmaxPerStageDescriptorStorageBuffers = " << value.maxPerStageDescriptorStorageBuffers << '\n';
+	s << "\tmaxPerStageDescriptorSampledImages = " << value.maxPerStageDescriptorSampledImages << '\n';
+	s << "\tmaxPerStageDescriptorStorageImages = " << value.maxPerStageDescriptorStorageImages << '\n';
+	s << "\tmaxPerStageDescriptorInputAttachments = " << value.maxPerStageDescriptorInputAttachments << '\n';
+	s << "\tmaxPerStageResources = " << value.maxPerStageResources << '\n';
+	s << "\tmaxDescriptorSetSamplers = " << value.maxDescriptorSetSamplers << '\n';
+	s << "\tmaxDescriptorSetUniformBuffers = " << value.maxDescriptorSetUniformBuffers << '\n';
+	s << "\tmaxDescriptorSetUniformBuffersDynamic = " << value.maxDescriptorSetUniformBuffersDynamic << '\n';
+	s << "\tmaxDescriptorSetStorageBuffers = " << value.maxDescriptorSetStorageBuffers << '\n';
+	s << "\tmaxDescriptorSetStorageBuffersDynamic = " << value.maxDescriptorSetStorageBuffersDynamic << '\n';
+	s << "\tmaxDescriptorSetSampledImages = " << value.maxDescriptorSetSampledImages << '\n';
+	s << "\tmaxDescriptorSetStorageImages = " << value.maxDescriptorSetStorageImages << '\n';
+	s << "\tmaxDescriptorSetInputAttachments = " << value.maxDescriptorSetInputAttachments << '\n';
+	s << "\tmaxVertexInputAttributes = " << value.maxVertexInputAttributes << '\n';
+	s << "\tmaxVertexInputBindings = " << value.maxVertexInputBindings << '\n';
+	s << "\tmaxVertexInputAttributeOffset = " << value.maxVertexInputAttributeOffset << '\n';
+	s << "\tmaxVertexInputBindingStride = " << value.maxVertexInputBindingStride << '\n';
+	s << "\tmaxVertexOutputComponents = " << value.maxVertexOutputComponents << '\n';
+	s << "\tmaxTessellationGenerationLevel = " << value.maxTessellationGenerationLevel << '\n';
+	s << "\tmaxTessellationPatchSize = " << value.maxTessellationPatchSize << '\n';
+	s << "\tmaxTessellationControlPerVertexInputComponents = " << value.maxTessellationControlPerVertexInputComponents << '\n';
+	s << "\tmaxTessellationControlPerVertexOutputComponents = " << value.maxTessellationControlPerVertexOutputComponents << '\n';
+	s << "\tmaxTessellationControlPerPatchOutputComponents = " << value.maxTessellationControlPerPatchOutputComponents << '\n';
+	s << "\tmaxTessellationControlTotalOutputComponents = " << value.maxTessellationControlTotalOutputComponents << '\n';
+	s << "\tmaxTessellationEvaluationInputComponents = " << value.maxTessellationEvaluationInputComponents << '\n';
+	s << "\tmaxTessellationEvaluationOutputComponents = " << value.maxTessellationEvaluationOutputComponents << '\n';
+	s << "\tmaxGeometryShaderInvocations = " << value.maxGeometryShaderInvocations << '\n';
+	s << "\tmaxGeometryInputComponents = " << value.maxGeometryInputComponents << '\n';
+	s << "\tmaxGeometryOutputComponents = " << value.maxGeometryOutputComponents << '\n';
+	s << "\tmaxGeometryOutputVertices = " << value.maxGeometryOutputVertices << '\n';
+	s << "\tmaxGeometryTotalOutputComponents = " << value.maxGeometryTotalOutputComponents << '\n';
+	s << "\tmaxFragmentInputComponents = " << value.maxFragmentInputComponents << '\n';
+	s << "\tmaxFragmentOutputAttachments = " << value.maxFragmentOutputAttachments << '\n';
+	s << "\tmaxFragmentDualSrcAttachments = " << value.maxFragmentDualSrcAttachments << '\n';
+	s << "\tmaxFragmentCombinedOutputResources = " << value.maxFragmentCombinedOutputResources << '\n';
+	s << "\tmaxComputeSharedMemorySize = " << value.maxComputeSharedMemorySize << '\n';
+	s << "\tmaxComputeWorkGroupCount = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.maxComputeWorkGroupCount), DE_ARRAY_END(value.maxComputeWorkGroupCount)) << '\n';
+	s << "\tmaxComputeWorkGroupInvocations = " << value.maxComputeWorkGroupInvocations << '\n';
+	s << "\tmaxComputeWorkGroupSize = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.maxComputeWorkGroupSize), DE_ARRAY_END(value.maxComputeWorkGroupSize)) << '\n';
+	s << "\tsubPixelPrecisionBits = " << value.subPixelPrecisionBits << '\n';
+	s << "\tsubTexelPrecisionBits = " << value.subTexelPrecisionBits << '\n';
+	s << "\tmipmapPrecisionBits = " << value.mipmapPrecisionBits << '\n';
+	s << "\tmaxDrawIndexedIndexValue = " << value.maxDrawIndexedIndexValue << '\n';
+	s << "\tmaxDrawIndirectCount = " << value.maxDrawIndirectCount << '\n';
+	s << "\tmaxSamplerLodBias = " << value.maxSamplerLodBias << '\n';
+	s << "\tmaxSamplerAnisotropy = " << value.maxSamplerAnisotropy << '\n';
+	s << "\tmaxViewports = " << value.maxViewports << '\n';
+	s << "\tmaxViewportDimensions = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.maxViewportDimensions), DE_ARRAY_END(value.maxViewportDimensions)) << '\n';
+	s << "\tviewportBoundsRange = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.viewportBoundsRange), DE_ARRAY_END(value.viewportBoundsRange)) << '\n';
+	s << "\tviewportSubPixelBits = " << value.viewportSubPixelBits << '\n';
+	s << "\tminMemoryMapAlignment = " << value.minMemoryMapAlignment << '\n';
+	s << "\tminTexelBufferOffsetAlignment = " << value.minTexelBufferOffsetAlignment << '\n';
+	s << "\tminUniformBufferOffsetAlignment = " << value.minUniformBufferOffsetAlignment << '\n';
+	s << "\tminStorageBufferOffsetAlignment = " << value.minStorageBufferOffsetAlignment << '\n';
+	s << "\tminTexelOffset = " << value.minTexelOffset << '\n';
+	s << "\tmaxTexelOffset = " << value.maxTexelOffset << '\n';
+	s << "\tminTexelGatherOffset = " << value.minTexelGatherOffset << '\n';
+	s << "\tmaxTexelGatherOffset = " << value.maxTexelGatherOffset << '\n';
+	s << "\tminInterpolationOffset = " << value.minInterpolationOffset << '\n';
+	s << "\tmaxInterpolationOffset = " << value.maxInterpolationOffset << '\n';
+	s << "\tsubPixelInterpolationOffsetBits = " << value.subPixelInterpolationOffsetBits << '\n';
+	s << "\tmaxFramebufferWidth = " << value.maxFramebufferWidth << '\n';
+	s << "\tmaxFramebufferHeight = " << value.maxFramebufferHeight << '\n';
+	s << "\tmaxFramebufferLayers = " << value.maxFramebufferLayers << '\n';
+	s << "\tframebufferColorSampleCounts = " << getSampleCountFlagsStr(value.framebufferColorSampleCounts) << '\n';
+	s << "\tframebufferDepthSampleCounts = " << getSampleCountFlagsStr(value.framebufferDepthSampleCounts) << '\n';
+	s << "\tframebufferStencilSampleCounts = " << getSampleCountFlagsStr(value.framebufferStencilSampleCounts) << '\n';
+	s << "\tframebufferNoAttachmentsSampleCounts = " << getSampleCountFlagsStr(value.framebufferNoAttachmentsSampleCounts) << '\n';
+	s << "\tmaxColorAttachments = " << value.maxColorAttachments << '\n';
+	s << "\tsampledImageColorSampleCounts = " << getSampleCountFlagsStr(value.sampledImageColorSampleCounts) << '\n';
+	s << "\tsampledImageIntegerSampleCounts = " << getSampleCountFlagsStr(value.sampledImageIntegerSampleCounts) << '\n';
+	s << "\tsampledImageDepthSampleCounts = " << getSampleCountFlagsStr(value.sampledImageDepthSampleCounts) << '\n';
+	s << "\tsampledImageStencilSampleCounts = " << getSampleCountFlagsStr(value.sampledImageStencilSampleCounts) << '\n';
+	s << "\tstorageImageSampleCounts = " << getSampleCountFlagsStr(value.storageImageSampleCounts) << '\n';
+	s << "\tmaxSampleMaskWords = " << value.maxSampleMaskWords << '\n';
+	s << "\ttimestampComputeAndGraphics = " << value.timestampComputeAndGraphics << '\n';
+	s << "\ttimestampPeriod = " << value.timestampPeriod << '\n';
+	s << "\tmaxClipDistances = " << value.maxClipDistances << '\n';
+	s << "\tmaxCullDistances = " << value.maxCullDistances << '\n';
+	s << "\tmaxCombinedClipAndCullDistances = " << value.maxCombinedClipAndCullDistances << '\n';
+	s << "\tdiscreteQueuePriorities = " << value.discreteQueuePriorities << '\n';
+	s << "\tpointSizeRange = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.pointSizeRange), DE_ARRAY_END(value.pointSizeRange)) << '\n';
+	s << "\tlineWidthRange = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.lineWidthRange), DE_ARRAY_END(value.lineWidthRange)) << '\n';
+	s << "\tpointSizeGranularity = " << value.pointSizeGranularity << '\n';
+	s << "\tlineWidthGranularity = " << value.lineWidthGranularity << '\n';
+	s << "\tstrictLines = " << value.strictLines << '\n';
+	s << "\tstandardSampleLocations = " << value.standardSampleLocations << '\n';
+	s << "\toptimalBufferCopyOffsetAlignment = " << value.optimalBufferCopyOffsetAlignment << '\n';
+	s << "\toptimalBufferCopyRowPitchAlignment = " << value.optimalBufferCopyRowPitchAlignment << '\n';
+	s << "\tnonCoherentAtomSize = " << value.nonCoherentAtomSize << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPhysicalDeviceSparseProperties& value)
+{
+	s << "VkPhysicalDeviceSparseProperties = {\n";
+	s << "\tresidencyStandard2DBlockShape = " << value.residencyStandard2DBlockShape << '\n';
+	s << "\tresidencyStandard2DMultisampleBlockShape = " << value.residencyStandard2DMultisampleBlockShape << '\n';
+	s << "\tresidencyStandard3DBlockShape = " << value.residencyStandard3DBlockShape << '\n';
+	s << "\tresidencyAlignedMipSize = " << value.residencyAlignedMipSize << '\n';
+	s << "\tresidencyNonResidentStrict = " << value.residencyNonResidentStrict << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPhysicalDeviceProperties& value)
+{
+	s << "VkPhysicalDeviceProperties = {\n";
+	s << "\tapiVersion = " << value.apiVersion << '\n';
+	s << "\tdriverVersion = " << value.driverVersion << '\n';
+	s << "\tvendorID = " << value.vendorID << '\n';
+	s << "\tdeviceID = " << value.deviceID << '\n';
+	s << "\tdeviceType = " << value.deviceType << '\n';
+	s << "\tdeviceName = " << (const char*)value.deviceName << '\n';
+	s << "\tpipelineCacheUUID = " << '\n' << tcu::formatArray(tcu::Format::HexIterator<deUint8>(DE_ARRAY_BEGIN(value.pipelineCacheUUID)), tcu::Format::HexIterator<deUint8>(DE_ARRAY_END(value.pipelineCacheUUID))) << '\n';
+	s << "\tlimits = " << value.limits << '\n';
+	s << "\tsparseProperties = " << value.sparseProperties << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkQueueFamilyProperties& value)
+{
+	s << "VkQueueFamilyProperties = {\n";
+	s << "\tqueueFlags = " << getQueueFlagsStr(value.queueFlags) << '\n';
+	s << "\tqueueCount = " << value.queueCount << '\n';
+	s << "\ttimestampValidBits = " << value.timestampValidBits << '\n';
+	s << "\tminImageTransferGranularity = " << value.minImageTransferGranularity << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkMemoryType& value)
+{
+	s << "VkMemoryType = {\n";
+	s << "\tpropertyFlags = " << getMemoryPropertyFlagsStr(value.propertyFlags) << '\n';
+	s << "\theapIndex = " << value.heapIndex << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkMemoryHeap& value)
+{
+	s << "VkMemoryHeap = {\n";
+	s << "\tsize = " << value.size << '\n';
+	s << "\tflags = " << getMemoryHeapFlagsStr(value.flags) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPhysicalDeviceMemoryProperties& value)
+{
+	s << "VkPhysicalDeviceMemoryProperties = {\n";
+	s << "\tmemoryTypeCount = " << value.memoryTypeCount << '\n';
+	s << "\tmemoryTypes = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.memoryTypes), DE_ARRAY_END(value.memoryTypes)) << '\n';
+	s << "\tmemoryHeapCount = " << value.memoryHeapCount << '\n';
+	s << "\tmemoryHeaps = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.memoryHeaps), DE_ARRAY_END(value.memoryHeaps)) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkDeviceQueueCreateInfo& value)
+{
+	s << "VkDeviceQueueCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getDeviceQueueCreateFlagsStr(value.flags) << '\n';
+	s << "\tqueueFamilyIndex = " << value.queueFamilyIndex << '\n';
+	s << "\tqueueCount = " << value.queueCount << '\n';
+	s << "\tpQueuePriorities = " << value.pQueuePriorities << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkDeviceCreateInfo& value)
+{
+	s << "VkDeviceCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getDeviceCreateFlagsStr(value.flags) << '\n';
+	s << "\tqueueCreateInfoCount = " << value.queueCreateInfoCount << '\n';
+	s << "\tpQueueCreateInfos = " << value.pQueueCreateInfos << '\n';
+	s << "\tenabledLayerCount = " << value.enabledLayerCount << '\n';
+	s << "\tppEnabledLayerNames = " << value.ppEnabledLayerNames << '\n';
+	s << "\tenabledExtensionCount = " << value.enabledExtensionCount << '\n';
+	s << "\tppEnabledExtensionNames = " << value.ppEnabledExtensionNames << '\n';
+	s << "\tpEnabledFeatures = " << value.pEnabledFeatures << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkExtensionProperties& value)
+{
+	s << "VkExtensionProperties = {\n";
+	s << "\textensionName = " << (const char*)value.extensionName << '\n';
+	s << "\tspecVersion = " << value.specVersion << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkLayerProperties& value)
+{
+	s << "VkLayerProperties = {\n";
+	s << "\tlayerName = " << (const char*)value.layerName << '\n';
+	s << "\tspecVersion = " << value.specVersion << '\n';
+	s << "\timplementationVersion = " << value.implementationVersion << '\n';
+	s << "\tdescription = " << (const char*)value.description << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSubmitInfo& value)
+{
+	s << "VkSubmitInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\twaitSemaphoreCount = " << value.waitSemaphoreCount << '\n';
+	s << "\tpWaitSemaphores = " << value.pWaitSemaphores << '\n';
+	s << "\tpWaitDstStageMask = " << value.pWaitDstStageMask << '\n';
+	s << "\tcommandBufferCount = " << value.commandBufferCount << '\n';
+	s << "\tpCommandBuffers = " << value.pCommandBuffers << '\n';
+	s << "\tsignalSemaphoreCount = " << value.signalSemaphoreCount << '\n';
+	s << "\tpSignalSemaphores = " << value.pSignalSemaphores << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkMemoryAllocateInfo& value)
+{
+	s << "VkMemoryAllocateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tallocationSize = " << value.allocationSize << '\n';
+	s << "\tmemoryTypeIndex = " << value.memoryTypeIndex << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkMappedMemoryRange& value)
+{
+	s << "VkMappedMemoryRange = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tmemory = " << value.memory << '\n';
+	s << "\toffset = " << value.offset << '\n';
+	s << "\tsize = " << value.size << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkMemoryRequirements& value)
+{
+	s << "VkMemoryRequirements = {\n";
+	s << "\tsize = " << value.size << '\n';
+	s << "\talignment = " << value.alignment << '\n';
+	s << "\tmemoryTypeBits = " << value.memoryTypeBits << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSparseImageFormatProperties& value)
+{
+	s << "VkSparseImageFormatProperties = {\n";
+	s << "\taspectMask = " << getImageAspectFlagsStr(value.aspectMask) << '\n';
+	s << "\timageGranularity = " << value.imageGranularity << '\n';
+	s << "\tflags = " << getSparseImageFormatFlagsStr(value.flags) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSparseImageMemoryRequirements& value)
+{
+	s << "VkSparseImageMemoryRequirements = {\n";
+	s << "\tformatProperties = " << value.formatProperties << '\n';
+	s << "\timageMipTailFirstLod = " << value.imageMipTailFirstLod << '\n';
+	s << "\timageMipTailSize = " << value.imageMipTailSize << '\n';
+	s << "\timageMipTailOffset = " << value.imageMipTailOffset << '\n';
+	s << "\timageMipTailStride = " << value.imageMipTailStride << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSparseMemoryBind& value)
+{
+	s << "VkSparseMemoryBind = {\n";
+	s << "\tresourceOffset = " << value.resourceOffset << '\n';
+	s << "\tsize = " << value.size << '\n';
+	s << "\tmemory = " << value.memory << '\n';
+	s << "\tmemoryOffset = " << value.memoryOffset << '\n';
+	s << "\tflags = " << getSparseMemoryBindFlagsStr(value.flags) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSparseBufferMemoryBindInfo& value)
+{
+	s << "VkSparseBufferMemoryBindInfo = {\n";
+	s << "\tbuffer = " << value.buffer << '\n';
+	s << "\tbindCount = " << value.bindCount << '\n';
+	s << "\tpBinds = " << value.pBinds << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSparseImageOpaqueMemoryBindInfo& value)
+{
+	s << "VkSparseImageOpaqueMemoryBindInfo = {\n";
+	s << "\timage = " << value.image << '\n';
+	s << "\tbindCount = " << value.bindCount << '\n';
+	s << "\tpBinds = " << value.pBinds << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkImageSubresource& value)
+{
+	s << "VkImageSubresource = {\n";
+	s << "\taspectMask = " << getImageAspectFlagsStr(value.aspectMask) << '\n';
+	s << "\tmipLevel = " << value.mipLevel << '\n';
+	s << "\tarrayLayer = " << value.arrayLayer << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkOffset3D& value)
+{
+	s << "VkOffset3D = {\n";
+	s << "\tx = " << value.x << '\n';
+	s << "\ty = " << value.y << '\n';
+	s << "\tz = " << value.z << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSparseImageMemoryBind& value)
+{
+	s << "VkSparseImageMemoryBind = {\n";
+	s << "\tsubresource = " << value.subresource << '\n';
+	s << "\toffset = " << value.offset << '\n';
+	s << "\textent = " << value.extent << '\n';
+	s << "\tmemory = " << value.memory << '\n';
+	s << "\tmemoryOffset = " << value.memoryOffset << '\n';
+	s << "\tflags = " << getSparseMemoryBindFlagsStr(value.flags) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSparseImageMemoryBindInfo& value)
+{
+	s << "VkSparseImageMemoryBindInfo = {\n";
+	s << "\timage = " << value.image << '\n';
+	s << "\tbindCount = " << value.bindCount << '\n';
+	s << "\tpBinds = " << value.pBinds << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkBindSparseInfo& value)
+{
+	s << "VkBindSparseInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\twaitSemaphoreCount = " << value.waitSemaphoreCount << '\n';
+	s << "\tpWaitSemaphores = " << value.pWaitSemaphores << '\n';
+	s << "\tbufferBindCount = " << value.bufferBindCount << '\n';
+	s << "\tpBufferBinds = " << value.pBufferBinds << '\n';
+	s << "\timageOpaqueBindCount = " << value.imageOpaqueBindCount << '\n';
+	s << "\tpImageOpaqueBinds = " << value.pImageOpaqueBinds << '\n';
+	s << "\timageBindCount = " << value.imageBindCount << '\n';
+	s << "\tpImageBinds = " << value.pImageBinds << '\n';
+	s << "\tsignalSemaphoreCount = " << value.signalSemaphoreCount << '\n';
+	s << "\tpSignalSemaphores = " << value.pSignalSemaphores << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkFenceCreateInfo& value)
+{
+	s << "VkFenceCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getFenceCreateFlagsStr(value.flags) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSemaphoreCreateInfo& value)
+{
+	s << "VkSemaphoreCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getSemaphoreCreateFlagsStr(value.flags) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkEventCreateInfo& value)
+{
+	s << "VkEventCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getEventCreateFlagsStr(value.flags) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkQueryPoolCreateInfo& value)
+{
+	s << "VkQueryPoolCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getQueryPoolCreateFlagsStr(value.flags) << '\n';
+	s << "\tqueryType = " << value.queryType << '\n';
+	s << "\tqueryCount = " << value.queryCount << '\n';
+	s << "\tpipelineStatistics = " << getQueryPipelineStatisticFlagsStr(value.pipelineStatistics) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkBufferCreateInfo& value)
+{
+	s << "VkBufferCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getBufferCreateFlagsStr(value.flags) << '\n';
+	s << "\tsize = " << value.size << '\n';
+	s << "\tusage = " << getBufferUsageFlagsStr(value.usage) << '\n';
+	s << "\tsharingMode = " << value.sharingMode << '\n';
+	s << "\tqueueFamilyIndexCount = " << value.queueFamilyIndexCount << '\n';
+	s << "\tpQueueFamilyIndices = " << value.pQueueFamilyIndices << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkBufferViewCreateInfo& value)
+{
+	s << "VkBufferViewCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getBufferViewCreateFlagsStr(value.flags) << '\n';
+	s << "\tbuffer = " << value.buffer << '\n';
+	s << "\tformat = " << value.format << '\n';
+	s << "\toffset = " << value.offset << '\n';
+	s << "\trange = " << value.range << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkImageCreateInfo& value)
+{
+	s << "VkImageCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getImageCreateFlagsStr(value.flags) << '\n';
+	s << "\timageType = " << value.imageType << '\n';
+	s << "\tformat = " << value.format << '\n';
+	s << "\textent = " << value.extent << '\n';
+	s << "\tmipLevels = " << value.mipLevels << '\n';
+	s << "\tarrayLayers = " << value.arrayLayers << '\n';
+	s << "\tsamples = " << value.samples << '\n';
+	s << "\ttiling = " << value.tiling << '\n';
+	s << "\tusage = " << getImageUsageFlagsStr(value.usage) << '\n';
+	s << "\tsharingMode = " << value.sharingMode << '\n';
+	s << "\tqueueFamilyIndexCount = " << value.queueFamilyIndexCount << '\n';
+	s << "\tpQueueFamilyIndices = " << value.pQueueFamilyIndices << '\n';
+	s << "\tinitialLayout = " << value.initialLayout << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSubresourceLayout& value)
+{
+	s << "VkSubresourceLayout = {\n";
+	s << "\toffset = " << value.offset << '\n';
+	s << "\tsize = " << value.size << '\n';
+	s << "\trowPitch = " << value.rowPitch << '\n';
+	s << "\tarrayPitch = " << value.arrayPitch << '\n';
+	s << "\tdepthPitch = " << value.depthPitch << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkComponentMapping& value)
+{
+	s << "VkComponentMapping = {\n";
+	s << "\tr = " << value.r << '\n';
+	s << "\tg = " << value.g << '\n';
+	s << "\tb = " << value.b << '\n';
+	s << "\ta = " << value.a << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkImageSubresourceRange& value)
+{
+	s << "VkImageSubresourceRange = {\n";
+	s << "\taspectMask = " << getImageAspectFlagsStr(value.aspectMask) << '\n';
+	s << "\tbaseMipLevel = " << value.baseMipLevel << '\n';
+	s << "\tlevelCount = " << value.levelCount << '\n';
+	s << "\tbaseArrayLayer = " << value.baseArrayLayer << '\n';
+	s << "\tlayerCount = " << value.layerCount << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkImageViewCreateInfo& value)
+{
+	s << "VkImageViewCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getImageViewCreateFlagsStr(value.flags) << '\n';
+	s << "\timage = " << value.image << '\n';
+	s << "\tviewType = " << value.viewType << '\n';
+	s << "\tformat = " << value.format << '\n';
+	s << "\tcomponents = " << value.components << '\n';
+	s << "\tsubresourceRange = " << value.subresourceRange << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkShaderModuleCreateInfo& value)
+{
+	s << "VkShaderModuleCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getShaderModuleCreateFlagsStr(value.flags) << '\n';
+	s << "\tcodeSize = " << value.codeSize << '\n';
+	s << "\tpCode = " << value.pCode << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPipelineCacheCreateInfo& value)
+{
+	s << "VkPipelineCacheCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineCacheCreateFlagsStr(value.flags) << '\n';
+	s << "\tinitialDataSize = " << value.initialDataSize << '\n';
+	s << "\tpInitialData = " << value.pInitialData << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSpecializationMapEntry& value)
+{
+	s << "VkSpecializationMapEntry = {\n";
+	s << "\tconstantID = " << value.constantID << '\n';
+	s << "\toffset = " << value.offset << '\n';
+	s << "\tsize = " << value.size << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSpecializationInfo& value)
+{
+	s << "VkSpecializationInfo = {\n";
+	s << "\tmapEntryCount = " << value.mapEntryCount << '\n';
+	s << "\tpMapEntries = " << value.pMapEntries << '\n';
+	s << "\tdataSize = " << value.dataSize << '\n';
+	s << "\tpData = " << value.pData << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPipelineShaderStageCreateInfo& value)
+{
+	s << "VkPipelineShaderStageCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineShaderStageCreateFlagsStr(value.flags) << '\n';
+	s << "\tstage = " << value.stage << '\n';
+	s << "\tmodule = " << value.module << '\n';
+	s << "\tpName = " << getCharPtrStr(value.pName) << '\n';
+	s << "\tpSpecializationInfo = " << value.pSpecializationInfo << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkVertexInputBindingDescription& value)
+{
+	s << "VkVertexInputBindingDescription = {\n";
+	s << "\tbinding = " << value.binding << '\n';
+	s << "\tstride = " << value.stride << '\n';
+	s << "\tinputRate = " << value.inputRate << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkVertexInputAttributeDescription& value)
+{
+	s << "VkVertexInputAttributeDescription = {\n";
+	s << "\tlocation = " << value.location << '\n';
+	s << "\tbinding = " << value.binding << '\n';
+	s << "\tformat = " << value.format << '\n';
+	s << "\toffset = " << value.offset << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPipelineVertexInputStateCreateInfo& value)
+{
+	s << "VkPipelineVertexInputStateCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineVertexInputStateCreateFlagsStr(value.flags) << '\n';
+	s << "\tvertexBindingDescriptionCount = " << value.vertexBindingDescriptionCount << '\n';
+	s << "\tpVertexBindingDescriptions = " << value.pVertexBindingDescriptions << '\n';
+	s << "\tvertexAttributeDescriptionCount = " << value.vertexAttributeDescriptionCount << '\n';
+	s << "\tpVertexAttributeDescriptions = " << value.pVertexAttributeDescriptions << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPipelineInputAssemblyStateCreateInfo& value)
+{
+	s << "VkPipelineInputAssemblyStateCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineInputAssemblyStateCreateFlagsStr(value.flags) << '\n';
+	s << "\ttopology = " << value.topology << '\n';
+	s << "\tprimitiveRestartEnable = " << value.primitiveRestartEnable << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPipelineTessellationStateCreateInfo& value)
+{
+	s << "VkPipelineTessellationStateCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineTessellationStateCreateFlagsStr(value.flags) << '\n';
+	s << "\tpatchControlPoints = " << value.patchControlPoints << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkViewport& value)
+{
+	s << "VkViewport = {\n";
+	s << "\tx = " << value.x << '\n';
+	s << "\ty = " << value.y << '\n';
+	s << "\twidth = " << value.width << '\n';
+	s << "\theight = " << value.height << '\n';
+	s << "\tminDepth = " << value.minDepth << '\n';
+	s << "\tmaxDepth = " << value.maxDepth << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkOffset2D& value)
+{
+	s << "VkOffset2D = {\n";
+	s << "\tx = " << value.x << '\n';
+	s << "\ty = " << value.y << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkExtent2D& value)
+{
+	s << "VkExtent2D = {\n";
+	s << "\twidth = " << value.width << '\n';
+	s << "\theight = " << value.height << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkRect2D& value)
+{
+	s << "VkRect2D = {\n";
+	s << "\toffset = " << value.offset << '\n';
+	s << "\textent = " << value.extent << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPipelineViewportStateCreateInfo& value)
+{
+	s << "VkPipelineViewportStateCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineViewportStateCreateFlagsStr(value.flags) << '\n';
+	s << "\tviewportCount = " << value.viewportCount << '\n';
+	s << "\tpViewports = " << value.pViewports << '\n';
+	s << "\tscissorCount = " << value.scissorCount << '\n';
+	s << "\tpScissors = " << value.pScissors << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPipelineRasterizationStateCreateInfo& value)
+{
+	s << "VkPipelineRasterizationStateCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineRasterizationStateCreateFlagsStr(value.flags) << '\n';
+	s << "\tdepthClampEnable = " << value.depthClampEnable << '\n';
+	s << "\trasterizerDiscardEnable = " << value.rasterizerDiscardEnable << '\n';
+	s << "\tpolygonMode = " << value.polygonMode << '\n';
+	s << "\tcullMode = " << getCullModeFlagsStr(value.cullMode) << '\n';
+	s << "\tfrontFace = " << value.frontFace << '\n';
+	s << "\tdepthBiasEnable = " << value.depthBiasEnable << '\n';
+	s << "\tdepthBiasConstantFactor = " << value.depthBiasConstantFactor << '\n';
+	s << "\tdepthBiasClamp = " << value.depthBiasClamp << '\n';
+	s << "\tdepthBiasSlopeFactor = " << value.depthBiasSlopeFactor << '\n';
+	s << "\tlineWidth = " << value.lineWidth << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPipelineMultisampleStateCreateInfo& value)
+{
+	s << "VkPipelineMultisampleStateCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineMultisampleStateCreateFlagsStr(value.flags) << '\n';
+	s << "\trasterizationSamples = " << value.rasterizationSamples << '\n';
+	s << "\tsampleShadingEnable = " << value.sampleShadingEnable << '\n';
+	s << "\tminSampleShading = " << value.minSampleShading << '\n';
+	s << "\tpSampleMask = " << value.pSampleMask << '\n';
+	s << "\talphaToCoverageEnable = " << value.alphaToCoverageEnable << '\n';
+	s << "\talphaToOneEnable = " << value.alphaToOneEnable << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkStencilOpState& value)
+{
+	s << "VkStencilOpState = {\n";
+	s << "\tfailOp = " << value.failOp << '\n';
+	s << "\tpassOp = " << value.passOp << '\n';
+	s << "\tdepthFailOp = " << value.depthFailOp << '\n';
+	s << "\tcompareOp = " << value.compareOp << '\n';
+	s << "\tcompareMask = " << value.compareMask << '\n';
+	s << "\twriteMask = " << value.writeMask << '\n';
+	s << "\treference = " << value.reference << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPipelineDepthStencilStateCreateInfo& value)
+{
+	s << "VkPipelineDepthStencilStateCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineDepthStencilStateCreateFlagsStr(value.flags) << '\n';
+	s << "\tdepthTestEnable = " << value.depthTestEnable << '\n';
+	s << "\tdepthWriteEnable = " << value.depthWriteEnable << '\n';
+	s << "\tdepthCompareOp = " << value.depthCompareOp << '\n';
+	s << "\tdepthBoundsTestEnable = " << value.depthBoundsTestEnable << '\n';
+	s << "\tstencilTestEnable = " << value.stencilTestEnable << '\n';
+	s << "\tfront = " << value.front << '\n';
+	s << "\tback = " << value.back << '\n';
+	s << "\tminDepthBounds = " << value.minDepthBounds << '\n';
+	s << "\tmaxDepthBounds = " << value.maxDepthBounds << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPipelineColorBlendAttachmentState& value)
+{
+	s << "VkPipelineColorBlendAttachmentState = {\n";
+	s << "\tblendEnable = " << value.blendEnable << '\n';
+	s << "\tsrcColorBlendFactor = " << value.srcColorBlendFactor << '\n';
+	s << "\tdstColorBlendFactor = " << value.dstColorBlendFactor << '\n';
+	s << "\tcolorBlendOp = " << value.colorBlendOp << '\n';
+	s << "\tsrcAlphaBlendFactor = " << value.srcAlphaBlendFactor << '\n';
+	s << "\tdstAlphaBlendFactor = " << value.dstAlphaBlendFactor << '\n';
+	s << "\talphaBlendOp = " << value.alphaBlendOp << '\n';
+	s << "\tcolorWriteMask = " << getColorComponentFlagsStr(value.colorWriteMask) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPipelineColorBlendStateCreateInfo& value)
+{
+	s << "VkPipelineColorBlendStateCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineColorBlendStateCreateFlagsStr(value.flags) << '\n';
+	s << "\tlogicOpEnable = " << value.logicOpEnable << '\n';
+	s << "\tlogicOp = " << value.logicOp << '\n';
+	s << "\tattachmentCount = " << value.attachmentCount << '\n';
+	s << "\tpAttachments = " << value.pAttachments << '\n';
+	s << "\tblendConstants = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.blendConstants), DE_ARRAY_END(value.blendConstants)) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPipelineDynamicStateCreateInfo& value)
+{
+	s << "VkPipelineDynamicStateCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineDynamicStateCreateFlagsStr(value.flags) << '\n';
+	s << "\tdynamicStateCount = " << value.dynamicStateCount << '\n';
+	s << "\tpDynamicStates = " << value.pDynamicStates << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkGraphicsPipelineCreateInfo& value)
+{
+	s << "VkGraphicsPipelineCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineCreateFlagsStr(value.flags) << '\n';
+	s << "\tstageCount = " << value.stageCount << '\n';
+	s << "\tpStages = " << value.pStages << '\n';
+	s << "\tpVertexInputState = " << value.pVertexInputState << '\n';
+	s << "\tpInputAssemblyState = " << value.pInputAssemblyState << '\n';
+	s << "\tpTessellationState = " << value.pTessellationState << '\n';
+	s << "\tpViewportState = " << value.pViewportState << '\n';
+	s << "\tpRasterizationState = " << value.pRasterizationState << '\n';
+	s << "\tpMultisampleState = " << value.pMultisampleState << '\n';
+	s << "\tpDepthStencilState = " << value.pDepthStencilState << '\n';
+	s << "\tpColorBlendState = " << value.pColorBlendState << '\n';
+	s << "\tpDynamicState = " << value.pDynamicState << '\n';
+	s << "\tlayout = " << value.layout << '\n';
+	s << "\trenderPass = " << value.renderPass << '\n';
+	s << "\tsubpass = " << value.subpass << '\n';
+	s << "\tbasePipelineHandle = " << value.basePipelineHandle << '\n';
+	s << "\tbasePipelineIndex = " << value.basePipelineIndex << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkComputePipelineCreateInfo& value)
+{
+	s << "VkComputePipelineCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineCreateFlagsStr(value.flags) << '\n';
+	s << "\tstage = " << value.stage << '\n';
+	s << "\tlayout = " << value.layout << '\n';
+	s << "\tbasePipelineHandle = " << value.basePipelineHandle << '\n';
+	s << "\tbasePipelineIndex = " << value.basePipelineIndex << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPushConstantRange& value)
+{
+	s << "VkPushConstantRange = {\n";
+	s << "\tstageFlags = " << getShaderStageFlagsStr(value.stageFlags) << '\n';
+	s << "\toffset = " << value.offset << '\n';
+	s << "\tsize = " << value.size << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkPipelineLayoutCreateInfo& value)
+{
+	s << "VkPipelineLayoutCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getPipelineLayoutCreateFlagsStr(value.flags) << '\n';
+	s << "\tsetLayoutCount = " << value.setLayoutCount << '\n';
+	s << "\tpSetLayouts = " << value.pSetLayouts << '\n';
+	s << "\tpushConstantRangeCount = " << value.pushConstantRangeCount << '\n';
+	s << "\tpPushConstantRanges = " << value.pPushConstantRanges << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSamplerCreateInfo& value)
+{
+	s << "VkSamplerCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getSamplerCreateFlagsStr(value.flags) << '\n';
+	s << "\tmagFilter = " << value.magFilter << '\n';
+	s << "\tminFilter = " << value.minFilter << '\n';
+	s << "\tmipmapMode = " << value.mipmapMode << '\n';
+	s << "\taddressModeU = " << value.addressModeU << '\n';
+	s << "\taddressModeV = " << value.addressModeV << '\n';
+	s << "\taddressModeW = " << value.addressModeW << '\n';
+	s << "\tmipLodBias = " << value.mipLodBias << '\n';
+	s << "\tanisotropyEnable = " << value.anisotropyEnable << '\n';
+	s << "\tmaxAnisotropy = " << value.maxAnisotropy << '\n';
+	s << "\tcompareEnable = " << value.compareEnable << '\n';
+	s << "\tcompareOp = " << value.compareOp << '\n';
+	s << "\tminLod = " << value.minLod << '\n';
+	s << "\tmaxLod = " << value.maxLod << '\n';
+	s << "\tborderColor = " << value.borderColor << '\n';
+	s << "\tunnormalizedCoordinates = " << value.unnormalizedCoordinates << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkDescriptorSetLayoutBinding& value)
+{
+	s << "VkDescriptorSetLayoutBinding = {\n";
+	s << "\tbinding = " << value.binding << '\n';
+	s << "\tdescriptorType = " << value.descriptorType << '\n';
+	s << "\tdescriptorCount = " << value.descriptorCount << '\n';
+	s << "\tstageFlags = " << getShaderStageFlagsStr(value.stageFlags) << '\n';
+	s << "\tpImmutableSamplers = " << value.pImmutableSamplers << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkDescriptorSetLayoutCreateInfo& value)
+{
+	s << "VkDescriptorSetLayoutCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getDescriptorSetLayoutCreateFlagsStr(value.flags) << '\n';
+	s << "\tbindingCount = " << value.bindingCount << '\n';
+	s << "\tpBindings = " << value.pBindings << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkDescriptorPoolSize& value)
+{
+	s << "VkDescriptorPoolSize = {\n";
+	s << "\ttype = " << value.type << '\n';
+	s << "\tdescriptorCount = " << value.descriptorCount << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkDescriptorPoolCreateInfo& value)
+{
+	s << "VkDescriptorPoolCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getDescriptorPoolCreateFlagsStr(value.flags) << '\n';
+	s << "\tmaxSets = " << value.maxSets << '\n';
+	s << "\tpoolSizeCount = " << value.poolSizeCount << '\n';
+	s << "\tpPoolSizes = " << value.pPoolSizes << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkDescriptorSetAllocateInfo& value)
+{
+	s << "VkDescriptorSetAllocateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tdescriptorPool = " << value.descriptorPool << '\n';
+	s << "\tdescriptorSetCount = " << value.descriptorSetCount << '\n';
+	s << "\tpSetLayouts = " << value.pSetLayouts << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkDescriptorImageInfo& value)
+{
+	s << "VkDescriptorImageInfo = {\n";
+	s << "\tsampler = " << value.sampler << '\n';
+	s << "\timageView = " << value.imageView << '\n';
+	s << "\timageLayout = " << value.imageLayout << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkDescriptorBufferInfo& value)
+{
+	s << "VkDescriptorBufferInfo = {\n";
+	s << "\tbuffer = " << value.buffer << '\n';
+	s << "\toffset = " << value.offset << '\n';
+	s << "\trange = " << value.range << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkWriteDescriptorSet& value)
+{
+	s << "VkWriteDescriptorSet = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tdstSet = " << value.dstSet << '\n';
+	s << "\tdstBinding = " << value.dstBinding << '\n';
+	s << "\tdstArrayElement = " << value.dstArrayElement << '\n';
+	s << "\tdescriptorCount = " << value.descriptorCount << '\n';
+	s << "\tdescriptorType = " << value.descriptorType << '\n';
+	s << "\tpImageInfo = " << value.pImageInfo << '\n';
+	s << "\tpBufferInfo = " << value.pBufferInfo << '\n';
+	s << "\tpTexelBufferView = " << value.pTexelBufferView << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkCopyDescriptorSet& value)
+{
+	s << "VkCopyDescriptorSet = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tsrcSet = " << value.srcSet << '\n';
+	s << "\tsrcBinding = " << value.srcBinding << '\n';
+	s << "\tsrcArrayElement = " << value.srcArrayElement << '\n';
+	s << "\tdstSet = " << value.dstSet << '\n';
+	s << "\tdstBinding = " << value.dstBinding << '\n';
+	s << "\tdstArrayElement = " << value.dstArrayElement << '\n';
+	s << "\tdescriptorCount = " << value.descriptorCount << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkFramebufferCreateInfo& value)
+{
+	s << "VkFramebufferCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getFramebufferCreateFlagsStr(value.flags) << '\n';
+	s << "\trenderPass = " << value.renderPass << '\n';
+	s << "\tattachmentCount = " << value.attachmentCount << '\n';
+	s << "\tpAttachments = " << value.pAttachments << '\n';
+	s << "\twidth = " << value.width << '\n';
+	s << "\theight = " << value.height << '\n';
+	s << "\tlayers = " << value.layers << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkAttachmentDescription& value)
+{
+	s << "VkAttachmentDescription = {\n";
+	s << "\tflags = " << getAttachmentDescriptionFlagsStr(value.flags) << '\n';
+	s << "\tformat = " << value.format << '\n';
+	s << "\tsamples = " << value.samples << '\n';
+	s << "\tloadOp = " << value.loadOp << '\n';
+	s << "\tstoreOp = " << value.storeOp << '\n';
+	s << "\tstencilLoadOp = " << value.stencilLoadOp << '\n';
+	s << "\tstencilStoreOp = " << value.stencilStoreOp << '\n';
+	s << "\tinitialLayout = " << value.initialLayout << '\n';
+	s << "\tfinalLayout = " << value.finalLayout << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkAttachmentReference& value)
+{
+	s << "VkAttachmentReference = {\n";
+	s << "\tattachment = " << value.attachment << '\n';
+	s << "\tlayout = " << value.layout << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSubpassDescription& value)
+{
+	s << "VkSubpassDescription = {\n";
+	s << "\tflags = " << getSubpassDescriptionFlagsStr(value.flags) << '\n';
+	s << "\tpipelineBindPoint = " << value.pipelineBindPoint << '\n';
+	s << "\tinputAttachmentCount = " << value.inputAttachmentCount << '\n';
+	s << "\tpInputAttachments = " << value.pInputAttachments << '\n';
+	s << "\tcolorAttachmentCount = " << value.colorAttachmentCount << '\n';
+	s << "\tpColorAttachments = " << value.pColorAttachments << '\n';
+	s << "\tpResolveAttachments = " << value.pResolveAttachments << '\n';
+	s << "\tpDepthStencilAttachment = " << value.pDepthStencilAttachment << '\n';
+	s << "\tpreserveAttachmentCount = " << value.preserveAttachmentCount << '\n';
+	s << "\tpPreserveAttachments = " << value.pPreserveAttachments << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkSubpassDependency& value)
+{
+	s << "VkSubpassDependency = {\n";
+	s << "\tsrcSubpass = " << value.srcSubpass << '\n';
+	s << "\tdstSubpass = " << value.dstSubpass << '\n';
+	s << "\tsrcStageMask = " << getPipelineStageFlagsStr(value.srcStageMask) << '\n';
+	s << "\tdstStageMask = " << getPipelineStageFlagsStr(value.dstStageMask) << '\n';
+	s << "\tsrcAccessMask = " << getAccessFlagsStr(value.srcAccessMask) << '\n';
+	s << "\tdstAccessMask = " << getAccessFlagsStr(value.dstAccessMask) << '\n';
+	s << "\tdependencyFlags = " << getDependencyFlagsStr(value.dependencyFlags) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkRenderPassCreateInfo& value)
+{
+	s << "VkRenderPassCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getRenderPassCreateFlagsStr(value.flags) << '\n';
+	s << "\tattachmentCount = " << value.attachmentCount << '\n';
+	s << "\tpAttachments = " << value.pAttachments << '\n';
+	s << "\tsubpassCount = " << value.subpassCount << '\n';
+	s << "\tpSubpasses = " << value.pSubpasses << '\n';
+	s << "\tdependencyCount = " << value.dependencyCount << '\n';
+	s << "\tpDependencies = " << value.pDependencies << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkCommandPoolCreateInfo& value)
+{
+	s << "VkCommandPoolCreateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getCommandPoolCreateFlagsStr(value.flags) << '\n';
+	s << "\tqueueFamilyIndex = " << value.queueFamilyIndex << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkCommandBufferAllocateInfo& value)
+{
+	s << "VkCommandBufferAllocateInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tcommandPool = " << value.commandPool << '\n';
+	s << "\tlevel = " << value.level << '\n';
+	s << "\tcommandBufferCount = " << value.commandBufferCount << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkCommandBufferInheritanceInfo& value)
+{
+	s << "VkCommandBufferInheritanceInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\trenderPass = " << value.renderPass << '\n';
+	s << "\tsubpass = " << value.subpass << '\n';
+	s << "\tframebuffer = " << value.framebuffer << '\n';
+	s << "\tocclusionQueryEnable = " << value.occlusionQueryEnable << '\n';
+	s << "\tqueryFlags = " << getQueryControlFlagsStr(value.queryFlags) << '\n';
+	s << "\tpipelineStatistics = " << getQueryPipelineStatisticFlagsStr(value.pipelineStatistics) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkCommandBufferBeginInfo& value)
+{
+	s << "VkCommandBufferBeginInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tflags = " << getCommandBufferUsageFlagsStr(value.flags) << '\n';
+	s << "\tpInheritanceInfo = " << value.pInheritanceInfo << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkBufferCopy& value)
+{
+	s << "VkBufferCopy = {\n";
+	s << "\tsrcOffset = " << value.srcOffset << '\n';
+	s << "\tdstOffset = " << value.dstOffset << '\n';
+	s << "\tsize = " << value.size << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkImageSubresourceLayers& value)
+{
+	s << "VkImageSubresourceLayers = {\n";
+	s << "\taspectMask = " << getImageAspectFlagsStr(value.aspectMask) << '\n';
+	s << "\tmipLevel = " << value.mipLevel << '\n';
+	s << "\tbaseArrayLayer = " << value.baseArrayLayer << '\n';
+	s << "\tlayerCount = " << value.layerCount << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkImageCopy& value)
+{
+	s << "VkImageCopy = {\n";
+	s << "\tsrcSubresource = " << value.srcSubresource << '\n';
+	s << "\tsrcOffset = " << value.srcOffset << '\n';
+	s << "\tdstSubresource = " << value.dstSubresource << '\n';
+	s << "\tdstOffset = " << value.dstOffset << '\n';
+	s << "\textent = " << value.extent << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkImageBlit& value)
+{
+	s << "VkImageBlit = {\n";
+	s << "\tsrcSubresource = " << value.srcSubresource << '\n';
+	s << "\tsrcOffsets = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.srcOffsets), DE_ARRAY_END(value.srcOffsets)) << '\n';
+	s << "\tdstSubresource = " << value.dstSubresource << '\n';
+	s << "\tdstOffsets = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.dstOffsets), DE_ARRAY_END(value.dstOffsets)) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkBufferImageCopy& value)
+{
+	s << "VkBufferImageCopy = {\n";
+	s << "\tbufferOffset = " << value.bufferOffset << '\n';
+	s << "\tbufferRowLength = " << value.bufferRowLength << '\n';
+	s << "\tbufferImageHeight = " << value.bufferImageHeight << '\n';
+	s << "\timageSubresource = " << value.imageSubresource << '\n';
+	s << "\timageOffset = " << value.imageOffset << '\n';
+	s << "\timageExtent = " << value.imageExtent << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkClearColorValue& value)
+{
+	s << "VkClearColorValue = {\n";
+	s << "\tfloat32 = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.float32), DE_ARRAY_END(value.float32)) << '\n';
+	s << "\tint32 = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.int32), DE_ARRAY_END(value.int32)) << '\n';
+	s << "\tuint32 = " << '\n' << tcu::formatArray(DE_ARRAY_BEGIN(value.uint32), DE_ARRAY_END(value.uint32)) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkClearDepthStencilValue& value)
+{
+	s << "VkClearDepthStencilValue = {\n";
+	s << "\tdepth = " << value.depth << '\n';
+	s << "\tstencil = " << value.stencil << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkClearValue& value)
+{
+	s << "VkClearValue = {\n";
+	s << "\tcolor = " << value.color << '\n';
+	s << "\tdepthStencil = " << value.depthStencil << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkClearAttachment& value)
+{
+	s << "VkClearAttachment = {\n";
+	s << "\taspectMask = " << getImageAspectFlagsStr(value.aspectMask) << '\n';
+	s << "\tcolorAttachment = " << value.colorAttachment << '\n';
+	s << "\tclearValue = " << value.clearValue << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkClearRect& value)
+{
+	s << "VkClearRect = {\n";
+	s << "\trect = " << value.rect << '\n';
+	s << "\tbaseArrayLayer = " << value.baseArrayLayer << '\n';
+	s << "\tlayerCount = " << value.layerCount << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkImageResolve& value)
+{
+	s << "VkImageResolve = {\n";
+	s << "\tsrcSubresource = " << value.srcSubresource << '\n';
+	s << "\tsrcOffset = " << value.srcOffset << '\n';
+	s << "\tdstSubresource = " << value.dstSubresource << '\n';
+	s << "\tdstOffset = " << value.dstOffset << '\n';
+	s << "\textent = " << value.extent << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkMemoryBarrier& value)
+{
+	s << "VkMemoryBarrier = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tsrcAccessMask = " << getAccessFlagsStr(value.srcAccessMask) << '\n';
+	s << "\tdstAccessMask = " << getAccessFlagsStr(value.dstAccessMask) << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkBufferMemoryBarrier& value)
+{
+	s << "VkBufferMemoryBarrier = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tsrcAccessMask = " << getAccessFlagsStr(value.srcAccessMask) << '\n';
+	s << "\tdstAccessMask = " << getAccessFlagsStr(value.dstAccessMask) << '\n';
+	s << "\tsrcQueueFamilyIndex = " << value.srcQueueFamilyIndex << '\n';
+	s << "\tdstQueueFamilyIndex = " << value.dstQueueFamilyIndex << '\n';
+	s << "\tbuffer = " << value.buffer << '\n';
+	s << "\toffset = " << value.offset << '\n';
+	s << "\tsize = " << value.size << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkImageMemoryBarrier& value)
+{
+	s << "VkImageMemoryBarrier = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\tsrcAccessMask = " << getAccessFlagsStr(value.srcAccessMask) << '\n';
+	s << "\tdstAccessMask = " << getAccessFlagsStr(value.dstAccessMask) << '\n';
+	s << "\toldLayout = " << value.oldLayout << '\n';
+	s << "\tnewLayout = " << value.newLayout << '\n';
+	s << "\tsrcQueueFamilyIndex = " << value.srcQueueFamilyIndex << '\n';
+	s << "\tdstQueueFamilyIndex = " << value.dstQueueFamilyIndex << '\n';
+	s << "\timage = " << value.image << '\n';
+	s << "\tsubresourceRange = " << value.subresourceRange << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkRenderPassBeginInfo& value)
+{
+	s << "VkRenderPassBeginInfo = {\n";
+	s << "\tsType = " << value.sType << '\n';
+	s << "\tpNext = " << value.pNext << '\n';
+	s << "\trenderPass = " << value.renderPass << '\n';
+	s << "\tframebuffer = " << value.framebuffer << '\n';
+	s << "\trenderArea = " << value.renderArea << '\n';
+	s << "\tclearValueCount = " << value.clearValueCount << '\n';
+	s << "\tpClearValues = " << value.pClearValues << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkDispatchIndirectCommand& value)
+{
+	s << "VkDispatchIndirectCommand = {\n";
+	s << "\tx = " << value.x << '\n';
+	s << "\ty = " << value.y << '\n';
+	s << "\tz = " << value.z << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkDrawIndexedIndirectCommand& value)
+{
+	s << "VkDrawIndexedIndirectCommand = {\n";
+	s << "\tindexCount = " << value.indexCount << '\n';
+	s << "\tinstanceCount = " << value.instanceCount << '\n';
+	s << "\tfirstIndex = " << value.firstIndex << '\n';
+	s << "\tvertexOffset = " << value.vertexOffset << '\n';
+	s << "\tfirstInstance = " << value.firstInstance << '\n';
+	s << '}';
+	return s;
+}
+
+std::ostream& operator<< (std::ostream& s, const VkDrawIndirectCommand& value)
+{
+	s << "VkDrawIndirectCommand = {\n";
+	s << "\tvertexCount = " << value.vertexCount << '\n';
+	s << "\tinstanceCount = " << value.instanceCount << '\n';
+	s << "\tfirstVertex = " << value.firstVertex << '\n';
+	s << "\tfirstInstance = " << value.firstInstance << '\n';
+	s << '}';
+	return s;
+}
diff --git a/external/vulkancts/framework/vulkan/vkStructTypes.inl b/external/vulkancts/framework/vulkan/vkStructTypes.inl
new file mode 100644
index 0000000..49d993c
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkStructTypes.inl
@@ -0,0 +1,1185 @@
+/* WARNING: This is auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+struct VkApplicationInfo
+{
+	VkStructureType	sType;
+	const void*		pNext;
+	const char*		pApplicationName;
+	deUint32		applicationVersion;
+	const char*		pEngineName;
+	deUint32		engineVersion;
+	deUint32		apiVersion;
+};
+
+struct VkInstanceCreateInfo
+{
+	VkStructureType				sType;
+	const void*					pNext;
+	VkInstanceCreateFlags		flags;
+	const VkApplicationInfo*	pApplicationInfo;
+	deUint32					enabledLayerCount;
+	const char* const*			ppEnabledLayerNames;
+	deUint32					enabledExtensionCount;
+	const char* const*			ppEnabledExtensionNames;
+};
+
+struct VkAllocationCallbacks
+{
+	void*									pUserData;
+	PFN_vkAllocationFunction				pfnAllocation;
+	PFN_vkReallocationFunction				pfnReallocation;
+	PFN_vkFreeFunction						pfnFree;
+	PFN_vkInternalAllocationNotification	pfnInternalAllocation;
+	PFN_vkInternalFreeNotification			pfnInternalFree;
+};
+
+struct VkPhysicalDeviceFeatures
+{
+	VkBool32	robustBufferAccess;
+	VkBool32	fullDrawIndexUint32;
+	VkBool32	imageCubeArray;
+	VkBool32	independentBlend;
+	VkBool32	geometryShader;
+	VkBool32	tessellationShader;
+	VkBool32	sampleRateShading;
+	VkBool32	dualSrcBlend;
+	VkBool32	logicOp;
+	VkBool32	multiDrawIndirect;
+	VkBool32	drawIndirectFirstInstance;
+	VkBool32	depthClamp;
+	VkBool32	depthBiasClamp;
+	VkBool32	fillModeNonSolid;
+	VkBool32	depthBounds;
+	VkBool32	wideLines;
+	VkBool32	largePoints;
+	VkBool32	alphaToOne;
+	VkBool32	multiViewport;
+	VkBool32	samplerAnisotropy;
+	VkBool32	textureCompressionETC2;
+	VkBool32	textureCompressionASTC_LDR;
+	VkBool32	textureCompressionBC;
+	VkBool32	occlusionQueryPrecise;
+	VkBool32	pipelineStatisticsQuery;
+	VkBool32	vertexPipelineStoresAndAtomics;
+	VkBool32	fragmentStoresAndAtomics;
+	VkBool32	shaderTessellationAndGeometryPointSize;
+	VkBool32	shaderImageGatherExtended;
+	VkBool32	shaderStorageImageExtendedFormats;
+	VkBool32	shaderStorageImageMultisample;
+	VkBool32	shaderStorageImageReadWithoutFormat;
+	VkBool32	shaderStorageImageWriteWithoutFormat;
+	VkBool32	shaderUniformBufferArrayDynamicIndexing;
+	VkBool32	shaderSampledImageArrayDynamicIndexing;
+	VkBool32	shaderStorageBufferArrayDynamicIndexing;
+	VkBool32	shaderStorageImageArrayDynamicIndexing;
+	VkBool32	shaderClipDistance;
+	VkBool32	shaderCullDistance;
+	VkBool32	shaderFloat64;
+	VkBool32	shaderInt64;
+	VkBool32	shaderInt16;
+	VkBool32	shaderResourceResidency;
+	VkBool32	shaderResourceMinLod;
+	VkBool32	sparseBinding;
+	VkBool32	sparseResidencyBuffer;
+	VkBool32	sparseResidencyImage2D;
+	VkBool32	sparseResidencyImage3D;
+	VkBool32	sparseResidency2Samples;
+	VkBool32	sparseResidency4Samples;
+	VkBool32	sparseResidency8Samples;
+	VkBool32	sparseResidency16Samples;
+	VkBool32	sparseResidencyAliased;
+	VkBool32	variableMultisampleRate;
+	VkBool32	inheritedQueries;
+};
+
+struct VkFormatProperties
+{
+	VkFormatFeatureFlags	linearTilingFeatures;
+	VkFormatFeatureFlags	optimalTilingFeatures;
+	VkFormatFeatureFlags	bufferFeatures;
+};
+
+struct VkExtent3D
+{
+	deUint32	width;
+	deUint32	height;
+	deUint32	depth;
+};
+
+struct VkImageFormatProperties
+{
+	VkExtent3D			maxExtent;
+	deUint32			maxMipLevels;
+	deUint32			maxArrayLayers;
+	VkSampleCountFlags	sampleCounts;
+	VkDeviceSize		maxResourceSize;
+};
+
+struct VkPhysicalDeviceLimits
+{
+	deUint32			maxImageDimension1D;
+	deUint32			maxImageDimension2D;
+	deUint32			maxImageDimension3D;
+	deUint32			maxImageDimensionCube;
+	deUint32			maxImageArrayLayers;
+	deUint32			maxTexelBufferElements;
+	deUint32			maxUniformBufferRange;
+	deUint32			maxStorageBufferRange;
+	deUint32			maxPushConstantsSize;
+	deUint32			maxMemoryAllocationCount;
+	deUint32			maxSamplerAllocationCount;
+	VkDeviceSize		bufferImageGranularity;
+	VkDeviceSize		sparseAddressSpaceSize;
+	deUint32			maxBoundDescriptorSets;
+	deUint32			maxPerStageDescriptorSamplers;
+	deUint32			maxPerStageDescriptorUniformBuffers;
+	deUint32			maxPerStageDescriptorStorageBuffers;
+	deUint32			maxPerStageDescriptorSampledImages;
+	deUint32			maxPerStageDescriptorStorageImages;
+	deUint32			maxPerStageDescriptorInputAttachments;
+	deUint32			maxPerStageResources;
+	deUint32			maxDescriptorSetSamplers;
+	deUint32			maxDescriptorSetUniformBuffers;
+	deUint32			maxDescriptorSetUniformBuffersDynamic;
+	deUint32			maxDescriptorSetStorageBuffers;
+	deUint32			maxDescriptorSetStorageBuffersDynamic;
+	deUint32			maxDescriptorSetSampledImages;
+	deUint32			maxDescriptorSetStorageImages;
+	deUint32			maxDescriptorSetInputAttachments;
+	deUint32			maxVertexInputAttributes;
+	deUint32			maxVertexInputBindings;
+	deUint32			maxVertexInputAttributeOffset;
+	deUint32			maxVertexInputBindingStride;
+	deUint32			maxVertexOutputComponents;
+	deUint32			maxTessellationGenerationLevel;
+	deUint32			maxTessellationPatchSize;
+	deUint32			maxTessellationControlPerVertexInputComponents;
+	deUint32			maxTessellationControlPerVertexOutputComponents;
+	deUint32			maxTessellationControlPerPatchOutputComponents;
+	deUint32			maxTessellationControlTotalOutputComponents;
+	deUint32			maxTessellationEvaluationInputComponents;
+	deUint32			maxTessellationEvaluationOutputComponents;
+	deUint32			maxGeometryShaderInvocations;
+	deUint32			maxGeometryInputComponents;
+	deUint32			maxGeometryOutputComponents;
+	deUint32			maxGeometryOutputVertices;
+	deUint32			maxGeometryTotalOutputComponents;
+	deUint32			maxFragmentInputComponents;
+	deUint32			maxFragmentOutputAttachments;
+	deUint32			maxFragmentDualSrcAttachments;
+	deUint32			maxFragmentCombinedOutputResources;
+	deUint32			maxComputeSharedMemorySize;
+	deUint32			maxComputeWorkGroupCount[3];
+	deUint32			maxComputeWorkGroupInvocations;
+	deUint32			maxComputeWorkGroupSize[3];
+	deUint32			subPixelPrecisionBits;
+	deUint32			subTexelPrecisionBits;
+	deUint32			mipmapPrecisionBits;
+	deUint32			maxDrawIndexedIndexValue;
+	deUint32			maxDrawIndirectCount;
+	float				maxSamplerLodBias;
+	float				maxSamplerAnisotropy;
+	deUint32			maxViewports;
+	deUint32			maxViewportDimensions[2];
+	float				viewportBoundsRange[2];
+	deUint32			viewportSubPixelBits;
+	deUintptr			minMemoryMapAlignment;
+	VkDeviceSize		minTexelBufferOffsetAlignment;
+	VkDeviceSize		minUniformBufferOffsetAlignment;
+	VkDeviceSize		minStorageBufferOffsetAlignment;
+	deInt32				minTexelOffset;
+	deUint32			maxTexelOffset;
+	deInt32				minTexelGatherOffset;
+	deUint32			maxTexelGatherOffset;
+	float				minInterpolationOffset;
+	float				maxInterpolationOffset;
+	deUint32			subPixelInterpolationOffsetBits;
+	deUint32			maxFramebufferWidth;
+	deUint32			maxFramebufferHeight;
+	deUint32			maxFramebufferLayers;
+	VkSampleCountFlags	framebufferColorSampleCounts;
+	VkSampleCountFlags	framebufferDepthSampleCounts;
+	VkSampleCountFlags	framebufferStencilSampleCounts;
+	VkSampleCountFlags	framebufferNoAttachmentsSampleCounts;
+	deUint32			maxColorAttachments;
+	VkSampleCountFlags	sampledImageColorSampleCounts;
+	VkSampleCountFlags	sampledImageIntegerSampleCounts;
+	VkSampleCountFlags	sampledImageDepthSampleCounts;
+	VkSampleCountFlags	sampledImageStencilSampleCounts;
+	VkSampleCountFlags	storageImageSampleCounts;
+	deUint32			maxSampleMaskWords;
+	VkBool32			timestampComputeAndGraphics;
+	float				timestampPeriod;
+	deUint32			maxClipDistances;
+	deUint32			maxCullDistances;
+	deUint32			maxCombinedClipAndCullDistances;
+	deUint32			discreteQueuePriorities;
+	float				pointSizeRange[2];
+	float				lineWidthRange[2];
+	float				pointSizeGranularity;
+	float				lineWidthGranularity;
+	VkBool32			strictLines;
+	VkBool32			standardSampleLocations;
+	VkDeviceSize		optimalBufferCopyOffsetAlignment;
+	VkDeviceSize		optimalBufferCopyRowPitchAlignment;
+	VkDeviceSize		nonCoherentAtomSize;
+};
+
+struct VkPhysicalDeviceSparseProperties
+{
+	VkBool32	residencyStandard2DBlockShape;
+	VkBool32	residencyStandard2DMultisampleBlockShape;
+	VkBool32	residencyStandard3DBlockShape;
+	VkBool32	residencyAlignedMipSize;
+	VkBool32	residencyNonResidentStrict;
+};
+
+struct VkPhysicalDeviceProperties
+{
+	deUint32							apiVersion;
+	deUint32							driverVersion;
+	deUint32							vendorID;
+	deUint32							deviceID;
+	VkPhysicalDeviceType				deviceType;
+	char								deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
+	deUint8								pipelineCacheUUID[VK_UUID_SIZE];
+	VkPhysicalDeviceLimits				limits;
+	VkPhysicalDeviceSparseProperties	sparseProperties;
+};
+
+struct VkQueueFamilyProperties
+{
+	VkQueueFlags	queueFlags;
+	deUint32		queueCount;
+	deUint32		timestampValidBits;
+	VkExtent3D		minImageTransferGranularity;
+};
+
+struct VkMemoryType
+{
+	VkMemoryPropertyFlags	propertyFlags;
+	deUint32				heapIndex;
+};
+
+struct VkMemoryHeap
+{
+	VkDeviceSize		size;
+	VkMemoryHeapFlags	flags;
+};
+
+struct VkPhysicalDeviceMemoryProperties
+{
+	deUint32		memoryTypeCount;
+	VkMemoryType	memoryTypes[VK_MAX_MEMORY_TYPES];
+	deUint32		memoryHeapCount;
+	VkMemoryHeap	memoryHeaps[VK_MAX_MEMORY_HEAPS];
+};
+
+struct VkDeviceQueueCreateInfo
+{
+	VkStructureType				sType;
+	const void*					pNext;
+	VkDeviceQueueCreateFlags	flags;
+	deUint32					queueFamilyIndex;
+	deUint32					queueCount;
+	const float*				pQueuePriorities;
+};
+
+struct VkDeviceCreateInfo
+{
+	VkStructureType					sType;
+	const void*						pNext;
+	VkDeviceCreateFlags				flags;
+	deUint32						queueCreateInfoCount;
+	const VkDeviceQueueCreateInfo*	pQueueCreateInfos;
+	deUint32						enabledLayerCount;
+	const char* const*				ppEnabledLayerNames;
+	deUint32						enabledExtensionCount;
+	const char* const*				ppEnabledExtensionNames;
+	const VkPhysicalDeviceFeatures*	pEnabledFeatures;
+};
+
+struct VkExtensionProperties
+{
+	char		extensionName[VK_MAX_EXTENSION_NAME_SIZE];
+	deUint32	specVersion;
+};
+
+struct VkLayerProperties
+{
+	char		layerName[VK_MAX_EXTENSION_NAME_SIZE];
+	deUint32	specVersion;
+	deUint32	implementationVersion;
+	char		description[VK_MAX_DESCRIPTION_SIZE];
+};
+
+struct VkSubmitInfo
+{
+	VkStructureType				sType;
+	const void*					pNext;
+	deUint32					waitSemaphoreCount;
+	const VkSemaphore*			pWaitSemaphores;
+	const VkPipelineStageFlags*	pWaitDstStageMask;
+	deUint32					commandBufferCount;
+	const VkCommandBuffer*		pCommandBuffers;
+	deUint32					signalSemaphoreCount;
+	const VkSemaphore*			pSignalSemaphores;
+};
+
+struct VkMemoryAllocateInfo
+{
+	VkStructureType	sType;
+	const void*		pNext;
+	VkDeviceSize	allocationSize;
+	deUint32		memoryTypeIndex;
+};
+
+struct VkMappedMemoryRange
+{
+	VkStructureType	sType;
+	const void*		pNext;
+	VkDeviceMemory	memory;
+	VkDeviceSize	offset;
+	VkDeviceSize	size;
+};
+
+struct VkMemoryRequirements
+{
+	VkDeviceSize	size;
+	VkDeviceSize	alignment;
+	deUint32		memoryTypeBits;
+};
+
+struct VkSparseImageFormatProperties
+{
+	VkImageAspectFlags			aspectMask;
+	VkExtent3D					imageGranularity;
+	VkSparseImageFormatFlags	flags;
+};
+
+struct VkSparseImageMemoryRequirements
+{
+	VkSparseImageFormatProperties	formatProperties;
+	deUint32						imageMipTailFirstLod;
+	VkDeviceSize					imageMipTailSize;
+	VkDeviceSize					imageMipTailOffset;
+	VkDeviceSize					imageMipTailStride;
+};
+
+struct VkSparseMemoryBind
+{
+	VkDeviceSize			resourceOffset;
+	VkDeviceSize			size;
+	VkDeviceMemory			memory;
+	VkDeviceSize			memoryOffset;
+	VkSparseMemoryBindFlags	flags;
+};
+
+struct VkSparseBufferMemoryBindInfo
+{
+	VkBuffer					buffer;
+	deUint32					bindCount;
+	const VkSparseMemoryBind*	pBinds;
+};
+
+struct VkSparseImageOpaqueMemoryBindInfo
+{
+	VkImage						image;
+	deUint32					bindCount;
+	const VkSparseMemoryBind*	pBinds;
+};
+
+struct VkImageSubresource
+{
+	VkImageAspectFlags	aspectMask;
+	deUint32			mipLevel;
+	deUint32			arrayLayer;
+};
+
+struct VkOffset3D
+{
+	deInt32	x;
+	deInt32	y;
+	deInt32	z;
+};
+
+struct VkSparseImageMemoryBind
+{
+	VkImageSubresource		subresource;
+	VkOffset3D				offset;
+	VkExtent3D				extent;
+	VkDeviceMemory			memory;
+	VkDeviceSize			memoryOffset;
+	VkSparseMemoryBindFlags	flags;
+};
+
+struct VkSparseImageMemoryBindInfo
+{
+	VkImage							image;
+	deUint32						bindCount;
+	const VkSparseImageMemoryBind*	pBinds;
+};
+
+struct VkBindSparseInfo
+{
+	VkStructureType								sType;
+	const void*									pNext;
+	deUint32									waitSemaphoreCount;
+	const VkSemaphore*							pWaitSemaphores;
+	deUint32									bufferBindCount;
+	const VkSparseBufferMemoryBindInfo*			pBufferBinds;
+	deUint32									imageOpaqueBindCount;
+	const VkSparseImageOpaqueMemoryBindInfo*	pImageOpaqueBinds;
+	deUint32									imageBindCount;
+	const VkSparseImageMemoryBindInfo*			pImageBinds;
+	deUint32									signalSemaphoreCount;
+	const VkSemaphore*							pSignalSemaphores;
+};
+
+struct VkFenceCreateInfo
+{
+	VkStructureType		sType;
+	const void*			pNext;
+	VkFenceCreateFlags	flags;
+};
+
+struct VkSemaphoreCreateInfo
+{
+	VkStructureType			sType;
+	const void*				pNext;
+	VkSemaphoreCreateFlags	flags;
+};
+
+struct VkEventCreateInfo
+{
+	VkStructureType		sType;
+	const void*			pNext;
+	VkEventCreateFlags	flags;
+};
+
+struct VkQueryPoolCreateInfo
+{
+	VkStructureType					sType;
+	const void*						pNext;
+	VkQueryPoolCreateFlags			flags;
+	VkQueryType						queryType;
+	deUint32						queryCount;
+	VkQueryPipelineStatisticFlags	pipelineStatistics;
+};
+
+struct VkBufferCreateInfo
+{
+	VkStructureType		sType;
+	const void*			pNext;
+	VkBufferCreateFlags	flags;
+	VkDeviceSize		size;
+	VkBufferUsageFlags	usage;
+	VkSharingMode		sharingMode;
+	deUint32			queueFamilyIndexCount;
+	const deUint32*		pQueueFamilyIndices;
+};
+
+struct VkBufferViewCreateInfo
+{
+	VkStructureType			sType;
+	const void*				pNext;
+	VkBufferViewCreateFlags	flags;
+	VkBuffer				buffer;
+	VkFormat				format;
+	VkDeviceSize			offset;
+	VkDeviceSize			range;
+};
+
+struct VkImageCreateInfo
+{
+	VkStructureType			sType;
+	const void*				pNext;
+	VkImageCreateFlags		flags;
+	VkImageType				imageType;
+	VkFormat				format;
+	VkExtent3D				extent;
+	deUint32				mipLevels;
+	deUint32				arrayLayers;
+	VkSampleCountFlagBits	samples;
+	VkImageTiling			tiling;
+	VkImageUsageFlags		usage;
+	VkSharingMode			sharingMode;
+	deUint32				queueFamilyIndexCount;
+	const deUint32*			pQueueFamilyIndices;
+	VkImageLayout			initialLayout;
+};
+
+struct VkSubresourceLayout
+{
+	VkDeviceSize	offset;
+	VkDeviceSize	size;
+	VkDeviceSize	rowPitch;
+	VkDeviceSize	arrayPitch;
+	VkDeviceSize	depthPitch;
+};
+
+struct VkComponentMapping
+{
+	VkComponentSwizzle	r;
+	VkComponentSwizzle	g;
+	VkComponentSwizzle	b;
+	VkComponentSwizzle	a;
+};
+
+struct VkImageSubresourceRange
+{
+	VkImageAspectFlags	aspectMask;
+	deUint32			baseMipLevel;
+	deUint32			levelCount;
+	deUint32			baseArrayLayer;
+	deUint32			layerCount;
+};
+
+struct VkImageViewCreateInfo
+{
+	VkStructureType			sType;
+	const void*				pNext;
+	VkImageViewCreateFlags	flags;
+	VkImage					image;
+	VkImageViewType			viewType;
+	VkFormat				format;
+	VkComponentMapping		components;
+	VkImageSubresourceRange	subresourceRange;
+};
+
+struct VkShaderModuleCreateInfo
+{
+	VkStructureType				sType;
+	const void*					pNext;
+	VkShaderModuleCreateFlags	flags;
+	deUintptr					codeSize;
+	const deUint32*				pCode;
+};
+
+struct VkPipelineCacheCreateInfo
+{
+	VkStructureType				sType;
+	const void*					pNext;
+	VkPipelineCacheCreateFlags	flags;
+	deUintptr					initialDataSize;
+	const void*					pInitialData;
+};
+
+struct VkSpecializationMapEntry
+{
+	deUint32	constantID;
+	deUint32	offset;
+	deUintptr	size;
+};
+
+struct VkSpecializationInfo
+{
+	deUint32						mapEntryCount;
+	const VkSpecializationMapEntry*	pMapEntries;
+	deUintptr						dataSize;
+	const void*						pData;
+};
+
+struct VkPipelineShaderStageCreateInfo
+{
+	VkStructureType						sType;
+	const void*							pNext;
+	VkPipelineShaderStageCreateFlags	flags;
+	VkShaderStageFlagBits				stage;
+	VkShaderModule						module;
+	const char*							pName;
+	const VkSpecializationInfo*			pSpecializationInfo;
+};
+
+struct VkVertexInputBindingDescription
+{
+	deUint32			binding;
+	deUint32			stride;
+	VkVertexInputRate	inputRate;
+};
+
+struct VkVertexInputAttributeDescription
+{
+	deUint32	location;
+	deUint32	binding;
+	VkFormat	format;
+	deUint32	offset;
+};
+
+struct VkPipelineVertexInputStateCreateInfo
+{
+	VkStructureType								sType;
+	const void*									pNext;
+	VkPipelineVertexInputStateCreateFlags		flags;
+	deUint32									vertexBindingDescriptionCount;
+	const VkVertexInputBindingDescription*		pVertexBindingDescriptions;
+	deUint32									vertexAttributeDescriptionCount;
+	const VkVertexInputAttributeDescription*	pVertexAttributeDescriptions;
+};
+
+struct VkPipelineInputAssemblyStateCreateInfo
+{
+	VkStructureType							sType;
+	const void*								pNext;
+	VkPipelineInputAssemblyStateCreateFlags	flags;
+	VkPrimitiveTopology						topology;
+	VkBool32								primitiveRestartEnable;
+};
+
+struct VkPipelineTessellationStateCreateInfo
+{
+	VkStructureType							sType;
+	const void*								pNext;
+	VkPipelineTessellationStateCreateFlags	flags;
+	deUint32								patchControlPoints;
+};
+
+struct VkViewport
+{
+	float	x;
+	float	y;
+	float	width;
+	float	height;
+	float	minDepth;
+	float	maxDepth;
+};
+
+struct VkOffset2D
+{
+	deInt32	x;
+	deInt32	y;
+};
+
+struct VkExtent2D
+{
+	deUint32	width;
+	deUint32	height;
+};
+
+struct VkRect2D
+{
+	VkOffset2D	offset;
+	VkExtent2D	extent;
+};
+
+struct VkPipelineViewportStateCreateInfo
+{
+	VkStructureType						sType;
+	const void*							pNext;
+	VkPipelineViewportStateCreateFlags	flags;
+	deUint32							viewportCount;
+	const VkViewport*					pViewports;
+	deUint32							scissorCount;
+	const VkRect2D*						pScissors;
+};
+
+struct VkPipelineRasterizationStateCreateInfo
+{
+	VkStructureType							sType;
+	const void*								pNext;
+	VkPipelineRasterizationStateCreateFlags	flags;
+	VkBool32								depthClampEnable;
+	VkBool32								rasterizerDiscardEnable;
+	VkPolygonMode							polygonMode;
+	VkCullModeFlags							cullMode;
+	VkFrontFace								frontFace;
+	VkBool32								depthBiasEnable;
+	float									depthBiasConstantFactor;
+	float									depthBiasClamp;
+	float									depthBiasSlopeFactor;
+	float									lineWidth;
+};
+
+struct VkPipelineMultisampleStateCreateInfo
+{
+	VkStructureType							sType;
+	const void*								pNext;
+	VkPipelineMultisampleStateCreateFlags	flags;
+	VkSampleCountFlagBits					rasterizationSamples;
+	VkBool32								sampleShadingEnable;
+	float									minSampleShading;
+	const VkSampleMask*						pSampleMask;
+	VkBool32								alphaToCoverageEnable;
+	VkBool32								alphaToOneEnable;
+};
+
+struct VkStencilOpState
+{
+	VkStencilOp	failOp;
+	VkStencilOp	passOp;
+	VkStencilOp	depthFailOp;
+	VkCompareOp	compareOp;
+	deUint32	compareMask;
+	deUint32	writeMask;
+	deUint32	reference;
+};
+
+struct VkPipelineDepthStencilStateCreateInfo
+{
+	VkStructureType							sType;
+	const void*								pNext;
+	VkPipelineDepthStencilStateCreateFlags	flags;
+	VkBool32								depthTestEnable;
+	VkBool32								depthWriteEnable;
+	VkCompareOp								depthCompareOp;
+	VkBool32								depthBoundsTestEnable;
+	VkBool32								stencilTestEnable;
+	VkStencilOpState						front;
+	VkStencilOpState						back;
+	float									minDepthBounds;
+	float									maxDepthBounds;
+};
+
+struct VkPipelineColorBlendAttachmentState
+{
+	VkBool32				blendEnable;
+	VkBlendFactor			srcColorBlendFactor;
+	VkBlendFactor			dstColorBlendFactor;
+	VkBlendOp				colorBlendOp;
+	VkBlendFactor			srcAlphaBlendFactor;
+	VkBlendFactor			dstAlphaBlendFactor;
+	VkBlendOp				alphaBlendOp;
+	VkColorComponentFlags	colorWriteMask;
+};
+
+struct VkPipelineColorBlendStateCreateInfo
+{
+	VkStructureType								sType;
+	const void*									pNext;
+	VkPipelineColorBlendStateCreateFlags		flags;
+	VkBool32									logicOpEnable;
+	VkLogicOp									logicOp;
+	deUint32									attachmentCount;
+	const VkPipelineColorBlendAttachmentState*	pAttachments;
+	float										blendConstants[4];
+};
+
+struct VkPipelineDynamicStateCreateInfo
+{
+	VkStructureType						sType;
+	const void*							pNext;
+	VkPipelineDynamicStateCreateFlags	flags;
+	deUint32							dynamicStateCount;
+	const VkDynamicState*				pDynamicStates;
+};
+
+struct VkGraphicsPipelineCreateInfo
+{
+	VkStructureType									sType;
+	const void*										pNext;
+	VkPipelineCreateFlags							flags;
+	deUint32										stageCount;
+	const VkPipelineShaderStageCreateInfo*			pStages;
+	const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+	const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+	const VkPipelineTessellationStateCreateInfo*	pTessellationState;
+	const VkPipelineViewportStateCreateInfo*		pViewportState;
+	const VkPipelineRasterizationStateCreateInfo*	pRasterizationState;
+	const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+	const VkPipelineDepthStencilStateCreateInfo*	pDepthStencilState;
+	const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+	const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+	VkPipelineLayout								layout;
+	VkRenderPass									renderPass;
+	deUint32										subpass;
+	VkPipeline										basePipelineHandle;
+	deInt32											basePipelineIndex;
+};
+
+struct VkComputePipelineCreateInfo
+{
+	VkStructureType					sType;
+	const void*						pNext;
+	VkPipelineCreateFlags			flags;
+	VkPipelineShaderStageCreateInfo	stage;
+	VkPipelineLayout				layout;
+	VkPipeline						basePipelineHandle;
+	deInt32							basePipelineIndex;
+};
+
+struct VkPushConstantRange
+{
+	VkShaderStageFlags	stageFlags;
+	deUint32			offset;
+	deUint32			size;
+};
+
+struct VkPipelineLayoutCreateInfo
+{
+	VkStructureType					sType;
+	const void*						pNext;
+	VkPipelineLayoutCreateFlags		flags;
+	deUint32						setLayoutCount;
+	const VkDescriptorSetLayout*	pSetLayouts;
+	deUint32						pushConstantRangeCount;
+	const VkPushConstantRange*		pPushConstantRanges;
+};
+
+struct VkSamplerCreateInfo
+{
+	VkStructureType			sType;
+	const void*				pNext;
+	VkSamplerCreateFlags	flags;
+	VkFilter				magFilter;
+	VkFilter				minFilter;
+	VkSamplerMipmapMode		mipmapMode;
+	VkSamplerAddressMode	addressModeU;
+	VkSamplerAddressMode	addressModeV;
+	VkSamplerAddressMode	addressModeW;
+	float					mipLodBias;
+	VkBool32				anisotropyEnable;
+	float					maxAnisotropy;
+	VkBool32				compareEnable;
+	VkCompareOp				compareOp;
+	float					minLod;
+	float					maxLod;
+	VkBorderColor			borderColor;
+	VkBool32				unnormalizedCoordinates;
+};
+
+struct VkDescriptorSetLayoutBinding
+{
+	deUint32			binding;
+	VkDescriptorType	descriptorType;
+	deUint32			descriptorCount;
+	VkShaderStageFlags	stageFlags;
+	const VkSampler*	pImmutableSamplers;
+};
+
+struct VkDescriptorSetLayoutCreateInfo
+{
+	VkStructureType						sType;
+	const void*							pNext;
+	VkDescriptorSetLayoutCreateFlags	flags;
+	deUint32							bindingCount;
+	const VkDescriptorSetLayoutBinding*	pBindings;
+};
+
+struct VkDescriptorPoolSize
+{
+	VkDescriptorType	type;
+	deUint32			descriptorCount;
+};
+
+struct VkDescriptorPoolCreateInfo
+{
+	VkStructureType				sType;
+	const void*					pNext;
+	VkDescriptorPoolCreateFlags	flags;
+	deUint32					maxSets;
+	deUint32					poolSizeCount;
+	const VkDescriptorPoolSize*	pPoolSizes;
+};
+
+struct VkDescriptorSetAllocateInfo
+{
+	VkStructureType					sType;
+	const void*						pNext;
+	VkDescriptorPool				descriptorPool;
+	deUint32						descriptorSetCount;
+	const VkDescriptorSetLayout*	pSetLayouts;
+};
+
+struct VkDescriptorImageInfo
+{
+	VkSampler		sampler;
+	VkImageView		imageView;
+	VkImageLayout	imageLayout;
+};
+
+struct VkDescriptorBufferInfo
+{
+	VkBuffer		buffer;
+	VkDeviceSize	offset;
+	VkDeviceSize	range;
+};
+
+struct VkWriteDescriptorSet
+{
+	VkStructureType					sType;
+	const void*						pNext;
+	VkDescriptorSet					dstSet;
+	deUint32						dstBinding;
+	deUint32						dstArrayElement;
+	deUint32						descriptorCount;
+	VkDescriptorType				descriptorType;
+	const VkDescriptorImageInfo*	pImageInfo;
+	const VkDescriptorBufferInfo*	pBufferInfo;
+	const VkBufferView*				pTexelBufferView;
+};
+
+struct VkCopyDescriptorSet
+{
+	VkStructureType	sType;
+	const void*		pNext;
+	VkDescriptorSet	srcSet;
+	deUint32		srcBinding;
+	deUint32		srcArrayElement;
+	VkDescriptorSet	dstSet;
+	deUint32		dstBinding;
+	deUint32		dstArrayElement;
+	deUint32		descriptorCount;
+};
+
+struct VkFramebufferCreateInfo
+{
+	VkStructureType				sType;
+	const void*					pNext;
+	VkFramebufferCreateFlags	flags;
+	VkRenderPass				renderPass;
+	deUint32					attachmentCount;
+	const VkImageView*			pAttachments;
+	deUint32					width;
+	deUint32					height;
+	deUint32					layers;
+};
+
+struct VkAttachmentDescription
+{
+	VkAttachmentDescriptionFlags	flags;
+	VkFormat						format;
+	VkSampleCountFlagBits			samples;
+	VkAttachmentLoadOp				loadOp;
+	VkAttachmentStoreOp				storeOp;
+	VkAttachmentLoadOp				stencilLoadOp;
+	VkAttachmentStoreOp				stencilStoreOp;
+	VkImageLayout					initialLayout;
+	VkImageLayout					finalLayout;
+};
+
+struct VkAttachmentReference
+{
+	deUint32		attachment;
+	VkImageLayout	layout;
+};
+
+struct VkSubpassDescription
+{
+	VkSubpassDescriptionFlags		flags;
+	VkPipelineBindPoint				pipelineBindPoint;
+	deUint32						inputAttachmentCount;
+	const VkAttachmentReference*	pInputAttachments;
+	deUint32						colorAttachmentCount;
+	const VkAttachmentReference*	pColorAttachments;
+	const VkAttachmentReference*	pResolveAttachments;
+	const VkAttachmentReference*	pDepthStencilAttachment;
+	deUint32						preserveAttachmentCount;
+	const deUint32*					pPreserveAttachments;
+};
+
+struct VkSubpassDependency
+{
+	deUint32				srcSubpass;
+	deUint32				dstSubpass;
+	VkPipelineStageFlags	srcStageMask;
+	VkPipelineStageFlags	dstStageMask;
+	VkAccessFlags			srcAccessMask;
+	VkAccessFlags			dstAccessMask;
+	VkDependencyFlags		dependencyFlags;
+};
+
+struct VkRenderPassCreateInfo
+{
+	VkStructureType					sType;
+	const void*						pNext;
+	VkRenderPassCreateFlags			flags;
+	deUint32						attachmentCount;
+	const VkAttachmentDescription*	pAttachments;
+	deUint32						subpassCount;
+	const VkSubpassDescription*		pSubpasses;
+	deUint32						dependencyCount;
+	const VkSubpassDependency*		pDependencies;
+};
+
+struct VkCommandPoolCreateInfo
+{
+	VkStructureType				sType;
+	const void*					pNext;
+	VkCommandPoolCreateFlags	flags;
+	deUint32					queueFamilyIndex;
+};
+
+struct VkCommandBufferAllocateInfo
+{
+	VkStructureType			sType;
+	const void*				pNext;
+	VkCommandPool			commandPool;
+	VkCommandBufferLevel	level;
+	deUint32				commandBufferCount;
+};
+
+struct VkCommandBufferInheritanceInfo
+{
+	VkStructureType					sType;
+	const void*						pNext;
+	VkRenderPass					renderPass;
+	deUint32						subpass;
+	VkFramebuffer					framebuffer;
+	VkBool32						occlusionQueryEnable;
+	VkQueryControlFlags				queryFlags;
+	VkQueryPipelineStatisticFlags	pipelineStatistics;
+};
+
+struct VkCommandBufferBeginInfo
+{
+	VkStructureType							sType;
+	const void*								pNext;
+	VkCommandBufferUsageFlags				flags;
+	const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+};
+
+struct VkBufferCopy
+{
+	VkDeviceSize	srcOffset;
+	VkDeviceSize	dstOffset;
+	VkDeviceSize	size;
+};
+
+struct VkImageSubresourceLayers
+{
+	VkImageAspectFlags	aspectMask;
+	deUint32			mipLevel;
+	deUint32			baseArrayLayer;
+	deUint32			layerCount;
+};
+
+struct VkImageCopy
+{
+	VkImageSubresourceLayers	srcSubresource;
+	VkOffset3D					srcOffset;
+	VkImageSubresourceLayers	dstSubresource;
+	VkOffset3D					dstOffset;
+	VkExtent3D					extent;
+};
+
+struct VkImageBlit
+{
+	VkImageSubresourceLayers	srcSubresource;
+	VkOffset3D					srcOffsets[2];
+	VkImageSubresourceLayers	dstSubresource;
+	VkOffset3D					dstOffsets[2];
+};
+
+struct VkBufferImageCopy
+{
+	VkDeviceSize				bufferOffset;
+	deUint32					bufferRowLength;
+	deUint32					bufferImageHeight;
+	VkImageSubresourceLayers	imageSubresource;
+	VkOffset3D					imageOffset;
+	VkExtent3D					imageExtent;
+};
+
+union VkClearColorValue
+{
+	float		float32[4];
+	deInt32		int32[4];
+	deUint32	uint32[4];
+};
+
+struct VkClearDepthStencilValue
+{
+	float		depth;
+	deUint32	stencil;
+};
+
+union VkClearValue
+{
+	VkClearColorValue			color;
+	VkClearDepthStencilValue	depthStencil;
+};
+
+struct VkClearAttachment
+{
+	VkImageAspectFlags	aspectMask;
+	deUint32			colorAttachment;
+	VkClearValue		clearValue;
+};
+
+struct VkClearRect
+{
+	VkRect2D	rect;
+	deUint32	baseArrayLayer;
+	deUint32	layerCount;
+};
+
+struct VkImageResolve
+{
+	VkImageSubresourceLayers	srcSubresource;
+	VkOffset3D					srcOffset;
+	VkImageSubresourceLayers	dstSubresource;
+	VkOffset3D					dstOffset;
+	VkExtent3D					extent;
+};
+
+struct VkMemoryBarrier
+{
+	VkStructureType	sType;
+	const void*		pNext;
+	VkAccessFlags	srcAccessMask;
+	VkAccessFlags	dstAccessMask;
+};
+
+struct VkBufferMemoryBarrier
+{
+	VkStructureType	sType;
+	const void*		pNext;
+	VkAccessFlags	srcAccessMask;
+	VkAccessFlags	dstAccessMask;
+	deUint32		srcQueueFamilyIndex;
+	deUint32		dstQueueFamilyIndex;
+	VkBuffer		buffer;
+	VkDeviceSize	offset;
+	VkDeviceSize	size;
+};
+
+struct VkImageMemoryBarrier
+{
+	VkStructureType			sType;
+	const void*				pNext;
+	VkAccessFlags			srcAccessMask;
+	VkAccessFlags			dstAccessMask;
+	VkImageLayout			oldLayout;
+	VkImageLayout			newLayout;
+	deUint32				srcQueueFamilyIndex;
+	deUint32				dstQueueFamilyIndex;
+	VkImage					image;
+	VkImageSubresourceRange	subresourceRange;
+};
+
+struct VkRenderPassBeginInfo
+{
+	VkStructureType		sType;
+	const void*			pNext;
+	VkRenderPass		renderPass;
+	VkFramebuffer		framebuffer;
+	VkRect2D			renderArea;
+	deUint32			clearValueCount;
+	const VkClearValue*	pClearValues;
+};
+
+struct VkDispatchIndirectCommand
+{
+	deUint32	x;
+	deUint32	y;
+	deUint32	z;
+};
+
+struct VkDrawIndexedIndirectCommand
+{
+	deUint32	indexCount;
+	deUint32	instanceCount;
+	deUint32	firstIndex;
+	deInt32		vertexOffset;
+	deUint32	firstInstance;
+};
+
+struct VkDrawIndirectCommand
+{
+	deUint32	vertexCount;
+	deUint32	instanceCount;
+	deUint32	firstVertex;
+	deUint32	firstInstance;
+};
+
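For orientation, a minimal sketch of how one of the structs above is filled in by hand in test code. This aside is not part of the diff; the VK_* enum values and DE_NULL come from the surrounding framework headers, and the size and usage flags here are placeholder assumptions.

	// Hypothetical example: filling VkBufferCreateInfo manually.
	VkBufferCreateInfo bufferParams;
	bufferParams.sType					= VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
	bufferParams.pNext					= DE_NULL;
	bufferParams.flags					= 0u;
	bufferParams.size					= (VkDeviceSize)1024;				// placeholder size
	bufferParams.usage					= VK_BUFFER_USAGE_TRANSFER_SRC_BIT;	// placeholder usage
	bufferParams.sharingMode			= VK_SHARING_MODE_EXCLUSIVE;
	bufferParams.queueFamilyIndexCount	= 0u;								// ignored for exclusive sharing
	bufferParams.pQueueFamilyIndices	= DE_NULL;
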
diff --git a/external/vulkancts/framework/vulkan/vkTypeUtil.cpp b/external/vulkancts/framework/vulkan/vkTypeUtil.cpp
new file mode 100644
index 0000000..a27f7f1
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkTypeUtil.cpp
@@ -0,0 +1,37 @@
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Utilities for creating commonly used composite types.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkTypeUtil.hpp"
+
+DE_EMPTY_CPP_FILE
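DE_EMPTY_CPP_FILE appears to be the framework's way of keeping this otherwise header-only component buildable as its own translation unit without triggering empty-file warnings; the actual helpers live in vkTypeUtil.hpp and the generated vkTypeUtil.inl below.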
diff --git a/external/vulkancts/framework/vulkan/vkTypeUtil.hpp b/external/vulkancts/framework/vulkan/vkTypeUtil.hpp
new file mode 100644
index 0000000..f63f8c2
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkTypeUtil.hpp
@@ -0,0 +1,90 @@
+#ifndef _VKTYPEUTIL_HPP
+#define _VKTYPEUTIL_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan CTS Framework
+ * --------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Utilities for creating commonly used composite types.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "tcuVector.hpp"
+
+namespace vk
+{
+
+#include "vkTypeUtil.inl"
+
+inline VkClearValue makeClearValueColorF32 (float r, float g, float b, float a)
+{
+	VkClearValue v;
+	v.color.float32[0] = r;
+	v.color.float32[1] = g;
+	v.color.float32[2] = b;
+	v.color.float32[3] = a;
+	return v;
+}
+
+inline VkClearValue makeClearValueColor (const tcu::Vec4& color)
+{
+	VkClearValue v;
+	v.color.float32[0] = color[0];
+	v.color.float32[1] = color[1];
+	v.color.float32[2] = color[2];
+	v.color.float32[3] = color[3];
+	return v;
+}
+
+inline VkClearValue makeClearValueDepthStencil (float depth, deUint32 stencil)
+{
+	VkClearValue v;
+	v.depthStencil.depth	= depth;
+	v.depthStencil.stencil	= stencil;
+	return v;
+}
+
+inline VkClearValue makeClearValue (VkClearColorValue color)
+{
+	VkClearValue v;
+	v.color = color;
+	return v;
+}
+
+inline VkComponentMapping makeComponentMappingRGBA (void)
+{
+	return makeComponentMapping(VK_COMPONENT_SWIZZLE_R,
+								VK_COMPONENT_SWIZZLE_G,
+								VK_COMPONENT_SWIZZLE_B,
+								VK_COMPONENT_SWIZZLE_A);
+}
+
+} // vk
+
+#endif // _VKTYPEUTIL_HPP
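A minimal usage sketch for the hand-written helpers in this header, not part of the diff. It assumes "using namespace vk;", an existing VkImage handle named image, and a color format; everything else comes from the declarations above and from vkTypeUtil.inl.

	// Hypothetical example: building an image view create info with the helpers.
	const VkComponentMapping		components	= makeComponentMappingRGBA();
	const VkImageSubresourceRange	colorRange	= makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
	const VkClearValue				clearColor	= makeClearValueColorF32(0.0f, 0.0f, 0.0f, 1.0f);	// would typically feed VkRenderPassBeginInfo::pClearValues

	VkImageViewCreateInfo viewParams;
	viewParams.sType			= VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
	viewParams.pNext			= DE_NULL;
	viewParams.flags			= 0u;
	viewParams.image			= image;						// assumed existing VkImage
	viewParams.viewType			= VK_IMAGE_VIEW_TYPE_2D;
	viewParams.format			= VK_FORMAT_R8G8B8A8_UNORM;		// assumed color format
	viewParams.components		= components;
	viewParams.subresourceRange	= colorRange;
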
diff --git a/external/vulkancts/framework/vulkan/vkTypeUtil.inl b/external/vulkancts/framework/vulkan/vkTypeUtil.inl
new file mode 100644
index 0000000..52f65d6
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkTypeUtil.inl
@@ -0,0 +1,369 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+
+inline VkAllocationCallbacks makeAllocationCallbacks (void* pUserData, PFN_vkAllocationFunction pfnAllocation, PFN_vkReallocationFunction pfnReallocation, PFN_vkFreeFunction pfnFree, PFN_vkInternalAllocationNotification pfnInternalAllocation, PFN_vkInternalFreeNotification pfnInternalFree)
+{
+	VkAllocationCallbacks res;
+	res.pUserData				= pUserData;
+	res.pfnAllocation			= pfnAllocation;
+	res.pfnReallocation			= pfnReallocation;
+	res.pfnFree					= pfnFree;
+	res.pfnInternalAllocation	= pfnInternalAllocation;
+	res.pfnInternalFree			= pfnInternalFree;
+	return res;
+}
+
+inline VkExtent3D makeExtent3D (deUint32 width, deUint32 height, deUint32 depth)
+{
+	VkExtent3D res;
+	res.width	= width;
+	res.height	= height;
+	res.depth	= depth;
+	return res;
+}
+
+inline VkMemoryRequirements makeMemoryRequirements (VkDeviceSize size, VkDeviceSize alignment, deUint32 memoryTypeBits)
+{
+	VkMemoryRequirements res;
+	res.size			= size;
+	res.alignment		= alignment;
+	res.memoryTypeBits	= memoryTypeBits;
+	return res;
+}
+
+inline VkSparseMemoryBind makeSparseMemoryBind (VkDeviceSize resourceOffset, VkDeviceSize size, VkDeviceMemory memory, VkDeviceSize memoryOffset, VkSparseMemoryBindFlags flags)
+{
+	VkSparseMemoryBind res;
+	res.resourceOffset	= resourceOffset;
+	res.size			= size;
+	res.memory			= memory;
+	res.memoryOffset	= memoryOffset;
+	res.flags			= flags;
+	return res;
+}
+
+inline VkSparseBufferMemoryBindInfo makeSparseBufferMemoryBindInfo (VkBuffer buffer, deUint32 bindCount, const VkSparseMemoryBind* pBinds)
+{
+	VkSparseBufferMemoryBindInfo res;
+	res.buffer		= buffer;
+	res.bindCount	= bindCount;
+	res.pBinds		= pBinds;
+	return res;
+}
+
+inline VkSparseImageOpaqueMemoryBindInfo makeSparseImageOpaqueMemoryBindInfo (VkImage image, deUint32 bindCount, const VkSparseMemoryBind* pBinds)
+{
+	VkSparseImageOpaqueMemoryBindInfo res;
+	res.image		= image;
+	res.bindCount	= bindCount;
+	res.pBinds		= pBinds;
+	return res;
+}
+
+inline VkImageSubresource makeImageSubresource (VkImageAspectFlags aspectMask, deUint32 mipLevel, deUint32 arrayLayer)
+{
+	VkImageSubresource res;
+	res.aspectMask	= aspectMask;
+	res.mipLevel	= mipLevel;
+	res.arrayLayer	= arrayLayer;
+	return res;
+}
+
+inline VkOffset3D makeOffset3D (deInt32 x, deInt32 y, deInt32 z)
+{
+	VkOffset3D res;
+	res.x	= x;
+	res.y	= y;
+	res.z	= z;
+	return res;
+}
+
+inline VkSparseImageMemoryBindInfo makeSparseImageMemoryBindInfo (VkImage image, deUint32 bindCount, const VkSparseImageMemoryBind* pBinds)
+{
+	VkSparseImageMemoryBindInfo res;
+	res.image		= image;
+	res.bindCount	= bindCount;
+	res.pBinds		= pBinds;
+	return res;
+}
+
+inline VkSubresourceLayout makeSubresourceLayout (VkDeviceSize offset, VkDeviceSize size, VkDeviceSize rowPitch, VkDeviceSize arrayPitch, VkDeviceSize depthPitch)
+{
+	VkSubresourceLayout res;
+	res.offset		= offset;
+	res.size		= size;
+	res.rowPitch	= rowPitch;
+	res.arrayPitch	= arrayPitch;
+	res.depthPitch	= depthPitch;
+	return res;
+}
+
+inline VkComponentMapping makeComponentMapping (VkComponentSwizzle r, VkComponentSwizzle g, VkComponentSwizzle b, VkComponentSwizzle a)
+{
+	VkComponentMapping res;
+	res.r	= r;
+	res.g	= g;
+	res.b	= b;
+	res.a	= a;
+	return res;
+}
+
+inline VkImageSubresourceRange makeImageSubresourceRange (VkImageAspectFlags aspectMask, deUint32 baseMipLevel, deUint32 levelCount, deUint32 baseArrayLayer, deUint32 layerCount)
+{
+	VkImageSubresourceRange res;
+	res.aspectMask		= aspectMask;
+	res.baseMipLevel	= baseMipLevel;
+	res.levelCount		= levelCount;
+	res.baseArrayLayer	= baseArrayLayer;
+	res.layerCount		= layerCount;
+	return res;
+}
+
+inline VkSpecializationMapEntry makeSpecializationMapEntry (deUint32 constantID, deUint32 offset, deUintptr size)
+{
+	VkSpecializationMapEntry res;
+	res.constantID	= constantID;
+	res.offset		= offset;
+	res.size		= size;
+	return res;
+}
+
+inline VkSpecializationInfo makeSpecializationInfo (deUint32 mapEntryCount, const VkSpecializationMapEntry* pMapEntries, deUintptr dataSize, const void* pData)
+{
+	VkSpecializationInfo res;
+	res.mapEntryCount	= mapEntryCount;
+	res.pMapEntries		= pMapEntries;
+	res.dataSize		= dataSize;
+	res.pData			= pData;
+	return res;
+}
+
+inline VkVertexInputBindingDescription makeVertexInputBindingDescription (deUint32 binding, deUint32 stride, VkVertexInputRate inputRate)
+{
+	VkVertexInputBindingDescription res;
+	res.binding		= binding;
+	res.stride		= stride;
+	res.inputRate	= inputRate;
+	return res;
+}
+
+inline VkVertexInputAttributeDescription makeVertexInputAttributeDescription (deUint32 location, deUint32 binding, VkFormat format, deUint32 offset)
+{
+	VkVertexInputAttributeDescription res;
+	res.location	= location;
+	res.binding		= binding;
+	res.format		= format;
+	res.offset		= offset;
+	return res;
+}
+
+inline VkViewport makeViewport (float x, float y, float width, float height, float minDepth, float maxDepth)
+{
+	VkViewport res;
+	res.x			= x;
+	res.y			= y;
+	res.width		= width;
+	res.height		= height;
+	res.minDepth	= minDepth;
+	res.maxDepth	= maxDepth;
+	return res;
+}
+
+inline VkOffset2D makeOffset2D (deInt32 x, deInt32 y)
+{
+	VkOffset2D res;
+	res.x	= x;
+	res.y	= y;
+	return res;
+}
+
+inline VkExtent2D makeExtent2D (deUint32 width, deUint32 height)
+{
+	VkExtent2D res;
+	res.width	= width;
+	res.height	= height;
+	return res;
+}
+
+inline VkStencilOpState makeStencilOpState (VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp, deUint32 compareMask, deUint32 writeMask, deUint32 reference)
+{
+	VkStencilOpState res;
+	res.failOp		= failOp;
+	res.passOp		= passOp;
+	res.depthFailOp	= depthFailOp;
+	res.compareOp	= compareOp;
+	res.compareMask	= compareMask;
+	res.writeMask	= writeMask;
+	res.reference	= reference;
+	return res;
+}
+
+inline VkPipelineColorBlendAttachmentState makePipelineColorBlendAttachmentState (VkBool32 blendEnable, VkBlendFactor srcColorBlendFactor, VkBlendFactor dstColorBlendFactor, VkBlendOp colorBlendOp, VkBlendFactor srcAlphaBlendFactor, VkBlendFactor dstAlphaBlendFactor, VkBlendOp alphaBlendOp, VkColorComponentFlags colorWriteMask)
+{
+	VkPipelineColorBlendAttachmentState res;
+	res.blendEnable			= blendEnable;
+	res.srcColorBlendFactor	= srcColorBlendFactor;
+	res.dstColorBlendFactor	= dstColorBlendFactor;
+	res.colorBlendOp		= colorBlendOp;
+	res.srcAlphaBlendFactor	= srcAlphaBlendFactor;
+	res.dstAlphaBlendFactor	= dstAlphaBlendFactor;
+	res.alphaBlendOp		= alphaBlendOp;
+	res.colorWriteMask		= colorWriteMask;
+	return res;
+}
+
+inline VkPushConstantRange makePushConstantRange (VkShaderStageFlags stageFlags, deUint32 offset, deUint32 size)
+{
+	VkPushConstantRange res;
+	res.stageFlags	= stageFlags;
+	res.offset		= offset;
+	res.size		= size;
+	return res;
+}
+
+inline VkDescriptorSetLayoutBinding makeDescriptorSetLayoutBinding (deUint32 binding, VkDescriptorType descriptorType, deUint32 descriptorCount, VkShaderStageFlags stageFlags, const VkSampler* pImmutableSamplers)
+{
+	VkDescriptorSetLayoutBinding res;
+	res.binding				= binding;
+	res.descriptorType		= descriptorType;
+	res.descriptorCount		= descriptorCount;
+	res.stageFlags			= stageFlags;
+	res.pImmutableSamplers	= pImmutableSamplers;
+	return res;
+}
+
+inline VkDescriptorPoolSize makeDescriptorPoolSize (VkDescriptorType type, deUint32 descriptorCount)
+{
+	VkDescriptorPoolSize res;
+	res.type			= type;
+	res.descriptorCount	= descriptorCount;
+	return res;
+}
+
+inline VkDescriptorImageInfo makeDescriptorImageInfo (VkSampler sampler, VkImageView imageView, VkImageLayout imageLayout)
+{
+	VkDescriptorImageInfo res;
+	res.sampler		= sampler;
+	res.imageView	= imageView;
+	res.imageLayout	= imageLayout;
+	return res;
+}
+
+inline VkDescriptorBufferInfo makeDescriptorBufferInfo (VkBuffer buffer, VkDeviceSize offset, VkDeviceSize range)
+{
+	VkDescriptorBufferInfo res;
+	res.buffer	= buffer;
+	res.offset	= offset;
+	res.range	= range;
+	return res;
+}
+
+inline VkAttachmentDescription makeAttachmentDescription (VkAttachmentDescriptionFlags flags, VkFormat format, VkSampleCountFlagBits samples, VkAttachmentLoadOp loadOp, VkAttachmentStoreOp storeOp, VkAttachmentLoadOp stencilLoadOp, VkAttachmentStoreOp stencilStoreOp, VkImageLayout initialLayout, VkImageLayout finalLayout)
+{
+	VkAttachmentDescription res;
+	res.flags			= flags;
+	res.format			= format;
+	res.samples			= samples;
+	res.loadOp			= loadOp;
+	res.storeOp			= storeOp;
+	res.stencilLoadOp	= stencilLoadOp;
+	res.stencilStoreOp	= stencilStoreOp;
+	res.initialLayout	= initialLayout;
+	res.finalLayout		= finalLayout;
+	return res;
+}
+
+inline VkAttachmentReference makeAttachmentReference (deUint32 attachment, VkImageLayout layout)
+{
+	VkAttachmentReference res;
+	res.attachment	= attachment;
+	res.layout		= layout;
+	return res;
+}
+
+inline VkSubpassDescription makeSubpassDescription (VkSubpassDescriptionFlags flags, VkPipelineBindPoint pipelineBindPoint, deUint32 inputAttachmentCount, const VkAttachmentReference* pInputAttachments, deUint32 colorAttachmentCount, const VkAttachmentReference* pColorAttachments, const VkAttachmentReference* pResolveAttachments, const VkAttachmentReference* pDepthStencilAttachment, deUint32 preserveAttachmentCount, const deUint32* pPreserveAttachments)
+{
+	VkSubpassDescription res;
+	res.flags					= flags;
+	res.pipelineBindPoint		= pipelineBindPoint;
+	res.inputAttachmentCount	= inputAttachmentCount;
+	res.pInputAttachments		= pInputAttachments;
+	res.colorAttachmentCount	= colorAttachmentCount;
+	res.pColorAttachments		= pColorAttachments;
+	res.pResolveAttachments		= pResolveAttachments;
+	res.pDepthStencilAttachment	= pDepthStencilAttachment;
+	res.preserveAttachmentCount	= preserveAttachmentCount;
+	res.pPreserveAttachments	= pPreserveAttachments;
+	return res;
+}
+
+inline VkSubpassDependency makeSubpassDependency (deUint32 srcSubpass, deUint32 dstSubpass, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkAccessFlags srcAccessMask, VkAccessFlags dstAccessMask, VkDependencyFlags dependencyFlags)
+{
+	VkSubpassDependency res;
+	res.srcSubpass		= srcSubpass;
+	res.dstSubpass		= dstSubpass;
+	res.srcStageMask	= srcStageMask;
+	res.dstStageMask	= dstStageMask;
+	res.srcAccessMask	= srcAccessMask;
+	res.dstAccessMask	= dstAccessMask;
+	res.dependencyFlags	= dependencyFlags;
+	return res;
+}
+
+inline VkBufferCopy makeBufferCopy (VkDeviceSize srcOffset, VkDeviceSize dstOffset, VkDeviceSize size)
+{
+	VkBufferCopy res;
+	res.srcOffset	= srcOffset;
+	res.dstOffset	= dstOffset;
+	res.size		= size;
+	return res;
+}
+
+inline VkImageSubresourceLayers makeImageSubresourceLayers (VkImageAspectFlags aspectMask, deUint32 mipLevel, deUint32 baseArrayLayer, deUint32 layerCount)
+{
+	VkImageSubresourceLayers res;
+	res.aspectMask		= aspectMask;
+	res.mipLevel		= mipLevel;
+	res.baseArrayLayer	= baseArrayLayer;
+	res.layerCount		= layerCount;
+	return res;
+}
+
+inline VkClearDepthStencilValue makeClearDepthStencilValue (float depth, deUint32 stencil)
+{
+	VkClearDepthStencilValue res;
+	res.depth	= depth;
+	res.stencil	= stencil;
+	return res;
+}
+
+inline VkDispatchIndirectCommand makeDispatchIndirectCommand (deUint32 x, deUint32 y, deUint32 z)
+{
+	VkDispatchIndirectCommand res;
+	res.x	= x;
+	res.y	= y;
+	res.z	= z;
+	return res;
+}
+
+inline VkDrawIndexedIndirectCommand makeDrawIndexedIndirectCommand (deUint32 indexCount, deUint32 instanceCount, deUint32 firstIndex, deInt32 vertexOffset, deUint32 firstInstance)
+{
+	VkDrawIndexedIndirectCommand res;
+	res.indexCount		= indexCount;
+	res.instanceCount	= instanceCount;
+	res.firstIndex		= firstIndex;
+	res.vertexOffset	= vertexOffset;
+	res.firstInstance	= firstInstance;
+	return res;
+}
+
+inline VkDrawIndirectCommand makeDrawIndirectCommand (deUint32 vertexCount, deUint32 instanceCount, deUint32 firstVertex, deUint32 firstInstance)
+{
+	VkDrawIndirectCommand res;
+	res.vertexCount		= vertexCount;
+	res.instanceCount	= instanceCount;
+	res.firstVertex		= firstVertex;
+	res.firstInstance	= firstInstance;
+	return res;
+}
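These generated make* helpers simply populate the corresponding struct field by field, which keeps test code terse. A small sketch of how a few of them compose, not part of the generated file; the values are placeholders and "using namespace vk;" is assumed.

	const VkViewport				viewport	= makeViewport(0.0f, 0.0f, 256.0f, 256.0f, 0.0f, 1.0f);
	const VkBufferCopy				wholeCopy	= makeBufferCopy(0ull, 0ull, (VkDeviceSize)1024);
	const VkClearDepthStencilValue	clearDS		= makeClearDepthStencilValue(1.0f, 0u);
	const VkImageSubresourceLayers	colorLayer	= makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u);
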
diff --git a/external/vulkancts/framework/vulkan/vkVirtualDeviceInterface.inl b/external/vulkancts/framework/vulkan/vkVirtualDeviceInterface.inl
new file mode 100644
index 0000000..14dceb0
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkVirtualDeviceInterface.inl
@@ -0,0 +1,124 @@
+/* WARNING: This is an auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+virtual void		destroyDevice									(VkDevice device, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual void		getDeviceQueue									(VkDevice device, deUint32 queueFamilyIndex, deUint32 queueIndex, VkQueue* pQueue) const = 0;
+virtual VkResult	queueSubmit										(VkQueue queue, deUint32 submitCount, const VkSubmitInfo* pSubmits, VkFence fence) const = 0;
+virtual VkResult	queueWaitIdle									(VkQueue queue) const = 0;
+virtual VkResult	deviceWaitIdle									(VkDevice device) const = 0;
+virtual VkResult	allocateMemory									(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) const = 0;
+virtual void		freeMemory										(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	mapMemory										(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) const = 0;
+virtual void		unmapMemory										(VkDevice device, VkDeviceMemory memory) const = 0;
+virtual VkResult	flushMappedMemoryRanges							(VkDevice device, deUint32 memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) const = 0;
+virtual VkResult	invalidateMappedMemoryRanges					(VkDevice device, deUint32 memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges) const = 0;
+virtual void		getDeviceMemoryCommitment						(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) const = 0;
+virtual VkResult	bindBufferMemory								(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) const = 0;
+virtual VkResult	bindImageMemory									(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) const = 0;
+virtual void		getBufferMemoryRequirements						(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements) const = 0;
+virtual void		getImageMemoryRequirements						(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements) const = 0;
+virtual void		getImageSparseMemoryRequirements				(VkDevice device, VkImage image, deUint32* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) const = 0;
+virtual void		getPhysicalDeviceSparseImageFormatProperties	(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, deUint32* pPropertyCount, VkSparseImageFormatProperties* pProperties) const = 0;
+virtual VkResult	queueBindSparse									(VkQueue queue, deUint32 bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) const = 0;
+virtual VkResult	createFence										(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence) const = 0;
+virtual void		destroyFence									(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	resetFences										(VkDevice device, deUint32 fenceCount, const VkFence* pFences) const = 0;
+virtual VkResult	getFenceStatus									(VkDevice device, VkFence fence) const = 0;
+virtual VkResult	waitForFences									(VkDevice device, deUint32 fenceCount, const VkFence* pFences, VkBool32 waitAll, deUint64 timeout) const = 0;
+virtual VkResult	createSemaphore									(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore) const = 0;
+virtual void		destroySemaphore								(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	createEvent										(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent) const = 0;
+virtual void		destroyEvent									(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	getEventStatus									(VkDevice device, VkEvent event) const = 0;
+virtual VkResult	setEvent										(VkDevice device, VkEvent event) const = 0;
+virtual VkResult	resetEvent										(VkDevice device, VkEvent event) const = 0;
+virtual VkResult	createQueryPool									(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool) const = 0;
+virtual void		destroyQueryPool								(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	getQueryPoolResults								(VkDevice device, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount, deUintptr dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) const = 0;
+virtual VkResult	createBuffer									(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) const = 0;
+virtual void		destroyBuffer									(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	createBufferView								(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView) const = 0;
+virtual void		destroyBufferView								(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	createImage										(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage) const = 0;
+virtual void		destroyImage									(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual void		getImageSubresourceLayout						(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) const = 0;
+virtual VkResult	createImageView									(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView) const = 0;
+virtual void		destroyImageView								(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	createShaderModule								(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule) const = 0;
+virtual void		destroyShaderModule								(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	createPipelineCache								(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache) const = 0;
+virtual void		destroyPipelineCache							(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	getPipelineCacheData							(VkDevice device, VkPipelineCache pipelineCache, deUintptr* pDataSize, void* pData) const = 0;
+virtual VkResult	mergePipelineCaches								(VkDevice device, VkPipelineCache dstCache, deUint32 srcCacheCount, const VkPipelineCache* pSrcCaches) const = 0;
+virtual VkResult	createGraphicsPipelines							(VkDevice device, VkPipelineCache pipelineCache, deUint32 createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const = 0;
+virtual VkResult	createComputePipelines							(VkDevice device, VkPipelineCache pipelineCache, deUint32 createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) const = 0;
+virtual void		destroyPipeline									(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	createPipelineLayout							(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout) const = 0;
+virtual void		destroyPipelineLayout							(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	createSampler									(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) const = 0;
+virtual void		destroySampler									(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	createDescriptorSetLayout						(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout) const = 0;
+virtual void		destroyDescriptorSetLayout						(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	createDescriptorPool							(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) const = 0;
+virtual void		destroyDescriptorPool							(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	resetDescriptorPool								(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) const = 0;
+virtual VkResult	allocateDescriptorSets							(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) const = 0;
+virtual VkResult	freeDescriptorSets								(VkDevice device, VkDescriptorPool descriptorPool, deUint32 descriptorSetCount, const VkDescriptorSet* pDescriptorSets) const = 0;
+virtual void		updateDescriptorSets							(VkDevice device, deUint32 descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, deUint32 descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies) const = 0;
+virtual VkResult	createFramebuffer								(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer) const = 0;
+virtual void		destroyFramebuffer								(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	createRenderPass								(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass) const = 0;
+virtual void		destroyRenderPass								(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual void		getRenderAreaGranularity						(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) const = 0;
+virtual VkResult	createCommandPool								(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool) const = 0;
+virtual void		destroyCommandPool								(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult	resetCommandPool								(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) const = 0;
+virtual VkResult	allocateCommandBuffers							(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) const = 0;
+virtual void		freeCommandBuffers								(VkDevice device, VkCommandPool commandPool, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers) const = 0;
+virtual VkResult	beginCommandBuffer								(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo) const = 0;
+virtual VkResult	endCommandBuffer								(VkCommandBuffer commandBuffer) const = 0;
+virtual VkResult	resetCommandBuffer								(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) const = 0;
+virtual void		cmdBindPipeline									(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) const = 0;
+virtual void		cmdSetViewport									(VkCommandBuffer commandBuffer, deUint32 firstViewport, deUint32 viewportCount, const VkViewport* pViewports) const = 0;
+virtual void		cmdSetScissor									(VkCommandBuffer commandBuffer, deUint32 firstScissor, deUint32 scissorCount, const VkRect2D* pScissors) const = 0;
+virtual void		cmdSetLineWidth									(VkCommandBuffer commandBuffer, float lineWidth) const = 0;
+virtual void		cmdSetDepthBias									(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor) const = 0;
+virtual void		cmdSetBlendConstants							(VkCommandBuffer commandBuffer, const float blendConstants[4]) const = 0;
+virtual void		cmdSetDepthBounds								(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) const = 0;
+virtual void		cmdSetStencilCompareMask						(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 compareMask) const = 0;
+virtual void		cmdSetStencilWriteMask							(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 writeMask) const = 0;
+virtual void		cmdSetStencilReference							(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, deUint32 reference) const = 0;
+virtual void		cmdBindDescriptorSets							(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, deUint32 firstSet, deUint32 descriptorSetCount, const VkDescriptorSet* pDescriptorSets, deUint32 dynamicOffsetCount, const deUint32* pDynamicOffsets) const = 0;
+virtual void		cmdBindIndexBuffer								(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) const = 0;
+virtual void		cmdBindVertexBuffers							(VkCommandBuffer commandBuffer, deUint32 firstBinding, deUint32 bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) const = 0;
+virtual void		cmdDraw											(VkCommandBuffer commandBuffer, deUint32 vertexCount, deUint32 instanceCount, deUint32 firstVertex, deUint32 firstInstance) const = 0;
+virtual void		cmdDrawIndexed									(VkCommandBuffer commandBuffer, deUint32 indexCount, deUint32 instanceCount, deUint32 firstIndex, deInt32 vertexOffset, deUint32 firstInstance) const = 0;
+virtual void		cmdDrawIndirect									(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, deUint32 drawCount, deUint32 stride) const = 0;
+virtual void		cmdDrawIndexedIndirect							(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, deUint32 drawCount, deUint32 stride) const = 0;
+virtual void		cmdDispatch										(VkCommandBuffer commandBuffer, deUint32 x, deUint32 y, deUint32 z) const = 0;
+virtual void		cmdDispatchIndirect								(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const = 0;
+virtual void		cmdCopyBuffer									(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, deUint32 regionCount, const VkBufferCopy* pRegions) const = 0;
+virtual void		cmdCopyImage									(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageCopy* pRegions) const = 0;
+virtual void		cmdBlitImage									(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageBlit* pRegions, VkFilter filter) const = 0;
+virtual void		cmdCopyBufferToImage							(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkBufferImageCopy* pRegions) const = 0;
+virtual void		cmdCopyImageToBuffer							(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, deUint32 regionCount, const VkBufferImageCopy* pRegions) const = 0;
+virtual void		cmdUpdateBuffer									(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const deUint32* pData) const = 0;
+virtual void		cmdFillBuffer									(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, deUint32 data) const = 0;
+virtual void		cmdClearColorImage								(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, deUint32 rangeCount, const VkImageSubresourceRange* pRanges) const = 0;
+virtual void		cmdClearDepthStencilImage						(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, deUint32 rangeCount, const VkImageSubresourceRange* pRanges) const = 0;
+virtual void		cmdClearAttachments								(VkCommandBuffer commandBuffer, deUint32 attachmentCount, const VkClearAttachment* pAttachments, deUint32 rectCount, const VkClearRect* pRects) const = 0;
+virtual void		cmdResolveImage									(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, deUint32 regionCount, const VkImageResolve* pRegions) const = 0;
+virtual void		cmdSetEvent										(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const = 0;
+virtual void		cmdResetEvent									(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const = 0;
+virtual void		cmdWaitEvents									(VkCommandBuffer commandBuffer, deUint32 eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, deUint32 memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, deUint32 bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, deUint32 imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const = 0;
+virtual void		cmdPipelineBarrier								(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, deUint32 memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, deUint32 bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, deUint32 imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) const = 0;
+virtual void		cmdBeginQuery									(VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 query, VkQueryControlFlags flags) const = 0;
+virtual void		cmdEndQuery										(VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 query) const = 0;
+virtual void		cmdResetQueryPool								(VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount) const = 0;
+virtual void		cmdWriteTimestamp								(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, deUint32 query) const = 0;
+virtual void		cmdCopyQueryPoolResults							(VkCommandBuffer commandBuffer, VkQueryPool queryPool, deUint32 firstQuery, deUint32 queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags) const = 0;
+virtual void		cmdPushConstants								(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, deUint32 offset, deUint32 size, const void* pValues) const = 0;
+virtual void		cmdBeginRenderPass								(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) const = 0;
+virtual void		cmdNextSubpass									(VkCommandBuffer commandBuffer, VkSubpassContents contents) const = 0;
+virtual void		cmdEndRenderPass								(VkCommandBuffer commandBuffer) const = 0;
+virtual void		cmdExecuteCommands								(VkCommandBuffer commandBuffer, deUint32 commandBufferCount, const VkCommandBuffer* pCommandBuffers) const = 0;
diff --git a/external/vulkancts/framework/vulkan/vkVirtualInstanceInterface.inl b/external/vulkancts/framework/vulkan/vkVirtualInstanceInterface.inl
new file mode 100644
index 0000000..9c17a06
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkVirtualInstanceInterface.inl
@@ -0,0 +1,15 @@
+/* WARNING: This is auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+virtual void				destroyInstance							(VkInstance instance, const VkAllocationCallbacks* pAllocator) const = 0;
+virtual VkResult			enumeratePhysicalDevices				(VkInstance instance, deUint32* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices) const = 0;
+virtual void				getPhysicalDeviceFeatures				(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures) const = 0;
+virtual void				getPhysicalDeviceFormatProperties		(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) const = 0;
+virtual VkResult			getPhysicalDeviceImageFormatProperties	(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) const = 0;
+virtual void				getPhysicalDeviceProperties				(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties) const = 0;
+virtual void				getPhysicalDeviceQueueFamilyProperties	(VkPhysicalDevice physicalDevice, deUint32* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties) const = 0;
+virtual void				getPhysicalDeviceMemoryProperties		(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties) const = 0;
+virtual PFN_vkVoidFunction	getDeviceProcAddr						(VkDevice device, const char* pName) const = 0;
+virtual VkResult			createDevice							(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) const = 0;
+virtual VkResult			enumerateDeviceExtensionProperties		(VkPhysicalDevice physicalDevice, const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties) const = 0;
+virtual VkResult			enumerateDeviceLayerProperties			(VkPhysicalDevice physicalDevice, deUint32* pPropertyCount, VkLayerProperties* pProperties) const = 0;
diff --git a/external/vulkancts/framework/vulkan/vkVirtualPlatformInterface.inl b/external/vulkancts/framework/vulkan/vkVirtualPlatformInterface.inl
new file mode 100644
index 0000000..dff00e3
--- /dev/null
+++ b/external/vulkancts/framework/vulkan/vkVirtualPlatformInterface.inl
@@ -0,0 +1,7 @@
+/* WARNING: This is auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */
+virtual VkResult			createInstance							(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance) const = 0;
+virtual PFN_vkVoidFunction	getInstanceProcAddr						(VkInstance instance, const char* pName) const = 0;
+virtual VkResult			enumerateInstanceExtensionProperties	(const char* pLayerName, deUint32* pPropertyCount, VkExtensionProperties* pProperties) const = 0;
+virtual VkResult			enumerateInstanceLayerProperties		(deUint32* pPropertyCount, VkLayerProperties* pProperties) const = 0;
diff --git a/external/vulkancts/gen_framework.py b/external/vulkancts/gen_framework.py
new file mode 100644
index 0000000..d081772
--- /dev/null
+++ b/external/vulkancts/gen_framework.py
@@ -0,0 +1,823 @@
+# -*- coding: utf-8 -*-
+
+#-------------------------------------------------------------------------
+# Vulkan CTS
+# ----------
+#
+# Copyright (c) 2015 Google Inc.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and/or associated documentation files (the
+# "Materials"), to deal in the Materials without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Materials, and to
+# permit persons to whom the Materials are furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice(s) and this permission notice shall be
+# included in all copies or substantial portions of the Materials.
+#
+# The Materials are Confidential Information as defined by the
+# Khronos Membership Agreement until designated non-confidential by
+# Khronos, at which point this condition clause shall be removed.
+#
+# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+#
+#-------------------------------------------------------------------------
+
+import os
+import re
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", "scripts"))
+
+from build.common import DEQP_DIR
+from khr_util.format import indentLines, writeInlFile
+
+VULKAN_DIR = os.path.join(os.path.dirname(__file__), "framework", "vulkan")
+
+INL_HEADER = """\
+/* WARNING: This is auto-generated file. Do not modify, since changes will
+ * be lost! Modify the generating script instead.
+ */\
+"""
+
+PLATFORM_FUNCTIONS	= [
+	"vkCreateInstance",
+	"vkGetInstanceProcAddr",
+	"vkEnumerateInstanceExtensionProperties",
+	"vkEnumerateInstanceLayerProperties",
+]
+INSTANCE_FUNCTIONS	= [
+	"vkDestroyInstance",
+	"vkEnumeratePhysicalDevices",
+	"vkGetPhysicalDeviceFeatures",
+	"vkGetPhysicalDeviceFormatProperties",
+	"vkGetPhysicalDeviceImageFormatProperties",
+	"vkGetPhysicalDeviceLimits",
+	"vkGetPhysicalDeviceProperties",
+	"vkGetPhysicalDeviceQueueFamilyProperties",
+	"vkGetPhysicalDeviceMemoryProperties",
+	"vkEnumerateDeviceExtensionProperties",
+	"vkEnumerateDeviceLayerProperties",
+	"vkCreateDevice",
+	"vkGetDeviceProcAddr",
+]
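+# Any function not listed in PLATFORM_FUNCTIONS or INSTANCE_FUNCTIONS is treated as a device-level function (see Function.getType()).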
+
+DEFINITIONS			= [
+	"VK_API_VERSION",
+	"VK_MAX_PHYSICAL_DEVICE_NAME_SIZE",
+	"VK_MAX_EXTENSION_NAME_SIZE",
+	"VK_UUID_SIZE",
+	"VK_MAX_MEMORY_TYPES",
+	"VK_MAX_MEMORY_HEAPS",
+	"VK_MAX_DESCRIPTION_SIZE",
+	"VK_ATTACHMENT_UNUSED",
+]
+
+class Handle:
+	TYPE_DISP		= 0
+	TYPE_NONDISP	= 1
+
+	def __init__ (self, type, name):
+		self.type	= type
+		self.name	= name
+
+	def getHandleType (self):
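+		# e.g. "VkCommandPool" -> "HANDLE_TYPE_COMMAND_POOL"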
+		name = re.sub(r'([a-z])([A-Z])', r'\1_\2', self.name)
+		return "HANDLE_TYPE_" + name[3:].upper()
+
+class Enum:
+	def __init__ (self, name, values):
+		self.name	= name
+		self.values	= values
+
+class Bitfield:
+	def __init__ (self, name, values):
+		self.name	= name
+		self.values	= values
+
+class Variable:
+	def __init__ (self, type, name, arraySize = None):
+		self.type		= type
+		self.name		= name
+		self.arraySize	= arraySize
+
+class CompositeType:
+	CLASS_STRUCT	= 0
+	CLASS_UNION		= 1
+
+	def __init__ (self, typeClass, name, members):
+		self.typeClass	= typeClass
+		self.name		= name
+		self.members	= members
+
+	def getClassName (self):
+		names = {CompositeType.CLASS_STRUCT: 'struct', CompositeType.CLASS_UNION: 'union'}
+		return names[self.typeClass]
+
+class Function:
+	TYPE_PLATFORM		= 0 # Not bound to anything
+	TYPE_INSTANCE		= 1 # Bound to VkInstance
+	TYPE_DEVICE			= 2 # Bound to VkDevice
+
+	def __init__ (self, name, returnType, arguments):
+		self.name		= name
+		self.returnType	= returnType
+		self.arguments	= arguments
+
+	def getType (self):
+		if self.name in PLATFORM_FUNCTIONS:
+			return Function.TYPE_PLATFORM
+		elif self.name in INSTANCE_FUNCTIONS:
+			return Function.TYPE_INSTANCE
+		else:
+			return Function.TYPE_DEVICE
+
+class API:
+	def __init__ (self, definitions, handles, enums, bitfields, compositeTypes, functions):
+		self.definitions	= definitions
+		self.handles		= handles
+		self.enums			= enums
+		self.bitfields		= bitfields
+		self.compositeTypes	= compositeTypes
+		self.functions		= functions
+
+def readFile (filename):
+	with open(filename, 'rb') as f:
+		return f.read()
+
+IDENT_PTRN	= r'[a-zA-Z_][a-zA-Z0-9_]*'
+TYPE_PTRN	= r'[a-zA-Z_][a-zA-Z0-9_ \t*]*'
+
+def endswith (s, postfix):
+	return len(s) >= len(postfix) and s[len(s)-len(postfix):] == postfix
+
+def fixupEnumValues (values):
+	fixed = []
+	for name, value in values:
+		if endswith(name, "_BEGIN_RANGE") or endswith(name, "_END_RANGE"):
+			continue
+		fixed.append((name, value))
+	return fixed
+
+def fixupType (type):
+	replacements = [
+			("uint8_t",		"deUint8"),
+			("uint16_t",	"deUint16"),
+			("uint32_t",	"deUint32"),
+			("uint64_t",	"deUint64"),
+			("int8_t",		"deInt8"),
+			("int16_t",		"deInt16"),
+			("int32_t",		"deInt32"),
+			("int64_t",		"deInt64"),
+			("bool32_t",	"deUint32"),
+			("size_t",		"deUintptr"),
+		]
+
+	for src, dst in replacements:
+		type = type.replace(src, dst)
+
+	return type
+
+def fixupFunction (function):
+	fixedArgs		= [Variable(fixupType(a.type), a.name, a.arraySize) for a in function.arguments]
+	fixedReturnType	= fixupType(function.returnType)
+
+	return Function(function.name, fixedReturnType, fixedArgs)
+
+def getInterfaceName (function):
+	assert function.name[:2] == "vk"
+	return function.name[2].lower() + function.name[3:]
+
+def getFunctionTypeName (function):
+	assert function.name[:2] == "vk"
+	return function.name[2:] + "Func"
+
+def getBitEnumNameForBitfield (bitfieldName):
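+	# e.g. "VkQueueFlags" -> "VkQueueFlagBits", "VkMemoryPropertyFlags" -> "VkMemoryPropertyFlagBits"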
+	if bitfieldName[-3:] == "KHR":
+		postfix = "KHR"
+		bitfieldName = bitfieldName[:-3]
+	else:
+		postfix = ""
+
+	assert bitfieldName[-1] == "s"
+	return bitfieldName[:-1] + "Bits" + postfix
+
+def getBitfieldNameForBitEnum (bitEnumName):
+	if bitEnumName[-3:] == "KHR":
+		postfix = "KHR"
+		bitEnumName = bitEnumName[:-3]
+	else:
+		postfix = ""
+
+	assert bitEnumName[-4:] == "Bits"
+	return bitEnumName[:-4] + "s" + postfix
+
+def parsePreprocDefinedValue (src, name):
+	definition = re.search(r'#\s*define\s+' + name + r'\s+([^\n]+)\n', src)
+	if definition is None:
+		raise Exception("No such definition: %s" % name)
+	value = definition.group(1).strip()
+
+	if value == "UINT32_MAX":
+		value = "(~0u)"
+
+	return value
+
+def parseEnum (name, src):
+	keyValuePtrn	= '(' + IDENT_PTRN + r')\s*=\s*([^\s,}]+)\s*[,}]'
+	matches			= re.findall(keyValuePtrn, src)
+
+	return Enum(name, fixupEnumValues(matches))
+
+# \note Parses raw enums, some are mapped to bitfields later
+def parseEnums (src):
+	matches	= re.findall(r'typedef enum(\s*' + IDENT_PTRN + r')?\s*{([^}]*)}\s*(' + IDENT_PTRN + r')\s*;', src)
+	enums	= []
+
+	for enumname, contents, typename in matches:
+		enums.append(parseEnum(typename, contents))
+
+	return enums
+
+def parseCompositeType (type, name, src):
+	# \todo [pyry] Array support is currently a hack (size coupled with name)
+	typeNamePtrn	= r'(' + TYPE_PTRN + ')(\s' + IDENT_PTRN + r'(\[[^\]]+\])*)\s*;'
+	matches			= re.findall(typeNamePtrn, src)
+	members			= [Variable(fixupType(t.strip()), n.strip()) for t, n, a in matches]
+
+	return CompositeType(type, name, members)
+
+def parseCompositeTypes (src):
+	typeMap	= { 'struct': CompositeType.CLASS_STRUCT, 'union': CompositeType.CLASS_UNION }
+	matches	= re.findall(r'typedef (struct|union)(\s*' + IDENT_PTRN + r')?\s*{([^}]*)}\s*(' + IDENT_PTRN + r')\s*;', src)
+	types	= []
+
+	for type, structname, contents, typename in matches:
+		if typename[-3:] == "KHR":
+			continue # \todo [2016-01-05 pyry] Figure out how to handle platform-specific types
+
+		types.append(parseCompositeType(typeMap[type], typename, contents))
+
+	return types
+
+def parseHandles (src):
+	matches	= re.findall(r'VK_DEFINE(_NON_DISPATCHABLE|)_HANDLE\((' + IDENT_PTRN + r')\)[ \t]*[\n\r]', src)
+	handles	= []
+	typeMap	= {'': Handle.TYPE_DISP, '_NON_DISPATCHABLE': Handle.TYPE_NONDISP}
+
+	for type, name in matches:
+		handle = Handle(typeMap[type], name)
+		handles.append(handle)
+
+	return handles
+
+def parseArgList (src):
+	typeNamePtrn	= r'(' + TYPE_PTRN + ')(\s' + IDENT_PTRN + r')(\[[^\]]+\])?'
+	args			= []
+
+	for rawArg in src.split(','):
+		m = re.search(typeNamePtrn, rawArg)
+		args.append(Variable(m.group(1).strip(), m.group(2).strip(), m.group(3)))
+
+	return args
+
+def parseFunctions (src):
+	ptrn		= r'VKAPI_ATTR\s+(' + TYPE_PTRN + ')VKAPI_CALL\s+(' + IDENT_PTRN + r')\s*\(([^)]*)\)\s*;'
+	matches		= re.findall(ptrn, src)
+	functions	= []
+
+	for returnType, name, argList in matches:
+		if name[-3:] == "KHR":
+			continue # \todo [2015-11-16 pyry] Figure out how to handle platform-specific extension functions
+
+		functions.append(Function(name.strip(), returnType.strip(), parseArgList(argList)))
+
+	return [fixupFunction(f) for f in functions]
+
+def parseBitfieldNames (src):
+	ptrn		= r'typedef\s+VkFlags\s(' + IDENT_PTRN + r')\s*;'
+	matches		= re.findall(ptrn, src)
+
+	return matches
+
+def parseAPI (src):
+	definitions		= [(name, parsePreprocDefinedValue(src, name)) for name in DEFINITIONS]
+	rawEnums		= parseEnums(src)
+	bitfieldNames	= parseBitfieldNames(src)
+	enums			= []
+	bitfields		= []
+	bitfieldEnums	= set([getBitEnumNameForBitfield(n) for n in bitfieldNames])
+
+	for enum in rawEnums:
+		if enum.name in bitfieldEnums:
+			bitfields.append(Bitfield(getBitfieldNameForBitEnum(enum.name), enum.values))
+		else:
+			enums.append(enum)
+
+	for bitfieldName in bitfieldNames:
+		if not bitfieldName in [bitfield.name for bitfield in bitfields]:
+			# Add empty bitfield
+			bitfields.append(Bitfield(bitfieldName, []))
+
+	return API(
+		definitions		= definitions,
+		handles			= parseHandles(src),
+		enums			= enums,
+		bitfields		= bitfields,
+		compositeTypes	= parseCompositeTypes(src),
+		functions		= parseFunctions(src))
+
+def writeHandleType (api, filename):
+	def gen ():
+		yield "enum HandleType"
+		yield "{"
+		yield "\t%s = 0," % api.handles[0].getHandleType()
+		for handle in api.handles[1:]:
+			yield "\t%s," % handle.getHandleType()
+		yield "\tHANDLE_TYPE_LAST"
+		yield "};"
+		yield ""
+
+	writeInlFile(filename, INL_HEADER, gen())
+
+def getEnumValuePrefix (enum):
+	prefix = enum.name[0]
+	for i in range(1, len(enum.name)):
+		if enum.name[i].isupper():
+			prefix += "_"
+		prefix += enum.name[i].upper()
+	return prefix
+
+def parseInt (value):
+	if value[:2] == "0x":
+		return int(value, 16)
+	else:
+		return int(value, 10)
+
+def areEnumValuesLinear (enum):
+	curIndex = 0
+	for name, value in enum.values:
+		if parseInt(value) != curIndex:
+			return False
+		curIndex += 1
+	return True
+
+def genEnumSrc (enum):
+	yield "enum %s" % enum.name
+	yield "{"
+
+	for line in indentLines(["\t%s\t= %s," % v for v in enum.values]):
+		yield line
+
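+	# Append a <NAME>_LAST sentinel only when the enum values form a contiguous range starting at 0.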
+	if areEnumValuesLinear(enum):
+		yield ""
+		yield "\t%s_LAST" % getEnumValuePrefix(enum)
+
+	yield "};"
+
+def genBitfieldSrc (bitfield):
+	if len(bitfield.values) > 0:
+		yield "enum %s" % getBitEnumNameForBitfield(bitfield.name)
+		yield "{"
+		for line in indentLines(["\t%s\t= %s," % v for v in bitfield.values]):
+			yield line
+		yield "};"
+
+	yield "typedef deUint32 %s;" % bitfield.name
+
+def genCompositeTypeSrc (type):
+	yield "%s %s" % (type.getClassName(), type.name)
+	yield "{"
+	for line in indentLines(["\t%s\t%s;" % (m.type, m.name) for m in type.members]):
+		yield line
+	yield "};"
+
+def genHandlesSrc (handles):
+	def genLines (handles):
+		for handle in handles:
+			if handle.type == Handle.TYPE_DISP:
+				yield "VK_DEFINE_HANDLE\t(%s,\t%s);" % (handle.name, handle.getHandleType())
+			elif handle.type == Handle.TYPE_NONDISP:
+				yield "VK_DEFINE_NON_DISPATCHABLE_HANDLE\t(%s,\t%s);" % (handle.name, handle.getHandleType())
+
+	for line in indentLines(genLines(handles)):
+		yield line
+
+def writeBasicTypes (api, filename):
+	def gen ():
+		for line in indentLines(["enum { %s\t= %s\t};" % define for define in api.definitions]):
+			yield line
+		yield ""
+		for line in genHandlesSrc(api.handles):
+			yield line
+		yield ""
+		for enum in api.enums:
+			for line in genEnumSrc(enum):
+				yield line
+			yield ""
+		for bitfield in api.bitfields:
+			for line in genBitfieldSrc(bitfield):
+				yield line
+			yield ""
+
+	writeInlFile(filename, INL_HEADER, gen())
+
+def writeCompositeTypes (api, filename):
+	def gen ():
+		for type in api.compositeTypes:
+			for line in genCompositeTypeSrc(type):
+				yield line
+			yield ""
+
+	writeInlFile(filename, INL_HEADER, gen())
+
+def argListToStr (args):
+	return ", ".join("%s %s%s" % (v.type, v.name, v.arraySize if v.arraySize != None else "") for v in args)
+
+def writeInterfaceDecl (api, filename, functionTypes, concrete):
+	def genProtos ():
+		postfix = "" if concrete else " = 0"
+		for function in api.functions:
+			if function.getType() in functionTypes:
+				yield "virtual %s\t%s\t(%s) const%s;" % (function.returnType, getInterfaceName(function), argListToStr(function.arguments), postfix)
+
+	writeInlFile(filename, INL_HEADER, indentLines(genProtos()))
+
+def writeFunctionPtrTypes (api, filename):
+	def genTypes ():
+		for function in api.functions:
+			yield "typedef VKAPI_ATTR %s\t(VKAPI_CALL* %s)\t(%s);" % (function.returnType, getFunctionTypeName(function), argListToStr(function.arguments))
+
+	writeInlFile(filename, INL_HEADER, indentLines(genTypes()))
+
+def writeFunctionPointers (api, filename, functionTypes):
+	writeInlFile(filename, INL_HEADER, indentLines(["%s\t%s;" % (getFunctionTypeName(function), getInterfaceName(function)) for function in api.functions if function.getType() in functionTypes]))
+
+def writeInitFunctionPointers (api, filename, functionTypes):
+	def makeInitFunctionPointers ():
+		for function in api.functions:
+			if function.getType() in functionTypes:
+				yield "m_vk.%s\t= (%s)\tGET_PROC_ADDR(\"%s\");" % (getInterfaceName(function), getFunctionTypeName(function), function.name)
+
+	writeInlFile(filename, INL_HEADER, indentLines(makeInitFunctionPointers()))
+
+def writeFuncPtrInterfaceImpl (api, filename, functionTypes, className):
+	def makeFuncPtrInterfaceImpl ():
+		for function in api.functions:
+			if function.getType() in functionTypes:
+				yield ""
+				yield "%s %s::%s (%s) const" % (function.returnType, className, getInterfaceName(function), argListToStr(function.arguments))
+				yield "{"
+				yield "	%sm_vk.%s(%s);" % ("return " if function.returnType != "void" else "", getInterfaceName(function), ", ".join(a.name for a in function.arguments))
+				yield "}"
+
+	writeInlFile(filename, INL_HEADER, makeFuncPtrInterfaceImpl())
+
+def writeStrUtilProto (api, filename):
+	def makeStrUtilProto ():
+		for line in indentLines(["const char*\tget%sName\t(%s value);" % (enum.name[2:], enum.name) for enum in api.enums]):
+			yield line
+		yield ""
+		for line in indentLines(["inline tcu::Format::Enum<%s>\tget%sStr\t(%s value)\t{ return tcu::Format::Enum<%s>(get%sName, value);\t}" % (e.name, e.name[2:], e.name, e.name, e.name[2:]) for e in api.enums]):
+			yield line
+		yield ""
+		for line in indentLines(["inline std::ostream&\toperator<<\t(std::ostream& s, %s value)\t{ return s << get%sStr(value);\t}" % (e.name, e.name[2:]) for e in api.enums]):
+			yield line
+		yield ""
+		for line in indentLines(["tcu::Format::Bitfield<32>\tget%sStr\t(%s value);" % (bitfield.name[2:], bitfield.name) for bitfield in api.bitfields]):
+			yield line
+		yield ""
+		for line in indentLines(["std::ostream&\toperator<<\t(std::ostream& s, const %s& value);" % (s.name) for s in api.compositeTypes]):
+			yield line
+
+	writeInlFile(filename, INL_HEADER, makeStrUtilProto())
+
+def writeStrUtilImpl (api, filename):
+	def makeStrUtilImpl ():
+		for line in indentLines(["template<> const char*\tgetTypeName<%s>\t(void) { return \"%s\";\t}" % (handle.name, handle.name) for handle in api.handles]):
+			yield line
+
+		for enum in api.enums:
+			yield ""
+			yield "const char* get%sName (%s value)" % (enum.name[2:], enum.name)
+			yield "{"
+			yield "\tswitch (value)"
+			yield "\t{"
+			for line in indentLines(["\t\tcase %s:\treturn \"%s\";" % (n, n) for n, v in enum.values] + ["\t\tdefault:\treturn DE_NULL;"]):
+				yield line
+			yield "\t}"
+			yield "}"
+
+		for bitfield in api.bitfields:
+			yield ""
+			yield "tcu::Format::Bitfield<32> get%sStr (%s value)" % (bitfield.name[2:], bitfield.name)
+			yield "{"
+
+			if len(bitfield.values) > 0:
+				yield "\tstatic const tcu::Format::BitDesc s_desc[] ="
+				yield "\t{"
+				for line in indentLines(["\t\ttcu::Format::BitDesc(%s,\t\"%s\")," % (n, n) for n, v in bitfield.values]):
+					yield line
+				yield "\t};"
+				yield "\treturn tcu::Format::Bitfield<32>(value, DE_ARRAY_BEGIN(s_desc), DE_ARRAY_END(s_desc));"
+			else:
+				yield "\treturn tcu::Format::Bitfield<32>(value, DE_NULL, DE_NULL);"
+
+			yield "}"
+
+		bitfieldTypeNames = set([bitfield.name for bitfield in api.bitfields])
+
+		for type in api.compositeTypes:
+			yield ""
+			yield "std::ostream& operator<< (std::ostream& s, const %s& value)" % type.name
+			yield "{"
+			yield "\ts << \"%s = {\\n\";" % type.name
+			for member in type.members:
+				memberName	= member.name
+				valFmt		= None
+				newLine		= ""
+				if member.type in bitfieldTypeNames:
+					valFmt = "get%sStr(value.%s)" % (member.type[2:], member.name)
+				elif member.type == "const char*" or member.type == "char*":
+					valFmt = "getCharPtrStr(value.%s)" % member.name
+				elif '[' in member.name:
+					baseName = member.name[:member.name.find('[')]
+					if baseName in ["extensionName", "deviceName", "layerName", "description"]:
+						valFmt = "(const char*)value.%s" % baseName
+					elif member.type == 'char' or member.type == 'deUint8':
+						newLine = "'\\n' << "
+						valFmt = "tcu::formatArray(tcu::Format::HexIterator<%s>(DE_ARRAY_BEGIN(value.%s)), tcu::Format::HexIterator<%s>(DE_ARRAY_END(value.%s)))" % (member.type, baseName, member.type, baseName)
+					else:
+						newLine = "'\\n' << "
+						valFmt = "tcu::formatArray(DE_ARRAY_BEGIN(value.%s), DE_ARRAY_END(value.%s))" % (baseName, baseName)
+					memberName = baseName
+				else:
+					valFmt = "value.%s" % member.name
+				yield ("\ts << \"\\t%s = \" << " % memberName) + newLine + valFmt + " << '\\n';"
+			yield "\ts << '}';"
+			yield "\treturn s;"
+			yield "}"
+
+
+	writeInlFile(filename, INL_HEADER, makeStrUtilImpl())
+
+class ConstructorFunction:
+	def __init__ (self, type, name, objectType, iface, arguments):
+		self.type		= type
+		self.name		= name
+		self.objectType	= objectType
+		self.iface		= iface
+		self.arguments	= arguments
+
+def getConstructorFunctions (api):
+	funcs = []
+	for function in api.functions:
+		if (function.name[:8] == "vkCreate" or function.name == "vkAllocateMemory") and not "count" in [a.name for a in function.arguments]:
+			# \todo [pyry] Rather hacky
+			iface = None
+			if function.getType() == Function.TYPE_PLATFORM:
+				iface = Variable("const PlatformInterface&", "vk")
+			elif function.getType() == Function.TYPE_INSTANCE:
+				iface = Variable("const InstanceInterface&", "vk")
+			else:
+				iface = Variable("const DeviceInterface&", "vk")
+
+			assert function.arguments[-2].type == "const VkAllocationCallbacks*"
+
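+			# The last argument of vkCreate*/vkAllocateMemory is the output handle pointer; it is stripped here, since the wrapper returns the handle as a Move<> instead.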
+			objectType	= function.arguments[-1].type.replace("*", "").strip()
+			arguments	= function.arguments[:-1]
+			funcs.append(ConstructorFunction(function.getType(), getInterfaceName(function), objectType, iface, arguments))
+	return funcs
+
+def writeRefUtilProto (api, filename):
+	functions	= getConstructorFunctions(api)
+
+	def makeRefUtilProto ():
+		for line in indentLines(["Move<%s>\t%s\t(%s = DE_NULL);" % (function.objectType, function.name, argListToStr([function.iface] + function.arguments)) for function in functions]):
+			yield line
+
+	writeInlFile(filename, INL_HEADER, makeRefUtilProto())
+
+def writeRefUtilImpl (api, filename):
+	functions = getConstructorFunctions(api)
+
+	def makeRefUtilImpl ():
+		yield "namespace refdetails"
+		yield "{"
+		yield ""
+
+		for function in api.functions:
+			if function.getType() == Function.TYPE_DEVICE \
+			   and (function.name[:9] == "vkDestroy" or function.name == "vkFreeMemory") \
+			   and not function.name == "vkDestroyDevice":
+				objectType = function.arguments[-2].type
+				yield "template<>"
+				yield "void Deleter<%s>::operator() (%s obj) const" % (objectType, objectType)
+				yield "{"
+				yield "\tm_deviceIface->%s(m_device, obj, m_allocator);" % (getInterfaceName(function))
+				yield "}"
+				yield ""
+
+		yield "} // refdetails"
+		yield ""
+
+		for function in functions:
+			dtorObj = "device" if function.type == Function.TYPE_DEVICE else "object"
+
+			yield "Move<%s> %s (%s)" % (function.objectType, function.name, argListToStr([function.iface] + function.arguments))
+			yield "{"
+			yield "\t%s object = 0;" % function.objectType
+			yield "\tVK_CHECK(vk.%s(%s));" % (function.name, ", ".join([a.name for a in function.arguments] + ["&object"]))
+			yield "\treturn Move<%s>(check<%s>(object), Deleter<%s>(%s));" % (function.objectType, function.objectType, function.objectType, ", ".join(["vk", dtorObj, function.arguments[-1].name]))
+			yield "}"
+			yield ""
+
+	writeInlFile(filename, INL_HEADER, makeRefUtilImpl())
+
+def writeNullDriverImpl (api, filename):
+	def genNullDriverImpl ():
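+		# These functions are skipped by the generators below (create/destroy stubs and no-op dummies) and are presumably implemented by hand in the null driver.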
+		specialFuncNames	= [
+				"vkCreateGraphicsPipelines",
+				"vkCreateComputePipelines",
+				"vkGetInstanceProcAddr",
+				"vkGetDeviceProcAddr",
+				"vkEnumeratePhysicalDevices",
+				"vkGetPhysicalDeviceProperties",
+				"vkGetPhysicalDeviceQueueFamilyProperties",
+				"vkGetPhysicalDeviceMemoryProperties",
+				"vkGetPhysicalDeviceFormatProperties",
+				"vkGetBufferMemoryRequirements",
+				"vkGetImageMemoryRequirements",
+				"vkMapMemory",
+				"vkAllocateDescriptorSets",
+				"vkFreeDescriptorSets",
+				"vkResetDescriptorPool",
+				"vkAllocateCommandBuffers",
+				"vkFreeCommandBuffers"
+			]
+		specialFuncs		= [f for f in api.functions if f.name in specialFuncNames]
+		createFuncs			= [f for f in api.functions if (f.name[:8] == "vkCreate" or f.name == "vkAllocateMemory") and not f in specialFuncs]
+		destroyFuncs		= [f for f in api.functions if (f.name[:9] == "vkDestroy" or f.name == "vkFreeMemory") and not f in specialFuncs]
+		dummyFuncs			= [f for f in api.functions if f not in specialFuncs + createFuncs + destroyFuncs]
+
+		def getHandle (name):
+			for handle in api.handles:
+				if handle.name == name:
+					return handle
+			raise Exception("No such handle: %s" % name)
+
+		for function in createFuncs:
+			objectType	= function.arguments[-1].type.replace("*", "").strip()
+			argsStr		= ", ".join([a.name for a in function.arguments[:-1]])
+
+			yield "VKAPI_ATTR %s VKAPI_CALL %s (%s)" % (function.returnType, getInterfaceName(function), argListToStr(function.arguments))
+			yield "{"
+			yield "\tDE_UNREF(%s);" % function.arguments[-2].name
+
+			if getHandle(objectType).type == Handle.TYPE_NONDISP:
+				yield "\tVK_NULL_RETURN((*%s = allocateNonDispHandle<%s, %s>(%s)));" % (function.arguments[-1].name, objectType[2:], objectType, argsStr)
+			else:
+				yield "\tVK_NULL_RETURN((*%s = allocateHandle<%s, %s>(%s)));" % (function.arguments[-1].name, objectType[2:], objectType, argsStr)
+
+			yield "}"
+			yield ""
+
+		for function in destroyFuncs:
+			objectArg	= function.arguments[-2]
+
+			yield "VKAPI_ATTR %s VKAPI_CALL %s (%s)" % (function.returnType, getInterfaceName(function), argListToStr(function.arguments))
+			yield "{"
+			for arg in function.arguments[:-2]:
+				yield "\tDE_UNREF(%s);" % arg.name
+
+			if getHandle(objectArg.type).type == Handle.TYPE_NONDISP:
+				yield "\tfreeNonDispHandle<%s, %s>(%s, %s);" % (objectArg.type[2:], objectArg.type, objectArg.name, function.arguments[-1].name)
+			else:
+				yield "\tfreeHandle<%s, %s>(%s, %s);" % (objectArg.type[2:], objectArg.type, objectArg.name, function.arguments[-1].name)
+
+			yield "}"
+			yield ""
+
+		for function in dummyFuncs:
+			yield "VKAPI_ATTR %s VKAPI_CALL %s (%s)" % (function.returnType, getInterfaceName(function), argListToStr(function.arguments))
+			yield "{"
+			for arg in function.arguments:
+				yield "\tDE_UNREF(%s);" % arg.name
+			if function.returnType != "void":
+				yield "\treturn VK_SUCCESS;"
+			yield "}"
+			yield ""
+
+		def genFuncEntryTable (type, name):
+			funcs = [f for f in api.functions if f.getType() == type]
+
+			yield "static const tcu::StaticFunctionLibrary::Entry %s[] =" % name
+			yield "{"
+			for line in indentLines(["\tVK_NULL_FUNC_ENTRY(%s,\t%s)," % (function.name, getInterfaceName(function)) for function in funcs]):
+				yield line
+			yield "};"
+			yield ""
+
+		# Func tables
+		for line in genFuncEntryTable(Function.TYPE_PLATFORM, "s_platformFunctions"):
+			yield line
+
+		for line in genFuncEntryTable(Function.TYPE_INSTANCE, "s_instanceFunctions"):
+			yield line
+
+		for line in genFuncEntryTable(Function.TYPE_DEVICE, "s_deviceFunctions"):
+			yield line
+
+
+	writeInlFile(filename, INL_HEADER, genNullDriverImpl())
+
+def writeTypeUtil (api, filename):
+	# Structs filled by API queries are not often used in test code
+	QUERY_RESULT_TYPES = set([
+			"VkPhysicalDeviceFeatures",
+			"VkPhysicalDeviceLimits",
+			"VkFormatProperties",
+			"VkImageFormatProperties",
+			"VkPhysicalDeviceSparseProperties",
+			"VkQueueFamilyProperties",
+			"VkMemoryType",
+			"VkMemoryHeap",
+		])
+	COMPOSITE_TYPES = set([t.name for t in api.compositeTypes])
+
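+	# make<Struct>() helpers are only generated for simple structs: no sType member, no array or nested struct/union members, and not one of the query-result types above.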
+	def isSimpleStruct (type):
+		def hasArrayMember (type):
+			for member in type.members:
+				if "[" in member.name:
+					return True
+			return False
+
+		def hasCompositeMember (type):
+			for member in type.members:
+				if member.type in COMPOSITE_TYPES:
+					return True
+			return False
+
+		return type.typeClass == CompositeType.CLASS_STRUCT and \
+			   type.members[0].type != "VkStructureType" and \
+			   not type.name in QUERY_RESULT_TYPES and \
+			   not hasArrayMember(type) and \
+			   not hasCompositeMember(type)
+
+	def gen ():
+		for type in api.compositeTypes:
+			if not isSimpleStruct(type):
+				continue
+
+			yield ""
+			yield "inline %s make%s (%s)" % (type.name, type.name[2:], argListToStr(type.members))
+			yield "{"
+			yield "\t%s res;" % type.name
+			for line in indentLines(["\tres.%s\t= %s;" % (m.name, m.name) for m in type.members]):
+				yield line
+			yield "\treturn res;"
+			yield "}"
+
+	writeInlFile(filename, INL_HEADER, gen())
+
+if __name__ == "__main__":
+	src				= readFile(sys.argv[1])
+	api				= parseAPI(src)
+	platformFuncs	= set([Function.TYPE_PLATFORM])
+	instanceFuncs	= set([Function.TYPE_INSTANCE])
+	deviceFuncs		= set([Function.TYPE_DEVICE])
+
+	writeHandleType				(api, os.path.join(VULKAN_DIR, "vkHandleType.inl"))
+	writeBasicTypes				(api, os.path.join(VULKAN_DIR, "vkBasicTypes.inl"))
+	writeCompositeTypes			(api, os.path.join(VULKAN_DIR, "vkStructTypes.inl"))
+	writeInterfaceDecl			(api, os.path.join(VULKAN_DIR, "vkVirtualPlatformInterface.inl"),		functionTypes = platformFuncs,	concrete = False)
+	writeInterfaceDecl			(api, os.path.join(VULKAN_DIR, "vkVirtualInstanceInterface.inl"),		functionTypes = instanceFuncs,	concrete = False)
+	writeInterfaceDecl			(api, os.path.join(VULKAN_DIR, "vkVirtualDeviceInterface.inl"),			functionTypes = deviceFuncs,	concrete = False)
+	writeInterfaceDecl			(api, os.path.join(VULKAN_DIR, "vkConcretePlatformInterface.inl"),		functionTypes = platformFuncs,	concrete = True)
+	writeInterfaceDecl			(api, os.path.join(VULKAN_DIR, "vkConcreteInstanceInterface.inl"),		functionTypes = instanceFuncs,	concrete = True)
+	writeInterfaceDecl			(api, os.path.join(VULKAN_DIR, "vkConcreteDeviceInterface.inl"),		functionTypes = deviceFuncs,	concrete = True)
+	writeFunctionPtrTypes		(api, os.path.join(VULKAN_DIR, "vkFunctionPointerTypes.inl"))
+	writeFunctionPointers		(api, os.path.join(VULKAN_DIR, "vkPlatformFunctionPointers.inl"),		functionTypes = platformFuncs)
+	writeFunctionPointers		(api, os.path.join(VULKAN_DIR, "vkInstanceFunctionPointers.inl"),		functionTypes = instanceFuncs)
+	writeFunctionPointers		(api, os.path.join(VULKAN_DIR, "vkDeviceFunctionPointers.inl"),			functionTypes = deviceFuncs)
+	writeInitFunctionPointers	(api, os.path.join(VULKAN_DIR, "vkInitPlatformFunctionPointers.inl"),	functionTypes = platformFuncs)
+	writeInitFunctionPointers	(api, os.path.join(VULKAN_DIR, "vkInitInstanceFunctionPointers.inl"),	functionTypes = instanceFuncs)
+	writeInitFunctionPointers	(api, os.path.join(VULKAN_DIR, "vkInitDeviceFunctionPointers.inl"),		functionTypes = deviceFuncs)
+	writeFuncPtrInterfaceImpl	(api, os.path.join(VULKAN_DIR, "vkPlatformDriverImpl.inl"),				functionTypes = platformFuncs,	className = "PlatformDriver")
+	writeFuncPtrInterfaceImpl	(api, os.path.join(VULKAN_DIR, "vkInstanceDriverImpl.inl"),				functionTypes = instanceFuncs,	className = "InstanceDriver")
+	writeFuncPtrInterfaceImpl	(api, os.path.join(VULKAN_DIR, "vkDeviceDriverImpl.inl"),				functionTypes = deviceFuncs,	className = "DeviceDriver")
+	writeStrUtilProto			(api, os.path.join(VULKAN_DIR, "vkStrUtil.inl"))
+	writeStrUtilImpl			(api, os.path.join(VULKAN_DIR, "vkStrUtilImpl.inl"))
+	writeRefUtilProto			(api, os.path.join(VULKAN_DIR, "vkRefUtil.inl"))
+	writeRefUtilImpl			(api, os.path.join(VULKAN_DIR, "vkRefUtilImpl.inl"))
+	writeNullDriverImpl			(api, os.path.join(VULKAN_DIR, "vkNullDriverImpl.inl"))
+	writeTypeUtil				(api, os.path.join(VULKAN_DIR, "vkTypeUtil.inl"))
diff --git a/external/vulkancts/modules/vulkan/CMakeLists.txt b/external/vulkancts/modules/vulkan/CMakeLists.txt
new file mode 100644
index 0000000..d7edecc
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/CMakeLists.txt
@@ -0,0 +1,86 @@
+# dEQP-VK
+
+add_subdirectory(api)
+add_subdirectory(pipeline)
+add_subdirectory(binding_model)
+add_subdirectory(spirv_assembly)
+add_subdirectory(shaderrender)
+add_subdirectory(shaderexecutor)
+add_subdirectory(memory)
+add_subdirectory(ubo)
+add_subdirectory(dynamic_state)
+add_subdirectory(ssbo)
+add_subdirectory(query_pool)
+add_subdirectory(draw)
+add_subdirectory(compute)
+add_subdirectory(image)
+
+include_directories(
+	api
+	pipeline
+	binding_model
+	spirv_assembly
+	shaderrender
+	shaderexecutor
+	memory
+	ubo
+	dynamic_state
+	ssbo
+	query_pool
+	draw
+	compute
+	image
+	)
+
+set(DEQP_VK_COMMON_SRCS
+	vktTestCase.cpp
+	vktTestCase.hpp
+	vktTestCaseUtil.cpp
+	vktTestCaseUtil.hpp
+	vktTestPackage.cpp
+	vktTestPackage.hpp
+	vktShaderLibrary.cpp
+	vktShaderLibrary.hpp
+	vktRenderPassTests.cpp
+	vktRenderPassTests.hpp
+	vktTestGroupUtil.cpp
+	vktTestGroupUtil.hpp
+	vktInfoTests.cpp
+	vktInfoTests.hpp
+	)
+
+set(DEQP_VK_COMMON_LIBS
+	tcutil
+	vkutil
+	glutil
+	deqp-vk-api
+	deqp-vk-pipeline
+	deqp-vk-binding-model
+	deqp-vk-spirv-assembly
+	deqp-vk-shaderrender
+	deqp-vk-shaderexecutor
+	deqp-vk-memory
+	deqp-vk-ubo
+	deqp-vk-dynamic-state
+	deqp-vk-ssbo
+	deqp-vk-query-pool
+	deqp-vk-draw
+	deqp-vk-compute
+	deqp-vk-image
+	)
+
+add_library(deqp-vk-common STATIC ${DEQP_VK_COMMON_SRCS})
+target_link_libraries(deqp-vk-common ${DEQP_VK_COMMON_LIBS})
+add_dependencies(deqp-vk-common deqp-vk-data)
+
+set(DEQP_VK_SRCS	)
+set(DEQP_VK_LIBS	deqp-vk-common)
+
+if (DE_OS_IS_WIN32 OR DE_OS_IS_UNIX OR DE_OS_IS_OSX)
+	add_executable(vk-build-programs vktBuildPrograms.cpp)
+	target_link_libraries(vk-build-programs deqp-vk-common)
+endif ()
+
+add_deqp_module(deqp-vk "${DEQP_VK_SRCS}" "${DEQP_VK_LIBS}" vktTestPackageEntry.cpp)
+
+add_data_dir(deqp-vk ../../data/vulkan	vulkan)
diff --git a/external/vulkancts/modules/vulkan/api/CMakeLists.txt b/external/vulkancts/modules/vulkan/api/CMakeLists.txt
new file mode 100644
index 0000000..b189675
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/CMakeLists.txt
@@ -0,0 +1,39 @@
+# API layer tests
+
+include_directories(..)
+
+set(DEQP_VK_API_SRCS
+	vktApiTests.cpp
+	vktApiTests.hpp
+	vktApiSmokeTests.cpp
+	vktApiSmokeTests.hpp
+	vktApiDeviceInitializationTests.cpp
+	vktApiDeviceInitializationTests.hpp
+	vktApiObjectManagementTests.cpp
+	vktApiObjectManagementTests.hpp
+	vktApiBufferTests.cpp
+	vktApiBufferTests.hpp
+	vktApiBufferViewCreateTests.cpp
+	vktApiBufferViewCreateTests.hpp
+	vktApiBufferViewAccessTests.cpp
+	vktApiBufferViewAccessTests.hpp
+	vktApiFeatureInfo.cpp
+	vktApiFeatureInfo.hpp
+	vktApiCommandBuffersTests.cpp
+	vktApiCommandBuffersTests.hpp
+	vktApiComputeInstanceResultBuffer.cpp
+	vktApiComputeInstanceResultBuffer.hpp
+	vktApiBufferComputeInstance.cpp
+	vktApiBufferComputeInstance.hpp
+	vktApiCopiesAndBlittingTests.cpp
+	vktApiCopiesAndBlittingTests.hpp
+	)
+
+set(DEQP_VK_API_LIBS
+	deqp-vk-common
+	tcutil
+	vkutil
+	)
+
+add_library(deqp-vk-api STATIC ${DEQP_VK_API_SRCS})
+target_link_libraries(deqp-vk-api ${DEQP_VK_API_LIBS})
diff --git a/external/vulkancts/modules/vulkan/api/vktApiBufferComputeInstance.cpp b/external/vulkancts/modules/vulkan/api/vktApiBufferComputeInstance.cpp
new file mode 100644
index 0000000..7d44b9d
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiBufferComputeInstance.cpp
@@ -0,0 +1,163 @@
+/*-------------------------------------------------------------------------
+* Vulkan Conformance Tests
+* ------------------------
+*
+* Copyright (c) 2015 The Khronos Group Inc.
+* Copyright (c) 2015 Samsung Electronics Co., Ltd.
+* Copyright (c) 2015 Google Inc.
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and/or associated documentation files (the
+* "Materials"), to deal in the Materials without restriction, including
+* without limitation the rights to use, copy, modify, merge, publish,
+* distribute, sublicense, and/or sell copies of the Materials, and to
+* permit persons to whom the Materials are furnished to do so, subject to
+* the following conditions:
+*
+* The above copyright notice(s) and this permission notice shall be
+* included in all copies or substantial portions of the Materials.
+*
+* The Materials are Confidential Information as defined by the
+* Khronos Membership Agreement until designated non-confidential by
+* Khronos, at which point this condition clause shall be removed.
+*
+* THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+* MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+*
+*//*--------------------------------------------------------------------*/
+
+#include "vktApiBufferComputeInstance.hpp"
+#include "vktApiComputeInstanceResultBuffer.hpp"
+#include "vkRefUtil.hpp"
+#include "vkBuilderUtil.hpp"
+#include "vkTypeUtil.hpp"
+
+namespace vkt
+{
+namespace api
+{
+
+using namespace vk;
+
+Move<VkBuffer> createColorDataBuffer (deUint32 offset,
+									  deUint32 bufferSize,
+									  const tcu::Vec4& color1,
+									  const tcu::Vec4& color2,
+									  de::MovePtr<Allocation>* outAllocation,
+									  vkt::Context& context)
+{
+	const DeviceInterface&					vki						= context.getDeviceInterface();
+	const VkDevice							device					= context.getDevice();
+	Allocator&								allocator				= context.getDefaultAllocator();
+
+	DE_ASSERT(offset + sizeof(tcu::Vec4[2]) <= bufferSize);
+
+	const VkBufferUsageFlags				usageFlags				= (VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+	const VkBufferCreateInfo				createInfo				=
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+		(VkDeviceSize) bufferSize,									// size
+		usageFlags,													// usage
+		VK_SHARING_MODE_EXCLUSIVE,									// sharingMode
+		0u,															// queueFamilyCount
+		DE_NULL,													// pQueueFamilyIndices
+	};
+	Move<VkBuffer> buffer(createBuffer(vki, device, &createInfo));
+
+	const VkMemoryRequirements				requirements			= getBufferMemoryRequirements(vki, device, *buffer);
+	de::MovePtr<Allocation>					allocation				= allocator.allocate(requirements, MemoryRequirement::HostVisible);
+
+	VK_CHECK(vki.bindBufferMemory(device, *buffer, allocation->getMemory(), allocation->getOffset()));
+
+	void*									mapPtr					= allocation->getHostPtr();
+
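+	// Buffer contents: 0x5A padding up to 'offset', then color1 and color2, then 0x5A padding to the end of the buffer.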
+	if (offset)
+		deMemset(mapPtr, 0x5A, (size_t) offset);
+
+	deMemcpy((deUint8 *) mapPtr + offset, color1.getPtr(), sizeof(tcu::Vec4));
+	deMemcpy((deUint8 *) mapPtr + offset + sizeof(tcu::Vec4), color2.getPtr(), sizeof(tcu::Vec4));
+	deMemset((deUint8 *) mapPtr + offset + 2 * sizeof(tcu::Vec4), 0x5A,
+			 (size_t) bufferSize - (size_t) offset - 2 * sizeof(tcu::Vec4));
+
+	flushMappedMemoryRange(vki, device, allocation->getMemory(), allocation->getOffset(), bufferSize);
+
+	*outAllocation = allocation;
+	return buffer;
+}
+
+Move<VkDescriptorSetLayout> createDescriptorSetLayout (vkt::Context& context)
+{
+
+	const DeviceInterface&					vki						= context.getDeviceInterface();
+	const VkDevice							device					= context.getDevice();
+
+	DescriptorSetLayoutBuilder				builder;
+
+	builder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT);
+	builder.addSingleBinding(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT);
+
+	return builder.build(vki, device);
+}
+
+Move<VkDescriptorPool> createDescriptorPool (vkt::Context& context)
+{
+	const DeviceInterface&					vki						= context.getDeviceInterface();
+	const VkDevice							device					= context.getDevice();
+
+	return vk::DescriptorPoolBuilder()
+		.addType(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.addType(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1u)
+		.build(vki, device, vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1);
+}
+
+Move<VkDescriptorSet> createDescriptorSet (VkDescriptorPool pool,
+										   VkDescriptorSetLayout layout,
+										   VkBuffer viewA,
+										   deUint32 offsetA,
+										   VkBuffer viewB,
+										   deUint32 offsetB,
+										   VkBuffer resBuf,
+										   vkt::Context& context)
+{
+	const DeviceInterface&					vki						= context.getDeviceInterface();
+	const VkDevice							device					= context.getDevice();
+
+	const vk::VkDescriptorBufferInfo		resultInfo				= makeDescriptorBufferInfo(resBuf, 0u, (vk::VkDeviceSize) ComputeInstanceResultBuffer::DATA_SIZE);
+	const vk::VkDescriptorBufferInfo		bufferInfos[2]			=
+	{
+		vk::makeDescriptorBufferInfo(viewA, (vk::VkDeviceSize)offsetA, (vk::VkDeviceSize)sizeof(tcu::Vec4[2])),
+		vk::makeDescriptorBufferInfo(viewB, (vk::VkDeviceSize)offsetB, (vk::VkDeviceSize)sizeof(tcu::Vec4[2])),
+	};
+
+	const vk::VkDescriptorSetAllocateInfo	allocInfo				=
+	{
+		vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+		DE_NULL,
+		pool,
+		1u,
+		&layout
+	};
+	vk::Move<vk::VkDescriptorSet>			descriptorSet			= allocateDescriptorSet(vki, device, &allocInfo);
+
+	DescriptorSetUpdateBuilder builder;
+
+	// result
+	builder.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &resultInfo);
+
+	// buffers
+	builder.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, &bufferInfos[0]);
+
+	builder.update(vki, device);
+	return descriptorSet;
+}
+
+} // api
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/api/vktApiBufferComputeInstance.hpp b/external/vulkancts/modules/vulkan/api/vktApiBufferComputeInstance.hpp
new file mode 100644
index 0000000..db2e061
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiBufferComputeInstance.hpp
@@ -0,0 +1,69 @@
+#ifndef _VKTAPIBUFFERCOMPUTEINSTANCE_HPP
+#define _VKTAPIBUFFERCOMPUTEINSTANCE_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuVectorType.hpp"
+#include "vkRef.hpp"
+#include "vkMemUtil.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace api
+{
+
+vk::Move<vk::VkBuffer>					createColorDataBuffer (	deUint32 offset,
+																deUint32 bufferSize,
+																const tcu::Vec4& color1,
+																const tcu::Vec4& color2,
+																de::MovePtr<vk::Allocation>* outAllocation,
+																vkt::Context& context);
+
+vk::Move<vk::VkDescriptorSetLayout>		createDescriptorSetLayout (vkt::Context& context);
+
+vk::Move<vk::VkDescriptorPool>			createDescriptorPool (vkt::Context& context);
+
+vk::Move<vk::VkDescriptorSet>			createDescriptorSet (vk::VkDescriptorPool pool,
+															  vk::VkDescriptorSetLayout layout,
+															  vk::VkBuffer viewA, deUint32 offsetA,
+															  vk::VkBuffer viewB,
+															  deUint32 offsetB,
+															  vk::VkBuffer resBuf,
+															  vkt::Context& context);
+
+} // api
+} // vkt
+
+#endif // _VKTAPIBUFFERCOMPUTEINSTANCE_HPP
diff --git a/external/vulkancts/modules/vulkan/api/vktApiBufferTests.cpp b/external/vulkancts/modules/vulkan/api/vktApiBufferTests.cpp
new file mode 100644
index 0000000..474e646
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiBufferTests.cpp
@@ -0,0 +1,329 @@
+/*------------------------------------------------------------------------
+ *  Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Buffers Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktApiBufferTests.hpp"
+
+#include "deStringUtil.hpp"
+#include "gluVarType.hpp"
+#include "tcuTestLog.hpp"
+#include "vkPrograms.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRefUtil.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+
+using namespace vk;
+
+namespace api
+{
+
+namespace
+{
+
+struct BufferCaseParameters
+{
+	VkBufferUsageFlags	usage;
+	VkBufferCreateFlags	flags;
+	VkSharingMode		sharingMode;
+};
+
+class BufferTestInstance : public TestInstance
+{
+public:
+								BufferTestInstance			(Context&				ctx,
+															 BufferCaseParameters	testCase)
+									: TestInstance	(ctx)
+									, m_testCase	(testCase)
+								{}
+	virtual tcu::TestStatus		iterate						(void);
+	tcu::TestStatus				bufferCreateAndAllocTest	(VkDeviceSize		size);
+
+private:
+	BufferCaseParameters		m_testCase;
+};
+
+class BuffersTestCase : public TestCase
+{
+public:
+							BuffersTestCase		(tcu::TestContext&		testCtx,
+												 const std::string&		name,
+												 const std::string&		description,
+												 BufferCaseParameters	testCase)
+								: TestCase(testCtx, name, description)
+								, m_testCase(testCase)
+							{}
+
+	virtual					~BuffersTestCase	(void) {}
+	virtual TestInstance*	createInstance		(Context&				ctx) const
+							{
+								tcu::TestLog& log	= m_testCtx.getLog();
+								log << tcu::TestLog::Message << getBufferUsageFlagsStr(m_testCase.usage) << tcu::TestLog::EndMessage;
+								return new BufferTestInstance(ctx, m_testCase);
+							}
+
+private:
+	BufferCaseParameters		m_testCase;
+};
+
+tcu::TestStatus BufferTestInstance::bufferCreateAndAllocTest (VkDeviceSize size)
+{
+	const VkDevice			vkDevice			= m_context.getDevice();
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	Move<VkBuffer>			testBuffer;
+	VkMemoryRequirements	memReqs;
+	Move<VkDeviceMemory>	memory;
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+
+	// Create buffer
+	{
+		const VkBufferCreateInfo		bufferParams		=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+			DE_NULL,
+			m_testCase.flags,
+			size,
+			m_testCase.usage,
+			m_testCase.sharingMode,
+			1u,										//	deUint32			queueFamilyCount;
+			&queueFamilyIndex,
+		};
+
+		try
+		{
+			testBuffer = createBuffer(vk, vkDevice, &bufferParams, (const VkAllocationCallbacks*)DE_NULL);
+		}
+		catch (const vk::Error& error)
+		{
+			return tcu::TestStatus::fail("Buffer creation failed! (requested memory size: " + de::toString(size) + ", Error code: " + de::toString(error.getMessage()) + ")");
+		}
+
+		vk.getBufferMemoryRequirements(vkDevice, *testBuffer, &memReqs);
+
+		if (size > memReqs.size)
+		{
+			std::ostringstream errorMsg;
+			errorMsg << "Required memory size (" << memReqs.size << " bytes) is smaller than the buffer's size (" << size << " bytes)!";
+			return tcu::TestStatus::fail(errorMsg.str());
+		}
+	}
+
+	// Allocate and bind memory
+	{
+		const VkMemoryAllocateInfo memAlloc =
+		{
+			VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+			NULL,
+			memReqs.size,
+			0										//	deUint32		memoryTypeIndex
+		};
+
+		try
+		{
+			memory = allocateMemory(vk, vkDevice, &memAlloc, (const VkAllocationCallbacks*)DE_NULL);
+		}
+		catch (const vk::Error& error)
+		{
+			return tcu::TestStatus::fail("Alloc memory failed! (requested memory size: " + de::toString(size) + ", Error code: " + de::toString(error.getMessage()) + ")");
+		}
+
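+		// Sparse buffers cannot be bound with vkBindBufferMemory; they are bound on a queue with vkQueueBindSparse and the test waits on a fence for the bind to complete.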
+		if ((m_testCase.flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT) ||
+			(m_testCase.flags & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT) ||
+			(m_testCase.flags & VK_BUFFER_CREATE_SPARSE_ALIASED_BIT))
+		{
+			VkQueue queue												= 0;
+
+			vk.getDeviceQueue(vkDevice, queueFamilyIndex, 0, &queue);
+
+			const VkSparseMemoryBind			sparseMemoryBind		=
+			{
+				0,										// VkDeviceSize								resourceOffset;
+				memReqs.size,							// VkDeviceSize								size;
+				*memory,								// VkDeviceMemory							memory;
+				0,										// VkDeviceSize								memoryOffset;
+				0										// VkSparseMemoryBindFlags					flags;
+			};
+
+			const VkSparseBufferMemoryBindInfo	sparseBufferMemoryBindInfo	=
+			{
+				*testBuffer,							// VkBuffer									buffer;
+				1u,										// deUint32									bindCount;
+				&sparseMemoryBind						// const VkSparseMemoryBind*				pBinds;
+			};
+
+			const VkBindSparseInfo				bindSparseInfo			=
+			{
+				VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,		// VkStructureType							sType;
+				DE_NULL,								// const void*								pNext;
+				0,										// deUint32									waitSemaphoreCount;
+				DE_NULL,								// const VkSemaphore*						pWaitSemaphores;
+				1u,										// deUint32									bufferBindCount;
+				&sparseBufferMemoryBindInfo,			// const VkSparseBufferMemoryBindInfo*		pBufferBinds;
+				0,										// deUint32									imageOpaqueBindCount;
+				DE_NULL,								// const VkSparseImageOpaqueMemoryBindInfo*	pImageOpaqueBinds;
+				0,										// deUint32									imageBindCount;
+				DE_NULL,								// const VkSparseImageMemoryBindInfo*		pImageBinds;
+				0,										// deUint32									signalSemaphoreCount;
+				DE_NULL,								// const VkSemaphore*						pSignalSemaphores;
+			};
+
+			const VkFenceCreateInfo fenceParams =
+			{
+				VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+				DE_NULL,								// const void*			pNext;
+				0u										// VkFenceCreateFlags	flags;
+			};
+
+			const vk::Unique<vk::VkFence> fence(vk::createFence(vk, vkDevice, &fenceParams));
+
+			VK_CHECK(vk.resetFences(vkDevice, 1, &fence.get()));
+			if (vk.queueBindSparse(queue, 1, &bindSparseInfo, *fence) != VK_SUCCESS)
+				return tcu::TestStatus::fail("Bind sparse buffer memory failed! (requested memory size: " + de::toString(size) + ")");
+
+			VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), VK_TRUE, ~(0ull) /* infinity */));
+		}
+		else if (vk.bindBufferMemory(vkDevice, *testBuffer, *memory, 0) != VK_SUCCESS)
+			return tcu::TestStatus::fail("Bind buffer memory failed! (requested memory size: " + de::toString(size) + ")");
+	}
+
+	return tcu::TestStatus::pass("Buffer test");
+}
+
+tcu::TestStatus BufferTestInstance::iterate (void)
+{
+	const VkPhysicalDeviceFeatures&	physicalDeviceFeatures	= m_context.getDeviceFeatures();
+
+	if ((m_testCase.flags & VK_BUFFER_CREATE_SPARSE_BINDING_BIT ) && !physicalDeviceFeatures.sparseBinding)
+		TCU_THROW(NotSupportedError, "Sparse bindings feature is not supported");
+
+	if ((m_testCase.flags & VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT ) && !physicalDeviceFeatures.sparseResidencyBuffer)
+		TCU_THROW(NotSupportedError, "Sparse buffer residency feature is not supported");
+
+	if ((m_testCase.flags & VK_BUFFER_CREATE_SPARSE_ALIASED_BIT ) && !physicalDeviceFeatures.sparseResidencyAliased)
+		TCU_THROW(NotSupportedError, "Sparse aliased residency feature is not supported");
+
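+	// Sizes under test: a one-byte buffer, two odd non-power-of-two sizes and one power-of-two size.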
+	const VkDeviceSize testSizes[] =
+	{
+		1,
+		1181,
+		15991,
+		16384
+	};
+	tcu::TestStatus					testStatus			= tcu::TestStatus::pass("Buffer test");
+
+	for (int i = 0; i < DE_LENGTH_OF_ARRAY(testSizes); i++)
+	{
+		if ((testStatus = bufferCreateAndAllocTest(testSizes[i])).getCode() != QP_TEST_RESULT_PASS)
+			return testStatus;
+	}
+
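+	// For uniform and storage buffers, additionally try a large allocation capped by maxTexelBufferElements and a sixteenth of the heap backing memory type 0, to keep the request realistic.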
+	if (m_testCase.usage & (VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT))
+	{
+		const VkPhysicalDevice					vkPhysicalDevice	= m_context.getPhysicalDevice();
+		const InstanceInterface&				vkInstance			= m_context.getInstanceInterface();
+		const VkPhysicalDeviceMemoryProperties	memoryProperties	= getPhysicalDeviceMemoryProperties(vkInstance, vkPhysicalDevice);
+		VkPhysicalDeviceProperties	props;
+
+		vkInstance.getPhysicalDeviceProperties(vkPhysicalDevice, &props);
+
+		const VkDeviceSize maxTestBufferSize = de::min((VkDeviceSize) props.limits.maxTexelBufferElements, memoryProperties.memoryHeaps[memoryProperties.memoryTypes[0].heapIndex].size / 16);
+
+		testStatus = bufferCreateAndAllocTest(maxTestBufferSize);
+	}
+
+	return testStatus;
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createBufferTests (tcu::TestContext& testCtx)
+{
+	const VkBufferUsageFlags bufferUsageModes[] =
+	{
+		VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+		VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+		VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
+		VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
+		VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+		VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+		VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
+		VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
+		VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT
+	};
+
+	const VkBufferCreateFlags bufferCreateFlags[] =
+	{
+		VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
+		VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
+		VK_BUFFER_CREATE_SPARSE_ALIASED_BIT
+	};
+
+	de::MovePtr<tcu::TestCaseGroup>	buffersTests	(new tcu::TestCaseGroup(testCtx, "buffer", "Buffer Tests"));
+
+	deUint32	numberOfBufferUsageFlags			= DE_LENGTH_OF_ARRAY(bufferUsageModes);
+	deUint32	numberOfBufferCreateFlags			= DE_LENGTH_OF_ARRAY(bufferCreateFlags);
+	deUint32	maximumValueOfBufferUsageFlags		= (1 << (numberOfBufferUsageFlags - 1)) - 1;
+	deUint32	maximumValueOfBufferCreateFlags		= (1 << (numberOfBufferCreateFlags)) - 1;
+
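+	// Enumerate every create-flag combination (including none) against every non-empty usage-flag combination.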
+	for (deUint32 combinedBufferCreateFlags = 0; combinedBufferCreateFlags <= maximumValueOfBufferCreateFlags; combinedBufferCreateFlags++)
+	{
+		for (deUint32 combinedBufferUsageFlags = 1; combinedBufferUsageFlags <= maximumValueOfBufferUsageFlags; combinedBufferUsageFlags++)
+		{
+			if (combinedBufferCreateFlags == VK_BUFFER_CREATE_SPARSE_ALIASED_BIT)
+			{
+				// spec says: If flags contains VK_BUFFER_CREATE_SPARSE_ALIASED_BIT, it must also contain at least one of
+				// VK_BUFFER_CREATE_SPARSE_BINDING_BIT or VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT
+				continue;
+			}
+			BufferCaseParameters	testParams =
+			{
+				combinedBufferUsageFlags,
+				combinedBufferCreateFlags,
+				VK_SHARING_MODE_EXCLUSIVE
+			};
+			std::ostringstream	testName;
+			std::ostringstream	testDescription;
+			testName << "createBuffer_" << combinedBufferUsageFlags << "_" << combinedBufferCreateFlags;
+			testDescription << "vkCreateBuffer test " << combinedBufferUsageFlags << " " << combinedBufferCreateFlags;
+			buffersTests->addChild(new BuffersTestCase(testCtx, testName.str(), testDescription.str(), testParams));
+		}
+	}
+
+	return buffersTests.release();
+}
+
+} // api
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/api/vktApiBufferTests.hpp b/external/vulkancts/modules/vulkan/api/vktApiBufferTests.hpp
new file mode 100644
index 0000000..1fdc071
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiBufferTests.hpp
@@ -0,0 +1,52 @@
+#ifndef _VKTAPIBUFFERTESTS_HPP
+#define _VKTAPIBUFFERTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Buffers Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+
+namespace api
+{
+
+tcu::TestCaseGroup*		createBufferTests		(tcu::TestContext& testCtx);
+
+} // api
+} // vkt
+
+#endif // _VKTAPIBUFFERTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/api/vktApiBufferViewAccessTests.cpp b/external/vulkancts/modules/vulkan/api/vktApiBufferViewAccessTests.cpp
new file mode 100644
index 0000000..f44908e
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiBufferViewAccessTests.cpp
@@ -0,0 +1,956 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Buffer View Memory Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktApiBufferViewAccessTests.hpp"
+
+#include "deStringUtil.hpp"
+#include "deUniquePtr.hpp"
+#include "vktTestCase.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkTypeUtil.hpp"
+#include "tcuImageCompare.hpp"
+#include "tcuTexture.hpp"
+#include "tcuTextureUtil.hpp"
+
+namespace vkt
+{
+
+namespace api
+{
+
+using namespace vk;
+
+namespace
+{
+
+struct BufferViewCaseParams
+{
+	deUint32	bufferSize;
+	deUint32	bufferViewSize;
+	deUint32	elementOffset;
+};
+
+class BufferViewTestInstance : public vkt::TestInstance
+{
+public:
+										BufferViewTestInstance		(Context&				context,
+																	 BufferViewCaseParams	testCase);
+	virtual								~BufferViewTestInstance		(void);
+	virtual tcu::TestStatus				iterate						(void);
+
+private:
+	void								createQuad					(void);
+	tcu::TestStatus						checkResult					(deInt8	factor = 1);
+
+private:
+	BufferViewCaseParams				m_testCase;
+
+	const tcu::IVec2					m_renderSize;
+	const VkFormat						m_colorFormat;
+
+	const VkDeviceSize					m_pixelDataSize;
+
+	Move<VkImage>						m_colorImage;
+	de::MovePtr<Allocation>				m_colorImageAlloc;
+	Move<VkImageView>					m_colorAttachmentView;
+	Move<VkRenderPass>					m_renderPass;
+	Move<VkFramebuffer>					m_framebuffer;
+
+	Move<VkDescriptorSetLayout>			m_descriptorSetLayout;
+	Move<VkDescriptorPool>				m_descriptorPool;
+	Move<VkDescriptorSet>				m_descriptorSet;
+
+	Move<VkBuffer>						m_uniformBuffer;
+	de::MovePtr<vk::Allocation>			m_uniformBufferAlloc;
+	Move<VkBufferView>					m_uniformBufferView;
+
+	Move<VkShaderModule>				m_vertexShaderModule;
+	Move<VkShaderModule>				m_fragmentShaderModule;
+
+	Move<VkBuffer>						m_vertexBuffer;
+	std::vector<tcu::Vec4>				m_vertices;
+	de::MovePtr<Allocation>				m_vertexBufferAlloc;
+
+	Move<VkPipelineLayout>				m_pipelineLayout;
+	Move<VkPipeline>					m_graphicsPipelines;
+
+	Move<VkCommandPool>					m_cmdPool;
+	Move<VkCommandBuffer>				m_cmdBuffer;
+
+	Move<VkBuffer>						m_resultBuffer;
+	de::MovePtr<Allocation>				m_resultBufferAlloc;
+
+	Move<VkFence>						m_fence;
+};
+
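+// Fill 'uniformData' with the sequence factor * i so each texel value can be predicted from its index.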
+static void generateBuffer (std::vector<deUint32>& uniformData, deUint32 bufferSize, deInt8 factor = 1)
+{
+	for (deUint32 i = 0; i < bufferSize; ++i)
+		uniformData.push_back(factor * i);
+}
+
+void BufferViewTestInstance::createQuad (void)
+{
+	tcu::Vec4 a(-1.0, -1.0, 0.0, 1.0);
+	tcu::Vec4 b(1.0, -1.0, 0.0, 1.0);
+	tcu::Vec4 c(1.0, 1.0, 0.0, 1.0);
+	tcu::Vec4 d(-1.0, 1.0, 0.0, 1.0);
+
+	// Triangle 1
+	m_vertices.push_back(a);
+	m_vertices.push_back(c);
+	m_vertices.push_back(b);
+
+	// Triangle 2
+	m_vertices.push_back(c);
+	m_vertices.push_back(a);
+	m_vertices.push_back(d);
+}
+
+BufferViewTestInstance::~BufferViewTestInstance	(void)
+{
+}
+
+BufferViewTestInstance::BufferViewTestInstance (Context& context, BufferViewCaseParams testCase)
+	: vkt::TestInstance		(context)
+	, m_testCase			(testCase)
+	, m_renderSize			(testCase.bufferViewSize, testCase.bufferViewSize)
+	, m_colorFormat			(VK_FORMAT_R32_UINT)
+	, m_pixelDataSize		(m_renderSize.x() * m_renderSize.y() * mapVkFormat(m_colorFormat).getPixelSize())
+{
+	const DeviceInterface&		vk					= context.getDeviceInterface();
+	const VkDevice				vkDevice			= context.getDevice();
+	const deUint32				queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc			(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+	const VkComponentMapping	channelMappingRGBA	= { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A };
+
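+	// The test draws a full-screen quad whose fragment shader fetches texel gl_FragCoord.x from a uniform texel buffer view and writes it to an R32_UINT attachment; the image is then copied to a host-visible buffer and verified on the CPU.
+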
+	// Create color image
+	{
+		const VkImageCreateInfo colorImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,										// VkStructureType		sType;
+			DE_NULL,																	// const void*			pNext;
+			0u,																			// VkImageCreateFlags	flags;
+			VK_IMAGE_TYPE_2D,															// VkImageType			imageType;
+			m_colorFormat,																// VkFormat				format;
+			{ (deUint32)m_renderSize.x(), (deUint32)m_renderSize.y(), 1u },				// VkExtent3D			extent;
+			1u,																			// deUint32				mipLevels;
+			1u,																			// deUint32				arraySize;
+			VK_SAMPLE_COUNT_1_BIT,														// deUint32				samples;
+			VK_IMAGE_TILING_OPTIMAL,													// VkImageTiling		tiling;
+			VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,		// VkImageUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,													// VkSharingMode		sharingMode;
+			1u,																			// deUint32				queueFamilyCount;
+			&queueFamilyIndex,															// const deUint32*		pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED,													// VkImageLayout		initialLayout;
+		};
+
+		m_colorImage			= createImage(vk, vkDevice, &colorImageParams);
+
+		// Allocate and bind color image memory
+		m_colorImageAlloc		= memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_colorImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_colorImage, m_colorImageAlloc->getMemory(), m_colorImageAlloc->getOffset()));
+	}
+
+	// Create destination buffer
+	{
+		const VkBufferCreateInfo bufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			m_pixelDataSize,							// VkDeviceSize			size;
+			VK_BUFFER_USAGE_TRANSFER_DST_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			0u,											// deUint32				queueFamilyCount;
+			DE_NULL,									// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_resultBuffer		= createBuffer(vk, vkDevice, &bufferParams);
+		m_resultBufferAlloc = memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_resultBuffer), MemoryRequirement::HostVisible);
+
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_resultBuffer, m_resultBufferAlloc->getMemory(), m_resultBufferAlloc->getOffset()));
+	}
+
+	// Create color attachment view
+	{
+		const VkImageViewCreateInfo colorAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			0u,													// VkImageViewCreateFlags	flags;
+			*m_colorImage,										// VkImage					image;
+			VK_IMAGE_VIEW_TYPE_2D,								// VkImageViewType			viewType;
+			m_colorFormat,										// VkFormat					format;
+			channelMappingRGBA,									// VkChannelMapping			channels;
+			{ VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u },		// VkImageSubresourceRange	subresourceRange;
+		};
+
+		m_colorAttachmentView = createImageView(vk, vkDevice, &colorAttachmentViewParams);
+	}
+
+	// Create render pass
+	{
+		const VkAttachmentDescription colorAttachmentDescription =
+		{
+			0u,													// VkAttachmentDescriptionFlags	flags;
+			m_colorFormat,										// VkFormat						format;
+			VK_SAMPLE_COUNT_1_BIT,								// deUint32						samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp			loadOp;
+			VK_ATTACHMENT_STORE_OP_STORE,						// VkAttachmentStoreOp			storeOp;
+			VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// VkAttachmentLoadOp			stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp			stencilStoreOp;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout				initialLayout;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout				finalLayout;
+		};
+
+		const VkAttachmentReference colorAttachmentReference =
+		{
+			0u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout	layout;
+		};
+
+		const VkSubpassDescription subpassDescription =
+		{
+			0u,													// VkSubpassDescriptionFlags		flags;
+			VK_PIPELINE_BIND_POINT_GRAPHICS,					// VkPipelineBindPoint				pipelineBindPoint;
+			0u,													// deUint32							inputCount;
+			DE_NULL,											// const VkAttachmentReference*		pInputAttachments;
+			1u,													// deUint32							colorCount;
+			&colorAttachmentReference,							// const VkAttachmentReference*		pColorAttachments;
+			DE_NULL,											// const VkAttachmentReference*		pResolveAttachments;
+			DE_NULL,											// const VkAttachmentReference*		pDepthStencilAttachment;
+			0u,													// deUint32							preserveCount;
+			DE_NULL												// const VkAttachmentReference*		pPreserveAttachments;
+		};
+
+		const VkRenderPassCreateInfo renderPassParams =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,			// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			(VkRenderPassCreateFlags)0,
+			1u,													// deUint32							attachmentCount;
+			&colorAttachmentDescription,						// const VkAttachmentDescription*	pAttachments;
+			1u,													// deUint32							subpassCount;
+			&subpassDescription,								// const VkSubpassDescription*		pSubpasses;
+			0u,													// deUint32							dependencyCount;
+			DE_NULL												// const VkSubpassDependency*		pDependencies;
+		};
+
+		m_renderPass = createRenderPass(vk, vkDevice, &renderPassParams);
+	}
+
+	// Create framebuffer
+	{
+		const VkImageView attachmentBindInfos[1] =
+		{
+			*m_colorAttachmentView,
+		};
+
+		const VkFramebufferCreateInfo framebufferParams =
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,			// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			(VkFramebufferCreateFlags)0,
+			*m_renderPass,										// VkRenderPass					renderPass;
+			1u,													// deUint32						attachmentCount;
+			attachmentBindInfos,								// const VkImageView*			pAttachments;
+			(deUint32)m_renderSize.x(),							// deUint32						width;
+			(deUint32)m_renderSize.y(),							// deUint32						height;
+			1u													// deUint32						layers;
+		};
+
+		m_framebuffer = createFramebuffer(vk, vkDevice, &framebufferParams);
+	}
+
+	// Create descriptors
+	{
+		const VkDescriptorSetLayoutBinding layoutBindings[1] =
+		{
+			{
+				0u,											// deUint32				binding;
+				VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,	// VkDescriptorType		descriptorType;
+				1u,											// deUint32				arraySize;
+				VK_SHADER_STAGE_ALL,						// VkShaderStageFlags	stageFlags;
+				DE_NULL										// const VkSampler*		pImmutableSamplers;
+			},
+		};
+
+		const VkDescriptorSetLayoutCreateInfo descriptorLayoutParams =
+		{
+			VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,	// VkStructureType						sType;
+			DE_NULL,												// const void*							pNext;
+			(VkDescriptorSetLayoutCreateFlags)0,
+			DE_LENGTH_OF_ARRAY(layoutBindings),						// deUint32								count;
+			layoutBindings											// const VkDescriptorSetLayoutBinding	pBinding;
+		};
+
+		m_descriptorSetLayout = createDescriptorSetLayout(vk, vkDevice, &descriptorLayoutParams);
+
+		// Generate buffer
+		std::vector<deUint32> uniformData;
+		generateBuffer(uniformData, testCase.bufferSize);
+
+		const VkDeviceSize uniformSize = testCase.bufferSize * sizeof(deUint32);
+		const VkBufferCreateInfo uniformBufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags; <-- TODO: 0u?
+			uniformSize,								// VkDeviceSize			size;
+			VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,	// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_uniformBuffer			= createBuffer(vk, vkDevice, &uniformBufferParams);
+		m_uniformBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_uniformBuffer), MemoryRequirement::HostVisible);
+
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_uniformBuffer, m_uniformBufferAlloc->getMemory(), 0));
+		deMemcpy(m_uniformBufferAlloc->getHostPtr(), uniformData.data(), (size_t)uniformSize);
+
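+		// View into the uniform texel buffer, offset by elementOffset texels and covering bufferViewSize texels.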
+		const VkBufferViewCreateInfo viewInfo =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,							// VkStructureType	sType;
+			DE_NULL,															// void*			pNext;
+			(VkBufferViewCreateFlags)0,
+			*m_uniformBuffer,													// VkBuffer			buffer;
+			m_colorFormat,														// VkFormat			format;
+			m_testCase.elementOffset * sizeof(deUint32),						// VkDeviceSize		offset;
+			m_testCase.bufferViewSize * sizeof(deUint32)						// VkDeviceSize		range;
+		};
+
+		m_uniformBufferView = createBufferView(vk, vkDevice, &viewInfo);
+
+		const VkDescriptorPoolSize descriptorTypes[1] =
+		{
+			{
+				VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,		// VkDescriptorType		type;
+				1												// deUint32				count;
+			}
+		};
+
+		const VkDescriptorPoolCreateInfo descriptorPoolParams =
+		{
+			VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,		// VkStructureType					sType;
+			DE_NULL,											// void*							pNext;
+			VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,	// VkDescriptorPoolCreateFlags		flags;
+			1u,													// uint32_t							maxSets;
+			DE_LENGTH_OF_ARRAY(descriptorTypes),				// deUint32							count;
+			descriptorTypes										// const VkDescriptorTypeCount*		pTypeCount
+		};
+
+		m_descriptorPool = createDescriptorPool(vk, vkDevice, &descriptorPoolParams);
+
+		const VkDescriptorSetAllocateInfo descriptorSetParams =
+		{
+			VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+			DE_NULL,
+			*m_descriptorPool,
+			1u,
+			&m_descriptorSetLayout.get(),
+		};
+		m_descriptorSet = allocateDescriptorSet(vk, vkDevice, &descriptorSetParams);
+
+		const VkWriteDescriptorSet writeDescriptorSets[] =
+		{
+			{
+				VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,		// VkStructureType			sType;
+				DE_NULL,									// const void*				pNext;
+				*m_descriptorSet,							// VkDescriptorSet			destSet;
+				0,											// deUint32					destBinding;
+				0,											// deUint32					destArrayElement;
+				1u,											// deUint32					count;
+				VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,	// VkDescriptorType			descriptorType;
+				(const VkDescriptorImageInfo*)DE_NULL,
+				(const VkDescriptorBufferInfo*)DE_NULL,
+				&m_uniformBufferView.get(),
+			}
+		};
+
+		vk.updateDescriptorSets(vkDevice, DE_LENGTH_OF_ARRAY(writeDescriptorSets), writeDescriptorSets, 0u, DE_NULL);
+	}
+
+	// Create pipeline layout
+	{
+		const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			(VkPipelineLayoutCreateFlags)0,
+			1u,													// deUint32						descriptorSetCount;
+			&*m_descriptorSetLayout,							// const VkDescriptorSetLayout*	pSetLayouts;
+			0u,													// deUint32						pushConstantRangeCount;
+			DE_NULL												// const VkPushConstantRange*	pPushConstantRanges;
+		};
+
+		m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	// Create shaders
+	{
+		m_vertexShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("vert"), 0);
+		m_fragmentShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("frag"), 0);
+	}
+
+	// Create pipeline
+	{
+
+		const VkPipelineShaderStageCreateInfo shaderStageParams[2] =
+		{
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType				sType;
+				DE_NULL,													// const void*					pNext;
+				(VkPipelineShaderStageCreateFlags)0,
+				VK_SHADER_STAGE_VERTEX_BIT,									// VkShaderStage				stage;
+				*m_vertexShaderModule,										// VkShader						shader;
+				"main",
+				DE_NULL														// const VkSpecializationInfo*	pSpecializationInfo;
+			},
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType				sType;
+				DE_NULL,													// const void*					pNext;
+				(VkPipelineShaderStageCreateFlags)0,
+				VK_SHADER_STAGE_FRAGMENT_BIT,								// VkShaderStage				stage;
+				*m_fragmentShaderModule,									// VkShader						shader;
+				"main",
+				DE_NULL														// const VkSpecializationInfo*	pSpecializationInfo;
+			}
+		};
+
+		const VkVertexInputBindingDescription vertexInputBindingDescription =
+		{
+			0u,								// deUint32					binding;
+			sizeof(tcu::Vec4),				// deUint32					strideInBytes;
+			VK_VERTEX_INPUT_RATE_VERTEX		// VkVertexInputStepRate	stepRate;
+		};
+
+		const VkVertexInputAttributeDescription vertexInputAttributeDescriptions[1] =
+		{
+			{
+				0u,									// deUint32	location;
+				0u,									// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat	format;
+				0u									// deUint32	offsetInBytes;
+			}
+		};
+
+		const VkPipelineVertexInputStateCreateInfo vertexInputStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			(VkPipelineVertexInputStateCreateFlags)0,
+			1u,																// deUint32									bindingCount;
+			&vertexInputBindingDescription,									// const VkVertexInputBindingDescription*	pVertexBindingDescriptions;
+			1u,																// deUint32									attributeCount;
+			vertexInputAttributeDescriptions								// const VkVertexInputAttributeDescription*	pVertexAttributeDescriptions;
+		};
+
+		const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,														// const void*			pNext;
+			(VkPipelineInputAssemblyStateCreateFlags)0,
+			VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,							// VkPrimitiveTopology	topology;
+			false															// VkBool32				primitiveRestartEnable;
+		};
+
+		const VkViewport viewport =
+		{
+			0.0f,						// float	originX;
+			0.0f,						// float	originY;
+			(float)m_renderSize.x(),	// float	width;
+			(float)m_renderSize.y(),	// float	height;
+			0.0f,						// float	minDepth;
+			1.0f						// float	maxDepth;
+		};
+		const VkRect2D scissor =
+		{
+			{ 0, 0 },													// VkOffset2D  offset;
+			{ (deUint32)m_renderSize.x(), (deUint32)m_renderSize.y() }	// VkExtent2D  extent;
+		};
+		const VkPipelineViewportStateCreateInfo viewportStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,			// VkStructureType		sType;
+			DE_NULL,														// const void*			pNext;
+			(VkPipelineViewportStateCreateFlags)0,
+			1u,																// deUint32				viewportCount;
+			&viewport,														// const VkViewport*	pViewports;
+			1u,																// deUint32				scissorCount;
+			&scissor														// const VkRect2D*		pScissors;
+		};
+
+		const VkPipelineRasterizationStateCreateInfo rasterStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// VkStructureType	sType;
+			DE_NULL,														// const void*		pNext;
+			(VkPipelineRasterizationStateCreateFlags)0,
+			false,															// VkBool32			depthClipEnable;
+			false,															// VkBool32			rasterizerDiscardEnable;
+			VK_POLYGON_MODE_FILL,											// VkFillMode		fillMode;
+			VK_CULL_MODE_NONE,												// VkCullMode		cullMode;
+			VK_FRONT_FACE_COUNTER_CLOCKWISE,								// VkFrontFace		frontFace;
+			VK_FALSE,														// VkBool32			depthBiasEnable;
+			0.0f,															// float			depthBias;
+			0.0f,															// float			depthBiasClamp;
+			0.0f,															// float			slopeScaledDepthBias;
+			1.0f,															// float			lineWidth;
+		};
+
+		const VkPipelineMultisampleStateCreateInfo		multisampleStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineMultisampleStateCreateFlags	flags;
+			VK_SAMPLE_COUNT_1_BIT,											// VkSampleCountFlagBits					rasterizationSamples;
+			VK_FALSE,														// VkBool32									sampleShadingEnable;
+			0.0f,															// float									minSampleShading;
+			DE_NULL,														// const VkSampleMask*						pSampleMask;
+			VK_FALSE,														// VkBool32									alphaToCoverageEnable;
+			VK_FALSE														// VkBool32									alphaToOneEnable;
+		};
+
+		const VkPipelineColorBlendAttachmentState colorBlendAttachmentState =
+		{
+			false,														// VkBool32			blendEnable;
+			VK_BLEND_FACTOR_ONE,										// VkBlend			srcBlendColor;
+			VK_BLEND_FACTOR_ZERO,										// VkBlend			destBlendColor;
+			VK_BLEND_OP_ADD,											// VkBlendOp		blendOpColor;
+			VK_BLEND_FACTOR_ONE,										// VkBlend			srcBlendAlpha;
+			VK_BLEND_FACTOR_ZERO,										// VkBlend			destBlendAlpha;
+			VK_BLEND_OP_ADD,											// VkBlendOp		blendOpAlpha;
+			(VK_COLOR_COMPONENT_R_BIT |
+			 VK_COLOR_COMPONENT_G_BIT |
+			 VK_COLOR_COMPONENT_B_BIT |
+			 VK_COLOR_COMPONENT_A_BIT)									// VkChannelFlags	channelWriteMask;
+		};
+
+		const VkPipelineColorBlendStateCreateInfo colorBlendStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	// VkStructureType								sType;
+			DE_NULL,													// const void*									pNext;
+			(VkPipelineColorBlendStateCreateFlags)0,
+			false,														// VkBool32										logicOpEnable;
+			VK_LOGIC_OP_COPY,											// VkLogicOp									logicOp;
+			1u,															// deUint32										attachmentCount;
+			&colorBlendAttachmentState,									// const VkPipelineColorBlendAttachmentState*	pAttachments;
+			{ 0.0f, 0.0f, 0.0f, 0.0f },									// float										blendConst[4];
+		};
+
+		const VkPipelineDynamicStateCreateInfo	dynamicStateParams		=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,		// VkStructureType			sType;
+			DE_NULL,													// const void*				pNext;
+			(VkPipelineDynamicStateCreateFlags)0,
+			0u,															// deUint32					dynamicStateCount;
+			DE_NULL														// const VkDynamicState*	pDynamicStates;
+		};
+
+		const VkGraphicsPipelineCreateInfo graphicsPipelineParams =
+		{
+			VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,	// VkStructureType									sType;
+			DE_NULL,											// const void*										pNext;
+			0u,													// VkPipelineCreateFlags							flags;
+			2u,													// deUint32											stageCount;
+			shaderStageParams,									// const VkPipelineShaderStageCreateInfo*			pStages;
+			&vertexInputStateParams,							// const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+			&inputAssemblyStateParams,							// const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+			DE_NULL,											// const VkPipelineTessellationStateCreateInfo*		pTessellationState;
+			&viewportStateParams,								// const VkPipelineViewportStateCreateInfo*			pViewportState;
+			&rasterStateParams,									// const VkPipelineRasterStateCreateInfo*			pRasterState;
+			&multisampleStateParams,							// const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+			DE_NULL,											// const VkPipelineDepthStencilStateCreateInfo*		pDepthStencilState;
+			&colorBlendStateParams,								// const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+			&dynamicStateParams,								// const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+			*m_pipelineLayout,									// VkPipelineLayout									layout;
+			*m_renderPass,										// VkRenderPass										renderPass;
+			0u,													// deUint32											subpass;
+			0u,													// VkPipeline										basePipelineHandle;
+			0u													// deInt32											basePipelineIndex;
+		};
+
+		m_graphicsPipelines		= createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+	}
+
+	// Create vertex buffer
+	{
+		createQuad();
+		const VkDeviceSize vertexDataSize = m_vertices.size() * sizeof(tcu::Vec4);
+		const VkBufferCreateInfo vertexBufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			vertexDataSize,								// VkDeviceSize			size;
+			VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyCount;
+			&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_vertexBuffer		= createBuffer(vk, vkDevice, &vertexBufferParams);
+		m_vertexBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_vertexBuffer), MemoryRequirement::HostVisible);
+
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_vertexBuffer, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset()));
+
+		// Load vertices into vertex buffer
+		deMemcpy(m_vertexBufferAlloc->getHostPtr(), m_vertices.data(), (size_t)vertexDataSize);
+		flushMappedMemoryRange(vk, vkDevice, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset(), vertexDataSize);
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,										// const void*			pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCmdPoolCreateFlags	flags;
+			queueFamilyIndex,								// deUint32				queueFamilyIndex;
+		};
+
+		m_cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCmdPool				cmdPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCmdBufferLevel			level;
+			1u												// deUint32					count;
+		};
+
+		const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			0u,												// VkCmdBufferOptimizeFlags	flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,
+		};
+
+		const VkClearValue clearValue = makeClearValueColorF32(0.0, 0.0, 0.0, 0.0);
+
+		const VkClearValue attachmentClearValues[1] =
+		{
+			clearValue,
+		};
+
+		const VkRenderPassBeginInfo renderPassBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,				// VkStructureType		sType;
+			DE_NULL,												// const void*			pNext;
+			*m_renderPass,											// VkRenderPass			renderPass;
+			*m_framebuffer,											// VkFramebuffer		framebuffer;
+			{
+				{ 0, 0 },
+				{ (deUint32)m_renderSize.x(), (deUint32)m_renderSize.y() }
+			},														// VkRect2D				renderArea;
+			1u,														// deUint32				clearValueCount;
+			attachmentClearValues									// const VkClearValue*	pClearValues;
+		};
+
+		m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferParams);
+
+		VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+
+		const VkImageMemoryBarrier initialImageBarrier =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// VkStructureType			sType;
+			DE_NULL,									// const void*				pNext;
+			0,											// VkMemoryOutputFlags		outputMask;
+			VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,		// VkMemoryInputFlags		inputMask;
+			VK_IMAGE_LAYOUT_UNDEFINED,					// VkImageLayout			oldLayout;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	// VkImageLayout			newLayout;
+			VK_QUEUE_FAMILY_IGNORED,					// deUint32					srcQueueFamilyIndex;
+			VK_QUEUE_FAMILY_IGNORED,					// deUint32					destQueueFamilyIndex;
+			*m_colorImage,								// VkImage					image;
+			{											// VkImageSubresourceRange	subresourceRange;
+				VK_IMAGE_ASPECT_COLOR_BIT,				// VkImageAspectFlags	aspectMask;
+				0u,										// deUint32				baseMipLevel;
+				1u,										// deUint32				mipLevels;
+				0u,										// deUint32				baseArraySlice;
+				1u										// deUint32				arraySize;
+			}
+		};
+
+		vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &initialImageBarrier);
+
+		vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+		const VkDeviceSize	vertexBufferOffset[1] = { 0 };
+
+		vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipelines);
+		vk.cmdBindDescriptorSets(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1, &*m_descriptorSet, 0u, DE_NULL);
+		vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), vertexBufferOffset);
+		vk.cmdDraw(*m_cmdBuffer, (deUint32)m_vertices.size(), 1, 0, 0);
+		vk.cmdEndRenderPass(*m_cmdBuffer);
+
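+		// Transition the rendered image for transfer, copy it into the host-visible result buffer and make the transfer write visible to the host.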
+		const VkImageMemoryBarrier imageBarrier =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// VkStructureType			sType;
+			DE_NULL,									// const void*				pNext;
+			VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,		// VkMemoryOutputFlags		outputMask;
+			VK_ACCESS_TRANSFER_READ_BIT,				// VkMemoryInputFlags		inputMask;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	// VkImageLayout			oldLayout;
+			VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,		// VkImageLayout			newLayout;
+			VK_QUEUE_FAMILY_IGNORED,					// deUint32					srcQueueFamilyIndex;
+			VK_QUEUE_FAMILY_IGNORED,					// deUint32					destQueueFamilyIndex;
+			*m_colorImage,								// VkImage					image;
+			{											// VkImageSubresourceRange	subresourceRange;
+				VK_IMAGE_ASPECT_COLOR_BIT,				// VkImageAspectFlags	aspectMask;
+				0u,										// deUint32				baseMipLevel;
+				1u,										// deUint32				mipLevels;
+				0u,										// deUint32				baseArraySlice;
+				1u										// deUint32				arraySize;
+			}
+		};
+
+		const VkBufferMemoryBarrier bufferBarrier =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,	// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			VK_ACCESS_TRANSFER_WRITE_BIT,				// VkMemoryOutputFlags	outputMask;
+			VK_ACCESS_HOST_READ_BIT,					// VkMemoryInputFlags	inputMask;
+			VK_QUEUE_FAMILY_IGNORED,					// deUint32				srcQueueFamilyIndex;
+			VK_QUEUE_FAMILY_IGNORED,					// deUint32				destQueueFamilyIndex;
+			*m_resultBuffer,							// VkBuffer				buffer;
+			0u,											// VkDeviceSize			offset;
+			m_pixelDataSize								// VkDeviceSize			size;
+		};
+
+		const VkBufferImageCopy copyRegion =
+		{
+			0u,											// VkDeviceSize				bufferOffset;
+			(deUint32)m_renderSize.x(),					// deUint32					bufferRowLength;
+			(deUint32)m_renderSize.y(),					// deUint32					bufferImageHeight;
+			{ VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u },	// VkImageSubresourceCopy	imageSubresource;
+			{ 0, 0, 0 },								// VkOffset3D				imageOffset;
+			{
+				(deUint32)m_renderSize.x(),
+				(deUint32)m_renderSize.y(),
+				1u
+			}											// VkExtent3D				imageExtent;
+		};
+
+		vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
+		vk.cmdCopyImageToBuffer(*m_cmdBuffer, *m_colorImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *m_resultBuffer, 1, &copyRegion);
+		vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+		VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+
+		m_fence = createFence(vk, vkDevice, &fenceParams);
+	}
+}
+
+tcu::TestStatus BufferViewTestInstance::checkResult (deInt8 factor)
+{
+	const DeviceInterface&			vk					= m_context.getDeviceInterface();
+	const VkDevice					vkDevice			= m_context.getDevice();
+	const tcu::TextureFormat		tcuFormat			= mapVkFormat(m_colorFormat);
+	de::MovePtr<tcu::TextureLevel>	resultLevel			(new tcu::TextureLevel(tcuFormat, m_renderSize.x(), m_renderSize.y()));
+
+	invalidateMappedMemoryRange(vk, vkDevice, m_resultBufferAlloc->getMemory(), m_resultBufferAlloc->getOffset(), m_pixelDataSize);
+	tcu::copy(*resultLevel, tcu::ConstPixelBufferAccess(resultLevel->getFormat(), resultLevel->getSize(), m_resultBufferAlloc->getHostPtr()));
+
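+	// The fragment shader output depends only on gl_FragCoord.x, so checking the diagonal covers every column: pixel (i, i) must equal factor * (elementOffset + i).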
+	tcu::ConstPixelBufferAccess pixelBuffer = resultLevel->getAccess();
+	for (deInt32 i = 0; i < (deInt32) m_renderSize.x(); ++i)
+	{
+		tcu::IVec4 pixel	= pixelBuffer.getPixelInt(i, i);
+		deInt32 expected	= factor * (m_testCase.elementOffset + i);
+		deInt32 actual		= pixel[0];
+		if (expected != actual)
+		{
+			std::ostringstream errorMessage;
+			errorMessage << "BufferView test failed: expected " << expected << " but got " << actual;
+			return tcu::TestStatus::fail(errorMessage.str());
+		}
+	}
+
+	return tcu::TestStatus::pass("BufferView test");
+}
+
+tcu::TestStatus BufferViewTestInstance::iterate (void)
+{
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const VkQueue				queue		= m_context.getUniversalQueue();
+	const VkSubmitInfo			submitInfo	=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,
+		DE_NULL,
+		0u,
+		(const VkSemaphore*)DE_NULL,
+		(const VkPipelineStageFlags*)DE_NULL,
+		1u,
+		&m_cmdBuffer.get(),
+		0u,
+		(const VkSemaphore*)DE_NULL,
+	};
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity */));
+
+	tcu::TestStatus				testStatus	= checkResult();
+	if (testStatus.getCode() != QP_TEST_RESULT_PASS)
+	{
+		return testStatus;
+	}
+
+	// Refill the uniform buffer with new values and replay the same command buffer
+	std::vector<deUint32>		uniformData;
+	const VkDeviceSize			uniformSize = m_testCase.bufferSize * sizeof(deUint32);
+	const deInt8				factor		= 2;
+
+	generateBuffer(uniformData, m_testCase.bufferSize, factor);
+	deMemcpy(m_uniformBufferAlloc->getHostPtr(), uniformData.data(), (size_t)uniformSize);
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity */));
+
+	return checkResult(factor);
+}
+
+class BufferViewTestCase : public vkt::TestCase
+{
+public:
+							BufferViewTestCase			(tcu::TestContext&			testCtx,
+														 const std::string&			name,
+														 const std::string&			description,
+														 BufferViewCaseParams	bufferViewTestInfo)
+								: vkt::TestCase			(testCtx, name, description)
+								, m_bufferViewTestInfo	(bufferViewTestInfo)
+							{}
+
+	virtual					~BufferViewTestCase			(void) {}
+	virtual	void			initPrograms				(SourceCollections&			programCollection) const;
+
+	virtual TestInstance*	createInstance				(Context&					context) const
+							{
+								return new BufferViewTestInstance(context, m_bufferViewTestInfo);
+							}
+private:
+	BufferViewCaseParams	m_bufferViewTestInfo;
+};
+
+void BufferViewTestCase::initPrograms (SourceCollections& programCollection) const
+{
+	programCollection.glslSources.add("vert") << glu::VertexSource(
+		"#version 310 es\n"
+		"layout (location = 0) in highp vec4 a_position;\n"
+		"void main()\n"
+		"{\n"
+		"	gl_Position = a_position;\n"
+		"}\n");
+
+	programCollection.glslSources.add("frag") << glu::FragmentSource(
+		"#version 310 es\n"
+		"#extension GL_EXT_texture_buffer : enable\n"
+		"layout (set=0, binding=0) uniform highp usamplerBuffer u_buffer;\n"
+		"layout (location = 0) out highp uint o_color;\n"
+		"void main()\n"
+		"{\n"
+		"	o_color = texelFetch(u_buffer, int(gl_FragCoord.x)).x;\n"
+		"}\n");
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createBufferViewAccessTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	bufferViewTests	(new tcu::TestCaseGroup(testCtx, "access", "BufferView Access Tests"));
+
+	{
+		BufferViewCaseParams info =
+		{
+			512,	// deUint32	bufferSize
+			512,	// deUint32	bufferViewSize
+			0,		// deUint32	elementOffset
+		};
+		std::ostringstream description;
+		description << "bufferSize: " << info.bufferSize << " bufferViewSize: " << info.bufferViewSize << " bufferView element offset: " << info.elementOffset;
+		bufferViewTests->addChild(new BufferViewTestCase(testCtx, "buffer_view_memory_test_complete", description.str(), info));
+	}
+
+	{
+		BufferViewCaseParams info =
+		{
+			4096,	// deUint32	bufferSize
+			512,	// deUint32	bufferViewSize
+			0,		// deUint32	elementOffset
+		};
+		std::ostringstream description;
+		description << "bufferSize: " << info.bufferSize << " bufferViewSize: " << info.bufferViewSize << " bufferView element offset: " << info.elementOffset;
+		bufferViewTests->addChild(new BufferViewTestCase(testCtx, "buffer_view_memory_test_partial_offset0", description.str(), info));
+	}
+
+	{
+		BufferViewCaseParams info =
+		{
+			4096,	// deUint32	bufferSize
+			512,	// deUint32	bufferViewSize
+			128,	// deUint32	elementOffset
+		};
+		std::ostringstream description;
+		description << "bufferSize: " << info.bufferSize << " bufferViewSize: " << info.bufferViewSize << " bufferView element offset: " << info.elementOffset;
+		bufferViewTests->addChild(new BufferViewTestCase(testCtx, "buffer_view_memory_test_partial_offset1", description.str(), info));
+	}
+
+	return bufferViewTests.release();
+}
+
+} // api
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/api/vktApiBufferViewAccessTests.hpp b/external/vulkancts/modules/vulkan/api/vktApiBufferViewAccessTests.hpp
new file mode 100644
index 0000000..7445d0d
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiBufferViewAccessTests.hpp
@@ -0,0 +1,52 @@
+#ifndef _VKTAPIBUFFERVIEWACCESSTESTS_HPP
+#define _VKTAPIBUFFERVIEWACCESSTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Buffer View Memory Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+
+namespace api
+{
+
+tcu::TestCaseGroup*		createBufferViewAccessTests	(tcu::TestContext& testCtx);
+
+} // api
+} // vkt
+
+#endif // _VKTAPIBUFFERVIEWACCESSTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/api/vktApiBufferViewCreateTests.cpp b/external/vulkancts/modules/vulkan/api/vktApiBufferViewCreateTests.cpp
new file mode 100644
index 0000000..4e49496
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiBufferViewCreateTests.cpp
@@ -0,0 +1,261 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Buffer View Creation Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktApiBufferViewCreateTests.hpp"
+
+#include "deStringUtil.hpp"
+#include "gluVarType.hpp"
+#include "tcuTestLog.hpp"
+#include "vkPrograms.hpp"
+#include "vkRefUtil.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+
+using namespace vk;
+
+namespace api
+{
+
+namespace
+{
+
+struct BufferViewCaseParameters
+{
+	VkFormat				format;
+	VkDeviceSize			offset;
+	VkDeviceSize			range;
+	VkBufferUsageFlags		usage;
+	VkFormatFeatureFlags	features;
+};
+
+class BufferViewTestInstance : public TestInstance
+{
+public:
+								BufferViewTestInstance		(Context&					ctx,
+															 BufferViewCaseParameters	createInfo);
+	virtual tcu::TestStatus		iterate						(void);
+
+private:
+	BufferViewCaseParameters		m_testCase;
+};
+
+class BufferViewTestCase : public TestCase
+{
+public:
+							BufferViewTestCase		(tcu::TestContext&			testCtx,
+													 const std::string&			name,
+													 const std::string&			description,
+													 BufferViewCaseParameters	createInfo)
+								: TestCase			(testCtx, name, description)
+								, m_testCase		(createInfo)
+							{}
+
+	virtual					~BufferViewTestCase		(void) {}
+	virtual TestInstance*	createInstance			(Context&	ctx) const
+							{
+								return new BufferViewTestInstance(ctx, m_testCase);
+							}
+
+private:
+	BufferViewCaseParameters	m_testCase;
+};
+
+BufferViewTestInstance::BufferViewTestInstance (Context&					ctx,
+												BufferViewCaseParameters	createInfo)
+	: TestInstance	(ctx)
+	, m_testCase	(createInfo)
+{
+}
+
+tcu::TestStatus BufferViewTestInstance::iterate (void)
+{
+	// Create buffer
+	const VkDevice				vkDevice				= m_context.getDevice();
+	const DeviceInterface&		vk						= m_context.getDeviceInterface();
+	const deUint32				queueFamilyIndex		= m_context.getUniversalQueueFamilyIndex();
+	const VkDeviceSize			size					= 3 * 5 * 7 * 64;
+	Move<VkBuffer>				testBuffer;
+	VkMemoryRequirements		memReqs;
+	VkFormatProperties			properties;
+	const VkBufferCreateInfo	bufferParams =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,													//	VkStructureType			sType;
+		DE_NULL,																				//	const void*				pNext;
+		0u,																						//	VkBufferCreateFlags		flags;
+		size,																					//	VkDeviceSize			size;
+		m_testCase.usage,																		//	VkBufferUsageFlags		usage;
+		VK_SHARING_MODE_EXCLUSIVE,																//	VkSharingMode			sharingMode;
+		1u,																						//	deUint32				queueFamilyCount;
+		&queueFamilyIndex,																		//	const deUint32*			pQueueFamilyIndices;
+	};
+
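+	// Skip formats that the implementation does not expose the required buffer feature for.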
+	m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), m_testCase.format, &properties);
+	if (!(properties.bufferFeatures & m_testCase.features))
+		TCU_THROW(NotSupportedError, "Format not supported");
+
+	try
+	{
+		testBuffer = createBuffer(vk, vkDevice, &bufferParams, (const VkAllocationCallbacks*)DE_NULL);
+	}
+	catch (const vk::Error& error)
+	{
+		return tcu::TestStatus::fail("Buffer creation failed! (Error code: " + de::toString(error.getMessage()) + ")");
+	}
+
+	vk.getBufferMemoryRequirements(vkDevice, *testBuffer, &memReqs);
+
+	if (size > memReqs.size)
+	{
+		std::ostringstream errorMsg;
+		errorMsg << "Required memory size (" << memReqs.size << " bytes) is smaller than the buffer's size (" << size << " bytes)!";
+		return tcu::TestStatus::fail(errorMsg.str());
+	}
+
+	Move<VkDeviceMemory>		memory;
+	const VkMemoryAllocateInfo	memAlloc				=
+	{
+		VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,		//	VkStructureType		sType
+		NULL,										//	const void*			pNext
+		memReqs.size,								//	VkDeviceSize		allocationSize
+		0											//	deUint32			memoryTypeIndex
+	};
+
+	{
+		// Create buffer view.
+		Move<VkBufferView>				bufferView;
+		const VkBufferViewCreateInfo	bufferViewCreateInfo	=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,	//	VkStructureType		sType;
+			NULL,										//	const void*			pNext;
+			(VkBufferViewCreateFlags)0,					//	VkBufferViewCreateFlags	flags;
+			*testBuffer,								//	VkBuffer			buffer;
+			m_testCase.format,							//	VkFormat			format;
+			m_testCase.offset,							//	VkDeviceSize		offset;
+			m_testCase.range,							//	VkDeviceSize		range;
+		};
+
+		try
+		{
+			memory = allocateMemory(vk, vkDevice, &memAlloc, (const VkAllocationCallbacks*)DE_NULL);
+		}
+		catch (const vk::Error& error)
+		{
+			return tcu::TestStatus::fail("Alloc memory failed! (Error code: " + de::toString(error.getMessage()) + ")");
+		}
+
+		if (vk.bindBufferMemory(vkDevice, *testBuffer, *memory, 0) != VK_SUCCESS)
+			return tcu::TestStatus::fail("Bind buffer memory failed!");
+
+		try
+		{
+			bufferView = createBufferView(vk, vkDevice, &bufferViewCreateInfo, (const VkAllocationCallbacks*)DE_NULL);
+		}
+		catch (const vk::Error& error)
+		{
+			return tcu::TestStatus::fail("Buffer View creation failed! (Error code: " + de::toString(error.getMessage()) + ")");
+		}
+	}
+
+	// Testing complete view size.
+	{
+		Move<VkBufferView>		completeBufferView;
+		VkBufferViewCreateInfo	completeBufferViewCreateInfo	=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,	//	VkStructureType		sType;
+			NULL,										//	const void*			pNext;
+			(VkBufferViewCreateFlags)0,					//	VkBufferViewCreateFlags	flags;
+			*testBuffer,								//	VkBuffer			buffer;
+			m_testCase.format,							//	VkFormat			format;
+			m_testCase.offset,							//	VkDeviceSize		offset;
+			size,										//	VkDeviceSize		range;
+		};
+
+		try
+		{
+			completeBufferView = createBufferView(vk, vkDevice, &completeBufferViewCreateInfo, (const VkAllocationCallbacks*)DE_NULL);
+		}
+		catch (const vk::Error& error)
+		{
+			return tcu::TestStatus::fail("Buffer View creation failed! (Error code: " + de::toString(error.getMessage()) + ")");
+		}
+	}
+
+	return tcu::TestStatus::pass("BufferView test");
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createBufferViewCreateTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	bufferViewTests	(new tcu::TestCaseGroup(testCtx, "create", "BufferView Construction Tests"));
+
+	const VkDeviceSize range = VK_WHOLE_SIZE;
+	for (deUint32 format = VK_FORMAT_UNDEFINED + 1; format < VK_FORMAT_LAST; format++)
+	{
+		std::ostringstream	testName;
+		std::ostringstream	testDescription;
+		testName << "createBufferView_" << format;
+		testDescription << "vkBufferView test " << testName.str();
+		{
+			BufferViewCaseParameters testParams	=
+			{
+				(VkFormat)format,							// VkFormat				format;
+				0,											// VkDeviceSize			offset;
+				range,										// VkDeviceSize			range;
+				VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,	// VkBufferUsageFlags	usage;
+				VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT, // VkFormatFeatureFlags flags;
+			};
+			bufferViewTests->addChild(new BufferViewTestCase(testCtx, testName.str() + "_uniform", testDescription.str(), testParams));
+		}
+		{
+			BufferViewCaseParameters testParams	=
+			{
+				(VkFormat)format,							// VkFormat				format;
+				0,											// VkDeviceSize			offset;
+				range,										// VkDeviceSize			range;
+				VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,	// VkBufferUsageFlags	usage;
+				VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT, // VkFormatFeatureFlags flags;
+			};
+			bufferViewTests->addChild(new BufferViewTestCase(testCtx, testName.str() + "_storage", testDescription.str(), testParams));
+		}
+	}
+
+	return bufferViewTests.release();
+}
+
+} // api
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/api/vktApiBufferViewCreateTests.hpp b/external/vulkancts/modules/vulkan/api/vktApiBufferViewCreateTests.hpp
new file mode 100644
index 0000000..8a46454
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiBufferViewCreateTests.hpp
@@ -0,0 +1,52 @@
+#ifndef _VKTAPIBUFFERVIEWCREATETESTS_HPP
+#define _VKTAPIBUFFERVIEWCREATETESTS_HPP
+/*------------------------------------------------------------------------
+ *  Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Buffer View Creation Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+
+namespace api
+{
+
+tcu::TestCaseGroup*		createBufferViewCreateTests	(tcu::TestContext& testCtx);
+
+} // api
+} // vkt
+
+#endif // _VKTAPIBUFFERVIEWCREATETESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/api/vktApiCommandBuffersTests.cpp b/external/vulkancts/modules/vulkan/api/vktApiCommandBuffersTests.cpp
new file mode 100644
index 0000000..ce88002
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiCommandBuffersTests.cpp
@@ -0,0 +1,3201 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vkBuilderUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkRefUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkDeviceUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkTypeUtil.hpp"
+#include "vkAllocationCallbackUtil.hpp"
+#include "vktApiCommandBuffersTests.hpp"
+#include "vktApiBufferComputeInstance.hpp"
+#include "vktApiComputeInstanceResultBuffer.hpp"
+#include "deSharedPtr.hpp"
+#include <sstream>
+
+namespace vkt
+{
+namespace api
+{
+namespace
+{
+
+using namespace vk;
+
+typedef de::SharedPtr<vk::Unique<vk::VkEvent> >	VkEventSp;
+
+// Global variables
+const deUint64								INFINITE_TIMEOUT		= ~(deUint64)0u;
+
+// Testcases
+/********* 19.1. Command Pools (6.1 in VK 1.0 Spec) ***************************/
+tcu::TestStatus createPoolNullParamsTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+
+	createCommandPool(vk, vkDevice, &cmdPoolParams, DE_NULL);
+
+	return tcu::TestStatus::pass("Command Pool allocated correctly.");
+}
+
+tcu::TestStatus createPoolNonNullAllocatorTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+	const VkAllocationCallbacks*			allocationCallbacks		= getSystemAllocator();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+
+	createCommandPool(vk, vkDevice, &cmdPoolParams, allocationCallbacks);
+
+	return tcu::TestStatus::pass("Command Pool allocated correctly.");
+}
+
+tcu::TestStatus createPoolTransientBitTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,						// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+
+	createCommandPool(vk, vkDevice, &cmdPoolParams, DE_NULL);
+
+	return tcu::TestStatus::pass("Command Pool allocated correctly.");
+}
+
+tcu::TestStatus createPoolResetBitTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+
+	createCommandPool(vk, vkDevice, &cmdPoolParams, DE_NULL);
+
+	return tcu::TestStatus::pass("Command Pool allocated correctly.");
+}
+
+tcu::TestStatus resetPoolReleaseResourcesBitTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams, DE_NULL));
+
+	VK_CHECK(vk.resetCommandPool(vkDevice, *cmdPool, VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT));
+
+	return tcu::TestStatus::pass("Command Pool reset correctly.");
+}
+
+tcu::TestStatus resetPoolNoFlagsTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams, DE_NULL));
+
+	VK_CHECK(vk.resetCommandPool(vkDevice, *cmdPool, 0u));
+
+	return tcu::TestStatus::pass("Command Pool reset correctly.");
+}
+
+/******** 19.2. Command Buffer Lifetime (6.2 in VK 1.0 Spec) ******************/
+tcu::TestStatus allocatePrimaryBufferTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// commandPool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							// level;
+		1u,															// bufferCount;
+	};
+	const Unique<VkCommandBuffer>			cmdBuf					(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+
+	return tcu::TestStatus::pass("Command buffer allocated correctly.");
+}
+
+tcu::TestStatus allocateManyPrimaryBuffersTest(Context& context)
+{
+
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// create a large number of command buffers - is there any required minimum in the spec?
+	const unsigned minCommandBuffer = 10000;
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							//	VkCommandBufferLevel		level;
+		minCommandBuffer,											//	uint32_t					bufferCount;
+	};
+
+	// do not keep the handles to buffers, as they will be freed with command pool
+
+	// allocate the minimum required amount of buffers
+	VkCommandBuffer cmdBuffers[minCommandBuffer];
+	VK_CHECK(vk.allocateCommandBuffers(vkDevice, &cmdBufParams, cmdBuffers));
+
+	std::ostringstream out;
+	out << "allocateManyPrimaryBuffersTest succeeded: created " << minCommandBuffer << " command buffers";
+
+	return tcu::TestStatus::pass(out.str());
+}
+
+tcu::TestStatus allocateZeroPrimaryBuffersTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// commandPool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							// level;
+		0u,															// bufferCount;
+	};
+
+	VkCommandBuffer cmdBuffer;
+	VK_CHECK(vk.allocateCommandBuffers(vkDevice, &cmdBufParams, &cmdBuffer));
+
+	return tcu::TestStatus::pass("allocateZeroPrimaryBuffersTest passed.");
+}
+
+tcu::TestStatus allocateSecondaryBufferTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// commandPool;
+		VK_COMMAND_BUFFER_LEVEL_SECONDARY,							// level;
+		1u,															// bufferCount;
+	};
+	const Unique<VkCommandBuffer>			cmdBuf					(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+
+	return tcu::TestStatus::pass("Command buffer allocated correctly.");
+}
+
+tcu::TestStatus allocateManySecondaryBuffersTest(Context& context)
+{
+
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// create a large number of command buffers - is there any required minimum in the spec?
+	const unsigned minCommandBuffer = 10000;
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_SECONDARY,							//	VkCommandBufferLevel		level;
+		minCommandBuffer,											//	uint32_t					bufferCount;
+	};
+
+	// do not keep the handles to buffers, as they will be freed with command pool
+
+	// allocate the minimum required amount of buffers
+	VkCommandBuffer cmdBuffers[minCommandBuffer];
+	VK_CHECK(vk.allocateCommandBuffers(vkDevice, &cmdBufParams, cmdBuffers));
+
+	std::ostringstream out;
+	out << "allocateManySecondaryBuffersTest succeeded: created " << minCommandBuffer << " command buffers";
+
+	return tcu::TestStatus::pass(out.str());
+}
+
+tcu::TestStatus allocateZeroSecondaryBuffersTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// commandPool;
+		VK_COMMAND_BUFFER_LEVEL_SECONDARY,							// level;
+		0u,															// bufferCount;
+	};
+
+	VkCommandBuffer cmdBuffer;
+	VK_CHECK(vk.allocateCommandBuffers(vkDevice, &cmdBufParams, &cmdBuffer));
+
+	return tcu::TestStatus::pass("allocateZeroSecondaryBuffersTest passed.");
+}
+
+tcu::TestStatus executePrimaryBufferTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBuf				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+	const VkCommandBufferBeginInfo			primCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		0,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	// create event that will be used to check if the command buffer has been executed
+	const Unique<VkEvent>					event					(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// reset event
+	VK_CHECK(vk.resetEvent(vkDevice, *event));
+
+	// record primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf, &primCmdBufBeginInfo));
+	{
+		// allow execution of event during every stage of pipeline
+		VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+
+		// record setting event
+		vk.cmdSetEvent(*primCmdBuf, *event,stageMask);
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf));
+
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+	};
+
+	// create fence to wait for execution of queue
+	const Unique<VkFence>					fence					(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfo				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBuf.get(),											// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// Submit the command buffer to the queue
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+
+	// check if buffer has been executed
+	VkResult result = vk.getEventStatus(vkDevice,*event);
+	if (result == VK_EVENT_SET)
+		return tcu::TestStatus::pass("Execute Primary Command Buffer succeeded");
+
+	return tcu::TestStatus::fail("Execute Primary Command Buffer FAILED");
+}
+
+tcu::TestStatus executeLargePrimaryBufferTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+	const deUint32							LARGE_BUFFER_SIZE		= 10000;
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBuf				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+	const VkCommandBufferBeginInfo			primCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		0,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	std::vector<VkEventSp>					events;
+	for (deUint32 ndx = 0; ndx < LARGE_BUFFER_SIZE; ++ndx)
+		events.push_back(VkEventSp(new vk::Unique<VkEvent>(createEvent(vk, vkDevice, &eventCreateInfo, DE_NULL))));
+
+	// record primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf, &primCmdBufBeginInfo));
+	{
+		// set all the events
+		for (deUint32 ndx = 0; ndx < LARGE_BUFFER_SIZE; ++ndx)
+		{
+			vk.cmdSetEvent(*primCmdBuf, events[ndx]->get(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
+		}
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf));
+
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+	};
+
+	// create fence to wait for execution of queue
+	const Unique<VkFence>					fence					(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfo				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBuf.get(),											// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// Submit the command buffer to the queue
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+
+	// check if the buffer was executed correctly - all events had their status
+	// changed
+	tcu::TestStatus testResult = tcu::TestStatus::incomplete();
+
+	for (deUint32 ndx = 0; ndx < LARGE_BUFFER_SIZE; ++ndx)
+	{
+		if (vk.getEventStatus(vkDevice, events[ndx]->get()) != VK_EVENT_SET)
+		{
+			testResult = tcu::TestStatus::fail("An event was not set.");
+			break;
+		}
+	}
+
+	if (!testResult.isComplete())
+		testResult = tcu::TestStatus::pass("All events set correctly.");
+
+	return testResult;
+}
+
+tcu::TestStatus resetBufferImplicitlyTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							// level;
+		1u,															// bufferCount;
+	};
+	const Unique<VkCommandBuffer>			cmdBuf						(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+
+	const VkCommandBufferBeginInfo			cmdBufBeginInfo			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,				// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,						// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags;
+	};
+	const Unique<VkEvent>					event					(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// Put the command buffer in recording state.
+	VK_CHECK(vk.beginCommandBuffer(*cmdBuf, &cmdBufBeginInfo));
+	{
+		// Set the event
+		vk.cmdSetEvent(*cmdBuf, *event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
+	}
+	VK_CHECK(vk.endCommandBuffer(*cmdBuf));
+
+	// We'll use a fence to wait for the execution of the queue
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,						// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags
+	};
+	const Unique<VkFence>					fence					(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfo				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1u,															// commandBufferCount
+		&cmdBuf.get(),												// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// Submitting the command buffer that sets the event to the queue
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, fence.get()));
+
+	// Waiting for the queue to finish executing
+	VK_CHECK(vk.waitForFences(vkDevice, 1u, &fence.get(), 0u, INFINITE_TIMEOUT));
+	// Reset the fence so that we can reuse it
+	VK_CHECK(vk.resetFences(vkDevice, 1u, &fence.get()));
+
+	// Check if the buffer was executed
+	if (vk.getEventStatus(vkDevice, *event) != VK_EVENT_SET)
+		return tcu::TestStatus::fail("Failed to set the event.");
+
+	// Reset the event
+	VK_CHECK(vk.resetEvent(vkDevice, *event));
+	if (vk.getEventStatus(vkDevice, *event) != VK_EVENT_RESET)
+		return tcu::TestStatus::fail("Failed to reset the event.");
+
+	// Reset the command buffer by putting it in recording state again. This
+	// should empty the command buffer.
+	VK_CHECK(vk.beginCommandBuffer(*cmdBuf, &cmdBufBeginInfo));
+	VK_CHECK(vk.endCommandBuffer(*cmdBuf));
+
+	// Submit the command buffer after resetting. It should have no commands
+	// recorded, so the event should remain unsignaled.
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, fence.get()));
+	// Waiting for the queue to finish executing
+	VK_CHECK(vk.waitForFences(vkDevice, 1u, &fence.get(), 0u, INFINITE_TIMEOUT));
+
+	// Check if the event remained unset.
+	if (vk.getEventStatus(vkDevice, *event) == VK_EVENT_RESET)
+		return tcu::TestStatus::pass("Buffer was reset correctly.");
+	else
+		return tcu::TestStatus::fail("Buffer was not reset correctly.");
+}
+
+/******** 19.3. Command Buffer Recording (6.3 in VK 1.0 Spec) *****************/
+tcu::TestStatus recordSinglePrimaryBufferTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBuf				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+
+	const VkCommandBufferBeginInfo			primCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		0,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	// create event that will be used to check if the command buffer has been executed
+	const Unique<VkEvent>					event					(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// record primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf, &primCmdBufBeginInfo));
+	{
+		// record setting event
+		vk.cmdSetEvent(*primCmdBuf, *event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf));
+
+	return tcu::TestStatus::pass("Primary buffer recorded successfully.");
+}
+
+tcu::TestStatus recordLargePrimaryBufferTest(Context &context)
+{
+
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBuf				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+	const VkCommandBufferBeginInfo			primCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		0,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	// create event that will be used to check if the command buffer has been executed
+	const Unique<VkEvent>					event					(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// reset event
+	VK_CHECK(vk.resetEvent(vkDevice, *event));
+
+	// record primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf, &primCmdBufBeginInfo));
+	{
+		// allow execution of event during every stage of pipeline
+		VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+
+		// define minimal amount of commands to accept
+		// define the minimum number of commands to record
+
+		for ( long long unsigned currentCommands = 0; currentCommands < minNumCommands / 2; ++currentCommands )
+		{
+			// record setting event
+			vk.cmdSetEvent(*primCmdBuf, *event,stageMask);
+
+			// record resetting event
+			vk.cmdResetEvent(*primCmdBuf, *event,stageMask);
+		}
+
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf));
+
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+	};
+
+	// create fence to wait for execution of queue
+	const Unique<VkFence>					fence					(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfo				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBuf.get(),											// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// Submit the command buffer to the queue
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+
+	return tcu::TestStatus::pass("recordLargePrimaryBufferTest succeeded");
+}
+
+tcu::TestStatus recordSingleSecondaryBufferTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_SECONDARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			secCmdBuf				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+
+	const VkCommandBufferInheritanceInfo	secCmdBufInheritInfo	=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+		DE_NULL,
+		(VkRenderPass)0u,											// renderPass
+		0u,															// subpass
+		(VkFramebuffer)0u,											// framebuffer
+		VK_FALSE,													// occlusionQueryEnable
+		(VkQueryControlFlags)0u,									// queryFlags
+		(VkQueryPipelineStatisticFlags)0u,							// pipelineStatistics
+	};
+	const VkCommandBufferBeginInfo			secCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		0,															// flags
+		&secCmdBufInheritInfo,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	// create event that will be used to check if secondary command buffer has been executed
+	const Unique<VkEvent>					event					(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// record secondary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*secCmdBuf, &secCmdBufBeginInfo));
+	{
+		// record setting event
+		vk.cmdSetEvent(*secCmdBuf, *event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
+	}
+	VK_CHECK(vk.endCommandBuffer(*secCmdBuf));
+
+	return tcu::TestStatus::pass("Secondary buffer recorded successfully.");
+}
+
+tcu::TestStatus recordLargeSecondaryBufferTest(Context &context)
+{
+
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBuf				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+	const VkCommandBufferBeginInfo			primCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		0,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	// create event that will be used to check if the command buffer has been executed
+	const Unique<VkEvent>					event					(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// reset event
+	VK_CHECK(vk.resetEvent(vkDevice, *event));
+
+	// record primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf, &primCmdBufBeginInfo));
+	{
+		// allow execution of event during every stage of pipeline
+		VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+
+		// define minimal amount of commands to accept
+		// define the minimum number of commands to record
+
+		for ( long long unsigned currentCommands = 0; currentCommands < minNumCommands / 2; ++currentCommands )
+		{
+			// record setting event
+			vk.cmdSetEvent(*primCmdBuf, *event,stageMask);
+
+			// record resetting event
+			vk.cmdResetEvent(*primCmdBuf, *event,stageMask);
+		}
+
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf));
+
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+	};
+
+	// create fence to wait for execution of queue
+	const Unique<VkFence>					fence					(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfo				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBuf.get(),											// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// Submit the command buffer to the queue
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+
+	return tcu::TestStatus::pass("recordLargeSecondaryBufferTest succeeded");
+}
+
+tcu::TestStatus submitPrimaryBufferTwiceTest(Context& context)
+{
+
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType			sType;
+		DE_NULL,													//	const void*				pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBuf				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+	const VkCommandBufferBeginInfo			primCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		0,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	// create event that will be used to check if the command buffer has been executed
+	const Unique<VkEvent>					event					(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// reset event
+	VK_CHECK(vk.resetEvent(vkDevice, *event));
+
+	// record primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf, &primCmdBufBeginInfo));
+	{
+		// allow execution of event during every stage of pipeline
+		VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+
+		// record setting event
+		vk.cmdSetEvent(*primCmdBuf, *event,stageMask);
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf));
+
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+	};
+
+	// create fence to wait for execution of queue
+	const Unique<VkFence>					fence					(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfo				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBuf.get(),											// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// submit primary buffer
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+	VK_CHECK(vk.resetFences(vkDevice, 1u, &fence.get()));
+	// check if buffer has been executed
+	VkResult result = vk.getEventStatus(vkDevice,*event);
+	if (result != VK_EVENT_SET)
+		return tcu::TestStatus::fail("Submit Twice Test FAILED");
+
+	// reset event
+	VK_CHECK(vk.resetEvent(vkDevice, *event));
+
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+
+	// check if buffer has been executed
+	result = vk.getEventStatus(vkDevice,*event);
+	if (result != VK_EVENT_SET)
+		return tcu::TestStatus::fail("Submit Twice Test FAILED");
+	else
+		return tcu::TestStatus::pass("Submit Twice Test succeeded");
+}
+
+tcu::TestStatus submitSecondaryBufferTwiceTest(Context& context)
+{
+
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType			sType;
+		DE_NULL,													//	const void*				pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+
+	const Unique<VkCommandBuffer>			primCmdBuf1				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+	const Unique<VkCommandBuffer>			primCmdBuf2				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+
+	// Secondary Command buffer
+	const VkCommandBufferAllocateInfo		secCmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType			sType;
+		DE_NULL,													//	const void*				pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_SECONDARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			secCmdBuf				(allocateCommandBuffer(vk, vkDevice, &secCmdBufParams));
+
+	const VkCommandBufferBeginInfo			primCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		0,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const VkCommandBufferInheritanceInfo	secCmdBufInheritInfo	=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+		DE_NULL,
+		(VkRenderPass)0u,											// renderPass
+		0u,															// subpass
+		(VkFramebuffer)0u,											// framebuffer
+		VK_FALSE,													// occlusionQueryEnable
+		(VkQueryControlFlags)0u,									// queryFlags
+		(VkQueryPipelineStatisticFlags)0u,							// pipelineStatistics
+	};
+	const VkCommandBufferBeginInfo			secCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		0u,															// flags
+		&secCmdBufInheritInfo,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	// create event that will be used to check if secondary command buffer has been executed
+	const Unique<VkEvent>					event					(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// reset event
+	VK_CHECK(vk.resetEvent(vkDevice, *event));
+
+	// record first primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf1, &primCmdBufBeginInfo));
+	{
+		// record secondary command buffer
+		VK_CHECK(vk.beginCommandBuffer(*secCmdBuf, &secCmdBufBeginInfo));
+		{
+			// allow execution of event during every stage of pipeline
+			VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+
+			// record setting event
+			vk.cmdSetEvent(*secCmdBuf, *event,stageMask);
+		}
+
+		// end recording of the secondary buffer
+		VK_CHECK(vk.endCommandBuffer(*secCmdBuf));
+
+		// execute secondary buffer
+		vk.cmdExecuteCommands(*primCmdBuf1, 1, &secCmdBuf.get());
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf1));
+
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+	};
+
+	// create fence to wait for execution of queue
+	const Unique<VkFence>					fence					(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfo1				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBuf1.get(),											// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo1, *fence));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+	VK_CHECK(vk.resetFences(vkDevice, 1u, &fence.get()));
+
+	// check if secondary buffer has been executed
+	VkResult result = vk.getEventStatus(vkDevice,*event);
+	if (result != VK_EVENT_SET)
+		return tcu::TestStatus::fail("Submit Twice Secondary Command Buffer FAILED");
+
+	// reset first primary buffer
+	VK_CHECK(vk.resetCommandBuffer(*primCmdBuf1, 0u));
+
+	// reset event to allow receiving it again
+	VK_CHECK(vk.resetEvent(vkDevice, *event));
+
+	// record second primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf2, &primCmdBufBeginInfo));
+	{
+		// execute secondary buffer
+		vk.cmdExecuteCommands(*primCmdBuf2, 1, &secCmdBuf.get());
+	}
+	// end recording
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf2));
+
+	// submit second primary buffer, the secondary should be executed too
+	const VkSubmitInfo						submitInfo2				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBuf2.get(),											// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo2, *fence));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+
+	// check if secondary buffer has been executed
+	result = vk.getEventStatus(vkDevice,*event);
+	if (result != VK_EVENT_SET)
+		return tcu::TestStatus::fail("Submit Twice Secondary Command Buffer FAILED");
+	else
+		return tcu::TestStatus::pass("Submit Twice Secondary Command Buffer succeeded");
+}
+
+tcu::TestStatus oneTimeSubmitFlagPrimaryBufferTest(Context& context)
+{
+
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBuf				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+	const VkCommandBufferBeginInfo			primCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,				// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	// create event that will be used to check if secondary command buffer has been executed
+	const Unique<VkEvent>					event					(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// reset event
+	VK_CHECK(vk.resetEvent(vkDevice, *event));
+
+	// record primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf, &primCmdBufBeginInfo));
+	{
+		// allow execution of event during every stage of pipeline
+		VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+
+		// record setting event
+		vk.cmdSetEvent(*primCmdBuf, *event,stageMask);
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf));
+
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+	};
+
+	// create fence to wait for execution of queue
+	const Unique<VkFence>					fence					(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfo				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBuf.get(),											// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// submit primary buffer
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+	VK_CHECK(vk.resetFences(vkDevice, 1u, &fence.get()));
+
+	// check if buffer has been executed
+	VkResult result = vk.getEventStatus(vkDevice,*event);
+	if (result != VK_EVENT_SET)
+		return tcu::TestStatus::fail("oneTimeSubmitFlagPrimaryBufferTest FAILED");
+
+	// record primary command buffer again - implicit reset because of VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf, &primCmdBufBeginInfo));
+	{
+		// allow execution of event during every stage of pipeline
+		VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+
+		// record setting event
+		vk.cmdSetEvent(*primCmdBuf, *event,stageMask);
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf));
+
+	// reset event
+	VK_CHECK(vk.resetEvent(vkDevice, *event));
+
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+
+	// check if buffer has been executed
+	result = vk.getEventStatus(vkDevice,*event);
+	if (result != VK_EVENT_SET)
+		return tcu::TestStatus::fail("oneTimeSubmitFlagPrimaryBufferTest FAILED");
+	else
+		return tcu::TestStatus::pass("oneTimeSubmitFlagPrimaryBufferTest succeeded");
+}
+
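+// Variant of the one-time-submit test for secondary buffers: the event is set from a secondary
+// command buffer that is re-recorded before it is executed and submitted a second time.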
+tcu::TestStatus oneTimeSubmitFlagSecondaryBufferTest(Context& context)
+{
+
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType			sType;
+		DE_NULL,													//	const void*				pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+
+	const Unique<VkCommandBuffer>			primCmdBuf1				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+	const Unique<VkCommandBuffer>			primCmdBuf2				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+
+	// Secondary Command buffer
+	const VkCommandBufferAllocateInfo		secCmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType			sType;
+		DE_NULL,													//	const void*				pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_SECONDARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			secCmdBuf				(allocateCommandBuffer(vk, vkDevice, &secCmdBufParams));
+
+	const VkCommandBufferBeginInfo			primCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		0,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const VkCommandBufferInheritanceInfo	secCmdBufInheritInfo	=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+		DE_NULL,
+		(VkRenderPass)0u,											// renderPass
+		0u,															// subpass
+		(VkFramebuffer)0u,											// framebuffer
+		VK_FALSE,													// occlusionQueryEnable
+		(VkQueryControlFlags)0u,									// queryFlags
+		(VkQueryPipelineStatisticFlags)0u,							// pipelineStatistics
+	};
+	const VkCommandBufferBeginInfo			secCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,				// flags
+		&secCmdBufInheritInfo,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	// create event that will be used to check if secondary command buffer has been executed
+	const Unique<VkEvent>					event					(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// reset event
+	VK_CHECK(vk.resetEvent(vkDevice, *event));
+
+	// record first primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf1, &primCmdBufBeginInfo));
+	{
+		// record secondary command buffer
+		VK_CHECK(vk.beginCommandBuffer(*secCmdBuf, &secCmdBufBeginInfo));
+		{
+			// allow execution of event during every stage of pipeline
+			VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+
+			// record setting event
+			vk.cmdSetEvent(*secCmdBuf, *event,stageMask);
+		}
+
+		// end recording of secondary buffers
+		VK_CHECK(vk.endCommandBuffer(*secCmdBuf));
+
+		// execute secondary buffer
+		vk.cmdExecuteCommands(*primCmdBuf1, 1, &secCmdBuf.get());
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf1));
+
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+	};
+
+	// create fence to wait for execution of queue
+	const Unique<VkFence>					fence					(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfo1				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBuf1.get(),											// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo1, *fence));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+	VK_CHECK(vk.resetFences(vkDevice, 1u, &fence.get()));
+
+	// check if secondary buffer has been executed
+	VkResult result = vk.getEventStatus(vkDevice,*event);
+	if (result != VK_EVENT_SET)
+		return tcu::TestStatus::fail("Submit Twice Secondary Command Buffer FAILED");
+
+	// reset first primary buffer
+	VK_CHECK(vk.resetCommandBuffer(*primCmdBuf1, 0u));
+
+	// reset event to allow receiving it again
+	VK_CHECK(vk.resetEvent(vkDevice, *event));
+
+	// record secondary command buffer again
+	VK_CHECK(vk.beginCommandBuffer(*secCmdBuf, &secCmdBufBeginInfo));
+	{
+		// allow execution of event during every stage of pipeline
+		VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+
+		// record setting event
+		vk.cmdSetEvent(*secCmdBuf, *event,stageMask);
+	}
+	// end recording of secondary buffers
+	VK_CHECK(vk.endCommandBuffer(*secCmdBuf));
+
+	// record second primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf2, &primCmdBufBeginInfo));
+	{
+		// execute secondary buffer
+		vk.cmdExecuteCommands(*primCmdBuf2, 1, &secCmdBuf.get());
+	}
+	// end recording
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf2));
+
+	// submit second primary buffer, the secondary should be executed too
+	const VkSubmitInfo						submitInfo2				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBuf2.get(),											// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo2, *fence));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+
+	// check if secondary buffer has been executed
+	result = vk.getEventStatus(vkDevice,*event);
+	if (result != VK_EVENT_SET)
+		return tcu::TestStatus::fail("oneTimeSubmitFlagSecondaryBufferTest FAILED");
+	else
+		return tcu::TestStatus::pass("oneTimeSubmitFlagSecondaryBufferTest succeeded");
+}
+
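+// Submit one primary command buffer recorded with VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT to
+// the queue twice while it is still pending: it waits on eventOne, then sets eventTwo as evidence
+// of execution once the host signals eventOne.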
+tcu::TestStatus simultaneousUsePrimaryBufferTest(Context& context)
+{
+
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBuf				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+	const VkCommandBufferBeginInfo			primCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,				// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	// create events: the command buffer waits on eventOne and sets eventTwo to prove it executed
+	const Unique<VkEvent>					eventOne				(createEvent(vk, vkDevice, &eventCreateInfo));
+	const Unique<VkEvent>					eventTwo				(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// reset event
+	VK_CHECK(vk.resetEvent(vkDevice, *eventOne));
+
+	// record primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf, &primCmdBufBeginInfo));
+	{
+		// allow execution of event during every stage of pipeline
+		VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+
+		// wait for event
+		vk.cmdWaitEvents(*primCmdBuf, 1u, &eventOne.get(), stageMask, stageMask, 0u, DE_NULL, 0u, DE_NULL, 0u, DE_NULL);
+
+		// Set the second event
+		vk.cmdSetEvent(*primCmdBuf, eventTwo.get(), stageMask);
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf));
+
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+	};
+
+	// create fence to wait for execution of queue
+	const Unique<VkFence>					fence1					(createFence(vk, vkDevice, &fenceCreateInfo));
+	const Unique<VkFence>					fence2					(createFence(vk, vkDevice, &fenceCreateInfo));
+
+
+	const VkSubmitInfo						submitInfo				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBuf.get(),											// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// submit first buffer
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence1));
+
+	// submit second buffer
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence2));
+
+	// wait for both submissions to stop at the event (100 microsecond timeout)
+	vk.waitForFences(vkDevice, 1, &fence1.get(), 0u, 100000);
+	vk.waitForFences(vkDevice, 1, &fence2.get(), 0u, 100000);
+
+	// set event
+	VK_CHECK(vk.setEvent(vkDevice, *eventOne));
+
+	// wait for end of execution of the first buffer
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence1.get(), 0u, INFINITE_TIMEOUT));
+	// wait for end of execution of the second buffer
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence2.get(), 0u, INFINITE_TIMEOUT));
+
+	// TODO: this will be true if the command buffer was executed only once
+	// TODO: add some test that will say if it was executed twice
+
+	// check if buffer has been executed
+	VkResult result = vk.getEventStatus(vkDevice, *eventTwo);
+	if (result == VK_EVENT_SET)
+		return tcu::TestStatus::pass("simultaneous use - primary buffers test succeeded");
+	else
+		return tcu::TestStatus::fail("simultaneous use - primary buffers test FAILED");
+}
+
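+// Secondary-buffer variant: a secondary command buffer recorded with SIMULTANEOUS_USE_BIT waits on
+// eventOne and sets eventTwo; it is executed from a primary buffer and released by the host.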
+tcu::TestStatus simultaneousUseSecondaryBufferTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType			sType;
+		DE_NULL,													//	const void*				pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBuf				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+
+	// Secondary Command buffer params
+	const VkCommandBufferAllocateInfo		secCmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType			sType;
+		DE_NULL,													//	const void*				pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_SECONDARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			secCmdBuf				(allocateCommandBuffer(vk, vkDevice, &secCmdBufParams));
+
+	const VkCommandBufferBeginInfo			primCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		0,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const VkCommandBufferInheritanceInfo	secCmdBufInheritInfo	=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+		DE_NULL,
+		(VkRenderPass)0u,											// renderPass
+		0u,															// subpass
+		(VkFramebuffer)0u,											// framebuffer
+		VK_FALSE,													// occlusionQueryEnable
+		(VkQueryControlFlags)0u,									// queryFlags
+		(VkQueryPipelineStatisticFlags)0u,							// pipelineStatistics
+	};
+	const VkCommandBufferBeginInfo			secCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,				// flags
+		&secCmdBufInheritInfo,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	// create events: the secondary buffer waits on eventOne and sets eventTwo to prove it executed
+	const Unique<VkEvent>					eventOne				(createEvent(vk, vkDevice, &eventCreateInfo));
+	const Unique<VkEvent>					eventTwo				(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// reset event
+	VK_CHECK(vk.resetEvent(vkDevice, *eventOne));
+	VK_CHECK(vk.resetEvent(vkDevice, *eventTwo));
+
+	// record secondary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*secCmdBuf, &secCmdBufBeginInfo));
+	{
+		// allow execution of event during every stage of pipeline
+		VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+
+		// wait for event
+		vk.cmdWaitEvents(*secCmdBuf, 1, &eventOne.get(), stageMask, stageMask, 0, DE_NULL, 0u, DE_NULL, 0u, DE_NULL);
+
+		// set the second event
+		vk.cmdSetEvent(*secCmdBuf, *eventTwo, stageMask);
+	}
+	// end recording of secondary buffers
+	VK_CHECK(vk.endCommandBuffer(*secCmdBuf));
+
+	// record primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf, &primCmdBufBeginInfo));
+	{
+		// execute secondary buffer
+		vk.cmdExecuteCommands(*primCmdBuf, 1, &secCmdBuf.get());
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf));
+
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+	};
+
+	// create fence to wait for execution of queue
+	const Unique<VkFence>					fence					(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfo				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBuf.get(),											// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// submit primary buffer, the secondary should be executed too
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+
+	// wait for the buffer to stop at the event (100 microsecond timeout)
+	vk.waitForFences(vkDevice, 1, &fence.get(), 0u, 100000);
+
+	// set event
+	VK_CHECK(vk.setEvent(vkDevice, *eventOne));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+
+	// TODO: this will be true if the command buffer was executed only once
+	// TODO: add some test that will say if it was executed twice
+
+	// check if secondary buffer has been executed
+	VkResult result = vk.getEventStatus(vkDevice,*eventTwo);
+	if (result == VK_EVENT_SET)
+		return tcu::TestStatus::pass("Simulatous Secondary Command Buffer Execution succeeded");
+	else
+		return tcu::TestStatus::fail("Simulatous Secondary Command Buffer Execution FAILED");
+}
+
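+// Record-only test: a secondary buffer whose inheritance info enables occlusion queries with the
+// precise bit is executed inside a precise occlusion query in the primary buffer.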
+tcu::TestStatus recordBufferQueryPreciseWithFlagTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		primCmdBufParams		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							// level;
+		1u,															// bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBuf				(allocateCommandBuffer(vk, vkDevice, &primCmdBufParams));
+
+	// Secondary Command buffer params
+	const VkCommandBufferAllocateInfo		secCmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// pool;
+		VK_COMMAND_BUFFER_LEVEL_SECONDARY,							// level;
+		1u,															// bufferCount;
+	};
+	const Unique<VkCommandBuffer>			secCmdBuf				(allocateCommandBuffer(vk, vkDevice, &secCmdBufParams));
+
+	const VkCommandBufferBeginInfo			primBufferBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,				// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const VkCommandBufferInheritanceInfo	secBufferInheritInfo	=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+		DE_NULL,
+		0u,															// renderPass
+		0u,															// subpass
+		0u,															// framebuffer
+		VK_TRUE,													// occlusionQueryEnable
+		VK_QUERY_CONTROL_PRECISE_BIT,								// queryFlags
+		(VkQueryPipelineStatisticFlags)0u,							// pipelineStatistics
+	};
+	const VkCommandBufferBeginInfo			secBufferBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,				// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		&secBufferInheritInfo,
+	};
+
+	// Create an occlusion query with VK_QUERY_CONTROL_PRECISE_BIT set
+	const VkQueryPoolCreateInfo				queryPoolCreateInfo		=
+	{
+		VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,					// sType
+		DE_NULL,													// pNext
+		VK_QUERY_CONTROL_PRECISE_BIT,								// flags
+		VK_QUERY_TYPE_OCCLUSION,									// queryType
+		1u,															// queryCount
+		0u,															// pipelineStatistics
+	};
+	Unique<VkQueryPool>						queryPool				(createQueryPool(vk, vkDevice, &queryPoolCreateInfo));
+
+	VK_CHECK(vk.beginCommandBuffer(secCmdBuf.get(), &secBufferBeginInfo));
+	VK_CHECK(vk.endCommandBuffer(secCmdBuf.get()));
+
+	VK_CHECK(vk.beginCommandBuffer(primCmdBuf.get(), &primBufferBeginInfo));
+	{
+		vk.cmdBeginQuery(primCmdBuf.get(), queryPool.get(), 0u, VK_QUERY_CONTROL_PRECISE_BIT);
+		{
+			vk.cmdExecuteCommands(primCmdBuf.get(), 1u, &secCmdBuf.get());
+		}
+		vk.cmdEndQuery(primCmdBuf.get(), queryPool.get(), 0u);
+	}
+	VK_CHECK(vk.endCommandBuffer(primCmdBuf.get()));
+
+	return tcu::TestStatus::pass("Successfully recorded a secondary command buffer allowing a precise occlusion query.");
+}
+
+tcu::TestStatus recordBufferQueryImpreciseWithFlagTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		primCmdBufParams		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							// level;
+		1u,															// bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBuf				(allocateCommandBuffer(vk, vkDevice, &primCmdBufParams));
+
+	// Secondary Command buffer params
+	const VkCommandBufferAllocateInfo		secCmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// pool;
+		VK_COMMAND_BUFFER_LEVEL_SECONDARY,							// level;
+		1u,															// bufferCount;
+	};
+	const Unique<VkCommandBuffer>			secCmdBuf				(allocateCommandBuffer(vk, vkDevice, &secCmdBufParams));
+
+	const VkCommandBufferBeginInfo			primBufferBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,				// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const VkCommandBufferInheritanceInfo	secBufferInheritInfo	=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+		DE_NULL,
+		0u,															// renderPass
+		0u,															// subpass
+		0u,															// framebuffer
+		VK_TRUE,													// occlusionQueryEnable
+		VK_QUERY_CONTROL_PRECISE_BIT,								// queryFlags
+		(VkQueryPipelineStatisticFlags)0u,							// pipelineStatistics
+	};
+	const VkCommandBufferBeginInfo			secBufferBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,				// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		&secBufferInheritInfo,
+	};
+
+	// Create an occlusion query pool without VK_QUERY_CONTROL_PRECISE_BIT set
+	const VkQueryPoolCreateInfo				queryPoolCreateInfo		=
+	{
+		VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,					// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		VK_QUERY_TYPE_OCCLUSION,									// queryType
+		1u,															// queryCount
+		0u,															// pipelineStatistics
+	};
+	Unique<VkQueryPool>						queryPool				(createQueryPool(vk, vkDevice, &queryPoolCreateInfo));
+
+	VK_CHECK(vk.beginCommandBuffer(secCmdBuf.get(), &secBufferBeginInfo));
+	VK_CHECK(vk.endCommandBuffer(secCmdBuf.get()));
+
+	VK_CHECK(vk.beginCommandBuffer(primCmdBuf.get(), &primBufferBeginInfo));
+	{
+		vk.cmdBeginQuery(primCmdBuf.get(), queryPool.get(), 0u, VK_QUERY_CONTROL_PRECISE_BIT);
+		{
+			vk.cmdExecuteCommands(primCmdBuf.get(), 1u, &secCmdBuf.get());
+		}
+		vk.cmdEndQuery(primCmdBuf.get(), queryPool.get(), 0u);
+	}
+	VK_CHECK(vk.endCommandBuffer(primCmdBuf.get()));
+
+	return tcu::TestStatus::pass("Successfully recorded a secondary command buffer allowing a precise occlusion query.");
+}
+
+tcu::TestStatus recordBufferQueryImpreciseWithoutFlagTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		primCmdBufParams		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							// level;
+		1u,															// bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBuf				(allocateCommandBuffer(vk, vkDevice, &primCmdBufParams));
+
+	// Secondary Command buffer params
+	const VkCommandBufferAllocateInfo		secCmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// pool;
+		VK_COMMAND_BUFFER_LEVEL_SECONDARY,							// level;
+		1u,															// bufferCount;
+	};
+	const Unique<VkCommandBuffer>			secCmdBuf				(allocateCommandBuffer(vk, vkDevice, &secCmdBufParams));
+
+	const VkCommandBufferBeginInfo			primBufferBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,				// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const VkCommandBufferInheritanceInfo	secBufferInheritInfo	=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+		DE_NULL,
+		0u,															// renderPass
+		0u,															// subpass
+		0u,															// framebuffer
+		VK_TRUE,													// occlusionQueryEnable
+		0u,															// queryFlags
+		(VkQueryPipelineStatisticFlags)0u,							// pipelineStatistics
+	};
+	const VkCommandBufferBeginInfo			secBufferBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,				// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		&secBufferInheritInfo,
+	};
+
+	// Create an occlusion query pool without VK_QUERY_CONTROL_PRECISE_BIT set
+	const VkQueryPoolCreateInfo				queryPoolCreateInfo		=
+	{
+		VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,					// sType
+		DE_NULL,													// pNext
+		(VkQueryPoolCreateFlags)0,									// flags
+		VK_QUERY_TYPE_OCCLUSION,									// queryType
+		1u,															// queryCount
+		0u,															// pipelineStatistics
+	};
+	Unique<VkQueryPool>						queryPool				(createQueryPool(vk, vkDevice, &queryPoolCreateInfo));
+
+	VK_CHECK(vk.beginCommandBuffer(secCmdBuf.get(), &secBufferBeginInfo));
+	VK_CHECK(vk.endCommandBuffer(secCmdBuf.get()));
+
+	VK_CHECK(vk.beginCommandBuffer(primCmdBuf.get(), &primBufferBeginInfo));
+	{
+		vk.cmdBeginQuery(primCmdBuf.get(), queryPool.get(), 0u, VK_QUERY_CONTROL_PRECISE_BIT);
+		{
+			vk.cmdExecuteCommands(primCmdBuf.get(), 1u, &secCmdBuf.get());
+		}
+		vk.cmdEndQuery(primCmdBuf.get(), queryPool.get(), 0u);
+	}
+	VK_CHECK(vk.endCommandBuffer(primCmdBuf.get()));
+
+	return tcu::TestStatus::pass("Successfully recorded a secondary command buffer allowing a precise occlusion query.");
+}
+
+/******** 19.4. Command Buffer Submission (6.4 in VK 1.0 Spec) ****************/
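+// Allocate several primary command buffers that each set their own event, submit them all in one
+// VkSubmitInfo, and check that every event has been signaled.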
+tcu::TestStatus submitBufferCountNonZero(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const deUint32							BUFFER_COUNT			= 5u;
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							// level;
+		BUFFER_COUNT,												// bufferCount;
+	};
+	VkCommandBuffer cmdBuffers[BUFFER_COUNT];
+	VK_CHECK(vk.allocateCommandBuffers(vkDevice, &cmdBufParams, cmdBuffers));
+
+	const VkCommandBufferBeginInfo			cmdBufBeginInfo			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,				// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,						// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags;
+	};
+
+	std::vector<VkEventSp>					events;
+	for (deUint32 ndx = 0; ndx < BUFFER_COUNT; ++ndx)
+	{
+		events.push_back(VkEventSp(new vk::Unique<VkEvent>(createEvent(vk, vkDevice, &eventCreateInfo, DE_NULL))));
+	}
+
+	// Record the command buffers
+	for (deUint32 ndx = 0; ndx < BUFFER_COUNT; ++ndx)
+	{
+		VK_CHECK(vk.beginCommandBuffer(cmdBuffers[ndx], &cmdBufBeginInfo));
+		{
+			vk.cmdSetEvent(cmdBuffers[ndx], events[ndx]->get(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
+		}
+		VK_CHECK(vk.endCommandBuffer(cmdBuffers[ndx]));
+	}
+
+	// We'll use a fence to wait for the execution of the queue
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,						// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags
+	};
+	const Unique<VkFence>					fence					(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfo				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		BUFFER_COUNT,												// commandBufferCount
+		cmdBuffers,													// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// Submit all the command buffers to the queue in a single batch
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, fence.get()));
+	// Wait for the queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1u, &fence.get(), VK_TRUE, INFINITE_TIMEOUT));
+
+	// Check if the buffers were executed
+	tcu::TestStatus testResult = tcu::TestStatus::incomplete();
+
+	for (deUint32 ndx = 0; ndx < BUFFER_COUNT; ++ndx)
+	{
+		if (vk.getEventStatus(vkDevice, events[ndx]->get()) != VK_EVENT_SET)
+		{
+			testResult = tcu::TestStatus::fail("Failed to set the event.");
+			break;
+		}
+	}
+
+	if (!testResult.isComplete())
+		testResult = tcu::TestStatus::pass("All buffers were submitted and executed correctly.");
+
+	return testResult;
+}
+
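+// Call vkQueueSubmit with submitCount == 0 so the first VkSubmitInfo must be ignored; its event
+// should stay unsignaled, while a second, regular submission lets us wait for the queue.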
+tcu::TestStatus submitBufferCountEqualZero(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const deUint32							BUFFER_COUNT			= 2u;
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							// level;
+		BUFFER_COUNT,												// bufferCount;
+	};
+	VkCommandBuffer cmdBuffers[BUFFER_COUNT];
+	VK_CHECK(vk.allocateCommandBuffers(vkDevice, &cmdBufParams, cmdBuffers));
+
+	const VkCommandBufferBeginInfo			cmdBufBeginInfo			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,				// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,						// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags;
+	};
+
+	std::vector<VkEventSp>					events;
+	for (deUint32 ndx = 0; ndx < BUFFER_COUNT; ++ndx)
+		events.push_back(VkEventSp(new vk::Unique<VkEvent>(createEvent(vk, vkDevice, &eventCreateInfo, DE_NULL))));
+
+	// Record the command buffers
+	for (deUint32 ndx = 0; ndx < BUFFER_COUNT; ++ndx)
+	{
+		VK_CHECK(vk.beginCommandBuffer(cmdBuffers[ndx], &cmdBufBeginInfo));
+		{
+			vk.cmdSetEvent(cmdBuffers[ndx], events[ndx]->get(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
+		}
+		VK_CHECK(vk.endCommandBuffer(cmdBuffers[ndx]));
+	}
+
+	// We'll use a fence to wait for the execution of the queue
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,						// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags
+	};
+	const Unique<VkFence>					fenceZero				(createFence(vk, vkDevice, &fenceCreateInfo));
+	const Unique<VkFence>					fenceOne				(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfoCountZero		=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1u,															// commandBufferCount
+		&cmdBuffers[0],												// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	const VkSubmitInfo						submitInfoCountOne		=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1u,															// commandBufferCount
+		&cmdBuffers[1],												// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// Submit the command buffers to the queue
+	// Two submits are performed: the first with submitCount == 0 (so its VkSubmitInfo must be
+	// ignored), the second a regular submit, giving the first one a chance to be processed
+	// before we check the event's status
+	VK_CHECK(vk.queueSubmit(queue, 0, &submitInfoCountZero, fenceZero.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfoCountOne, fenceOne.get()));
+
+	const VkFence							fences[]				=
+	{
+		fenceZero.get(),
+		fenceOne.get(),
+	};
+
+	// Wait for the queue
+	VK_CHECK(vk.waitForFences(vkDevice, (deUint32)DE_LENGTH_OF_ARRAY(fences), fences, VK_TRUE, INFINITE_TIMEOUT));
+
+	// Check if the first buffer was executed
+	tcu::TestStatus testResult = tcu::TestStatus::incomplete();
+
+	if (vk.getEventStatus(vkDevice, events[0]->get()) == VK_EVENT_SET)
+		testResult = tcu::TestStatus::fail("The first event was signaled.");
+	else
+		testResult = tcu::TestStatus::pass("The first submission was ignored.");
+
+	return testResult;
+}
+
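+// Submit one batch with a null fence and a second batch with a valid fence, then verify that the
+// first batch was still executed (its event is set) even though it had no fence attached.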
+tcu::TestStatus submitBufferNullFence(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const deUint32							BUFFER_COUNT			= 2u;
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							// level;
+		1u,															// bufferCount;
+	};
+	VkCommandBuffer cmdBuffers[BUFFER_COUNT];
+	for (deUint32 ndx = 0; ndx < BUFFER_COUNT; ++ndx)
+		VK_CHECK(vk.allocateCommandBuffers(vkDevice, &cmdBufParams, &cmdBuffers[ndx]));
+
+	const VkCommandBufferBeginInfo			cmdBufBeginInfo			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,				// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,						// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags;
+	};
+
+	std::vector<VkEventSp>					events;
+	for (deUint32 ndx = 0; ndx < BUFFER_COUNT; ++ndx)
+		events.push_back(VkEventSp(new vk::Unique<VkEvent>(createEvent(vk, vkDevice, &eventCreateInfo, DE_NULL))));
+
+	// Record the command buffers
+	for (deUint32 ndx = 0; ndx < BUFFER_COUNT; ++ndx)
+	{
+		VK_CHECK(vk.beginCommandBuffer(cmdBuffers[ndx], &cmdBufBeginInfo));
+		{
+			vk.cmdSetEvent(cmdBuffers[ndx], events[ndx]->get(), VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
+		}
+		VK_CHECK(vk.endCommandBuffer(cmdBuffers[ndx]));
+	}
+
+	// We'll use a fence to wait for the execution of the queue
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,						// sType;
+		DE_NULL,													// pNext;
+		0u,															// flags
+	};
+	const Unique<VkFence>					fence					(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfoNullFence		=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1u,															// commandBufferCount
+		&cmdBuffers[0],												// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	const VkSubmitInfo						submitInfoNonNullFence	=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1u,															// commandBufferCount
+		&cmdBuffers[1],												// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// Perform two submissions: one with a null fence and one with a valid fence.
+	// Submitting the second buffer should give the first one time to execute.
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfoNullFence, DE_NULL));
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfoNonNullFence, fence.get()));
+
+	// Wait for the queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1u, &fence.get(), VK_TRUE, INFINITE_TIMEOUT));
+
+	tcu::TestStatus testResult = tcu::TestStatus::incomplete();
+
+	if (vk.getEventStatus(vkDevice, events[0]->get()) != VK_EVENT_SET)
+		testResult = tcu::TestStatus::fail("The first event was not signaled -> the buffer was not executed.");
+	else
+		testResult = tcu::TestStatus::pass("The first event was signaled -> the buffer with null fence submitted and executed correctly.");
+
+	return testResult;
+}
+
+/******** 19.5. Secondary Command Buffer Execution (6.6 in VK 1.0 Spec) *******/
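+// Basic vkCmdExecuteCommands test: a secondary buffer sets an event, a primary buffer executes it,
+// and the event must be signaled once the submission has completed.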
+tcu::TestStatus executeSecondaryBufferTest(Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			// flags;
+		queueFamilyIndex,											// queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// commandPool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							// level;
+		1u,															// bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBuf				(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+
+	// Secondary Command buffer
+	const VkCommandBufferAllocateInfo		secCmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType;
+		DE_NULL,													// pNext;
+		*cmdPool,													// commandPool;
+		VK_COMMAND_BUFFER_LEVEL_SECONDARY,							// level;
+		1u,															// bufferCount;
+	};
+	const Unique<VkCommandBuffer>			secCmdBuf				(allocateCommandBuffer(vk, vkDevice, &secCmdBufParams));
+
+	const VkCommandBufferBeginInfo			primCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,				// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const VkCommandBufferInheritanceInfo	secCmdBufInheritInfo	=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+		DE_NULL,
+		DE_NULL,													// renderPass
+		0u,															// subpass
+		DE_NULL,													// framebuffer
+		VK_FALSE,													// occlusionQueryEnable
+		(VkQueryControlFlags)0u,									// queryFlags
+		(VkQueryPipelineStatisticFlags)0u,							// pipelineStatistics
+	};
+	const VkCommandBufferBeginInfo			secCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,				// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		&secCmdBufInheritInfo,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	// create event that will be used to check if secondary command buffer has been executed
+	const Unique<VkEvent>					event					(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// reset event
+	VK_CHECK(vk.resetEvent(vkDevice, *event));
+
+	// record secondary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*secCmdBuf, &secCmdBufBeginInfo));
+	{
+		// allow execution of event during every stage of pipeline
+		VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+		// record setting event
+		vk.cmdSetEvent(*secCmdBuf, *event, stageMask);
+	}
+	// end recording of the secondary buffer
+	VK_CHECK(vk.endCommandBuffer(*secCmdBuf));
+
+	// record primary command buffer
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBuf, &primCmdBufBeginInfo));
+	{
+		// execute secondary buffer
+		vk.cmdExecuteCommands(*primCmdBuf, 1u, &secCmdBuf.get());
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBuf));
+
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+	};
+
+	// create fence to wait for execution of queue
+	const Unique<VkFence>					fence					(createFence(vk, vkDevice, &fenceCreateInfo));
+	const VkSubmitInfo						submitInfo				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1u,															// commandBufferCount
+		&primCmdBuf.get(),											// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// submit primary buffer, the secondary should be executed too
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, fence.get()));
+
+	// wait for end of execution of queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), 0u, INFINITE_TIMEOUT));
+
+	// check if secondary buffer has been executed
+	VkResult result = vk.getEventStatus(vkDevice, *event);
+	if (result == VK_EVENT_SET)
+		return tcu::TestStatus::pass("executeSecondaryBufferTest succeeded");
+
+	return tcu::TestStatus::fail("executeSecondaryBufferTest FAILED");
+}
+
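+// Execute the first of a pool of simultaneous-use secondary buffers from one primary buffer, and
+// the whole pool (including that first one again) from a second primary buffer submitted while the
+// first submission is still blocked on eventOne.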
+tcu::TestStatus executeSecondaryBufferTwiceTest(Context& context)
+{
+	const deUint32							BUFFER_COUNT			= 10u;
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType				sType;
+		DE_NULL,													//	const void*					pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			//	VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32					queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType			sType;
+		DE_NULL,													//	const void*				pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							//	VkCommandBufferLevel		level;
+		1u,															//	uint32_t					bufferCount;
+	};
+	const Unique<VkCommandBuffer>			primCmdBufOne			(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+	const Unique<VkCommandBuffer>			primCmdBufTwo			(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+
+	// Secondary Command buffers params
+	const VkCommandBufferAllocateInfo		secCmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				//	VkStructureType			sType;
+		DE_NULL,													//	const void*				pNext;
+		*cmdPool,													//	VkCommandPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_SECONDARY,							//	VkCommandBufferLevel		level;
+		BUFFER_COUNT,												//	uint32_t					bufferCount;
+	};
+	VkCommandBuffer cmdBuffers[BUFFER_COUNT];
+	VK_CHECK(vk.allocateCommandBuffers(vkDevice, &secCmdBufParams, cmdBuffers));
+
+	const VkCommandBufferBeginInfo			primCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		0,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const VkCommandBufferInheritanceInfo	secCmdBufInheritInfo	=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+		DE_NULL,
+		(VkRenderPass)0u,											// renderPass
+		0u,															// subpass
+		(VkFramebuffer)0u,											// framebuffer
+		VK_FALSE,													// occlusionQueryEnable
+		(VkQueryControlFlags)0u,									// queryFlags
+		(VkQueryPipelineStatisticFlags)0u,							// pipelineStatistics
+	};
+	const VkCommandBufferBeginInfo			secCmdBufBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT,				// flags
+		&secCmdBufInheritInfo,
+	};
+
+	// Fill create info struct for event
+	const VkEventCreateInfo					eventCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+		DE_NULL,
+		0u,
+	};
+
+	// create event that will be used to check if secondary command buffer has been executed
+	const Unique<VkEvent>					eventOne				(createEvent(vk, vkDevice, &eventCreateInfo));
+
+	// reset event
+	VK_CHECK(vk.resetEvent(vkDevice, *eventOne));
+
+	for (deUint32 ndx = 0; ndx < BUFFER_COUNT; ++ndx)
+	{
+		// record secondary command buffer
+		VK_CHECK(vk.beginCommandBuffer(cmdBuffers[ndx], &secCmdBufBeginInfo));
+		{
+			// allow execution of event during every stage of pipeline
+			VkPipelineStageFlags stageMask = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;
+
+			// wait for event
+			vk.cmdWaitEvents(cmdBuffers[ndx], 1, &eventOne.get(), stageMask, stageMask, 0, DE_NULL, 0u, DE_NULL, 0u, DE_NULL);
+		}
+		// end recording of secondary buffers
+		VK_CHECK(vk.endCommandBuffer(cmdBuffers[ndx]));
+	}
+
+	// record primary command buffer one
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBufOne, &primCmdBufBeginInfo));
+	{
+		// execute one secondary buffer
+		vk.cmdExecuteCommands(*primCmdBufOne, 1, cmdBuffers );
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBufOne));
+
+	// record primary command buffer two
+	VK_CHECK(vk.beginCommandBuffer(*primCmdBufTwo, &primCmdBufBeginInfo));
+	{
+		// execute all secondary buffers
+		vk.cmdExecuteCommands(*primCmdBufTwo, BUFFER_COUNT, cmdBuffers);
+	}
+	VK_CHECK(vk.endCommandBuffer(*primCmdBufTwo));
+
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+	};
+
+	// create fence to wait for execution of queue
+	const Unique<VkFence>					fenceOne				(createFence(vk, vkDevice, &fenceCreateInfo));
+	const Unique<VkFence>					fenceTwo				(createFence(vk, vkDevice, &fenceCreateInfo));
+
+	const VkSubmitInfo						submitInfoOne			=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBufOne.get(),										// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// submit primary buffer, the secondary should be executed too
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfoOne, *fenceOne));
+
+	// wait for the first submission to stop at the event (100 microsecond timeout)
+	vk.waitForFences(vkDevice, 1, &fenceOne.get(), 0u, 100000);
+
+	const VkSubmitInfo						submitInfoTwo			=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&primCmdBufTwo.get(),										// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+
+	// submit second primary buffer, the secondary should be executed too
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfoTwo, *fenceTwo));
+
+	// wait for all buffers to stop at the event (100 microsecond timeout)
+	vk.waitForFences(vkDevice, 1, &fenceTwo.get(), 0u, 100000);
+
+	// now all buffers are waiting at eventOne
+	// set event eventOne
+	VK_CHECK(vk.setEvent(vkDevice, *eventOne));
+
+	// wait for end of execution of fenceOne
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fenceOne.get(), 0u, INFINITE_TIMEOUT));
+
+	// wait for end of execution of second queue
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fenceTwo.get(), 0u, INFINITE_TIMEOUT));
+
+	return tcu::TestStatus::pass("executeSecondaryBufferTwiceTest succeeded");
+}
+
+/******** 19.6. Commands Allowed Inside Command Buffers (6.7 in VK 1.0 Spec) **/
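+// Creates matching "good" and "bad" compute pipelines on the same layout and descriptor set; based
+// on the expected reference values below, the dispatch is expected to use the pipeline bound last.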
+tcu::TestStatus orderBindPipelineTest(Context& context)
+{
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkDevice							device					= context.getDevice();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+	Allocator&								allocator				= context.getDefaultAllocator();
+	const ComputeInstanceResultBuffer		result					(vk, device, allocator);
+
+	enum
+	{
+		ADDRESSABLE_SIZE = 256, // allocate a lot more than required
+	};
+
+	const tcu::Vec4							colorA1					= tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f);
+	const tcu::Vec4							colorA2					= tcu::Vec4(1.0f, 1.0f, 0.0f, 1.0f);
+	const tcu::Vec4							colorB1					= tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f);
+	const tcu::Vec4							colorB2					= tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f);
+
+	const deUint32							dataOffsetA				= (0u);
+	const deUint32							dataOffsetB				= (0u);
+	const deUint32							viewOffsetA				= (0u);
+	const deUint32							viewOffsetB				= (0u);
+	const deUint32							bufferSizeA				= dataOffsetA + ADDRESSABLE_SIZE;
+	const deUint32							bufferSizeB				= dataOffsetB + ADDRESSABLE_SIZE;
+
+	de::MovePtr<Allocation>					bufferMemA;
+	const Unique<VkBuffer>					bufferA					(createColorDataBuffer(dataOffsetA, bufferSizeA, colorA1, colorA2, &bufferMemA, context));
+
+	de::MovePtr<Allocation>					bufferMemB;
+	const Unique<VkBuffer>					bufferB					(createColorDataBuffer(dataOffsetB, bufferSizeB, colorB1, colorB2, &bufferMemB, context));
+
+	const Unique<VkDescriptorSetLayout>		descriptorSetLayout		(createDescriptorSetLayout(context));
+	const Unique<VkDescriptorPool>			descriptorPool			(createDescriptorPool(context));
+	const Unique<VkDescriptorSet>			descriptorSet			(createDescriptorSet(*descriptorPool, *descriptorSetLayout, *bufferA, viewOffsetA, *bufferB, viewOffsetB, result.getBuffer(), context));
+	const VkDescriptorSet					descriptorSets[]		= { *descriptorSet };
+	const int								numDescriptorSets		= DE_LENGTH_OF_ARRAY(descriptorSets);
+
+	const VkPipelineLayoutCreateInfo layoutCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,				// sType
+		DE_NULL,													// pNext
+		(VkPipelineLayoutCreateFlags)0,
+		numDescriptorSets,											// setLayoutCount
+		&descriptorSetLayout.get(),									// pSetLayouts
+		0u,															// pushConstantRangeCount
+		DE_NULL,													// pPushConstantRanges
+	};
+	Unique<VkPipelineLayout>				pipelineLayout			(createPipelineLayout(vk, device, &layoutCreateInfo));
+
+	const Unique<VkShaderModule>			computeModuleGood		(createShaderModule(vk, device, context.getBinaryCollection().get("compute_good"), (VkShaderModuleCreateFlags)0u));
+	const Unique<VkShaderModule>			computeModuleBad		(createShaderModule(vk, device, context.getBinaryCollection().get("compute_bad"),  (VkShaderModuleCreateFlags)0u));
+
+	const VkPipelineShaderStageCreateInfo	shaderCreateInfoGood	=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+		DE_NULL,
+		(VkPipelineShaderStageCreateFlags)0,
+		VK_SHADER_STAGE_COMPUTE_BIT,								// stage
+		*computeModuleGood,											// shader
+		"main",
+		DE_NULL,													// pSpecializationInfo
+	};
+
+	const VkPipelineShaderStageCreateInfo	shaderCreateInfoBad	=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineShaderStageCreateFlags)0,
+		vk::VK_SHADER_STAGE_COMPUTE_BIT,							// stage
+		*computeModuleBad,											// shader
+		"main",
+		DE_NULL,													// pSpecializationInfo
+	};
+
+	const VkComputePipelineCreateInfo		createInfoGood			=
+	{
+		VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+		shaderCreateInfoGood,										// cs
+		*pipelineLayout,											// layout
+		(vk::VkPipeline)0,											// basePipelineHandle
+		0u,															// basePipelineIndex
+	};
+
+	const VkComputePipelineCreateInfo		createInfoBad			=
+	{
+		VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+		shaderCreateInfoBad,										// cs
+		*pipelineLayout,											// layout
+		(VkPipeline)0,												// basePipelineHandle
+		0u,															// basePipelineIndex
+	};
+
+	const Unique<VkPipeline>				pipelineGood			(createComputePipeline(vk, device, (VkPipelineCache)0u, &createInfoGood));
+	const Unique<VkPipeline>				pipelineBad				(createComputePipeline(vk, device, (VkPipelineCache)0u, &createInfoBad));
+
+	const VkAccessFlags						inputBit				= (VK_ACCESS_UNIFORM_READ_BIT);
+	const VkBufferMemoryBarrier				bufferBarriers[]		=
+	{
+		{
+			VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+			DE_NULL,
+			VK_ACCESS_HOST_WRITE_BIT,									// outputMask
+			inputBit,													// inputMask
+			VK_QUEUE_FAMILY_IGNORED,									// srcQueueFamilyIndex
+			VK_QUEUE_FAMILY_IGNORED,									// destQueueFamilyIndex
+			*bufferA,													// buffer
+			(VkDeviceSize)0u,											// offset
+			(VkDeviceSize)bufferSizeA,									// size
+		},
+		{
+			VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+			DE_NULL,
+			VK_ACCESS_HOST_WRITE_BIT,									// outputMask
+			inputBit,													// inputMask
+			VK_QUEUE_FAMILY_IGNORED,									// srcQueueFamilyIndex
+			VK_QUEUE_FAMILY_IGNORED,									// destQueueFamilyIndex
+			*bufferB,													// buffer
+			(VkDeviceSize)0u,											// offset
+			(VkDeviceSize)bufferSizeB,									// size
+		}
+	};
+
+	const deUint32							numSrcBuffers			= 1u;
+
+	const deUint32* const					dynamicOffsets			= (DE_NULL);
+	const deUint32							numDynamicOffsets		= (0);
+	const int								numPreBarriers			= numSrcBuffers;
+	const vk::VkBufferMemoryBarrier* const	postBarriers			= result.getResultReadBarrier();
+	const int								numPostBarriers			= 1;
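+	// expected pattern follows the compute shader below: quadrants 1 and 2 receive colorA (colorA1), quadrants 0 and 3 receive colorB (colorA2)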
+	const tcu::Vec4							refQuadrantValue14		= (colorA2);
+	const tcu::Vec4							refQuadrantValue23		= (colorA1);
+	const tcu::Vec4							references[4]			=
+	{
+		refQuadrantValue14,
+		refQuadrantValue23,
+		refQuadrantValue23,
+		refQuadrantValue14,
+	};
+	tcu::Vec4								results[4];
+
+	// submit and wait begin
+
+	const tcu::UVec3 numWorkGroups = tcu::UVec3(4, 1u, 1);
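+	// one workgroup per quadrant; each invocation writes read_colors[gl_WorkGroupID.x]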
+
+	const VkCommandPoolCreateInfo			cmdPoolCreateInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType;
+		DE_NULL,													// pNext
+		VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,						// flags
+		queueFamilyIndex,											// queueFamilyIndex
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, device, &cmdPoolCreateInfo));
+
+	const VkFenceCreateInfo					fenceCreateInfo			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,			// flags
+	};
+
+	const VkCommandBufferAllocateInfo		cmdBufCreateInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,				// sType
+		DE_NULL,													// pNext
+		*cmdPool,													// commandPool
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,							// level
+		1u,															// bufferCount;
+	};
+
+	const VkCommandBufferBeginInfo			cmdBufBeginInfo			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,				// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const Unique<VkFence>					cmdCompleteFence		(createFence(vk, device, &fenceCreateInfo));
+	const Unique<VkCommandBuffer>			cmd						(allocateCommandBuffer(vk, device, &cmdBufCreateInfo));
+
+	VK_CHECK(vk.beginCommandBuffer(*cmd, &cmdBufBeginInfo));
+
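+	// bind the "bad" pipeline first and the "good" one second; only the last bound compute pipeline may be used by the dispatch below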
+	vk.cmdBindPipeline(*cmd, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineBad);
+	vk.cmdBindPipeline(*cmd, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineGood);
+	vk.cmdBindDescriptorSets(*cmd, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0, numDescriptorSets, descriptorSets, numDynamicOffsets, dynamicOffsets);
+
+	if (numPreBarriers)
+		vk.cmdPipelineBarrier(*cmd, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0,
+							  0, (const VkMemoryBarrier*)DE_NULL,
+							  numPreBarriers, bufferBarriers,
+							  0, (const VkImageMemoryBarrier*)DE_NULL);
+
+	vk.cmdDispatch(*cmd, numWorkGroups.x(), numWorkGroups.y(), numWorkGroups.z());
+	vk.cmdPipelineBarrier(*cmd, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0,
+						  0, (const VkMemoryBarrier*)DE_NULL,
+						  numPostBarriers, postBarriers,
+						  0, (const VkImageMemoryBarrier*)DE_NULL);
+	VK_CHECK(vk.endCommandBuffer(*cmd));
+
+	// run
+	// submit the command buffer and wait for it to complete
+	const VkSubmitInfo						submitInfo				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,								// sType
+		DE_NULL,													// pNext
+		0u,															// waitSemaphoreCount
+		DE_NULL,													// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,						// pWaitDstStageMask
+		1,															// commandBufferCount
+		&cmd.get(),													// pCommandBuffers
+		0u,															// signalSemaphoreCount
+		DE_NULL,													// pSignalSemaphores
+	};
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *cmdCompleteFence));
+
+	VK_CHECK(vk.waitForFences(device, 1u, &cmdCompleteFence.get(), 0u, INFINITE_TIMEOUT)); // \note: timeout is failure
+	VK_CHECK(vk.resetFences(device, 1u, &cmdCompleteFence.get()));
+
+	// submit and wait end
+	result.readResultContentsTo(&results);
+
+	// verify
+	if (results[0] == references[0] &&
+		results[1] == references[1] &&
+		results[2] == references[2] &&
+		results[3] == references[3])
+	{
+		return tcu::TestStatus::pass("Pass");
+	}
+	else if (results[0] == tcu::Vec4(-1.0f) &&
+			 results[1] == tcu::Vec4(-1.0f) &&
+			 results[2] == tcu::Vec4(-1.0f) &&
+			 results[3] == tcu::Vec4(-1.0f))
+	{
+		context.getTestContext().getLog()
+		<< tcu::TestLog::Message
+		<< "Result buffer was not written to."
+		<< tcu::TestLog::EndMessage;
+		return tcu::TestStatus::fail("Result buffer was not written to");
+	}
+	else
+	{
+		context.getTestContext().getLog()
+		<< tcu::TestLog::Message
+		<< "Error expected ["
+		<< references[0] << ", "
+		<< references[1] << ", "
+		<< references[2] << ", "
+		<< references[3] << "], got ["
+		<< results[0] << ", "
+		<< results[1] << ", "
+		<< results[2] << ", "
+		<< results[3] << "]"
+		<< tcu::TestLog::EndMessage;
+		return tcu::TestStatus::fail("Invalid result values");
+	}
+}
+
+// Shaders
+void genComputeSource (SourceCollections& programCollection)
+{
+	const char* const						versionDecl				= glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_310_ES);
+	std::ostringstream						buf_good;
+
+	buf_good << versionDecl << "\n"
+	<< ""
+	<< "layout(local_size_x = 1u, local_size_y = 1u, local_size_z = 1) in;\n"
+	<< "layout(set = 0, binding = 1u, std140) uniform BufferName\n"
+	<< "{\n"
+	<< "	highp vec4 colorA;\n"
+	<< "	highp vec4 colorB;\n"
+	<< "} b_instance;\n"
+	<< "layout(set = 0, binding = 0, std140) writeonly buffer OutBuf\n"
+	<< "{\n"
+	<< "	highp vec4 read_colors[4];\n"
+	<< "} b_out;\n"
+	<< "void main(void)\n"
+	<< "{\n"
+	<< "	highp int quadrant_id = int(gl_WorkGroupID.x);\n"
+	<< "	highp vec4 result_color;\n"
+	<< "	if (quadrant_id == 1 || quadrant_id == 2)\n"
+	<< "		result_color = b_instance.colorA;\n"
+	<< "	else\n"
+	<< "		result_color = b_instance.colorB;\n"
+	<< "	b_out.read_colors[gl_WorkGroupID.x] = result_color;\n"
+	<< "}\n";
+
+	programCollection.glslSources.add("compute_good") << glu::ComputeSource(buf_good.str());
+
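+	// the "bad" variant below is identical except that it overwrites the result with vec4(0.0); it must not affect the output if bind order is handled correctly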
+	std::ostringstream	buf_bad;
+
+	buf_bad	<< versionDecl << "\n"
+	<< ""
+	<< "layout(local_size_x = 1u, local_size_y = 1u, local_size_z = 1) in;\n"
+	<< "layout(set = 0, binding = 1u, std140) uniform BufferName\n"
+	<< "{\n"
+	<< "	highp vec4 colorA;\n"
+	<< "	highp vec4 colorB;\n"
+	<< "} b_instance;\n"
+	<< "layout(set = 0, binding = 0, std140) writeonly buffer OutBuf\n"
+	<< "{\n"
+	<< "	highp vec4 read_colors[4];\n"
+	<< "} b_out;\n"
+	<< "void main(void)\n"
+	<< "{\n"
+	<< "	highp int quadrant_id = int(gl_WorkGroupID.x);\n"
+	<< "	highp vec4 result_color;\n"
+	<< "	if (quadrant_id == 1 || quadrant_id == 2)\n"
+	<< "		result_color = b_instance.colorA;\n"
+	<< "	else\n"
+	<< "		result_color = b_instance.colorB;\n"
+	<< "	b_out.read_colors[gl_WorkGroupID.x] = vec4(0.0, 0.0, 0.0, 0.0);\n"
+	<< "}\n";
+
+	programCollection.glslSources.add("compute_bad") << glu::ComputeSource(buf_bad.str());
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createCommandBuffersTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	commandBuffersTests	(new tcu::TestCaseGroup(testCtx, "command_buffers", "Command Buffers Tests"));
+
+	/* 19.1. Command Pools (6.1 in VK 1.0 Spec) */
+	addFunctionCase				(commandBuffersTests.get(), "pool_create_null_params",			"",	createPoolNullParamsTest);
+	addFunctionCase				(commandBuffersTests.get(), "pool_create_non_null_allocator",	"",	createPoolNonNullAllocatorTest);
+	addFunctionCase				(commandBuffersTests.get(), "pool_create_transient_bit",		"",	createPoolTransientBitTest);
+	addFunctionCase				(commandBuffersTests.get(), "pool_create_reset_bit",			"",	createPoolResetBitTest);
+	addFunctionCase				(commandBuffersTests.get(), "pool_reset_release_res",			"",	resetPoolReleaseResourcesBitTest);
+	addFunctionCase				(commandBuffersTests.get(), "pool_reset_no_flags_res",			"",	resetPoolNoFlagsTest);
+	/* 19.2. Command Buffer Lifetime (6.2 in VK 1.0 Spec) */
+	addFunctionCase				(commandBuffersTests.get(), "allocate_single_primary",			"", allocatePrimaryBufferTest);
+	addFunctionCase				(commandBuffersTests.get(), "allocate_many_primary",			"",	allocateManyPrimaryBuffersTest);
+	addFunctionCase				(commandBuffersTests.get(), "allocate_zero_primary",			"", allocateZeroPrimaryBuffersTest);
+	addFunctionCase				(commandBuffersTests.get(), "allocate_single_secondary",		"", allocateSecondaryBufferTest);
+	addFunctionCase				(commandBuffersTests.get(), "allocate_many_secondary",			"", allocateManySecondaryBuffersTest);
+	addFunctionCase				(commandBuffersTests.get(), "allocate_zero_secondary",			"", allocateZeroSecondaryBuffersTest);
+	addFunctionCase				(commandBuffersTests.get(), "execute_small_primary",			"",	executePrimaryBufferTest);
+	addFunctionCase				(commandBuffersTests.get(), "execute_large_primary",			"",	executeLargePrimaryBufferTest);
+	addFunctionCase				(commandBuffersTests.get(), "reset_implicit",					"", resetBufferImplicitlyTest);
+	/* 19.3. Command Buffer Recording (6.3 in VK 1.0 Spec) */
+	addFunctionCase				(commandBuffersTests.get(), "record_single_primary",			"",	recordSinglePrimaryBufferTest);
+	addFunctionCase				(commandBuffersTests.get(), "record_many_primary",				"", recordLargePrimaryBufferTest);
+	addFunctionCase				(commandBuffersTests.get(), "record_single_secondary",			"",	recordSingleSecondaryBufferTest);
+	addFunctionCase				(commandBuffersTests.get(), "record_many_secondary",			"", recordLargeSecondaryBufferTest);
+	addFunctionCase				(commandBuffersTests.get(), "submit_twice_primary",				"",	submitPrimaryBufferTwiceTest);
+	addFunctionCase				(commandBuffersTests.get(), "submit_twice_secondary",			"",	submitSecondaryBufferTwiceTest);
+	addFunctionCase				(commandBuffersTests.get(), "record_one_time_submit_primary",	"",	oneTimeSubmitFlagPrimaryBufferTest);
+	addFunctionCase				(commandBuffersTests.get(), "record_one_time_submit_secondary",	"",	oneTimeSubmitFlagSecondaryBufferTest);
+	addFunctionCase				(commandBuffersTests.get(), "record_simul_use_primary",			"",	simultaneousUsePrimaryBufferTest);
+	addFunctionCase				(commandBuffersTests.get(), "record_simul_use_secondary",		"",	simultaneousUseSecondaryBufferTest);
+	addFunctionCase				(commandBuffersTests.get(), "record_query_precise_w_flag",		"",	recordBufferQueryPreciseWithFlagTest);
+	addFunctionCase				(commandBuffersTests.get(), "record_query_imprecise_w_flag",	"",	recordBufferQueryImpreciseWithFlagTest);
+	addFunctionCase				(commandBuffersTests.get(), "record_query_imprecise_wo_flag",	"",	recordBufferQueryImpreciseWithoutFlagTest);
+	/* 19.4. Command Buffer Submission (6.4 in VK 1.0 Spec) */
+	addFunctionCase				(commandBuffersTests.get(), "submit_count_non_zero",			"", submitBufferCountNonZero);
+	addFunctionCase				(commandBuffersTests.get(), "submit_count_equal_zero",			"", submitBufferCountEqualZero);
+	addFunctionCase				(commandBuffersTests.get(), "submit_null_fence",				"", submitBufferNullFence);
+	/* 19.5. Secondary Command Buffer Execution (6.6 in VK 1.0 Spec) */
+	addFunctionCase				(commandBuffersTests.get(), "secondary_execute",				"",	executeSecondaryBufferTest);
+	addFunctionCase				(commandBuffersTests.get(), "secondary_execute_twice",			"",	executeSecondaryBufferTwiceTest);
+	/* 19.6. Commands Allowed Inside Command Buffers (6.7 in VK 1.0 Spec) */
+	addFunctionCaseWithPrograms (commandBuffersTests.get(), "order_bind_pipeline",				"", genComputeSource, orderBindPipelineTest);
+
+	return commandBuffersTests.release();
+}
+
+} // api
+} // vkt
+
diff --git a/external/vulkancts/modules/vulkan/api/vktApiCommandBuffersTests.hpp b/external/vulkancts/modules/vulkan/api/vktApiCommandBuffersTests.hpp
new file mode 100644
index 0000000..e5f2bd2
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiCommandBuffersTests.hpp
@@ -0,0 +1,48 @@
+#ifndef _VKTAPICOMMANDBUFFERSTESTS_HPP
+#define _VKTAPICOMMANDBUFFERSTESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace api
+{
+
+tcu::TestCaseGroup* createCommandBuffersTests (tcu::TestContext& testCtx);
+
+} // api
+} // vkt
+
+#endif // _VKTAPICOMMANDBUFFERSTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/api/vktApiComputeInstanceResultBuffer.cpp b/external/vulkancts/modules/vulkan/api/vktApiComputeInstanceResultBuffer.cpp
new file mode 100644
index 0000000..6723e21
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiComputeInstanceResultBuffer.cpp
@@ -0,0 +1,117 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*--------------------------------------------------------------------*/
+
+#include "vktApiComputeInstanceResultBuffer.hpp"
+#include "vktApiBufferComputeInstance.hpp"
+#include "vkRefUtil.hpp"
+
+namespace vkt
+{
+namespace api
+{
+
+using namespace vk;
+
+ComputeInstanceResultBuffer::ComputeInstanceResultBuffer (const DeviceInterface &vki,
+																	  VkDevice device,
+																	  Allocator &allocator)
+		: m_vki(vki),
+		m_device(device),
+		m_bufferMem(DE_NULL),
+		m_buffer(createResultBuffer(m_vki, m_device, allocator, &m_bufferMem)),
+		m_bufferBarrier(createResultBufferBarrier(*m_buffer))
+{
+}
+
+void ComputeInstanceResultBuffer::readResultContentsTo(tcu::Vec4 (*results)[4]) const
+{
+	invalidateMappedMemoryRange(m_vki, m_device, m_bufferMem->getMemory(), m_bufferMem->getOffset(), sizeof(*results));
+	deMemcpy(*results, m_bufferMem->getHostPtr(), sizeof(*results));
+}
+
+Move<VkBuffer> ComputeInstanceResultBuffer::createResultBuffer(const DeviceInterface &vki,
+																	 VkDevice device,
+																	 Allocator &allocator,
+																	 de::MovePtr<Allocation> *outAllocation)
+{
+	const VkBufferCreateInfo createInfo =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+		DE_NULL,
+		0u,															// flags
+		(VkDeviceSize) DATA_SIZE,									// size
+		VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,							// usage
+		VK_SHARING_MODE_EXCLUSIVE,									// sharingMode
+		0u,															// queueFamilyCount
+		DE_NULL,													// pQueueFamilyIndices
+	};
+
+	Move<VkBuffer> buffer(createBuffer(vki, device, &createInfo));
+
+	const VkMemoryRequirements				requirements			= getBufferMemoryRequirements(vki, device, *buffer);
+	de::MovePtr<Allocation>					allocation				= allocator.allocate(requirements, MemoryRequirement::HostVisible);
+
+	VK_CHECK(vki.bindBufferMemory(device, *buffer, allocation->getMemory(), allocation->getOffset()));
+
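+	// pre-fill the result buffer with -1.0 so tests can distinguish quadrants that were never written by the shader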
+	const float								clearValue				= -1.0f;
+	void*									mapPtr					= allocation->getHostPtr();
+
+	for (size_t offset = 0; offset < DATA_SIZE; offset += sizeof(float))
+		deMemcpy(((deUint8 *) mapPtr) + offset, &clearValue, sizeof(float));
+
+	flushMappedMemoryRange(vki, device, allocation->getMemory(), allocation->getOffset(), (VkDeviceSize) DATA_SIZE);
+
+	*outAllocation = allocation;
+	return buffer;
+}
+
+VkBufferMemoryBarrier ComputeInstanceResultBuffer::createResultBufferBarrier(VkBuffer buffer)
+{
+	const VkBufferMemoryBarrier bufferBarrier =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+		DE_NULL,
+		VK_ACCESS_SHADER_WRITE_BIT,									// outputMask
+		VK_ACCESS_SHADER_READ_BIT,									// inputMask
+		VK_QUEUE_FAMILY_IGNORED,									// srcQueueFamilyIndex
+		VK_QUEUE_FAMILY_IGNORED,									// destQueueFamilyIndex
+		buffer,														// buffer
+		(VkDeviceSize) 0u,											// offset
+		DATA_SIZE,													// size
+	};
+
+	return bufferBarrier;
+}
+
+} // api
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/api/vktApiComputeInstanceResultBuffer.hpp b/external/vulkancts/modules/vulkan/api/vktApiComputeInstanceResultBuffer.hpp
new file mode 100644
index 0000000..37908ca
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiComputeInstanceResultBuffer.hpp
@@ -0,0 +1,86 @@
+#ifndef _VKTAPICOMPUTEINSTANCERESULTBUFFER_HPP
+#define _VKTAPICOMPUTEINSTANCERESULTBUFFER_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestLog.hpp"
+#include "deUniquePtr.hpp"
+#include "vkRef.hpp"
+#include "vkMemUtil.hpp"
+#include "vkQueryUtil.hpp"
+
+namespace vkt
+{
+
+namespace api
+{
+
+class ComputeInstanceResultBuffer
+{
+public:
+	enum
+	{
+		DATA_SIZE = sizeof(tcu::Vec4[4])
+	};
+
+											ComputeInstanceResultBuffer (const vk::DeviceInterface &vki,
+																				vk::VkDevice device,
+																				vk::Allocator &allocator);
+
+	void									readResultContentsTo(tcu::Vec4 (* results)[4]) const;
+
+	inline vk::VkBuffer						getBuffer(void) const { return *m_buffer; }
+
+	inline const vk::VkBufferMemoryBarrier*	getResultReadBarrier(void) const { return &m_bufferBarrier; }
+
+private:
+	static vk::Move<vk::VkBuffer>			createResultBuffer(const vk::DeviceInterface &vki,
+														vk::VkDevice device,
+														vk::Allocator &allocator,
+														de::MovePtr<vk::Allocation>* outAllocation);
+
+	static vk::VkBufferMemoryBarrier		createResultBufferBarrier(vk::VkBuffer buffer);
+
+	const vk::DeviceInterface &				m_vki;
+	const vk::VkDevice						m_device;
+
+	de::MovePtr<vk::Allocation>				m_bufferMem;
+	const vk::Unique<vk::VkBuffer>			m_buffer;
+	const vk::VkBufferMemoryBarrier			m_bufferBarrier;
+};
+
+} // api
+} // vkt
+
+#endif // _VKTAPICOMPUTEINSTANCERESULTBUFFER_HPP
diff --git a/external/vulkancts/modules/vulkan/api/vktApiCopiesAndBlittingTests.cpp b/external/vulkancts/modules/vulkan/api/vktApiCopiesAndBlittingTests.cpp
new file mode 100644
index 0000000..1a1fcac
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiCopiesAndBlittingTests.cpp
@@ -0,0 +1,1814 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Copies And Blitting Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktApiCopiesAndBlittingTests.hpp"
+
+#include "deRandom.hpp"
+#include "deStringUtil.hpp"
+#include "deUniquePtr.hpp"
+#include "vkImageUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vktTestCase.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRefUtil.hpp"
+#include "vkTypeUtil.hpp"
+#include "tcuImageCompare.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuVectorType.hpp"
+#include "tcuTexture.hpp"
+
+namespace vkt
+{
+
+namespace api
+{
+
+using namespace vk;
+
+namespace
+{
+
+union CopyRegion
+{
+	VkBufferCopy		bufferCopy;
+	VkImageCopy			imageCopy;
+	VkBufferImageCopy	bufferImageCopy;
+	VkImageBlit			imageBlit;
+};
+
+struct TestParams
+{
+	union Data
+	{
+		struct Buffer
+		{
+			VkDeviceSize	size;
+		}	buffer;
+		struct Image
+		{
+			VkFormat		format;
+			VkExtent3D		extent;
+		}	image;
+	}	src, dst;
+
+	std::vector<CopyRegion>	regions;
+};
+
+class CopiesAndBlittingTestInstance : public vkt::TestInstance
+{
+public:
+										CopiesAndBlittingTestInstance		(Context&	context,
+																			 TestParams	testParams);
+	virtual								~CopiesAndBlittingTestInstance		(void);
+	virtual tcu::TestStatus				iterate								(void) = 0;
+	enum FillMode
+	{
+		FILL_MODE_SEQUENTIAL = 0,
+		FILL_MODE_RANDOM,
+		FILL_MODE_WHITE,
+		FILL_MODE_RED,
+
+		FILL_MODE_LAST
+	};
+protected:
+	const TestParams					m_params;
+
+	Move<VkCommandPool>					m_cmdPool;
+	Move<VkCommandBuffer>				m_cmdBuffer;
+	Move<VkFence>						m_fence;
+	de::MovePtr<tcu::TextureLevel>		m_sourceTextureLevel;
+	de::MovePtr<tcu::TextureLevel>		m_destinationTextureLevel;
+	de::MovePtr<tcu::TextureLevel>		m_expectedTextureLevel;
+
+	VkCommandBufferBeginInfo			m_cmdBufferBeginInfo;
+
+	void								generateBuffer						(tcu::PixelBufferAccess buffer, int width, int height, int depth = 1, FillMode = FILL_MODE_SEQUENTIAL);
+	virtual void						generateExpectedResult				(void);
+	void								uploadBuffer						(tcu::ConstPixelBufferAccess bufferAccess, const Allocation& bufferAlloc);
+	void								uploadImage							(tcu::ConstPixelBufferAccess imageAccess, const VkImage& image);
+	virtual tcu::TestStatus				checkTestResult						(tcu::ConstPixelBufferAccess result);
+	virtual void						copyRegionToTextureLevel			(tcu::ConstPixelBufferAccess src, tcu::PixelBufferAccess dst, CopyRegion region) = 0;
+	VkImageAspectFlags					getAspectFlag						(tcu::TextureFormat format);
+	deUint32							calculateSize						(tcu::ConstPixelBufferAccess src) const
+										{
+											return src.getWidth() * src.getHeight() * src.getDepth() * tcu::getPixelSize(src.getFormat());
+										}
+
+	de::MovePtr<tcu::TextureLevel>		readImage							(const vk::DeviceInterface&	vk,
+																			 vk::VkDevice				device,
+																			 vk::VkQueue				queue,
+																			 vk::Allocator&				allocator,
+																			 vk::VkImage				image,
+																			 vk::VkFormat				format,
+																			 const VkExtent3D			imageSize);
+};
+
+CopiesAndBlittingTestInstance::~CopiesAndBlittingTestInstance	(void)
+{
+}
+
+CopiesAndBlittingTestInstance::CopiesAndBlittingTestInstance	(Context& context, TestParams testParams)
+	: vkt::TestInstance		(context)
+	, m_params			(testParams)
+{
+	const DeviceInterface&		vk					= context.getDeviceInterface();
+	const VkDevice				vkDevice			= context.getDevice();
+	const deUint32				queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,										// const void*			pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCmdPoolCreateFlags	flags;
+			queueFamilyIndex,								// deUint32				queueFamilyIndex;
+		};
+
+		m_cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u												// deUint32					bufferCount;
+		};
+
+		m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+
+		m_fence = createFence(vk, vkDevice, &fenceParams);
+	}
+}
+
+void CopiesAndBlittingTestInstance::generateBuffer(tcu::PixelBufferAccess buffer, int width, int height, int depth, FillMode mode)
+{
+	de::Random rnd(width ^ height ^ depth);
+	for (int z = 0; z < depth; z++)
+	{
+		for (int y = 0; y < height; y++)
+		{
+			for (int x = 0; x < width; x++)
+			{
+				switch (mode)
+				{
+					case FILL_MODE_SEQUENTIAL:
+						buffer.setPixel(tcu::UVec4(x, y, z, 255), x, y, z);
+						break;
+					case FILL_MODE_WHITE:
+						buffer.setPixel(tcu::UVec4(255, 255, 255, 255), x, y, z);
+						break;
+					case FILL_MODE_RED:
+						buffer.setPixel(tcu::UVec4(255, 0, 0, 255), x, y, z);
+						break;
+					case FILL_MODE_RANDOM:
+						buffer.setPixel(tcu::UVec4(rnd.getUint8(), rnd.getUint8(), rnd.getUint8(), 255), x, y, z);
+						break;
+					default:
+						break;
+				}
+			}
+		}
+	}
+}
+
+void CopiesAndBlittingTestInstance::uploadBuffer(tcu::ConstPixelBufferAccess bufferAccess, const Allocation& bufferAlloc)
+{
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const deUint32				bufferSize	= calculateSize(bufferAccess);
+
+	// Write buffer data
+	deMemcpy(bufferAlloc.getHostPtr(), bufferAccess.getDataPtr(), bufferSize);
+	flushMappedMemoryRange(vk, vkDevice, bufferAlloc.getMemory(), bufferAlloc.getOffset(), bufferSize);
+}
+
+void CopiesAndBlittingTestInstance::uploadImage(tcu::ConstPixelBufferAccess imageAccess, const VkImage& image)
+{
+	const DeviceInterface&		vk					= m_context.getDeviceInterface();
+	const VkDevice				vkDevice			= m_context.getDevice();
+	const VkQueue				queue				= m_context.getUniversalQueue();
+	const deUint32				queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc			(vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
+
+	Move<VkBuffer>				buffer;
+	const deUint32				bufferSize		= calculateSize(imageAccess);
+	de::MovePtr<Allocation>		bufferAlloc;
+	Move<VkCommandBuffer>		cmdBuffer;
+	Move<VkFence>				fence;
+
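+	// upload path: fill a host-visible staging buffer, then record a buffer-to-image copy with the required layout transitions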
+	// Create source buffer
+	{
+		const VkBufferCreateInfo			bufferParams			=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			bufferSize,									// VkDeviceSize			size;
+			VK_BUFFER_USAGE_TRANSFER_SRC_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex,							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		buffer		= createBuffer(vk, vkDevice, &bufferParams);
+		bufferAlloc = memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *buffer), MemoryRequirement::HostVisible);
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *buffer, bufferAlloc->getMemory(), bufferAlloc->getOffset()));
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo	cmdBufferAllocateInfo	=
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,												// deUint32					bufferCount;
+		};
+
+		cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo				fenceParams				=
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u											// VkFenceCreateFlags	flags;
+		};
+
+		fence = createFence(vk, vkDevice, &fenceParams);
+	}
+
+	// Barriers for copying buffer to image
+	const VkBufferMemoryBarrier				preBufferBarrier		=
+	{
+		VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,		// VkStructureType	sType;
+		DE_NULL,										// const void*		pNext;
+		VK_ACCESS_HOST_WRITE_BIT,						// VkAccessFlags	srcAccessMask;
+		VK_ACCESS_TRANSFER_READ_BIT,					// VkAccessFlags	dstAccessMask;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32			srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32			dstQueueFamilyIndex;
+		*buffer,										// VkBuffer			buffer;
+		0u,												// VkDeviceSize		offset;
+		bufferSize										// VkDeviceSize		size;
+	};
+
+	const VkImageMemoryBarrier				preImageBarrier			=
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,			// VkStructureType			sType;
+		DE_NULL,										// const void*				pNext;
+		0u,												// VkAccessFlags			srcAccessMask;
+		0u,												// VkAccessFlags			dstAccessMask;
+		VK_IMAGE_LAYOUT_UNDEFINED,						// VkImageLayout			oldLayout;
+		VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,			// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					dstQueueFamilyIndex;
+		image,											// VkImage					image;
+		{												// VkImageSubresourceRange	subresourceRange;
+			getAspectFlag(imageAccess.getFormat()),	// VkImageAspect	aspect;
+			0u,										// deUint32			baseMipLevel;
+			1u,										// deUint32			mipLevels;
+			0u,										// deUint32			baseArraySlice;
+			1u,										// deUint32			arraySize;
+		}
+	};
+
+	const VkImageMemoryBarrier				postImageBarrier		=
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,			// VkStructureType			sType;
+		DE_NULL,										// const void*				pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,					// VkAccessFlags			srcAccessMask;
+		VK_ACCESS_SHADER_READ_BIT,						// VkAccessFlags			dstAccessMask;
+		VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,			// VkImageLayout			oldLayout;
+		VK_IMAGE_LAYOUT_GENERAL,						// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					dstQueueFamilyIndex;
+		image,											// VkImage					image;
+		{												// VkImageSubresourceRange	subresourceRange;
+			getAspectFlag(imageAccess.getFormat()),	// VkImageAspect	aspect;
+			0u,										// deUint32			baseMipLevel;
+			1u,										// deUint32			mipLevels;
+			0u,										// deUint32			baseArraySlice;
+			1u,										// deUint32			arraySize;
+		}
+	};
+
+	const VkBufferImageCopy					copyRegion				=
+	{
+		0u,												// VkDeviceSize				bufferOffset;
+		(deUint32)imageAccess.getWidth(),				// deUint32					bufferRowLength;
+		(deUint32)imageAccess.getHeight(),				// deUint32					bufferImageHeight;
+		{												// VkImageSubresourceLayers	imageSubresource;
+			getAspectFlag(imageAccess.getFormat()),	// VkImageAspect	aspect;
+			0u,										// deUint32			mipLevel;
+			0u,										// deUint32			baseArrayLayer;
+			1u,										// deUint32			layerCount;
+		},
+		{ 0, 0, 0 },									// VkOffset3D				imageOffset;
+		{
+			(deUint32)imageAccess.getWidth(),
+			(deUint32)imageAccess.getHeight(),
+			1u
+		}												// VkExtent3D				imageExtent;
+	};
+
+	// Write buffer data
+	deMemcpy(bufferAlloc->getHostPtr(), imageAccess.getDataPtr(), bufferSize);
+	flushMappedMemoryRange(vk, vkDevice, bufferAlloc->getMemory(), bufferAlloc->getOffset(), bufferSize);
+
+	// Copy buffer to image
+	const VkCommandBufferBeginInfo			cmdBufferBeginInfo		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,			// VkStructureType					sType;
+		DE_NULL,												// const void*						pNext;
+		VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,			// VkCommandBufferUsageFlags		flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	VK_CHECK(vk.beginCommandBuffer(*cmdBuffer, &cmdBufferBeginInfo));
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &preBufferBarrier, 1, &preImageBarrier);
+	vk.cmdCopyBufferToImage(*cmdBuffer, *buffer, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
+	VK_CHECK(vk.endCommandBuffer(*cmdBuffer));
+
+	const VkSubmitInfo						submitInfo				=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,
+		1u,								// deUint32					commandBufferCount;
+		&cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), true, ~(0ull) /* infinity */));
+}
+
+tcu::TestStatus CopiesAndBlittingTestInstance::checkTestResult(tcu::ConstPixelBufferAccess result)
+{
+	const tcu::ConstPixelBufferAccess	expected	= m_expectedTextureLevel->getAccess();
+	const tcu::UVec4					threshold	(0, 0, 0, 0);
+
+	if (!tcu::intThresholdCompare(m_context.getTestContext().getLog(), "Compare", "Result comparison", expected, result, threshold, tcu::COMPARE_LOG_RESULT))
+		return tcu::TestStatus::fail("CopiesAndBlitting test");
+
+	return tcu::TestStatus::pass("CopiesAndBlitting test");
+}
+
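+// Builds the reference image on the CPU: start from the destination contents and apply every copy region in order.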
+void CopiesAndBlittingTestInstance::generateExpectedResult()
+{
+	const tcu::ConstPixelBufferAccess src = m_sourceTextureLevel->getAccess();
+	const tcu::ConstPixelBufferAccess dst = m_destinationTextureLevel->getAccess();
+
+	m_expectedTextureLevel = de::MovePtr<tcu::TextureLevel>(new tcu::TextureLevel(dst.getFormat(), dst.getWidth(), dst.getHeight(), dst.getDepth()));
+	tcu::copy(m_expectedTextureLevel->getAccess(), dst);
+	for (deUint32 i = 0; i < m_params.regions.size(); i++)
+		copyRegionToTextureLevel(src, m_expectedTextureLevel->getAccess(), m_params.regions[i]);
+}
+
+class CopiesAndBlittingTestCase : public vkt::TestCase
+{
+public:
+							CopiesAndBlittingTestCase	(tcu::TestContext&			testCtx,
+														 const std::string&			name,
+														 const std::string&			description)
+								: vkt::TestCase			(testCtx, name, description)
+							{}
+
+	virtual					~CopiesAndBlittingTestCase	(void) {}
+
+	virtual TestInstance*	createInstance				(Context&					context) const = 0;
+};
+
+VkImageAspectFlags CopiesAndBlittingTestInstance::getAspectFlag(tcu::TextureFormat format)
+{
+	VkImageAspectFlags aspectFlag = 0;
+	aspectFlag |= (tcu::hasDepthComponent(format.order)? VK_IMAGE_ASPECT_DEPTH_BIT : 0);
+	aspectFlag |= (tcu::hasStencilComponent(format.order)? VK_IMAGE_ASPECT_STENCIL_BIT : 0);
+
+	if (!aspectFlag)
+		aspectFlag = VK_IMAGE_ASPECT_COLOR_BIT;
+
+	return aspectFlag;
+}
+
+de::MovePtr<tcu::TextureLevel> CopiesAndBlittingTestInstance::readImage	(const vk::DeviceInterface&	vk,
+																		 vk::VkDevice				device,
+																		 vk::VkQueue				queue,
+																		 vk::Allocator&				allocator,
+																		 vk::VkImage				image,
+																		 vk::VkFormat				format,
+																		 const VkExtent3D			imageSize)
+{
+	Move<VkBuffer>					buffer;
+	de::MovePtr<Allocation>			bufferAlloc;
+	Move<VkCommandBuffer>			cmdBuffer;
+	Move<VkFence>					fence;
+	const deUint32					queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	const tcu::TextureFormat		tcuFormat			= mapVkFormat(format);
+	const VkDeviceSize				pixelDataSize		= imageSize.width * imageSize.height * imageSize.depth * tcu::getPixelSize(tcuFormat);
+	de::MovePtr<tcu::TextureLevel>	resultLevel			(new tcu::TextureLevel(tcuFormat, imageSize.width, imageSize.height, imageSize.depth));
+
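+	// readback path: transition the image to TRANSFER_SRC_OPTIMAL, copy it into a host-visible buffer, then map it into a TextureLevel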
+	// Create destination buffer
+	{
+		const VkBufferCreateInfo bufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			pixelDataSize,								// VkDeviceSize			size;
+			VK_BUFFER_USAGE_TRANSFER_DST_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex,							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		buffer		= createBuffer(vk, device, &bufferParams);
+		bufferAlloc = allocator.allocate(getBufferMemoryRequirements(vk, device, *buffer), MemoryRequirement::HostVisible);
+		VK_CHECK(vk.bindBufferMemory(device, *buffer, bufferAlloc->getMemory(), bufferAlloc->getOffset()));
+	}
+
+	// Create command pool and buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u												// deUint32					bufferCount;
+		};
+
+		cmdBuffer = allocateCommandBuffer(vk, device, &cmdBufferAllocateInfo);
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u											// VkFenceCreateFlags	flags;
+		};
+
+		fence = createFence(vk, device, &fenceParams);
+	}
+
+	// Barriers for copying image to buffer
+
+	const VkImageMemoryBarrier imageBarrier =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// VkStructureType			sType;
+		DE_NULL,									// const void*				pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,				// VkAccessFlags			srcAccessMask;
+		VK_ACCESS_TRANSFER_READ_BIT,				// VkAccessFlags			dstAccessMask;
+		VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,		// VkImageLayout			oldLayout;
+		VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,		// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32					dstQueueFamilyIndex;
+		image,										// VkImage					image;
+		{											// VkImageSubresourceRange	subresourceRange;
+			getAspectFlag(tcuFormat),	// VkImageAspectFlags	aspectMask;
+			0u,							// deUint32				baseMipLevel;
+			1u,							// deUint32				mipLevels;
+			0u,							// deUint32				baseArraySlice;
+			1u							// deUint32				arraySize;
+		}
+	};
+
+	const VkBufferMemoryBarrier bufferBarrier =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,	// VkStructureType	sType;
+		DE_NULL,									// const void*		pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,				// VkAccessFlags	srcAccessMask;
+		VK_ACCESS_HOST_READ_BIT,					// VkAccessFlags	dstAccessMask;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			dstQueueFamilyIndex;
+		*buffer,									// VkBuffer			buffer;
+		0u,											// VkDeviceSize		offset;
+		pixelDataSize								// VkDeviceSize		size;
+	};
+
+	// Copy image to buffer
+
+	const VkBufferImageCopy copyRegion =
+	{
+		0u,											// VkDeviceSize				bufferOffset;
+		(deUint32)imageSize.width,					// deUint32					bufferRowLength;
+		(deUint32)imageSize.height,					// deUint32					bufferImageHeight;
+		{ getAspectFlag(tcuFormat), 0u, 0u, 1u },	// VkImageSubresourceLayers	imageSubresource;
+		{ 0, 0, 0 },								// VkOffset3D				imageOffset;
+		imageSize									// VkExtent3D				imageExtent;
+	};
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,			// VkStructureType					sType;
+		DE_NULL,												// const void*						pNext;
+		VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,			// VkCommandBufferUsageFlags		flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	VK_CHECK(vk.beginCommandBuffer(*cmdBuffer, &cmdBufferBeginInfo));
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
+	vk.cmdCopyImageToBuffer(*cmdBuffer, image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *buffer, 1, &copyRegion);
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+	VK_CHECK(vk.endCommandBuffer(*cmdBuffer));
+
+	const VkSubmitInfo submitInfo =
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,
+		1u,								// deUint32					commandBufferCount;
+		&cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *fence));
+	VK_CHECK(vk.waitForFences(device, 1, &fence.get(), 0, ~(0ull) /* infinity */));
+
+	// Read buffer data
+	invalidateMappedMemoryRange(vk, device, bufferAlloc->getMemory(), bufferAlloc->getOffset(), pixelDataSize);
+	tcu::copy(*resultLevel, tcu::ConstPixelBufferAccess(resultLevel->getFormat(), resultLevel->getSize(), bufferAlloc->getHostPtr()));
+
+	return resultLevel;
+}
+
+// Copy from image to image.
+
+class CopyImageToImage : public CopiesAndBlittingTestInstance
+{
+public:
+										CopyImageToImage			(Context&	context,
+																	 TestParams params);
+	virtual tcu::TestStatus				iterate						(void);
+private:
+	Move<VkImage>						m_source;
+	de::MovePtr<Allocation>				m_sourceImageAlloc;
+	Move<VkImage>						m_destination;
+	de::MovePtr<Allocation>				m_destinationImageAlloc;
+
+	virtual void						copyRegionToTextureLevel	(tcu::ConstPixelBufferAccess src, tcu::PixelBufferAccess dst, CopyRegion region);
+};
+
+CopyImageToImage::CopyImageToImage (Context& context, TestParams params)
+	: CopiesAndBlittingTestInstance(context, params)
+{
+	const DeviceInterface&		vk					= context.getDeviceInterface();
+	const VkDevice				vkDevice			= context.getDevice();
+	const deUint32				queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc			(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+
+	VkImageFormatProperties properties;
+	if ((context.getInstanceInterface().getPhysicalDeviceImageFormatProperties (context.getPhysicalDevice(),
+																				m_params.src.image.format,
+																				VK_IMAGE_TYPE_2D,
+																				VK_IMAGE_TILING_OPTIMAL,
+																				VK_IMAGE_USAGE_TRANSFER_SRC_BIT, 0,
+																				&properties) == VK_ERROR_FORMAT_NOT_SUPPORTED) ||
+		(context.getInstanceInterface().getPhysicalDeviceImageFormatProperties (context.getPhysicalDevice(),
+																				m_params.dst.image.format,
+																				VK_IMAGE_TYPE_2D,
+																				VK_IMAGE_TILING_OPTIMAL,
+																				VK_IMAGE_USAGE_TRANSFER_DST_BIT, 0,
+																				&properties) == VK_ERROR_FORMAT_NOT_SUPPORTED))
+	{
+		TCU_THROW(NotSupportedError, "Format not supported");
+	}
+
+	// Create source image
+	{
+		const VkImageCreateInfo sourceImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u,										// VkImageCreateFlags	flags;
+			VK_IMAGE_TYPE_2D,						// VkImageType			imageType;
+			m_params.src.image.format,				// VkFormat				format;
+			m_params.src.image.extent,				// VkExtent3D			extent;
+			1u,										// deUint32				mipLevels;
+			1u,										// deUint32				arraySize;
+			VK_SAMPLE_COUNT_1_BIT,					// deUint32				samples;
+			VK_IMAGE_TILING_OPTIMAL,				// VkImageTiling		tiling;
+			VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
+			    VK_IMAGE_USAGE_TRANSFER_DST_BIT,	// VkImageUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,				// VkSharingMode		sharingMode;
+			1u,										// deUint32				queueFamilyCount;
+			&queueFamilyIndex,						// const deUint32*		pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED,				// VkImageLayout		initialLayout;
+		};
+
+		m_source = createImage(vk, vkDevice, &sourceImageParams);
+		m_sourceImageAlloc = memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_source), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_source, m_sourceImageAlloc->getMemory(), m_sourceImageAlloc->getOffset()));
+	}
+
+	// Create destination image
+	{
+		const VkImageCreateInfo destinationImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u,										// VkImageCreateFlags	flags;
+			VK_IMAGE_TYPE_2D,						// VkImageType			imageType;
+			m_params.dst.image.format,				// VkFormat				format;
+			m_params.dst.image.extent,				// VkExtent3D			extent;
+			1u,										// deUint32				mipLevels;
+			1u,										// deUint32				arraySize;
+			VK_SAMPLE_COUNT_1_BIT,					// deUint32				samples;
+			VK_IMAGE_TILING_OPTIMAL,				// VkImageTiling		tiling;
+			VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
+			    VK_IMAGE_USAGE_TRANSFER_DST_BIT,	// VkImageUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,				// VkSharingMode		sharingMode;
+			1u,										// deUint32				queueFamilyCount;
+			&queueFamilyIndex,						// const deUint32*		pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED,				// VkImageLayout		initialLayout;
+		};
+
+		m_destination = createImage(vk, vkDevice, &destinationImageParams);
+		m_destinationImageAlloc = memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_destination), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_destination, m_destinationImageAlloc->getMemory(), m_destinationImageAlloc->getOffset()));
+	}
+}
+
+tcu::TestStatus CopyImageToImage::iterate()
+{
+	tcu::TextureFormat srcTcuFormat = mapVkFormat(m_params.src.image.format);
+	tcu::TextureFormat dstTcuFormat = mapVkFormat(m_params.dst.image.format);
+	m_sourceTextureLevel = de::MovePtr<tcu::TextureLevel>(new tcu::TextureLevel(srcTcuFormat,
+																				m_params.src.image.extent.width,
+																				m_params.src.image.extent.height,
+																				m_params.src.image.extent.depth));
+	generateBuffer(m_sourceTextureLevel->getAccess(), m_params.src.image.extent.width, m_params.src.image.extent.height, m_params.src.image.extent.depth, FILL_MODE_WHITE);
+	m_destinationTextureLevel = de::MovePtr<tcu::TextureLevel>(new tcu::TextureLevel(dstTcuFormat,
+																					 (int)m_params.dst.image.extent.width,
+																					 (int)m_params.dst.image.extent.height,
+																					 (int)m_params.dst.image.extent.depth));
+	generateBuffer(m_destinationTextureLevel->getAccess(), m_params.dst.image.extent.width, m_params.dst.image.extent.height, m_params.dst.image.extent.depth, FILL_MODE_SEQUENTIAL);
+	generateExpectedResult();
+
+	uploadImage(m_sourceTextureLevel->getAccess(), m_source.get());
+	uploadImage(m_destinationTextureLevel->getAccess(), m_destination.get());
+
+	const DeviceInterface&		vk					= m_context.getDeviceInterface();
+	const VkDevice				vkDevice			= m_context.getDevice();
+	const VkQueue				queue				= m_context.getUniversalQueue();
+	SimpleAllocator				memAlloc			(vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
+
+	VkImageCopy* imageCopies = ((VkImageCopy*)deMalloc(m_params.regions.size() * sizeof(VkImageCopy)));
+	for (deUint32 i = 0; i < m_params.regions.size(); i++)
+		imageCopies[i] = m_params.regions[i].imageCopy;
+
+	// Barriers for copying image to buffer
+	const VkImageMemoryBarrier srcImageBarrier =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// VkStructureType			sType;
+		DE_NULL,									// const void*				pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,				// VkAccessFlags			srcAccessMask;
+		VK_ACCESS_TRANSFER_READ_BIT,				// VkAccessFlags			dstAccessMask;
+		VK_IMAGE_LAYOUT_GENERAL,					// VkImageLayout			oldLayout;
+		VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,		// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32					dstQueueFamilyIndex;
+		m_source.get(),								// VkImage					image;
+		{											// VkImageSubresourceRange	subresourceRange;
+			getAspectFlag(srcTcuFormat),	// VkImageAspectFlags	aspectMask;
+			0u,							// deUint32				baseMipLevel;
+			1u,							// deUint32				mipLevels;
+			0u,							// deUint32				baseArraySlice;
+			1u							// deUint32				arraySize;
+		}
+	};
+
+	const VkImageMemoryBarrier dstImageBarrier =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// VkStructureType			sType;
+		DE_NULL,									// const void*				pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,				// VkAccessFlags			srcAccessMask;
+		VK_ACCESS_TRANSFER_READ_BIT,				// VkAccessFlags			dstAccessMask;
+		VK_IMAGE_LAYOUT_GENERAL,					// VkImageLayout			oldLayout;
+		VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,		// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32					dstQueueFamilyIndex;
+		m_destination.get(),						// VkImage					image;
+		{											// VkImageSubresourceRange	subresourceRange;
+			getAspectFlag(dstTcuFormat),	// VkImageAspectFlags	aspectMask;
+			0u,							// deUint32				baseMipLevel;
+			1u,							// deUint32				mipLevels;
+			0u,							// deUint32				baseArraySlice;
+			1u							// deUint32				arraySize;
+		}
+	};
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,			// VkStructureType					sType;
+		DE_NULL,												// const void*						pNext;
+		VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,			// VkCommandBufferUsageFlags		flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,			// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+	};
+
+	VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+	vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &srcImageBarrier);
+	vk.cmdCopyImage(*m_cmdBuffer, m_source.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_destination.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (deUint32)m_params.regions.size(), imageCopies);
+	vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &dstImageBarrier);
+	VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+
+	const VkSubmitInfo submitInfo =
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity */));
+	deFree(imageCopies);
+
+	de::MovePtr<tcu::TextureLevel> resultTextureLevel = readImage(vk, vkDevice, queue, memAlloc, *m_destination, m_params.dst.image.format, m_params.dst.image.extent);
+
+	return checkTestResult(resultTextureLevel->getAccess());
+}
+
+void CopyImageToImage::copyRegionToTextureLevel(tcu::ConstPixelBufferAccess src, tcu::PixelBufferAccess dst, CopyRegion region)
+{
+	VkOffset3D srcOffset	= region.imageCopy.srcOffset;
+	VkOffset3D dstOffset	= region.imageCopy.dstOffset;
+	VkExtent3D extent		= region.imageCopy.extent;
+
+	const tcu::ConstPixelBufferAccess	srcSubRegion = tcu::getSubregion(src, srcOffset.x, srcOffset.y, extent.width, extent.height);
+	// CopyImage acts like a memcpy: reinterpret the destination with the source format so that tcu::copy() performs a bitwise copy.
+	const tcu::PixelBufferAccess		dstWithSrcFormat(srcSubRegion.getFormat(), dst.getSize(), dst.getDataPtr());
+	const tcu::PixelBufferAccess		dstSubRegion = tcu::getSubregion(dstWithSrcFormat, dstOffset.x, dstOffset.y, extent.width, extent.height);
+
+	tcu::copy(dstSubRegion, srcSubRegion);
+}
+
+class CopyImageToImageTestCase : public vkt::TestCase
+{
+public:
+							CopyImageToImageTestCase	(tcu::TestContext&				testCtx,
+														 const std::string&				name,
+														 const std::string&				description,
+														 const TestParams				params)
+								: vkt::TestCase			(testCtx, name, description)
+								, m_params				(params)
+							{}
+
+	virtual					~CopyImageToImageTestCase	(void) {}
+
+	virtual TestInstance*	createInstance				(Context&						context) const
+							{
+								return new CopyImageToImage(context, m_params);
+							}
+private:
+	TestParams				m_params;
+};
+
+// Copy from buffer to buffer.
+
+class CopyBufferToBuffer : public CopiesAndBlittingTestInstance
+{
+public:
+								CopyBufferToBuffer			(Context& context, TestParams params);
+	virtual tcu::TestStatus		iterate						(void);
+private:
+	virtual void				copyRegionToTextureLevel	(tcu::ConstPixelBufferAccess, tcu::PixelBufferAccess, CopyRegion);
+	Move<VkBuffer>				m_source;
+	de::MovePtr<Allocation>		m_sourceBufferAlloc;
+	Move<VkBuffer>				m_destination;
+	de::MovePtr<Allocation>		m_destinationBufferAlloc;
+};
+
+CopyBufferToBuffer::CopyBufferToBuffer (Context& context, TestParams params)
+	: CopiesAndBlittingTestInstance	(context, params)
+{
+	const DeviceInterface&		vk					= context.getDeviceInterface();
+	const VkDevice				vkDevice			= context.getDevice();
+	const deUint32				queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc			(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+
+	// Create source buffer
+	{
+		const VkBufferCreateInfo	sourceBufferParams	=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			m_params.src.buffer.size,					// VkDeviceSize			size;
+			VK_BUFFER_USAGE_TRANSFER_SRC_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex,							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_source				= createBuffer(vk, vkDevice, &sourceBufferParams);
+		m_sourceBufferAlloc		= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_source), MemoryRequirement::HostVisible);
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_source, m_sourceBufferAlloc->getMemory(), m_sourceBufferAlloc->getOffset()));
+	}
+
+	// Create destination buffer
+	{
+		const VkBufferCreateInfo	destinationBufferParams	=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			m_params.dst.buffer.size,					// VkDeviceSize			size;
+			VK_BUFFER_USAGE_TRANSFER_DST_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex,							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_destination				= createBuffer(vk, vkDevice, &destinationBufferParams);
+		m_destinationBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_destination), MemoryRequirement::HostVisible);
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_destination, m_destinationBufferAlloc->getMemory(), m_destinationBufferAlloc->getOffset()));
+	}
+}
+
+tcu::TestStatus CopyBufferToBuffer::iterate()
+{
+	const int srcLevelWidth = (int)(m_params.src.buffer.size/4); // The buffer is interpreted as VK_FORMAT_R32_UINT, so divide the byte size by 4 to get the width in texels
+	m_sourceTextureLevel		= de::MovePtr<tcu::TextureLevel>(new tcu::TextureLevel(mapVkFormat(VK_FORMAT_R32_UINT), srcLevelWidth, 1));
+	generateBuffer(m_sourceTextureLevel->getAccess(), srcLevelWidth, 1, 1, FILL_MODE_RED);
+
+	const int dstLevelWidth = (int)(m_params.dst.buffer.size/4);
+	m_destinationTextureLevel	= de::MovePtr<tcu::TextureLevel>(new tcu::TextureLevel(mapVkFormat(VK_FORMAT_R32_UINT), dstLevelWidth, 1));
+	generateBuffer(m_destinationTextureLevel->getAccess(), dstLevelWidth, 1, 1, FILL_MODE_WHITE);
+
+	generateExpectedResult();
+
+	uploadBuffer(m_sourceTextureLevel->getAccess(), *m_sourceBufferAlloc);
+	uploadBuffer(m_destinationTextureLevel->getAccess(), *m_destinationBufferAlloc);
+
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const VkQueue				queue		= m_context.getUniversalQueue();
+
+	const VkBufferMemoryBarrier srcBufferBarrier =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,	// VkStructureType	sType;
+		DE_NULL,									// const void*		pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,				// VkAccessFlags	srcAccessMask;
+		VK_ACCESS_HOST_READ_BIT,					// VkAccessFlags	dstAccessMask;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			dstQueueFamilyIndex;
+		*m_source,									// VkBuffer			buffer;
+		0u,											// VkDeviceSize		offset;
+		m_params.src.buffer.size					// VkDeviceSize		size;
+	};
+
+	const VkBufferMemoryBarrier dstBufferBarrier	=
+	{
+		VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,	// VkStructureType	sType;
+		DE_NULL,									// const void*		pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,				// VkAccessFlags	srcAccessMask;
+		VK_ACCESS_HOST_READ_BIT,					// VkAccessFlags	dstAccessMask;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			dstQueueFamilyIndex;
+		*m_destination,								// VkBuffer			buffer;
+		0u,											// VkDeviceSize		offset;
+		m_params.dst.buffer.size					// VkDeviceSize		size;
+	};
+
+	VkBufferCopy* bufferCopies = ((VkBufferCopy*)deMalloc(m_params.regions.size() * sizeof(VkBufferCopy)));
+	for (deUint32 i = 0; i < m_params.regions.size(); i++)
+		bufferCopies[i] = m_params.regions[i].bufferCopy;
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,			// VkStructureType					sType;
+		DE_NULL,												// const void*						pNext;
+		VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,			// VkCommandBufferUsageFlags		flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,			// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+	};
+
+	VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+	vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &srcBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+	vk.cmdCopyBuffer(*m_cmdBuffer, m_source.get(), m_destination.get(), (deUint32)m_params.regions.size(), bufferCopies);
+	vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &dstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+	VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+
+	const VkSubmitInfo submitInfo =
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity */));
+
+	// Read buffer data
+	de::MovePtr<tcu::TextureLevel>	resultLevel		(new tcu::TextureLevel(mapVkFormat(VK_FORMAT_R32_UINT), dstLevelWidth, 1));
+	invalidateMappedMemoryRange(vk, vkDevice, m_destinationBufferAlloc->getMemory(), m_destinationBufferAlloc->getOffset(), m_params.dst.buffer.size);
+	tcu::copy(*resultLevel, tcu::ConstPixelBufferAccess(resultLevel->getFormat(), resultLevel->getSize(), m_destinationBufferAlloc->getHostPtr()));
+	deFree(bufferCopies);
+
+	return checkTestResult(resultLevel->getAccess());
+}
+
+void CopyBufferToBuffer::copyRegionToTextureLevel (tcu::ConstPixelBufferAccess src, tcu::PixelBufferAccess dst, CopyRegion region)
+{
+	deMemcpy((deUint8*) dst.getDataPtr() + region.bufferCopy.dstOffset,
+				(deUint8*) src.getDataPtr() + region.bufferCopy.srcOffset,
+				(size_t)region.bufferCopy.size);
+}
+
+class BufferToBufferTestCase : public vkt::TestCase
+{
+public:
+							BufferToBufferTestCase	(tcu::TestContext&	testCtx,
+													 const std::string&	name,
+													 const std::string&	description,
+													 const TestParams	params)
+								: vkt::TestCase		(testCtx, name, description)
+								, m_params			(params)
+							{}
+	virtual					~BufferToBufferTestCase	(void) {}
+
+	virtual TestInstance*	createInstance			(Context& context) const
+							{
+								return new CopyBufferToBuffer(context, m_params);
+							}
+private:
+	TestParams				m_params;
+};
+
+// Copy from image to buffer.
+
+class CopyImageToBuffer : public CopiesAndBlittingTestInstance
+{
+public:
+								CopyImageToBuffer			(Context&	context,
+															 TestParams	testParams);
+	virtual						~CopyImageToBuffer			(void) {}
+	virtual tcu::TestStatus		iterate						(void);
+private:
+	virtual void				copyRegionToTextureLevel	(tcu::ConstPixelBufferAccess src, tcu::PixelBufferAccess dst, CopyRegion region);
+
+	tcu::TextureFormat			m_textureFormat;
+	VkDeviceSize				m_bufferSize;
+
+	Move<VkImage>				m_source;
+	de::MovePtr<Allocation>		m_sourceImageAlloc;
+	Move<VkBuffer>				m_destination;
+	de::MovePtr<Allocation>		m_destinationBufferAlloc;
+};
+
+CopyImageToBuffer::CopyImageToBuffer (Context& context, TestParams testParams)
+	: CopiesAndBlittingTestInstance(context, testParams)
+	, m_textureFormat(mapVkFormat(testParams.src.image.format))
+	, m_bufferSize(m_params.dst.buffer.size * tcu::getPixelSize(m_textureFormat))
+{
+	const DeviceInterface&		vk					= context.getDeviceInterface();
+	const VkDevice				vkDevice			= context.getDevice();
+	const deUint32				queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc			(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+
+	// Create source image
+	{
+		const VkImageCreateInfo		sourceImageParams		=
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u,										// VkImageCreateFlags	flags;
+			VK_IMAGE_TYPE_2D,						// VkImageType			imageType;
+			m_params.src.image.format,				// VkFormat				format;
+			m_params.src.image.extent,				// VkExtent3D			extent;
+			1u,										// deUint32				mipLevels;
+			1u,										// deUint32				arraySize;
+			VK_SAMPLE_COUNT_1_BIT,					// deUint32				samples;
+			VK_IMAGE_TILING_OPTIMAL,				// VkImageTiling		tiling;
+			VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
+				VK_IMAGE_USAGE_TRANSFER_DST_BIT,	// VkImageUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,				// VkSharingMode		sharingMode;
+			1u,										// deUint32				queueFamilyCount;
+			&queueFamilyIndex,						// const deUint32*		pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED,				// VkImageLayout		initialLayout;
+		};
+
+		m_source			= createImage(vk, vkDevice, &sourceImageParams);
+		m_sourceImageAlloc	= memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_source), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_source, m_sourceImageAlloc->getMemory(), m_sourceImageAlloc->getOffset()));
+	}
+
+	// Create destination buffer
+	{
+		const VkBufferCreateInfo	destinationBufferParams	=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			m_bufferSize,								// VkDeviceSize			size;
+			VK_BUFFER_USAGE_TRANSFER_DST_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex,							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_destination				= createBuffer(vk, vkDevice, &destinationBufferParams);
+		m_destinationBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_destination), MemoryRequirement::HostVisible);
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_destination, m_destinationBufferAlloc->getMemory(), m_destinationBufferAlloc->getOffset()));
+	}
+}
+
+tcu::TestStatus CopyImageToBuffer::iterate()
+{
+	m_sourceTextureLevel = de::MovePtr<tcu::TextureLevel>(new tcu::TextureLevel(m_textureFormat,
+																				m_params.src.image.extent.width,
+																				m_params.src.image.extent.height,
+																				m_params.src.image.extent.depth));
+	generateBuffer(m_sourceTextureLevel->getAccess(), m_params.src.image.extent.width, m_params.src.image.extent.height, m_params.src.image.extent.depth, FILL_MODE_RED);
+	m_destinationTextureLevel = de::MovePtr<tcu::TextureLevel>(new tcu::TextureLevel(m_textureFormat, (int)m_params.dst.buffer.size, 1));
+	generateBuffer(m_destinationTextureLevel->getAccess(), (int)m_params.dst.buffer.size, 1, 1);
+
+	generateExpectedResult();
+
+	uploadImage(m_sourceTextureLevel->getAccess(), *m_source);
+	uploadBuffer(m_destinationTextureLevel->getAccess(), *m_destinationBufferAlloc);
+
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const VkQueue				queue		= m_context.getUniversalQueue();
+
+	// Barriers for copying image to buffer
+	const VkImageMemoryBarrier imageBarrier =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// VkStructureType			sType;
+		DE_NULL,									// const void*				pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,				// VkAccessFlags			srcAccessMask;
+		VK_ACCESS_TRANSFER_READ_BIT,				// VkAccessFlags			dstAccessMask;
+		VK_IMAGE_LAYOUT_GENERAL,					// VkImageLayout			oldLayout;
+		VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,		// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32					dstQueueFamilyIndex;
+		*m_source,									// VkImage					image;
+		{											// VkImageSubresourceRange	subresourceRange;
+			getAspectFlag(m_textureFormat),	// VkImageAspectFlags	aspectMask;
+			0u,								// deUint32				baseMipLevel;
+			1u,								// deUint32				mipLevels;
+			0u,								// deUint32				baseArraySlice;
+			1u								// deUint32				arraySize;
+		}
+	};
+
+	const VkBufferMemoryBarrier bufferBarrier =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,	// VkStructureType	sType;
+		DE_NULL,									// const void*		pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,				// VkAccessFlags	srcAccessMask;
+		VK_ACCESS_HOST_READ_BIT,					// VkAccessFlags	dstAccessMask;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			dstQueueFamilyIndex;
+		*m_destination,								// VkBuffer			buffer;
+		0u,											// VkDeviceSize		offset;
+		m_bufferSize								// VkDeviceSize		size;
+	};
+
+	// Copy from image to buffer
+	VkBufferImageCopy* bufferImageCopies = ((VkBufferImageCopy*)deMalloc(m_params.regions.size() * sizeof(VkBufferImageCopy)));
+	for (deUint32 i = 0; i < m_params.regions.size(); i++)
+		bufferImageCopies[i] = m_params.regions[i].bufferImageCopy;
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,			// VkStructureType					sType;
+		DE_NULL,												// const void*						pNext;
+		VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,			// VkCommandBufferUsageFlags		flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,			// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+	};
+
+	VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+	vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
+	vk.cmdCopyImageToBuffer(*m_cmdBuffer, m_source.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_destination.get(), (deUint32)m_params.regions.size(), bufferImageCopies);
+	vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+	VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+
+	const VkSubmitInfo				submitInfo		=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity */));
+
+	// Read buffer data
+	de::MovePtr<tcu::TextureLevel>	resultLevel		(new tcu::TextureLevel(m_textureFormat, (int)m_params.dst.buffer.size, 1));
+	invalidateMappedMemoryRange(vk, vkDevice, m_destinationBufferAlloc->getMemory(), m_destinationBufferAlloc->getOffset(), m_bufferSize);
+	tcu::copy(*resultLevel, tcu::ConstPixelBufferAccess(resultLevel->getFormat(), resultLevel->getSize(), m_destinationBufferAlloc->getHostPtr()));
+	deFree(bufferImageCopies);
+
+	return checkTestResult(resultLevel->getAccess());
+}
+
+class CopyImageToBufferTestCase : public vkt::TestCase
+{
+public:
+							CopyImageToBufferTestCase	(tcu::TestContext&		testCtx,
+														 const std::string&		name,
+														 const std::string&		description,
+														 const TestParams		params)
+								: vkt::TestCase			(testCtx, name, description)
+								, m_params				(params)
+							{}
+
+	virtual					~CopyImageToBufferTestCase	(void) {}
+
+	virtual TestInstance*	createInstance				(Context&				context) const
+							{
+								return new CopyImageToBuffer(context, m_params);
+							}
+private:
+	TestParams				m_params;
+};
+
+void CopyImageToBuffer::copyRegionToTextureLevel(tcu::ConstPixelBufferAccess src, tcu::PixelBufferAccess dst, CopyRegion region)
+{
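+	// A bufferRowLength / bufferImageHeight of zero means the buffer data is tightly packed, so fall back to the image extent.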
+	deUint32 rowLength = region.bufferImageCopy.bufferRowLength;
+	if (!rowLength)
+		rowLength = region.bufferImageCopy.imageExtent.width;
+
+	deUint32 imageHeight = region.bufferImageCopy.bufferImageHeight;
+	if (!imageHeight)
+		imageHeight = region.bufferImageCopy.imageExtent.height;
+
+	const int			texelSize	= src.getFormat().getPixelSize();
+	const VkExtent3D	extent		= region.bufferImageCopy.imageExtent;
+	const VkOffset3D	srcOffset	= region.bufferImageCopy.imageOffset;
+	const int			texelOffset	= (int) region.bufferImageCopy.bufferOffset / texelSize;
+
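+	// The image row at (y, z) starts at buffer texel bufferOffset / texelSize + (z * imageHeight + y) * rowLength;
+	// imageExtent.width texels are copied per row.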
+	for (deUint32 z = 0; z < extent.depth; z++)
+	{
+		for (deUint32 y = 0; y < extent.height; y++)
+		{
+			int									texelIndex		= texelOffset + (z * imageHeight + y) *  rowLength;
+			const tcu::ConstPixelBufferAccess	srcSubRegion	= tcu::getSubregion(src, srcOffset.x, srcOffset.y + y, srcOffset.z + z,
+																					region.bufferImageCopy.imageExtent.width, 1, 1);
+			const tcu::PixelBufferAccess		dstSubRegion	= tcu::getSubregion(dst, texelIndex, 0, region.bufferImageCopy.imageExtent.width, 1);
+			tcu::copy(dstSubRegion, srcSubRegion);
+		}
+	}
+}
+
+// Copy from buffer to image.
+
+class CopyBufferToImage : public CopiesAndBlittingTestInstance
+{
+public:
+								CopyBufferToImage			(Context&	context,
+															 TestParams	testParams);
+	virtual						~CopyBufferToImage			(void) {}
+	virtual tcu::TestStatus		iterate						(void);
+private:
+	virtual void				copyRegionToTextureLevel	(tcu::ConstPixelBufferAccess src, tcu::PixelBufferAccess dst, CopyRegion region);
+
+	tcu::TextureFormat			m_textureFormat;
+	VkDeviceSize				m_bufferSize;
+
+	Move<VkBuffer>				m_source;
+	de::MovePtr<Allocation>		m_sourceBufferAlloc;
+	Move<VkImage>				m_destination;
+	de::MovePtr<Allocation>		m_destinationImageAlloc;
+};
+
+CopyBufferToImage::CopyBufferToImage (Context& context, TestParams testParams)
+	: CopiesAndBlittingTestInstance(context, testParams)
+	, m_textureFormat(mapVkFormat(testParams.dst.image.format))
+	, m_bufferSize(m_params.src.buffer.size * tcu::getPixelSize(m_textureFormat))
+{
+	const DeviceInterface&		vk					= context.getDeviceInterface();
+	const VkDevice				vkDevice			= context.getDevice();
+	const deUint32				queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc			(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+
+	// Create source buffer
+	{
+		const VkBufferCreateInfo	sourceBufferParams		=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			m_bufferSize,								// VkDeviceSize			size;
+			VK_BUFFER_USAGE_TRANSFER_SRC_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex,							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_source				= createBuffer(vk, vkDevice, &sourceBufferParams);
+		m_sourceBufferAlloc		= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_source), MemoryRequirement::HostVisible);
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_source, m_sourceBufferAlloc->getMemory(), m_sourceBufferAlloc->getOffset()));
+	}
+
+	// Create destination image
+	{
+		const VkImageCreateInfo		destinationImageParams	=
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u,										// VkImageCreateFlags	flags;
+			VK_IMAGE_TYPE_2D,						// VkImageType			imageType;
+			m_params.dst.image.format,				// VkFormat				format;
+			m_params.dst.image.extent,				// VkExtent3D			extent;
+			1u,										// deUint32				mipLevels;
+			1u,										// deUint32				arraySize;
+			VK_SAMPLE_COUNT_1_BIT,					// deUint32				samples;
+			VK_IMAGE_TILING_OPTIMAL,				// VkImageTiling		tiling;
+			VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
+				VK_IMAGE_USAGE_TRANSFER_DST_BIT,	// VkImageUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,				// VkSharingMode		sharingMode;
+			1u,										// deUint32				queueFamilyCount;
+			&queueFamilyIndex,						// const deUint32*		pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED,				// VkImageLayout		initialLayout;
+		};
+
+		m_destination			= createImage(vk, vkDevice, &destinationImageParams);
+		m_destinationImageAlloc	= memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_destination), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_destination, m_destinationImageAlloc->getMemory(), m_destinationImageAlloc->getOffset()));
+	}
+}
+
+tcu::TestStatus CopyBufferToImage::iterate()
+{
+	m_sourceTextureLevel = de::MovePtr<tcu::TextureLevel>(new tcu::TextureLevel(m_textureFormat, (int)m_params.src.buffer.size, 1));
+	generateBuffer(m_sourceTextureLevel->getAccess(), (int)m_params.src.buffer.size, 1, 1, FILL_MODE_WHITE);
+	m_destinationTextureLevel = de::MovePtr<tcu::TextureLevel>(new tcu::TextureLevel(m_textureFormat,
+																					m_params.dst.image.extent.width,
+																					m_params.dst.image.extent.height,
+																					m_params.dst.image.extent.depth));
+	generateBuffer(m_destinationTextureLevel->getAccess(), m_params.dst.image.extent.width, m_params.dst.image.extent.height, m_params.dst.image.extent.depth, FILL_MODE_SEQUENTIAL);
+
+	generateExpectedResult();
+
+	uploadBuffer(m_sourceTextureLevel->getAccess(), *m_sourceBufferAlloc);
+	uploadImage(m_destinationTextureLevel->getAccess(), *m_destination);
+
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const VkQueue				queue		= m_context.getUniversalQueue();
+	SimpleAllocator				memAlloc	(vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
+
+	// Barriers for copying buffer to image
+	const VkBufferMemoryBarrier bufferBarrier =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,	// VkStructureType	sType;
+		DE_NULL,									// const void*		pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,				// VkAccessFlags	srcAccessMask;
+		VK_ACCESS_HOST_READ_BIT,					// VkAccessFlags	dstAccessMask;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			dstQueueFamilyIndex;
+		*m_source,									// VkBuffer			buffer;
+		0u,											// VkDeviceSize		offset;
+		m_bufferSize								// VkDeviceSize		size;
+	};
+
+	const VkImageMemoryBarrier imageBarrier =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// VkStructureType			sType;
+		DE_NULL,									// const void*				pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,				// VkAccessFlags			srcAccessMask;
+		VK_ACCESS_TRANSFER_READ_BIT,				// VkAccessFlags			dstAccessMask;
+		VK_IMAGE_LAYOUT_GENERAL,					// VkImageLayout			oldLayout;
+		VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,		// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32					dstQueueFamilyIndex;
+		*m_destination,								// VkImage					image;
+		{											// VkImageSubresourceRange	subresourceRange;
+			getAspectFlag(m_textureFormat),	// VkImageAspectFlags	aspectMask;
+			0u,								// deUint32				baseMipLevel;
+			1u,								// deUint32				mipLevels;
+			0u,								// deUint32				baseArraySlice;
+			1u								// deUint32				arraySize;
+		}
+	};
+
+	// Copy from buffer to image
+	VkBufferImageCopy* bufferImageCopies = ((VkBufferImageCopy*)deMalloc(m_params.regions.size() * sizeof(VkBufferImageCopy)));
+	for (deUint32 i = 0; i < m_params.regions.size(); i++)
+		bufferImageCopies[i] = m_params.regions[i].bufferImageCopy;
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,			// VkStructureType					sType;
+		DE_NULL,												// const void*						pNext;
+		VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,			// VkCommandBufferUsageFlags		flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,			// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+	};
+
+	VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+	vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+	vk.cmdCopyBufferToImage(*m_cmdBuffer, m_source.get(), m_destination.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (deUint32)m_params.regions.size(), bufferImageCopies);
+	vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
+	VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+
+	const VkSubmitInfo				submitInfo		=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity */));
+
+	// Read buffer data
+	de::MovePtr<tcu::TextureLevel>	resultLevel	= readImage(vk, vkDevice, queue, memAlloc, *m_destination, m_params.dst.image.format, m_params.dst.image.extent);
+	deFree(bufferImageCopies);
+
+	return checkTestResult(resultLevel->getAccess());
+}
+
+class CopyBufferToImageTestCase : public vkt::TestCase
+{
+public:
+							CopyBufferToImageTestCase	(tcu::TestContext&		testCtx,
+														 const std::string&		name,
+														 const std::string&		description,
+														 const TestParams		params)
+								: vkt::TestCase			(testCtx, name, description)
+								, m_params				(params)
+							{}
+
+	virtual					~CopyBufferToImageTestCase	(void) {}
+
+	virtual TestInstance*	createInstance				(Context&				context) const
+							{
+								return new CopyBufferToImage(context, m_params);
+							}
+private:
+	TestParams				m_params;
+};
+
+void CopyBufferToImage::copyRegionToTextureLevel(tcu::ConstPixelBufferAccess src, tcu::PixelBufferAccess dst, CopyRegion region)
+{
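+	// Inverse of CopyImageToBuffer::copyRegionToTextureLevel(): rows are read from the (rowLength-strided) buffer and written into the destination image region.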
+	deUint32 rowLength = region.bufferImageCopy.bufferRowLength;
+	if (!rowLength)
+		rowLength = region.bufferImageCopy.imageExtent.width;
+
+	deUint32 imageHeight = region.bufferImageCopy.bufferImageHeight;
+	if (!imageHeight)
+		imageHeight = region.bufferImageCopy.imageExtent.height;
+
+	const int			texelSize	= dst.getFormat().getPixelSize();
+	const VkExtent3D	extent		= region.bufferImageCopy.imageExtent;
+	const VkOffset3D	dstOffset	= region.bufferImageCopy.imageOffset;
+	const int			texelOffset	= (int) region.bufferImageCopy.bufferOffset / texelSize;
+
+	for (deUint32 z = 0; z < extent.depth; z++)
+	{
+		for (deUint32 y = 0; y < extent.height; y++)
+		{
+			int									texelIndex		= texelOffset + (z * imageHeight + y) *  rowLength;
+			const tcu::ConstPixelBufferAccess	srcSubRegion	= tcu::getSubregion(src, texelIndex, 0, region.bufferImageCopy.imageExtent.width, 1);
+			const tcu::PixelBufferAccess		dstSubRegion	= tcu::getSubregion(dst, dstOffset.x, dstOffset.y + y, dstOffset.z + z,
+																					region.bufferImageCopy.imageExtent.width, 1, 1);
+			tcu::copy(dstSubRegion, srcSubRegion);
+		}
+	}
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createCopiesAndBlittingTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	copiesAndBlittingTests	(new tcu::TestCaseGroup(testCtx, "copy_and_blit", "Copies And Blitting Tests"));
+
+	const VkExtent3D				defaultExtent			= {256, 256, 1};
+	const VkImageSubresourceLayers	defaultSourceLayer		=
+	{
+		VK_IMAGE_ASPECT_COLOR_BIT,	// VkImageAspectFlags	aspectMask;
+		0u,							// uint32_t				mipLevel;
+		0u,							// uint32_t				baseArrayLayer;
+		1u,							// uint32_t				layerCount;
+	};
+
+	{
+		std::ostringstream description;
+		description << "Copy from image to image";
+
+		TestParams	params;
+		params.src.image.format	= VK_FORMAT_R8G8B8A8_UINT;
+		params.src.image.extent	= defaultExtent;
+		params.dst.image.format	= VK_FORMAT_R8G8B8A8_UINT;
+		params.dst.image.extent	= defaultExtent;
+
+		{
+			const VkImageSubresourceLayers sourceLayer =
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,	// VkImageAspectFlags	aspectMask;
+				0u,							// uint32_t				mipLevel;
+				0u,							// uint32_t				baseArrayLayer;
+				1u							// uint32_t				layerCount;
+			};
+			const VkImageCopy testCopy =
+			{
+				sourceLayer,	// VkImageSubresourceLayers	srcSubresource;
+				{0, 0, 0},		// VkOffset3D				srcOffset;
+				sourceLayer,	// VkImageSubresourceLayers	dstSubresource;
+				{0, 0, 0},		// VkOffset3D				dstOffset;
+				{256, 256, 1},	// VkExtent3D				extent;
+			};
+
+			CopyRegion imageCopy;
+			imageCopy.imageCopy = testCopy;
+
+			params.regions.push_back(imageCopy);
+		}
+
+		copiesAndBlittingTests->addChild(new CopyImageToImageTestCase(testCtx, "image_to_image_whole", description.str(), params));
+	}
+
+	{
+		std::ostringstream description;
+		description << "Copy from image to image";
+
+		TestParams	params;
+		params.src.image.format	= VK_FORMAT_R8G8B8A8_UINT;
+		params.src.image.extent	= defaultExtent;
+		params.dst.image.format	= VK_FORMAT_R32_UINT;
+		params.dst.image.extent	= defaultExtent;
+
+		{
+			const VkImageSubresourceLayers sourceLayer =
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,	// VkImageAspectFlags	aspectMask;
+				0u,							// uint32_t				mipLevel;
+				0u,							// uint32_t				baseArrayLayer;
+				1u							// uint32_t				layerCount;
+			};
+			const VkImageCopy testCopy =
+			{
+				sourceLayer,	// VkImageSubresourceLayers	srcSubresource;
+				{0, 0, 0},		// VkOffset3D				srcOffset;
+				sourceLayer,	// VkImageSubresourceLayers	dstSubresource;
+				{0, 0, 0},		// VkOffset3D				dstOffset;
+				{256, 256, 1},	// VkExtent3D				extent;
+			};
+
+			CopyRegion imageCopy;
+			imageCopy.imageCopy = testCopy;
+
+			params.regions.push_back(imageCopy);
+		}
+
+		copiesAndBlittingTests->addChild(new CopyImageToImageTestCase(testCtx, "image_to_image_whole_different_format_uncompressed", description.str(), params));
+	}
+
+	{
+		std::ostringstream description;
+		description << "Copy from image to image";
+
+		TestParams	params;
+		params.src.image.format	= VK_FORMAT_R8G8B8A8_UINT;
+		params.src.image.extent	= defaultExtent;
+		params.dst.image.format	= VK_FORMAT_R8G8B8A8_UINT;
+		params.dst.image.extent	= defaultExtent;
+
+		{
+			const VkImageSubresourceLayers sourceLayer =
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,	// VkImageAspectFlags	aspectMask;
+				0u,							// uint32_t				mipLevel;
+				0u,							// uint32_t				baseArrayLayer;
+				1u							// uint32_t				layerCount;
+			};
+			const VkImageCopy testCopy =
+			{
+				sourceLayer,	// VkImageSubresourceLayers	srcSubresource;
+				{0, 0, 0},		// VkOffset3D				srcOffset;
+				sourceLayer,	// VkImageSubresourceLayers	dstSubresource;
+				{64, 98, 0},	// VkOffset3D				dstOffset;
+				{16, 16, 1},	// VkExtent3D				extent;
+			};
+
+			CopyRegion imageCopy;
+			imageCopy.imageCopy = testCopy;
+
+			params.regions.push_back(imageCopy);
+		}
+
+		copiesAndBlittingTests->addChild(new CopyImageToImageTestCase(testCtx, "image_to_image_partial", description.str(), params));
+	}
+
+	{
+		std::ostringstream description;
+		description << "Copy from image to image";
+
+		TestParams	params;
+		params.src.image.format	= VK_FORMAT_R8G8B8A8_UINT;
+		params.src.image.extent	= defaultExtent;
+		params.dst.image.format	= VK_FORMAT_R8G8B8A8_UINT;
+		params.dst.image.extent	= defaultExtent;
+
+		for (deInt32 i = 0; i < 16; i++)
+		{
+			const VkImageSubresourceLayers sourceLayer =
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,	// VkImageAspectFlags	aspectMask;
+				0u,							// uint32_t				mipLevel;
+				0u,							// uint32_t				baseArrayLayer;
+				1u							// uint32_t				layerCount;
+			};
+			const VkImageCopy testCopy =
+			{
+				sourceLayer,			// VkImageSubresourceLayers	srcSubresource;
+				{0, 0, 0},				// VkOffset3D				srcOffset;
+				sourceLayer,			// VkImageSubresourceLayers	dstSubresource;
+				{i*16, 240-i*16, 0},	// VkOffset3D				dstOffset;
+				{16, 16, 1},			// VkExtent3D				extent;
+			};
+
+			CopyRegion imageCopy;
+			imageCopy.imageCopy = testCopy;
+
+			params.regions.push_back(imageCopy);
+		}
+
+		copiesAndBlittingTests->addChild(new CopyImageToImageTestCase(testCtx, "image_to_image_partial_multiple", description.str(), params));
+	}
+
+	// Copy image to buffer testcases.
+	{
+		std::ostringstream	description;
+		description << "Copy from image to buffer";
+
+		TestParams	params;
+		params.src.image.format	= VK_FORMAT_R8G8B8A8_UINT;
+		params.src.image.extent	= defaultExtent;
+		params.dst.buffer.size	= 256 * 256;
+
+		const VkBufferImageCopy			bufferImageCopy	=
+		{
+			0u,						// VkDeviceSize				bufferOffset;
+			0u,						// uint32_t					bufferRowLength;
+			0u,						// uint32_t					bufferImageHeight;
+			defaultSourceLayer,		// VkImageSubresourceLayers	imageSubresource;
+			{0, 0, 0},				// VkOffset3D				imageOffset;
+			{16, 16, 1}				// VkExtent3D				imageExtent;
+		};
+		CopyRegion copyRegion;
+		copyRegion.bufferImageCopy = bufferImageCopy;
+
+		params.regions.push_back(copyRegion);
+
+		copiesAndBlittingTests->addChild(new CopyImageToBufferTestCase(testCtx, "image_to_buffer", description.str(), params));
+	}
+
+	// Copy buffer to image testcases.
+	{
+		std::ostringstream	description;
+		description << "Copy from buffer to image";
+
+		TestParams	params;
+		params.src.buffer.size	= 256 * 256;
+		params.dst.image.format	= VK_FORMAT_R8G8B8A8_UINT;
+		params.dst.image.extent	= defaultExtent;
+
+		const VkBufferImageCopy			bufferImageCopy	=
+		{
+			0u,						// VkDeviceSize				bufferOffset;
+			0u,						// uint32_t					bufferRowLength;
+			0u,						// uint32_t					bufferImageHeight;
+			defaultSourceLayer,		// VkImageSubresourceLayers	imageSubresource;
+			{0, 0, 0},				// VkOffset3D				imageOffset;
+			{16, 16, 1}				// VkExtent3D				imageExtent;
+		};
+		CopyRegion copyRegion;
+		copyRegion.bufferImageCopy = bufferImageCopy;
+
+		params.regions.push_back(copyRegion);
+
+		copiesAndBlittingTests->addChild(new CopyBufferToImageTestCase(testCtx, "buffer_to_image", description.str(), params));
+	}
+
+	{
+		std::ostringstream	description;
+		description << "Copy from buffer to buffer: whole buffer.";
+
+		TestParams params;
+		params.src.buffer.size = 256;
+		params.dst.buffer.size = 256;
+		const VkBufferCopy bufferCopy = {
+			0u,		// VkDeviceSize	srcOffset;
+			0u,		// VkDeviceSize	dstOffset;
+			256u,	// VkDeviceSize	size;
+		};
+		CopyRegion copyRegion;
+		copyRegion.bufferCopy = bufferCopy;
+
+		params.regions.push_back(copyRegion);
+
+		copiesAndBlittingTests->addChild(new BufferToBufferTestCase(testCtx, "buffer_to_buffer_whole", description.str(), params));
+	}
+
+	{
+		std::ostringstream	description;
+		description << "Copy from buffer to buffer: small area.";
+
+		TestParams params;
+		params.src.buffer.size = 16;
+		params.dst.buffer.size = 16;
+		const VkBufferCopy bufferCopy = {
+			12u,	// VkDeviceSize	srcOffset;
+			4u,		// VkDeviceSize	dstOffset;
+			1u,		// VkDeviceSize	size;
+		};
+		CopyRegion copyRegion;
+		copyRegion.bufferCopy = bufferCopy;
+
+		params.regions.push_back(copyRegion);
+
+		copiesAndBlittingTests->addChild(new BufferToBufferTestCase(testCtx, "buffer_to_buffer_small", description.str(), params));
+	}
+
+	{
+		std::ostringstream	description;
+		description << "Copy from buffer to buffer: more regions.";
+
+		const deUint32 size = 16;
+
+		TestParams params;
+		params.src.buffer.size = size;
+		params.dst.buffer.size = size * (size + 1);
+
+		// Add one copy region for each size in 0..size; region i is written at dstOffset i * size, hence the destination buffer size of size * (size + 1)
+		for (unsigned int i = 0; i <= size; i++)
+		{
+			const VkBufferCopy bufferCopy = {
+				0,		// VkDeviceSize	srcOffset;
+				i*size,	// VkDeviceSize	dstOffset;
+				i,		// VkDeviceSize	size;
+			};
+			CopyRegion copyRegion;
+			copyRegion.bufferCopy = bufferCopy;
+			params.regions.push_back(copyRegion);
+		}
+		copiesAndBlittingTests->addChild(new BufferToBufferTestCase(testCtx, "buffer_to_buffer_regions", description.str(), params));
+	}
+
+	{
+		std::ostringstream description;
+		description << "Copy from image to image depth";
+
+		TestParams	params;
+		params.src.image.format	= VK_FORMAT_D32_SFLOAT;
+		params.src.image.extent	= defaultExtent;
+		params.dst.image.format	= VK_FORMAT_D32_SFLOAT;
+		params.dst.image.extent	= defaultExtent;
+
+		{
+			const VkImageSubresourceLayers sourceLayer =
+			{
+				VK_IMAGE_ASPECT_DEPTH_BIT,	// VkImageAspectFlags	aspectMask;
+				0u,							// uint32_t				mipLevel;
+				0u,							// uint32_t				baseArrayLayer;
+				1u							// uint32_t				layerCount;
+			};
+			const VkImageCopy testCopy =
+			{
+				sourceLayer,	// VkImageSubresourceLayers	srcSubresource;
+				{0, 0, 0},		// VkOffset3D				srcOffset;
+				sourceLayer,	// VkImageSubresourceLayers	dstSubresource;
+				{64, 98, 0},	// VkOffset3D				dstOffset;
+				{16, 16, 1},	// VkExtent3D				extent;
+			};
+
+			CopyRegion imageCopy;
+			imageCopy.imageCopy = testCopy;
+
+			params.regions.push_back(imageCopy);
+		}
+
+		copiesAndBlittingTests->addChild(new CopyImageToImageTestCase(testCtx, "image_to_image_depth", description.str(), params));
+	}
+
+	{
+		std::ostringstream description;
+		description << "Copy from image to image stencil";
+
+		TestParams	params;
+		params.src.image.format	= VK_FORMAT_S8_UINT;
+		params.src.image.extent	= defaultExtent;
+		params.dst.image.format	= VK_FORMAT_S8_UINT;
+		params.dst.image.extent	= defaultExtent;
+
+		{
+			const VkImageSubresourceLayers sourceLayer =
+			{
+				VK_IMAGE_ASPECT_STENCIL_BIT,	// VkImageAspectFlags	aspectMask;
+				0u,								// uint32_t				mipLevel;
+				0u,								// uint32_t				baseArrayLayer;
+				1u								// uint32_t				layerCount;
+			};
+			const VkImageCopy testCopy =
+			{
+				sourceLayer,	// VkImageSubresourceLayers	srcSubresource;
+				{0, 0, 0},		// VkOffset3D				srcOffset;
+				sourceLayer,	// VkImageSubresourceLayers	dstSubresource;
+				{64, 98, 0},	// VkOffset3D				dstOffset;
+				{16, 16, 1},	// VkExtent3D				extent;
+			};
+
+			CopyRegion imageCopy;
+			imageCopy.imageCopy = testCopy;
+
+			params.regions.push_back(imageCopy);
+		}
+
+		copiesAndBlittingTests->addChild(new CopyImageToImageTestCase(testCtx, "image_to_image_stencil", description.str(), params));
+	}
+
+	return copiesAndBlittingTests.release();
+}
+
+} // api
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/api/vktApiCopiesAndBlittingTests.hpp b/external/vulkancts/modules/vulkan/api/vktApiCopiesAndBlittingTests.hpp
new file mode 100644
index 0000000..c92ff24
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiCopiesAndBlittingTests.hpp
@@ -0,0 +1,52 @@
+#ifndef _VKTAPICOPIESANDBLITTINGTESTS_HPP
+#define _VKTAPICOPIESANDBLITTINGTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Copies And Blitting Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+
+namespace api
+{
+
+tcu::TestCaseGroup*		createCopiesAndBlittingTests	(tcu::TestContext& testCtx);
+
+} // api
+} // vkt
+
+#endif // _VKTAPICOPIESANDBLITTINGTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/api/vktApiDeviceInitializationTests.cpp b/external/vulkancts/modules/vulkan/api/vktApiDeviceInitializationTests.cpp
new file mode 100644
index 0000000..0421ff6
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiDeviceInitializationTests.cpp
@@ -0,0 +1,599 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Device Initialization Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktApiDeviceInitializationTests.hpp"
+#include "vktTestCaseUtil.hpp"
+
+#include "vkDefs.hpp"
+#include "vkPlatform.hpp"
+#include "vkStrUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkDeviceUtil.hpp"
+#include "vkApiVersion.hpp"
+
+#include "tcuTestLog.hpp"
+#include "tcuResultCollector.hpp"
+
+#include "deUniquePtr.hpp"
+#include "deStringUtil.hpp"
+
+#include <vector>
+
+namespace vkt
+{
+namespace api
+{
+
+namespace
+{
+
+using namespace vk;
+using namespace std;
+using tcu::TestLog;
+
+tcu::TestStatus createInstanceTest (Context& context)
+{
+	tcu::TestLog&				log						= context.getTestContext().getLog();
+	tcu::ResultCollector		resultCollector			(log);
+	const char*					appNames[]				= { "appName", DE_NULL, "",  "app, name", "app(\"name\"", "app~!@#$%^&*()_+name", "app\nName", "app\r\nName" };
+	const char*					engineNames[]			= { "engineName", DE_NULL, "",  "engine. name", "engine\"(name)", "eng~!@#$%^&*()_+name", "engine\nName", "engine\r\nName" };
+	const deUint32				appVersions[]			= { 0, 1, (deUint32)-1 };
+	const deUint32				engineVersions[]		= { 0, 1, (deUint32)-1 };
+	const PlatformInterface&	platformInterface		= context.getPlatformInterface();
+	vector<VkApplicationInfo>	appInfos;
+
+	// test over appName
+	for (int appNameNdx = 0; appNameNdx < DE_LENGTH_OF_ARRAY(appNames); appNameNdx++)
+	{
+		const VkApplicationInfo appInfo =
+		{
+			VK_STRUCTURE_TYPE_APPLICATION_INFO,		// VkStructureType				sType;
+			DE_NULL,								// const void*					pNext;
+			appNames[appNameNdx],					// const char*					pAppName;
+			0u,										// deUint32						appVersion;
+			"engineName",							// const char*					pEngineName;
+			0u,										// deUint32						engineVersion;
+			VK_API_VERSION,							// deUint32						apiVersion;
+		};
+
+		appInfos.push_back(appInfo);
+	}
+
+	// test over engineName
+	for (int engineNameNdx = 0; engineNameNdx < DE_LENGTH_OF_ARRAY(engineNames); engineNameNdx++)
+	{
+		const VkApplicationInfo appInfo =
+		{
+			VK_STRUCTURE_TYPE_APPLICATION_INFO,		// VkStructureType				sType;
+			DE_NULL,								// const void*					pNext;
+			"appName",								// const char*					pAppName;
+			0u,										// deUint32						appVersion;
+			engineNames[engineNameNdx],				// const char*					pEngineName;
+			0u,										// deUint32						engineVersion;
+			VK_API_VERSION,							// deUint32						apiVersion;
+		};
+
+		appInfos.push_back(appInfo);
+	}
+
+	// test over appVersion
+	for (int appVersionNdx = 0; appVersionNdx < DE_LENGTH_OF_ARRAY(appVersions); appVersionNdx++)
+	{
+		const VkApplicationInfo appInfo =
+		{
+			VK_STRUCTURE_TYPE_APPLICATION_INFO,		// VkStructureType				sType;
+			DE_NULL,								// const void*					pNext;
+			"appName",								// const char*					pAppName;
+			appVersions[appVersionNdx],				// deUint32						appVersion;
+			"engineName",							// const char*					pEngineName;
+			0u,										// deUint32						engineVersion;
+			VK_API_VERSION,							// deUint32						apiVersion;
+		};
+
+		appInfos.push_back(appInfo);
+	}
+
+	// test over engineVersion
+	for (int engineVersionNdx = 0; engineVersionNdx < DE_LENGTH_OF_ARRAY(engineVersions); engineVersionNdx++)
+	{
+		const VkApplicationInfo appInfo =
+		{
+			VK_STRUCTURE_TYPE_APPLICATION_INFO,		// VkStructureType				sType;
+			DE_NULL,								// const void*					pNext;
+			"appName",								// const char*					pAppName;
+			0u,										// deUint32						appVersion;
+			"engineName",							// const char*					pEngineName;
+			engineVersions[engineVersionNdx],		// deUint32						engineVersion;
+			VK_API_VERSION,							// deUint32						apiVersion;
+		};
+
+		appInfos.push_back(appInfo);
+	}
+
+	// run the tests!
+	for (size_t appInfoNdx = 0; appInfoNdx < appInfos.size(); ++appInfoNdx)
+	{
+		const VkApplicationInfo&		appInfo					= appInfos[appInfoNdx];
+		const VkInstanceCreateInfo		instanceCreateInfo		=
+		{
+			VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,	// VkStructureType				sType;
+			DE_NULL,								// const void*					pNext;
+			(VkInstanceCreateFlags)0u,				// VkInstanceCreateFlags		flags;
+			&appInfo,								// const VkApplicationInfo*		pAppInfo;
+			0u,										// deUint32						layerCount;
+			DE_NULL,								// const char*const*			ppEnabledLayerNames;
+			0u,										// deUint32						extensionCount;
+			DE_NULL,								// const char*const*			ppEnabledExtensionNames;
+		};
+
+		log << TestLog::Message << "Creating instance with appInfo: " << appInfo << TestLog::EndMessage;
+
+		try
+		{
+			const Unique<VkInstance> instance(createInstance(platformInterface, &instanceCreateInfo));
+			log << TestLog::Message << "Succeeded" << TestLog::EndMessage;
+		}
+		catch (const vk::Error& err)
+		{
+			resultCollector.fail("Failed, error: " + de::toString(err.getMessage()));
+		}
+	}
+
+	return tcu::TestStatus(resultCollector.getResult(), resultCollector.getMessage());
+}
+
+tcu::TestStatus createInstanceWithInvalidApiVersionTest (Context& context)
+{
+	tcu::TestLog&				log					= context.getTestContext().getLog();
+	tcu::ResultCollector		resultCollector		(log);
+	const PlatformInterface&	platformInterface	= context.getPlatformInterface();
+	const ApiVersion			apiVersion			= unpackVersion(VK_API_VERSION);
+	const deUint32				invalidMajorVersion	= (1 << 10) - 1;
+	const deUint32				invalidMinorVersion	= (1 << 10) - 1;
+	const deUint32				invalidPatchNum		= (1 << 12) - 1;
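+	// VK_API_VERSION packs the major and minor numbers into 10-bit fields and the patch number into a 12-bit field, so these are the largest values each field can encode.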
+	vector<ApiVersion>			invalidApiVersions;
+
+	invalidApiVersions.push_back(ApiVersion(invalidMajorVersion, apiVersion.minorNum, apiVersion.patchNum));
+	invalidApiVersions.push_back(ApiVersion(apiVersion.majorNum, invalidMinorVersion, apiVersion.patchNum));
+	invalidApiVersions.push_back(ApiVersion(apiVersion.majorNum, apiVersion.minorNum, invalidPatchNum));
+
+	for (size_t apiVersionNdx = 0; apiVersionNdx < invalidApiVersions.size(); apiVersionNdx++)
+	{
+		const VkApplicationInfo appInfo					=
+		{
+			VK_STRUCTURE_TYPE_APPLICATION_INFO,			// VkStructureType				sType;
+			DE_NULL,									// const void*					pNext;
+			"appName",									// const char*					pAppName;
+			0u,											// deUint32						appVersion;
+			"engineName",								// const char*					pEngineName;
+			0u,											// deUint32						engineVersion;
+			pack(invalidApiVersions[apiVersionNdx]),	// deUint32						apiVersion;
+		};
+		const VkInstanceCreateInfo instanceCreateInfo	=
+		{
+			VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,									// const void*					pNext;
+			(VkInstanceCreateFlags)0u,					// VkInstanceCreateFlags		flags;
+			&appInfo,									// const VkApplicationInfo*		pAppInfo;
+			0u,											// deUint32						layerCount;
+			DE_NULL,									// const char*const*			ppEnabledLayerNames;
+			0u,											// deUint32						extensionCount;
+			DE_NULL,									// const char*const*			ppEnabledExtensionNames;
+		};
+
+
+		log << TestLog::Message
+			<< "VK_API_VERSION defined in vulkan.h: " << apiVersion
+			<< ", api version used to create instance: " << invalidApiVersions[apiVersionNdx]
+			<< TestLog::EndMessage;
+
+		{
+			VkInstance		instance	= (VkInstance)0;
+			const VkResult	result		= platformInterface.createInstance(&instanceCreateInfo, DE_NULL/*pAllocator*/, &instance);
+			const bool		gotInstance	= !!instance;
+
+			if (instance)
+			{
+				const InstanceDriver	instanceIface	(platformInterface, instance);
+				instanceIface.destroyInstance(instance, DE_NULL/*pAllocator*/);
+			}
+
+			if (result == VK_ERROR_INCOMPATIBLE_DRIVER)
+			{
+				TCU_CHECK(!gotInstance);
+				log << TestLog::Message << "Pass, instance creation with invalid apiVersion is rejected" << TestLog::EndMessage;
+			}
+			else
+				resultCollector.fail("Fail, instance creation with invalid apiVersion is not rejected");
+		}
+	}
+
+	return tcu::TestStatus(resultCollector.getResult(), resultCollector.getMessage());
+}
+
+tcu::TestStatus createInstanceWithUnsupportedExtensionsTest (Context& context)
+{
+	tcu::TestLog&						log						= context.getTestContext().getLog();
+	const PlatformInterface&			platformInterface		= context.getPlatformInterface();
+	const char*							enabledExtensions[]		= {"VK_UNSUPPORTED_EXTENSION", "THIS_IS_NOT_AN_EXTENSION"};
+	const VkApplicationInfo				appInfo					=
+	{
+		VK_STRUCTURE_TYPE_APPLICATION_INFO,						// VkStructureType				sType;
+		DE_NULL,												// const void*					pNext;
+		"appName",												// const char*					pAppName;
+		0u,														// deUint32						appVersion;
+		"engineName",											// const char*					pEngineName;
+		0u,														// deUint32						engineVersion;
+		VK_API_VERSION,											// deUint32						apiVersion;
+	};
+	const VkInstanceCreateInfo			instanceCreateInfo		=
+	{
+		VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,					// VkStructureType				sType;
+		DE_NULL,												// const void*					pNext;
+		(VkInstanceCreateFlags)0u,								// VkInstanceCreateFlags		flags;
+		&appInfo,												// const VkApplicationInfo*		pAppInfo;
+		0u,														// deUint32						layerCount;
+		DE_NULL,												// const char*const*			ppEnabledLayerNames;
+		DE_LENGTH_OF_ARRAY(enabledExtensions),					// deUint32						extensionCount;
+		enabledExtensions,										// const char*const*			ppEnabledExtensionNames;
+	};
+
+	log << TestLog::Message << "Enabled extensions are: " << TestLog::EndMessage;
+
+	for (int ndx = 0; ndx < DE_LENGTH_OF_ARRAY(enabledExtensions); ndx++)
+		log << TestLog::Message << enabledExtensions[ndx] <<  TestLog::EndMessage;
+
+	{
+		VkInstance		instance	= (VkInstance)0;
+		const VkResult	result		= platformInterface.createInstance(&instanceCreateInfo, DE_NULL/*pAllocator*/, &instance);
+		const bool		gotInstance	= !!instance;
+
+		if (instance)
+		{
+			const InstanceDriver	instanceIface	(platformInterface, instance);
+			instanceIface.destroyInstance(instance, DE_NULL/*pAllocator*/);
+		}
+
+		if (result == VK_ERROR_EXTENSION_NOT_PRESENT)
+		{
+			TCU_CHECK(!gotInstance);
+			return tcu::TestStatus::pass("Pass, creating instance with unsupported extension was rejected.");
+		}
+		else
+			return tcu::TestStatus::fail("Fail, creating instance with unsupported extensions succeeded.");
+	}
+}
+
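+// Create a device with a single queue from queue family 0, retrieve the queue and wait for it to idle.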
+tcu::TestStatus createDeviceTest (Context& context)
+{
+	const PlatformInterface&		platformInterface		= context.getPlatformInterface();
+	const Unique<VkInstance>		instance				(createDefaultInstance(platformInterface));
+	const InstanceDriver			instanceDriver			(platformInterface, instance.get());
+	const VkPhysicalDevice			physicalDevice			= chooseDevice(instanceDriver, instance.get(), context.getTestContext().getCommandLine());
+	const deUint32					queueFamilyIndex		= 0;
+	const deUint32					queueCount				= 1;
+	const deUint32					queueIndex				= 0;
+	const float						queuePriority			= 1.0f;
+	const VkDeviceQueueCreateInfo	deviceQueueCreateInfo	=
+	{
+		VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+		DE_NULL,
+		(VkDeviceQueueCreateFlags)0u,
+		queueFamilyIndex,						//queueFamilyIndex;
+		queueCount,								//queueCount;
+		&queuePriority,							//pQueuePriorities;
+	};
+	const VkDeviceCreateInfo		deviceCreateInfo	=
+	{
+		VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,	//sType;
+		DE_NULL,								//pNext;
+		(VkDeviceCreateFlags)0u,
+		1,										//queueRecordCount;
+		&deviceQueueCreateInfo,					//pRequestedQueues;
+		0,										//layerCount;
+		DE_NULL,								//ppEnabledLayerNames;
+		0,										//extensionCount;
+		DE_NULL,								//ppEnabledExtensionNames;
+		DE_NULL,								//pEnabledFeatures;
+	};
+
+	const Unique<VkDevice>			device					(createDevice(instanceDriver, physicalDevice, &deviceCreateInfo));
+	const DeviceDriver				deviceDriver			(instanceDriver, device.get());
+	VkQueue							queue;
+
+	deviceDriver.getDeviceQueue(device.get(), queueFamilyIndex, queueIndex, &queue);
+	VK_CHECK(deviceDriver.queueWaitIdle(queue));
+
+	return tcu::TestStatus::pass("Pass");
+}
+
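+// Create several devices from the same physical device, verify that each exposes a working queue,
+// and make sure every successfully created device is destroyed even if an error occurs.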
+tcu::TestStatus createMultipleDevicesTest (Context& context)
+{
+	tcu::TestLog&										log						= context.getTestContext().getLog();
+	tcu::ResultCollector								resultCollector			(log);
+	const int											numDevices				= 5;
+	const PlatformInterface&							platformInterface		= context.getPlatformInterface();
+	const Unique<VkInstance>							instance				(createDefaultInstance(platformInterface));
+	const InstanceDriver								instanceDriver			(platformInterface, instance.get());
+	const VkPhysicalDevice								physicalDevice			= chooseDevice(instanceDriver, instance.get(), context.getTestContext().getCommandLine());
+	const deUint32										queueFamilyIndex		= 0;
+	const deUint32										queueCount				= 1;
+	const deUint32										queueIndex				= 0;
+	const float											queuePriority			= 1.0f;
+	const VkDeviceQueueCreateInfo						deviceQueueCreateInfo	=
+	{
+		VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+		DE_NULL,
+		(VkDeviceQueueCreateFlags)0u,					//flags;
+		queueFamilyIndex,								//queueFamilyIndex;
+		queueCount,										//queueCount;
+		&queuePriority,									//pQueuePriorities;
+	};
+	const VkDeviceCreateInfo							deviceCreateInfo		=
+	{
+		VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,			//sType;
+		DE_NULL,										//pNext;
+		(VkDeviceCreateFlags)0u,
+		1,												//queueRecordCount;
+		&deviceQueueCreateInfo,							//pRequestedQueues;
+		0,												//layerCount;
+		DE_NULL,										//ppEnabledLayerNames;
+		0,												//extensionCount;
+		DE_NULL,										//ppEnabledExtensionNames;
+		DE_NULL,										//pEnabledFeatures;
+	};
+	vector<VkDevice>									devices(numDevices, (VkDevice)DE_NULL);
+
+	try
+	{
+		for (int deviceNdx = 0; deviceNdx < numDevices; deviceNdx++)
+		{
+			const VkResult result = instanceDriver.createDevice(physicalDevice, &deviceCreateInfo, DE_NULL/*pAllocator*/, &devices[deviceNdx]);
+
+			if (result != VK_SUCCESS)
+			{
+				resultCollector.fail("Failed to create Device No." + de::toString(deviceNdx) + ", Error Code: " + de::toString(result));
+				break;
+			}
+
+			{
+				const DeviceDriver	deviceDriver(instanceDriver, devices[deviceNdx]);
+				VkQueue				queue;
+
+				DE_ASSERT(queueIndex < queueCount);
+				deviceDriver.getDeviceQueue(devices[deviceNdx], queueFamilyIndex, queueIndex, &queue);
+				VK_CHECK(deviceDriver.queueWaitIdle(queue));
+			}
+		}
+	}
+	catch (const vk::Error& error)
+	{
+		resultCollector.fail(de::toString(error.getError()));
+	}
+	catch (...)
+	{
+		for (int deviceNdx = (int)devices.size()-1; deviceNdx >= 0; deviceNdx--)
+		{
+			if (devices[deviceNdx] != (VkDevice)DE_NULL)
+			{
+				DeviceDriver deviceDriver(instanceDriver, devices[deviceNdx]);
+				deviceDriver.destroyDevice(devices[deviceNdx], DE_NULL/*pAllocator*/);
+			}
+		}
+
+		throw;
+	}
+
+	for (int deviceNdx = (int)devices.size()-1; deviceNdx >= 0; deviceNdx--)
+	{
+		if (devices[deviceNdx] != (VkDevice)DE_NULL)
+		{
+			DeviceDriver deviceDriver(instanceDriver, devices[deviceNdx]);
+			deviceDriver.destroyDevice(devices[deviceNdx], DE_NULL/*pAllocator*/);
+		}
+	}
+
+	return tcu::TestStatus(resultCollector.getResult(), resultCollector.getMessage());
+}
+
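+// Device-level counterpart of the unsupported-extension instance test: device creation with bogus
+// extension names must fail with VK_ERROR_EXTENSION_NOT_PRESENT and must not return a device handle.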
+tcu::TestStatus createDeviceWithUnsupportedExtensionsTest (Context& context)
+{
+	tcu::TestLog&					log						= context.getTestContext().getLog();
+	const PlatformInterface&		platformInterface		= context.getPlatformInterface();
+	const Unique<VkInstance>		instance				(createDefaultInstance(platformInterface));
+	const InstanceDriver			instanceDriver			(platformInterface, instance.get());
+	const char*						enabledExtensions[]		= {"VK_UNSUPPORTED_EXTENSION", "THIS_IS_NOT_AN_EXTENSION", "VK_DONT_SUPPORT_ME"};
+	const VkPhysicalDevice			physicalDevice			= chooseDevice(instanceDriver, instance.get(), context.getTestContext().getCommandLine());
+	const float						queuePriority			= 1.0f;
+	const VkDeviceQueueCreateInfo	deviceQueueCreateInfo	=
+	{
+		VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+		DE_NULL,
+		(VkDeviceQueueCreateFlags)0u,
+		0,										//queueFamilyIndex;
+		1,										//queueCount;
+		&queuePriority,							//pQueuePriorities;
+	};
+	const VkDeviceCreateInfo		deviceCreateInfo		=
+	{
+		VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,	//sType;
+		DE_NULL,								//pNext;
+		(VkDeviceCreateFlags)0u,
+		1,										//queueRecordCount;
+		&deviceQueueCreateInfo,					//pRequestedQueues;
+		0,										//layerCount;
+		DE_NULL,								//ppEnabledLayerNames;
+		DE_LENGTH_OF_ARRAY(enabledExtensions),	//extensionCount;
+		enabledExtensions,						//ppEnabledExtensionNames;
+		DE_NULL,								//pEnabledFeatures;
+	};
+
+	log << TestLog::Message << "Enabled extensions are: " << TestLog::EndMessage;
+
+	for (int ndx = 0; ndx < DE_LENGTH_OF_ARRAY(enabledExtensions); ndx++)
+		log << TestLog::Message << enabledExtensions[ndx] <<  TestLog::EndMessage;
+
+	{
+		VkDevice		device		= (VkDevice)0;
+		const VkResult	result		= instanceDriver.createDevice(physicalDevice, &deviceCreateInfo, DE_NULL/*pAllocator*/, &device);
+		const bool		gotDevice	= !!device;
+
+		if (device)
+		{
+			const DeviceDriver	deviceIface	(instanceDriver, device);
+			deviceIface.destroyDevice(device, DE_NULL/*pAllocator*/);
+		}
+
+		if (result == VK_ERROR_EXTENSION_NOT_PRESENT)
+		{
+			TCU_CHECK(!gotDevice);
+			return tcu::TestStatus::pass("Pass, creating device with unsupported extensions was rejected.");
+		}
+		else
+			return tcu::TestStatus::fail("Fail, creating device with unsupported extensions succeeded.");
+	}
+}
+
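+// Return the largest queueCount reported by any queue family on the device.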
+deUint32 getGlobalMaxQueueCount(const vector<VkQueueFamilyProperties>& queueFamilyProperties)
+{
+	deUint32 maxQueueCount = 0;
+
+	for (deUint32 queueFamilyNdx = 0; queueFamilyNdx < (deUint32)queueFamilyProperties.size(); queueFamilyNdx++)
+	{
+		maxQueueCount = de::max(maxQueueCount, queueFamilyProperties[queueFamilyNdx].queueCount);
+	}
+
+	return maxQueueCount;
+}
+
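+// For each queue family, create devices requesting from one queue up to the family's maximum and
+// verify that every retrieved queue can be waited on with queueWaitIdle.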
+tcu::TestStatus createDeviceWithVariousQueueCountsTest (Context& context)
+{
+	tcu::TestLog&							log						= context.getTestContext().getLog();
+	const int								queueCountDiff			= 1;
+	const PlatformInterface&				platformInterface		= context.getPlatformInterface();
+	const Unique<VkInstance>				instance				(createDefaultInstance(platformInterface));
+	const InstanceDriver					instanceDriver			(platformInterface, instance.get());
+	const VkPhysicalDevice					physicalDevice			= chooseDevice(instanceDriver, instance.get(), context.getTestContext().getCommandLine());
+	const vector<VkQueueFamilyProperties>	queueFamilyProperties	= getPhysicalDeviceQueueFamilyProperties(instanceDriver, physicalDevice);
+	const vector<float>						queuePriorities			(getGlobalMaxQueueCount(queueFamilyProperties), 1.0f);
+	vector<VkDeviceQueueCreateInfo>			deviceQueueCreateInfos;
+
+	for (deUint32 queueFamilyNdx = 0; queueFamilyNdx < (deUint32)queueFamilyProperties.size(); queueFamilyNdx++)
+	{
+		const deUint32 maxQueueCount = queueFamilyProperties[queueFamilyNdx].queueCount;
+
+		for (deUint32 queueCount = 1; queueCount <= maxQueueCount; queueCount += queueCountDiff)
+		{
+			const VkDeviceQueueCreateInfo queueCreateInfo =
+			{
+				VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+				DE_NULL,
+				(VkDeviceQueueCreateFlags)0u,
+				queueFamilyNdx,
+				queueCount,
+				queuePriorities.data()
+			};
+
+			deviceQueueCreateInfos.push_back(queueCreateInfo);
+		}
+	}
+
+	for (size_t testNdx = 0; testNdx < deviceQueueCreateInfos.size(); testNdx++)
+	{
+		const VkDeviceQueueCreateInfo&	queueCreateInfo		= deviceQueueCreateInfos[testNdx];
+		const VkDeviceCreateInfo		deviceCreateInfo	=
+		{
+			VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,	//sType;
+			DE_NULL,								//pNext;
+			(VkDeviceCreateFlags)0u,
+			1,										//queueRecordCount;
+			&queueCreateInfo,						//pRequestedQueues;
+			0,										//layerCount;
+			DE_NULL,								//ppEnabledLayerNames;
+			0,										//extensionCount;
+			DE_NULL,								//ppEnabledExtensionNames;
+			DE_NULL,								//pEnabledFeatures;
+		};
+		const Unique<VkDevice>			device				(createDevice(instanceDriver, physicalDevice, &deviceCreateInfo));
+		const DeviceDriver				deviceDriver		(instanceDriver, device.get());
+		const deUint32					queueFamilyIndex	= deviceCreateInfo.pQueueCreateInfos->queueFamilyIndex;
+		const deUint32					queueCount			= deviceCreateInfo.pQueueCreateInfos->queueCount;
+
+		for (deUint32 queueIndex = 0; queueIndex < queueCount; queueIndex++)
+		{
+			VkQueue		queue;
+			VkResult	result;
+
+			deviceDriver.getDeviceQueue(device.get(), queueFamilyIndex, queueIndex, &queue);
+			TCU_CHECK(!!queue);
+
+			result = deviceDriver.queueWaitIdle(queue);
+			if (result != VK_SUCCESS)
+			{
+				log << TestLog::Message
+					<< "vkQueueWaitIdle failed"
+					<< ",  queueIndex = " << queueIndex
+					<< ", queueCreateInfo " << queueCreateInfo
+					<< ", Error Code: " << result
+					<< TestLog::EndMessage;
+				return tcu::TestStatus::fail("Fail");
+			}
+		}
+	}
+	return tcu::TestStatus::pass("Pass");
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createDeviceInitializationTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	deviceInitializationTests (new tcu::TestCaseGroup(testCtx, "device_init", "Device Initialization Tests"));
+
+	addFunctionCase(deviceInitializationTests.get(), "create_instance_name_version",			"", createInstanceTest);
+	addFunctionCase(deviceInitializationTests.get(), "create_instance_invalid_api_version",		"", createInstanceWithInvalidApiVersionTest);
+	addFunctionCase(deviceInitializationTests.get(), "create_instance_unsupported_extensions",	"", createInstanceWithUnsupportedExtensionsTest);
+	addFunctionCase(deviceInitializationTests.get(), "create_device",							"", createDeviceTest);
+	addFunctionCase(deviceInitializationTests.get(), "create_multiple_devices",					"", createMultipleDevicesTest);
+	addFunctionCase(deviceInitializationTests.get(), "create_device_unsupported_extensions",	"", createDeviceWithUnsupportedExtensionsTest);
+	addFunctionCase(deviceInitializationTests.get(), "create_device_various_queue_counts",		"", createDeviceWithVariousQueueCountsTest);
+
+	return deviceInitializationTests.release();
+}
+
+} // api
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/api/vktApiDeviceInitializationTests.hpp b/external/vulkancts/modules/vulkan/api/vktApiDeviceInitializationTests.hpp
new file mode 100644
index 0000000..c4f459e
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiDeviceInitializationTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTAPIDEVICEINITIALIZATIONTESTS_HPP
+#define _VKTAPIDEVICEINITIALIZATIONTESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Device Initialization tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace api
+{
+
+tcu::TestCaseGroup*		createDeviceInitializationTests		(tcu::TestContext& testCtx);
+
+} // api
+} // vkt
+
+#endif // _VKTAPIDEVICEINITIALIZATIONTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/api/vktApiFeatureInfo.cpp b/external/vulkancts/modules/vulkan/api/vktApiFeatureInfo.cpp
new file mode 100644
index 0000000..9d92653
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiFeatureInfo.cpp
@@ -0,0 +1,1323 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Api Feature Query tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktApiFeatureInfo.hpp"
+
+#include "vktTestCaseUtil.hpp"
+#include "vktTestGroupUtil.hpp"
+
+#include "vkPlatform.hpp"
+#include "vkStrUtil.hpp"
+#include "vkRef.hpp"
+#include "vkDeviceUtil.hpp"
+#include "vkQueryUtil.hpp"
+
+#include "tcuTestLog.hpp"
+#include "tcuFormatUtil.hpp"
+
+#include "deUniquePtr.hpp"
+#include "deStringUtil.hpp"
+#include "deSTLUtil.hpp"
+#include "deMemory.h"
+#include "deMath.h"
+
+namespace vkt
+{
+namespace api
+{
+namespace
+{
+
+using namespace vk;
+using std::vector;
+using std::string;
+using tcu::TestLog;
+using tcu::ScopedLogSection;
+
+enum
+{
+	GUARD_SIZE	= 0x20,			//!< Number of bytes to check
+	GUARD_VALUE	= 0xcd,			//!< Data pattern
+};
+
+enum LimitFormat
+{
+	LIMIT_FORMAT_SIGNED_INT,
+	LIMIT_FORMAT_UNSIGNED_INT,
+	LIMIT_FORMAT_FLOAT,
+	LIMIT_FORMAT_DEVICE_SIZE,
+
+	LIMIT_FORMAT_LAST
+};
+
+enum LimitType
+{
+	LIMIT_TYPE_MIN,
+	LIMIT_TYPE_MAX,
+
+	LIMIT_TYPE_LAST
+};
+
+#define LIMIT(_X_)		DE_OFFSET_OF(VkPhysicalDeviceLimits, _X_),(char*)(#_X_)
+#define FEATURE(_X_)	DE_OFFSET_OF(VkPhysicalDeviceFeatures, _X_)
+
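+// Validate the reported VkPhysicalDeviceLimits against the minimum/maximum values required by the
+// specification. When a limit depends on an optional feature that the device does not support, the
+// relaxed value from unsupportedFeatureTable is checked instead.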
+bool validateFeatureLimits(VkPhysicalDeviceProperties* properties, VkPhysicalDeviceFeatures* features, TestLog& log)
+{
+	bool					limitsOk	= true;
+	VkPhysicalDeviceLimits* limits		= &properties->limits;
+	struct FeatureLimitTable
+	{
+		deUint32		offset;
+		char*			name;
+		deUint32		uintVal;			//!< Format is UNSIGNED_INT
+		deInt32			intVal;				//!< Format is SIGNED_INT
+		deUint64		deviceSizeVal;		//!< Format is DEVICE_SIZE
+		float			floatVal;			//!< Format is FLOAT
+		LimitFormat		format;
+		LimitType		type;
+		deInt32			unsuppTableNdx;
+	} featureLimitTable[] =   //!< From gitlab.khronos.org/vulkan/vulkan.git:doc/specs/vulkan/chapters/features.txt@63b23f3bb3ecd211cd6e448e2001ce1088dacd35
+	{
+		{ LIMIT(maxImageDimension1D),								4096, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxImageDimension2D),								4096, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN , -1 },
+		{ LIMIT(maxImageDimension3D),								256, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxImageDimensionCube),								4096, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN , -1 },
+		{ LIMIT(maxImageArrayLayers),								256, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN   , -1 },
+		{ LIMIT(maxTexelBufferElements),							65536, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxUniformBufferRange),								16384, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxPushConstantsSize),								128, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxMemoryAllocationCount),							4096, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN , -1 },
+		{ LIMIT(bufferImageGranularity),							0, 0, 131072, 0.0f, LIMIT_FORMAT_DEVICE_SIZE, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(sparseAddressSpaceSize),							0, 0, 2UL*1024*1024*1024, 0.0f, LIMIT_FORMAT_DEVICE_SIZE, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxBoundDescriptorSets),							4, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxPerStageDescriptorSamplers),						16, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxPerStageDescriptorUniformBuffers),				12, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN , -1 },
+		{ LIMIT(maxPerStageDescriptorStorageBuffers),				4, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN , -1 },
+		{ LIMIT(maxPerStageDescriptorSampledImages),				16, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN , -1 },
+		{ LIMIT(maxPerStageDescriptorStorageImages),				4, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN , -1 },
+		{ LIMIT(maxPerStageDescriptorInputAttachments),				4, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN , -1 },
+		{ LIMIT(maxDescriptorSetSamplers),							96, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxDescriptorSetUniformBuffers),					72, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN , -1 },
+		{ LIMIT(maxDescriptorSetUniformBuffersDynamic),				8, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxDescriptorSetStorageBuffers),					24, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN , -1 },
+		{ LIMIT(maxDescriptorSetStorageBuffersDynamic),				4, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxDescriptorSetSampledImages),						96, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxDescriptorSetStorageImages),						24, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxVertexInputAttributes),							16, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxVertexInputBindings),							16, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxVertexInputAttributeOffset),						2047, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxVertexInputBindingStride),						2048, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxVertexOutputComponents),							64, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxTessellationGenerationLevel),					64, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxTessellationPatchSize),							32, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN , -1 },
+		{ LIMIT(maxTessellationControlPerVertexInputComponents),	64, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxTessellationControlPerVertexOutputComponents),	64, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxTessellationControlPerPatchOutputComponents),	120, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxTessellationControlTotalOutputComponents),		2048, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxTessellationEvaluationInputComponents),			64, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxTessellationEvaluationOutputComponents),			64, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxGeometryShaderInvocations),						32, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxGeometryInputComponents),						64, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxGeometryOutputComponents),						64, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxGeometryOutputVertices),							256, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxGeometryTotalOutputComponents),					1024, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxFragmentInputComponents),						64, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxFragmentOutputAttachments),						4, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxFragmentDualSrcAttachments),						1, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxFragmentCombinedOutputResources),				4, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN  , -1 },
+		{ LIMIT(maxComputeSharedMemorySize),						16384, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN   , -1 },
+		{ LIMIT(maxComputeWorkGroupCount[0]),						65535, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN   , -1 },
+		{ LIMIT(maxComputeWorkGroupCount[1]),						65535, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN   , -1 },
+		{ LIMIT(maxComputeWorkGroupCount[2]),						65535,  0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN   , -1 },
+		{ LIMIT(maxComputeWorkGroupInvocations),					128, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN    , -1 },
+		{ LIMIT(maxComputeWorkGroupSize[0]),						128, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN    , -1 },
+		{ LIMIT(maxComputeWorkGroupSize[1]),						128, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN    , -1 },
+		{ LIMIT(maxComputeWorkGroupSize[2]),						64, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN    , -1 },
+		{ LIMIT(subPixelPrecisionBits),								4, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN    , -1 },
+		{ LIMIT(subTexelPrecisionBits),								4, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN    , -1 },
+		{ LIMIT(mipmapPrecisionBits),								4, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN    , -1 },
+		{ LIMIT(maxDrawIndexedIndexValue),							(deUint32)~0, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxDrawIndirectCount),								65535, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN    , -1 },
+		{ LIMIT(maxSamplerLodBias),									0, 0, 0, 2.0f, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxSamplerAnisotropy),								0, 0, 0, 16.0f, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxViewports),										16, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxViewportDimensions[0]),							4096, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN , -1 },
+		{ LIMIT(maxViewportDimensions[1]),							4096, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN , -1 },
+		{ LIMIT(viewportBoundsRange[0]),							0, 0, 0, -8192.0f, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(viewportBoundsRange[1]),							0, 0, 0, 8191.0f, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(viewportSubPixelBits),								0, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(minMemoryMapAlignment),								64, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(minTexelBufferOffsetAlignment),						256, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(minUniformBufferOffsetAlignment),					256, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(minStorageBufferOffsetAlignment),					256, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(minTexelOffset),									0, -8, 0, 0.0f, LIMIT_FORMAT_SIGNED_INT, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(maxTexelOffset),									7, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(minTexelGatherOffset),								0, -8, 0, 0.0f, LIMIT_FORMAT_SIGNED_INT, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(maxTexelGatherOffset),								7, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(minInterpolationOffset),							0, 0, 0, -0.5f, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(maxInterpolationOffset),							0, 0, 0, 0.5f - (1.0f/deFloatPow(2.0f, (float)limits->subPixelInterpolationOffsetBits)), LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(subPixelInterpolationOffsetBits),					4, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxFramebufferWidth),								4096, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxFramebufferHeight),								4096, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxFramebufferLayers),								256, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxColorAttachments),								4, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxSampleMaskWords),								1, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxClipDistances),									8, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxCullDistances),									8, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(maxCombinedClipAndCullDistances),					8, 0, 0, 0.0f, LIMIT_FORMAT_UNSIGNED_INT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(pointSizeRange[0]),									0, 0, 0, 1.0f, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(pointSizeRange[1]),									0, 0, 0, 1.0f, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(pointSizeRange[0]),									0, 0, 0, 1.0f, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(pointSizeRange[1]),									0, 0, 0, 64.0f - limits->pointSizeGranularity , LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(lineWidthRange[0]),									0, 0, 0, 1.0f, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(lineWidthRange[1]),									0, 0, 0, 1.0f, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(lineWidthRange[0]),									0, 0, 0, 1.0f, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(lineWidthRange[1]),									0, 0, 0, 8.0f - limits->lineWidthGranularity, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MIN, -1 },
+		{ LIMIT(pointSizeGranularity),								0, 0, 0, 1.0f, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(lineWidthGranularity),								0, 0, 0, 1.0f, LIMIT_FORMAT_FLOAT, LIMIT_TYPE_MAX, -1 },
+		{ LIMIT(nonCoherentAtomSize),								0, 0, 128, 0.0f, LIMIT_FORMAT_DEVICE_SIZE, LIMIT_TYPE_MAX, -1 },
+	};
+
+	struct UnsupportedFeatureLimitTable
+	{
+		deUint32		limitOffset;
+		char*			name;
+		deUint32		featureOffset;
+		deUint32		uintVal;			//!< Format is UNSIGNED_INT
+		deInt32			intVal;				//!< Format is SIGNED_INT
+		deUint64		deviceSizeVal;		//!< Format is DEVICE_SIZE
+		float			floatVal;			//!< Format is FLOAT
+	} unsupportedFeatureTable[] =
+	{
+		{ LIMIT(sparseAddressSpaceSize),							FEATURE(sparseBinding),					0, 0, 0, 0.0f },
+		{ LIMIT(maxTessellationGenerationLevel),					FEATURE(tessellationShader),			0, 0, 0, 0.0f },
+		{ LIMIT(maxTessellationPatchSize),							FEATURE(tessellationShader),			0, 0, 0, 0.0f },
+		{ LIMIT(maxTessellationControlPerVertexInputComponents),	FEATURE(tessellationShader),			0, 0, 0, 0.0f },
+		{ LIMIT(maxTessellationControlPerVertexOutputComponents),	FEATURE(tessellationShader),			0, 0, 0, 0.0f },
+		{ LIMIT(maxTessellationControlPerPatchOutputComponents),	FEATURE(tessellationShader),			0, 0, 0, 0.0f },
+		{ LIMIT(maxTessellationControlTotalOutputComponents),		FEATURE(tessellationShader),			0, 0, 0, 0.0f },
+		{ LIMIT(maxTessellationEvaluationInputComponents),			FEATURE(tessellationShader),			0, 0, 0, 0.0f },
+		{ LIMIT(maxTessellationEvaluationOutputComponents),			FEATURE(tessellationShader),			0, 0, 0, 0.0f },
+		{ LIMIT(maxGeometryShaderInvocations),						FEATURE(geometryShader),				0, 0, 0, 0.0f },
+		{ LIMIT(maxGeometryInputComponents),						FEATURE(geometryShader),				0, 0, 0, 0.0f },
+		{ LIMIT(maxGeometryOutputComponents),						FEATURE(geometryShader),				0, 0, 0, 0.0f },
+		{ LIMIT(maxGeometryOutputVertices),							FEATURE(geometryShader),				0, 0, 0, 0.0f },
+		{ LIMIT(maxGeometryTotalOutputComponents),					FEATURE(geometryShader),				0, 0, 0, 0.0f },
+		{ LIMIT(maxFragmentDualSrcAttachments),						FEATURE(dualSrcBlend),					0, 0, 0, 0.0f },
+		{ LIMIT(maxDrawIndexedIndexValue),							FEATURE(fullDrawIndexUint32),			(1<<24)-1, 0, 0, 0.0f },
+		{ LIMIT(maxDrawIndirectCount),								FEATURE(multiDrawIndirect),				1, 0, 0, 0.0f },
+		{ LIMIT(maxSamplerAnisotropy),								FEATURE(samplerAnisotropy),				1, 0, 0, 0.0f },
+		{ LIMIT(maxViewports),										FEATURE(multiViewport),					1, 0, 0, 0.0f },
+		{ LIMIT(minTexelGatherOffset),								FEATURE(shaderImageGatherExtended),		0, 0, 0, 0.0f },
+		{ LIMIT(maxTexelGatherOffset),								FEATURE(shaderImageGatherExtended),		0, 0, 0, 0.0f },
+		{ LIMIT(minInterpolationOffset),							FEATURE(sampleRateShading),				0, 0, 0, 0.0f },
+		{ LIMIT(maxInterpolationOffset),							FEATURE(sampleRateShading),				0, 0, 0, 0.0f },
+		{ LIMIT(subPixelInterpolationOffsetBits),					FEATURE(sampleRateShading),				0, 0, 0, 0.0f },
+		{ LIMIT(storageImageSampleCounts),							FEATURE(shaderStorageImageMultisample),	0, 0, 0, 0.0f },
+		{ LIMIT(maxClipDistances),									FEATURE(shaderClipDistance),			0, 0, 0, 0.0f },
+		{ LIMIT(maxCullDistances),									FEATURE(shaderClipDistance),			0, 0, 0, 0.0f },
+		{ LIMIT(maxCombinedClipAndCullDistances),					FEATURE(shaderClipDistance),			0, 0, 0, 0.0f },
+		{ LIMIT(pointSizeRange[0]),									FEATURE(largePoints),					0, 0, 0, 1.0f },
+		{ LIMIT(pointSizeRange[1]),									FEATURE(largePoints),					0, 0, 0, 1.0f },
+		{ LIMIT(lineWidthRange[0]),									FEATURE(wideLines),						0, 0, 0, 1.0f },
+		{ LIMIT(lineWidthRange[1]),									FEATURE(wideLines),						0, 0, 0, 1.0f },
+		{ LIMIT(pointSizeGranularity),								FEATURE(largePoints),					0, 0, 0, 0.0f },
+		{ LIMIT(lineWidthGranularity),								FEATURE(wideLines),						0, 0, 0, 0.0f }
+	};
+
+	log << TestLog::Message << *limits << TestLog::EndMessage;
+
+	//!< First build a map from limit to unsupported table index
+	for (deUint32 ndx = 0; ndx < DE_LENGTH_OF_ARRAY(featureLimitTable); ndx++)
+	{
+		for (deUint32 unsuppNdx = 0; unsuppNdx < DE_LENGTH_OF_ARRAY(unsupportedFeatureTable); unsuppNdx++)
+		{
+			if (unsupportedFeatureTable[unsuppNdx].limitOffset == featureLimitTable[ndx].offset)
+			{
+				featureLimitTable[ndx].unsuppTableNdx = unsuppNdx;
+				break;
+			}
+		}
+	}
+
+	for (deUint32 ndx = 0; ndx < DE_LENGTH_OF_ARRAY(featureLimitTable); ndx++)
+	{
+		switch (featureLimitTable[ndx].format)
+		{
+			case LIMIT_FORMAT_UNSIGNED_INT:
+			{
+				deUint32 limitToCheck = featureLimitTable[ndx].uintVal;
+				if (featureLimitTable[ndx].unsuppTableNdx != -1)
+				{
+					if (*((VkBool32*)((char*)features+unsupportedFeatureTable[featureLimitTable[ndx].unsuppTableNdx].featureOffset)) == VK_FALSE)
+						limitToCheck = unsupportedFeatureTable[featureLimitTable[ndx].unsuppTableNdx].uintVal;
+				}
+
+				if (featureLimitTable[ndx].type == LIMIT_TYPE_MIN)
+				{
+					if (*((deUint32*)((char*)limits+featureLimitTable[ndx].offset)) < limitToCheck)
+					{
+						log << TestLog::Message << "limit validation failed, " << featureLimitTable[ndx].name
+							<< " not valid-limit type MIN - actual is "
+							<< *((deUint32*)((char*)limits + featureLimitTable[ndx].offset)) << TestLog::EndMessage;
+						limitsOk = false;
+					}
+				}
+				else
+				{
+					if (*((deUint32*)((char*)limits+featureLimitTable[ndx].offset)) > limitToCheck)
+					{
+						log << TestLog::Message << "limit validation failed,  " << featureLimitTable[ndx].name
+							<< " not valid-limit type MAX - actual is "
+							<< *((deUint32*)((char*)limits + featureLimitTable[ndx].offset)) << TestLog::EndMessage;
+						limitsOk = false;
+					}
+				}
+				break;
+			}
+
+			case LIMIT_FORMAT_FLOAT:
+			{
+				float limitToCheck = featureLimitTable[ndx].floatVal;
+				if (featureLimitTable[ndx].unsuppTableNdx != -1)
+				{
+					if (*((VkBool32*)((char*)features+unsupportedFeatureTable[featureLimitTable[ndx].unsuppTableNdx].featureOffset)) == VK_FALSE)
+						limitToCheck = unsupportedFeatureTable[featureLimitTable[ndx].unsuppTableNdx].floatVal;
+				}
+
+				if ( featureLimitTable[ndx].type == LIMIT_TYPE_MIN )
+				{
+					if (*((float*)((char*)limits+featureLimitTable[ndx].offset)) < limitToCheck)
+					{
+						log << TestLog::Message << "limit validation failed, " << featureLimitTable[ndx].name
+							<< " not valid-limit type MIN - actual is "
+							<< *((float*)((char*)limits + featureLimitTable[ndx].offset)) << TestLog::EndMessage;
+						limitsOk = false;
+					}
+				}
+				else
+				{
+					if (*((float*)((char*)limits+featureLimitTable[ndx].offset)) > limitToCheck)
+					{
+						log << TestLog::Message << "limit validation failed, " << featureLimitTable[ndx].name
+							<< " not valid-limit type MAX actual is "
+							<< *((float*)((char*)limits + featureLimitTable[ndx].offset)) << TestLog::EndMessage;
+						limitsOk = false;
+					}
+				}
+				break;
+			}
+
+			case LIMIT_FORMAT_SIGNED_INT:
+			{
+				deInt32 limitToCheck = featureLimitTable[ndx].intVal;
+				if (featureLimitTable[ndx].unsuppTableNdx != -1)
+				{
+					if (*((VkBool32*)((char*)features+unsupportedFeatureTable[featureLimitTable[ndx].unsuppTableNdx].featureOffset)) == VK_FALSE)
+						limitToCheck = unsupportedFeatureTable[featureLimitTable[ndx].unsuppTableNdx].intVal;
+				}
+				if (featureLimitTable[ndx].type == LIMIT_TYPE_MIN)
+				{
+					if (*((deInt32*)((char*)limits+featureLimitTable[ndx].offset)) < limitToCheck)
+					{
+						log << TestLog::Message <<  "limit validation failed, " << featureLimitTable[ndx].name
+							<< " not valid-limit type MIN actual is "
+							<< *((deInt32*)((char*)limits + featureLimitTable[ndx].offset)) << TestLog::EndMessage;
+						limitsOk = false;
+					}
+				}
+				else
+				{
+					if (*((deInt32*)((char*)limits+featureLimitTable[ndx].offset)) > limitToCheck)
+					{
+						log << TestLog::Message << "limit validation failed, " << featureLimitTable[ndx].name
+							<< " not valid-limit type MAX actual is "
+							<< *((deInt32*)((char*)limits + featureLimitTable[ndx].offset)) << TestLog::EndMessage;
+						limitsOk = false;
+					}
+				}
+				break;
+			}
+
+			case LIMIT_FORMAT_DEVICE_SIZE:
+			{
+				deUint64 limitToCheck = featureLimitTable[ndx].deviceSizeVal;
+				if (featureLimitTable[ndx].unsuppTableNdx != -1)
+				{
+					if (*((VkBool32*)((char*)features+unsupportedFeatureTable[featureLimitTable[ndx].unsuppTableNdx].featureOffset)) == VK_FALSE)
+						limitToCheck = unsupportedFeatureTable[featureLimitTable[ndx].unsuppTableNdx].deviceSizeVal;
+				}
+
+				if ( featureLimitTable[ndx].type == LIMIT_TYPE_MIN )
+				{
+					if (*((deUint64*)((char*)limits+featureLimitTable[ndx].offset)) < limitToCheck)
+					{
+						log << TestLog::Message << "limit validation failed, " << featureLimitTable[ndx].name
+							<< " not valid-limit type MIN actual is "
+							<< *((deUint64*)((char*)limits + featureLimitTable[ndx].offset)) << TestLog::EndMessage;
+						limitsOk = false;
+					}
+				}
+				else
+				{
+					if (*((deUint64*)((char*)limits+featureLimitTable[ndx].offset)) > limitToCheck)
+					{
+						log << TestLog::Message << "limit validation failed, " << featureLimitTable[ndx].name
+							<< " not valid-limit type MAX actual is "
+							<< *((deUint64*)((char*)limits + featureLimitTable[ndx].offset)) << TestLog::EndMessage;
+						limitsOk = false;
+					}
+				}
+				break;
+			}
+
+			default:
+				DE_ASSERT(0);
+				limitsOk = false;
+		}
+	}
+
+	return limitsOk;
+}
+
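+// Enumerate the available physical devices and log the handle of each one.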
+tcu::TestStatus enumeratePhysicalDevices (Context& context)
+{
+	TestLog&						log		= context.getTestContext().getLog();
+	const vector<VkPhysicalDevice>	devices	= enumeratePhysicalDevices(context.getInstanceInterface(), context.getInstance());
+
+	log << TestLog::Integer("NumDevices", "Number of devices", "", QP_KEY_TAG_NONE, deInt64(devices.size()));
+
+	for (size_t ndx = 0; ndx < devices.size(); ndx++)
+		log << TestLog::Message << ndx << ": " << devices[ndx] << TestLog::EndMessage;
+
+	return tcu::TestStatus::pass("Enumerating devices succeeded");
+}
+
+tcu::TestStatus enumerateInstanceLayers (Context& context)
+{
+	TestLog&						log			= context.getTestContext().getLog();
+	const vector<VkLayerProperties>	properties	= enumerateInstanceLayerProperties(context.getPlatformInterface());
+
+	for (size_t ndx = 0; ndx < properties.size(); ndx++)
+		log << TestLog::Message << ndx << ": " << properties[ndx] << TestLog::EndMessage;
+
+	return tcu::TestStatus::pass("Enumerating layers succeeded");
+}
+
+tcu::TestStatus enumerateInstanceExtensions (Context& context)
+{
+	TestLog&	log		= context.getTestContext().getLog();
+
+	{
+		const ScopedLogSection				section		(log, "Global", "Global Extensions");
+		const vector<VkExtensionProperties>	properties	= enumerateInstanceExtensionProperties(context.getPlatformInterface(), DE_NULL);
+
+		for (size_t ndx = 0; ndx < properties.size(); ndx++)
+			log << TestLog::Message << ndx << ": " << properties[ndx] << TestLog::EndMessage;
+	}
+
+	{
+		const vector<VkLayerProperties>	layers	= enumerateInstanceLayerProperties(context.getPlatformInterface());
+
+		for (vector<VkLayerProperties>::const_iterator layer = layers.begin(); layer != layers.end(); ++layer)
+		{
+			const ScopedLogSection				section		(log, layer->layerName, string("Layer: ") + layer->layerName);
+			const vector<VkExtensionProperties>	properties	= enumerateInstanceExtensionProperties(context.getPlatformInterface(), layer->layerName);
+
+			for (size_t extNdx = 0; extNdx < properties.size(); extNdx++)
+				log << TestLog::Message << extNdx << ": " << properties[extNdx] << TestLog::EndMessage;
+		}
+	}
+
+	return tcu::TestStatus::pass("Enumerating extensions succeeded");
+}
+
+tcu::TestStatus enumerateDeviceLayers (Context& context)
+{
+	TestLog&						log			= context.getTestContext().getLog();
+	const vector<VkLayerProperties>	properties	= vk::enumerateDeviceLayerProperties(context.getInstanceInterface(), context.getPhysicalDevice());
+
+	for (size_t ndx = 0; ndx < properties.size(); ndx++)
+		log << TestLog::Message << ndx << ": " << properties[ndx] << TestLog::EndMessage;
+
+	return tcu::TestStatus::pass("Enumerating layers succeeded");
+}
+
+tcu::TestStatus enumerateDeviceExtensions (Context& context)
+{
+	TestLog&	log		= context.getTestContext().getLog();
+
+	{
+		const ScopedLogSection				section		(log, "Global", "Global Extensions");
+		const vector<VkExtensionProperties>	properties	= enumerateDeviceExtensionProperties(context.getInstanceInterface(), context.getPhysicalDevice(), DE_NULL);
+
+		for (size_t ndx = 0; ndx < properties.size(); ndx++)
+			log << TestLog::Message << ndx << ": " << properties[ndx] << TestLog::EndMessage;
+	}
+
+	{
+		const vector<VkLayerProperties>	layers	= enumerateDeviceLayerProperties(context.getInstanceInterface(), context.getPhysicalDevice());
+
+		for (vector<VkLayerProperties>::const_iterator layer = layers.begin(); layer != layers.end(); ++layer)
+		{
+			const ScopedLogSection				section		(log, layer->layerName, string("Layer: ") + layer->layerName);
+			const vector<VkExtensionProperties>	properties	= enumerateDeviceExtensionProperties(context.getInstanceInterface(), context.getPhysicalDevice(), layer->layerName);
+
+			for (size_t extNdx = 0; extNdx < properties.size(); extNdx++)
+				log << TestLog::Message << extNdx << ": " << properties[extNdx] << TestLog::EndMessage;
+		}
+	}
+
+	return tcu::TestStatus::pass("Enumerating extensions succeeded");
+}
+
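+// Query physical device features into a buffer padded with GUARD_SIZE bytes of GUARD_VALUE and
+// verify that the query did not write past the end of the structure.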
+tcu::TestStatus deviceFeatures (Context& context)
+{
+	TestLog&						log			= context.getTestContext().getLog();
+	VkPhysicalDeviceFeatures*		features;
+	deUint8							buffer[sizeof(VkPhysicalDeviceFeatures) + GUARD_SIZE];
+
+	deMemset(buffer, GUARD_VALUE, sizeof(buffer));
+	features = reinterpret_cast<VkPhysicalDeviceFeatures*>(buffer);
+
+	context.getInstanceInterface().getPhysicalDeviceFeatures(context.getPhysicalDevice(), features);
+
+	log << TestLog::Message << "device = " << context.getPhysicalDevice() << TestLog::EndMessage
+		<< TestLog::Message << *features << TestLog::EndMessage;
+
+	for (int ndx = 0; ndx < GUARD_SIZE; ndx++)
+	{
+		if (buffer[ndx + sizeof(VkPhysicalDeviceFeatures)] != GUARD_VALUE)
+		{
+			log << TestLog::Message << "deviceFeatures - Guard offset " << ndx << " not valid" << TestLog::EndMessage;
+			return tcu::TestStatus::fail("deviceFeatures buffer overflow");
+		}
+	}
+
+	return tcu::TestStatus::pass("Query succeeded");
+}
+
+tcu::TestStatus deviceProperties (Context& context)
+{
+	TestLog&						log			= context.getTestContext().getLog();
+	VkPhysicalDeviceProperties*		props;
+	VkPhysicalDeviceFeatures		features;
+	deUint8							buffer[sizeof(VkPhysicalDeviceProperties) + GUARD_SIZE];
+
+	props = reinterpret_cast<VkPhysicalDeviceProperties*>(buffer);
+	deMemset(props, GUARD_VALUE, sizeof(buffer));
+
+	context.getInstanceInterface().getPhysicalDeviceProperties(context.getPhysicalDevice(), props);
+	context.getInstanceInterface().getPhysicalDeviceFeatures(context.getPhysicalDevice(), &features);
+
+	log << TestLog::Message << "device = " << context.getPhysicalDevice() << TestLog::EndMessage
+		<< TestLog::Message << *props << TestLog::EndMessage;
+
+	if (!validateFeatureLimits(props, &features, log))
+		return tcu::TestStatus::fail("deviceProperties - feature limits failed");
+
+	for (int ndx = 0; ndx < GUARD_SIZE; ndx++)
+	{
+		if (buffer[ndx + sizeof(VkPhysicalDeviceProperties)] != GUARD_VALUE)
+		{
+			log << TestLog::Message << "deviceProperties - Guard offset " << ndx << " not valid" << TestLog::EndMessage;
+			return tcu::TestStatus::fail("deviceProperties buffer overflow");
+		}
+	}
+
+	return tcu::TestStatus::pass("DeviceProperties query succeeded");
+}
+
+tcu::TestStatus deviceQueueFamilyProperties (Context& context)
+{
+	TestLog&								log					= context.getTestContext().getLog();
+	const vector<VkQueueFamilyProperties>	queueProperties		= getPhysicalDeviceQueueFamilyProperties(context.getInstanceInterface(), context.getPhysicalDevice());
+
+	log << TestLog::Message << "device = " << context.getPhysicalDevice() << TestLog::EndMessage;
+
+	for (size_t queueNdx = 0; queueNdx < queueProperties.size(); queueNdx++)
+		log << TestLog::Message << queueNdx << ": " << queueProperties[queueNdx] << TestLog::EndMessage;
+
+	return tcu::TestStatus::pass("Querying queue properties succeeded");
+}
+
+tcu::TestStatus deviceMemoryProperties (Context& context)
+{
+	TestLog&							log			= context.getTestContext().getLog();
+	VkPhysicalDeviceMemoryProperties*	memProps;
+	deUint8								buffer[sizeof(VkPhysicalDeviceMemoryProperties) + GUARD_SIZE];
+
+	memProps = reinterpret_cast<VkPhysicalDeviceMemoryProperties*>(buffer);
+	deMemset(buffer, GUARD_VALUE, sizeof(buffer));
+
+	context.getInstanceInterface().getPhysicalDeviceMemoryProperties(context.getPhysicalDevice(), memProps);
+
+	log << TestLog::Message << "device = " << context.getPhysicalDevice() << TestLog::EndMessage
+		<< TestLog::Message << *memProps << TestLog::EndMessage;
+
+	for (deInt32 ndx = 0; ndx < GUARD_SIZE; ndx++)
+	{
+		if (buffer[ndx + sizeof(VkPhysicalDeviceMemoryProperties)] != GUARD_VALUE)
+		{
+			log << TestLog::Message << "deviceMemoryProperties - Guard offset " << ndx << " not valid" << TestLog::EndMessage;
+			return tcu::TestStatus::fail("deviceMemoryProperties buffer overflow");
+		}
+	}
+
+	return tcu::TestStatus::pass("Querying memory properties succeeded");
+}
+
+// \todo [2016-01-22 pyry] Optimize by doing format -> flags mapping instead
+
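+// Return the optimal-tiling format feature flags that the specification requires for the given format.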
+VkFormatFeatureFlags getRequiredOptimalTilingFeatures (VkFormat format)
+{
+	static const VkFormat s_requiredSampledImageBlitSrcFormats[] =
+	{
+		VK_FORMAT_B4G4R4A4_UNORM_PACK16,
+		VK_FORMAT_R5G6B5_UNORM_PACK16,
+		VK_FORMAT_A1R5G5B5_UNORM_PACK16,
+		VK_FORMAT_R8_UNORM,
+		VK_FORMAT_R8_SNORM,
+		VK_FORMAT_R8_UINT,
+		VK_FORMAT_R8_SINT,
+		VK_FORMAT_R8G8_UNORM,
+		VK_FORMAT_R8G8_SNORM,
+		VK_FORMAT_R8G8_UINT,
+		VK_FORMAT_R8G8_SINT,
+		VK_FORMAT_R8G8B8A8_UNORM,
+		VK_FORMAT_R8G8B8A8_SNORM,
+		VK_FORMAT_R8G8B8A8_UINT,
+		VK_FORMAT_R8G8B8A8_SINT,
+		VK_FORMAT_R8G8B8A8_SRGB,
+		VK_FORMAT_B8G8R8A8_UNORM,
+		VK_FORMAT_B8G8R8A8_SRGB,
+		VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+		VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+		VK_FORMAT_A8B8G8R8_UINT_PACK32,
+		VK_FORMAT_A8B8G8R8_SINT_PACK32,
+		VK_FORMAT_A8B8G8R8_SRGB_PACK32,
+		VK_FORMAT_A2R10G10B10_UNORM_PACK32,
+		VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+		VK_FORMAT_A2B10G10R10_UINT_PACK32,
+		VK_FORMAT_R16_UNORM,
+		VK_FORMAT_R16_SNORM,
+		VK_FORMAT_R16_UINT,
+		VK_FORMAT_R16_SINT,
+		VK_FORMAT_R16_SFLOAT,
+		VK_FORMAT_R16G16_UNORM,
+		VK_FORMAT_R16G16_SNORM,
+		VK_FORMAT_R16G16_UINT,
+		VK_FORMAT_R16G16_SINT,
+		VK_FORMAT_R16G16_SFLOAT,
+		VK_FORMAT_R16G16B16A16_UNORM,
+		VK_FORMAT_R16G16B16A16_SNORM,
+		VK_FORMAT_R16G16B16A16_UINT,
+		VK_FORMAT_R16G16B16A16_SINT,
+		VK_FORMAT_R16G16B16A16_SFLOAT,
+		VK_FORMAT_R32_UINT,
+		VK_FORMAT_R32_SINT,
+		VK_FORMAT_R32_SFLOAT,
+		VK_FORMAT_R32G32_UINT,
+		VK_FORMAT_R32G32_SINT,
+		VK_FORMAT_R32G32_SFLOAT,
+		VK_FORMAT_R32G32B32A32_UINT,
+		VK_FORMAT_R32G32B32A32_SINT,
+		VK_FORMAT_R32G32B32A32_SFLOAT,
+		VK_FORMAT_B10G11R11_UFLOAT_PACK32,
+		VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
+		VK_FORMAT_D16_UNORM,
+		VK_FORMAT_D32_SFLOAT
+	};
+	static const VkFormat s_requiredStorageImageFormats[] =
+	{
+		VK_FORMAT_R8G8B8A8_UNORM,
+		VK_FORMAT_R8G8B8A8_SNORM,
+		VK_FORMAT_R8G8B8A8_UINT,
+		VK_FORMAT_R8G8B8A8_SINT,
+		VK_FORMAT_R16G16B16A16_UINT,
+		VK_FORMAT_R16G16B16A16_SINT,
+		VK_FORMAT_R16G16B16A16_SFLOAT,
+		VK_FORMAT_R32_UINT,
+		VK_FORMAT_R32_SINT,
+		VK_FORMAT_R32_SFLOAT,
+		VK_FORMAT_R32G32_UINT,
+		VK_FORMAT_R32G32_SINT,
+		VK_FORMAT_R32G32_SFLOAT,
+		VK_FORMAT_R32G32B32A32_UINT,
+		VK_FORMAT_R32G32B32A32_SINT,
+		VK_FORMAT_R32G32B32A32_SFLOAT
+	};
+	static const VkFormat s_requiredStorageImageAtomicFormats[] =
+	{
+		VK_FORMAT_R32_UINT,
+		VK_FORMAT_R32_SINT
+	};
+	static const VkFormat s_requiredColorAttachmentBlitDstFormats[] =
+	{
+		VK_FORMAT_R5G6B5_UNORM_PACK16,
+		VK_FORMAT_A1R5G5B5_UNORM_PACK16,
+		VK_FORMAT_R8_UNORM,
+		VK_FORMAT_R8_SNORM,
+		VK_FORMAT_R8_UINT,
+		VK_FORMAT_R8_SINT,
+		VK_FORMAT_R8G8_UNORM,
+		VK_FORMAT_R8G8_SNORM,
+		VK_FORMAT_R8G8_UINT,
+		VK_FORMAT_R8G8_SINT,
+		VK_FORMAT_R8G8B8A8_UNORM,
+		VK_FORMAT_R8G8B8A8_SNORM,
+		VK_FORMAT_R8G8B8A8_UINT,
+		VK_FORMAT_R8G8B8A8_SINT,
+		VK_FORMAT_R8G8B8A8_SRGB,
+		VK_FORMAT_B8G8R8A8_UNORM,
+		VK_FORMAT_B8G8R8A8_SRGB,
+		VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+		VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+		VK_FORMAT_A8B8G8R8_UINT_PACK32,
+		VK_FORMAT_A8B8G8R8_SINT_PACK32,
+		VK_FORMAT_A8B8G8R8_SRGB_PACK32,
+		VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+		VK_FORMAT_A2B10G10R10_UINT_PACK32,
+		VK_FORMAT_R16_UNORM,
+		VK_FORMAT_R16_SNORM,
+		VK_FORMAT_R16_UINT,
+		VK_FORMAT_R16_SINT,
+		VK_FORMAT_R16_SFLOAT,
+		VK_FORMAT_R16G16_UNORM,
+		VK_FORMAT_R16G16_SNORM,
+		VK_FORMAT_R16G16_UINT,
+		VK_FORMAT_R16G16_SINT,
+		VK_FORMAT_R16G16_SFLOAT,
+		VK_FORMAT_R16G16B16A16_UNORM,
+		VK_FORMAT_R16G16B16A16_SNORM,
+		VK_FORMAT_R16G16B16A16_UINT,
+		VK_FORMAT_R16G16B16A16_SINT,
+		VK_FORMAT_R16G16B16A16_SFLOAT,
+		VK_FORMAT_R32_UINT,
+		VK_FORMAT_R32_SINT,
+		VK_FORMAT_R32_SFLOAT,
+		VK_FORMAT_R32G32_UINT,
+		VK_FORMAT_R32G32_SINT,
+		VK_FORMAT_R32G32_SFLOAT,
+		VK_FORMAT_R32G32B32A32_UINT,
+		VK_FORMAT_R32G32B32A32_SINT,
+		VK_FORMAT_R32G32B32A32_SFLOAT
+	};
+	static const VkFormat s_requiredColorAttachmentBlendFormats[] =
+	{
+		VK_FORMAT_R5G6B5_UNORM_PACK16,
+		VK_FORMAT_A1R5G5B5_UNORM_PACK16,
+		VK_FORMAT_R8_UNORM,
+		VK_FORMAT_R8_SNORM,
+		VK_FORMAT_R8G8_UNORM,
+		VK_FORMAT_R8G8_SNORM,
+		VK_FORMAT_R8G8B8A8_UNORM,
+		VK_FORMAT_R8G8B8A8_SNORM,
+		VK_FORMAT_R8G8B8A8_SRGB,
+		VK_FORMAT_B8G8R8A8_UNORM,
+		VK_FORMAT_B8G8R8A8_SRGB,
+		VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+		VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+		VK_FORMAT_A8B8G8R8_SRGB_PACK32,
+		VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+		VK_FORMAT_R16_UNORM,
+		VK_FORMAT_R16_SNORM,
+		VK_FORMAT_R16_SFLOAT,
+		VK_FORMAT_R16G16_UNORM,
+		VK_FORMAT_R16G16_SNORM,
+		VK_FORMAT_R16G16_SFLOAT,
+		VK_FORMAT_R16G16B16A16_UNORM,
+		VK_FORMAT_R16G16B16A16_SNORM,
+		VK_FORMAT_R16G16B16A16_SFLOAT
+	};
+	static const VkFormat s_requiredDepthStencilAttachmentFormats[] =
+	{
+		VK_FORMAT_D16_UNORM
+	};
+
+	VkFormatFeatureFlags	flags	= (VkFormatFeatureFlags)0;
+
+	if (de::contains(DE_ARRAY_BEGIN(s_requiredSampledImageBlitSrcFormats), DE_ARRAY_END(s_requiredSampledImageBlitSrcFormats), format))
+		flags |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT|VK_FORMAT_FEATURE_BLIT_SRC_BIT;
+
+	if (de::contains(DE_ARRAY_BEGIN(s_requiredStorageImageFormats), DE_ARRAY_END(s_requiredStorageImageFormats), format))
+		flags |= VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT;
+
+	if (de::contains(DE_ARRAY_BEGIN(s_requiredStorageImageAtomicFormats), DE_ARRAY_END(s_requiredStorageImageAtomicFormats), format))
+		flags |= VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT;
+
+	if (de::contains(DE_ARRAY_BEGIN(s_requiredColorAttachmentBlitDstFormats), DE_ARRAY_END(s_requiredColorAttachmentBlitDstFormats), format))
+		flags |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT|VK_FORMAT_FEATURE_BLIT_DST_BIT;
+
+	if (de::contains(DE_ARRAY_BEGIN(s_requiredColorAttachmentBlendFormats), DE_ARRAY_END(s_requiredColorAttachmentBlendFormats), format))
+		flags |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT;
+
+	if (de::contains(DE_ARRAY_BEGIN(s_requiredDepthStencilAttachmentFormats), DE_ARRAY_END(s_requiredDepthStencilAttachmentFormats), format))
+		flags |= VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT;
+
+	return flags;
+}
+
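+// Return the buffer format feature flags that the specification requires for the given format.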
+VkFormatFeatureFlags getRequiredBufferFeatures (VkFormat format)
+{
+	static const VkFormat s_requiredVertexBufferFormats[] =
+	{
+		VK_FORMAT_R8_UNORM,
+		VK_FORMAT_R8_SNORM,
+		VK_FORMAT_R8_UINT,
+		VK_FORMAT_R8_SINT,
+		VK_FORMAT_R8G8_UNORM,
+		VK_FORMAT_R8G8_SNORM,
+		VK_FORMAT_R8G8_UINT,
+		VK_FORMAT_R8G8_SINT,
+		VK_FORMAT_R8G8B8A8_UNORM,
+		VK_FORMAT_R8G8B8A8_SNORM,
+		VK_FORMAT_R8G8B8A8_UINT,
+		VK_FORMAT_R8G8B8A8_SINT,
+		VK_FORMAT_B8G8R8A8_UNORM,
+		VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+		VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+		VK_FORMAT_A8B8G8R8_UINT_PACK32,
+		VK_FORMAT_A8B8G8R8_SINT_PACK32,
+		VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+		VK_FORMAT_R16_UNORM,
+		VK_FORMAT_R16_SNORM,
+		VK_FORMAT_R16_UINT,
+		VK_FORMAT_R16_SINT,
+		VK_FORMAT_R16_SFLOAT,
+		VK_FORMAT_R16G16_UNORM,
+		VK_FORMAT_R16G16_SNORM,
+		VK_FORMAT_R16G16_UINT,
+		VK_FORMAT_R16G16_SINT,
+		VK_FORMAT_R16G16_SFLOAT,
+		VK_FORMAT_R16G16B16A16_UNORM,
+		VK_FORMAT_R16G16B16A16_SNORM,
+		VK_FORMAT_R16G16B16A16_UINT,
+		VK_FORMAT_R16G16B16A16_SINT,
+		VK_FORMAT_R16G16B16A16_SFLOAT,
+		VK_FORMAT_R32_UINT,
+		VK_FORMAT_R32_SINT,
+		VK_FORMAT_R32_SFLOAT,
+		VK_FORMAT_R32G32_UINT,
+		VK_FORMAT_R32G32_SINT,
+		VK_FORMAT_R32G32_SFLOAT,
+		VK_FORMAT_R32G32B32_UINT,
+		VK_FORMAT_R32G32B32_SINT,
+		VK_FORMAT_R32G32B32_SFLOAT,
+		VK_FORMAT_R32G32B32A32_UINT,
+		VK_FORMAT_R32G32B32A32_SINT,
+		VK_FORMAT_R32G32B32A32_SFLOAT
+	};
+	static const VkFormat s_requiredUniformTexelBufferFormats[] =
+	{
+		VK_FORMAT_R8_UNORM,
+		VK_FORMAT_R8_SNORM,
+		VK_FORMAT_R8_UINT,
+		VK_FORMAT_R8_SINT,
+		VK_FORMAT_R8G8_UNORM,
+		VK_FORMAT_R8G8_SNORM,
+		VK_FORMAT_R8G8_UINT,
+		VK_FORMAT_R8G8_SINT,
+		VK_FORMAT_R8G8B8A8_UNORM,
+		VK_FORMAT_R8G8B8A8_SNORM,
+		VK_FORMAT_R8G8B8A8_UINT,
+		VK_FORMAT_R8G8B8A8_SINT,
+		VK_FORMAT_B8G8R8A8_UNORM,
+		VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+		VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+		VK_FORMAT_A8B8G8R8_UINT_PACK32,
+		VK_FORMAT_A8B8G8R8_SINT_PACK32,
+		VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+		VK_FORMAT_A2B10G10R10_UINT_PACK32,
+		VK_FORMAT_R16_UNORM,
+		VK_FORMAT_R16_SNORM,
+		VK_FORMAT_R16_UINT,
+		VK_FORMAT_R16_SINT,
+		VK_FORMAT_R16_SFLOAT,
+		VK_FORMAT_R16G16_UNORM,
+		VK_FORMAT_R16G16_SNORM,
+		VK_FORMAT_R16G16_UINT,
+		VK_FORMAT_R16G16_SINT,
+		VK_FORMAT_R16G16_SFLOAT,
+		VK_FORMAT_R16G16B16A16_UNORM,
+		VK_FORMAT_R16G16B16A16_SNORM,
+		VK_FORMAT_R16G16B16A16_UINT,
+		VK_FORMAT_R16G16B16A16_SINT,
+		VK_FORMAT_R16G16B16A16_SFLOAT,
+		VK_FORMAT_R32_UINT,
+		VK_FORMAT_R32_SINT,
+		VK_FORMAT_R32_SFLOAT,
+		VK_FORMAT_R32G32_UINT,
+		VK_FORMAT_R32G32_SINT,
+		VK_FORMAT_R32G32_SFLOAT,
+		VK_FORMAT_R32G32B32A32_UINT,
+		VK_FORMAT_R32G32B32A32_SINT,
+		VK_FORMAT_R32G32B32A32_SFLOAT,
+		VK_FORMAT_B10G11R11_UFLOAT_PACK32
+	};
+	static const VkFormat s_requiredStorageTexelBufferFormats[] =
+	{
+		VK_FORMAT_R8G8B8A8_UNORM,
+		VK_FORMAT_R8G8B8A8_SNORM,
+		VK_FORMAT_R8G8B8A8_UINT,
+		VK_FORMAT_R8G8B8A8_SINT,
+		VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+		VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+		VK_FORMAT_A8B8G8R8_UINT_PACK32,
+		VK_FORMAT_A8B8G8R8_SINT_PACK32,
+		VK_FORMAT_R16G16B16A16_UINT,
+		VK_FORMAT_R16G16B16A16_SINT,
+		VK_FORMAT_R16G16B16A16_SFLOAT,
+		VK_FORMAT_R32_UINT,
+		VK_FORMAT_R32_SINT,
+		VK_FORMAT_R32_SFLOAT,
+		VK_FORMAT_R32G32_UINT,
+		VK_FORMAT_R32G32_SINT,
+		VK_FORMAT_R32G32_SFLOAT,
+		VK_FORMAT_R32G32B32A32_UINT,
+		VK_FORMAT_R32G32B32A32_SINT,
+		VK_FORMAT_R32G32B32A32_SFLOAT
+	};
+	static const VkFormat s_requiredStorageTexelBufferAtomicFormats[] =
+	{
+		VK_FORMAT_R32_UINT,
+		VK_FORMAT_R32_SINT
+	};
+
+	VkFormatFeatureFlags	flags	= (VkFormatFeatureFlags)0;
+
+	if (de::contains(DE_ARRAY_BEGIN(s_requiredVertexBufferFormats), DE_ARRAY_END(s_requiredVertexBufferFormats), format))
+		flags |= VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT;
+
+	if (de::contains(DE_ARRAY_BEGIN(s_requiredUniformTexelBufferFormats), DE_ARRAY_END(s_requiredUniformTexelBufferFormats), format))
+		flags |= VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT;
+
+	if (de::contains(DE_ARRAY_BEGIN(s_requiredStorageTexelBufferFormats), DE_ARRAY_END(s_requiredStorageTexelBufferFormats), format))
+		flags |= VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT;
+
+	if (de::contains(DE_ARRAY_BEGIN(s_requiredStorageTexelBufferAtomicFormats), DE_ARRAY_END(s_requiredStorageTexelBufferAtomicFormats), format))
+		flags |= VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT;
+
+	return flags;
+}
+
+tcu::TestStatus formatProperties (Context& context, VkFormat format)
+{
+	TestLog&					log				= context.getTestContext().getLog();
+	const VkFormatProperties	properties		= getPhysicalDeviceFormatProperties(context.getInstanceInterface(), context.getPhysicalDevice(), format);
+	bool						allOk			= true;
+
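+	// Check each VkFormatProperties field against the feature bits the test requires
+	// for this format; nothing is required of linearTilingFeatures here.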
+	const struct
+	{
+		VkFormatFeatureFlags VkFormatProperties::*	field;
+		const char*									fieldName;
+		VkFormatFeatureFlags						requiredFeatures;
+	} fields[] =
+	{
+		{ &VkFormatProperties::linearTilingFeatures,	"linearTilingFeatures",		(VkFormatFeatureFlags)0						},
+		{ &VkFormatProperties::optimalTilingFeatures,	"optimalTilingFeatures",	getRequiredOptimalTilingFeatures(format)	},
+		{ &VkFormatProperties::bufferFeatures,			"bufferFeatures",			getRequiredBufferFeatures(format)			}
+	};
+
+	log << TestLog::Message << properties << TestLog::EndMessage;
+
+	for (int fieldNdx = 0; fieldNdx < DE_LENGTH_OF_ARRAY(fields); fieldNdx++)
+	{
+		const char* const				fieldName	= fields[fieldNdx].fieldName;
+		const VkFormatFeatureFlags		supported	= properties.*fields[fieldNdx].field;
+		const VkFormatFeatureFlags		required	= fields[fieldNdx].requiredFeatures;
+
+		if ((supported & required) != required)
+		{
+			log << TestLog::Message << "ERROR in " << fieldName << ":\n"
+									<< "  required: " << getFormatFeatureFlagsStr(required) << "\n"
+									<< "  missing: " << getFormatFeatureFlagsStr(~supported & required)
+				<< TestLog::EndMessage;
+			allOk = false;
+		}
+	}
+
+	if (allOk)
+		return tcu::TestStatus::pass("Query and validation passed");
+	else
+		return tcu::TestStatus::fail("Required features not supported");
+}
+
+bool optimalTilingFeaturesSupported (Context& context, VkFormat format, VkFormatFeatureFlags features)
+{
+	const VkFormatProperties	properties	= getPhysicalDeviceFormatProperties(context.getInstanceInterface(), context.getPhysicalDevice(), format);
+
+	return (properties.optimalTilingFeatures & features) == features;
+}
+
+bool optimalTilingFeaturesSupportedForAll (Context& context, const VkFormat* begin, const VkFormat* end, VkFormatFeatureFlags features)
+{
+	for (const VkFormat* cur = begin; cur != end; ++cur)
+	{
+		if (!optimalTilingFeaturesSupported(context, *cur, features))
+			return false;
+	}
+
+	return true;
+}
+
+tcu::TestStatus testDepthStencilSupported (Context& context)
+{
+	if (!optimalTilingFeaturesSupported(context, VK_FORMAT_X8_D24_UNORM_PACK32, VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) &&
+		!optimalTilingFeaturesSupported(context, VK_FORMAT_D32_SFLOAT, VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
+		return tcu::TestStatus::fail("Neither VK_FORMAT_X8_D24_UNORM_PACK32 nor VK_FORMAT_D32_SFLOAT is supported");
+
+	if (!optimalTilingFeaturesSupported(context, VK_FORMAT_D24_UNORM_S8_UINT, VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) &&
+		!optimalTilingFeaturesSupported(context, VK_FORMAT_D32_SFLOAT_S8_UINT, VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT))
+		return tcu::TestStatus::fail("Neither VK_FORMAT_D24_UNORM_S8_UINT nor VK_FORMAT_D32_SFLOAT_S8_UINT is supported");
+
+	return tcu::TestStatus::pass("Required depth/stencil formats supported");
+}
+
+tcu::TestStatus testCompressedFormatsSupported (Context& context)
+{
+	static const VkFormat s_allBcFormats[] =
+	{
+		VK_FORMAT_BC1_RGB_UNORM_BLOCK,
+		VK_FORMAT_BC1_RGB_SRGB_BLOCK,
+		VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
+		VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
+		VK_FORMAT_BC2_UNORM_BLOCK,
+		VK_FORMAT_BC2_SRGB_BLOCK,
+		VK_FORMAT_BC3_UNORM_BLOCK,
+		VK_FORMAT_BC3_SRGB_BLOCK,
+		VK_FORMAT_BC4_UNORM_BLOCK,
+		VK_FORMAT_BC4_SNORM_BLOCK,
+		VK_FORMAT_BC5_UNORM_BLOCK,
+		VK_FORMAT_BC5_SNORM_BLOCK,
+		VK_FORMAT_BC6H_UFLOAT_BLOCK,
+		VK_FORMAT_BC6H_SFLOAT_BLOCK,
+		VK_FORMAT_BC7_UNORM_BLOCK,
+		VK_FORMAT_BC7_SRGB_BLOCK,
+	};
+	static const VkFormat s_allEtcEacFormats[] =
+	{
+		VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
+		VK_FORMAT_EAC_R11_UNORM_BLOCK,
+		VK_FORMAT_EAC_R11_SNORM_BLOCK,
+		VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
+		VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
+	};
+	static const VkFormat s_allAstcFormats[] =
+	{
+		VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
+		VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
+		VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
+		VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
+		VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
+		VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
+		VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
+		VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
+		VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
+		VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
+		VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
+		VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
+		VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
+		VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
+		VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
+		VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
+		VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
+		VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
+		VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
+		VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
+		VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
+		VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
+		VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
+		VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
+		VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
+		VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
+		VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
+		VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
+	};
+
+	const bool	bcFormatsSupported		= optimalTilingFeaturesSupportedForAll(context,
+																			   DE_ARRAY_BEGIN(s_allBcFormats),
+																			   DE_ARRAY_END(s_allBcFormats),
+																			   VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT);
+	const bool	etcEacFormatsSupported	= optimalTilingFeaturesSupportedForAll(context,
+																			   DE_ARRAY_BEGIN(s_allEtcEacFormats),
+																			   DE_ARRAY_END(s_allEtcEacFormats),
+																			   VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT);
+	const bool	astcFormatsSupported	= optimalTilingFeaturesSupportedForAll(context,
+																			   DE_ARRAY_BEGIN(s_allAstcFormats),
+																			   DE_ARRAY_END(s_allAstcFormats),
+																			   VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT);
+	TestLog&	log						= context.getTestContext().getLog();
+
+	log << TestLog::Message << "All BC* formats supported: " << (bcFormatsSupported ? "true" : "false") << TestLog::EndMessage;
+	log << TestLog::Message << "All ETC2/EAC formats supported: " << (etcEacFormatsSupported ? "true" : "false") << TestLog::EndMessage;
+	log << TestLog::Message << "All ASTC formats supported: " << (astcFormatsSupported ? "true" : "false") << TestLog::EndMessage;
+
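+	// At least one of the three compressed texture format families must be supported.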
+	if (bcFormatsSupported || etcEacFormatsSupported || astcFormatsSupported)
+		return tcu::TestStatus::pass("At least one set of compressed formats supported");
+	else
+		return tcu::TestStatus::fail("Compressed formats not supported");
+}
+
+void createFormatTests (tcu::TestCaseGroup* testGroup)
+{
+	DE_STATIC_ASSERT(VK_FORMAT_UNDEFINED == 0);
+
+	for (deUint32 formatNdx = VK_FORMAT_UNDEFINED+1; formatNdx < VK_FORMAT_LAST; ++formatNdx)
+	{
+		const VkFormat		format			= (VkFormat)formatNdx;
+		const char* const	enumName		= getFormatName(format);
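+		// Case name is the format enum without its "VK_FORMAT_" prefix (10 characters), lowercased.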
+		const string		caseName		= de::toLower(string(enumName).substr(10));
+
+		addFunctionCase(testGroup, caseName, enumName, formatProperties, format);
+	}
+
+	addFunctionCase(testGroup, "depth_stencil",			"",	testDepthStencilSupported);
+	addFunctionCase(testGroup, "compressed_formats",	"",	testCompressedFormatsSupported);
+}
+
+VkImageUsageFlags getValidImageUsageFlags (VkFormat, VkFormatFeatureFlags supportedFeatures)
+{
+	VkImageUsageFlags	flags	= (VkImageUsageFlags)0;
+
+	// If format is supported at all, it must be valid transfer src+dst
+	if (supportedFeatures != 0)
+		flags |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT|VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+
+	if ((supportedFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) != 0)
+		flags |= VK_IMAGE_USAGE_SAMPLED_BIT;
+
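+	// Color-renderable formats are also exercised as transient and input attachments.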
+	if ((supportedFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) != 0)
+		flags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT|VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT|VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
+
+	if ((supportedFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) != 0)
+		flags |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+
+	if ((supportedFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) != 0)
+		flags |= VK_IMAGE_USAGE_STORAGE_BIT;
+
+	return flags;
+}
+
+bool isValidImageUsageFlagCombination (VkImageUsageFlags usage)
+{
+	return usage != 0;
+}
+
+VkImageCreateFlags getValidImageCreateFlags (const VkPhysicalDeviceFeatures& deviceFeatures, VkFormat, VkFormatFeatureFlags, VkImageType type, VkImageUsageFlags usage)
+{
+	VkImageCreateFlags	flags	= (VkImageCreateFlags)0;
+
+	if ((usage & VK_IMAGE_USAGE_SAMPLED_BIT) != 0)
+	{
+		flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
+
+		if (type == VK_IMAGE_TYPE_2D)
+			flags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+	}
+
+	if ((usage & (VK_IMAGE_USAGE_SAMPLED_BIT|VK_IMAGE_USAGE_STORAGE_BIT)) != 0 &&
+		(usage & VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT) == 0)
+	{
+		if (deviceFeatures.sparseBinding)
+			flags |= VK_IMAGE_CREATE_SPARSE_BINDING_BIT|VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT;
+
+		if (deviceFeatures.sparseResidencyAliased)
+			flags |= VK_IMAGE_CREATE_SPARSE_ALIASED_BIT;
+	}
+
+	return flags;
+}
+
+bool isValidImageCreateFlagCombination (VkImageCreateFlags)
+{
+	return true;
+}
+
+struct ImageFormatPropertyCase
+{
+	VkFormat		format;
+	VkImageType		imageType;
+	VkImageTiling	tiling;
+
+	ImageFormatPropertyCase (VkFormat format_, VkImageType imageType_, VkImageTiling tiling_)
+		: format	(format_)
+		, imageType	(imageType_)
+		, tiling	(tiling_)
+	{}
+
+	ImageFormatPropertyCase (void)
+		: format	(VK_FORMAT_LAST)
+		, imageType	(VK_IMAGE_TYPE_LAST)
+		, tiling	(VK_IMAGE_TILING_LAST)
+	{}
+};
+
+tcu::TestStatus imageFormatProperties (Context& context, ImageFormatPropertyCase params)
+{
+	TestLog&						log					= context.getTestContext().getLog();
+	const VkFormat					format				= params.format;
+	const VkImageType				imageType			= params.imageType;
+	const VkImageTiling				tiling				= params.tiling;
+	const VkPhysicalDeviceFeatures&	deviceFeatures		= context.getDeviceFeatures();
+	const VkFormatProperties		formatProperties	= getPhysicalDeviceFormatProperties(context.getInstanceInterface(), context.getPhysicalDevice(), format);
+
+	const VkFormatFeatureFlags		supportedFeatures	= tiling == VK_IMAGE_TILING_LINEAR ? formatProperties.linearTilingFeatures : formatProperties.optimalTilingFeatures;
+	const VkImageUsageFlags			usageFlagSet		= getValidImageUsageFlags(format, supportedFeatures);
+
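+	// Enumerate every subset of the valid usage flags; values with bits outside
+	// usageFlagSet are rejected by the mask check below.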
+	for (VkImageUsageFlags curUsageFlags = 0; curUsageFlags <= usageFlagSet; curUsageFlags++)
+	{
+		if ((curUsageFlags & ~usageFlagSet) != 0 ||
+			!isValidImageUsageFlagCombination(curUsageFlags))
+			continue;
+
+		const VkImageCreateFlags	createFlagSet		= getValidImageCreateFlags(deviceFeatures, format, supportedFeatures, imageType, curUsageFlags);
+
+		for (VkImageCreateFlags curCreateFlags = 0; curCreateFlags <= createFlagSet; curCreateFlags++)
+		{
+			if ((curCreateFlags & ~createFlagSet) != 0 ||
+				!isValidImageCreateFlagCombination(curCreateFlags))
+				continue;
+
+			log << TestLog::Message << "Testing " << getImageTypeStr(imageType) << ", "
+									<< getImageTilingStr(tiling) << ", "
+									<< getImageUsageFlagsStr(curUsageFlags) << ", "
+									<< getImageCreateFlagsStr(curCreateFlags)
+				<< TestLog::EndMessage;
+
+			try
+			{
+				const VkImageFormatProperties	properties	= getPhysicalDeviceImageFormatProperties(context.getInstanceInterface(),
+																										context.getPhysicalDevice(),
+																										format,
+																										imageType,
+																										tiling,
+																										curUsageFlags,
+																										curCreateFlags);
+				log << TestLog::Message << properties << "\n" << TestLog::EndMessage;
+
+				// \todo [2016-01-24 pyry] Expand validation
+				TCU_CHECK((properties.sampleCounts & VK_SAMPLE_COUNT_1_BIT) != 0);
+				TCU_CHECK(imageType != VK_IMAGE_TYPE_1D || (properties.maxExtent.width >= 1 && properties.maxExtent.height == 1 && properties.maxExtent.depth == 1));
+				TCU_CHECK(imageType != VK_IMAGE_TYPE_2D || (properties.maxExtent.width >= 1 && properties.maxExtent.height >= 1 && properties.maxExtent.depth == 1));
+				TCU_CHECK(imageType != VK_IMAGE_TYPE_3D || (properties.maxExtent.width >= 1 && properties.maxExtent.height >= 1 && properties.maxExtent.depth >= 1));
+			}
+			catch (const Error& error)
+			{
+				// \todo [2016-01-22 pyry] Check if this is indeed optional image type / flag combination
+				if (error.getError() == VK_ERROR_FORMAT_NOT_SUPPORTED)
+					log << TestLog::Message << "Got VK_ERROR_FORMAT_NOT_SUPPORTED" << TestLog::EndMessage;
+				else
+					throw;
+			}
+		}
+	}
+
+	return tcu::TestStatus::pass("All queries succeeded");
+}
+
+void createImageFormatTypeTilingTests (tcu::TestCaseGroup* testGroup, ImageFormatPropertyCase params)
+{
+	DE_ASSERT(params.format == VK_FORMAT_LAST);
+
+	for (deUint32 formatNdx = VK_FORMAT_UNDEFINED+1; formatNdx < VK_FORMAT_LAST; ++formatNdx)
+	{
+		const VkFormat		format			= (VkFormat)formatNdx;
+		const char* const	enumName		= getFormatName(format);
+		const string		caseName		= de::toLower(string(enumName).substr(10));
+
+		params.format = format;
+
+		addFunctionCase(testGroup, caseName, enumName, imageFormatProperties, params);
+	}
+}
+
+void createImageFormatTypeTests (tcu::TestCaseGroup* testGroup, ImageFormatPropertyCase params)
+{
+	DE_ASSERT(params.tiling == VK_IMAGE_TILING_LAST);
+
+	testGroup->addChild(createTestGroup(testGroup->getTestContext(), "optimal",	"",	createImageFormatTypeTilingTests, ImageFormatPropertyCase(VK_FORMAT_LAST, params.imageType, VK_IMAGE_TILING_OPTIMAL)));
+	testGroup->addChild(createTestGroup(testGroup->getTestContext(), "linear",	"",	createImageFormatTypeTilingTests, ImageFormatPropertyCase(VK_FORMAT_LAST, params.imageType, VK_IMAGE_TILING_LINEAR)));
+}
+
+void createImageFormatTests (tcu::TestCaseGroup* testGroup)
+{
+	testGroup->addChild(createTestGroup(testGroup->getTestContext(), "1d", "", createImageFormatTypeTests, ImageFormatPropertyCase(VK_FORMAT_LAST, VK_IMAGE_TYPE_1D, VK_IMAGE_TILING_LAST)));
+	testGroup->addChild(createTestGroup(testGroup->getTestContext(), "2d", "", createImageFormatTypeTests, ImageFormatPropertyCase(VK_FORMAT_LAST, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_LAST)));
+	testGroup->addChild(createTestGroup(testGroup->getTestContext(), "3d", "", createImageFormatTypeTests, ImageFormatPropertyCase(VK_FORMAT_LAST, VK_IMAGE_TYPE_3D, VK_IMAGE_TILING_LAST)));
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createFeatureInfoTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	infoTests	(new tcu::TestCaseGroup(testCtx, "info", "Platform Information Tests"));
+
+	{
+		de::MovePtr<tcu::TestCaseGroup> instanceInfoTests	(new tcu::TestCaseGroup(testCtx, "instance", "Instance Information Tests"));
+
+		addFunctionCase(instanceInfoTests.get(), "physical_devices",		"Physical devices",			enumeratePhysicalDevices);
+		addFunctionCase(instanceInfoTests.get(), "layers",					"Layers",					enumerateInstanceLayers);
+		addFunctionCase(instanceInfoTests.get(), "extensions",				"Extensions",				enumerateInstanceExtensions);
+
+		infoTests->addChild(instanceInfoTests.release());
+	}
+
+	{
+		de::MovePtr<tcu::TestCaseGroup> deviceInfoTests	(new tcu::TestCaseGroup(testCtx, "device", "Device Information Tests"));
+
+		addFunctionCase(deviceInfoTests.get(), "features",					"Device Features",			deviceFeatures);
+		addFunctionCase(deviceInfoTests.get(), "properties",				"Device Properties",		deviceProperties);
+		addFunctionCase(deviceInfoTests.get(), "queue_family_properties",	"Queue family properties",	deviceQueueFamilyProperties);
+		addFunctionCase(deviceInfoTests.get(), "memory_properties",			"Memory properties",		deviceMemoryProperties);
+		addFunctionCase(deviceInfoTests.get(), "layers",					"Layers",					enumerateDeviceLayers);
+		addFunctionCase(deviceInfoTests.get(), "extensions",				"Extensions",				enumerateDeviceExtensions);
+
+		infoTests->addChild(deviceInfoTests.release());
+	}
+
+	infoTests->addChild(createTestGroup(testCtx, "format_properties",		"VkGetPhysicalDeviceFormatProperties() Tests",		createFormatTests));
+	infoTests->addChild(createTestGroup(testCtx, "image_format_properties",	"VkGetPhysicalDeviceImageFormatProperties() Tests",	createImageFormatTests));
+
+	return infoTests.release();
+}
+
+} // api
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/api/vktApiFeatureInfo.hpp b/external/vulkancts/modules/vulkan/api/vktApiFeatureInfo.hpp
new file mode 100644
index 0000000..a88aff1
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiFeatureInfo.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTAPIFEATUREINFO_HPP
+#define _VKTAPIFEATUREINFO_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief API Feature Query tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace api
+{
+
+tcu::TestCaseGroup*		createFeatureInfoTests		(tcu::TestContext& testCtx);
+
+} // api
+} // vkt
+
+#endif // _VKTAPIFEATUREINFO_HPP
diff --git a/external/vulkancts/modules/vulkan/api/vktApiObjectManagementTests.cpp b/external/vulkancts/modules/vulkan/api/vktApiObjectManagementTests.cpp
new file mode 100644
index 0000000..50a3ecc
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiObjectManagementTests.cpp
@@ -0,0 +1,2712 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Object management tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktApiObjectManagementTests.hpp"
+#include "vktTestCaseUtil.hpp"
+
+#include "vkDefs.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkTypeUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkStrUtil.hpp"
+#include "vkAllocationCallbackUtil.hpp"
+
+#include "tcuVector.hpp"
+#include "tcuResultCollector.hpp"
+#include "tcuCommandLine.hpp"
+#include "tcuTestLog.hpp"
+
+#include "deUniquePtr.hpp"
+#include "deSharedPtr.hpp"
+#include "deArrayUtil.hpp"
+#include "deSpinBarrier.hpp"
+#include "deThread.hpp"
+#include "deInt32.h"
+
+namespace vkt
+{
+namespace api
+{
+
+namespace
+{
+
+using namespace vk;
+
+using de::UniquePtr;
+using de::MovePtr;
+using de::SharedPtr;
+
+using tcu::IVec3;
+using tcu::UVec3;
+using tcu::ResultCollector;
+using tcu::TestStatus;
+using tcu::TestLog;
+
+using std::string;
+using std::vector;
+
+class ThreadGroupThread;
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Thread group
+ *
+ * Thread group manages collection of threads that are expected to be
+ * launched simultaneously as a group.
+ *
+ * Shared barrier is provided for synchronizing execution. Terminating thread
+ * early either by returning from ThreadGroupThread::runThread() or throwing
+ * an exception is safe, and other threads will continue execution. The
+ * thread that has been terminated is simply removed from the synchronization
+ * group.
+ *
+ * TestException-based exceptions are collected and translated into a
+ * tcu::TestStatus by using tcu::ResultCollector.
+ *
+ * Use cases for ThreadGroup include for example testing thread-safety of
+ * certain API operations by poking API simultaneously from multiple
+ * threads.
+ *//*--------------------------------------------------------------------*/
+class ThreadGroup
+{
+public:
+							ThreadGroup			(void);
+							~ThreadGroup		(void);
+
+	void					add					(de::MovePtr<ThreadGroupThread> thread);
+	TestStatus				run					(void);
+
+private:
+	typedef std::vector<de::SharedPtr<ThreadGroupThread> >	ThreadVector;
+
+	ThreadVector			m_threads;
+	de::SpinBarrier			m_barrier;
+} DE_WARN_UNUSED_TYPE;
+
+class ThreadGroupThread : private de::Thread
+{
+public:
+							ThreadGroupThread	(void);
+	virtual					~ThreadGroupThread	(void);
+
+	void					start				(de::SpinBarrier* groupBarrier);
+
+	ResultCollector&		getResultCollector	(void) { return m_resultCollector; }
+
+	using de::Thread::join;
+
+protected:
+	virtual void			runThread			(void) = 0;
+
+	void					barrier				(void);
+
+private:
+							ThreadGroupThread	(const ThreadGroupThread&);
+	ThreadGroupThread&		operator=			(const ThreadGroupThread&);
+
+	void					run					(void);
+
+	ResultCollector			m_resultCollector;
+	de::SpinBarrier*		m_barrier;
+};
+
+// ThreadGroup
+
+ThreadGroup::ThreadGroup (void)
+	: m_barrier(1)
+{
+}
+
+ThreadGroup::~ThreadGroup (void)
+{
+}
+
+void ThreadGroup::add (de::MovePtr<ThreadGroupThread> thread)
+{
+	m_threads.push_back(de::SharedPtr<ThreadGroupThread>(thread.release()));
+}
+
+tcu::TestStatus ThreadGroup::run (void)
+{
+	tcu::ResultCollector	resultCollector;
+
+	m_barrier.reset((int)m_threads.size());
+
+	for (ThreadVector::iterator threadIter = m_threads.begin(); threadIter != m_threads.end(); ++threadIter)
+		(*threadIter)->start(&m_barrier);
+
+	for (ThreadVector::iterator threadIter = m_threads.begin(); threadIter != m_threads.end(); ++threadIter)
+	{
+		tcu::ResultCollector&	threadResult	= (*threadIter)->getResultCollector();
+		(*threadIter)->join();
+		resultCollector.addResult(threadResult.getResult(), threadResult.getMessage());
+	}
+
+	return tcu::TestStatus(resultCollector.getResult(), resultCollector.getMessage());
+}
+
+// ThreadGroupThread
+
+ThreadGroupThread::ThreadGroupThread (void)
+	: m_barrier(DE_NULL)
+{
+}
+
+ThreadGroupThread::~ThreadGroupThread (void)
+{
+}
+
+void ThreadGroupThread::start (de::SpinBarrier* groupBarrier)
+{
+	m_barrier = groupBarrier;
+	de::Thread::start();
+}
+
+void ThreadGroupThread::run (void)
+{
+	try
+	{
+		runThread();
+	}
+	catch (const tcu::TestException& e)
+	{
+		getResultCollector().addResult(e.getTestResult(), e.getMessage());
+	}
+	catch (const std::exception& e)
+	{
+		getResultCollector().addResult(QP_TEST_RESULT_FAIL, e.what());
+	}
+	catch (...)
+	{
+		getResultCollector().addResult(QP_TEST_RESULT_FAIL, "Exception");
+	}
+
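+	// Drop this thread from the shared barrier so that the remaining threads in the
+	// group are not left waiting if this thread returned or threw early.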
+	m_barrier->removeThread(de::SpinBarrier::WAIT_MODE_AUTO);
+}
+
+inline void ThreadGroupThread::barrier (void)
+{
+	m_barrier->sync(de::SpinBarrier::WAIT_MODE_AUTO);
+}
+
+deUint32 getDefaultTestThreadCount (void)
+{
+	return de::clamp(deGetNumAvailableLogicalCores(), 2u, 8u);
+}
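+
+// Minimal ThreadGroup usage sketch (illustrative only; CountingThread is a
+// hypothetical subclass, not part of this change):
+//
+//	class CountingThread : public ThreadGroupThread
+//	{
+//	protected:
+//		void runThread (void)
+//		{
+//			barrier();		// wait until every thread in the group is running
+//			// ... poke the API under test ...
+//		}
+//	};
+//
+//	ThreadGroup group;
+//	for (deUint32 ndx = 0; ndx < getDefaultTestThreadCount(); ndx++)
+//		group.add(de::MovePtr<ThreadGroupThread>(new CountingThread()));
+//	return group.run();		// aggregated tcu::TestStatus from all threads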
+
+// Utilities
+
+struct Environment
+{
+	const PlatformInterface&		vkp;
+	const DeviceInterface&			vkd;
+	VkDevice						device;
+	deUint32						queueFamilyIndex;
+	const BinaryCollection&			programBinaries;
+	const VkAllocationCallbacks*	allocationCallbacks;
+	deUint32						maxResourceConsumers;		// Maximum number of objects using same Object::Resources concurrently
+
+	Environment (Context& context, deUint32 maxResourceConsumers_)
+		: vkp					(context.getPlatformInterface())
+		, vkd					(context.getDeviceInterface())
+		, device				(context.getDevice())
+		, queueFamilyIndex		(context.getUniversalQueueFamilyIndex())
+		, programBinaries		(context.getBinaryCollection())
+		, allocationCallbacks	(DE_NULL)
+		, maxResourceConsumers	(maxResourceConsumers_)
+	{
+	}
+
+	Environment (const PlatformInterface&		vkp_,
+				 const DeviceInterface&			vkd_,
+				 VkDevice						device_,
+				 deUint32						queueFamilyIndex_,
+				 const BinaryCollection&		programBinaries_,
+				 const VkAllocationCallbacks*	allocationCallbacks_,
+				 deUint32						maxResourceConsumers_)
+		: vkp					(vkp_)
+		, vkd					(vkd_)
+		, device				(device_)
+		, queueFamilyIndex		(queueFamilyIndex_)
+		, programBinaries		(programBinaries_)
+		, allocationCallbacks	(allocationCallbacks_)
+		, maxResourceConsumers	(maxResourceConsumers_)
+	{
+	}
+};
+
+template<typename Case>
+struct Dependency
+{
+	typename Case::Resources		resources;
+	Unique<typename Case::Type>		object;
+
+	Dependency (const Environment& env, const typename Case::Parameters& params)
+		: resources	(env, params)
+		, object	(Case::create(env, resources, params))
+	{}
+};
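+
+// Dependency<Case> owns both the Resources required by an object and the object
+// itself; for example Dependency<Buffer> inside BufferView::Resources keeps the
+// underlying VkBuffer (and its backing memory) alive for the view's lifetime.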
+
+// Object definitions
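+//
+// Each object type below follows the same contract: a Type typedef for the Vulkan
+// handle, a Parameters struct describing the creation variant, a Resources struct
+// owning any dependencies, a static create() returning Move<Type>, and
+// getMaxConcurrent() bounding how many objects of that type a test keeps alive at once.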
+
+enum
+{
+	DEFAULT_MAX_CONCURRENT_OBJECTS	= 16*1024
+};
+
+struct Instance
+{
+	typedef VkInstance Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return 32;
+	}
+
+	struct Parameters
+	{
+		Parameters (void) {}
+	};
+
+	struct Resources
+	{
+		Resources (const Environment&, const Parameters&) {}
+	};
+
+	static Move<VkInstance> create (const Environment& env, const Resources&, const Parameters&)
+	{
+		const VkApplicationInfo		appInfo			=
+		{
+			VK_STRUCTURE_TYPE_APPLICATION_INFO,
+			DE_NULL,
+			DE_NULL,							// pApplicationName
+			0u,									// applicationVersion
+			DE_NULL,							// pEngineName
+			0u,									// engineVersion
+			VK_API_VERSION
+		};
+		const VkInstanceCreateInfo	instanceInfo	=
+		{
+			VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+			DE_NULL,
+			(VkInstanceCreateFlags)0,
+			&appInfo,
+			0u,									// enabledLayerNameCount
+			DE_NULL,							// ppEnabledLayerNames
+			0u,									// enabledExtensionNameCount
+			DE_NULL,							// ppEnabledExtensionNames
+		};
+
+		return createInstance(env.vkp, &instanceInfo, env.allocationCallbacks);
+	}
+};
+
+struct Device
+{
+	typedef VkDevice Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return 32;
+	}
+
+	struct Parameters
+	{
+		deUint32		deviceIndex;
+		VkQueueFlags	queueFlags;
+
+		Parameters (deUint32 deviceIndex_, VkQueueFlags queueFlags_)
+			: deviceIndex	(deviceIndex_)
+			, queueFlags	(queueFlags_)
+		{}
+	};
+
+	struct Resources
+	{
+		Dependency<Instance>	instance;
+		InstanceDriver			vki;
+		VkPhysicalDevice		physicalDevice;
+		deUint32				queueFamilyIndex;
+
+		Resources (const Environment& env, const Parameters& params)
+			: instance			(env, Instance::Parameters())
+			, vki				(env.vkp, *instance.object)
+			, physicalDevice	(0)
+			, queueFamilyIndex	(~0u)
+		{
+			{
+				const vector<VkPhysicalDevice>	physicalDevices	= enumeratePhysicalDevices(vki, *instance.object);
+
+				if (physicalDevices.size() <= (size_t)params.deviceIndex)
+					TCU_THROW(NotSupportedError, "Device not found");
+
+				physicalDevice = physicalDevices[params.deviceIndex];
+			}
+
+			{
+				const vector<VkQueueFamilyProperties>	queueProps		= getPhysicalDeviceQueueFamilyProperties(vki, physicalDevice);
+				bool									foundMatching	= false;
+
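+				// Pick a queue family that supports all requested queue flags
+				// (if several match, the last one found is used).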
+				for (size_t curQueueNdx = 0; curQueueNdx < queueProps.size(); curQueueNdx++)
+				{
+					if ((queueProps[curQueueNdx].queueFlags & params.queueFlags) == params.queueFlags)
+					{
+						queueFamilyIndex	= (deUint32)curQueueNdx;
+						foundMatching		= true;
+					}
+				}
+
+				if (!foundMatching)
+					TCU_THROW(NotSupportedError, "Matching queue not found");
+			}
+		}
+	};
+
+	static Move<VkDevice> create (const Environment& env, const Resources& res, const Parameters&)
+	{
+		const float	queuePriority	= 1.0f;
+
+		const VkDeviceQueueCreateInfo	queues[]	=
+		{
+			{
+				VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+				DE_NULL,
+				(VkDeviceQueueCreateFlags)0,
+				res.queueFamilyIndex,
+				1u,									// queueCount
+				&queuePriority,						// pQueuePriorities
+			}
+		};
+		const VkDeviceCreateInfo	deviceInfo	=
+		{
+			VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
+			DE_NULL,
+			(VkDeviceCreateFlags)0,
+			DE_LENGTH_OF_ARRAY(queues),
+			queues,
+			0u,										// enabledLayerNameCount
+			DE_NULL,								// ppEnabledLayerNames
+			0u,										// enabledExtensionNameCount
+			DE_NULL,								// ppEnabledExtensionNames
+			DE_NULL,								// pEnabledFeatures
+		};
+
+		return createDevice(res.vki, res.physicalDevice, &deviceInfo, env.allocationCallbacks);
+	}
+};
+
+struct DeviceMemory
+{
+	typedef VkDeviceMemory Type;
+
+	static deUint32 getMaxConcurrent (Context& context)
+	{
+		return de::min(context.getDeviceProperties().limits.maxMemoryAllocationCount, 4096u);
+	}
+
+	struct Parameters
+	{
+		VkDeviceSize	size;
+		deUint32		memoryTypeIndex;
+
+		Parameters (VkDeviceSize size_, deUint32 memoryTypeIndex_)
+			: size				(size_)
+			, memoryTypeIndex	(memoryTypeIndex_)
+		{
+			DE_ASSERT(memoryTypeIndex < VK_MAX_MEMORY_TYPES);
+		}
+	};
+
+	struct Resources
+	{
+		Resources (const Environment&, const Parameters&) {}
+	};
+
+	static Move<VkDeviceMemory> create (const Environment& env, const Resources&, const Parameters& params)
+	{
+		const VkMemoryAllocateInfo	allocInfo	=
+		{
+			VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+			DE_NULL,
+			params.size,
+			params.memoryTypeIndex
+		};
+
+		return allocateMemory(env.vkd, env.device, &allocInfo, env.allocationCallbacks);
+	}
+};
+
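+// Allocate the full required size from the lowest-indexed memory type allowed by
+// memoryTypeBits (deCtz32() returns the index of the lowest set bit).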
+DeviceMemory::Parameters getDeviceMemoryParameters (const VkMemoryRequirements& memReqs)
+{
+	return DeviceMemory::Parameters(memReqs.size, deCtz32(memReqs.memoryTypeBits));
+}
+
+DeviceMemory::Parameters getDeviceMemoryParameters (const Environment& env, VkImage image)
+{
+	return getDeviceMemoryParameters(getImageMemoryRequirements(env.vkd, env.device, image));
+}
+
+DeviceMemory::Parameters getDeviceMemoryParameters (const Environment& env, VkBuffer buffer)
+{
+	return getDeviceMemoryParameters(getBufferMemoryRequirements(env.vkd, env.device, buffer));
+}
+
+struct Buffer
+{
+	typedef VkBuffer Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		VkDeviceSize		size;
+		VkBufferUsageFlags	usage;
+
+		Parameters (VkDeviceSize		size_,
+					VkBufferUsageFlags	usage_)
+			: size	(size_)
+			, usage	(usage_)
+		{}
+	};
+
+	struct Resources
+	{
+		Resources (const Environment&, const Parameters&) {}
+	};
+
+	static Move<VkBuffer> create (const Environment& env, const Resources&, const Parameters& params)
+	{
+		const VkBufferCreateInfo	bufferInfo	=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+			DE_NULL,
+			(VkBufferCreateFlags)0,
+			params.size,
+			params.usage,
+			VK_SHARING_MODE_EXCLUSIVE,
+			1u,
+			&env.queueFamilyIndex
+		};
+
+		return createBuffer(env.vkd, env.device, &bufferInfo, env.allocationCallbacks);
+	}
+};
+
+struct BufferView
+{
+	typedef VkBufferView Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		Buffer::Parameters	buffer;
+		VkFormat			format;
+		VkDeviceSize		offset;
+		VkDeviceSize		range;
+
+		Parameters (const Buffer::Parameters&	buffer_,
+					VkFormat					format_,
+					VkDeviceSize				offset_,
+					VkDeviceSize				range_)
+			: buffer	(buffer_)
+			, format	(format_)
+			, offset	(offset_)
+			, range		(range_)
+		{}
+	};
+
+	struct Resources
+	{
+		Dependency<Buffer>			buffer;
+		Dependency<DeviceMemory>	memory;
+
+		Resources (const Environment& env, const Parameters& params)
+			: buffer(env, params.buffer)
+			, memory(env, getDeviceMemoryParameters(env, *buffer.object))
+		{
+			VK_CHECK(env.vkd.bindBufferMemory(env.device, *buffer.object, *memory.object, 0));
+		}
+	};
+
+	static Move<VkBufferView> create (const Environment& env, const Resources& res, const Parameters& params)
+	{
+		const VkBufferViewCreateInfo	bufferViewInfo	=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
+			DE_NULL,
+			(VkBufferViewCreateFlags)0,
+			*res.buffer.object,
+			params.format,
+			params.offset,
+			params.range
+		};
+
+		return createBufferView(env.vkd, env.device, &bufferViewInfo, env.allocationCallbacks);
+	}
+};
+
+struct Image
+{
+	typedef VkImage Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		VkImageCreateFlags		flags;
+		VkImageType				imageType;
+		VkFormat				format;
+		VkExtent3D				extent;
+		deUint32				mipLevels;
+		deUint32				arraySize;
+		VkSampleCountFlagBits	samples;
+		VkImageTiling			tiling;
+		VkImageUsageFlags		usage;
+		VkImageLayout			initialLayout;
+
+		Parameters (VkImageCreateFlags		flags_,
+					VkImageType				imageType_,
+					VkFormat				format_,
+					VkExtent3D				extent_,
+					deUint32				mipLevels_,
+					deUint32				arraySize_,
+					VkSampleCountFlagBits	samples_,
+					VkImageTiling			tiling_,
+					VkImageUsageFlags		usage_,
+					VkImageLayout			initialLayout_)
+			: flags			(flags_)
+			, imageType		(imageType_)
+			, format		(format_)
+			, extent		(extent_)
+			, mipLevels		(mipLevels_)
+			, arraySize		(arraySize_)
+			, samples		(samples_)
+			, tiling		(tiling_)
+			, usage			(usage_)
+			, initialLayout	(initialLayout_)
+		{}
+	};
+
+	struct Resources
+	{
+		Resources (const Environment&, const Parameters&) {}
+	};
+
+	static Move<VkImage> create (const Environment& env, const Resources&, const Parameters& params)
+	{
+		const VkImageCreateInfo		imageInfo	=
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+			DE_NULL,
+			params.flags,
+			params.imageType,
+			params.format,
+			params.extent,
+			params.mipLevels,
+			params.arraySize,
+			params.samples,
+			params.tiling,
+			params.usage,
+			VK_SHARING_MODE_EXCLUSIVE,		// sharingMode
+			1u,								// queueFamilyIndexCount
+			&env.queueFamilyIndex,			// pQueueFamilyIndices
+			params.initialLayout
+		};
+
+		return createImage(env.vkd, env.device, &imageInfo, env.allocationCallbacks);
+	}
+};
+
+struct ImageView
+{
+	typedef VkImageView Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		Image::Parameters		image;
+		VkImageViewType			viewType;
+		VkFormat				format;
+		VkComponentMapping		components;
+		VkImageSubresourceRange	subresourceRange;
+
+		Parameters (const Image::Parameters&	image_,
+					VkImageViewType				viewType_,
+					VkFormat					format_,
+					VkComponentMapping			components_,
+					VkImageSubresourceRange		subresourceRange_)
+			: image				(image_)
+			, viewType			(viewType_)
+			, format			(format_)
+			, components		(components_)
+			, subresourceRange	(subresourceRange_)
+		{}
+	};
+
+	struct Resources
+	{
+		Dependency<Image>			image;
+		Dependency<DeviceMemory>	memory;
+
+		Resources (const Environment& env, const Parameters& params)
+			: image	(env, params.image)
+			, memory(env, getDeviceMemoryParameters(env, *image.object))
+		{
+			VK_CHECK(env.vkd.bindImageMemory(env.device, *image.object, *memory.object, 0));
+		}
+	};
+
+	static Move<VkImageView> create (const Environment& env, const Resources& res, const Parameters& params)
+	{
+		const VkImageViewCreateInfo	imageViewInfo	=
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+			DE_NULL,
+			(VkImageViewCreateFlags)0,
+			*res.image.object,
+			params.viewType,
+			params.format,
+			params.components,
+			params.subresourceRange,
+		};
+
+		return createImageView(env.vkd, env.device, &imageViewInfo, env.allocationCallbacks);
+	}
+};
+
+struct Semaphore
+{
+	typedef VkSemaphore Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return 100;
+	}
+
+	struct Parameters
+	{
+		VkSemaphoreCreateFlags	flags;
+
+		Parameters (VkSemaphoreCreateFlags flags_)
+			: flags(flags_)
+		{}
+	};
+
+	struct Resources
+	{
+		Resources (const Environment&, const Parameters&) {}
+	};
+
+	static Move<VkSemaphore> create (const Environment& env, const Resources&, const Parameters& params)
+	{
+		const VkSemaphoreCreateInfo	semaphoreInfo	=
+		{
+			VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
+			DE_NULL,
+			params.flags
+		};
+
+		return createSemaphore(env.vkd, env.device, &semaphoreInfo, env.allocationCallbacks);
+	}
+};
+
+struct Fence
+{
+	typedef VkFence Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return 100;
+	}
+
+	struct Parameters
+	{
+		VkFenceCreateFlags	flags;
+
+		Parameters (VkFenceCreateFlags flags_)
+			: flags(flags_)
+		{}
+	};
+
+	struct Resources
+	{
+		Resources (const Environment&, const Parameters&) {}
+	};
+
+	static Move<VkFence> create (const Environment& env, const Resources&, const Parameters& params)
+	{
+		const VkFenceCreateInfo	fenceInfo	=
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+			DE_NULL,
+			params.flags
+		};
+
+		return createFence(env.vkd, env.device, &fenceInfo, env.allocationCallbacks);
+	}
+};
+
+struct Event
+{
+	typedef VkEvent Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return 100;
+	}
+
+	struct Parameters
+	{
+		VkEventCreateFlags	flags;
+
+		Parameters (VkEventCreateFlags flags_)
+			: flags(flags_)
+		{}
+	};
+
+	struct Resources
+	{
+		Resources (const Environment&, const Parameters&) {}
+	};
+
+	static Move<VkEvent> create (const Environment& env, const Resources&, const Parameters& params)
+	{
+		const VkEventCreateInfo	eventInfo	=
+		{
+			VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+			DE_NULL,
+			params.flags
+		};
+
+		return createEvent(env.vkd, env.device, &eventInfo, env.allocationCallbacks);
+	}
+};
+
+struct QueryPool
+{
+	typedef VkQueryPool Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		VkQueryType						queryType;
+		deUint32						entryCount;
+		VkQueryPipelineStatisticFlags	pipelineStatistics;
+
+		Parameters (VkQueryType						queryType_,
+					deUint32						entryCount_,
+					VkQueryPipelineStatisticFlags	pipelineStatistics_)
+			: queryType				(queryType_)
+			, entryCount			(entryCount_)
+			, pipelineStatistics	(pipelineStatistics_)
+		{}
+	};
+
+	struct Resources
+	{
+		Resources (const Environment&, const Parameters&) {}
+	};
+
+	static Move<VkQueryPool> create (const Environment& env, const Resources&, const Parameters& params)
+	{
+		const VkQueryPoolCreateInfo	queryPoolInfo	=
+		{
+			VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
+			DE_NULL,
+			(VkQueryPoolCreateFlags)0,
+			params.queryType,
+			params.entryCount,
+			params.pipelineStatistics
+		};
+
+		return createQueryPool(env.vkd, env.device, &queryPoolInfo, env.allocationCallbacks);
+	}
+};
+
+struct ShaderModule
+{
+	typedef VkShaderModule Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		VkShaderStageFlagBits	shaderStage;
+		string					binaryName;
+
+		Parameters (VkShaderStageFlagBits	shaderStage_,
+					const std::string&		binaryName_)
+			: shaderStage	(shaderStage_)
+			, binaryName	(binaryName_)
+		{}
+	};
+
+	struct Resources
+	{
+		const ProgramBinary&	binary;
+
+		Resources (const Environment& env, const Parameters& params)
+			: binary(env.programBinaries.get(params.binaryName))
+		{}
+	};
+
+	static const char* getSource (VkShaderStageFlagBits stage)
+	{
+		switch (stage)
+		{
+			case VK_SHADER_STAGE_VERTEX_BIT:
+				return "#version 310 es\n"
+					   "layout(location = 0) in highp vec4 a_position;\n"
+					   "void main () { gl_Position = a_position; }\n";
+
+			case VK_SHADER_STAGE_FRAGMENT_BIT:
+				return "#version 310 es\n"
+					   "layout(location = 0) out mediump vec4 o_color;\n"
+					   "void main () { o_color = vec4(1.0, 0.5, 0.25, 1.0); }";
+
+			case VK_SHADER_STAGE_COMPUTE_BIT:
+				return "#version 310 es\n"
+					   "layout(local_size_x = 1) in;\n"	// compute shaders must declare a local work-group size
+					   "layout(binding = 0) buffer Input { highp uint dataIn[]; };\n"
+					   "layout(binding = 1) buffer Output { highp uint dataOut[]; };\n"
+					   "void main (void)\n"
+					   "{\n"
+					   "	dataOut[gl_GlobalInvocationID.x] = ~dataIn[gl_GlobalInvocationID.x];\n"
+					   "}\n";
+
+			default:
+				DE_FATAL("Not implemented");
+				return DE_NULL;
+		}
+	}
+
+	static void initPrograms (SourceCollections& dst, Parameters params)
+	{
+		const char* const	source	= getSource(params.shaderStage);
+
+		DE_ASSERT(source);
+
+		dst.glslSources.add(params.binaryName)
+			<< glu::ShaderSource(getGluShaderType(params.shaderStage), source);
+	}
+
+	static Move<VkShaderModule> create (const Environment& env, const Resources& res, const Parameters&)
+	{
+		const VkShaderModuleCreateInfo	shaderModuleInfo	=
+		{
+			VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
+			DE_NULL,
+			(VkShaderModuleCreateFlags)0,
+			res.binary.getSize(),
+			(const deUint32*)res.binary.getBinary(),
+		};
+
+		return createShaderModule(env.vkd, env.device, &shaderModuleInfo, env.allocationCallbacks);
+	}
+};
+
+struct PipelineCache
+{
+	typedef VkPipelineCache Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		Parameters (void) {}
+	};
+
+	struct Resources
+	{
+		Resources (const Environment&, const Parameters&) {}
+	};
+
+	static Move<VkPipelineCache> create (const Environment& env, const Resources&, const Parameters&)
+	{
+		const VkPipelineCacheCreateInfo	pipelineCacheInfo	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
+			DE_NULL,
+			(VkPipelineCacheCreateFlags)0u,
+			0u,								// initialDataSize
+			DE_NULL,						// pInitialData
+		};
+
+		return createPipelineCache(env.vkd, env.device, &pipelineCacheInfo, env.allocationCallbacks);
+	}
+};
+
+struct Sampler
+{
+	typedef VkSampler Type;
+
+	static deUint32 getMaxConcurrent (Context& context)
+	{
+		return context.getDeviceProperties().limits.maxSamplerAllocationCount;
+	}
+
+	struct Parameters
+	{
+		VkFilter				magFilter;
+		VkFilter				minFilter;
+		VkSamplerMipmapMode		mipmapMode;
+		VkSamplerAddressMode	addressModeU;
+		VkSamplerAddressMode	addressModeV;
+		VkSamplerAddressMode	addressModeW;
+		float					mipLodBias;
+		VkBool32				anisotropyEnable;
+		float					maxAnisotropy;
+		VkBool32				compareEnable;
+		VkCompareOp				compareOp;
+		float					minLod;
+		float					maxLod;
+		VkBorderColor			borderColor;
+		VkBool32				unnormalizedCoordinates;
+
+		// \todo [2015-09-17 pyry] Other configurations
+		Parameters (void)
+			: magFilter					(VK_FILTER_NEAREST)
+			, minFilter					(VK_FILTER_NEAREST)
+			, mipmapMode				(VK_SAMPLER_MIPMAP_MODE_NEAREST)
+			, addressModeU				(VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE)
+			, addressModeV				(VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE)
+			, addressModeW				(VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE)
+			, mipLodBias				(0.0f)
+			, anisotropyEnable			(VK_FALSE)
+			, maxAnisotropy				(1.0f)
+			, compareEnable				(VK_FALSE)
+			, compareOp					(VK_COMPARE_OP_ALWAYS)
+			, minLod					(-1000.f)
+			, maxLod					(+1000.f)
+			, borderColor				(VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK)
+			, unnormalizedCoordinates	(VK_FALSE)
+		{}
+	};
+
+	struct Resources
+	{
+		Resources (const Environment&, const Parameters&) {}
+	};
+
+	static Move<VkSampler> create (const Environment& env, const Resources&, const Parameters& params)
+	{
+		const VkSamplerCreateInfo	samplerInfo	=
+		{
+			VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
+			DE_NULL,
+			(VkSamplerCreateFlags)0,
+			params.magFilter,
+			params.minFilter,
+			params.mipmapMode,
+			params.addressModeU,
+			params.addressModeV,
+			params.addressModeW,
+			params.mipLodBias,
+			params.anisotropyEnable,
+			params.maxAnisotropy,
+			params.compareEnable,
+			params.compareOp,
+			params.minLod,
+			params.maxLod,
+			params.borderColor,
+			params.unnormalizedCoordinates
+		};
+
+		return createSampler(env.vkd, env.device, &samplerInfo, env.allocationCallbacks);
+	}
+};
+
+struct DescriptorSetLayout
+{
+	typedef VkDescriptorSetLayout Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		struct Binding
+		{
+			deUint32			binding;
+			VkDescriptorType	descriptorType;
+			deUint32			descriptorCount;
+			VkShaderStageFlags	stageFlags;
+			bool				useImmutableSampler;
+
+			Binding (deUint32			binding_,
+					 VkDescriptorType	descriptorType_,
+					 deUint32			descriptorCount_,
+					 VkShaderStageFlags	stageFlags_,
+					 bool				useImmutableSampler_)
+				: binding				(binding_)
+				, descriptorType		(descriptorType_)
+				, descriptorCount		(descriptorCount_)
+				, stageFlags			(stageFlags_)
+				, useImmutableSampler	(useImmutableSampler_)
+			{}
+
+			Binding (void) {}
+		};
+
+		vector<Binding>	bindings;
+
+		Parameters (const vector<Binding>& bindings_)
+			: bindings(bindings_)
+		{}
+
+		static Parameters empty (void)
+		{
+			return Parameters(vector<Binding>());
+		}
+
+		static Parameters single (deUint32				binding,
+								  VkDescriptorType		descriptorType,
+								  deUint32				descriptorCount,
+								  VkShaderStageFlags	stageFlags,
+								  bool					useImmutableSampler = false)
+		{
+			vector<Binding> bindings;
+			bindings.push_back(Binding(binding, descriptorType, descriptorCount, stageFlags, useImmutableSampler));
+			return Parameters(bindings);
+		}
+	};
+
+	struct Resources
+	{
+		vector<VkDescriptorSetLayoutBinding>	bindings;
+		MovePtr<Dependency<Sampler> >			immutableSampler;
+		vector<VkSampler>						immutableSamplersPtr;
+
+		Resources (const Environment& env, const Parameters& params)
+		{
+			// Create immutable sampler if needed
+			for (vector<Parameters::Binding>::const_iterator cur = params.bindings.begin(); cur != params.bindings.end(); cur++)
+			{
+				if (cur->useImmutableSampler && !immutableSampler)
+					immutableSampler = de::newMovePtr<Dependency<Sampler> >(env, Sampler::Parameters());
+
+				if (cur->useImmutableSampler && immutableSamplersPtr.size() < (size_t)cur->descriptorCount)
+					immutableSamplersPtr.resize(cur->descriptorCount, *immutableSampler->object);
+			}
+
+			for (vector<Parameters::Binding>::const_iterator cur = params.bindings.begin(); cur != params.bindings.end(); cur++)
+			{
+				const VkDescriptorSetLayoutBinding	binding	=
+				{
+					cur->binding,
+					cur->descriptorType,
+					cur->descriptorCount,
+					cur->stageFlags,
+					(cur->useImmutableSampler ? &immutableSamplersPtr[0] : DE_NULL)
+				};
+
+				bindings.push_back(binding);
+			}
+		}
+	};
+
+	static Move<VkDescriptorSetLayout> create (const Environment& env, const Resources& res, const Parameters&)
+	{
+		const VkDescriptorSetLayoutCreateInfo	descriptorSetLayoutInfo	=
+		{
+			VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
+			DE_NULL,
+			(VkDescriptorSetLayoutCreateFlags)0,
+			(deUint32)res.bindings.size(),
+			(res.bindings.empty() ? DE_NULL : &res.bindings[0])
+		};
+
+		return createDescriptorSetLayout(env.vkd, env.device, &descriptorSetLayoutInfo, env.allocationCallbacks);
+	}
+};
+
+struct PipelineLayout
+{
+	typedef VkPipelineLayout Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		vector<DescriptorSetLayout::Parameters>	descriptorSetLayouts;
+		vector<VkPushConstantRange>				pushConstantRanges;
+
+		Parameters (void) {}
+
+		static Parameters empty (void)
+		{
+			return Parameters();
+		}
+
+		static Parameters singleDescriptorSet (const DescriptorSetLayout::Parameters& descriptorSetLayout)
+		{
+			Parameters params;
+			params.descriptorSetLayouts.push_back(descriptorSetLayout);
+			return params;
+		}
+	};
+
+	struct Resources
+	{
+		typedef SharedPtr<Dependency<DescriptorSetLayout> >	DescriptorSetLayoutDepSp;
+		typedef vector<DescriptorSetLayoutDepSp>			DescriptorSetLayouts;
+
+		DescriptorSetLayouts			descriptorSetLayouts;
+		vector<VkDescriptorSetLayout>	pSetLayouts;
+
+		Resources (const Environment& env, const Parameters& params)
+		{
+			for (vector<DescriptorSetLayout::Parameters>::const_iterator dsParams = params.descriptorSetLayouts.begin();
+				 dsParams != params.descriptorSetLayouts.end();
+				 ++dsParams)
+			{
+				descriptorSetLayouts.push_back(DescriptorSetLayoutDepSp(new Dependency<DescriptorSetLayout>(env, *dsParams)));
+				pSetLayouts.push_back(*descriptorSetLayouts.back()->object);
+			}
+		}
+	};
+
+	static Move<VkPipelineLayout> create (const Environment& env, const Resources& res, const Parameters& params)
+	{
+		const VkPipelineLayoutCreateInfo	pipelineLayoutInfo	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+			DE_NULL,
+			(VkPipelineLayoutCreateFlags)0,
+			(deUint32)res.pSetLayouts.size(),
+			(res.pSetLayouts.empty() ? DE_NULL : &res.pSetLayouts[0]),
+			(deUint32)params.pushConstantRanges.size(),
+			(params.pushConstantRanges.empty() ? DE_NULL : &params.pushConstantRanges[0]),
+		};
+
+		return createPipelineLayout(env.vkd, env.device, &pipelineLayoutInfo, env.allocationCallbacks);
+	}
+};
+
+struct RenderPass
+{
+	typedef VkRenderPass Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	// \todo [2015-09-17 pyry] More interesting configurations
+	struct Parameters
+	{
+		Parameters (void) {}
+	};
+
+	struct Resources
+	{
+		Resources (const Environment&, const Parameters&) {}
+	};
+
+	static Move<VkRenderPass> create (const Environment& env, const Resources&, const Parameters&)
+	{
+		const VkAttachmentDescription	attachments[]		=
+		{
+			{
+				(VkAttachmentDescriptionFlags)0,
+				VK_FORMAT_R8G8B8A8_UNORM,
+				VK_SAMPLE_COUNT_1_BIT,
+				VK_ATTACHMENT_LOAD_OP_CLEAR,
+				VK_ATTACHMENT_STORE_OP_STORE,
+				VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+				VK_ATTACHMENT_STORE_OP_DONT_CARE,
+				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+			},
+			{
+				(VkAttachmentDescriptionFlags)0,
+				VK_FORMAT_D16_UNORM,
+				VK_SAMPLE_COUNT_1_BIT,
+				VK_ATTACHMENT_LOAD_OP_CLEAR,
+				VK_ATTACHMENT_STORE_OP_DONT_CARE,
+				VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+				VK_ATTACHMENT_STORE_OP_DONT_CARE,
+				VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+				VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+			}
+		};
+		const VkAttachmentReference		colorAttachments[]	=
+		{
+			{
+				0u,											// attachment
+				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+			}
+		};
+		const VkAttachmentReference		dsAttachment		=
+		{
+			1u,											// attachment
+			VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
+		};
+		const VkSubpassDescription		subpasses[]			=
+		{
+			{
+				(VkSubpassDescriptionFlags)0,
+				VK_PIPELINE_BIND_POINT_GRAPHICS,
+				0u,											// inputAttachmentCount
+				DE_NULL,									// pInputAttachments
+				DE_LENGTH_OF_ARRAY(colorAttachments),
+				colorAttachments,
+				DE_NULL,									// pResolveAttachments
+				&dsAttachment,
+				0u,											// preserveAttachmentCount
+				DE_NULL,									// pPreserveAttachments
+			}
+		};
+		const VkRenderPassCreateInfo	renderPassInfo		=
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+			DE_NULL,
+			(VkRenderPassCreateFlags)0,
+			DE_LENGTH_OF_ARRAY(attachments),
+			attachments,
+			DE_LENGTH_OF_ARRAY(subpasses),
+			subpasses,
+			0u,												// dependencyCount
+			DE_NULL											// pDependencies
+		};
+
+		return createRenderPass(env.vkd, env.device, &renderPassInfo, env.allocationCallbacks);
+	}
+};
+
+struct GraphicsPipeline
+{
+	typedef VkPipeline Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		// \todo [2016-01-26 pyry] Scale this based on pipeline memory usage and available system memory
+		return 256;
+	}
+
+	// \todo [2015-09-17 pyry] More interesting configurations
+	struct Parameters
+	{
+		Parameters (void) {}
+	};
+
+	struct Resources
+	{
+		Dependency<ShaderModule>	vertexShader;
+		Dependency<ShaderModule>	fragmentShader;
+		Dependency<PipelineLayout>	layout;
+		Dependency<RenderPass>		renderPass;
+		Dependency<PipelineCache>	pipelineCache;
+
+		Resources (const Environment& env, const Parameters&)
+			: vertexShader		(env, ShaderModule::Parameters(VK_SHADER_STAGE_VERTEX_BIT, "vert"))
+			, fragmentShader	(env, ShaderModule::Parameters(VK_SHADER_STAGE_FRAGMENT_BIT, "frag"))
+			, layout			(env, PipelineLayout::Parameters::singleDescriptorSet(
+										DescriptorSetLayout::Parameters::single(0u, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1u, VK_SHADER_STAGE_FRAGMENT_BIT, true)))
+			, renderPass		(env, RenderPass::Parameters())
+			, pipelineCache		(env, PipelineCache::Parameters())
+		{}
+	};
+
+	static void initPrograms (SourceCollections& dst, Parameters)
+	{
+		ShaderModule::initPrograms(dst, ShaderModule::Parameters(VK_SHADER_STAGE_VERTEX_BIT, "vert"));
+		ShaderModule::initPrograms(dst, ShaderModule::Parameters(VK_SHADER_STAGE_FRAGMENT_BIT, "frag"));
+	}
+
+	static Move<VkPipeline> create (const Environment& env, const Resources& res, const Parameters&)
+	{
+		const VkPipelineShaderStageCreateInfo			stages[]			=
+		{
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+				DE_NULL,
+				(VkPipelineShaderStageCreateFlags)0,
+				VK_SHADER_STAGE_VERTEX_BIT,
+				*res.vertexShader.object,
+				"main",
+				DE_NULL,							// pSpecializationInfo
+			},
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+				DE_NULL,
+				(VkPipelineShaderStageCreateFlags)0,
+				VK_SHADER_STAGE_FRAGMENT_BIT,
+				*res.fragmentShader.object,
+				"main",
+				DE_NULL,							// pSpecializationInfo
+			}
+		};
+		const VkVertexInputBindingDescription			vertexBindings[]	=
+		{
+			{
+				0u,									// binding
+				16u,								// stride
+				VK_VERTEX_INPUT_RATE_VERTEX
+			}
+		};
+		const VkVertexInputAttributeDescription			vertexAttribs[]		=
+		{
+			{
+				0u,									// location
+				0u,									// binding
+				VK_FORMAT_R32G32B32A32_SFLOAT,
+				0u,									// offset
+			}
+		};
+		const VkPipelineVertexInputStateCreateInfo		vertexInputState	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
+			DE_NULL,
+			(VkPipelineVertexInputStateCreateFlags)0,
+			DE_LENGTH_OF_ARRAY(vertexBindings),
+			vertexBindings,
+			DE_LENGTH_OF_ARRAY(vertexAttribs),
+			vertexAttribs
+		};
+		const VkPipelineInputAssemblyStateCreateInfo	inputAssemblyState	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
+			DE_NULL,
+			(VkPipelineInputAssemblyStateCreateFlags)0,
+			VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
+			VK_FALSE								// primitiveRestartEnable
+		};
+		const VkViewport								viewports[]			=
+		{
+			{ 0.0f, 0.0f, 64.0f, 64.0f, 0.0f, 1.0f }
+		};
+		const VkRect2D									scissors[]			=
+		{
+			{ { 0, 0 }, { 64, 64 } }
+		};
+		const VkPipelineViewportStateCreateInfo			viewportState		=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+			DE_NULL,
+			(VkPipelineViewportStateCreateFlags)0,
+			DE_LENGTH_OF_ARRAY(viewports),
+			viewports,
+			DE_LENGTH_OF_ARRAY(scissors),
+			scissors,
+		};
+		const VkPipelineRasterizationStateCreateInfo	rasterState			=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
+			DE_NULL,
+			(VkPipelineRasterizationStateCreateFlags)0,
+			VK_TRUE,								// depthClampEnable
+			VK_FALSE,								// rasterizerDiscardEnable
+			VK_POLYGON_MODE_FILL,
+			VK_CULL_MODE_BACK_BIT,
+			VK_FRONT_FACE_COUNTER_CLOCKWISE,
+			VK_FALSE,								// depthBiasEnable
+			0.0f,									// depthBiasConstantFactor
+			0.0f,									// depthBiasClamp
+			0.0f,									// depthBiasSlopeFactor
+			1.0f,									// lineWidth
+		};
+		const VkPipelineMultisampleStateCreateInfo		multisampleState	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
+			DE_NULL,
+			(VkPipelineMultisampleStateCreateFlags)0,
+			VK_SAMPLE_COUNT_1_BIT,
+			VK_FALSE,								// sampleShadingEnable
+			1.0f,									// minSampleShading
+			DE_NULL,								// pSampleMask
+			VK_FALSE,								// alphaToCoverageEnable
+			VK_FALSE,								// alphaToOneEnable
+		};
+		const VkPipelineDepthStencilStateCreateInfo		depthStencilState	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
+			DE_NULL,
+			(VkPipelineDepthStencilStateCreateFlags)0,
+			VK_TRUE,								// depthTestEnable
+			VK_TRUE,								// depthWriteEnable
+			VK_COMPARE_OP_LESS,						// depthCompareOp
+			VK_FALSE,								// depthBoundsTestEnable
+			VK_FALSE,								// stencilTestEnable
+			{ VK_STENCIL_OP_KEEP, VK_STENCIL_OP_KEEP, VK_STENCIL_OP_KEEP, VK_COMPARE_OP_ALWAYS, 0u, 0u, 0u },
+			{ VK_STENCIL_OP_KEEP, VK_STENCIL_OP_KEEP, VK_STENCIL_OP_KEEP, VK_COMPARE_OP_ALWAYS, 0u, 0u, 0u },
+			0.0f,									// minDepthBounds
+			1.0f,									// maxDepthBounds
+		};
+		const VkPipelineColorBlendAttachmentState		colorBlendAttState[]=
+		{
+			{
+				VK_FALSE,							// blendEnable
+				VK_BLEND_FACTOR_ONE,
+				VK_BLEND_FACTOR_ZERO,
+				VK_BLEND_OP_ADD,
+				VK_BLEND_FACTOR_ONE,
+				VK_BLEND_FACTOR_ZERO,
+				VK_BLEND_OP_ADD,
+				VK_COLOR_COMPONENT_R_BIT|VK_COLOR_COMPONENT_G_BIT|VK_COLOR_COMPONENT_B_BIT|VK_COLOR_COMPONENT_A_BIT
+			}
+		};
+		const VkPipelineColorBlendStateCreateInfo		colorBlendState		=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
+			DE_NULL,
+			(VkPipelineColorBlendStateCreateFlags)0,
+			VK_FALSE,								// logicOpEnable
+			VK_LOGIC_OP_COPY,
+			DE_LENGTH_OF_ARRAY(colorBlendAttState),
+			colorBlendAttState,
+			{ 0.0f, 0.0f, 0.0f, 0.0f }				// blendConstants
+		};
+		const VkPipelineDynamicStateCreateInfo			dynamicState		=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
+			DE_NULL,
+			(VkPipelineDynamicStateCreateFlags)0,
+			0u,										// dynamicStateCount
+			DE_NULL,								// pDynamicStates
+		};
+		const VkGraphicsPipelineCreateInfo				pipelineInfo		=
+		{
+			VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+			DE_NULL,
+			(VkPipelineCreateFlags)0,
+			DE_LENGTH_OF_ARRAY(stages),
+			stages,
+			&vertexInputState,
+			&inputAssemblyState,
+			DE_NULL,								// pTessellationState
+			&viewportState,
+			&rasterState,
+			&multisampleState,
+			&depthStencilState,
+			&colorBlendState,
+			&dynamicState,
+			*res.layout.object,
+			*res.renderPass.object,
+			0u,										// subpass
+			(VkPipeline)0,							// basePipelineHandle
+			0,										// basePipelineIndex
+		};
+
+		return createGraphicsPipeline(env.vkd, env.device, *res.pipelineCache.object, &pipelineInfo, env.allocationCallbacks);
+	}
+};
+
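+// Compute variant of the pipeline wrapper: a single "comp" shader module and a
+// two-binding storage-buffer descriptor set layout, created through a PipelineCache
+// like the graphics path.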
+struct ComputePipeline
+{
+	typedef VkPipeline Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		// \todo [2016-01-26 pyry] Scale this based on pipeline memory usage and available system memory
+		return 256;
+	}
+
+	// \todo [2015-09-17 pyry] More interesting configurations
+	struct Parameters
+	{
+		Parameters (void) {}
+	};
+
+	struct Resources
+	{
+		Dependency<ShaderModule>	shaderModule;
+		Dependency<PipelineLayout>	layout;
+		Dependency<PipelineCache>	pipelineCache;
+
+		static DescriptorSetLayout::Parameters getDescriptorSetLayout (void)
+		{
+			typedef DescriptorSetLayout::Parameters::Binding Binding;
+
+			vector<Binding> bindings;
+
+			bindings.push_back(Binding(0, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1u, VK_SHADER_STAGE_COMPUTE_BIT, false));
+			bindings.push_back(Binding(1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1u, VK_SHADER_STAGE_COMPUTE_BIT, false));
+
+			return DescriptorSetLayout::Parameters(bindings);
+		}
+
+		Resources (const Environment& env, const Parameters&)
+			: shaderModule		(env, ShaderModule::Parameters(VK_SHADER_STAGE_COMPUTE_BIT, "comp"))
+			, layout			(env, PipelineLayout::Parameters::singleDescriptorSet(getDescriptorSetLayout()))
+			, pipelineCache		(env, PipelineCache::Parameters())
+		{}
+	};
+
+	static void initPrograms (SourceCollections& dst, Parameters)
+	{
+		ShaderModule::initPrograms(dst, ShaderModule::Parameters(VK_SHADER_STAGE_COMPUTE_BIT, "comp"));
+	}
+
+	static Move<VkPipeline> create (const Environment& env, const Resources& res, const Parameters&)
+	{
+		const VkComputePipelineCreateInfo	pipelineInfo	=
+		{
+			VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+			DE_NULL,
+			(VkPipelineCreateFlags)0,
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+				DE_NULL,
+				(VkPipelineShaderStageCreateFlags)0,
+				VK_SHADER_STAGE_COMPUTE_BIT,
+				*res.shaderModule.object,
+				"main",
+				DE_NULL					// pSpecializationInfo
+			},
+			*res.layout.object,
+			(VkPipeline)0,				// basePipelineHandle
+			0u,							// basePipelineIndex
+		};
+
+		return createComputePipeline(env.vkd, env.device, *res.pipelineCache.object, &pipelineInfo, env.allocationCallbacks);
+	}
+};
+
+struct DescriptorPool
+{
+	typedef VkDescriptorPool Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		VkDescriptorPoolCreateFlags		flags;
+		deUint32						maxSets;
+		vector<VkDescriptorPoolSize>	poolSizes;
+
+		Parameters (VkDescriptorPoolCreateFlags				flags_,
+					deUint32								maxSets_,
+					const vector<VkDescriptorPoolSize>&		poolSizes_)
+			: flags		(flags_)
+			, maxSets	(maxSets_)
+			, poolSizes	(poolSizes_)
+		{}
+
+		static Parameters singleType (VkDescriptorPoolCreateFlags	flags,
+									  deUint32						maxSets,
+									  VkDescriptorType				type,
+									  deUint32						count)
+		{
+			vector<VkDescriptorPoolSize> poolSizes;
+			poolSizes.push_back(makeDescriptorPoolSize(type, count));
+			return Parameters(flags, maxSets, poolSizes);
+		}
+	};
+
+	struct Resources
+	{
+		Resources (const Environment&, const Parameters&) {}
+	};
+
+	static Move<VkDescriptorPool> create (const Environment& env, const Resources&, const Parameters& params)
+	{
+		const VkDescriptorPoolCreateInfo	descriptorPoolInfo	=
+		{
+			VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+			DE_NULL,
+			params.flags,
+			params.maxSets,
+			(deUint32)params.poolSizes.size(),
+			(params.poolSizes.empty() ? DE_NULL : &params.poolSizes[0])
+		};
+
+		return createDescriptorPool(env.vkd, env.device, &descriptorPoolInfo, env.allocationCallbacks);
+	}
+};
+
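+// DescriptorSet allocates from a pool whose sizes are derived from the requested set
+// layout: computePoolSizes() multiplies each binding's descriptor count by
+// env.maxResourceConsumers so the maximum number of concurrently live sets fits in one
+// shared pool (created with FREE_DESCRIPTOR_SET_BIT to allow individual frees).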
+struct DescriptorSet
+{
+	typedef VkDescriptorSet Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		DescriptorSetLayout::Parameters	descriptorSetLayout;
+
+		Parameters (const DescriptorSetLayout::Parameters& descriptorSetLayout_)
+			: descriptorSetLayout(descriptorSetLayout_)
+		{}
+	};
+
+	struct Resources
+	{
+		Dependency<DescriptorPool>		descriptorPool;
+		Dependency<DescriptorSetLayout>	descriptorSetLayout;
+
+		static vector<VkDescriptorPoolSize> computePoolSizes (const DescriptorSetLayout::Parameters& layout, int maxSets)
+		{
+			deUint32						countByType[VK_DESCRIPTOR_TYPE_LAST];
+			vector<VkDescriptorPoolSize>	typeCounts;
+
+			std::fill(DE_ARRAY_BEGIN(countByType), DE_ARRAY_END(countByType), 0u);
+
+			for (vector<DescriptorSetLayout::Parameters::Binding>::const_iterator cur = layout.bindings.begin();
+				 cur != layout.bindings.end();
+				 ++cur)
+			{
+				DE_ASSERT((deUint32)cur->descriptorType < VK_DESCRIPTOR_TYPE_LAST);
+				countByType[cur->descriptorType] += cur->descriptorCount * maxSets;
+			}
+
+			for (deUint32 type = 0; type < VK_DESCRIPTOR_TYPE_LAST; ++type)
+			{
+				if (countByType[type] > 0)
+					typeCounts.push_back(makeDescriptorPoolSize((VkDescriptorType)type, countByType[type]));
+			}
+
+			return typeCounts;
+		}
+
+		Resources (const Environment& env, const Parameters& params)
+			: descriptorPool		(env, DescriptorPool::Parameters(VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, env.maxResourceConsumers, computePoolSizes(params.descriptorSetLayout, env.maxResourceConsumers)))
+			, descriptorSetLayout	(env, params.descriptorSetLayout)
+		{
+		}
+	};
+
+	static Move<VkDescriptorSet> create (const Environment& env, const Resources& res, const Parameters&)
+	{
+		const VkDescriptorSetAllocateInfo	allocateInfo	=
+		{
+			VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+			DE_NULL,
+			*res.descriptorPool.object,
+			1u,
+			&res.descriptorSetLayout.object.get(),
+		};
+
+		return allocateDescriptorSet(env.vkd, env.device, &allocateInfo);
+	}
+};
+
+struct Framebuffer
+{
+	typedef VkFramebuffer Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		Parameters (void)
+		{}
+	};
+
+	struct Resources
+	{
+		Dependency<ImageView>	colorAttachment;
+		Dependency<ImageView>	depthStencilAttachment;
+		Dependency<RenderPass>	renderPass;
+
+		Resources (const Environment& env, const Parameters&)
+			: colorAttachment			(env, ImageView::Parameters(Image::Parameters(0u, VK_IMAGE_TYPE_2D, VK_FORMAT_R8G8B8A8_UNORM,
+																					  makeExtent3D(256, 256, 1),
+																					  1u, 1u,
+																					  VK_SAMPLE_COUNT_1_BIT,
+																					  VK_IMAGE_TILING_OPTIMAL,
+																					  VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+																					  VK_IMAGE_LAYOUT_UNDEFINED),
+																		 VK_IMAGE_VIEW_TYPE_2D, VK_FORMAT_R8G8B8A8_UNORM,
+																		 makeComponentMappingRGBA(),
+																		 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u)))
+			, depthStencilAttachment	(env, ImageView::Parameters(Image::Parameters(0u, VK_IMAGE_TYPE_2D, VK_FORMAT_D16_UNORM,
+																					  makeExtent3D(256, 256, 1),
+																					  1u, 1u,
+																					  VK_SAMPLE_COUNT_1_BIT,
+																					  VK_IMAGE_TILING_OPTIMAL,
+																					  VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+																					  VK_IMAGE_LAYOUT_UNDEFINED),
+																		 VK_IMAGE_VIEW_TYPE_2D, VK_FORMAT_D16_UNORM,
+																		 makeComponentMappingRGBA(),
+																		 makeImageSubresourceRange(VK_IMAGE_ASPECT_DEPTH_BIT, 0u, 1u, 0u, 1u)))
+			, renderPass				(env, RenderPass::Parameters())
+		{}
+	};
+
+	static Move<VkFramebuffer> create (const Environment& env, const Resources& res, const Parameters&)
+	{
+		const VkImageView				attachments[]	=
+		{
+			*res.colorAttachment.object,
+			*res.depthStencilAttachment.object,
+		};
+		const VkFramebufferCreateInfo	framebufferInfo	=
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
+			DE_NULL,
+			(VkFramebufferCreateFlags)0,
+			*res.renderPass.object,
+			(deUint32)DE_LENGTH_OF_ARRAY(attachments),
+			attachments,
+			256u,										// width
+			256u,										// height
+			1u											// layers
+		};
+
+		return createFramebuffer(env.vkd, env.device, &framebufferInfo, env.allocationCallbacks);
+	}
+};
+
+struct CommandPool
+{
+	typedef VkCommandPool Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		VkCommandPoolCreateFlags	flags;
+
+		Parameters (VkCommandPoolCreateFlags flags_)
+			: flags(flags_)
+		{}
+	};
+
+	struct Resources
+	{
+		Resources (const Environment&, const Parameters&) {}
+	};
+
+	static Move<VkCommandPool> create (const Environment& env, const Resources&, const Parameters& params)
+	{
+		const VkCommandPoolCreateInfo	cmdPoolInfo	=
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+			DE_NULL,
+			params.flags,
+			env.queueFamilyIndex,
+		};
+
+		return createCommandPool(env.vkd, env.device, &cmdPoolInfo, env.allocationCallbacks);
+	}
+};
+
+struct CommandBuffer
+{
+	typedef VkCommandBuffer Type;
+
+	static deUint32 getMaxConcurrent (Context&)
+	{
+		return DEFAULT_MAX_CONCURRENT_OBJECTS;
+	}
+
+	struct Parameters
+	{
+		CommandPool::Parameters		commandPool;
+		VkCommandBufferLevel		level;
+
+		Parameters (const CommandPool::Parameters&	commandPool_,
+					VkCommandBufferLevel			level_)
+			: commandPool	(commandPool_)
+			, level			(level_)
+		{}
+	};
+
+	struct Resources
+	{
+		Dependency<CommandPool>	commandPool;
+
+		Resources (const Environment& env, const Parameters& params)
+			: commandPool(env, params.commandPool)
+		{}
+	};
+
+	static Move<VkCommandBuffer> create (const Environment& env, const Resources& res, const Parameters& params)
+	{
+		const VkCommandBufferAllocateInfo	cmdBufferInfo	=
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+			DE_NULL,
+			*res.commandPool.object,
+			params.level,
+			1,							// bufferCount
+		};
+
+		return allocateCommandBuffer(env.vkd, env.device, &cmdBufferInfo);
+	}
+};
+
+// Test cases
+
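+// Each test below is a function template parameterized by one of the object wrappers
+// above. The Environment is constructed with the number of concurrent object users,
+// Object::Resources builds any dependency objects, and Object::create() constructs the
+// object under test, wrapped in Unique<> so it is destroyed before the test returns.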
+template<typename Object>
+tcu::TestStatus createSingleTest (Context& context, typename Object::Parameters params)
+{
+	const Environment					env	(context, 1u);
+	const typename Object::Resources	res	(env, params);
+
+	{
+		Unique<typename Object::Type>	obj	(Object::create(env, res, params));
+	}
+
+	return tcu::TestStatus::pass("Ok");
+}
+
+template<typename Object>
+tcu::TestStatus createMultipleUniqueResourcesTest (Context& context, typename Object::Parameters params)
+{
+	const Environment					env		(context, 1u);
+	const typename Object::Resources	res0	(env, params);
+	const typename Object::Resources	res1	(env, params);
+	const typename Object::Resources	res2	(env, params);
+	const typename Object::Resources	res3	(env, params);
+
+	{
+		Unique<typename Object::Type>	obj0	(Object::create(env, res0, params));
+		Unique<typename Object::Type>	obj1	(Object::create(env, res1, params));
+		Unique<typename Object::Type>	obj2	(Object::create(env, res2, params));
+		Unique<typename Object::Type>	obj3	(Object::create(env, res3, params));
+	}
+
+	return tcu::TestStatus::pass("Ok");
+}
+
+template<typename Object>
+tcu::TestStatus createMultipleSharedResourcesTest (Context& context, typename Object::Parameters params)
+{
+	const Environment					env	(context, 4u);
+	const typename Object::Resources	res	(env, params);
+
+	{
+		Unique<typename Object::Type>	obj0	(Object::create(env, res, params));
+		Unique<typename Object::Type>	obj1	(Object::create(env, res, params));
+		Unique<typename Object::Type>	obj2	(Object::create(env, res, params));
+		Unique<typename Object::Type>	obj3	(Object::create(env, res, params));
+	}
+
+	return tcu::TestStatus::pass("Ok");
+}
+
+template<typename Object>
+tcu::TestStatus createMaxConcurrentTest (Context& context, typename Object::Parameters params)
+{
+	typedef Unique<typename Object::Type>	UniqueObject;
+	typedef SharedPtr<UniqueObject>			ObjectPtr;
+
+	const deUint32						numObjects	= Object::getMaxConcurrent(context);
+	const Environment					env			(context, numObjects);
+	const typename Object::Resources	res			(env, params);
+	vector<ObjectPtr>					objects		(numObjects);
+
+	context.getTestContext().getLog()
+		<< TestLog::Message << "Creating " << numObjects << " " << getTypeName<typename Object::Type>() << "s" << TestLog::EndMessage;
+
+	for (deUint32 ndx = 0; ndx < numObjects; ndx++)
+		objects[ndx] = ObjectPtr(new UniqueObject(Object::create(env, res, params)));
+
+	objects.clear();
+
+	return tcu::TestStatus::pass("Ok");
+}
+
+template<typename Object>
+class CreateThread : public ThreadGroupThread
+{
+public:
+	CreateThread (const Environment& env, const typename Object::Resources& resources, const typename Object::Parameters& params)
+		: m_env			(env)
+		, m_resources	(resources)
+		, m_params		(params)
+	{}
+
+	void runThread (void)
+	{
+		const int	numIters			= 100;
+		const int	itersBetweenSyncs	= 20;
+
+		for (int iterNdx = 0; iterNdx < numIters; iterNdx++)
+		{
+			// Sync every Nth iteration to make entering driver at the same time more likely
+			if ((iterNdx % itersBetweenSyncs) == 0)
+				barrier();
+
+			{
+				Unique<typename Object::Type>	obj	(Object::create(m_env, m_resources, m_params));
+			}
+		}
+	}
+
+private:
+	const Environment&					m_env;
+	const typename Object::Resources&	m_resources;
+	const typename Object::Parameters&	m_params;
+};
+
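+// Multithreaded variants: threads either share a single Resources instance, get
+// per-thread Resources, or (further below) a per-thread device via EnvClone. Each
+// thread runs CreateThread, which repeatedly creates and destroys the object and
+// re-synchronizes on a barrier every few iterations to maximize concurrent driver entry.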
+template<typename Object>
+tcu::TestStatus multithreadedCreateSharedResourcesTest (Context& context, typename Object::Parameters params)
+{
+	const deUint32						numThreads	= getDefaultTestThreadCount();
+	const Environment					env			(context, numThreads);
+	const typename Object::Resources	res			(env, params);
+	ThreadGroup							threads;
+
+	for (deUint32 ndx = 0; ndx < numThreads; ndx++)
+		threads.add(MovePtr<ThreadGroupThread>(new CreateThread<Object>(env, res, params)));
+
+	return threads.run();
+}
+
+template<typename Object>
+tcu::TestStatus multithreadedCreatePerThreadResourcesTest (Context& context, typename Object::Parameters params)
+{
+	typedef SharedPtr<typename Object::Resources>	ResPtr;
+
+	const deUint32		numThreads	= getDefaultTestThreadCount();
+	const Environment	env			(context, 1u);
+	vector<ResPtr>		resources	(numThreads);
+	ThreadGroup			threads;
+
+	for (deUint32 ndx = 0; ndx < numThreads; ndx++)
+	{
+		resources[ndx] = ResPtr(new typename Object::Resources(env, params));
+		threads.add(MovePtr<ThreadGroupThread>(new CreateThread<Object>(env, *resources[ndx], params)));
+	}
+
+	return threads.run();
+}
+
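+// EnvClone creates a fresh VkDevice (and a DeviceDriver for it) from the parent
+// environment, so a test can run against an isolated device, e.g. one device per
+// thread or a device created with custom allocation callbacks.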
+struct EnvClone
+{
+	Device::Resources	deviceRes;
+	Unique<VkDevice>	device;
+	DeviceDriver		vkd;
+	Environment			env;
+
+	EnvClone (const Environment& parent, const Device::Parameters& deviceParams, deUint32 maxResourceConsumers)
+		: deviceRes	(parent, deviceParams)
+		, device	(Device::create(parent, deviceRes, deviceParams))
+		, vkd		(deviceRes.vki, *device)
+		, env		(parent.vkp, vkd, *device, deviceRes.queueFamilyIndex, parent.programBinaries, parent.allocationCallbacks, maxResourceConsumers)
+	{
+	}
+};
+
+Device::Parameters getDefaultDeviceParameters (Context& context)
+{
+	return Device::Parameters(context.getTestContext().getCommandLine().getVKDeviceId()-1u,
+							  VK_QUEUE_GRAPHICS_BIT|VK_QUEUE_COMPUTE_BIT);
+}
+
+template<typename Object>
+tcu::TestStatus multithreadedCreatePerThreadDeviceTest (Context& context, typename Object::Parameters params)
+{
+	typedef SharedPtr<EnvClone>						EnvPtr;
+	typedef SharedPtr<typename Object::Resources>	ResPtr;
+
+	const deUint32				numThreads		= getDefaultTestThreadCount();
+	const Device::Parameters	deviceParams	= getDefaultDeviceParameters(context);
+	const Environment			sharedEnv		(context, numThreads);			// For creating Device's
+	vector<EnvPtr>				perThreadEnv	(numThreads);
+	vector<ResPtr>				resources		(numThreads);
+	ThreadGroup					threads;
+
+	for (deUint32 ndx = 0; ndx < numThreads; ndx++)
+	{
+		perThreadEnv[ndx]	= EnvPtr(new EnvClone(sharedEnv, deviceParams, 1u));
+		resources[ndx]		= ResPtr(new typename Object::Resources(perThreadEnv[ndx]->env, params));
+
+		threads.add(MovePtr<ThreadGroupThread>(new CreateThread<Object>(perThreadEnv[ndx]->env, *resources[ndx], params)));
+	}
+
+	return threads.run();
+}
+
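+// Allocation callback tests: the object is created against a recording allocator and
+// the recorded allocations are validated both while the object is alive (no
+// command-scope allocations may be live) and after destruction (no live allocations
+// at all, for the object callbacks as well as the resource callbacks).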
+template<typename Object>
+tcu::TestStatus createSingleAllocCallbacksTest (Context& context, typename Object::Parameters params)
+{
+	const deUint32						noCmdScope		= (1u << VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE)
+														| (1u << VK_SYSTEM_ALLOCATION_SCOPE_DEVICE)
+														| (1u << VK_SYSTEM_ALLOCATION_SCOPE_CACHE)
+														| (1u << VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
+
+	// Callbacks used by resources
+	AllocationCallbackRecorder			resCallbacks	(getSystemAllocator(), 128);
+
+	// Root environment still uses default instance and device, created without callbacks
+	const Environment					rootEnv			(context.getPlatformInterface(),
+														 context.getDeviceInterface(),
+														 context.getDevice(),
+														 context.getUniversalQueueFamilyIndex(),
+														 context.getBinaryCollection(),
+														 resCallbacks.getCallbacks(),
+														 1u);
+
+	{
+		// Test env has instance & device created with callbacks
+		const EnvClone						resEnv		(rootEnv, getDefaultDeviceParameters(context), 1u);
+		const typename Object::Resources	res			(resEnv.env, params);
+
+		// Supply a separate callback recorder just for object construction
+		AllocationCallbackRecorder			objCallbacks(getSystemAllocator(), 128);
+		const Environment					objEnv		(resEnv.env.vkp,
+														 resEnv.env.vkd,
+														 resEnv.env.device,
+														 resEnv.env.queueFamilyIndex,
+														 resEnv.env.programBinaries,
+														 objCallbacks.getCallbacks(),
+														 resEnv.env.maxResourceConsumers);
+
+		{
+			Unique<typename Object::Type>	obj	(Object::create(objEnv, res, params));
+
+			// Validate that no command-level allocations are live
+			if (!validateAndLog(context.getTestContext().getLog(), objCallbacks, noCmdScope))
+				return tcu::TestStatus::fail("Invalid allocation callback");
+		}
+
+		// At this point all allocations made against object callbacks must have been freed
+		if (!validateAndLog(context.getTestContext().getLog(), objCallbacks, 0u))
+			return tcu::TestStatus::fail("Invalid allocation callback");
+	}
+
+	if (!validateAndLog(context.getTestContext().getLog(), resCallbacks, 0u))
+		return tcu::TestStatus::fail("Invalid allocation callback");
+
+	return tcu::TestStatus::pass("Ok");
+}
+
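+// Simulated out-of-host-memory test: DeterministicFailAllocator lets the first
+// numPassingAllocs host allocations succeed and fails the rest. The loop increases
+// that count until object creation succeeds, checking on every iteration that a failed
+// creation returns VK_ERROR_OUT_OF_HOST_MEMORY and leaves no live allocations behind.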
+template<typename Object>
+tcu::TestStatus allocCallbackFailTest (Context& context, typename Object::Parameters params)
+{
+	AllocationCallbackRecorder			resCallbacks	(getSystemAllocator(), 128);
+	const Environment					rootEnv			(context.getPlatformInterface(),
+														 context.getDeviceInterface(),
+														 context.getDevice(),
+														 context.getUniversalQueueFamilyIndex(),
+														 context.getBinaryCollection(),
+														 resCallbacks.getCallbacks(),
+														 1u);
+
+	{
+		const EnvClone						resEnv				(rootEnv, getDefaultDeviceParameters(context), 1u);
+		const typename Object::Resources	res					(resEnv.env, params);
+		deUint32							numPassingAllocs	= 0;
+		const deUint32						maxTries			= 1u<<10;
+
+		// Iterate over test until object allocation succeeds
+		for (; numPassingAllocs < maxTries; ++numPassingAllocs)
+		{
+			DeterministicFailAllocator			objAllocator(getSystemAllocator(), numPassingAllocs);
+			AllocationCallbackRecorder			recorder	(objAllocator.getCallbacks(), 128);
+			const Environment					objEnv		(resEnv.env.vkp,
+															 resEnv.env.vkd,
+															 resEnv.env.device,
+															 resEnv.env.queueFamilyIndex,
+															 resEnv.env.programBinaries,
+															 recorder.getCallbacks(),
+															 resEnv.env.maxResourceConsumers);
+			bool								createOk	= false;
+
+			context.getTestContext().getLog()
+				<< TestLog::Message
+				<< "Trying to create object with " << numPassingAllocs << " allocation" << (numPassingAllocs != 1 ? "s" : "") << " passing"
+				<< TestLog::EndMessage;
+
+			try
+			{
+				Unique<typename Object::Type>	obj	(Object::create(objEnv, res, params));
+				createOk = true;
+			}
+			catch (const vk::OutOfMemoryError& e)
+			{
+				if (e.getError() != VK_ERROR_OUT_OF_HOST_MEMORY)
+				{
+					context.getTestContext().getLog() << e;
+					return tcu::TestStatus::fail("Got invalid error code");
+				}
+			}
+
+			if (!validateAndLog(context.getTestContext().getLog(), recorder, 0u))
+				return tcu::TestStatus::fail("Invalid allocation callback");
+
+			if (createOk)
+			{
+				context.getTestContext().getLog()
+					<< TestLog::Message << "Object construction succeeded!" << TestLog::EndMessage;
+				break;
+			}
+		}
+	}
+
+	if (!validateAndLog(context.getTestContext().getLog(), resCallbacks, 0u))
+		return tcu::TestStatus::fail("Invalid allocation callback");
+
+	return tcu::TestStatus::pass("Ok");
+}
+
+// Utilities for creating groups
+
+template<typename Object>
+struct NamedParameters
+{
+	const char*						name;
+	typename Object::Parameters		parameters;
+};
+
+template<typename Object>
+struct CaseDescription
+{
+	typename FunctionInstance1<typename Object::Parameters>::Function	function;
+	const NamedParameters<Object>*										paramsBegin;
+	const NamedParameters<Object>*										paramsEnd;
+};
+
+#define EMPTY_CASE_DESC(OBJECT)	\
+	{ (FunctionInstance1<OBJECT::Parameters>::Function)DE_NULL, DE_NULL, DE_NULL }
+
+#define CASE_DESC(FUNCTION, CASES)	\
+	{ FUNCTION, DE_ARRAY_BEGIN(CASES), DE_ARRAY_END(CASES)	}
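+// For example, CASE_DESC(createSingleTest<Fence>, s_fenceCases) expands to
+// { createSingleTest<Fence>, DE_ARRAY_BEGIN(s_fenceCases), DE_ARRAY_END(s_fenceCases) },
+// i.e. each CaseDescription pairs a test function with its parameter table, while
+// EMPTY_CASE_DESC() marks object types that are skipped in a given group.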
+
+struct CaseDescriptions
+{
+	CaseDescription<Instance>				instance;
+	CaseDescription<Device>					device;
+	CaseDescription<DeviceMemory>			deviceMemory;
+	CaseDescription<Buffer>					buffer;
+	CaseDescription<BufferView>				bufferView;
+	CaseDescription<Image>					image;
+	CaseDescription<ImageView>				imageView;
+	CaseDescription<Semaphore>				semaphore;
+	CaseDescription<Event>					event;
+	CaseDescription<Fence>					fence;
+	CaseDescription<QueryPool>				queryPool;
+	CaseDescription<ShaderModule>			shaderModule;
+	CaseDescription<PipelineCache>			pipelineCache;
+	CaseDescription<PipelineLayout>			pipelineLayout;
+	CaseDescription<RenderPass>				renderPass;
+	CaseDescription<GraphicsPipeline>		graphicsPipeline;
+	CaseDescription<ComputePipeline>		computePipeline;
+	CaseDescription<DescriptorSetLayout>	descriptorSetLayout;
+	CaseDescription<Sampler>				sampler;
+	CaseDescription<DescriptorPool>			descriptorPool;
+	CaseDescription<DescriptorSet>			descriptorSet;
+	CaseDescription<Framebuffer>			framebuffer;
+	CaseDescription<CommandPool>			commandPool;
+	CaseDescription<CommandBuffer>			commandBuffer;
+};
+
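+// addCases() registers one test case per named parameter set; addCasesWithProgs() is
+// used for object types whose construction needs SPIR-V programs (shader modules and
+// pipelines) and additionally registers Object::initPrograms as the program generator.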
+template<typename Object>
+void addCases (const MovePtr<tcu::TestCaseGroup>& group, const CaseDescription<Object>& cases)
+{
+	for (const NamedParameters<Object>* cur = cases.paramsBegin; cur != cases.paramsEnd; ++cur)
+		addFunctionCase(group.get(), cur->name, "", cases.function, cur->parameters);
+}
+
+template<typename Object>
+void addCasesWithProgs (const MovePtr<tcu::TestCaseGroup>& group, const CaseDescription<Object>& cases)
+{
+	for (const NamedParameters<Object>* cur = cases.paramsBegin; cur != cases.paramsEnd; ++cur)
+		addFunctionCaseWithPrograms(group.get(), cur->name, "", Object::initPrograms, cases.function, cur->parameters);
+}
+
+tcu::TestCaseGroup* createGroup (tcu::TestContext& testCtx, const char* name, const char* desc, const CaseDescriptions& cases)
+{
+	MovePtr<tcu::TestCaseGroup>	group	(new tcu::TestCaseGroup(testCtx, name, desc));
+
+	addCases			(group, cases.instance);
+	addCases			(group, cases.device);
+	addCases			(group, cases.deviceMemory);
+	addCases			(group, cases.buffer);
+	addCases			(group, cases.bufferView);
+	addCases			(group, cases.image);
+	addCases			(group, cases.imageView);
+	addCases			(group, cases.semaphore);
+	addCases			(group, cases.event);
+	addCases			(group, cases.fence);
+	addCases			(group, cases.queryPool);
+	addCases			(group, cases.sampler);
+	addCasesWithProgs	(group, cases.shaderModule);
+	addCases			(group, cases.pipelineCache);
+	addCases			(group, cases.pipelineLayout);
+	addCases			(group, cases.renderPass);
+	addCasesWithProgs	(group, cases.graphicsPipeline);
+	addCasesWithProgs	(group, cases.computePipeline);
+	addCases			(group, cases.descriptorSetLayout);
+	addCases			(group, cases.descriptorPool);
+	addCases			(group, cases.descriptorSet);
+	addCases			(group, cases.framebuffer);
+	addCases			(group, cases.commandPool);
+	addCases			(group, cases.commandBuffer);
+
+	return group.release();
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createObjectManagementTests (tcu::TestContext& testCtx)
+{
+	MovePtr<tcu::TestCaseGroup>	objectMgmtTests	(new tcu::TestCaseGroup(testCtx, "object_management", "Object management tests"));
+
+	const Image::Parameters		img1D			(0u, VK_IMAGE_TYPE_1D, VK_FORMAT_R8G8B8A8_UNORM, makeExtent3D(256,   1, 1), 1u,  4u, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_LAYOUT_UNDEFINED);
+	const Image::Parameters		img2D			(0u, VK_IMAGE_TYPE_2D, VK_FORMAT_R8G8B8A8_UNORM, makeExtent3D( 64,  64, 1), 1u, 12u, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_SAMPLED_BIT|VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_LAYOUT_UNDEFINED);
+	const Image::Parameters		imgCube			(VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT, VK_IMAGE_TYPE_2D, VK_FORMAT_R8G8B8A8_UNORM, makeExtent3D( 64,  64, 1), 1u, 12u, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_SAMPLED_BIT|VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_LAYOUT_UNDEFINED);
+	const Image::Parameters		img3D			(0u, VK_IMAGE_TYPE_3D, VK_FORMAT_R8G8B8A8_UNORM, makeExtent3D( 64,  64, 4), 1u,  1u, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_LAYOUT_UNDEFINED);
+	const ImageView::Parameters	imgView1D		(img1D, VK_IMAGE_VIEW_TYPE_1D,			img1D.format, makeComponentMappingRGBA(), makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u));
+	const ImageView::Parameters	imgView1DArr	(img1D, VK_IMAGE_VIEW_TYPE_1D_ARRAY,	img1D.format, makeComponentMappingRGBA(), makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 4u));
+	const ImageView::Parameters	imgView2D		(img2D, VK_IMAGE_VIEW_TYPE_2D,			img2D.format, makeComponentMappingRGBA(), makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u));
+	const ImageView::Parameters	imgView2DArr	(img2D, VK_IMAGE_VIEW_TYPE_2D_ARRAY,	img2D.format, makeComponentMappingRGBA(), makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 8u));
+	const ImageView::Parameters	imgViewCube		(imgCube, VK_IMAGE_VIEW_TYPE_CUBE,		img2D.format, makeComponentMappingRGBA(), makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 6u));
+	const ImageView::Parameters	imgViewCubeArr	(imgCube, VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,	img2D.format, makeComponentMappingRGBA(), makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 12u));
+	const ImageView::Parameters	imgView3D		(img3D, VK_IMAGE_VIEW_TYPE_3D,			img3D.format, makeComponentMappingRGBA(), makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u));
+
+	const DescriptorSetLayout::Parameters	singleUboDescLayout	= DescriptorSetLayout::Parameters::single(0u, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1u, VK_SHADER_STAGE_VERTEX_BIT);
+
+	static const NamedParameters<Instance>			s_instanceCases[]			=
+	{
+		{ "instance",					Instance::Parameters() },
+	};
+	// \note Device index may change - must not be static
+	const NamedParameters<Device>					s_deviceCases[]				=
+	{
+		{ "device",						Device::Parameters(testCtx.getCommandLine().getVKDeviceId()-1u, VK_QUEUE_GRAPHICS_BIT)	},
+	};
+	static const NamedParameters<DeviceMemory>			s_deviceMemCases[]				=
+	{
+		{ "device_memory_small",		DeviceMemory::Parameters(1024, 0u)	},
+	};
+	static const NamedParameters<Buffer>				s_bufferCases[]					=
+	{
+		{ "buffer_uniform_small",		Buffer::Parameters(1024u,			VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT),	},
+		{ "buffer_uniform_large",		Buffer::Parameters(1024u*1024u*16u,	VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT),	},
+		{ "buffer_storage_small",		Buffer::Parameters(1024u,			VK_BUFFER_USAGE_STORAGE_BUFFER_BIT),	},
+		{ "buffer_storage_large",		Buffer::Parameters(1024u*1024u*16u,	VK_BUFFER_USAGE_STORAGE_BUFFER_BIT),	},
+	};
+	static const NamedParameters<BufferView>			s_bufferViewCases[]				=
+	{
+		{ "buffer_view_uniform_r8g8b8a8_unorm",	BufferView::Parameters(Buffer::Parameters(8192u, VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT), VK_FORMAT_R8G8B8A8_UNORM, 0u, 4096u)	},
+		{ "buffer_view_storage_r8g8b8a8_unorm",	BufferView::Parameters(Buffer::Parameters(8192u, VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT), VK_FORMAT_R8G8B8A8_UNORM, 0u, 4096u)	},
+	};
+	static const NamedParameters<Image>					s_imageCases[]					=
+	{
+		{ "image_1d",					img1D		},
+		{ "image_2d",					img2D		},
+		{ "image_3d",					img3D		},
+	};
+	static const NamedParameters<ImageView>				s_imageViewCases[]				=
+	{
+		{ "image_view_1d",				imgView1D		},
+		{ "image_view_1d_arr",			imgView1DArr	},
+		{ "image_view_2d",				imgView2D		},
+		{ "image_view_2d_arr",			imgView2DArr	},
+		{ "image_view_cube",			imgViewCube		},
+		{ "image_view_cube_arr",		imgViewCubeArr	},
+		{ "image_view_3d",				imgView3D		},
+	};
+	static const NamedParameters<Semaphore>				s_semaphoreCases[]				=
+	{
+		{ "semaphore",					Semaphore::Parameters(0u),	}
+	};
+	static const NamedParameters<Event>					s_eventCases[]					=
+	{
+		{ "event",						Event::Parameters(0u)		}
+	};
+	static const NamedParameters<Fence>					s_fenceCases[]					=
+	{
+		{ "fence",						Fence::Parameters(0u)								},
+		{ "fence_signaled",				Fence::Parameters(VK_FENCE_CREATE_SIGNALED_BIT)		}
+	};
+	static const NamedParameters<QueryPool>				s_queryPoolCases[]				=
+	{
+		{ "query_pool",					QueryPool::Parameters(VK_QUERY_TYPE_OCCLUSION, 1u, 0u)	}
+	};
+	static const NamedParameters<ShaderModule>			s_shaderModuleCases[]			=
+	{
+		{ "shader_module",				ShaderModule::Parameters(VK_SHADER_STAGE_COMPUTE_BIT, "test")	}
+	};
+	static const NamedParameters<PipelineCache>			s_pipelineCacheCases[]			=
+	{
+		{ "pipeline_cache",				PipelineCache::Parameters()		}
+	};
+	static const NamedParameters<PipelineLayout>		s_pipelineLayoutCases[]			=
+	{
+		{ "pipeline_layout_empty",		PipelineLayout::Parameters::empty()										},
+		{ "pipeline_layout_single",		PipelineLayout::Parameters::singleDescriptorSet(singleUboDescLayout)	}
+	};
+	static const NamedParameters<RenderPass>			s_renderPassCases[]				=
+	{
+		{ "render_pass",				RenderPass::Parameters()		}
+	};
+	static const NamedParameters<GraphicsPipeline>		s_graphicsPipelineCases[]		=
+	{
+		{ "graphics_pipeline",			GraphicsPipeline::Parameters()	}
+	};
+	static const NamedParameters<ComputePipeline>		s_computePipelineCases[]		=
+	{
+		{ "compute_pipeline",			ComputePipeline::Parameters()	}
+	};
+	static const NamedParameters<DescriptorSetLayout>	s_descriptorSetLayoutCases[]	=
+	{
+		{ "descriptor_set_layout_empty",	DescriptorSetLayout::Parameters::empty()	},
+		{ "descriptor_set_layout_single",	singleUboDescLayout							}
+	};
+	static const NamedParameters<Sampler>				s_samplerCases[]				=
+	{
+		{ "sampler",					Sampler::Parameters()	}
+	};
+	static const NamedParameters<DescriptorPool>		s_descriptorPoolCases[]			=
+	{
+		{ "descriptor_pool",						DescriptorPool::Parameters::singleType((VkDescriptorPoolCreateFlags)0,						4u, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 3u)	},
+		{ "descriptor_pool_free_descriptor_set",	DescriptorPool::Parameters::singleType(VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,	4u, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 3u)	}
+	};
+	static const NamedParameters<DescriptorSet>			s_descriptorSetCases[]			=
+	{
+		{ "descriptor_set",				DescriptorSet::Parameters(singleUboDescLayout)	}
+	};
+	static const NamedParameters<Framebuffer>			s_framebufferCases[]			=
+	{
+		{ "framebuffer",				Framebuffer::Parameters()	}
+	};
+	static const NamedParameters<CommandPool>			s_commandPoolCases[]			=
+	{
+		{ "command_pool",				CommandPool::Parameters((VkCommandPoolCreateFlags)0)			},
+		{ "command_pool_transient",		CommandPool::Parameters(VK_COMMAND_POOL_CREATE_TRANSIENT_BIT)	}
+	};
+	static const NamedParameters<CommandBuffer>			s_commandBufferCases[]			=
+	{
+		{ "command_buffer_primary",		CommandBuffer::Parameters(CommandPool::Parameters((VkCommandPoolCreateFlags)0u), VK_COMMAND_BUFFER_LEVEL_PRIMARY)	},
+		{ "command_buffer_secondary",	CommandBuffer::Parameters(CommandPool::Parameters((VkCommandPoolCreateFlags)0u), VK_COMMAND_BUFFER_LEVEL_SECONDARY)	}
+	};
+
+	static const CaseDescriptions	s_createSingleGroup	=
+	{
+		CASE_DESC(createSingleTest	<Instance>,					s_instanceCases),
+		CASE_DESC(createSingleTest	<Device>,					s_deviceCases),
+		CASE_DESC(createSingleTest	<DeviceMemory>,				s_deviceMemCases),
+		CASE_DESC(createSingleTest	<Buffer>,					s_bufferCases),
+		CASE_DESC(createSingleTest	<BufferView>,				s_bufferViewCases),
+		CASE_DESC(createSingleTest	<Image>,					s_imageCases),
+		CASE_DESC(createSingleTest	<ImageView>,				s_imageViewCases),
+		CASE_DESC(createSingleTest	<Semaphore>,				s_semaphoreCases),
+		CASE_DESC(createSingleTest	<Event>,					s_eventCases),
+		CASE_DESC(createSingleTest	<Fence>,					s_fenceCases),
+		CASE_DESC(createSingleTest	<QueryPool>,				s_queryPoolCases),
+		CASE_DESC(createSingleTest	<ShaderModule>,				s_shaderModuleCases),
+		CASE_DESC(createSingleTest	<PipelineCache>,			s_pipelineCacheCases),
+		CASE_DESC(createSingleTest	<PipelineLayout>,			s_pipelineLayoutCases),
+		CASE_DESC(createSingleTest	<RenderPass>,				s_renderPassCases),
+		CASE_DESC(createSingleTest	<GraphicsPipeline>,			s_graphicsPipelineCases),
+		CASE_DESC(createSingleTest	<ComputePipeline>,			s_computePipelineCases),
+		CASE_DESC(createSingleTest	<DescriptorSetLayout>,		s_descriptorSetLayoutCases),
+		CASE_DESC(createSingleTest	<Sampler>,					s_samplerCases),
+		CASE_DESC(createSingleTest	<DescriptorPool>,			s_descriptorPoolCases),
+		CASE_DESC(createSingleTest	<DescriptorSet>,			s_descriptorSetCases),
+		CASE_DESC(createSingleTest	<Framebuffer>,				s_framebufferCases),
+		CASE_DESC(createSingleTest	<CommandPool>,				s_commandPoolCases),
+		CASE_DESC(createSingleTest	<CommandBuffer>,			s_commandBufferCases),
+	};
+	objectMgmtTests->addChild(createGroup(testCtx, "single", "Create single object", s_createSingleGroup));
+
+	static const CaseDescriptions	s_createMultipleUniqueResourcesGroup	=
+	{
+		CASE_DESC(createMultipleUniqueResourcesTest	<Instance>,					s_instanceCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<Device>,					s_deviceCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<DeviceMemory>,				s_deviceMemCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<Buffer>,					s_bufferCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<BufferView>,				s_bufferViewCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<Image>,					s_imageCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<ImageView>,				s_imageViewCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<Semaphore>,				s_semaphoreCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<Event>,					s_eventCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<Fence>,					s_fenceCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<QueryPool>,				s_queryPoolCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<ShaderModule>,				s_shaderModuleCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<PipelineCache>,			s_pipelineCacheCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<PipelineLayout>,			s_pipelineLayoutCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<RenderPass>,				s_renderPassCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<GraphicsPipeline>,			s_graphicsPipelineCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<ComputePipeline>,			s_computePipelineCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<DescriptorSetLayout>,		s_descriptorSetLayoutCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<Sampler>,					s_samplerCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<DescriptorPool>,			s_descriptorPoolCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<DescriptorSet>,			s_descriptorSetCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<Framebuffer>,				s_framebufferCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<CommandPool>,				s_commandPoolCases),
+		CASE_DESC(createMultipleUniqueResourcesTest	<CommandBuffer>,			s_commandBufferCases),
+	};
+	objectMgmtTests->addChild(createGroup(testCtx, "multiple_unique_resources", "Multiple objects with per-object unique resources", s_createMultipleUniqueResourcesGroup));
+
+	static const CaseDescriptions	s_createMultipleSharedResourcesGroup	=
+	{
+		EMPTY_CASE_DESC(Instance), // No resources used
+		CASE_DESC(createMultipleSharedResourcesTest	<Device>,					s_deviceCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<DeviceMemory>,				s_deviceMemCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<Buffer>,					s_bufferCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<BufferView>,				s_bufferViewCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<Image>,					s_imageCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<ImageView>,				s_imageViewCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<Semaphore>,				s_semaphoreCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<Event>,					s_eventCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<Fence>,					s_fenceCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<QueryPool>,				s_queryPoolCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<ShaderModule>,				s_shaderModuleCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<PipelineCache>,			s_pipelineCacheCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<PipelineLayout>,			s_pipelineLayoutCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<RenderPass>,				s_renderPassCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<GraphicsPipeline>,			s_graphicsPipelineCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<ComputePipeline>,			s_computePipelineCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<DescriptorSetLayout>,		s_descriptorSetLayoutCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<Sampler>,					s_samplerCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<DescriptorPool>,			s_descriptorPoolCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<DescriptorSet>,			s_descriptorSetCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<Framebuffer>,				s_framebufferCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<CommandPool>,				s_commandPoolCases),
+		CASE_DESC(createMultipleSharedResourcesTest	<CommandBuffer>,			s_commandBufferCases),
+	};
+	objectMgmtTests->addChild(createGroup(testCtx, "multiple_shared_resources", "Multiple objects with shared resources", s_createMultipleSharedResourcesGroup));
+
+	static const CaseDescriptions	s_createMaxConcurrentGroup	=
+	{
+		CASE_DESC(createMaxConcurrentTest	<Instance>,					s_instanceCases),
+		CASE_DESC(createMaxConcurrentTest	<Device>,					s_deviceCases),
+		CASE_DESC(createMaxConcurrentTest	<DeviceMemory>,				s_deviceMemCases),
+		CASE_DESC(createMaxConcurrentTest	<Buffer>,					s_bufferCases),
+		CASE_DESC(createMaxConcurrentTest	<BufferView>,				s_bufferViewCases),
+		CASE_DESC(createMaxConcurrentTest	<Image>,					s_imageCases),
+		CASE_DESC(createMaxConcurrentTest	<ImageView>,				s_imageViewCases),
+		CASE_DESC(createMaxConcurrentTest	<Semaphore>,				s_semaphoreCases),
+		CASE_DESC(createMaxConcurrentTest	<Event>,					s_eventCases),
+		CASE_DESC(createMaxConcurrentTest	<Fence>,					s_fenceCases),
+		CASE_DESC(createMaxConcurrentTest	<QueryPool>,				s_queryPoolCases),
+		CASE_DESC(createMaxConcurrentTest	<ShaderModule>,				s_shaderModuleCases),
+		CASE_DESC(createMaxConcurrentTest	<PipelineCache>,			s_pipelineCacheCases),
+		CASE_DESC(createMaxConcurrentTest	<PipelineLayout>,			s_pipelineLayoutCases),
+		CASE_DESC(createMaxConcurrentTest	<RenderPass>,				s_renderPassCases),
+		CASE_DESC(createMaxConcurrentTest	<GraphicsPipeline>,			s_graphicsPipelineCases),
+		CASE_DESC(createMaxConcurrentTest	<ComputePipeline>,			s_computePipelineCases),
+		CASE_DESC(createMaxConcurrentTest	<DescriptorSetLayout>,		s_descriptorSetLayoutCases),
+		CASE_DESC(createMaxConcurrentTest	<Sampler>,					s_samplerCases),
+		CASE_DESC(createMaxConcurrentTest	<DescriptorPool>,			s_descriptorPoolCases),
+		CASE_DESC(createMaxConcurrentTest	<DescriptorSet>,			s_descriptorSetCases),
+		CASE_DESC(createMaxConcurrentTest	<Framebuffer>,				s_framebufferCases),
+		CASE_DESC(createMaxConcurrentTest	<CommandPool>,				s_commandPoolCases),
+		CASE_DESC(createMaxConcurrentTest	<CommandBuffer>,			s_commandBufferCases),
+	};
+	objectMgmtTests->addChild(createGroup(testCtx, "max_concurrent", "Maximum number of concurrently live objects", s_createMaxConcurrentGroup));
+
+	static const CaseDescriptions	s_multithreadedCreatePerThreadDeviceGroup	=
+	{
+		EMPTY_CASE_DESC(Instance),	// Does not make sense
+		EMPTY_CASE_DESC(Device),	// Does not make sense
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<DeviceMemory>,				s_deviceMemCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<Buffer>,					s_bufferCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<BufferView>,				s_bufferViewCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<Image>,					s_imageCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<ImageView>,				s_imageViewCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<Semaphore>,				s_semaphoreCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<Event>,					s_eventCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<Fence>,					s_fenceCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<QueryPool>,				s_queryPoolCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<ShaderModule>,				s_shaderModuleCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<PipelineCache>,			s_pipelineCacheCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<PipelineLayout>,			s_pipelineLayoutCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<RenderPass>,				s_renderPassCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<GraphicsPipeline>,			s_graphicsPipelineCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<ComputePipeline>,			s_computePipelineCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<DescriptorSetLayout>,		s_descriptorSetLayoutCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<Sampler>,					s_samplerCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<DescriptorPool>,			s_descriptorPoolCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<DescriptorSet>,			s_descriptorSetCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<Framebuffer>,				s_framebufferCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<CommandPool>,				s_commandPoolCases),
+		CASE_DESC(multithreadedCreatePerThreadDeviceTest	<CommandBuffer>,			s_commandBufferCases),
+	};
+	objectMgmtTests->addChild(createGroup(testCtx, "multithreaded_per_thread_device", "Multithreaded object construction with per-thread device", s_multithreadedCreatePerThreadDeviceGroup));
+
+	static const CaseDescriptions	s_multithreadedCreatePerThreadResourcesGroup	=
+	{
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<Instance>,					s_instanceCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<Device>,					s_deviceCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<DeviceMemory>,				s_deviceMemCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<Buffer>,					s_bufferCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<BufferView>,				s_bufferViewCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<Image>,					s_imageCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<ImageView>,				s_imageViewCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<Semaphore>,				s_semaphoreCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<Event>,					s_eventCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<Fence>,					s_fenceCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<QueryPool>,				s_queryPoolCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<ShaderModule>,				s_shaderModuleCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<PipelineCache>,			s_pipelineCacheCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<PipelineLayout>,			s_pipelineLayoutCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<RenderPass>,				s_renderPassCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<GraphicsPipeline>,			s_graphicsPipelineCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<ComputePipeline>,			s_computePipelineCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<DescriptorSetLayout>,		s_descriptorSetLayoutCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<Sampler>,					s_samplerCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<DescriptorPool>,			s_descriptorPoolCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<DescriptorSet>,			s_descriptorSetCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<Framebuffer>,				s_framebufferCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<CommandPool>,				s_commandPoolCases),
+		CASE_DESC(multithreadedCreatePerThreadResourcesTest	<CommandBuffer>,			s_commandBufferCases),
+	};
+	objectMgmtTests->addChild(createGroup(testCtx, "multithreaded_per_thread_resources", "Multithreaded object construction with per-thread resources", s_multithreadedCreatePerThreadResourcesGroup));
+
+	static const CaseDescriptions	s_multithreadedCreateSharedResourcesGroup	=
+	{
+		EMPTY_CASE_DESC(Instance),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<Device>,					s_deviceCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<DeviceMemory>,				s_deviceMemCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<Buffer>,					s_bufferCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<BufferView>,				s_bufferViewCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<Image>,					s_imageCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<ImageView>,				s_imageViewCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<Semaphore>,				s_semaphoreCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<Event>,					s_eventCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<Fence>,					s_fenceCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<QueryPool>,				s_queryPoolCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<ShaderModule>,				s_shaderModuleCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<PipelineCache>,			s_pipelineCacheCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<PipelineLayout>,			s_pipelineLayoutCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<RenderPass>,				s_renderPassCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<GraphicsPipeline>,			s_graphicsPipelineCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<ComputePipeline>,			s_computePipelineCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<DescriptorSetLayout>,		s_descriptorSetLayoutCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<Sampler>,					s_samplerCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<DescriptorPool>,			s_descriptorPoolCases),
+		EMPTY_CASE_DESC(DescriptorSet),		// \note Needs per-thread DescriptorPool
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<Framebuffer>,				s_framebufferCases),
+		CASE_DESC(multithreadedCreateSharedResourcesTest	<CommandPool>,				s_commandPoolCases),
+		EMPTY_CASE_DESC(CommandBuffer),			// \note Needs per-thread CommandPool
+	};
+	objectMgmtTests->addChild(createGroup(testCtx, "multithreaded_shared_resources", "Multithreaded object construction with shared resources", s_multithreadedCreateSharedResourcesGroup));
+
+	static const CaseDescriptions	s_createSingleAllocCallbacksGroup	=
+	{
+		CASE_DESC(createSingleAllocCallbacksTest	<Instance>,					s_instanceCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<Device>,					s_deviceCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<DeviceMemory>,				s_deviceMemCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<Buffer>,					s_bufferCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<BufferView>,				s_bufferViewCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<Image>,					s_imageCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<ImageView>,				s_imageViewCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<Semaphore>,				s_semaphoreCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<Event>,					s_eventCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<Fence>,					s_fenceCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<QueryPool>,				s_queryPoolCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<ShaderModule>,				s_shaderModuleCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<PipelineCache>,			s_pipelineCacheCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<PipelineLayout>,			s_pipelineLayoutCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<RenderPass>,				s_renderPassCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<GraphicsPipeline>,			s_graphicsPipelineCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<ComputePipeline>,			s_computePipelineCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<DescriptorSetLayout>,		s_descriptorSetLayoutCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<Sampler>,					s_samplerCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<DescriptorPool>,			s_descriptorPoolCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<DescriptorSet>,			s_descriptorSetCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<Framebuffer>,				s_framebufferCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<CommandPool>,				s_commandPoolCases),
+		CASE_DESC(createSingleAllocCallbacksTest	<CommandBuffer>,			s_commandBufferCases),
+	};
+	objectMgmtTests->addChild(createGroup(testCtx, "single_alloc_callbacks", "Create single object with allocation callbacks", s_createSingleAllocCallbacksGroup));
+
+	static const CaseDescriptions	s_allocCallbackFailGroup	=
+	{
+		CASE_DESC(allocCallbackFailTest	<Instance>,					s_instanceCases),
+		CASE_DESC(allocCallbackFailTest	<Device>,					s_deviceCases),
+		CASE_DESC(allocCallbackFailTest	<DeviceMemory>,				s_deviceMemCases),
+		CASE_DESC(allocCallbackFailTest	<Buffer>,					s_bufferCases),
+		CASE_DESC(allocCallbackFailTest	<BufferView>,				s_bufferViewCases),
+		CASE_DESC(allocCallbackFailTest	<Image>,					s_imageCases),
+		CASE_DESC(allocCallbackFailTest	<ImageView>,				s_imageViewCases),
+		CASE_DESC(allocCallbackFailTest	<Semaphore>,				s_semaphoreCases),
+		CASE_DESC(allocCallbackFailTest	<Event>,					s_eventCases),
+		CASE_DESC(allocCallbackFailTest	<Fence>,					s_fenceCases),
+		CASE_DESC(allocCallbackFailTest	<QueryPool>,				s_queryPoolCases),
+		CASE_DESC(allocCallbackFailTest	<ShaderModule>,				s_shaderModuleCases),
+		CASE_DESC(allocCallbackFailTest	<PipelineCache>,			s_pipelineCacheCases),
+		CASE_DESC(allocCallbackFailTest	<PipelineLayout>,			s_pipelineLayoutCases),
+		CASE_DESC(allocCallbackFailTest	<RenderPass>,				s_renderPassCases),
+		CASE_DESC(allocCallbackFailTest	<GraphicsPipeline>,			s_graphicsPipelineCases),
+		CASE_DESC(allocCallbackFailTest	<ComputePipeline>,			s_computePipelineCases),
+		CASE_DESC(allocCallbackFailTest	<DescriptorSetLayout>,		s_descriptorSetLayoutCases),
+		CASE_DESC(allocCallbackFailTest	<Sampler>,					s_samplerCases),
+		CASE_DESC(allocCallbackFailTest	<DescriptorPool>,			s_descriptorPoolCases),
+		CASE_DESC(allocCallbackFailTest	<DescriptorSet>,			s_descriptorSetCases),
+		CASE_DESC(allocCallbackFailTest	<Framebuffer>,				s_framebufferCases),
+		CASE_DESC(allocCallbackFailTest	<CommandPool>,				s_commandPoolCases),
+		CASE_DESC(allocCallbackFailTest	<CommandBuffer>,			s_commandBufferCases),
+	};
+	objectMgmtTests->addChild(createGroup(testCtx, "alloc_callback_fail", "Allocation callback failure", s_allocCallbackFailGroup));
+
+	return objectMgmtTests.release();
+}
+
+} // api
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/api/vktApiObjectManagementTests.hpp b/external/vulkancts/modules/vulkan/api/vktApiObjectManagementTests.hpp
new file mode 100644
index 0000000..c3bfade
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiObjectManagementTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTAPIOBJECTMANAGEMENTTESTS_HPP
+#define _VKTAPIOBJECTMANAGEMENTTESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Object management tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace api
+{
+
+tcu::TestCaseGroup*		createObjectManagementTests		(tcu::TestContext& testCtx);
+
+} // api
+} // vkt
+
+#endif // _VKTAPIOBJECTMANAGEMENTTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/api/vktApiSmokeTests.cpp b/external/vulkancts/modules/vulkan/api/vktApiSmokeTests.cpp
new file mode 100644
index 0000000..a694381
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiSmokeTests.cpp
@@ -0,0 +1,810 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Simple Smoke Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktApiTests.hpp"
+
+#include "vktTestCaseUtil.hpp"
+
+#include "vkDefs.hpp"
+#include "vkPlatform.hpp"
+#include "vkStrUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkDeviceUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkTypeUtil.hpp"
+
+#include "tcuTestLog.hpp"
+#include "tcuFormatUtil.hpp"
+
+#include "deUniquePtr.hpp"
+
+namespace vkt
+{
+namespace api
+{
+
+namespace
+{
+
+using namespace vk;
+using std::vector;
+using tcu::TestLog;
+using de::UniquePtr;
+
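+// Creates a sampler and exercises handle ownership transfer via Move<> assignment and Unique<>.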
+tcu::TestStatus createSamplerTest (Context& context)
+{
+	const VkDevice			vkDevice	= context.getDevice();
+	const DeviceInterface&	vk			= context.getDeviceInterface();
+
+	{
+		const struct VkSamplerCreateInfo		samplerInfo	=
+		{
+			VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,		// sType
+			DE_NULL,									// pNext
+			0u,											// flags
+			VK_FILTER_NEAREST,							// magFilter
+			VK_FILTER_NEAREST,							// minFilter
+			VK_SAMPLER_MIPMAP_MODE_NEAREST,				// mipmapMode
+			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		// addressModeU
+			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		// addressModeV
+			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		// addressModeW
+			0.0f,										// mipLodBias
+			VK_FALSE,									// anisotropyEnable
+			1.0f,										// maxAnisotropy
+			VK_FALSE,									// compareEnable
+			VK_COMPARE_OP_ALWAYS,						// compareOp
+			0.0f,										// minLod
+			0.0f,										// maxLod
+			VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,	// borderColor
+			VK_FALSE,									// unnormalizedCoordinates
+		};
+
+		Move<VkSampler>			tmpSampler	= createSampler(vk, vkDevice, &samplerInfo);
+		Move<VkSampler>			tmp2Sampler;
+
+		tmp2Sampler = tmpSampler;
+
+		const Unique<VkSampler>	sampler		(tmp2Sampler);
+	}
+
+	return tcu::TestStatus::pass("Creating sampler succeeded");
+}
+
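+// Registers the "test" vertex shader source that createShaderModuleTest builds into a module.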
+void createShaderProgs (SourceCollections& dst)
+{
+	dst.glslSources.add("test") << glu::VertexSource(
+		"#version 300 es\n"
+		"in highp vec4 a_position;\n"
+		"void main (void) { gl_Position = a_position; }\n");
+}
+
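+// Builds a VkShaderModule from the precompiled "test" vertex shader binary.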
+tcu::TestStatus createShaderModuleTest (Context& context)
+{
+	const VkDevice					vkDevice	= context.getDevice();
+	const DeviceInterface&			vk			= context.getDeviceInterface();
+	const Unique<VkShaderModule>	shader		(createShaderModule(vk, vkDevice, context.getBinaryCollection().get("test"), 0));
+
+	return tcu::TestStatus::pass("Creating shader module succeeded");
+}
+
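+// Hand-written SPIR-V assembly equivalents of the GLSL triangle shaders, used by the asm_triangle case.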
+void createTriangleAsmProgs (SourceCollections& dst)
+{
+	dst.spirvAsmSources.add("vert") <<
+		"		 OpCapability Shader\n"
+		"%1 =	 OpExtInstImport \"GLSL.std.450\"\n"
+		"		 OpMemoryModel Logical GLSL450\n"
+		"		 OpEntryPoint Vertex %4 \"main\" %10 %12 %16 %17\n"
+		"		 OpSource ESSL 300\n"
+		"		 OpName %4 \"main\"\n"
+		"		 OpName %10 \"gl_Position\"\n"
+		"		 OpName %12 \"a_position\"\n"
+		"		 OpName %16 \"gl_VertexIndex\"\n"
+		"		 OpName %17 \"gl_InstanceIndex\"\n"
+		"		 OpDecorate %10 BuiltIn Position\n"
+		"		 OpDecorate %12 Location 0\n"
+		"		 OpDecorate %16 BuiltIn VertexIndex\n"
+		"		 OpDecorate %17 BuiltIn InstanceIndex\n"
+		"%2 =	 OpTypeVoid\n"
+		"%3 =	 OpTypeFunction %2\n"
+		"%7 =	 OpTypeFloat 32\n"
+		"%8 =	 OpTypeVector %7 4\n"
+		"%9 =	 OpTypePointer Output %8\n"
+		"%10 =	 OpVariable %9 Output\n"
+		"%11 =	 OpTypePointer Input %8\n"
+		"%12 =	 OpVariable %11 Input\n"
+		"%14 =	 OpTypeInt 32 1\n"
+		"%15 =	 OpTypePointer Input %14\n"
+		"%16 =	 OpVariable %15 Input\n"
+		"%17 =	 OpVariable %15 Input\n"
+		"%4 =	 OpFunction %2 None %3\n"
+		"%5 =	 OpLabel\n"
+		"%13 =	 OpLoad %8 %12\n"
+		"		 OpStore %10 %13\n"
+		"		 OpBranch %6\n"
+		"%6 =	 OpLabel\n"
+		"		 OpReturn\n"
+		"		 OpFunctionEnd\n";
+	dst.spirvAsmSources.add("frag") <<
+		"		OpCapability Shader\n"
+		"%1 =	OpExtInstImport \"GLSL.std.450\"\n"
+		"		OpMemoryModel Logical GLSL450\n"
+		"		OpEntryPoint Fragment %4 \"main\" %10\n"
+		"		OpExecutionMode %4 OriginLowerLeft\n"
+		"		OpSource ESSL 300\n"
+		"		OpName %4 \"main\"\n"
+		"		OpName %10 \"o_color\"\n"
+		"		OpDecorate %10 RelaxedPrecision\n"
+		"		OpDecorate %10 Location 0\n"
+		"%2 =	OpTypeVoid\n"
+		"%3 =	OpTypeFunction %2\n"
+		"%7 =	OpTypeFloat 32\n"
+		"%8 =	OpTypeVector %7 4\n"
+		"%9 =	OpTypePointer Output %8\n"
+		"%10 =	OpVariable %9 Output\n"
+		"%11 =	OpConstant %7 1065353216\n"
+		"%12 =	OpConstant %7 0\n"
+		"%13 =	OpConstantComposite %8 %11 %12 %11 %11\n"
+		"%4 =	OpFunction %2 None %3\n"
+		"%5 =	OpLabel\n"
+		"		OpStore %10 %13\n"
+		"		OpBranch %6\n"
+		"%6 =	OpLabel\n"
+		"		OpReturn\n"
+		"		OpFunctionEnd\n";
+}
+
+void createTriangleProgs (SourceCollections& dst)
+{
+	dst.glslSources.add("vert") << glu::VertexSource(
+		"#version 300 es\n"
+		"layout(location = 0) in highp vec4 a_position;\n"
+		"void main (void) { gl_Position = a_position; }\n");
+	dst.glslSources.add("frag") << glu::FragmentSource(
+		"#version 300 es\n"
+		"layout(location = 0) out lowp vec4 o_color;\n"
+		"void main (void) { o_color = vec4(1.0, 0.0, 1.0, 1.0); }\n");
+}
+
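+// Renders a single triangle into an offscreen color attachment, copies it to a host-visible buffer and logs the result image.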
+tcu::TestStatus renderTriangleTest (Context& context)
+{
+	const VkDevice							vkDevice				= context.getDevice();
+	const DeviceInterface&					vk						= context.getDeviceInterface();
+	const VkQueue							queue					= context.getUniversalQueue();
+	const deUint32							queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator							memAlloc				(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+	const tcu::IVec2						renderSize				(256, 256);
+
+	const tcu::Vec4							vertices[]				=
+	{
+		tcu::Vec4(-0.5f, -0.5f, 0.0f, 1.0f),
+		tcu::Vec4(+0.5f, -0.5f, 0.0f, 1.0f),
+		tcu::Vec4( 0.0f, +0.5f, 0.0f, 1.0f)
+	};
+
+	const VkBufferCreateInfo				vertexBufferParams		=
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,	// sType
+		DE_NULL,								// pNext
+		0u,										// flags
+		(VkDeviceSize)sizeof(vertices),			// size
+		VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,		// usage
+		VK_SHARING_MODE_EXCLUSIVE,				// sharingMode
+		1u,										// queueFamilyIndexCount
+		&queueFamilyIndex,						// pQueueFamilyIndices
+	};
+	const Unique<VkBuffer>					vertexBuffer			(createBuffer(vk, vkDevice, &vertexBufferParams));
+	const UniquePtr<Allocation>				vertexBufferMemory		(memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *vertexBuffer), MemoryRequirement::HostVisible));
+
+	VK_CHECK(vk.bindBufferMemory(vkDevice, *vertexBuffer, vertexBufferMemory->getMemory(), vertexBufferMemory->getOffset()));
+
+	const VkDeviceSize						imageSizeBytes			= (VkDeviceSize)(sizeof(deUint32)*renderSize.x()*renderSize.y());
+	const VkBufferCreateInfo				readImageBufferParams	=
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// sType
+		DE_NULL,									// pNext
+		(VkBufferCreateFlags)0u,					// flags
+		imageSizeBytes,								// size
+		VK_BUFFER_USAGE_TRANSFER_DST_BIT,			// usage
+		VK_SHARING_MODE_EXCLUSIVE,					// sharingMode
+		1u,											// queueFamilyIndexCount
+		&queueFamilyIndex,							// pQueueFamilyIndices
+	};
+	const Unique<VkBuffer>					readImageBuffer			(createBuffer(vk, vkDevice, &readImageBufferParams));
+	const UniquePtr<Allocation>				readImageBufferMemory	(memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *readImageBuffer), MemoryRequirement::HostVisible));
+
+	VK_CHECK(vk.bindBufferMemory(vkDevice, *readImageBuffer, readImageBufferMemory->getMemory(), readImageBufferMemory->getOffset()));
+
+	const VkImageCreateInfo					imageParams				=
+	{
+		VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,									// sType
+		DE_NULL,																// pNext
+		0u,																		// flags
+		VK_IMAGE_TYPE_2D,														// imageType
+		VK_FORMAT_R8G8B8A8_UNORM,												// format
+		{ (deUint32)renderSize.x(), (deUint32)renderSize.y(), 1 },				// extent
+		1u,																		// mipLevels
+		1u,																		// arrayLayers
+		VK_SAMPLE_COUNT_1_BIT,													// samples
+		VK_IMAGE_TILING_OPTIMAL,												// tiling
+		VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT|VK_IMAGE_USAGE_TRANSFER_SRC_BIT,	// usage
+		VK_SHARING_MODE_EXCLUSIVE,												// sharingMode
+		1u,																		// queueFamilyIndexCount
+		&queueFamilyIndex,														// pQueueFamilyIndices
+		VK_IMAGE_LAYOUT_UNDEFINED,												// initialLayout
+	};
+
+	const Unique<VkImage>					image					(createImage(vk, vkDevice, &imageParams));
+	const UniquePtr<Allocation>				imageMemory				(memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *image), MemoryRequirement::Any));
+
+	VK_CHECK(vk.bindImageMemory(vkDevice, *image, imageMemory->getMemory(), imageMemory->getOffset()));
+
+	const VkAttachmentDescription			colorAttDesc			=
+	{
+		0u,												// flags
+		VK_FORMAT_R8G8B8A8_UNORM,						// format
+		VK_SAMPLE_COUNT_1_BIT,							// samples
+		VK_ATTACHMENT_LOAD_OP_CLEAR,					// loadOp
+		VK_ATTACHMENT_STORE_OP_STORE,					// storeOp
+		VK_ATTACHMENT_LOAD_OP_DONT_CARE,				// stencilLoadOp
+		VK_ATTACHMENT_STORE_OP_DONT_CARE,				// stencilStoreOp
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,		// initialLayout
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,		// finalLayout
+	};
+	const VkAttachmentReference				colorAttRef				=
+	{
+		0u,												// attachment
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,		// layout
+	};
+	const VkSubpassDescription				subpassDesc				=
+	{
+		(VkSubpassDescriptionFlags)0u,					// flags
+		VK_PIPELINE_BIND_POINT_GRAPHICS,				// pipelineBindPoint
+		0u,												// inputAttachmentCount
+		DE_NULL,										// pInputAttachments
+		1u,												// colorAttachmentCount
+		&colorAttRef,									// pColorAttachments
+		DE_NULL,										// pResolveAttachments
+		DE_NULL,										// depthStencilAttachment
+		0u,												// preserveAttachmentCount
+		DE_NULL,										// pPreserveAttachments
+	};
+	const VkRenderPassCreateInfo			renderPassParams		=
+	{
+		VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,		// sType
+		DE_NULL,										// pNext
+		0u,												// flags
+		1u,												// attachmentCount
+		&colorAttDesc,									// pAttachments
+		1u,												// subpassCount
+		&subpassDesc,									// pSubpasses
+		0u,												// dependencyCount
+		DE_NULL,										// pDependencies
+	};
+	const Unique<VkRenderPass>				renderPass				(createRenderPass(vk, vkDevice, &renderPassParams));
+
+	const VkImageViewCreateInfo				colorAttViewParams		=
+	{
+		VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,		// sType
+		DE_NULL,										// pNext
+		0u,												// flags
+		*image,											// image
+		VK_IMAGE_VIEW_TYPE_2D,							// viewType
+		VK_FORMAT_R8G8B8A8_UNORM,						// format
+		{
+			VK_COMPONENT_SWIZZLE_R,
+			VK_COMPONENT_SWIZZLE_G,
+			VK_COMPONENT_SWIZZLE_B,
+			VK_COMPONENT_SWIZZLE_A
+		},												// components
+		{
+			VK_IMAGE_ASPECT_COLOR_BIT,						// aspectMask
+			0u,												// baseMipLevel
+			1u,												// levelCount
+			0u,												// baseArrayLayer
+			1u,												// layerCount
+		},												// subresourceRange
+	};
+	const Unique<VkImageView>				colorAttView			(createImageView(vk, vkDevice, &colorAttViewParams));
+
+	// Pipeline layout
+	const VkPipelineLayoutCreateInfo		pipelineLayoutParams	=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,			// sType
+		DE_NULL,												// pNext
+		(vk::VkPipelineLayoutCreateFlags)0,						// flags
+		0u,														// setLayoutCount
+		DE_NULL,												// pSetLayouts
+		0u,														// pushConstantRangeCount
+		DE_NULL,												// pPushConstantRanges
+	};
+	const Unique<VkPipelineLayout>			pipelineLayout			(createPipelineLayout(vk, vkDevice, &pipelineLayoutParams));
+
+	// Shaders
+	const Unique<VkShaderModule>			vertShaderModule		(createShaderModule(vk, vkDevice, context.getBinaryCollection().get("vert"), 0));
+	const Unique<VkShaderModule>			fragShaderModule		(createShaderModule(vk, vkDevice, context.getBinaryCollection().get("frag"), 0));
+
+	// Pipeline
+	const VkSpecializationInfo				emptyShaderSpecParams	=
+	{
+		0u,														// mapEntryCount
+		DE_NULL,												// pMap
+		0,														// dataSize
+		DE_NULL,												// pData
+	};
+	const VkPipelineShaderStageCreateInfo	shaderStageParams[]	=
+	{
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// sType
+			DE_NULL,												// pNext
+			0u,														// flags
+			VK_SHADER_STAGE_VERTEX_BIT,								// stage
+			*vertShaderModule,										// module
+			"main",													// pName
+			&emptyShaderSpecParams,									// pSpecializationInfo
+		},
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// sType
+			DE_NULL,												// pNext
+			0u,														// flags
+			VK_SHADER_STAGE_FRAGMENT_BIT,							// stage
+			*fragShaderModule,										// module
+			"main",													// pName
+			&emptyShaderSpecParams,									// pSpecializationInfo
+		}
+	};
+	const VkPipelineDepthStencilStateCreateInfo	depthStencilParams		=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,	// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		DE_FALSE,													// depthTestEnable
+		DE_FALSE,													// depthWriteEnable
+		VK_COMPARE_OP_ALWAYS,										// depthCompareOp
+		DE_FALSE,													// depthBoundsTestEnable
+		DE_FALSE,													// stencilTestEnable
+		{
+			VK_STENCIL_OP_KEEP,											// failOp
+			VK_STENCIL_OP_KEEP,											// passOp
+			VK_STENCIL_OP_KEEP,											// depthFailOp
+			VK_COMPARE_OP_ALWAYS,										// compareOp
+			0u,															// compareMask
+			0u,															// writeMask
+			0u,															// reference
+		},															// front
+		{
+			VK_STENCIL_OP_KEEP,											// failOp
+			VK_STENCIL_OP_KEEP,											// passOp
+			VK_STENCIL_OP_KEEP,											// depthFailOp
+			VK_COMPARE_OP_ALWAYS,										// compareOp
+			0u,															// compareMask
+			0u,															// writeMask
+			0u,															// reference
+		},															// back
+		-1.0f,														// minDepthBounds
+		+1.0f,														// maxDepthBounds
+	};
+	const VkViewport						viewport0				=
+	{
+		0.0f,														// x
+		0.0f,														// y
+		(float)renderSize.x(),										// width
+		(float)renderSize.y(),										// height
+		0.0f,														// minDepth
+		1.0f,														// maxDepth
+	};
+	const VkRect2D							scissor0				=
+	{
+		{
+			0u,															// x
+			0u,															// y
+		},															// offset
+		{
+			(deUint32)renderSize.x(),									// width
+			(deUint32)renderSize.y(),									// height
+		},															// extent;
+	};
+	const VkPipelineViewportStateCreateInfo		viewportParams			=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,		// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		1u,															// viewportCount
+		&viewport0,													// pViewports
+		1u,															// scissorCount
+		&scissor0													// pScissors
+	};
+	const VkSampleMask							sampleMask				= ~0u;
+	const VkPipelineMultisampleStateCreateInfo	multisampleParams		=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		VK_SAMPLE_COUNT_1_BIT,										// rasterizationSamples
+		VK_FALSE,													// sampleShadingEnable
+		0.0f,														// minSampleShading
+		&sampleMask,												// sampleMask
+		VK_FALSE,													// alphaToCoverageEnable
+		VK_FALSE,													// alphaToOneEnable
+	};
+	const VkPipelineRasterizationStateCreateInfo	rasterParams		=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,	// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		VK_TRUE,													// depthClampEnable
+		VK_FALSE,													// rasterizerDiscardEnable
+		VK_POLYGON_MODE_FILL,										// polygonMode
+		VK_CULL_MODE_NONE,											// cullMode
+		VK_FRONT_FACE_COUNTER_CLOCKWISE,							// frontFace
+		VK_FALSE,													// depthBiasEnable
+		0.0f,														// depthBiasConstantFactor
+		0.0f,														// depthBiasClamp
+		0.0f,														// depthBiasSlopeFactor
+		1.0f,														// lineWidth
+	};
+	const VkPipelineInputAssemblyStateCreateInfo	inputAssemblyParams	=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// sType
+		DE_NULL,														// pNext
+		0u,																// flags
+		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,							// topology
+		DE_FALSE,														// primitiveRestartEnable
+	};
+	const VkVertexInputBindingDescription		vertexBinding0			=
+	{
+		0u,														// binding
+		(deUint32)sizeof(tcu::Vec4),							// stride
+		VK_VERTEX_INPUT_RATE_VERTEX,							// inputRate
+	};
+	const VkVertexInputAttributeDescription		vertexAttrib0			=
+	{
+		0u,														// location
+		0u,														// binding
+		VK_FORMAT_R32G32B32A32_SFLOAT,							// format
+		0u,														// offset
+	};
+	const VkPipelineVertexInputStateCreateInfo	vertexInputStateParams	=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,	// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		1u,															// vertexBindingDescriptionCount
+		&vertexBinding0,											// pVertexBindingDescriptions
+		1u,															// vertexAttributeDescriptionCount
+		&vertexAttrib0,												// pVertexAttributeDescriptions
+	};
+	const VkPipelineColorBlendAttachmentState	attBlendParams			=
+	{
+		VK_FALSE,													// blendEnable
+		VK_BLEND_FACTOR_ONE,										// srcColorBlendFactor
+		VK_BLEND_FACTOR_ZERO,										// dstColorBlendFactor
+		VK_BLEND_OP_ADD,											// colorBlendOp
+		VK_BLEND_FACTOR_ONE,										// srcAlphaBlendFactor
+		VK_BLEND_FACTOR_ZERO,										// dstAlphaBlendFactor
+		VK_BLEND_OP_ADD,											// alphaBlendOp
+		(VK_COLOR_COMPONENT_R_BIT|
+		 VK_COLOR_COMPONENT_G_BIT|
+		 VK_COLOR_COMPONENT_B_BIT|
+		 VK_COLOR_COMPONENT_A_BIT),									// colorWriteMask
+	};
+	const VkPipelineColorBlendStateCreateInfo	blendParams				=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	// sType
+		DE_NULL,													// pNext
+		0u,															// flags
+		DE_FALSE,													// logicOpEnable
+		VK_LOGIC_OP_COPY,											// logicOp
+		1u,															// attachmentCount
+		&attBlendParams,											// pAttachments
+		{ 0.0f, 0.0f, 0.0f, 0.0f },									// blendConstants[4]
+	};
+	const VkPipelineDynamicStateCreateInfo	dynamicStateInfo		=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,	// sType
+		DE_NULL,												// pNext
+		0u,														// flags
+		0u,														// dynamicStateCount
+		DE_NULL													// pDynamicStates
+	};
+	const VkGraphicsPipelineCreateInfo		pipelineParams			=
+	{
+		VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,		// sType
+		DE_NULL,												// pNext
+		0u,														// flags
+		(deUint32)DE_LENGTH_OF_ARRAY(shaderStageParams),		// stageCount
+		shaderStageParams,										// pStages
+		&vertexInputStateParams,								// pVertexInputState
+		&inputAssemblyParams,									// pInputAssemblyState
+		DE_NULL,												// pTessellationState
+		&viewportParams,										// pViewportState
+		&rasterParams,											// pRasterizationState
+		&multisampleParams,										// pMultisampleState
+		&depthStencilParams,									// pDepthStencilState
+		&blendParams,											// pColorBlendState
+		&dynamicStateInfo,										// pDynamicState
+		*pipelineLayout,										// layout
+		*renderPass,											// renderPass
+		0u,														// subpass
+		DE_NULL,												// basePipelineHandle
+		0u,														// basePipelineIndex
+	};
+
+	const Unique<VkPipeline>				pipeline				(createGraphicsPipeline(vk, vkDevice, DE_NULL, &pipelineParams));
+
+	// Framebuffer
+	const VkFramebufferCreateInfo			framebufferParams		=
+	{
+		VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,				// sType
+		DE_NULL,												// pNext
+		0u,														// flags
+		*renderPass,											// renderPass
+		1u,														// attachmentCount
+		&*colorAttView,											// pAttachments
+		(deUint32)renderSize.x(),								// width
+		(deUint32)renderSize.y(),								// height
+		1u,														// layers
+	};
+	const Unique<VkFramebuffer>				framebuffer				(createFramebuffer(vk, vkDevice, &framebufferParams));
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					// sType
+		DE_NULL,													// pNext
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,			// flags
+		queueFamilyIndex,											// queueFamilyIndex
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,			// sType
+		DE_NULL,												// pNext
+		*cmdPool,												// pool
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,						// level
+		1u,														// commandBufferCount
+	};
+	const Unique<VkCommandBuffer>			cmdBuf					(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+
+	const VkCommandBufferBeginInfo			cmdBufBeginParams		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,			// sType
+		DE_NULL,												// pNext
+		0u,														// flags
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,			// pInheritanceInfo
+	};
+
+	// Record commands
+	VK_CHECK(vk.beginCommandBuffer(*cmdBuf, &cmdBufBeginParams));
+
+	{
+		const VkMemoryBarrier		vertFlushBarrier	=
+		{
+			VK_STRUCTURE_TYPE_MEMORY_BARRIER,			// sType
+			DE_NULL,									// pNext
+			VK_ACCESS_HOST_WRITE_BIT,					// srcAccessMask
+			VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,		// dstAccessMask
+		};
+		const VkImageMemoryBarrier	colorAttBarrier		=
+		{
+			VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// sType
+			DE_NULL,									// pNext
+			0u,											// srcAccessMask
+			(VK_ACCESS_COLOR_ATTACHMENT_READ_BIT|
+			 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT),		// dstAccessMask
+			VK_IMAGE_LAYOUT_UNDEFINED,					// oldLayout
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	// newLayout
+			queueFamilyIndex,							// srcQueueFamilyIndex
+			queueFamilyIndex,							// dstQueueFamilyIndex
+			*image,										// image
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,					// aspectMask
+				0u,											// baseMipLevel
+				1u,											// levelCount
+				0u,											// baseArrayLayer
+				1u,											// layerCount
+			}											// subresourceRange
+		};
+		vk.cmdPipelineBarrier(*cmdBuf, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, (VkDependencyFlags)0, 1, &vertFlushBarrier, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &colorAttBarrier);
+	}
+
+	{
+		const VkClearValue			clearValue		= makeClearValueColorF32(0.125f, 0.25f, 0.75f, 1.0f);
+		const VkRenderPassBeginInfo	passBeginParams	=
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,			// sType
+			DE_NULL,											// pNext
+			*renderPass,										// renderPass
+			*framebuffer,										// framebuffer
+			{
+				{ 0, 0 },
+				{ (deUint32)renderSize.x(), (deUint32)renderSize.y() }
+			},													// renderArea
+			1u,													// clearValueCount
+			&clearValue,										// pClearValues
+		};
+		vk.cmdBeginRenderPass(*cmdBuf, &passBeginParams, VK_SUBPASS_CONTENTS_INLINE);
+	}
+
+	vk.cmdBindPipeline(*cmdBuf, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
+	{
+		const VkDeviceSize bindingOffset = 0;
+		vk.cmdBindVertexBuffers(*cmdBuf, 0u, 1u, &vertexBuffer.get(), &bindingOffset);
+	}
+	vk.cmdDraw(*cmdBuf, 3u, 1u, 0u, 0u);
+	vk.cmdEndRenderPass(*cmdBuf);
+
+	{
+		const VkImageMemoryBarrier	renderFinishBarrier	=
+		{
+			VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// sType
+			DE_NULL,									// pNext
+			VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,		// srcAccessMask
+			VK_ACCESS_TRANSFER_READ_BIT,				// dstAccessMask
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	// oldLayout
+			VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,		// newLayout
+			queueFamilyIndex,							// srcQueueFamilyIndex
+			queueFamilyIndex,							// dstQueueFamilyIndex
+			*image,										// image
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,					// aspectMask
+				0u,											// baseMipLevel
+				1u,											// levelCount
+				0u,											// baseArrayLayer
+				1u,											// layerCount
+			}											// subresourceRange
+		};
+		vk.cmdPipelineBarrier(*cmdBuf, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &renderFinishBarrier);
+	}
+
+	{
+		const VkBufferImageCopy	copyParams	=
+		{
+			(VkDeviceSize)0u,						// bufferOffset
+			(deUint32)renderSize.x(),				// bufferRowLength
+			(deUint32)renderSize.y(),				// bufferImageHeight
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,				// aspectMask
+				0u,										// mipLevel
+				0u,										// baseArrayLayer
+				1u,										// layerCount
+			},										// imageSubresource
+			{ 0u, 0u, 0u },							// imageOffset
+			{
+				(deUint32)renderSize.x(),
+				(deUint32)renderSize.y(),
+				1u
+			}										// imageExtent
+		};
+		vk.cmdCopyImageToBuffer(*cmdBuf, *image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *readImageBuffer, 1u, &copyParams);
+	}
+
+	{
+		const VkBufferMemoryBarrier	copyFinishBarrier	=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,	// sType
+			DE_NULL,									// pNext
+			VK_ACCESS_TRANSFER_WRITE_BIT,				// srcAccessMask
+			VK_ACCESS_HOST_READ_BIT,					// dstAccessMask
+			queueFamilyIndex,							// srcQueueFamilyIndex
+			queueFamilyIndex,							// dstQueueFamilyIndex
+			*readImageBuffer,							// buffer
+			0u,											// offset
+			imageSizeBytes								// size
+		};
+		vk.cmdPipelineBarrier(*cmdBuf, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyFinishBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+	}
+
+	VK_CHECK(vk.endCommandBuffer(*cmdBuf));
+
+	// Upload vertex data
+	{
+		const VkMappedMemoryRange	range			=
+		{
+			VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,	// sType
+			DE_NULL,								// pNext
+			vertexBufferMemory->getMemory(),		// memory
+			0,										// offset
+			(VkDeviceSize)sizeof(vertices),			// size
+		};
+		void*						vertexBufPtr	= vertexBufferMemory->getHostPtr();
+
+		deMemcpy(vertexBufPtr, &vertices[0], sizeof(vertices));
+		VK_CHECK(vk.flushMappedMemoryRanges(vkDevice, 1u, &range));
+	}
+
+	// Submit & wait for completion
+	{
+		const VkFenceCreateInfo	fenceParams	=
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// sType
+			DE_NULL,								// pNext
+			0u,										// flags
+		};
+		const VkSubmitInfo		submitInfo	=
+		{
+			VK_STRUCTURE_TYPE_SUBMIT_INFO,			// sType
+			DE_NULL,								// pNext
+			0u,										// waitSemaphoreCount
+			DE_NULL,								// pWaitSemaphores
+			(const VkPipelineStageFlags*)DE_NULL,	// pWaitDstStageMask
+			1u,										// commandBufferCount
+			&cmdBuf.get(),							// pCommandBuffers
+			0u,										// signalSemaphoreCount
+			DE_NULL,								// pSignalSemaphores
+		};
+		const Unique<VkFence>	fence		(createFence(vk, vkDevice, &fenceParams));
+
+		VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+		VK_CHECK(vk.waitForFences(vkDevice, 1u, &fence.get(), DE_TRUE, ~0ull));
+	}
+
+	// Log image
+	{
+		const VkMappedMemoryRange	range		=
+		{
+			VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,	// sType
+			DE_NULL,								// pNext
+			readImageBufferMemory->getMemory(),		// memory
+			0,										// offset
+			imageSizeBytes,							// size
+		};
+		void*						imagePtr	= readImageBufferMemory->getHostPtr();
+
+		VK_CHECK(vk.invalidateMappedMemoryRanges(vkDevice, 1u, &range));
+		context.getTestContext().getLog() << TestLog::Image("Result", "Result", tcu::ConstPixelBufferAccess(tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8), renderSize.x(), renderSize.y(), 1, imagePtr));
+	}
+
+	return tcu::TestStatus::pass("Rendering succeeded");
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createSmokeTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	smokeTests	(new tcu::TestCaseGroup(testCtx, "smoke", "Smoke Tests"));
+
+	addFunctionCase				(smokeTests.get(), "create_sampler",	"",	createSamplerTest);
+	addFunctionCaseWithPrograms	(smokeTests.get(), "create_shader",		"", createShaderProgs,		createShaderModuleTest);
+	addFunctionCaseWithPrograms	(smokeTests.get(), "triangle",			"", createTriangleProgs,	renderTriangleTest);
+	addFunctionCaseWithPrograms	(smokeTests.get(), "asm_triangle",		"", createTriangleAsmProgs,	renderTriangleTest);
+
+	return smokeTests.release();
+}
+
+} // api
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/api/vktApiSmokeTests.hpp b/external/vulkancts/modules/vulkan/api/vktApiSmokeTests.hpp
new file mode 100644
index 0000000..8184e7e
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiSmokeTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTAPISMOKETESTS_HPP
+#define _VKTAPISMOKETESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Simple Smoke Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace api
+{
+
+tcu::TestCaseGroup*		createSmokeTests		(tcu::TestContext& testCtx);
+
+} // api
+} // vkt
+
+#endif // _VKTAPISMOKETESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/api/vktApiTests.cpp b/external/vulkancts/modules/vulkan/api/vktApiTests.cpp
new file mode 100644
index 0000000..a786bbe
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiTests.cpp
@@ -0,0 +1,86 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief API Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktApiTests.hpp"
+
+#include "vktTestGroupUtil.hpp"
+#include "vktApiSmokeTests.hpp"
+#include "vktApiDeviceInitializationTests.hpp"
+#include "vktApiObjectManagementTests.hpp"
+#include "vktApiBufferTests.hpp"
+#include "vktApiBufferViewCreateTests.hpp"
+#include "vktApiBufferViewAccessTests.hpp"
+#include "vktApiFeatureInfo.hpp"
+#include "vktApiCommandBuffersTests.hpp"
+#include "vktApiCopiesAndBlittingTests.hpp"
+
+namespace vkt
+{
+namespace api
+{
+
+namespace
+{
+
+void createBufferViewTests (tcu::TestCaseGroup* bufferViewTests)
+{
+	tcu::TestContext&	testCtx		= bufferViewTests->getTestContext();
+
+	bufferViewTests->addChild(createBufferViewCreateTests	(testCtx));
+	bufferViewTests->addChild(createBufferViewAccessTests	(testCtx));
+}
+
+void createApiTests (tcu::TestCaseGroup* apiTests)
+{
+	tcu::TestContext&	testCtx		= apiTests->getTestContext();
+
+	apiTests->addChild(createSmokeTests					(testCtx));
+	apiTests->addChild(api::createFeatureInfoTests		(testCtx));
+	apiTests->addChild(createDeviceInitializationTests	(testCtx));
+	apiTests->addChild(createObjectManagementTests		(testCtx));
+	apiTests->addChild(createBufferTests				(testCtx));
+	apiTests->addChild(createTestGroup					(testCtx, "buffer_view", "BufferView tests", createBufferViewTests));
+	apiTests->addChild(createCommandBuffersTests		(testCtx));
+	apiTests->addChild(createCopiesAndBlittingTests		(testCtx));
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx)
+{
+	return createTestGroup(testCtx, "api", "API Tests", createApiTests);
+}
+
+} // api
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/api/vktApiTests.hpp b/external/vulkancts/modules/vulkan/api/vktApiTests.hpp
new file mode 100644
index 0000000..4bfc9cd
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/api/vktApiTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTAPITESTS_HPP
+#define _VKTAPITESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief API tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace api
+{
+
+tcu::TestCaseGroup*		createTests		(tcu::TestContext& testCtx);
+
+} // api
+} // vkt
+
+#endif // _VKTAPITESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/binding_model/CMakeLists.txt b/external/vulkancts/modules/vulkan/binding_model/CMakeLists.txt
new file mode 100644
index 0000000..348b363
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/binding_model/CMakeLists.txt
@@ -0,0 +1,19 @@
+# dEQP-VK.binding_model
+
+include_directories(..)
+
+set(DEQP_VK_BINDING_MODEL_SRCS
+	vktBindingModelTests.cpp
+	vktBindingModelTests.hpp
+	vktBindingShaderAccessTests.cpp
+	vktBindingShaderAccessTests.hpp
+	)
+
+set(DEQP_VK_BINDING_MODEL_LIBS
+	deqp-vk-common
+	tcutil
+	vkutil
+	)
+
+add_library(deqp-vk-binding-model STATIC ${DEQP_VK_BINDING_MODEL_SRCS})
+target_link_libraries(deqp-vk-binding-model ${DEQP_VK_BINDING_MODEL_LIBS})
diff --git a/external/vulkancts/modules/vulkan/binding_model/vktBindingModelTests.cpp b/external/vulkancts/modules/vulkan/binding_model/vktBindingModelTests.cpp
new file mode 100644
index 0000000..571a636
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/binding_model/vktBindingModelTests.cpp
@@ -0,0 +1,67 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Binding Model tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktBindingModelTests.hpp"
+
+#include "vktBindingShaderAccessTests.hpp"
+#include "vktTestGroupUtil.hpp"
+
+namespace vkt
+{
+namespace BindingModel
+{
+
+namespace
+{
+
+void createChildren (tcu::TestCaseGroup* group)
+{
+	tcu::TestContext&	testCtx		= group->getTestContext();
+
+	group->addChild(createShaderAccessTests(testCtx));
+
+	// \todo [2015-07-30 jarkko] .change_binding.{between_renderpasses, within_pass}
+	// \todo [2015-07-30 jarkko] .descriptor_set_chain
+	// \todo [2015-07-30 jarkko] .update_descriptor_set
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx)
+{
+	return createTestGroup(testCtx, "binding_model", "Resource binding tests", createChildren);
+}
+
+} // BindingModel
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/binding_model/vktBindingModelTests.hpp b/external/vulkancts/modules/vulkan/binding_model/vktBindingModelTests.hpp
new file mode 100644
index 0000000..09e7b5b
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/binding_model/vktBindingModelTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTBINDINGMODELTESTS_HPP
+#define _VKTBINDINGMODELTESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Binding Model tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace BindingModel
+{
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx);
+
+} // BindingModel
+} // vkt
+
+#endif // _VKTBINDINGMODELTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/binding_model/vktBindingShaderAccessTests.cpp b/external/vulkancts/modules/vulkan/binding_model/vktBindingShaderAccessTests.cpp
new file mode 100644
index 0000000..6a90c62
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/binding_model/vktBindingShaderAccessTests.cpp
@@ -0,0 +1,6457 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Binding shader access tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktBindingShaderAccessTests.hpp"
+
+#include "vktTestCase.hpp"
+
+#include "vkDefs.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkPrograms.hpp"
+#include "vkMemUtil.hpp"
+#include "vkBuilderUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "vkTypeUtil.hpp"
+
+#include "tcuVector.hpp"
+#include "tcuVectorUtil.hpp"
+#include "tcuTexture.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuResultCollector.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuRGBA.hpp"
+#include "tcuSurface.hpp"
+#include "tcuImageCompare.hpp"
+
+#include "deUniquePtr.hpp"
+#include "deSharedPtr.hpp"
+#include "deStringUtil.hpp"
+#include "deArrayUtil.hpp"
+
+#include "qpInfo.h"
+
+namespace vkt
+{
+namespace BindingModel
+{
+namespace
+{
+
+enum ResourceFlag
+{
+	RESOURCE_FLAG_IMMUTABLE_SAMPLER = (1u << 0u),
+
+	RESOURCE_FLAG_LAST				= (1u << 1u)
+};
+
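+// Vertex shader snippet: expands gl_VertexIndex into a full-viewport 2x2 grid of quads (6 vertices each) and outputs the quad index as quadrant_id.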
+static const char* const s_quadrantGenVertexPosSource =	"	highp int quadPhase = gl_VertexIndex % 6;\n"
+														"	highp int quadXcoord = int(quadPhase == 1 || quadPhase == 4 || quadPhase == 5);\n"
+														"	highp int quadYcoord = int(quadPhase == 2 || quadPhase == 3 || quadPhase == 5);\n"
+														"	highp int quadOriginX = (gl_VertexIndex / 6) % 2;\n"
+														"	highp int quadOriginY = (gl_VertexIndex / 6) / 2;\n"
+														"	quadrant_id = gl_VertexIndex / 6;\n"
+														"	result_position = vec4(float(quadOriginX + quadXcoord - 1), float(quadOriginY + quadYcoord - 1), 0.0, 1.0);\n";
+
+bool isUniformDescriptorType (vk::VkDescriptorType type)
+{
+	return type == vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
+		   type == vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
+		   type == vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
+}
+
+bool isDynamicDescriptorType (vk::VkDescriptorType type)
+{
+	return type == vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC || type == vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC;
+}
+
+vk::VkImageType viewTypeToImageType (vk::VkImageViewType type)
+{
+	switch (type)
+	{
+		case vk::VK_IMAGE_VIEW_TYPE_1D:
+		case vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY:	return vk::VK_IMAGE_TYPE_1D;
+		case vk::VK_IMAGE_VIEW_TYPE_2D:
+		case vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY:	return vk::VK_IMAGE_TYPE_2D;
+		case vk::VK_IMAGE_VIEW_TYPE_3D:			return vk::VK_IMAGE_TYPE_3D;
+		case vk::VK_IMAGE_VIEW_TYPE_CUBE:
+		case vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:	return vk::VK_IMAGE_TYPE_2D;
+
+		default:
+			DE_FATAL("Impossible");
+			return (vk::VkImageType)0;
+	}
+}
+
+vk::VkImageLayout getImageLayoutForDescriptorType (vk::VkDescriptorType descType)
+{
+	if (descType == vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
+		return vk::VK_IMAGE_LAYOUT_GENERAL;
+	else
+		return vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+}
+
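+// Total size in bytes of all mip levels, assuming tightly packed texel data.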
+deUint32 getTextureLevelPyramidDataSize (const tcu::TextureLevelPyramid& srcImage)
+{
+	deUint32 dataSize = 0;
+	for (int level = 0; level < srcImage.getNumLevels(); ++level)
+	{
+		const tcu::ConstPixelBufferAccess srcAccess = srcImage.getLevel(level);
+
+		// tightly packed
+		DE_ASSERT(srcAccess.getFormat().getPixelSize() == srcAccess.getPixelPitch());
+
+		dataSize += srcAccess.getWidth() * srcAccess.getHeight() * srcAccess.getDepth() * srcAccess.getFormat().getPixelSize();
+	}
+	return dataSize;
+}
+
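+// Writes all mip levels of srcImage tightly packed into dst and records one VkBufferImageCopy per level and array slice (or cube face).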
+void writeTextureLevelPyramidData (void* dst, deUint32 dstLen, const tcu::TextureLevelPyramid& srcImage, vk::VkImageViewType viewType, std::vector<vk::VkBufferImageCopy>* copySlices)
+{
+	// \note cube is copied face-by-face
+	const deUint32	arraySize	= (viewType == vk::VK_IMAGE_VIEW_TYPE_1D || viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY)		? (srcImage.getLevel(0).getHeight()) :
+								  (viewType == vk::VK_IMAGE_VIEW_TYPE_2D || viewType == vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY)		? (srcImage.getLevel(0).getDepth()) :
+								  (viewType == vk::VK_IMAGE_VIEW_TYPE_3D)														? (1) :
+								  (viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE || viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)	? (srcImage.getLevel(0).getDepth()) :
+								  ((deUint32)0);
+	deUint32		levelOffset	= 0;
+
+	DE_ASSERT(arraySize != 0);
+
+	for (int level = 0; level < srcImage.getNumLevels(); ++level)
+	{
+		const tcu::ConstPixelBufferAccess	srcAccess		= srcImage.getLevel(level);
+		const tcu::PixelBufferAccess		dstAccess		(srcAccess.getFormat(), srcAccess.getSize(), srcAccess.getPitch(), (deUint8*)dst + levelOffset);
+		const deUint32						dataSize		= srcAccess.getWidth() * srcAccess.getHeight() * srcAccess.getDepth() * srcAccess.getFormat().getPixelSize();
+		const deUint32						sliceDataSize	= dataSize / arraySize;
+		const deInt32						sliceHeight		= (viewType == vk::VK_IMAGE_VIEW_TYPE_1D || viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY) ? (1) : (srcAccess.getHeight());
+		const deInt32						sliceDepth		= (viewType == vk::VK_IMAGE_VIEW_TYPE_3D) ? (srcAccess.getDepth()) : (1);
+		const tcu::IVec3					sliceSize		(srcAccess.getWidth(), sliceHeight, sliceDepth);
+
+		// tightly packed
+		DE_ASSERT(srcAccess.getFormat().getPixelSize() == srcAccess.getPixelPitch());
+
+		for (int sliceNdx = 0; sliceNdx < (int)arraySize; ++sliceNdx)
+		{
+			const vk::VkBufferImageCopy copySlice =
+			{
+				(vk::VkDeviceSize)levelOffset + sliceNdx * sliceDataSize,	// bufferOffset
+				(deUint32)sliceSize.x(),									// bufferRowLength
+				(deUint32)sliceSize.y(),									// bufferImageHeight
+				{
+					vk::VK_IMAGE_ASPECT_COLOR_BIT,		// aspectMask
+					(deUint32)level,					// mipLevel
+					(deUint32)sliceNdx,					// baseArrayLayer
+					1u,									// layerCount
+				},															// imageSubresource
+				{
+					0,
+					0,
+					0,
+				},															// imageOffset
+				{
+					(deUint32)sliceSize.x(),
+					(deUint32)sliceSize.y(),
+					(deUint32)sliceSize.z(),
+				}															// imageExtent
+			};
+			copySlices->push_back(copySlice);
+		}
+
+		DE_ASSERT(arraySize * sliceDataSize == dataSize);
+
+		tcu::copy(dstAccess, srcAccess);
+		levelOffset += dataSize;
+	}
+
+	DE_ASSERT(dstLen == levelOffset);
+	DE_UNREF(dstLen);
+}
+
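+// Allocates device memory matching the object's requirements and binds it (buffer and image overloads).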
+de::MovePtr<vk::Allocation> allocateAndBindObjectMemory (const vk::DeviceInterface& vki, vk::VkDevice device, vk::Allocator& allocator, vk::VkBuffer buffer, vk::MemoryRequirement requirement)
+{
+	const vk::VkMemoryRequirements	requirements	= vk::getBufferMemoryRequirements(vki, device, buffer);
+	de::MovePtr<vk::Allocation>		allocation		= allocator.allocate(requirements, requirement);
+
+	VK_CHECK(vki.bindBufferMemory(device, buffer, allocation->getMemory(), allocation->getOffset()));
+	return allocation;
+}
+
+de::MovePtr<vk::Allocation> allocateAndBindObjectMemory (const vk::DeviceInterface& vki, vk::VkDevice device, vk::Allocator& allocator, vk::VkImage image, vk::MemoryRequirement requirement)
+{
+	const vk::VkMemoryRequirements	requirements	= vk::getImageMemoryRequirements(vki, device, image);
+	de::MovePtr<vk::Allocation>		allocation		= allocator.allocate(requirements, requirement);
+
+	VK_CHECK(vki.bindImageMemory(device, image, allocation->getMemory(), allocation->getOffset()));
+	return allocation;
+}
+
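+// Convenience overloads for sampler-only and image-view-only descriptor image infos.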
+vk::VkDescriptorImageInfo makeDescriptorImageInfo (vk::VkSampler sampler)
+{
+	return vk::makeDescriptorImageInfo(sampler, (vk::VkImageView)0, (vk::VkImageLayout)0);
+}
+
+vk::VkDescriptorImageInfo makeDescriptorImageInfo (vk::VkImageView imageView, vk::VkImageLayout layout)
+{
+	return vk::makeDescriptorImageInfo((vk::VkSampler)0, imageView, layout);
+}
+
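+// Fills the four quadrants of dst with c1..c4, with c1 in the origin quadrant.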
+void drawQuadrantReferenceResult (const tcu::PixelBufferAccess& dst, const tcu::Vec4& c1, const tcu::Vec4& c2, const tcu::Vec4& c3, const tcu::Vec4& c4)
+{
+	tcu::clear(tcu::getSubregion(dst, 0,					0,						dst.getWidth() / 2,						dst.getHeight() / 2),					c1);
+	tcu::clear(tcu::getSubregion(dst, dst.getWidth() / 2,	0,						dst.getWidth() - dst.getWidth() / 2,	dst.getHeight() / 2),					c2);
+	tcu::clear(tcu::getSubregion(dst, 0,					dst.getHeight() / 2,	dst.getWidth() / 2,						dst.getHeight() - dst.getHeight() / 2),	c3);
+	tcu::clear(tcu::getSubregion(dst, dst.getWidth() / 2,	dst.getHeight() / 2,	dst.getWidth() - dst.getWidth() / 2,	dst.getHeight() - dst.getHeight() / 2),	c4);
+}
+
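+// Base class for test instances that render into a single offscreen color target; subclasses implement logTestPlan(), renderToTarget() and verifyResultImage().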
+class SingleTargetRenderInstance : public vkt::TestInstance
+{
+public:
+											SingleTargetRenderInstance	(Context&			context,
+																		 const tcu::UVec2&	size);
+
+private:
+	static vk::Move<vk::VkImage>			createColorAttachment		(const vk::DeviceInterface&		vki,
+																		 vk::VkDevice					device,
+																		 vk::Allocator&					allocator,
+																		 const tcu::TextureFormat&		format,
+																		 const tcu::UVec2&				size,
+																		 de::MovePtr<vk::Allocation>*	outAllocation);
+
+	static vk::Move<vk::VkImageView>		createColorAttachmentView	(const vk::DeviceInterface&	vki,
+																		 vk::VkDevice				device,
+																		 const tcu::TextureFormat&	format,
+																		 vk::VkImage				image);
+
+	static vk::Move<vk::VkRenderPass>		createRenderPass			(const vk::DeviceInterface&	vki,
+																		 vk::VkDevice				device,
+																		 const tcu::TextureFormat&	format);
+
+	static vk::Move<vk::VkFramebuffer>		createFramebuffer			(const vk::DeviceInterface&	vki,
+																		 vk::VkDevice				device,
+																		 vk::VkRenderPass			renderpass,
+																		 vk::VkImageView			colorAttachmentView,
+																		 const tcu::UVec2&			size);
+
+	static vk::Move<vk::VkCommandPool>		createCommandPool			(const vk::DeviceInterface&	vki,
+																		 vk::VkDevice				device,
+																		 deUint32					queueFamilyIndex);
+
+	virtual void							logTestPlan					(void) const = 0;
+	virtual void							renderToTarget				(void) = 0;
+	virtual tcu::TestStatus					verifyResultImage			(const tcu::ConstPixelBufferAccess& result) const = 0;
+
+	void									readRenderTarget			(tcu::TextureLevel& dst);
+	tcu::TestStatus							iterate						(void);
+
+protected:
+	const tcu::TextureFormat				m_targetFormat;
+	const tcu::UVec2						m_targetSize;
+
+	const vk::DeviceInterface&				m_vki;
+	const vk::VkDevice						m_device;
+	const vk::VkQueue						m_queue;
+	const deUint32							m_queueFamilyIndex;
+	vk::Allocator&							m_allocator;
+	de::MovePtr<vk::Allocation>				m_colorAttachmentMemory;
+	const vk::Unique<vk::VkImage>			m_colorAttachmentImage;
+	const vk::Unique<vk::VkImageView>		m_colorAttachmentView;
+	const vk::Unique<vk::VkRenderPass>		m_renderPass;
+	const vk::Unique<vk::VkFramebuffer>		m_framebuffer;
+	const vk::Unique<vk::VkCommandPool>		m_cmdPool;
+
+	bool									m_firstIteration;
+};
+
+SingleTargetRenderInstance::SingleTargetRenderInstance (Context&			context,
+														const tcu::UVec2&	size)
+	: vkt::TestInstance			(context)
+	, m_targetFormat			(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8)
+	, m_targetSize				(size)
+	, m_vki						(context.getDeviceInterface())
+	, m_device					(context.getDevice())
+	, m_queue					(context.getUniversalQueue())
+	, m_queueFamilyIndex		(context.getUniversalQueueFamilyIndex())
+	, m_allocator				(context.getDefaultAllocator())
+	, m_colorAttachmentMemory	(DE_NULL)
+	, m_colorAttachmentImage	(createColorAttachment(m_vki, m_device, m_allocator, m_targetFormat, m_targetSize, &m_colorAttachmentMemory))
+	, m_colorAttachmentView		(createColorAttachmentView(m_vki, m_device, m_targetFormat, *m_colorAttachmentImage))
+	, m_renderPass				(createRenderPass(m_vki, m_device, m_targetFormat))
+	, m_framebuffer				(createFramebuffer(m_vki, m_device, *m_renderPass, *m_colorAttachmentView, m_targetSize))
+	, m_cmdPool					(createCommandPool(m_vki, m_device, context.getUniversalQueueFamilyIndex()))
+	, m_firstIteration			(true)
+{
+}
+
+vk::Move<vk::VkImage> SingleTargetRenderInstance::createColorAttachment (const vk::DeviceInterface&		vki,
+																		 vk::VkDevice					device,
+																		 vk::Allocator&					allocator,
+																		 const tcu::TextureFormat&		format,
+																		 const tcu::UVec2&				size,
+																		 de::MovePtr<vk::Allocation>*	outAllocation)
+{
+	const vk::VkImageCreateInfo	imageInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkImageCreateFlags)0,
+		vk::VK_IMAGE_TYPE_2D,							// imageType
+		vk::mapTextureFormat(format),					// format
+		{ size.x(), size.y(), 1u },						// extent
+		1,												// mipLevels
+		1,												// arraySize
+		vk::VK_SAMPLE_COUNT_1_BIT,						// samples
+		vk::VK_IMAGE_TILING_OPTIMAL,					// tiling
+		vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,	// usage
+		vk::VK_SHARING_MODE_EXCLUSIVE,					// sharingMode
+		0u,												// queueFamilyCount
+		DE_NULL,										// pQueueFamilyIndices
+		vk::VK_IMAGE_LAYOUT_UNDEFINED,					// initialLayout
+	};
+
+	vk::Move<vk::VkImage>		image		(vk::createImage(vki, device, &imageInfo));
+	de::MovePtr<vk::Allocation>	allocation	(allocateAndBindObjectMemory(vki, device, allocator, *image, vk::MemoryRequirement::Any));
+
+	*outAllocation = allocation;
+	return image;
+}
+
+vk::Move<vk::VkImageView> SingleTargetRenderInstance::createColorAttachmentView (const vk::DeviceInterface&	vki,
+																				 vk::VkDevice				device,
+																				 const tcu::TextureFormat&	format,
+																				 vk::VkImage				image)
+{
+	const vk::VkImageViewCreateInfo createInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+		DE_NULL,
+		(vk::VkImageViewCreateFlags)0,
+		image,							// image
+		vk::VK_IMAGE_VIEW_TYPE_2D,		// viewType
+		vk::mapTextureFormat(format),	// format
+		vk::makeComponentMappingRGBA(),
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,	// aspectMask
+			0u,								// baseMipLevel
+			1u,								// mipLevels
+			0u,								// baseArrayLayer
+			1u,								// arraySize
+		},
+	};
+
+	return vk::createImageView(vki, device, &createInfo);
+}
+
+vk::Move<vk::VkRenderPass> SingleTargetRenderInstance::createRenderPass (const vk::DeviceInterface&		vki,
+																		 vk::VkDevice					device,
+																		 const tcu::TextureFormat&		format)
+{
+	const vk::VkAttachmentDescription	attachmentDescription	=
+	{
+		(vk::VkAttachmentDescriptionFlags)0,
+		vk::mapTextureFormat(format),					// format
+		vk::VK_SAMPLE_COUNT_1_BIT,						// samples
+		vk::VK_ATTACHMENT_LOAD_OP_CLEAR,				// loadOp
+		vk::VK_ATTACHMENT_STORE_OP_STORE,				// storeOp
+		vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,			// stencilLoadOp
+		vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,			// stencilStoreOp
+		vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	// initialLayout
+		vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	// finalLayout
+	};
+	const vk::VkAttachmentReference		colorAttachment			=
+	{
+		0u,												// attachment
+		vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL	// layout
+	};
+	const vk::VkAttachmentReference		depthStencilAttachment	=
+	{
+		vk::VK_NO_ATTACHMENT,							// attachment
+		vk::VK_IMAGE_LAYOUT_UNDEFINED					// layout
+	};
+	const vk::VkSubpassDescription		subpass					=
+	{
+		(vk::VkSubpassDescriptionFlags)0,
+		vk::VK_PIPELINE_BIND_POINT_GRAPHICS,			// pipelineBindPoint
+		0u,												// inputAttachmentCount
+		DE_NULL,										// pInputAttachments
+		1u,												// colorAttachmentCount
+		&colorAttachment,								// pColorAttachments
+		DE_NULL,										// pResolveAttachments
+		&depthStencilAttachment,						// pDepthStencilAttachment
+		0u,												// preserveAttachmentCount
+		DE_NULL											// pPreserveAttachments
+	};
+	const vk::VkRenderPassCreateInfo	renderPassCreateInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+		DE_NULL,
+		(vk::VkRenderPassCreateFlags)0,
+		1u,												// attachmentCount
+		&attachmentDescription,							// pAttachments
+		1u,												// subpassCount
+		&subpass,										// pSubpasses
+		0u,												// dependencyCount
+		DE_NULL,										// pDependencies
+	};
+
+	return vk::createRenderPass(vki, device, &renderPassCreateInfo);
+}
+
+vk::Move<vk::VkFramebuffer> SingleTargetRenderInstance::createFramebuffer (const vk::DeviceInterface&	vki,
+																		   vk::VkDevice					device,
+																		   vk::VkRenderPass				renderpass,
+																		   vk::VkImageView				colorAttachmentView,
+																		   const tcu::UVec2&			size)
+{
+	const vk::VkFramebufferCreateInfo	framebufferCreateInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
+		DE_NULL,
+		(vk::VkFramebufferCreateFlags)0,
+		renderpass,				// renderPass
+		1u,						// attachmentCount
+		&colorAttachmentView,	// pAttachments
+		size.x(),				// width
+		size.y(),				// height
+		1,						// layers
+	};
+
+	return vk::createFramebuffer(vki, device, &framebufferCreateInfo);
+}
+
+vk::Move<vk::VkCommandPool> SingleTargetRenderInstance::createCommandPool (const vk::DeviceInterface&	vki,
+																		   vk::VkDevice					device,
+																		   deUint32						queueFamilyIndex)
+{
+	const vk::VkCommandPoolCreateInfo createInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+		DE_NULL,
+		vk::VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,	// flags
+		queueFamilyIndex,							// queueFamilyIndex
+	};
+	return vk::createCommandPool(vki, device, &createInfo);
+}
+
+void SingleTargetRenderInstance::readRenderTarget (tcu::TextureLevel& dst)
+{
+	const deUint64							pixelDataSize				= (deUint64)(m_targetSize.x() * m_targetSize.y() * m_targetFormat.getPixelSize());
+	const vk::VkBufferCreateInfo			bufferCreateInfo			=
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+		DE_NULL,
+		0u,												// flags
+		pixelDataSize,									// size
+		vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT,			// usage
+		vk::VK_SHARING_MODE_EXCLUSIVE,					// sharingMode
+		0u,												// queueFamilyCount
+		DE_NULL,										// pQueueFamilyIndices
+	};
+	const vk::Unique<vk::VkBuffer>			buffer						(vk::createBuffer(m_vki, m_device, &bufferCreateInfo));
+	const vk::VkImageSubresourceRange		fullSubrange				=
+	{
+		vk::VK_IMAGE_ASPECT_COLOR_BIT,					// aspectMask
+		0u,												// baseMipLevel
+		1u,												// mipLevels
+		0u,												// baseArraySlice
+		1u,												// arraySize
+	};
+	const vk::VkImageMemoryBarrier			imageBarrier				=
+	{
+		vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+		DE_NULL,
+		vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,		// srcAccessMask
+		vk::VK_ACCESS_TRANSFER_READ_BIT,				// dstAccessMask
+		vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	// oldLayout
+		vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,		// newLayout
+		vk::VK_QUEUE_FAMILY_IGNORED,					// srcQueueFamilyIndex
+		vk::VK_QUEUE_FAMILY_IGNORED,					// destQueueFamilyIndex
+		*m_colorAttachmentImage,						// image
+		fullSubrange,									// subresourceRange
+	};
+	const vk::VkBufferMemoryBarrier			memoryBarrier				=
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+		DE_NULL,
+		vk::VK_ACCESS_TRANSFER_WRITE_BIT,				// srcAccessMask
+		vk::VK_ACCESS_HOST_READ_BIT,					// dstAccessMask
+		vk::VK_QUEUE_FAMILY_IGNORED,					// srcQueueFamilyIndex
+		vk::VK_QUEUE_FAMILY_IGNORED,					// destQueueFamilyIndex
+		*buffer,										// buffer
+		0u,												// offset
+		(vk::VkDeviceSize)pixelDataSize					// size
+	};
+	const vk::VkCommandBufferAllocateInfo	cmdBufAllocInfo				=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+		DE_NULL,
+		*m_cmdPool,								// cmdPool
+		vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,	// level
+		1u,										// bufferCount
+	};
+	const vk::VkFenceCreateInfo				fenceCreateInfo				=
+	{
+		vk::VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,												// flags
+	};
+	const vk::VkCommandBufferBeginInfo		cmdBufBeginInfo				=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		vk::VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,	// flags
+		(const vk::VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+	const vk::VkImageSubresourceLayers		firstSlice					=
+	{
+		vk::VK_IMAGE_ASPECT_COLOR_BIT,					// aspect
+		0,												// mipLevel
+		0,												// arrayLayer
+		1,												// arraySize
+	};
+	const vk::VkBufferImageCopy				copyRegion					=
+	{
+		0u,												// bufferOffset
+		m_targetSize.x(),								// bufferRowLength
+		m_targetSize.y(),								// bufferImageHeight
+		firstSlice,										// imageSubresource
+		{ 0, 0, 0 },									// imageOffset
+		{ m_targetSize.x(), m_targetSize.y(), 1u }		// imageExtent
+	};
+
+	const de::MovePtr<vk::Allocation>		bufferMemory				= allocateAndBindObjectMemory(m_vki, m_device, m_allocator, *buffer, vk::MemoryRequirement::HostVisible);
+
+	const vk::Unique<vk::VkCommandBuffer>	cmd							(vk::allocateCommandBuffer(m_vki, m_device, &cmdBufAllocInfo));
+	const vk::Unique<vk::VkFence>			cmdCompleteFence			(vk::createFence(m_vki, m_device, &fenceCreateInfo));
+	const deUint64							infiniteTimeout				= ~(deUint64)0u;
+
+	// copy content to buffer
+	VK_CHECK(m_vki.beginCommandBuffer(*cmd, &cmdBufBeginInfo));
+	m_vki.cmdPipelineBarrier(*cmd, vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0,
+							 0, (const vk::VkMemoryBarrier*)DE_NULL,
+							 0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+							 1, &imageBarrier);
+	m_vki.cmdCopyImageToBuffer(*cmd, *m_colorAttachmentImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *buffer, 1, &copyRegion);
+	m_vki.cmdPipelineBarrier(*cmd, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+							 0, (const vk::VkMemoryBarrier*)DE_NULL,
+							 1, &memoryBarrier,
+							 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+	VK_CHECK(m_vki.endCommandBuffer(*cmd));
+
+	// wait for transfer to complete
+	{
+		const vk::VkSubmitInfo	submitInfo	=
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
+			DE_NULL,
+			0u,
+			(const vk::VkSemaphore*)0,
+			(const vk::VkPipelineStageFlags*)DE_NULL,
+			1u,
+			&cmd.get(),
+			0u,
+			(const vk::VkSemaphore*)0,
+		};
+
+		VK_CHECK(m_vki.queueSubmit(m_queue, 1, &submitInfo, *cmdCompleteFence));
+	}
+	VK_CHECK(m_vki.waitForFences(m_device, 1, &cmdCompleteFence.get(), 0u, infiniteTimeout)); // \note: timeout is failure
+
+	dst.setStorage(m_targetFormat, m_targetSize.x(), m_targetSize.y());
+
+	// copy data
+	invalidateMappedMemoryRange(m_vki, m_device, bufferMemory->getMemory(), bufferMemory->getOffset(), pixelDataSize);
+	tcu::copy(dst, tcu::ConstPixelBufferAccess(dst.getFormat(), dst.getSize(), bufferMemory->getHostPtr()));
+}
+
+tcu::TestStatus SingleTargetRenderInstance::iterate (void)
+{
+	tcu::TextureLevel resultImage;
+
+	// log
+	if (m_firstIteration)
+	{
+		logTestPlan();
+		m_firstIteration = false;
+	}
+
+	// render
+	{
+		// transition to VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
+		const vk::VkImageSubresourceRange		fullSubrange				=
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,					// aspectMask
+			0u,												// baseMipLevel
+			1u,												// mipLevels
+			0u,												// baseArraySlice
+			1u,												// arraySize
+		};
+		const vk::VkImageMemoryBarrier			imageBarrier				=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+			DE_NULL,
+			0u,												// srcAccessMask
+			vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,		// dstAccessMask
+			vk::VK_IMAGE_LAYOUT_UNDEFINED,					// oldLayout
+			vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	// newLayout
+			vk::VK_QUEUE_FAMILY_IGNORED,					// srcQueueFamilyIndex
+			vk::VK_QUEUE_FAMILY_IGNORED,					// destQueueFamilyIndex
+			*m_colorAttachmentImage,						// image
+			fullSubrange,									// subresourceRange
+		};
+		const vk::VkCommandBufferAllocateInfo	cmdBufAllocInfo				=
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+			DE_NULL,
+			*m_cmdPool,										// cmdPool
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,			// level
+			1u,												// count
+		};
+		const vk::VkCommandBufferBeginInfo		cmdBufBeginInfo				=
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+			DE_NULL,
+			vk::VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,	// flags
+			(const vk::VkCommandBufferInheritanceInfo*)DE_NULL,
+		};
+
+		const vk::Unique<vk::VkCommandBuffer>	cmd					(vk::allocateCommandBuffer(m_vki, m_device, &cmdBufAllocInfo));
+
+		VK_CHECK(m_vki.beginCommandBuffer(*cmd, &cmdBufBeginInfo));
+		m_vki.cmdPipelineBarrier(*cmd, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+								 0, (const vk::VkMemoryBarrier*)DE_NULL,
+								 0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+								 1, &imageBarrier);
+		VK_CHECK(m_vki.endCommandBuffer(*cmd));
+
+		{
+			const vk::VkSubmitInfo	submitInfo	=
+			{
+				vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
+				DE_NULL,
+				0u,
+				(const vk::VkSemaphore*)0,
+				(const vk::VkPipelineStageFlags*)DE_NULL,
+				1u,
+				&cmd.get(),
+				0u,
+				(const vk::VkSemaphore*)0,
+			};
+
+			VK_CHECK(m_vki.queueSubmit(m_queue, 1, &submitInfo, (vk::VkFence)0));
+		}
+
+		// and then render to the target
+		renderToTarget();
+	}
+
+	// read and verify
+	readRenderTarget(resultImage);
+	return verifyResultImage(resultImage.getAccess());
+}
+
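+// Builds shader modules and stage create infos for every stage present in the binary collection.
+// If a provided stage is not supported by the device, a NotSupportedError is thrown.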
+class RenderInstanceShaders
+{
+public:
+														RenderInstanceShaders		(const vk::DeviceInterface&				vki,
+																					 vk::VkDevice							device,
+																					 const vk::VkPhysicalDeviceFeatures&	deviceFeatures,
+																					 const vk::BinaryCollection&			programCollection);
+
+	inline bool											hasTessellationStage		(void) const { return *m_tessCtrlShaderModule != 0 || *m_tessEvalShaderModule != 0;	}
+	inline deUint32										getNumStages				(void) const { return (deUint32)m_stageInfos.size();								}
+	inline const vk::VkPipelineShaderStageCreateInfo*	getStages					(void) const { return &m_stageInfos[0];												}
+
+private:
+	void												addStage					(const vk::DeviceInterface&				vki,
+																					 vk::VkDevice							device,
+																					 const vk::VkPhysicalDeviceFeatures&	deviceFeatures,
+																					 const vk::BinaryCollection&			programCollection,
+																					 const char*							name,
+																					 vk::VkShaderStageFlagBits				stage,
+																					 vk::Move<vk::VkShaderModule>*			outModule);
+
+	vk::VkPipelineShaderStageCreateInfo					getShaderStageCreateInfo	(vk::VkShaderStageFlagBits stage, vk::VkShaderModule shader) const;
+
+	vk::Move<vk::VkShaderModule>						m_vertexShaderModule;
+	vk::Move<vk::VkShaderModule>						m_tessCtrlShaderModule;
+	vk::Move<vk::VkShaderModule>						m_tessEvalShaderModule;
+	vk::Move<vk::VkShaderModule>						m_geometryShaderModule;
+	vk::Move<vk::VkShaderModule>						m_fragmentShaderModule;
+	std::vector<vk::VkPipelineShaderStageCreateInfo>	m_stageInfos;
+};
+
+RenderInstanceShaders::RenderInstanceShaders (const vk::DeviceInterface&			vki,
+											  vk::VkDevice							device,
+											  const vk::VkPhysicalDeviceFeatures&	deviceFeatures,
+											  const vk::BinaryCollection&			programCollection)
+{
+	addStage(vki, device, deviceFeatures, programCollection, "vertex",		vk::VK_SHADER_STAGE_VERTEX_BIT,						&m_vertexShaderModule);
+	addStage(vki, device, deviceFeatures, programCollection, "tess_ctrl",	vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,		&m_tessCtrlShaderModule);
+	addStage(vki, device, deviceFeatures, programCollection, "tess_eval",	vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,	&m_tessEvalShaderModule);
+	addStage(vki, device, deviceFeatures, programCollection, "geometry",	vk::VK_SHADER_STAGE_GEOMETRY_BIT,					&m_geometryShaderModule);
+	addStage(vki, device, deviceFeatures, programCollection, "fragment",	vk::VK_SHADER_STAGE_FRAGMENT_BIT,					&m_fragmentShaderModule);
+
+	DE_ASSERT(!m_stageInfos.empty());
+}
+
+void RenderInstanceShaders::addStage (const vk::DeviceInterface&			vki,
+									  vk::VkDevice							device,
+									  const vk::VkPhysicalDeviceFeatures&	deviceFeatures,
+									  const vk::BinaryCollection&			programCollection,
+									  const char*							name,
+									  vk::VkShaderStageFlagBits				stage,
+									  vk::Move<vk::VkShaderModule>*			outModule)
+{
+	if (programCollection.contains(name))
+	{
+		if (vk::isShaderStageSupported(deviceFeatures, stage))
+		{
+			vk::Move<vk::VkShaderModule>	module	= createShaderModule(vki, device, programCollection.get(name), (vk::VkShaderModuleCreateFlags)0);
+
+			m_stageInfos.push_back(getShaderStageCreateInfo(stage, *module));
+			*outModule = module;
+		}
+		else
+		{
+			// Wait for the GPU to idle so that throwing the exception
+			// below doesn't free in-use GPU resources.
+			vki.deviceWaitIdle(device);
+			TCU_THROW(NotSupportedError, (de::toString(stage) + " is not supported").c_str());
+		}
+	}
+}
+
+vk::VkPipelineShaderStageCreateInfo RenderInstanceShaders::getShaderStageCreateInfo (vk::VkShaderStageFlagBits stage, vk::VkShaderModule shader) const
+{
+	const vk::VkPipelineShaderStageCreateInfo	stageCreateInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineShaderStageCreateFlags)0,
+		stage,			// stage
+		shader,			// shader
+		"main",
+		DE_NULL,		// pSpecializationInfo
+	};
+	return stageCreateInfo;
+}
+
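+// Render instance that builds a graphics pipeline with no vertex input and records all draw
+// commands either inline in the primary command buffer or in a single secondary command buffer.
+// Subclasses supply the pipeline layout and the draw commands.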
+class SingleCmdRenderInstance : public SingleTargetRenderInstance
+{
+public:
+									SingleCmdRenderInstance	(Context&			context,
+															 bool				isPrimaryCmdBuf,
+															 const tcu::UVec2&	renderSize);
+
+private:
+	vk::Move<vk::VkPipeline>		createPipeline				(vk::VkPipelineLayout pipelineLayout);
+
+	virtual vk::VkPipelineLayout	getPipelineLayout			(void) const = 0;
+	virtual void					writeDrawCmdBuffer			(vk::VkCommandBuffer cmd) const = 0;
+
+	void							renderToTarget				(void);
+
+	const bool						m_isPrimaryCmdBuf;
+};
+
+SingleCmdRenderInstance::SingleCmdRenderInstance (Context&			context,
+												  bool				isPrimaryCmdBuf,
+												  const tcu::UVec2&	renderSize)
+	: SingleTargetRenderInstance	(context, renderSize)
+	, m_isPrimaryCmdBuf				(isPrimaryCmdBuf)
+{
+}
+
+vk::Move<vk::VkPipeline> SingleCmdRenderInstance::createPipeline (vk::VkPipelineLayout pipelineLayout)
+{
+	const RenderInstanceShaders							shaderStages		(m_vki, m_device, m_context.getDeviceFeatures(), m_context.getBinaryCollection());
+	const vk::VkPrimitiveTopology						topology			= shaderStages.hasTessellationStage() ? vk::VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
+	const vk::VkPipelineVertexInputStateCreateInfo		vertexInputState	=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineVertexInputStateCreateFlags)0,
+		0u,											// bindingCount
+		DE_NULL,									// pVertexBindingDescriptions
+		0u,											// attributeCount
+		DE_NULL,									// pVertexAttributeDescriptions
+	};
+	const vk::VkPipelineInputAssemblyStateCreateInfo	iaState				=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineInputAssemblyStateCreateFlags)0,
+		topology,									// topology
+		vk::VK_FALSE,								// primitiveRestartEnable
+	};
+	const vk::VkPipelineTessellationStateCreateInfo		tessState			=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineTessellationStateCreateFlags)0,
+		3u,											// patchControlPoints
+	};
+	const vk::VkViewport								viewport			=
+	{
+		0.0f,										// originX
+		0.0f,										// originY
+		float(m_targetSize.x()),					// width
+		float(m_targetSize.y()),					// height
+		0.0f,										// minDepth
+		1.0f,										// maxDepth
+	};
+	const vk::VkRect2D									renderArea			=
+	{
+		{ 0, 0 },									// offset
+		{ m_targetSize.x(), m_targetSize.y() },		// extent
+	};
+	const vk::VkPipelineViewportStateCreateInfo			vpState				=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineViewportStateCreateFlags)0,
+		1u,											// viewportCount
+		&viewport,									// pViewports
+		1u,											// scissorCount
+		&renderArea,								// pScissors
+	};
+	const vk::VkPipelineRasterizationStateCreateInfo	rsState				=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineRasterizationStateCreateFlags)0,
+		vk::VK_TRUE,								// depthClipEnable
+		vk::VK_FALSE,								// rasterizerDiscardEnable
+		vk::VK_POLYGON_MODE_FILL,					// fillMode
+		vk::VK_CULL_MODE_NONE,						// cullMode
+		vk::VK_FRONT_FACE_COUNTER_CLOCKWISE,		// frontFace
+		vk::VK_FALSE,								// depthBiasEnable
+		0.0f,										// depthBias
+		0.0f,										// depthBiasClamp
+		0.0f,										// slopeScaledDepthBias
+		1.0f,										// lineWidth
+	};
+	const vk::VkSampleMask								sampleMask			= 0x01u;
+	const vk::VkPipelineMultisampleStateCreateInfo		msState				=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineMultisampleStateCreateFlags)0,
+		vk::VK_SAMPLE_COUNT_1_BIT,					// rasterSamples
+		vk::VK_FALSE,								// sampleShadingEnable
+		0.0f,										// minSampleShading
+		&sampleMask,								// sampleMask
+		vk::VK_FALSE,								// alphaToCoverageEnable
+		vk::VK_FALSE,								// alphaToOneEnable
+	};
+	const vk::VkPipelineDepthStencilStateCreateInfo		dsState				=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineDepthStencilStateCreateFlags)0,
+		vk::VK_FALSE,								// depthTestEnable
+		vk::VK_FALSE,								// depthWriteEnable
+		vk::VK_COMPARE_OP_ALWAYS,					// depthCompareOp
+		vk::VK_FALSE,								// depthBoundsTestEnable
+		vk::VK_FALSE,								// stencilTestEnable
+		{ vk::VK_STENCIL_OP_KEEP, vk::VK_STENCIL_OP_KEEP, vk::VK_STENCIL_OP_KEEP, vk::VK_COMPARE_OP_ALWAYS, 0u, 0u, 0u },	// front
+		{ vk::VK_STENCIL_OP_KEEP, vk::VK_STENCIL_OP_KEEP, vk::VK_STENCIL_OP_KEEP, vk::VK_COMPARE_OP_ALWAYS, 0u, 0u, 0u },	// back
+		-1.0f,										// minDepthBounds
+		+1.0f,										// maxDepthBounds
+	};
+	const vk::VkPipelineColorBlendAttachmentState		cbAttachment		=
+	{
+		vk::VK_FALSE,								// blendEnable
+		vk::VK_BLEND_FACTOR_ZERO,					// srcBlendColor
+		vk::VK_BLEND_FACTOR_ZERO,					// destBlendColor
+		vk::VK_BLEND_OP_ADD,						// blendOpColor
+		vk::VK_BLEND_FACTOR_ZERO,					// srcBlendAlpha
+		vk::VK_BLEND_FACTOR_ZERO,					// destBlendAlpha
+		vk::VK_BLEND_OP_ADD,						// blendOpAlpha
+		(vk::VK_COLOR_COMPONENT_R_BIT |
+		 vk::VK_COLOR_COMPONENT_G_BIT |
+		 vk::VK_COLOR_COMPONENT_B_BIT |
+		 vk::VK_COLOR_COMPONENT_A_BIT),				// channelWriteMask
+	};
+	const vk::VkPipelineColorBlendStateCreateInfo		cbState				=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineColorBlendStateCreateFlags)0,
+		vk::VK_FALSE,								// logicOpEnable
+		vk::VK_LOGIC_OP_CLEAR,						// logicOp
+		1u,											// attachmentCount
+		&cbAttachment,								// pAttachments
+		{ 0.0f, 0.0f, 0.0f, 0.0f },					// blendConst
+	};
+	const vk::VkPipelineDynamicStateCreateInfo			dynState			=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineDynamicStateCreateFlags)0,
+		0u,											// dynamicStateCount
+		DE_NULL,									// pDynamicStates
+	};
+	const vk::VkGraphicsPipelineCreateInfo createInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineCreateFlags)0,
+		shaderStages.getNumStages(),									// stageCount
+		shaderStages.getStages(),										// pStages
+		&vertexInputState,												// pVertexInputState
+		&iaState,														// pInputAssemblyState
+		(shaderStages.hasTessellationStage() ? &tessState : DE_NULL),	// pTessellationState
+		&vpState,														// pViewportState
+		&rsState,														// pRasterState
+		&msState,														// pMultisampleState
+		&dsState,														// pDepthStencilState
+		&cbState,														// pColorBlendState
+		&dynState,														// pDynamicState
+		pipelineLayout,													// layout
+		*m_renderPass,													// renderPass
+		0u,																// subpass
+		(vk::VkPipeline)0,												// basePipelineHandle
+		0u,																// basePipelineIndex
+	};
+	return createGraphicsPipeline(m_vki, m_device, (vk::VkPipelineCache)0u, &createInfo);
+}
+
+void SingleCmdRenderInstance::renderToTarget (void)
+{
+	const vk::VkRect2D									renderArea						=
+	{
+		{ 0, 0 },								// offset
+		{ m_targetSize.x(), m_targetSize.y() },	// extent
+	};
+	const vk::VkCommandBufferAllocateInfo				mainCmdBufCreateInfo			=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+		DE_NULL,
+		*m_cmdPool,								// cmdPool
+		vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,	// level
+		1u,										// count
+	};
+	const vk::VkCommandBufferBeginInfo					mainCmdBufBeginInfo				=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		vk::VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,	// flags
+		(const vk::VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+	const vk::VkCommandBufferAllocateInfo				passCmdBufCreateInfo			=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+		DE_NULL,
+		*m_cmdPool,								// cmdPool
+		vk::VK_COMMAND_BUFFER_LEVEL_SECONDARY,	// level
+		1u,										// count
+	};
+	const vk::VkCommandBufferInheritanceInfo			passCmdBufInheritInfo			=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+		DE_NULL,
+		(vk::VkRenderPass)*m_renderPass,						// renderPass
+		0u,														// subpass
+		(vk::VkFramebuffer)*m_framebuffer,						// framebuffer
+		vk::VK_FALSE,											// occlusionQueryEnable
+		(vk::VkQueryControlFlags)0,
+		(vk::VkQueryPipelineStatisticFlags)0,
+	};
+	const vk::VkCommandBufferBeginInfo					passCmdBufBeginInfo				=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		vk::VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT |
+		vk::VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,	// flags
+		&passCmdBufInheritInfo,
+	};
+	const vk::VkFenceCreateInfo							fenceCreateInfo				=
+	{
+		vk::VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,			// flags
+	};
+	const vk::VkClearValue								clearValue					= vk::makeClearValueColorF32(0.0f, 0.0f, 0.0f, 0.0f);
+	const vk::VkRenderPassBeginInfo						renderPassBeginInfo			=
+	{
+		vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+		DE_NULL,
+		*m_renderPass,		// renderPass
+		*m_framebuffer,		// framebuffer
+		renderArea,			// renderArea
+		1u,					// clearValueCount
+		&clearValue,		// pClearValues
+	};
+
+	const vk::VkPipelineLayout							pipelineLayout				(getPipelineLayout());
+	const vk::Unique<vk::VkPipeline>					pipeline					(createPipeline(pipelineLayout));
+	const vk::Unique<vk::VkCommandBuffer>				mainCmd						(vk::allocateCommandBuffer(m_vki, m_device, &mainCmdBufCreateInfo));
+	const vk::Unique<vk::VkCommandBuffer>				passCmd						((m_isPrimaryCmdBuf) ? (vk::Move<vk::VkCommandBuffer>()) : (vk::allocateCommandBuffer(m_vki, m_device, &passCmdBufCreateInfo)));
+	const vk::Unique<vk::VkFence>						fence						(vk::createFence(m_vki, m_device, &fenceCreateInfo));
+	const deUint64										infiniteTimeout				= ~(deUint64)0u;
+	const vk::VkSubpassContents							passContents				= (m_isPrimaryCmdBuf) ? (vk::VK_SUBPASS_CONTENTS_INLINE) : (vk::VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS);
+
+	VK_CHECK(m_vki.beginCommandBuffer(*mainCmd, &mainCmdBufBeginInfo));
+	m_vki.cmdBeginRenderPass(*mainCmd, &renderPassBeginInfo, passContents);
+
+	if (m_isPrimaryCmdBuf)
+	{
+		m_vki.cmdBindPipeline(*mainCmd, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
+		writeDrawCmdBuffer(*mainCmd);
+	}
+	else
+	{
+		VK_CHECK(m_vki.beginCommandBuffer(*passCmd, &passCmdBufBeginInfo));
+		m_vki.cmdBindPipeline(*passCmd, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
+		writeDrawCmdBuffer(*passCmd);
+		VK_CHECK(m_vki.endCommandBuffer(*passCmd));
+
+		m_vki.cmdExecuteCommands(*mainCmd, 1, &passCmd.get());
+	}
+
+	m_vki.cmdEndRenderPass(*mainCmd);
+	VK_CHECK(m_vki.endCommandBuffer(*mainCmd));
+
+	// submit and wait for the work to finish before exiting scope. (Killing in-flight objects is a no-no).
+	{
+		const vk::VkSubmitInfo	submitInfo	=
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
+			DE_NULL,
+			0u,
+			(const vk::VkSemaphore*)0,
+			(const vk::VkPipelineStageFlags*)DE_NULL,
+			1u,
+			&mainCmd.get(),
+			0u,
+			(const vk::VkSemaphore*)0,
+		};
+		VK_CHECK(m_vki.queueSubmit(m_queue, 1, &submitInfo, *fence));
+	}
+	VK_CHECK(m_vki.waitForFences(m_device, 1, &fence.get(), 0u, infiniteTimeout)); // \note: timeout is failure
+}
+
+enum ShaderInputInterface
+{
+	SHADER_INPUT_SINGLE_DESCRIPTOR = 0,	//!< one descriptor
+	SHADER_INPUT_MULTIPLE_DESCRIPTORS,	//!< multiple descriptors
+	SHADER_INPUT_DESCRIPTOR_ARRAY,		//!< descriptor array
+
+	SHADER_INPUT_LAST
+};
+
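+// Number of backing resources (descriptors) required by the given shader interface type.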
+deUint32 getInterfaceNumResources (ShaderInputInterface shaderInterface)
+{
+	switch (shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:	return 1u;
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:	return 2u;
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:		return 2u;
+
+		default:
+			DE_FATAL("Impossible");
+			return 0u;
+	}
+}
+
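+// Tests buffer-backed descriptors (uniform/storage, optionally with dynamic offsets) by rendering
+// a 2x2 quadrant pattern whose colors are read from the source buffer(s) through the tested bindings.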
+class BufferRenderInstance : public SingleCmdRenderInstance
+{
+public:
+													BufferRenderInstance		(Context&					context,
+																				 bool						isPrimaryCmdBuf,
+																				 vk::VkDescriptorType		descriptorType,
+																				 vk::VkShaderStageFlags		stageFlags,
+																				 ShaderInputInterface		shaderInterface,
+																				 bool						viewOffset,
+																				 bool						dynamicOffset,
+																				 bool						dynamicOffsetNonZero);
+
+	static vk::Move<vk::VkBuffer>					createSourceBuffer			(const vk::DeviceInterface&		vki,
+																				 vk::VkDevice					device,
+																				 vk::Allocator&					allocator,
+																				 vk::VkDescriptorType			descriptorType,
+																				 deUint32						offset,
+																				 deUint32						bufferSize,
+																				 de::MovePtr<vk::Allocation>*	outMemory);
+
+	static vk::Move<vk::VkDescriptorPool>			createDescriptorPool		(const vk::DeviceInterface&	vki,
+																				 vk::VkDevice				device,
+																				 vk::VkDescriptorType		descriptorType,
+																				 ShaderInputInterface		shaderInterface);
+
+	static vk::Move<vk::VkDescriptorSetLayout>		createDescriptorSetLayout	(const vk::DeviceInterface&	vki,
+																				 vk::VkDevice				device,
+																				 vk::VkDescriptorType		descriptorType,
+																				 ShaderInputInterface		shaderInterface,
+																				 vk::VkShaderStageFlags		stageFlags);
+
+	static vk::Move<vk::VkDescriptorSet>			createDescriptorSet			(const vk::DeviceInterface&	vki,
+																				 vk::VkDevice				device,
+																				 vk::VkDescriptorSetLayout	descriptorSetLayout,
+																				 vk::VkDescriptorPool		descriptorPool,
+																				 vk::VkDescriptorType		descriptorType,
+																				 ShaderInputInterface		shaderInterface,
+																				 vk::VkBuffer				sourceBufferA,
+																				 const deUint32				viewOffsetA,
+																				 vk::VkBuffer				sourceBufferB,
+																				 const deUint32				viewOffsetB);
+
+	static vk::Move<vk::VkPipelineLayout>			createPipelineLayout		(const vk::DeviceInterface&	vki,
+																				 vk::VkDevice				device,
+																				 vk::VkDescriptorSetLayout	descriptorSetLayout);
+
+	void											logTestPlan					(void) const;
+	vk::VkPipelineLayout							getPipelineLayout			(void) const;
+	void											writeDrawCmdBuffer			(vk::VkCommandBuffer cmd) const;
+	tcu::TestStatus									verifyResultImage			(const tcu::ConstPixelBufferAccess& result) const;
+
+	enum
+	{
+		RENDER_SIZE				= 128,
+		BUFFER_DATA_SIZE		= 8 * sizeof(float),
+		BUFFER_SIZE_A			= 2048, //!< a lot more than required
+		BUFFER_SIZE_B			= 2560, //!< a lot more than required
+
+		STATIC_OFFSET_VALUE_A	= 256,
+		DYNAMIC_OFFSET_VALUE_A	= 512,
+		STATIC_OFFSET_VALUE_B	= 1024,
+		DYNAMIC_OFFSET_VALUE_B	= 768,
+	};
+
+	const vk::VkDescriptorType						m_descriptorType;
+	const ShaderInputInterface						m_shaderInterface;
+	const bool										m_setViewOffset;
+	const bool										m_setDynamicOffset;
+	const bool										m_dynamicOffsetNonZero;
+	const vk::VkShaderStageFlags					m_stageFlags;
+
+	const deUint32									m_viewOffsetA;
+	const deUint32									m_viewOffsetB;
+	const deUint32									m_dynamicOffsetA;
+	const deUint32									m_dynamicOffsetB;
+	const deUint32									m_effectiveOffsetA;
+	const deUint32									m_effectiveOffsetB;
+	const deUint32									m_bufferSizeA;
+	const deUint32									m_bufferSizeB;
+
+	de::MovePtr<vk::Allocation>						m_bufferMemoryA;
+	de::MovePtr<vk::Allocation>						m_bufferMemoryB;
+	const vk::Unique<vk::VkBuffer>					m_sourceBufferA;
+	const vk::Unique<vk::VkBuffer>					m_sourceBufferB;
+	const vk::Unique<vk::VkDescriptorPool>			m_descriptorPool;
+	const vk::Unique<vk::VkDescriptorSetLayout>		m_descriptorSetLayout;
+	const vk::Unique<vk::VkDescriptorSet>			m_descriptorSet;
+	const vk::Unique<vk::VkPipelineLayout>			m_pipelineLayout;
+};
+
+BufferRenderInstance::BufferRenderInstance	(Context&				context,
+											 bool					isPrimaryCmdBuf,
+											 vk::VkDescriptorType	descriptorType,
+											 vk::VkShaderStageFlags	stageFlags,
+											 ShaderInputInterface	shaderInterface,
+											 bool					viewOffset,
+											 bool					dynamicOffset,
+											 bool					dynamicOffsetNonZero)
+	: SingleCmdRenderInstance		(context, isPrimaryCmdBuf, tcu::UVec2(RENDER_SIZE, RENDER_SIZE))
+	, m_descriptorType				(descriptorType)
+	, m_shaderInterface				(shaderInterface)
+	, m_setViewOffset				(viewOffset)
+	, m_setDynamicOffset			(dynamicOffset)
+	, m_dynamicOffsetNonZero		(dynamicOffsetNonZero)
+	, m_stageFlags					(stageFlags)
+	, m_viewOffsetA					((m_setViewOffset) ? ((deUint32)STATIC_OFFSET_VALUE_A) : (0u))
+	, m_viewOffsetB					((m_setViewOffset) ? ((deUint32)STATIC_OFFSET_VALUE_B) : (0u))
+	, m_dynamicOffsetA				((dynamicOffsetNonZero) ? ((deUint32)DYNAMIC_OFFSET_VALUE_A) : (0u))
+	, m_dynamicOffsetB				((dynamicOffsetNonZero) ? ((deUint32)DYNAMIC_OFFSET_VALUE_B) : (0u))
+	, m_effectiveOffsetA			((isDynamicDescriptorType(m_descriptorType)) ? (m_viewOffsetA + m_dynamicOffsetA) : (m_viewOffsetA))
+	, m_effectiveOffsetB			((isDynamicDescriptorType(m_descriptorType)) ? (m_viewOffsetB + m_dynamicOffsetB) : (m_viewOffsetB))
+	, m_bufferSizeA					(BUFFER_SIZE_A)
+	, m_bufferSizeB					(BUFFER_SIZE_B)
+	, m_bufferMemoryA				(DE_NULL)
+	, m_bufferMemoryB				(DE_NULL)
+	, m_sourceBufferA				(createSourceBuffer(m_vki, m_device, m_allocator, m_descriptorType, m_effectiveOffsetA, m_bufferSizeA, &m_bufferMemoryA))
+	, m_sourceBufferB				((getInterfaceNumResources(m_shaderInterface) == 1u)
+										? vk::Move<vk::VkBuffer>()
+										: createSourceBuffer(m_vki, m_device, m_allocator, m_descriptorType, m_effectiveOffsetB, m_bufferSizeB, &m_bufferMemoryB))
+	, m_descriptorPool				(createDescriptorPool(m_vki, m_device, m_descriptorType, m_shaderInterface))
+	, m_descriptorSetLayout			(createDescriptorSetLayout(m_vki, m_device, m_descriptorType, m_shaderInterface, m_stageFlags))
+	, m_descriptorSet				(createDescriptorSet(m_vki, m_device, *m_descriptorSetLayout, *m_descriptorPool, m_descriptorType, m_shaderInterface, *m_sourceBufferA, m_viewOffsetA, *m_sourceBufferB, m_viewOffsetB))
+	, m_pipelineLayout				(createPipelineLayout(m_vki, m_device, *m_descriptorSetLayout))
+{
+	if (m_setDynamicOffset)
+		DE_ASSERT(isDynamicDescriptorType(m_descriptorType));
+	if (m_dynamicOffsetNonZero)
+		DE_ASSERT(m_setDynamicOffset);
+}
+
+vk::Move<vk::VkBuffer> BufferRenderInstance::createSourceBuffer (const vk::DeviceInterface&		vki,
+																 vk::VkDevice					device,
+																 vk::Allocator&					allocator,
+																 vk::VkDescriptorType			descriptorType,
+																 deUint32						offset,
+																 deUint32						bufferSize,
+																 de::MovePtr<vk::Allocation>*	outMemory)
+{
+	static const float				s_colors[]			=
+	{
+		0.0f, 1.0f, 0.0f, 1.0f,		// green
+		1.0f, 1.0f, 0.0f, 1.0f,		// yellow
+	};
+	DE_STATIC_ASSERT(sizeof(s_colors) == BUFFER_DATA_SIZE);
+	DE_ASSERT(offset + BUFFER_DATA_SIZE <= bufferSize);
+	DE_ASSERT(offset % sizeof(float) == 0);
+	DE_ASSERT(bufferSize % sizeof(float) == 0);
+
+	const bool						isUniformBuffer		= isUniformDescriptorType(descriptorType);
+	const vk::VkBufferUsageFlags	usageFlags			= (isUniformBuffer) ? (vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) : (vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
+	const float						preGuardValue		= 0.5f;
+	const float						postGuardValue		= 0.75f;
+	const vk::VkBufferCreateInfo	bufferCreateInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+		DE_NULL,
+		0u,								// flags
+		bufferSize,						// size
+		usageFlags,						// usage
+		vk::VK_SHARING_MODE_EXCLUSIVE,	// sharingMode
+		0u,								// queueFamilyCount
+		DE_NULL,						// pQueueFamilyIndices
+	};
+	vk::Move<vk::VkBuffer>			buffer				(vk::createBuffer(vki, device, &bufferCreateInfo));
+	de::MovePtr<vk::Allocation>		bufferMemory		= allocateAndBindObjectMemory(vki, device, allocator, *buffer, vk::MemoryRequirement::HostVisible);
+	void* const						mapPtr				= bufferMemory->getHostPtr();
+
+	// guard with interesting values
+	for (size_t preGuardOffset = 0; preGuardOffset + sizeof(float) <= (size_t)offset; preGuardOffset += sizeof(float))
+		deMemcpy((deUint8*)mapPtr + preGuardOffset, &preGuardValue, sizeof(float));
+
+	deMemcpy((deUint8*)mapPtr + offset, s_colors, sizeof(s_colors));
+	for (size_t postGuardOffset = (size_t)offset + sizeof(s_colors); postGuardOffset + sizeof(float) <= (size_t)bufferSize; postGuardOffset += sizeof(float))
+		deMemcpy((deUint8*)mapPtr + postGuardOffset, &postGuardValue, sizeof(float));
+	deMemset((deUint8*)mapPtr + offset + sizeof(s_colors), 0x5A, (size_t)bufferSize - (size_t)offset - sizeof(s_colors)); // fill with interesting pattern that produces valid floats
+
+	flushMappedMemoryRange(vki, device, bufferMemory->getMemory(), bufferMemory->getOffset(), bufferSize);
+
+	*outMemory = bufferMemory;
+	return buffer;
+}
+
+vk::Move<vk::VkDescriptorPool> BufferRenderInstance::createDescriptorPool (const vk::DeviceInterface&	vki,
+																		   vk::VkDevice					device,
+																		   vk::VkDescriptorType			descriptorType,
+																		   ShaderInputInterface			shaderInterface)
+{
+	return vk::DescriptorPoolBuilder()
+		.addType(descriptorType, getInterfaceNumResources(shaderInterface))
+		.build(vki, device, vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1);
+}
+
+vk::Move<vk::VkDescriptorSetLayout> BufferRenderInstance::createDescriptorSetLayout (const vk::DeviceInterface&	vki,
+																					 vk::VkDevice				device,
+																					 vk::VkDescriptorType		descriptorType,
+																					 ShaderInputInterface		shaderInterface,
+																					 vk::VkShaderStageFlags		stageFlags)
+{
+	vk::DescriptorSetLayoutBuilder builder;
+
+	switch (shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.addSingleBinding(descriptorType, stageFlags);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.addSingleBinding(descriptorType, stageFlags);
+			builder.addSingleBinding(descriptorType, stageFlags);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.addArrayBinding(descriptorType, 2u, stageFlags);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	return builder.build(vki, device);
+}
+
+vk::Move<vk::VkDescriptorSet> BufferRenderInstance::createDescriptorSet (const vk::DeviceInterface&	vki,
+																		 vk::VkDevice				device,
+																		 vk::VkDescriptorSetLayout	descriptorSetLayout,
+																		 vk::VkDescriptorPool		descriptorPool,
+																		 vk::VkDescriptorType		descriptorType,
+																		 ShaderInputInterface		shaderInterface,
+																		 vk::VkBuffer				bufferA,
+																		 deUint32					offsetA,
+																		 vk::VkBuffer				bufferB,
+																		 deUint32					offsetB)
+{
+	const vk::VkDescriptorBufferInfo		bufferInfos[2]	=
+	{
+		vk::makeDescriptorBufferInfo(bufferA, (vk::VkDeviceSize)offsetA, (vk::VkDeviceSize)BUFFER_DATA_SIZE),
+		vk::makeDescriptorBufferInfo(bufferB, (vk::VkDeviceSize)offsetB, (vk::VkDeviceSize)BUFFER_DATA_SIZE),
+	};
+	const vk::VkDescriptorSetAllocateInfo	allocInfo		=
+	{
+		vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+		DE_NULL,
+		descriptorPool,
+		1u,
+		&descriptorSetLayout
+	};
+
+	vk::Move<vk::VkDescriptorSet>	descriptorSet	= allocateDescriptorSet(vki, device, &allocInfo);
+	vk::DescriptorSetUpdateBuilder	builder;
+
+	switch (shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), descriptorType, &bufferInfos[0]);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), descriptorType, &bufferInfos[0]);
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), descriptorType, &bufferInfos[1]);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.writeArray(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), descriptorType, 2u, bufferInfos);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	builder.update(vki, device);
+	return descriptorSet;
+}
+
+vk::Move<vk::VkPipelineLayout> BufferRenderInstance::createPipelineLayout (const vk::DeviceInterface&	vki,
+																		   vk::VkDevice					device,
+																		   vk::VkDescriptorSetLayout	descriptorSetLayout)
+{
+	const vk::VkPipelineLayoutCreateInfo createInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineLayoutCreateFlags)0,
+		1,						// descriptorSetCount
+		&descriptorSetLayout,	// pSetLayouts
+		0u,						// pushConstantRangeCount
+		DE_NULL,				// pPushConstantRanges
+	};
+
+	return vk::createPipelineLayout(vki, device, &createInfo);
+}
+
+void BufferRenderInstance::logTestPlan (void) const
+{
+	std::ostringstream msg;
+
+	msg << "Rendering 2x2 yellow-green grid.\n"
+		<< "Single descriptor set. Descriptor set contains "
+			<< ((m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR) ? "single" :
+			    (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS) ? "two" :
+			    (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY) ? "an array (size 2) of" :
+			    (const char*)DE_NULL)
+		<< " descriptor(s) of type " << vk::getDescriptorTypeName(m_descriptorType) << "\n"
+		<< "Buffer view(s) have " << ((m_setViewOffset) ? ("non-") : ("")) << "zero offset.\n";
+
+	if (isDynamicDescriptorType(m_descriptorType))
+	{
+		if (m_setDynamicOffset)
+		{
+			msg << "Source buffer(s) are given a dynamic offset at bind time.\n"
+				<< "The supplied dynamic offset is " << ((m_dynamicOffsetNonZero) ? ("non-") : ("")) << "zero.\n";
+		}
+		else
+		{
+			msg << "Dynamic offset is not supplied at bind time. Expecting bind to offset 0.\n";
+		}
+	}
+
+	if (m_stageFlags == 0u)
+	{
+		msg << "Descriptors are not accessed in any shader stage.\n";
+	}
+	else
+	{
+		msg << "Descriptors are accessed in {"
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_VERTEX_BIT) != 0)					? (" vertex")			: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) != 0)	? (" tess_control")		: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) != 0)	? (" tess_evaluation")	: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_GEOMETRY_BIT) != 0)				? (" geometry")			: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_FRAGMENT_BIT) != 0)				? (" fragment")			: (""))
+			<< " } stages.\n";
+	}
+
+	m_context.getTestContext().getLog()
+		<< tcu::TestLog::Message
+		<< msg.str()
+		<< tcu::TestLog::EndMessage;
+}
+
+vk::VkPipelineLayout BufferRenderInstance::getPipelineLayout (void) const
+{
+	return *m_pipelineLayout;
+}
+
+void BufferRenderInstance::writeDrawCmdBuffer (vk::VkCommandBuffer cmd) const
+{
+	const bool							isUniformBuffer		= isUniformDescriptorType(m_descriptorType);
+
+	// \note the dynamic offset is applied on top of the view offset, i.e. the effective offset is the sum of the two
+	const deUint32						dynamicOffsets[]	=
+	{
+		m_dynamicOffsetA,
+		m_dynamicOffsetB,
+	};
+	const deUint32						numOffsets			= (!m_setDynamicOffset) ? (0u) : (getInterfaceNumResources(m_shaderInterface));
+	const deUint32* const				dynamicOffsetPtr	= (!m_setDynamicOffset) ? (DE_NULL) : (dynamicOffsets);
+
+	// make host writes device-visible
+	const vk::VkAccessFlags				inputBit			= (isUniformBuffer) ? (vk::VK_ACCESS_UNIFORM_READ_BIT) : (vk::VK_ACCESS_SHADER_READ_BIT);
+	const vk::VkBufferMemoryBarrier		memoryBarriers[]	=
+	{
+		{
+			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+			DE_NULL,
+			vk::VK_ACCESS_HOST_WRITE_BIT,				// outputMask
+			inputBit,									// inputMask
+			vk::VK_QUEUE_FAMILY_IGNORED,				// srcQueueFamilyIndex
+			vk::VK_QUEUE_FAMILY_IGNORED,				// destQueueFamilyIndex
+			*m_sourceBufferA,							// buffer
+			0u,											// offset
+			(vk::VkDeviceSize)m_bufferSizeA,			// size
+		},
+		{
+			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+			DE_NULL,
+			vk::VK_ACCESS_HOST_WRITE_BIT,				// outputMask
+			inputBit,									// inputMask
+			vk::VK_QUEUE_FAMILY_IGNORED,				// srcQueueFamilyIndex
+			vk::VK_QUEUE_FAMILY_IGNORED,				// destQueueFamilyIndex
+			*m_sourceBufferB,							// buffer
+			0u,											// offset
+			(vk::VkDeviceSize)m_bufferSizeB,			// size
+		}
+	};
+	const deUint32						numMemoryBarriers	= getInterfaceNumResources(m_shaderInterface);
+
+	m_vki.cmdBindDescriptorSets(cmd, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, getPipelineLayout(), 0, 1, &m_descriptorSet.get(), numOffsets, dynamicOffsetPtr);
+	m_vki.cmdPipelineBarrier(cmd, 0u, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, (vk::VkDependencyFlags)0,
+							 0, (const vk::VkMemoryBarrier*)DE_NULL,
+							 numMemoryBarriers, memoryBarriers,
+							 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+	m_vki.cmdDraw(cmd, 6 * 4, 1, 0, 0); // render four quads (two separate triangles each)
+}
+
+tcu::TestStatus BufferRenderInstance::verifyResultImage (const tcu::ConstPixelBufferAccess& result) const
+{
+	const tcu::Vec4		green		(0.0f, 1.0f, 0.0f, 1.0f);
+	const tcu::Vec4		yellow		(1.0f, 1.0f, 0.0f, 1.0f);
+	tcu::Surface		reference	(m_targetSize.x(), m_targetSize.y());
+
+	drawQuadrantReferenceResult(reference.getAccess(), yellow, green, green, yellow);
+
+	if (!bilinearCompare(m_context.getTestContext().getLog(), "Compare", "Result comparison", reference.getAccess(), result, tcu::RGBA(1, 1, 1, 1), tcu::COMPARE_LOG_RESULT))
+		return tcu::TestStatus::fail("Image verification failed");
+	else
+		return tcu::TestStatus::pass("Pass");
+}
+
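+// Host-visible storage buffer that compute test cases write their results (four vec4 values) into.
+// The contents are initialized to -1.0 so stale or missing results are detectable on readback.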
+class ComputeInstanceResultBuffer
+{
+public:
+	enum
+	{
+		DATA_SIZE = sizeof(tcu::Vec4[4])
+	};
+
+											ComputeInstanceResultBuffer	(const vk::DeviceInterface&		vki,
+																		 vk::VkDevice					device,
+																		 vk::Allocator&					allocator);
+
+	void									readResultContentsTo		(tcu::Vec4 (*results)[4]) const;
+
+	inline vk::VkBuffer						getBuffer					(void) const { return *m_buffer;			}
+	inline const vk::VkBufferMemoryBarrier*	getResultReadBarrier		(void) const { return &m_bufferBarrier;		}
+
+private:
+	static vk::Move<vk::VkBuffer>			createResultBuffer			(const vk::DeviceInterface&		vki,
+																		 vk::VkDevice					device,
+																		 vk::Allocator&					allocator,
+																		 de::MovePtr<vk::Allocation>*	outAllocation);
+
+	static vk::VkBufferMemoryBarrier		createResultBufferBarrier	(vk::VkBuffer buffer);
+
+	const vk::DeviceInterface&				m_vki;
+	const vk::VkDevice						m_device;
+
+	de::MovePtr<vk::Allocation>				m_bufferMem;
+	const vk::Unique<vk::VkBuffer>			m_buffer;
+	const vk::VkBufferMemoryBarrier			m_bufferBarrier;
+};
+
+ComputeInstanceResultBuffer::ComputeInstanceResultBuffer (const vk::DeviceInterface&	vki,
+														  vk::VkDevice					device,
+														  vk::Allocator&				allocator)
+	: m_vki				(vki)
+	, m_device			(device)
+	, m_bufferMem		(DE_NULL)
+	, m_buffer			(createResultBuffer(m_vki, m_device, allocator, &m_bufferMem))
+	, m_bufferBarrier	(createResultBufferBarrier(*m_buffer))
+{
+}
+
+void ComputeInstanceResultBuffer::readResultContentsTo (tcu::Vec4 (*results)[4]) const
+{
+	invalidateMappedMemoryRange(m_vki, m_device, m_bufferMem->getMemory(), m_bufferMem->getOffset(), sizeof(*results));
+	deMemcpy(*results, m_bufferMem->getHostPtr(), sizeof(*results));
+}
+
+vk::Move<vk::VkBuffer> ComputeInstanceResultBuffer::createResultBuffer (const vk::DeviceInterface&		vki,
+																		vk::VkDevice					device,
+																		vk::Allocator&					allocator,
+																		de::MovePtr<vk::Allocation>*	outAllocation)
+{
+	const vk::VkBufferCreateInfo	createInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+		DE_NULL,
+		0u,											// flags
+		(vk::VkDeviceSize)DATA_SIZE,				// size
+		vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,		// usage
+		vk::VK_SHARING_MODE_EXCLUSIVE,				// sharingMode
+		0u,											// queueFamilyCount
+		DE_NULL,									// pQueueFamilyIndices
+	};
+	vk::Move<vk::VkBuffer>			buffer		(vk::createBuffer(vki, device, &createInfo));
+	de::MovePtr<vk::Allocation>		allocation	(allocateAndBindObjectMemory(vki, device, allocator, *buffer, vk::MemoryRequirement::HostVisible));
+	const float						clearValue	= -1.0f;
+	void*							mapPtr		= allocation->getHostPtr();
+
+	for (size_t offset = 0; offset < DATA_SIZE; offset += sizeof(float))
+		deMemcpy(((deUint8*)mapPtr) + offset, &clearValue, sizeof(float));
+
+	flushMappedMemoryRange(vki, device, allocation->getMemory(), allocation->getOffset(), (vk::VkDeviceSize)DATA_SIZE);
+
+	*outAllocation = allocation;
+	return buffer;
+}
+
+vk::VkBufferMemoryBarrier ComputeInstanceResultBuffer::createResultBufferBarrier (vk::VkBuffer buffer)
+{
+	const vk::VkBufferMemoryBarrier bufferBarrier =
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+		DE_NULL,
+		vk::VK_ACCESS_SHADER_WRITE_BIT,				// outputMask
+		vk::VK_ACCESS_HOST_READ_BIT,				// inputMask
+		vk::VK_QUEUE_FAMILY_IGNORED,				// srcQueueFamilyIndex
+		vk::VK_QUEUE_FAMILY_IGNORED,				// destQueueFamilyIndex
+		buffer,										// buffer
+		(vk::VkDeviceSize)0u,						// offset
+		DATA_SIZE,									// size
+	};
+	return bufferBarrier;
+}
+
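+// Helper that builds a pipeline layout from the given descriptor set layouts and a compute
+// pipeline from the "compute" program in the binary collection.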
+class ComputePipeline
+{
+public:
+											ComputePipeline			(const vk::DeviceInterface&			vki,
+																	 vk::VkDevice						device,
+																	 const vk::BinaryCollection&		programCollection,
+																	 deUint32							numDescriptorSets,
+																	 const vk::VkDescriptorSetLayout*	descriptorSetLayouts);
+
+	inline vk::VkPipeline					getPipeline				(void) const { return *m_pipeline;			};
+	inline vk::VkPipelineLayout				getPipelineLayout		(void) const { return *m_pipelineLayout;	};
+
+private:
+	static vk::Move<vk::VkPipelineLayout>	createPipelineLayout	(const vk::DeviceInterface&			vki,
+																	 vk::VkDevice						device,
+																	 deUint32							numDescriptorSets,
+																	 const vk::VkDescriptorSetLayout*	descriptorSetLayouts);
+
+	static vk::Move<vk::VkPipeline>			createPipeline			(const vk::DeviceInterface&			vki,
+																	 vk::VkDevice						device,
+																	 const vk::BinaryCollection&		programCollection,
+																	 vk::VkPipelineLayout				layout);
+
+	const vk::Unique<vk::VkPipelineLayout>	m_pipelineLayout;
+	const vk::Unique<vk::VkPipeline>		m_pipeline;
+};
+
+ComputePipeline::ComputePipeline (const vk::DeviceInterface&		vki,
+								  vk::VkDevice						device,
+								  const vk::BinaryCollection&		programCollection,
+								  deUint32							numDescriptorSets,
+								  const vk::VkDescriptorSetLayout*	descriptorSetLayouts)
+	: m_pipelineLayout	(createPipelineLayout(vki, device, numDescriptorSets, descriptorSetLayouts))
+	, m_pipeline		(createPipeline(vki, device, programCollection, *m_pipelineLayout))
+{
+}
+
+vk::Move<vk::VkPipelineLayout> ComputePipeline::createPipelineLayout (const vk::DeviceInterface&		vki,
+																	  vk::VkDevice						device,
+																	  deUint32							numDescriptorSets,
+																	  const vk::VkDescriptorSetLayout*	descriptorSetLayouts)
+{
+	const vk::VkPipelineLayoutCreateInfo createInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineLayoutCreateFlags)0,
+		numDescriptorSets,		// descriptorSetCount
+		descriptorSetLayouts,	// pSetLayouts
+		0u,						// pushConstantRangeCount
+		DE_NULL,				// pPushConstantRanges
+	};
+	return vk::createPipelineLayout(vki, device, &createInfo);
+}
+
+vk::Move<vk::VkPipeline> ComputePipeline::createPipeline (const vk::DeviceInterface&	vki,
+														  vk::VkDevice					device,
+														  const vk::BinaryCollection&	programCollection,
+														  vk::VkPipelineLayout			layout)
+{
+	const vk::Unique<vk::VkShaderModule>		computeModule		(vk::createShaderModule(vki, device, programCollection.get("compute"), (vk::VkShaderModuleCreateFlags)0u));
+	const vk::VkPipelineShaderStageCreateInfo	cs					=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineShaderStageCreateFlags)0,
+		vk::VK_SHADER_STAGE_COMPUTE_BIT,	// stage
+		*computeModule,						// shader
+		"main",
+		DE_NULL,							// pSpecializationInfo
+	};
+	const vk::VkComputePipelineCreateInfo		createInfo			=
+	{
+		vk::VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+		DE_NULL,
+		0u,								// flags
+		cs,								// cs
+		layout,							// layout
+		(vk::VkPipeline)0,				// basePipelineHandle
+		0u,								// basePipelineIndex
+	};
+	return createComputePipeline(vki, device, (vk::VkPipelineCache)0u, &createInfo);
+}
+
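+// Records a single compute dispatch (descriptor set binds with dynamic offsets, plus the given
+// pre- and post-dispatch buffer barriers), then submits it and waits for completion on a fence.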
+class ComputeCommand
+{
+public:
+											ComputeCommand	(const vk::DeviceInterface&			vki,
+															 vk::VkDevice						device,
+															 vk::VkPipeline						pipeline,
+															 vk::VkPipelineLayout				pipelineLayout,
+															 const tcu::UVec3&					numWorkGroups,
+															 int								numDescriptorSets,
+															 const vk::VkDescriptorSet*			descriptorSets,
+															 int								numDynamicOffsets,
+															 const deUint32*					dynamicOffsets,
+															 int								numPreBarriers,
+															 const vk::VkBufferMemoryBarrier*	preBarriers,
+															 int								numPostBarriers,
+															 const vk::VkBufferMemoryBarrier*	postBarriers);
+
+	void									submitAndWait	(deUint32 queueFamilyIndex, vk::VkQueue queue) const;
+
+private:
+	const vk::DeviceInterface&				m_vki;
+	const vk::VkDevice						m_device;
+	const vk::VkPipeline					m_pipeline;
+	const vk::VkPipelineLayout				m_pipelineLayout;
+	const tcu::UVec3						m_numWorkGroups;
+	const int								m_numDescriptorSets;
+	const vk::VkDescriptorSet* const		m_descriptorSets;
+	const int								m_numDynamicOffsets;
+	const deUint32* const					m_dynamicOffsets;
+	const int								m_numPreBarriers;
+	const vk::VkBufferMemoryBarrier* const	m_preBarriers;
+	const int								m_numPostBarriers;
+	const vk::VkBufferMemoryBarrier* const	m_postBarriers;
+};
+
+ComputeCommand::ComputeCommand (const vk::DeviceInterface&			vki,
+								vk::VkDevice						device,
+								vk::VkPipeline						pipeline,
+								vk::VkPipelineLayout				pipelineLayout,
+								const tcu::UVec3&					numWorkGroups,
+								int									numDescriptorSets,
+								const vk::VkDescriptorSet*			descriptorSets,
+								int									numDynamicOffsets,
+								const deUint32*						dynamicOffsets,
+								int									numPreBarriers,
+								const vk::VkBufferMemoryBarrier*	preBarriers,
+								int									numPostBarriers,
+								const vk::VkBufferMemoryBarrier*	postBarriers)
+	: m_vki					(vki)
+	, m_device				(device)
+	, m_pipeline			(pipeline)
+	, m_pipelineLayout		(pipelineLayout)
+	, m_numWorkGroups		(numWorkGroups)
+	, m_numDescriptorSets	(numDescriptorSets)
+	, m_descriptorSets		(descriptorSets)
+	, m_numDynamicOffsets	(numDynamicOffsets)
+	, m_dynamicOffsets		(dynamicOffsets)
+	, m_numPreBarriers		(numPreBarriers)
+	, m_preBarriers			(preBarriers)
+	, m_numPostBarriers		(numPostBarriers)
+	, m_postBarriers		(postBarriers)
+{
+}
+
+void ComputeCommand::submitAndWait (deUint32 queueFamilyIndex, vk::VkQueue queue) const
+{
+	const vk::VkCommandPoolCreateInfo				cmdPoolCreateInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+		DE_NULL,
+		vk::VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// flags
+		queueFamilyIndex,									// queueFamilyIndex
+	};
+	const vk::Unique<vk::VkCommandPool>				cmdPool				(vk::createCommandPool(m_vki, m_device, &cmdPoolCreateInfo));
+
+	const vk::VkFenceCreateInfo						fenceCreateInfo		=
+	{
+		vk::VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,			// flags
+	};
+
+	const vk::VkCommandBufferAllocateInfo			cmdBufCreateInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+		DE_NULL,
+		*cmdPool,											// cmdPool
+		vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// level
+		1u,													// count
+	};
+	const vk::VkCommandBufferBeginInfo				cmdBufBeginInfo		=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		vk::VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,	// flags
+		(const vk::VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const vk::Unique<vk::VkFence>					cmdCompleteFence	(vk::createFence(m_vki, m_device, &fenceCreateInfo));
+	const vk::Unique<vk::VkCommandBuffer>			cmd					(vk::allocateCommandBuffer(m_vki, m_device, &cmdBufCreateInfo));
+	const deUint64									infiniteTimeout		= ~(deUint64)0u;
+
+	VK_CHECK(m_vki.beginCommandBuffer(*cmd, &cmdBufBeginInfo));
+
+	m_vki.cmdBindPipeline(*cmd, vk::VK_PIPELINE_BIND_POINT_COMPUTE, m_pipeline);
+	m_vki.cmdBindDescriptorSets(*cmd, vk::VK_PIPELINE_BIND_POINT_COMPUTE, m_pipelineLayout, 0, m_numDescriptorSets, m_descriptorSets, m_numDynamicOffsets, m_dynamicOffsets);
+
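+	// caller-supplied barriers guarding buffers the dispatch will read (typically host write -> shader read)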
+	if (m_numPreBarriers)
+		m_vki.cmdPipelineBarrier(*cmd, 0u, vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (vk::VkDependencyFlags)0,
+								 0, (const vk::VkMemoryBarrier*)DE_NULL,
+								 m_numPreBarriers, m_preBarriers,
+								 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+
+	m_vki.cmdDispatch(*cmd, m_numWorkGroups.x(), m_numWorkGroups.y(), m_numWorkGroups.z());
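+	// caller-supplied barriers making the dispatch results available to subsequent reads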
+	m_vki.cmdPipelineBarrier(*cmd, vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+							 0, (const vk::VkMemoryBarrier*)DE_NULL,
+							 m_numPostBarriers, m_postBarriers,
+							 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+	VK_CHECK(m_vki.endCommandBuffer(*cmd));
+
+	// run
+	{
+		const vk::VkSubmitInfo	submitInfo	=
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
+			DE_NULL,
+			0u,
+			(const vk::VkSemaphore*)0,
+			(const vk::VkPipelineStageFlags*)DE_NULL,
+			1u,
+			&cmd.get(),
+			0u,
+			(const vk::VkSemaphore*)0,
+		};
+		VK_CHECK(m_vki.queueSubmit(queue, 1, &submitInfo, *cmdCompleteFence));
+	}
+	VK_CHECK(m_vki.waitForFences(m_device, 1, &cmdCompleteFence.get(), 0u, infiniteTimeout)); // \note: timeout is failure
+}
+
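+// Compute test instance: reads two colors from buffer descriptor(s) in a
+// compute shader and writes one result per quadrant into a result storage
+// buffer that is verified on the host.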
+class BufferComputeInstance : public vkt::TestInstance
+{
+public:
+											BufferComputeInstance		(Context&				context,
+																		 vk::VkDescriptorType	descriptorType,
+																		 ShaderInputInterface	shaderInterface,
+																		 bool					viewOffset,
+																		 bool					dynamicOffset,
+																		 bool					dynamicOffsetNonZero);
+
+private:
+	vk::Move<vk::VkBuffer>					createColorDataBuffer		(deUint32 offset, deUint32 bufferSize, const tcu::Vec4& value1, const tcu::Vec4& value2, de::MovePtr<vk::Allocation>* outAllocation);
+	vk::Move<vk::VkBufferView>				createBufferView			(vk::VkBuffer buffer, deUint32 offset) const;
+	vk::Move<vk::VkDescriptorSetLayout>		createDescriptorSetLayout	(void) const;
+	vk::Move<vk::VkDescriptorPool>			createDescriptorPool		(void) const;
+	vk::Move<vk::VkDescriptorSet>			createDescriptorSet			(vk::VkDescriptorPool pool, vk::VkDescriptorSetLayout layout, vk::VkBuffer viewA, deUint32 offsetA, vk::VkBuffer viewB, deUint32 offsetB, vk::VkBuffer resBuf) const;
+
+	tcu::TestStatus							iterate						(void);
+	void									logTestPlan					(void) const;
+	tcu::TestStatus							testResourceAccess			(void);
+
+	enum
+	{
+		STATIC_OFFSET_VALUE_A	= 256,
+		DYNAMIC_OFFSET_VALUE_A	= 512,
+		STATIC_OFFSET_VALUE_B	= 1024,
+		DYNAMIC_OFFSET_VALUE_B	= 768,
+	};
+
+	const vk::VkDescriptorType				m_descriptorType;
+	const ShaderInputInterface				m_shaderInterface;
+	const bool								m_setViewOffset;
+	const bool								m_setDynamicOffset;
+	const bool								m_dynamicOffsetNonZero;
+
+	const vk::DeviceInterface&				m_vki;
+	const vk::VkDevice						m_device;
+	const vk::VkQueue						m_queue;
+	const deUint32							m_queueFamilyIndex;
+	vk::Allocator&							m_allocator;
+
+	const ComputeInstanceResultBuffer		m_result;
+};
+
+BufferComputeInstance::BufferComputeInstance (Context&					context,
+											  vk::VkDescriptorType		descriptorType,
+											  ShaderInputInterface		shaderInterface,
+											  bool						viewOffset,
+											  bool						dynamicOffset,
+											  bool						dynamicOffsetNonZero)
+	: vkt::TestInstance			(context)
+	, m_descriptorType			(descriptorType)
+	, m_shaderInterface			(shaderInterface)
+	, m_setViewOffset			(viewOffset)
+	, m_setDynamicOffset		(dynamicOffset)
+	, m_dynamicOffsetNonZero	(dynamicOffsetNonZero)
+	, m_vki						(context.getDeviceInterface())
+	, m_device					(context.getDevice())
+	, m_queue					(context.getUniversalQueue())
+	, m_queueFamilyIndex		(context.getUniversalQueueFamilyIndex())
+	, m_allocator				(context.getDefaultAllocator())
+	, m_result					(m_vki, m_device, m_allocator)
+{
+	if (m_dynamicOffsetNonZero)
+		DE_ASSERT(m_setDynamicOffset);
+}
+
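+// Creates a host-visible data buffer with the two test colors written at
+// 'offset'; all other bytes are filled with a 0x5A junk pattern so that reads
+// from an incorrect offset are detectable.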
+vk::Move<vk::VkBuffer> BufferComputeInstance::createColorDataBuffer (deUint32 offset, deUint32 bufferSize, const tcu::Vec4& value1, const tcu::Vec4& value2, de::MovePtr<vk::Allocation>* outAllocation)
+{
+	DE_ASSERT(offset + sizeof(tcu::Vec4[2]) <= bufferSize);
+
+	const bool						isUniformBuffer		= isUniformDescriptorType(m_descriptorType);
+	const vk::VkBufferUsageFlags	usageFlags			= (isUniformBuffer) ? (vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) : (vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
+	const vk::VkBufferCreateInfo	createInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+		DE_NULL,
+		0u,								// flags
+		(vk::VkDeviceSize)bufferSize,	// size
+		usageFlags,						// usage
+		vk::VK_SHARING_MODE_EXCLUSIVE,	// sharingMode
+		0u,								// queueFamilyCount
+		DE_NULL,						// pQueueFamilyIndices
+	};
+	vk::Move<vk::VkBuffer>			buffer				(vk::createBuffer(m_vki, m_device, &createInfo));
+	de::MovePtr<vk::Allocation>		allocation			(allocateAndBindObjectMemory(m_vki, m_device, m_allocator, *buffer, vk::MemoryRequirement::HostVisible));
+	void*							mapPtr				= allocation->getHostPtr();
+
+	if (offset)
+		deMemset(mapPtr, 0x5A, (size_t)offset);
+	deMemcpy((deUint8*)mapPtr + offset, value1.getPtr(), sizeof(tcu::Vec4));
+	deMemcpy((deUint8*)mapPtr + offset + sizeof(tcu::Vec4), value2.getPtr(), sizeof(tcu::Vec4));
+	deMemset((deUint8*)mapPtr + offset + 2 * sizeof(tcu::Vec4), 0x5A, (size_t)bufferSize - (size_t)offset - 2 * sizeof(tcu::Vec4));
+
+	flushMappedMemoryRange(m_vki, m_device, allocation->getMemory(), allocation->getOffset(), bufferSize);
+
+	*outAllocation = allocation;
+	return buffer;
+}
+
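+// Binding 0 is the result storage buffer; the source descriptor(s) follow in
+// the next binding(s) depending on the tested shader interface.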
+vk::Move<vk::VkDescriptorSetLayout> BufferComputeInstance::createDescriptorSetLayout (void) const
+{
+	vk::DescriptorSetLayoutBuilder builder;
+
+	builder.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+
+	switch (m_shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.addSingleBinding(m_descriptorType, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.addSingleBinding(m_descriptorType, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+			builder.addSingleBinding(m_descriptorType, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.addArrayBinding(m_descriptorType, 2u, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	return builder.build(m_vki, m_device);
+}
+
+vk::Move<vk::VkDescriptorPool> BufferComputeInstance::createDescriptorPool (void) const
+{
+	return vk::DescriptorPoolBuilder()
+		.addType(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.addType(m_descriptorType, getInterfaceNumResources(m_shaderInterface))
+		.build(m_vki, m_device, vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1);
+}
+
+vk::Move<vk::VkDescriptorSet> BufferComputeInstance::createDescriptorSet (vk::VkDescriptorPool pool, vk::VkDescriptorSetLayout layout, vk::VkBuffer viewA, deUint32 offsetA, vk::VkBuffer viewB, deUint32 offsetB, vk::VkBuffer resBuf) const
+{
+	const vk::VkDescriptorBufferInfo		resultInfo		= vk::makeDescriptorBufferInfo(resBuf, 0u, (vk::VkDeviceSize)ComputeInstanceResultBuffer::DATA_SIZE);
+	const vk::VkDescriptorBufferInfo		bufferInfos[2]	=
+	{
+		vk::makeDescriptorBufferInfo(viewA, (vk::VkDeviceSize)offsetA, (vk::VkDeviceSize)sizeof(tcu::Vec4[2])),
+		vk::makeDescriptorBufferInfo(viewB, (vk::VkDeviceSize)offsetB, (vk::VkDeviceSize)sizeof(tcu::Vec4[2])),
+	};
+	const vk::VkDescriptorSetAllocateInfo	allocInfo		=
+	{
+		vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+		DE_NULL,
+		pool,
+		1u,
+		&layout
+	};
+
+	vk::Move<vk::VkDescriptorSet>	descriptorSet	= allocateDescriptorSet(m_vki, m_device, &allocInfo);
+	vk::DescriptorSetUpdateBuilder	builder;
+
+	// result
+	builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &resultInfo);
+
+	// buffers
+	switch (m_shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), m_descriptorType, &bufferInfos[0]);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), m_descriptorType, &bufferInfos[0]);
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(2u), m_descriptorType, &bufferInfos[1]);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.writeArray(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), m_descriptorType, 2u, bufferInfos);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	builder.update(m_vki, m_device);
+	return descriptorSet;
+}
+
+tcu::TestStatus BufferComputeInstance::iterate (void)
+{
+	logTestPlan();
+	return testResourceAccess();
+}
+
+void BufferComputeInstance::logTestPlan (void) const
+{
+	std::ostringstream msg;
+
+	msg << "Accessing resource in a compute program.\n"
+		<< "Single descriptor set. Descriptor set contains "
+			<< ((m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR) ? "single" :
+				(m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS) ? "two" :
+				(m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY) ? "an array (size 2) of" :
+				(const char*)DE_NULL)
+		<< " source descriptor(s) of type " << vk::getDescriptorTypeName(m_descriptorType)
+		<< " and one destination VK_DESCRIPTOR_TYPE_STORAGE_BUFFER to store results to.\n"
+		<< "Source descriptor buffer view(s) have " << ((m_setViewOffset) ? ("non-") : ("")) << "zero offset.\n";
+
+	if (isDynamicDescriptorType(m_descriptorType))
+	{
+		if (m_setDynamicOffset)
+		{
+			msg << "Source buffer(s) are given a dynamic offset at bind time.\n"
+				<< "The supplied dynamic offset is " << ((m_dynamicOffsetNonZero) ? ("non-") : ("")) << "zero.\n";
+		}
+		else
+		{
+			msg << "Dynamic offset is not supplied at bind time. Expecting bind to offset 0.\n";
+		}
+	}
+
+	msg << "Destination buffer is pre-initialized to -1.\n";
+
+	m_context.getTestContext().getLog()
+		<< tcu::TestLog::Message
+		<< msg.str()
+		<< tcu::TestLog::EndMessage;
+}
+
+tcu::TestStatus BufferComputeInstance::testResourceAccess (void)
+{
+	enum
+	{
+		ADDRESSABLE_SIZE = 256, // allocate a lot more than required
+	};
+
+	const bool										isDynamicCase		= isDynamicDescriptorType(m_descriptorType);
+	const bool										isUniformBuffer		= isUniformDescriptorType(m_descriptorType);
+	const deUint32									bindTimeOffsets[]	=
+	{
+		(m_dynamicOffsetNonZero) ? ((deUint32)DYNAMIC_OFFSET_VALUE_A) : (0u),
+		(m_dynamicOffsetNonZero) ? ((deUint32)DYNAMIC_OFFSET_VALUE_B) : (0u),
+	};
+
+	const tcu::Vec4									colorA1				= tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f);
+	const tcu::Vec4									colorA2				= tcu::Vec4(1.0f, 1.0f, 0.0f, 1.0f);
+	const tcu::Vec4									colorB1				= tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f);
+	const tcu::Vec4									colorB2				= tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f);
+
+	const deUint32									dataOffsetA			= ((isDynamicCase) ? (bindTimeOffsets[0]) : 0) + ((m_setViewOffset) ? ((deUint32)STATIC_OFFSET_VALUE_A) : (0u));
+	const deUint32									dataOffsetB			= ((isDynamicCase) ? (bindTimeOffsets[1]) : 0) + ((m_setViewOffset) ? ((deUint32)STATIC_OFFSET_VALUE_B) : (0u));
+	const deUint32									viewOffsetA			= (m_setViewOffset) ? ((deUint32)STATIC_OFFSET_VALUE_A) : (0u);
+	const deUint32									viewOffsetB			= (m_setViewOffset) ? ((deUint32)STATIC_OFFSET_VALUE_B) : (0u);
+	const deUint32									bufferSizeA			= dataOffsetA + ADDRESSABLE_SIZE;
+	const deUint32									bufferSizeB			= dataOffsetB + ADDRESSABLE_SIZE;
+
+	de::MovePtr<vk::Allocation>						bufferMemA;
+	const vk::Unique<vk::VkBuffer>					bufferA				(createColorDataBuffer(dataOffsetA, bufferSizeA, colorA1, colorA2, &bufferMemA));
+
+	de::MovePtr<vk::Allocation>						bufferMemB;
+	const vk::Unique<vk::VkBuffer>					bufferB				((getInterfaceNumResources(m_shaderInterface) == 1u)
+																			? (vk::Move<vk::VkBuffer>())
+																			: (createColorDataBuffer(dataOffsetB, bufferSizeB, colorB1, colorB2, &bufferMemB)));
+
+	const vk::Unique<vk::VkDescriptorSetLayout>		descriptorSetLayout	(createDescriptorSetLayout());
+	const vk::Unique<vk::VkDescriptorPool>			descriptorPool		(createDescriptorPool());
+	const vk::Unique<vk::VkDescriptorSet>			descriptorSet		(createDescriptorSet(*descriptorPool, *descriptorSetLayout, *bufferA, viewOffsetA, *bufferB, viewOffsetB, m_result.getBuffer()));
+	const ComputePipeline							pipeline			(m_vki, m_device, m_context.getBinaryCollection(), 1, &descriptorSetLayout.get());
+
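+	// host write -> shader read barriers for the source data buffers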
+	const vk::VkAccessFlags							inputBit			= (isUniformBuffer) ? (vk::VK_ACCESS_UNIFORM_READ_BIT) : (vk::VK_ACCESS_SHADER_READ_BIT);
+	const vk::VkBufferMemoryBarrier					bufferBarriers[]	=
+	{
+		{
+			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+			DE_NULL,
+			vk::VK_ACCESS_HOST_WRITE_BIT,				// outputMask
+			inputBit,									// inputMask
+			vk::VK_QUEUE_FAMILY_IGNORED,				// srcQueueFamilyIndex
+			vk::VK_QUEUE_FAMILY_IGNORED,				// destQueueFamilyIndex
+			*bufferA,									// buffer
+			(vk::VkDeviceSize)0u,						// offset
+			(vk::VkDeviceSize)bufferSizeA,				// size
+		},
+		{
+			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+			DE_NULL,
+			vk::VK_ACCESS_HOST_WRITE_BIT,				// outputMask
+			inputBit,									// inputMask
+			vk::VK_QUEUE_FAMILY_IGNORED,				// srcQueueFamilyIndex
+			vk::VK_QUEUE_FAMILY_IGNORED,				// destQueueFamilyIndex
+			*bufferB,									// buffer
+			(vk::VkDeviceSize)0u,						// offset
+			(vk::VkDeviceSize)bufferSizeB,				// size
+		}
+	};
+
+	const deUint32									numSrcBuffers		= getInterfaceNumResources(m_shaderInterface);
+
+	const vk::VkDescriptorSet						descriptorSets[]	= { *descriptorSet };
+	const int										numDescriptorSets	= DE_LENGTH_OF_ARRAY(descriptorSets);
+	const deUint32* const							dynamicOffsets		= (m_setDynamicOffset) ? (bindTimeOffsets) : (DE_NULL);
+	const deUint32									numDynamicOffsets	= (m_setDynamicOffset) ? (numSrcBuffers) : (0);
+	const vk::VkBufferMemoryBarrier* const			preBarriers			= bufferBarriers;
+	const int										numPreBarriers		= numSrcBuffers;
+	const vk::VkBufferMemoryBarrier* const			postBarriers		= m_result.getResultReadBarrier();
+	const int										numPostBarriers		= 1;
+
+	const ComputeCommand							compute				(m_vki,
+																		 m_device,
+																		 pipeline.getPipeline(),
+																		 pipeline.getPipelineLayout(),
+																		 tcu::UVec3(4, 1, 1),
+																		 numDescriptorSets,	descriptorSets,
+																		 numDynamicOffsets,	dynamicOffsets,
+																		 numPreBarriers,	preBarriers,
+																		 numPostBarriers,	postBarriers);
+
+	const tcu::Vec4									refQuadrantValue14	= (m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR)		? (colorA2) :
+																		  (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS)	? (colorB2) :
+																		  (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY)		? (colorB2) :
+																																	(tcu::Vec4(-2.0f));
+	const tcu::Vec4									refQuadrantValue23	= (m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR)		? (colorA1) :
+																		  (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS)	? (colorA1) :
+																		  (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY)		? (colorA1) :
+																																	(tcu::Vec4(-2.0f));
+	const tcu::Vec4									references[4]		=
+	{
+		refQuadrantValue14,
+		refQuadrantValue23,
+		refQuadrantValue23,
+		refQuadrantValue14,
+	};
+	tcu::Vec4										results[4];
+
+	compute.submitAndWait(m_queueFamilyIndex, m_queue);
+	m_result.readResultContentsTo(&results);
+
+	// verify
+	if (results[0] == references[0] &&
+		results[1] == references[1] &&
+		results[2] == references[2] &&
+		results[3] == references[3])
+	{
+		return tcu::TestStatus::pass("Pass");
+	}
+	else if (results[0] == tcu::Vec4(-1.0f) &&
+			 results[1] == tcu::Vec4(-1.0f) &&
+			 results[2] == tcu::Vec4(-1.0f) &&
+			 results[3] == tcu::Vec4(-1.0f))
+	{
+		m_context.getTestContext().getLog()
+			<< tcu::TestLog::Message
+			<< "Result buffer was not written to."
+			<< tcu::TestLog::EndMessage;
+		return tcu::TestStatus::fail("Result buffer was not written to");
+	}
+	else
+	{
+		m_context.getTestContext().getLog()
+			<< tcu::TestLog::Message
+			<< "Error expected ["
+				<< references[0] << ", "
+				<< references[1] << ", "
+				<< references[2] << ", "
+				<< references[3] << "], got ["
+				<< results[0] << ", "
+				<< results[1] << ", "
+				<< results[2] << ", "
+				<< results[3] << "]"
+			<< tcu::TestLog::EndMessage;
+		return tcu::TestStatus::fail("Invalid result values");
+	}
+}
+
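+// Base class for cases that produce a different color in each of the four
+// target quadrants; subclasses provide the per-stage resource declarations
+// and resource access code.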
+class QuadrantRendederCase : public vkt::TestCase
+{
+public:
+									QuadrantRendederCase		(tcu::TestContext&		testCtx,
+																 const char*			name,
+																 const char*			description,
+																 glu::GLSLVersion		glslVersion,
+																 vk::VkShaderStageFlags	exitingStages,
+																 vk::VkShaderStageFlags	activeStages);
+private:
+	virtual std::string				genExtensionDeclarations	(vk::VkShaderStageFlagBits stage) const = 0;
+	virtual std::string				genResourceDeclarations		(vk::VkShaderStageFlagBits stage, int numUsedBindings) const = 0;
+	virtual std::string				genResourceAccessSource		(vk::VkShaderStageFlagBits stage) const = 0;
+	virtual std::string				genNoAccessSource			(void) const = 0;
+
+	std::string						genVertexSource				(void) const;
+	std::string						genTessCtrlSource			(void) const;
+	std::string						genTessEvalSource			(void) const;
+	std::string						genGeometrySource			(void) const;
+	std::string						genFragmentSource			(void) const;
+	std::string						genComputeSource			(void) const;
+
+	void							initPrograms				(vk::SourceCollections& programCollection) const;
+
+protected:
+	const glu::GLSLVersion			m_glslVersion;
+	const vk::VkShaderStageFlags	m_exitingStages;
+	const vk::VkShaderStageFlags	m_activeStages;
+};
+
+QuadrantRendederCase::QuadrantRendederCase (tcu::TestContext&		testCtx,
+											const char*				name,
+											const char*				description,
+											glu::GLSLVersion		glslVersion,
+											vk::VkShaderStageFlags	exitingStages,
+											vk::VkShaderStageFlags	activeStages)
+	: vkt::TestCase		(testCtx, name, description)
+	, m_glslVersion		(glslVersion)
+	, m_exitingStages	(exitingStages)
+	, m_activeStages	(activeStages)
+{
+	DE_ASSERT((m_exitingStages & m_activeStages) == m_activeStages);
+}
+
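+// Vertex shader: performs the resource access itself when the vertex stage is
+// active, otherwise only computes the position and forwards the quadrant id.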
+std::string QuadrantRendederCase::genVertexSource (void) const
+{
+	const char* const	nextStageName	= ((m_exitingStages & vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) != 0u)	? ("tsc")
+										: ((m_exitingStages & vk::VK_SHADER_STAGE_GEOMETRY_BIT) != 0u)				? ("geo")
+										: ((m_exitingStages & vk::VK_SHADER_STAGE_FRAGMENT_BIT) != 0u)				? ("frag")
+										: (DE_NULL);
+	const char* const	versionDecl		= glu::getGLSLVersionDeclaration(m_glslVersion);
+	std::ostringstream	buf;
+
+	if ((m_activeStages & vk::VK_SHADER_STAGE_VERTEX_BIT) != 0u)
+	{
+		// active vertex shader
+		buf << versionDecl << "\n"
+			<< genExtensionDeclarations(vk::VK_SHADER_STAGE_VERTEX_BIT)
+			<< genResourceDeclarations(vk::VK_SHADER_STAGE_VERTEX_BIT, 0)
+			<< "layout(location = 0) out highp vec4 " << nextStageName << "_color;\n"
+			<< "layout(location = 1) flat out highp int " << nextStageName << "_quadrant_id;\n"
+			<< "void main (void)\n"
+			<< "{\n"
+			<< "	highp vec4 result_position;\n"
+			<< "	highp int quadrant_id;\n"
+			<< s_quadrantGenVertexPosSource
+			<< "	gl_Position = result_position;\n"
+			<< "	" << nextStageName << "_quadrant_id = quadrant_id;\n"
+			<< "\n"
+			<< "	highp vec4 result_color;\n"
+			<< genResourceAccessSource(vk::VK_SHADER_STAGE_VERTEX_BIT)
+			<< "	" << nextStageName << "_color = result_color;\n"
+			<< "}\n";
+	}
+	else
+	{
+		// do nothing
+		buf << versionDecl << "\n"
+			<< genExtensionDeclarations(vk::VK_SHADER_STAGE_VERTEX_BIT)
+			<< "layout(location = 1) flat out highp int " << nextStageName << "_quadrant_id;\n"
+			<< "void main (void)\n"
+			<< "{\n"
+			<< "	highp vec4 result_position;\n"
+			<< "	highp int quadrant_id;\n"
+			<< s_quadrantGenVertexPosSource
+			<< "	gl_Position = result_position;\n"
+			<< "	" << nextStageName << "_quadrant_id = quadrant_id;\n"
+			<< "}\n";
+	}
+
+	return buf.str();
+}
+
+std::string QuadrantRendederCase::genTessCtrlSource (void) const
+{
+	const char* const	versionDecl		= glu::getGLSLVersionDeclaration(m_glslVersion);
+	const bool			extRequired		= glu::glslVersionIsES(m_glslVersion) && m_glslVersion <= glu::GLSL_VERSION_310_ES;
+	const char* const	tessExtDecl		= extRequired ? "#extension GL_EXT_tessellation_shader : require\n" : "";
+	std::ostringstream	buf;
+
+	if ((m_activeStages & vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) != 0u)
+	{
+		// contributing not implemented
+		DE_ASSERT(m_activeStages == vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT);
+
+		// active tc shader
+		buf << versionDecl << "\n"
+			<< tessExtDecl
+			<< genExtensionDeclarations(vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
+			<< "layout(vertices=3) out;\n"
+			<< genResourceDeclarations(vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, 0)
+			<< "layout(location = 1) flat in highp int tsc_quadrant_id[];\n"
+			<< "layout(location = 0) out highp vec4 tes_color[];\n"
+			<< "void main (void)\n"
+			<< "{\n"
+			<< "	highp vec4 result_color;\n"
+			<< "	highp int quadrant_id = tsc_quadrant_id[gl_InvocationID];\n"
+			<< genResourceAccessSource(vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
+			<< "\n"
+			<< "	tes_color[gl_InvocationID] = result_color;\n"
+			<< "\n"
+			<< "	// no dynamic input block indexing\n"
+			<< "	highp vec4 position;\n"
+			<< "	if (gl_InvocationID == 0)\n"
+			<< "		position = gl_in[0].gl_Position;\n"
+			<< "	else if (gl_InvocationID == 1)\n"
+			<< "		position = gl_in[1].gl_Position;\n"
+			<< "	else\n"
+			<< "		position = gl_in[2].gl_Position;\n"
+			<< "	gl_out[gl_InvocationID].gl_Position = position;\n"
+			<< "	gl_TessLevelInner[0] = 2.8;\n"
+			<< "	gl_TessLevelInner[1] = 2.8;\n"
+			<< "	gl_TessLevelOuter[0] = 2.8;\n"
+			<< "	gl_TessLevelOuter[1] = 2.8;\n"
+			<< "	gl_TessLevelOuter[2] = 2.8;\n"
+			<< "	gl_TessLevelOuter[3] = 2.8;\n"
+			<< "}\n";
+	}
+	else if ((m_activeStages & vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) != 0u)
+	{
+		// active te shader, tc passthru
+		buf << versionDecl << "\n"
+			<< tessExtDecl
+			<< "layout(vertices=3) out;\n"
+			<< "layout(location = 1) flat in highp int tsc_quadrant_id[];\n"
+			<< "layout(location = 1) flat out highp int tes_quadrant_id[];\n"
+			<< "void main (void)\n"
+			<< "{\n"
+			<< "	tes_quadrant_id[gl_InvocationID] = tsc_quadrant_id[0];\n"
+			<< "\n"
+			<< "	// no dynamic input block indexing\n"
+			<< "	highp vec4 position;\n"
+			<< "	if (gl_InvocationID == 0)\n"
+			<< "		position = gl_in[0].gl_Position;\n"
+			<< "	else if (gl_InvocationID == 1)\n"
+			<< "		position = gl_in[1].gl_Position;\n"
+			<< "	else\n"
+			<< "		position = gl_in[2].gl_Position;\n"
+			<< "	gl_out[gl_InvocationID].gl_Position = position;\n"
+			<< "	gl_TessLevelInner[0] = 2.8;\n"
+			<< "	gl_TessLevelInner[1] = 2.8;\n"
+			<< "	gl_TessLevelOuter[0] = 2.8;\n"
+			<< "	gl_TessLevelOuter[1] = 2.8;\n"
+			<< "	gl_TessLevelOuter[2] = 2.8;\n"
+			<< "	gl_TessLevelOuter[3] = 2.8;\n"
+			<< "}\n";
+	}
+	else
+	{
+		// passthrough not implemented
+		DE_FATAL("not implemented");
+	}
+
+	return buf.str();
+}
+
+std::string QuadrantRendederCase::genTessEvalSource (void) const
+{
+	const char* const	versionDecl		= glu::getGLSLVersionDeclaration(m_glslVersion);
+	const bool			extRequired		= glu::glslVersionIsES(m_glslVersion) && m_glslVersion <= glu::GLSL_VERSION_310_ES;
+	const char* const	tessExtDecl		= extRequired ? "#extension GL_EXT_tessellation_shader : require\n" : "";
+	std::ostringstream	buf;
+
+	if ((m_activeStages & vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) != 0u)
+	{
+		// contributing not implemented
+		DE_ASSERT(m_activeStages == vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT);
+
+		// active te shader
+		buf << versionDecl << "\n"
+			<< tessExtDecl
+			<< genExtensionDeclarations(vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
+			<< "layout(triangles) in;\n"
+			<< genResourceDeclarations(vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, 0)
+			<< "layout(location = 1) flat in highp int tes_quadrant_id[];\n"
+			<< "layout(location = 0) out highp vec4 frag_color;\n"
+			<< "void main (void)\n"
+			<< "{\n"
+			<< "	highp vec4 result_color;\n"
+			<< "	highp int quadrant_id = tes_quadrant_id[0];\n"
+			<< genResourceAccessSource(vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
+			<< "\n"
+			<< "	frag_color = result_color;\n"
+			<< "	gl_Position = gl_TessCoord.x * gl_in[0].gl_Position + gl_TessCoord.y * gl_in[1].gl_Position + gl_TessCoord.z * gl_in[2].gl_Position;\n"
+			<< "}\n";
+	}
+	else if ((m_activeStages & vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) != 0u)
+	{
+		// contributing not implemented
+		DE_ASSERT(m_activeStages == vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT);
+
+		// active tc shader, te is passthru
+		buf << versionDecl << "\n"
+			<< tessExtDecl
+			<< "layout(triangles) in;\n"
+			<< "layout(location = 0) in highp vec4 tes_color[];\n"
+			<< "layout(location = 0) out highp vec4 frag_color;\n"
+			<< "void main (void)\n"
+			<< "{\n"
+			<< "	frag_color = tes_color[0];\n"
+			<< "	gl_Position = gl_TessCoord.x * gl_in[0].gl_Position + gl_TessCoord.y * gl_in[1].gl_Position + gl_TessCoord.z * gl_in[2].gl_Position;\n"
+			<< "}\n";
+	}
+	else
+	{
+		// passthrough not implemented
+		DE_FATAL("not implemented");
+	}
+
+	return buf.str();
+}
+
+std::string QuadrantRendederCase::genGeometrySource (void) const
+{
+	const char* const	versionDecl		= glu::getGLSLVersionDeclaration(m_glslVersion);
+	const bool			extRequired		= glu::glslVersionIsES(m_glslVersion) && m_glslVersion <= glu::GLSL_VERSION_310_ES;
+	const char* const	geomExtDecl		= extRequired ? "#extension GL_EXT_geometry_shader : require\n" : "";
+	std::ostringstream	buf;
+
+	if ((m_activeStages & vk::VK_SHADER_STAGE_GEOMETRY_BIT) != 0u)
+	{
+		// contributing not implemented
+		DE_ASSERT(m_activeStages == vk::VK_SHADER_STAGE_GEOMETRY_BIT);
+
+		// active geometry shader
+		buf << versionDecl << "\n"
+			<< geomExtDecl
+			<< genExtensionDeclarations(vk::VK_SHADER_STAGE_GEOMETRY_BIT)
+			<< "layout(triangles) in;\n"
+			<< "layout(triangle_strip, max_vertices=4) out;\n"
+			<< genResourceDeclarations(vk::VK_SHADER_STAGE_GEOMETRY_BIT, 0)
+			<< "layout(location = 1) flat in highp int geo_quadrant_id[];\n"
+			<< "layout(location = 0) out highp vec4 frag_color;\n"
+			<< "void main (void)\n"
+			<< "{\n"
+			<< "	highp int quadrant_id;\n"
+			<< "	highp vec4 result_color;\n"
+			<< "\n"
+			<< "	quadrant_id = geo_quadrant_id[0];\n"
+			<< genResourceAccessSource(vk::VK_SHADER_STAGE_GEOMETRY_BIT)
+			<< "	frag_color = result_color;\n"
+			<< "	gl_Position = gl_in[0].gl_Position;\n"
+			<< "	EmitVertex();\n"
+			<< "\n"
+			<< "	quadrant_id = geo_quadrant_id[1];\n"
+			<< genResourceAccessSource(vk::VK_SHADER_STAGE_GEOMETRY_BIT)
+			<< "	frag_color = result_color;\n"
+			<< "	gl_Position = gl_in[1].gl_Position;\n"
+			<< "	EmitVertex();\n"
+			<< "\n"
+			<< "	quadrant_id = geo_quadrant_id[2];\n"
+			<< genResourceAccessSource(vk::VK_SHADER_STAGE_GEOMETRY_BIT)
+			<< "	frag_color = result_color;\n"
+			<< "	gl_Position = gl_in[0].gl_Position * 0.5 + gl_in[2].gl_Position * 0.5;\n"
+			<< "	EmitVertex();\n"
+			<< "\n"
+			<< "	quadrant_id = geo_quadrant_id[0];\n"
+			<< genResourceAccessSource(vk::VK_SHADER_STAGE_GEOMETRY_BIT)
+			<< "	frag_color = result_color;\n"
+			<< "	gl_Position = gl_in[2].gl_Position;\n"
+			<< "	EmitVertex();\n"
+			<< "}\n";
+	}
+	else
+	{
+		// passthrough not implemented
+		DE_FATAL("not implemented");
+	}
+
+	return buf.str();
+}
+
+std::string QuadrantRendederCase::genFragmentSource (void) const
+{
+	const char* const	versionDecl		= glu::getGLSLVersionDeclaration(m_glslVersion);
+	std::ostringstream	buf;
+
+	if ((m_activeStages & vk::VK_SHADER_STAGE_FRAGMENT_BIT) != 0u)
+	{
+		buf << versionDecl << "\n"
+			<< genExtensionDeclarations(vk::VK_SHADER_STAGE_FRAGMENT_BIT)
+			<< genResourceDeclarations(vk::VK_SHADER_STAGE_FRAGMENT_BIT, 0);
+
+		if (m_activeStages != vk::VK_SHADER_STAGE_FRAGMENT_BIT)
+		{
+			// there are other stages, this is just a contributor
+			buf << "layout(location = 0) in mediump vec4 frag_color;\n";
+		}
+
+		buf << "layout(location = 1) flat in highp int frag_quadrant_id;\n"
+			<< "layout(location = 0) out mediump vec4 o_color;\n"
+			<< "void main (void)\n"
+			<< "{\n"
+			<< "	highp int quadrant_id = frag_quadrant_id;\n"
+			<< "	highp vec4 result_color;\n"
+			<< genResourceAccessSource(vk::VK_SHADER_STAGE_FRAGMENT_BIT);
+
+		if (m_activeStages != vk::VK_SHADER_STAGE_FRAGMENT_BIT)
+		{
+			// just contributor
+			buf	<< "	if (frag_quadrant_id < 2)\n"
+				<< "		o_color = result_color;\n"
+				<< "	else\n"
+				<< "		o_color = frag_color;\n";
+		}
+		else
+			buf << "	o_color = result_color;\n";
+
+		buf << "}\n";
+	}
+	else if (m_activeStages == 0u)
+	{
+		// special case, no active stages
+		buf << versionDecl << "\n"
+			<< "layout(location = 1) flat in highp int frag_quadrant_id;\n"
+			<< "layout(location = 0) out mediump vec4 o_color;\n"
+			<< "void main (void)\n"
+			<< "{\n"
+			<< "	highp int quadrant_id = frag_quadrant_id;\n"
+			<< "	highp vec4 result_color;\n"
+			<< genNoAccessSource()
+			<< "	o_color = result_color;\n"
+			<< "}\n";
+	}
+	else
+	{
+		// passthrough
+		buf <<	versionDecl << "\n"
+			<<	"layout(location = 0) in mediump vec4 frag_color;\n"
+				"layout(location = 0) out mediump vec4 o_color;\n"
+				"void main (void)\n"
+				"{\n"
+				"	o_color = frag_color;\n"
+				"}\n";
+	}
+
+	return buf.str();
+}
+
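+// Compute shader: one work group per quadrant; the accessed value is written
+// to the output storage buffer declared at binding 0.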
+std::string QuadrantRendederCase::genComputeSource (void) const
+{
+	const char* const	versionDecl		= glu::getGLSLVersionDeclaration(m_glslVersion);
+	std::ostringstream	buf;
+
+	buf	<< versionDecl << "\n"
+		<< genExtensionDeclarations(vk::VK_SHADER_STAGE_COMPUTE_BIT)
+		<< "layout(local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n"
+		<< genResourceDeclarations(vk::VK_SHADER_STAGE_COMPUTE_BIT, 1)
+		<< "layout(set = 0, binding = 0, std140) writeonly buffer OutBuf\n"
+		<< "{\n"
+		<< "	highp vec4 read_colors[4];\n"
+		<< "} b_out;\n"
+		<< "void main(void)\n"
+		<< "{\n"
+		<< "	highp int quadrant_id = int(gl_WorkGroupID.x);\n"
+		<< "	highp vec4 result_color;\n"
+		<< genResourceAccessSource(vk::VK_SHADER_STAGE_COMPUTE_BIT)
+		<< "	b_out.read_colors[gl_WorkGroupID.x] = result_color;\n"
+		<< "}\n";
+
+	return buf.str();
+}
+
+void QuadrantRendederCase::initPrograms (vk::SourceCollections& programCollection) const
+{
+	if ((m_exitingStages & vk::VK_SHADER_STAGE_VERTEX_BIT) != 0u)
+		programCollection.glslSources.add("vertex") << glu::VertexSource(genVertexSource());
+
+	if ((m_exitingStages & vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) != 0u)
+		programCollection.glslSources.add("tess_ctrl") << glu::TessellationControlSource(genTessCtrlSource());
+
+	if ((m_exitingStages & vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) != 0u)
+		programCollection.glslSources.add("tess_eval") << glu::TessellationEvaluationSource(genTessEvalSource());
+
+	if ((m_exitingStages & vk::VK_SHADER_STAGE_GEOMETRY_BIT) != 0u)
+		programCollection.glslSources.add("geometry") << glu::GeometrySource(genGeometrySource());
+
+	if ((m_exitingStages & vk::VK_SHADER_STAGE_FRAGMENT_BIT) != 0u)
+		programCollection.glslSources.add("fragment") << glu::FragmentSource(genFragmentSource());
+
+	if ((m_exitingStages & vk::VK_SHADER_STAGE_COMPUTE_BIT) != 0u)
+		programCollection.glslSources.add("compute") << glu::ComputeSource(genComputeSource());
+}
+
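+// Buffer descriptor cases; instantiated as a compute or render instance
+// depending on which shader stages are exercised.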
+class BufferDescriptorCase : public QuadrantRendederCase
+{
+public:
+	enum
+	{
+		FLAG_VIEW_OFFSET			= (1u << 1u),
+		FLAG_DYNAMIC_OFFSET_ZERO	= (1u << 2u),
+		FLAG_DYNAMIC_OFFSET_NONZERO	= (1u << 3u),
+	};
+	// enum continues where the resource flags end
+	DE_STATIC_ASSERT((deUint32)FLAG_VIEW_OFFSET == (deUint32)RESOURCE_FLAG_LAST);
+
+									BufferDescriptorCase		(tcu::TestContext&		testCtx,
+																 const char*			name,
+																 const char*			description,
+																 bool					isPrimaryCmdBuf,
+																 vk::VkDescriptorType	descriptorType,
+																 vk::VkShaderStageFlags	exitingStages,
+																 vk::VkShaderStageFlags	activeStages,
+																 ShaderInputInterface	shaderInterface,
+																 deUint32				flags);
+
+private:
+	std::string						genExtensionDeclarations	(vk::VkShaderStageFlagBits stage) const;
+	std::string						genResourceDeclarations		(vk::VkShaderStageFlagBits stage, int numUsedBindings) const;
+	std::string						genResourceAccessSource		(vk::VkShaderStageFlagBits stage) const;
+	std::string						genNoAccessSource			(void) const;
+
+	vkt::TestInstance*				createInstance				(vkt::Context& context) const;
+
+	const bool						m_viewOffset;
+	const bool						m_dynamicOffsetSet;
+	const bool						m_dynamicOffsetNonZero;
+	const bool						m_isPrimaryCmdBuf;
+	const vk::VkDescriptorType		m_descriptorType;
+	const ShaderInputInterface		m_shaderInterface;
+};
+
+BufferDescriptorCase::BufferDescriptorCase (tcu::TestContext&		testCtx,
+											const char*				name,
+											const char*				description,
+											bool					isPrimaryCmdBuf,
+											vk::VkDescriptorType	descriptorType,
+											vk::VkShaderStageFlags	exitingStages,
+											vk::VkShaderStageFlags	activeStages,
+											ShaderInputInterface	shaderInterface,
+											deUint32				flags)
+	: QuadrantRendederCase		(testCtx, name, description, glu::GLSL_VERSION_310_ES, exitingStages, activeStages)
+	, m_viewOffset				((flags & FLAG_VIEW_OFFSET) != 0u)
+	, m_dynamicOffsetSet		((flags & (FLAG_DYNAMIC_OFFSET_ZERO | FLAG_DYNAMIC_OFFSET_NONZERO)) != 0u)
+	, m_dynamicOffsetNonZero	((flags & FLAG_DYNAMIC_OFFSET_NONZERO) != 0u)
+	, m_isPrimaryCmdBuf			(isPrimaryCmdBuf)
+	, m_descriptorType			(descriptorType)
+	, m_shaderInterface			(shaderInterface)
+{
+}
+
+std::string BufferDescriptorCase::genExtensionDeclarations (vk::VkShaderStageFlagBits stage) const
+{
+	DE_UNREF(stage);
+	return "";
+}
+
+std::string BufferDescriptorCase::genResourceDeclarations (vk::VkShaderStageFlagBits stage, int numUsedBindings) const
+{
+	DE_UNREF(stage);
+
+	const bool			isUniform		= isUniformDescriptorType(m_descriptorType);
+	const char* const	storageType		= (isUniform) ? ("uniform") : ("buffer");
+	std::ostringstream	buf;
+
+	switch (m_shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			buf	<< "layout(set = 0, binding = " << (numUsedBindings) << ", std140) " << storageType << " BufferName\n"
+				<< "{\n"
+				<< "	highp vec4 colorA;\n"
+				<< "	highp vec4 colorB;\n"
+				<< "} b_instance;\n";
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			buf	<< "layout(set = 0, binding = " << (numUsedBindings) << ", std140) " << storageType << " BufferNameA\n"
+				<< "{\n"
+				<< "	highp vec4 colorA;\n"
+				<< "	highp vec4 colorB;\n"
+				<< "} b_instanceA;\n"
+				<< "layout(set = 0, binding = " << (numUsedBindings+1) << ", std140) " << storageType << " BufferNameB\n"
+				<< "{\n"
+				<< "	highp vec4 colorA;\n"
+				<< "	highp vec4 colorB;\n"
+				<< "} b_instanceB;\n";
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			buf	<< "layout(set = 0, binding = " << (numUsedBindings) << ", std140) " << storageType << " BufferName\n"
+				<< "{\n"
+				<< "	highp vec4 colorA;\n"
+				<< "	highp vec4 colorB;\n"
+				<< "} b_instances[2];\n";
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	return buf.str();
+}
+
+std::string BufferDescriptorCase::genResourceAccessSource (vk::VkShaderStageFlagBits stage) const
+{
+	DE_UNREF(stage);
+
+	std::ostringstream buf;
+
+	switch (m_shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			buf << "	if (quadrant_id == 1 || quadrant_id == 2)\n"
+				<< "		result_color = b_instance.colorA;\n"
+				<< "	else\n"
+				<< "		result_color = b_instance.colorB;\n";
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			buf << "	if (quadrant_id == 1 || quadrant_id == 2)\n"
+				<< "		result_color = b_instanceA.colorA;\n"
+				<< "	else\n"
+				<< "		result_color = b_instanceB.colorB;\n";
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			buf << "	if (quadrant_id == 1 || quadrant_id == 2)\n"
+				<< "		result_color = b_instances[0].colorA;\n"
+				<< "	else\n"
+				<< "		result_color = b_instances[1].colorB;\n";
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	return buf.str();
+}
+
+std::string BufferDescriptorCase::genNoAccessSource (void) const
+{
+	return "	if (quadrant_id == 1 || quadrant_id == 2)\n"
+		   "		result_color = vec4(0.0, 1.0, 0.0, 1.0);\n"
+		   "	else\n"
+		   "		result_color = vec4(1.0, 1.0, 0.0, 1.0);\n";
+}
+
+vkt::TestInstance* BufferDescriptorCase::createInstance (vkt::Context& context) const
+{
+	if (m_exitingStages == vk::VK_SHADER_STAGE_COMPUTE_BIT)
+	{
+		DE_ASSERT(m_isPrimaryCmdBuf); // secondaries are only valid within renderpass
+		return new BufferComputeInstance(context, m_descriptorType, m_shaderInterface, m_viewOffset, m_dynamicOffsetSet, m_dynamicOffsetNonZero);
+	}
+	else
+		return new BufferRenderInstance(context, m_isPrimaryCmdBuf, m_descriptorType, m_activeStages, m_shaderInterface, m_viewOffset, m_dynamicOffsetSet, m_dynamicOffsetNonZero);
+}
+
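+// Owns one or two test images (two mip levels and, where applicable, two
+// array layers) together with their views, populated with a recognizable
+// gradient pattern and uploaded via a staging buffer.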
+class ImageInstanceImages
+{
+public:
+										ImageInstanceImages		(const vk::DeviceInterface&		vki,
+																 vk::VkDevice					device,
+																 deUint32						queueFamilyIndex,
+																 vk::VkQueue					queue,
+																 vk::Allocator&					allocator,
+																 vk::VkDescriptorType			descriptorType,
+																 vk::VkImageViewType			viewType,
+																 int							numImages,
+																 deUint32						baseMipLevel,
+																 deUint32						baseArraySlice);
+
+private:
+	static vk::Move<vk::VkImage>		createImage				(const vk::DeviceInterface&			vki,
+																 vk::VkDevice						device,
+																 vk::Allocator&						allocator,
+																 vk::VkDescriptorType				descriptorType,
+																 vk::VkImageViewType				viewType,
+																 const tcu::TextureLevelPyramid&	sourceImage,
+																 de::MovePtr<vk::Allocation>*		outAllocation);
+
+	static vk::Move<vk::VkImageView>	createImageView			(const vk::DeviceInterface&			vki,
+																 vk::VkDevice						device,
+																 vk::VkImageViewType				viewType,
+																 const tcu::TextureLevelPyramid&	sourceImage,
+																 vk::VkImage						image,
+																 deUint32							baseMipLevel,
+																 deUint32							baseArraySlice);
+
+	void								populateSourceImage		(tcu::TextureLevelPyramid*			dst,
+																 bool								isFirst) const;
+
+	void								uploadImage				(const vk::DeviceInterface&			vki,
+																 vk::VkDevice						device,
+																 deUint32							queueFamilyIndex,
+																 vk::VkQueue						queue,
+																 vk::Allocator&						allocator,
+																 vk::VkImage						image,
+																 vk::VkImageLayout					layout,
+																 const tcu::TextureLevelPyramid&	data);
+
+protected:
+	enum
+	{
+		IMAGE_SIZE		= 64,
+		NUM_MIP_LEVELS	= 2,
+		ARRAY_SIZE		= 2,
+	};
+
+	const vk::VkImageViewType			m_viewType;
+	const deUint32						m_baseMipLevel;
+	const deUint32						m_baseArraySlice;
+
+	const tcu::TextureFormat			m_imageFormat;
+	tcu::TextureLevelPyramid			m_sourceImageA;
+	tcu::TextureLevelPyramid			m_sourceImageB;
+
+	de::MovePtr<vk::Allocation>			m_imageMemoryA;
+	de::MovePtr<vk::Allocation>			m_imageMemoryB;
+	vk::Move<vk::VkImage>				m_imageA;
+	vk::Move<vk::VkImage>				m_imageB;
+	vk::Move<vk::VkImageView>			m_imageViewA;
+	vk::Move<vk::VkImageView>			m_imageViewB;
+};
+
+ImageInstanceImages::ImageInstanceImages (const vk::DeviceInterface&	vki,
+										  vk::VkDevice					device,
+										  deUint32						queueFamilyIndex,
+										  vk::VkQueue					queue,
+										  vk::Allocator&				allocator,
+										  vk::VkDescriptorType			descriptorType,
+										  vk::VkImageViewType			viewType,
+										  int							numImages,
+										  deUint32						baseMipLevel,
+										  deUint32						baseArraySlice)
+	: m_viewType		(viewType)
+	, m_baseMipLevel	(baseMipLevel)
+	, m_baseArraySlice	(baseArraySlice)
+	, m_imageFormat		(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8)
+	, m_sourceImageA	(m_imageFormat, NUM_MIP_LEVELS)
+	, m_sourceImageB	(m_imageFormat, NUM_MIP_LEVELS)
+	, m_imageMemoryA	(DE_NULL)
+	, m_imageMemoryB	(DE_NULL)
+	, m_imageA			(vk::Move<vk::VkImage>())
+	, m_imageB			(vk::Move<vk::VkImage>())
+	, m_imageViewA		(vk::Move<vk::VkImageView>())
+	, m_imageViewB		(vk::Move<vk::VkImageView>())
+{
+	const vk::VkImageLayout	layout	= getImageLayoutForDescriptorType(descriptorType);
+
+	DE_ASSERT(numImages == 1 || numImages == 2);
+
+	populateSourceImage(&m_sourceImageA, true);
+	m_imageA = createImage(vki, device, allocator, descriptorType, viewType, m_sourceImageA, &m_imageMemoryA);
+	m_imageViewA = createImageView(vki, device, viewType, m_sourceImageA, *m_imageA, m_baseMipLevel, m_baseArraySlice);
+	uploadImage(vki, device, queueFamilyIndex, queue, allocator, *m_imageA, layout, m_sourceImageA);
+
+	if (numImages == 2)
+	{
+		populateSourceImage(&m_sourceImageB, false);
+		m_imageB = createImage(vki, device, allocator, descriptorType, viewType, m_sourceImageB, &m_imageMemoryB);
+		m_imageViewB = createImageView(vki, device, viewType, m_sourceImageB, *m_imageB, m_baseMipLevel, m_baseArraySlice);
+		uploadImage(vki, device, queueFamilyIndex, queue, allocator, *m_imageB, layout, m_sourceImageB);
+	}
+}
+
+vk::Move<vk::VkImage> ImageInstanceImages::createImage (const vk::DeviceInterface&			vki,
+														vk::VkDevice						device,
+														vk::Allocator&						allocator,
+														vk::VkDescriptorType				descriptorType,
+														vk::VkImageViewType					viewType,
+														const tcu::TextureLevelPyramid&		sourceImage,
+														de::MovePtr<vk::Allocation>*		outAllocation)
+{
+	const tcu::ConstPixelBufferAccess	baseLevel	= sourceImage.getLevel(0);
+	const bool							isCube		= (viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE || viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY);
+	const bool							isStorage	= (descriptorType == vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
+	const deUint32						readUsage	= (isStorage) ? (vk::VK_IMAGE_USAGE_STORAGE_BIT) : (vk::VK_IMAGE_USAGE_SAMPLED_BIT);
+	const deUint32						arraySize	= (viewType == vk::VK_IMAGE_VIEW_TYPE_1D || viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY)		? (baseLevel.getHeight())
+													: (viewType == vk::VK_IMAGE_VIEW_TYPE_2D || viewType == vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY)		? (baseLevel.getDepth())
+													: (viewType == vk::VK_IMAGE_VIEW_TYPE_3D)														? (1)
+													: (viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE || viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)	? (baseLevel.getDepth()) // cube: numFaces * numLayers
+																																					: (0);
+	const vk::VkExtent3D				extent		=
+	{
+		// x
+		(deUint32)baseLevel.getWidth(),
+
+		// y
+		(viewType == vk::VK_IMAGE_VIEW_TYPE_1D || viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY) ? (1u) : (deUint32)baseLevel.getHeight(),
+
+		// z
+		(viewType == vk::VK_IMAGE_VIEW_TYPE_3D) ? ((deUint32)baseLevel.getDepth()) : (1u),
+	};
+	const vk::VkImageCreateInfo			createInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+		DE_NULL,
+		isCube ? (vk::VkImageCreateFlags)vk::VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT : (vk::VkImageCreateFlags)0,
+		viewTypeToImageType(viewType),											// imageType
+		vk::mapTextureFormat(baseLevel.getFormat()),							// format
+		extent,																	// extent
+		(deUint32)sourceImage.getNumLevels(),									// mipLevels
+		arraySize,																// arraySize
+		vk::VK_SAMPLE_COUNT_1_BIT,												// samples
+		vk::VK_IMAGE_TILING_OPTIMAL,											// tiling
+		readUsage | vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT,						// usage
+		vk::VK_SHARING_MODE_EXCLUSIVE,											// sharingMode
+		0u,																		// queueFamilyCount
+		DE_NULL,																// pQueueFamilyIndices
+		vk::VK_IMAGE_LAYOUT_UNDEFINED,											// initialLayout
+	};
+	vk::Move<vk::VkImage>				image		(vk::createImage(vki, device, &createInfo));
+
+	*outAllocation = allocateAndBindObjectMemory(vki, device, allocator, *image, vk::MemoryRequirement::Any);
+	return image;
+}
+
+vk::Move<vk::VkImageView> ImageInstanceImages::createImageView (const vk::DeviceInterface&			vki,
+																vk::VkDevice						device,
+																vk::VkImageViewType					viewType,
+																const tcu::TextureLevelPyramid&		sourceImage,
+																vk::VkImage							image,
+																deUint32							baseMipLevel,
+																deUint32							baseArraySlice)
+{
+	const tcu::ConstPixelBufferAccess	baseLevel			= sourceImage.getLevel(0);
+	const deUint32						viewTypeBaseSlice	= (viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE || viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) ? (6 * baseArraySlice) : (baseArraySlice);
+	const deUint32						viewArraySize		= (viewType == vk::VK_IMAGE_VIEW_TYPE_1D)			? (1)
+															: (viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY)		? (baseLevel.getHeight() - viewTypeBaseSlice)
+															: (viewType == vk::VK_IMAGE_VIEW_TYPE_2D)			? (1)
+															: (viewType == vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY)		? (baseLevel.getDepth() - viewTypeBaseSlice)
+															: (viewType == vk::VK_IMAGE_VIEW_TYPE_3D)			? (1)
+															: (viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE)			? (6)
+															: (viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)	? (baseLevel.getDepth() - viewTypeBaseSlice) // cube: numFaces * numLayers
+																												: (0);
+
+	DE_ASSERT(viewArraySize > 0);
+
+	const vk::VkImageSubresourceRange	resourceRange	=
+	{
+		vk::VK_IMAGE_ASPECT_COLOR_BIT,					// aspectMask
+		baseMipLevel,									// baseMipLevel
+		sourceImage.getNumLevels() - baseMipLevel,		// mipLevels
+		viewTypeBaseSlice,								// baseArraySlice
+		viewArraySize,									// arraySize
+	};
+	const vk::VkImageViewCreateInfo		createInfo		=
+	{
+		vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+		DE_NULL,
+		(vk::VkImageViewCreateFlags)0,
+		image,											// image
+		viewType,										// viewType
+		vk::mapTextureFormat(baseLevel.getFormat()),	// format
+		{
+			vk::VK_COMPONENT_SWIZZLE_R,
+			vk::VK_COMPONENT_SWIZZLE_G,
+			vk::VK_COMPONENT_SWIZZLE_B,
+			vk::VK_COMPONENT_SWIZZLE_A
+		},												// channels
+		resourceRange,									// subresourceRange
+	};
+	return vk::createImageView(vki, device, &createInfo);
+}
+
+void ImageInstanceImages::populateSourceImage (tcu::TextureLevelPyramid* dst, bool isFirst) const
+{
+	const int numLevels = dst->getNumLevels();
+
+	for (int level = 0; level < numLevels; ++level)
+	{
+		const int	width	= IMAGE_SIZE >> level;
+		const int	height	= (m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D || m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY)		? (ARRAY_SIZE)
+																																: (IMAGE_SIZE >> level);
+		const int	depth	= (m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D || m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY)		? (1)
+							: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_2D || m_viewType == vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY)		? (ARRAY_SIZE)
+							: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE || m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)	? (6 * ARRAY_SIZE)
+							: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_3D)															? (IMAGE_SIZE >> level)
+																																: (1);
+
+		dst->allocLevel(level, width, height, depth);
+
+		{
+			const tcu::PixelBufferAccess levelAccess = dst->getLevel(level);
+
+			for (int z = 0; z < depth; ++z)
+			for (int y = 0; y < height; ++y)
+			for (int x = 0; x < width; ++x)
+			{
+				const int			gradPos	= x + y + z;
+				const int			gradMax	= width + height + depth - 3;
+
+				const int			red		= 255 * gradPos / gradMax;													//!< gradient from 0 -> max (detects large offset errors)
+				const int			green	= ((gradPos % 2 == 0) ? (127) : (0)) + ((gradPos % 4 < 3) ? (128) : (0));	//!< 3-level M pattern (detects small offset errors)
+				const int			blue	= (128 * level / numLevels) + (isFirst ? 127 : 0);							//!< level and image index (detects incorrect lod / image)
+
+				DE_ASSERT(de::inRange(red, 0, 255));
+				DE_ASSERT(de::inRange(green, 0, 255));
+				DE_ASSERT(de::inRange(blue, 0, 255));
+
+				levelAccess.setPixel(tcu::IVec4(red, green, blue, 255), x, y, z);
+			}
+		}
+	}
+}
+
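+// Uploads all levels and layers through a host-visible staging buffer,
+// transitioning the image to TRANSFER_DST_OPTIMAL for the copy and then to
+// the layout required by the descriptor type.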
+void ImageInstanceImages::uploadImage (const vk::DeviceInterface&		vki,
+									   vk::VkDevice						device,
+									   deUint32							queueFamilyIndex,
+									   vk::VkQueue						queue,
+									   vk::Allocator&					allocator,
+									   vk::VkImage						image,
+									   vk::VkImageLayout				layout,
+									   const tcu::TextureLevelPyramid&	data)
+{
+	const deUint32						arraySize					= (m_viewType == vk::VK_IMAGE_VIEW_TYPE_3D) ? (1) :
+																	  (m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE || m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY) ? (6 * (deUint32)ARRAY_SIZE) :
+																	  ((deUint32)ARRAY_SIZE);
+	const deUint32						dataBufferSize				= getTextureLevelPyramidDataSize(data);
+	const vk::VkBufferCreateInfo		bufferCreateInfo			=
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+		DE_NULL,
+		0u,													// flags
+		dataBufferSize,										// size
+		vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT,				// usage
+		vk::VK_SHARING_MODE_EXCLUSIVE,						// sharingMode
+		0u,													// queueFamilyCount
+		DE_NULL,											// pQueueFamilyIndices
+	};
+	const vk::Unique<vk::VkBuffer>		dataBuffer					(vk::createBuffer(vki, device, &bufferCreateInfo));
+	const de::MovePtr<vk::Allocation>	dataBufferMemory			= allocateAndBindObjectMemory(vki, device, allocator, *dataBuffer, vk::MemoryRequirement::HostVisible);
+	const vk::VkFenceCreateInfo			fenceCreateInfo				=
+	{
+		vk::VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+		0u,													// flags
+	};
+	const vk::VkBufferMemoryBarrier		preMemoryBarrier			=
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+		DE_NULL,
+		vk::VK_ACCESS_HOST_WRITE_BIT,					// outputMask
+		vk::VK_ACCESS_TRANSFER_READ_BIT,					// inputMask
+		vk::VK_QUEUE_FAMILY_IGNORED,						// srcQueueFamilyIndex
+		vk::VK_QUEUE_FAMILY_IGNORED,						// destQueueFamilyIndex
+		*dataBuffer,										// buffer
+		0u,													// offset
+		dataBufferSize,										// size
+	};
+	const vk::VkImageSubresourceRange	fullSubrange				=
+	{
+		vk::VK_IMAGE_ASPECT_COLOR_BIT,						// aspectMask
+		0u,													// baseMipLevel
+		(deUint32)data.getNumLevels(),						// mipLevels
+		0u,													// baseArraySlice
+		arraySize,											// arraySize
+	};
+	const vk::VkImageMemoryBarrier		preImageBarrier				=
+	{
+		vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+		DE_NULL,
+		0u,													// outputMask
+		0u,													// inputMask
+		vk::VK_IMAGE_LAYOUT_UNDEFINED,						// oldLayout
+		vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,			// newLayout
+		vk::VK_QUEUE_FAMILY_IGNORED,						// srcQueueFamilyIndex
+		vk::VK_QUEUE_FAMILY_IGNORED,						// destQueueFamilyIndex
+		image,												// image
+		fullSubrange										// subresourceRange
+	};
+	const vk::VkImageMemoryBarrier		postImageBarrier			=
+	{
+		vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+		DE_NULL,
+		vk::VK_ACCESS_TRANSFER_WRITE_BIT,					// outputMask
+		vk::VK_ACCESS_SHADER_READ_BIT,						// inputMask
+		vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,			// oldLayout
+		layout,												// newLayout
+		vk::VK_QUEUE_FAMILY_IGNORED,						// srcQueueFamilyIndex
+		vk::VK_QUEUE_FAMILY_IGNORED,						// destQueueFamilyIndex
+		image,												// image
+		fullSubrange										// subresourceRange
+	};
+	const vk::VkCommandPoolCreateInfo		cmdPoolCreateInfo			=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+		DE_NULL,
+		vk::VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// flags
+		queueFamilyIndex,									// queueFamilyIndex
+	};
+	const vk::Unique<vk::VkCommandPool>		cmdPool						(vk::createCommandPool(vki, device, &cmdPoolCreateInfo));
+	const vk::VkCommandBufferAllocateInfo	cmdBufCreateInfo			=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+		DE_NULL,
+		*cmdPool,											// cmdPool
+		vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// level
+		1u,													// count
+	};
+	const vk::VkCommandBufferBeginInfo		cmdBufBeginInfo				=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		vk::VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,	// flags
+		(const vk::VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const vk::Unique<vk::VkCommandBuffer>	cmd							(vk::allocateCommandBuffer(vki, device, &cmdBufCreateInfo));
+	const vk::Unique<vk::VkFence>			cmdCompleteFence			(vk::createFence(vki, device, &fenceCreateInfo));
+	const deUint64							infiniteTimeout				= ~(deUint64)0u;
+	std::vector<vk::VkBufferImageCopy>		copySlices;
+
+	// copy data to buffer
+	writeTextureLevelPyramidData(dataBufferMemory->getHostPtr(), dataBufferSize, data, m_viewType, &copySlices);
+	flushMappedMemoryRange(vki, device, dataBufferMemory->getMemory(), dataBufferMemory->getOffset(), dataBufferSize);
+
+	// record command buffer
+	VK_CHECK(vki.beginCommandBuffer(*cmd, &cmdBufBeginInfo));
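+	// make host writes available to the transfer stage and transition the image to TRANSFER_DST layout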
+	vki.cmdPipelineBarrier(*cmd, vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0,
+						   0, (const vk::VkMemoryBarrier*)DE_NULL,
+						   1, &preMemoryBarrier,
+						   1, &preImageBarrier);
+	vki.cmdCopyBufferToImage(*cmd, *dataBuffer, image, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (deUint32)copySlices.size(), &copySlices[0]);
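+	// transition the image to the requested layout and make transfer writes visible to shader reads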
+	vki.cmdPipelineBarrier(*cmd, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, (vk::VkDependencyFlags)0,
+						   0, (const vk::VkMemoryBarrier*)DE_NULL,
+						   0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+						   1, &postImageBarrier);
+	VK_CHECK(vki.endCommandBuffer(*cmd));
+
+	// submit and wait for command buffer to complete before killing it
+	{
+		const vk::VkSubmitInfo	submitInfo	=
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
+			DE_NULL,
+			0u,
+			(const vk::VkSemaphore*)0,
+			(const vk::VkPipelineStageFlags*)DE_NULL,
+			1u,
+			&cmd.get(),
+			0u,
+			(const vk::VkSemaphore*)0,
+		};
+		VK_CHECK(vki.queueSubmit(queue, 1, &submitInfo, *cmdCompleteFence));
+	}
+	VK_CHECK(vki.waitForFences(device, 1, &cmdCompleteFence.get(), 0u, infiniteTimeout)); // \note: timeout is failure
+}
+
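+// Source images and image views for image fetch cases; provides deterministic fetch positions and the corresponding reference texel values.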
+class ImageFetchInstanceImages : private ImageInstanceImages
+{
+public:
+								ImageFetchInstanceImages	(const vk::DeviceInterface&		vki,
+															 vk::VkDevice					device,
+															 deUint32						queueFamilyIndex,
+															 vk::VkQueue					queue,
+															 vk::Allocator&					allocator,
+															 vk::VkDescriptorType			descriptorType,
+															 ShaderInputInterface			shaderInterface,
+															 vk::VkImageViewType			viewType,
+															 deUint32						baseMipLevel,
+															 deUint32						baseArraySlice);
+
+	static tcu::IVec3			getFetchPos					(vk::VkImageViewType viewType, deUint32 baseMipLevel, deUint32 baseArraySlice, int fetchPosNdx);
+	tcu::Vec4					fetchImageValue				(int fetchPosNdx) const;
+
+	inline vk::VkImageView		getImageViewA				(void) const { return *m_imageViewA; }
+	inline vk::VkImageView		getImageViewB				(void) const { return *m_imageViewB; }
+
+private:
+	enum
+	{
+		// some arbitrary sample points for all four quadrants
+		SAMPLE_POINT_0_X = 6,
+		SAMPLE_POINT_0_Y = 13,
+		SAMPLE_POINT_0_Z = 49,
+
+		SAMPLE_POINT_1_X = 51,
+		SAMPLE_POINT_1_Y = 40,
+		SAMPLE_POINT_1_Z = 44,
+
+		SAMPLE_POINT_2_X = 42,
+		SAMPLE_POINT_2_Y = 26,
+		SAMPLE_POINT_2_Z = 19,
+
+		SAMPLE_POINT_3_X = 25,
+		SAMPLE_POINT_3_Y = 25,
+		SAMPLE_POINT_3_Z = 18,
+	};
+
+	const ShaderInputInterface	m_shaderInterface;
+};
+
+ImageFetchInstanceImages::ImageFetchInstanceImages (const vk::DeviceInterface&	vki,
+													vk::VkDevice				device,
+													deUint32					queueFamilyIndex,
+													vk::VkQueue					queue,
+													vk::Allocator&				allocator,
+													vk::VkDescriptorType		descriptorType,
+													ShaderInputInterface		shaderInterface,
+													vk::VkImageViewType			viewType,
+													deUint32					baseMipLevel,
+													deUint32					baseArraySlice)
+	: ImageInstanceImages	(vki,
+							 device,
+							 queueFamilyIndex,
+							 queue,
+							 allocator,
+							 descriptorType,
+							 viewType,
+							 getInterfaceNumResources(shaderInterface),	// numImages
+							 baseMipLevel,
+							 baseArraySlice)
+	, m_shaderInterface		(shaderInterface)
+{
+}
+
+bool isImageViewTypeArray (vk::VkImageViewType type)
+{
+	return type == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY || type == vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY || type == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
+}
+
+tcu::IVec3 ImageFetchInstanceImages::getFetchPos (vk::VkImageViewType viewType, deUint32 baseMipLevel, deUint32 baseArraySlice, int fetchPosNdx)
+{
+	const tcu::IVec3	fetchPositions[4]	=
+	{
+		tcu::IVec3(SAMPLE_POINT_0_X, SAMPLE_POINT_0_Y, SAMPLE_POINT_0_Z),
+		tcu::IVec3(SAMPLE_POINT_1_X, SAMPLE_POINT_1_Y, SAMPLE_POINT_1_Z),
+		tcu::IVec3(SAMPLE_POINT_2_X, SAMPLE_POINT_2_Y, SAMPLE_POINT_2_Z),
+		tcu::IVec3(SAMPLE_POINT_3_X, SAMPLE_POINT_3_Y, SAMPLE_POINT_3_Z),
+	};
+	const tcu::IVec3	coord				= de::getSizedArrayElement<4>(fetchPositions, fetchPosNdx);
+	const deUint32		imageSize			= (deUint32)IMAGE_SIZE >> baseMipLevel;
+	const deUint32		arraySize			= isImageViewTypeArray(viewType) ? ARRAY_SIZE - baseArraySlice : 1;
+
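+	// wrap the arbitrary sample points into the valid coordinate range of the selected view type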
+	switch (viewType)
+	{
+		case vk::VK_IMAGE_VIEW_TYPE_1D:
+		case vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY:	return tcu::IVec3(coord.x() % imageSize, coord.y() % arraySize, 0);
+		case vk::VK_IMAGE_VIEW_TYPE_2D:
+		case vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY:	return tcu::IVec3(coord.x() % imageSize, coord.y() % imageSize, coord.z() % arraySize);
+		case vk::VK_IMAGE_VIEW_TYPE_CUBE:
+		case vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:	return tcu::IVec3(coord.x() % imageSize, coord.y() % imageSize, coord.z() % (arraySize * 6));
+		case vk::VK_IMAGE_VIEW_TYPE_3D:			return tcu::IVec3(coord.x() % imageSize, coord.y() % imageSize, coord.z() % imageSize);
+		default:
+			DE_FATAL("Impossible");
+			return tcu::IVec3();
+	}
+}
+
+tcu::Vec4 ImageFetchInstanceImages::fetchImageValue (int fetchPosNdx) const
+{
+	DE_ASSERT(de::inBounds(fetchPosNdx, 0, 4));
+
+	const tcu::TextureLevelPyramid&	fetchSrcA	= m_sourceImageA;
+	const tcu::TextureLevelPyramid&	fetchSrcB	= (m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR) ? (m_sourceImageA) : (m_sourceImageB);
+	const tcu::TextureLevelPyramid&	fetchSrc	= ((fetchPosNdx % 2) == 0) ? (fetchSrcA) : (fetchSrcB); // sampling order is ABAB
+	tcu::IVec3						fetchPos	= getFetchPos(m_viewType, m_baseMipLevel, m_baseArraySlice, fetchPosNdx);
+
+	// add base array layer into the appropriate coordinate, based on the view type
+	if (m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE || m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)
+		fetchPos.z() += 6 * m_baseArraySlice;
+	else if (m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D || m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY)
+		fetchPos.y() += m_baseArraySlice;
+	else
+		fetchPos.z() += m_baseArraySlice;
+
+	return fetchSrc.getLevel(m_baseMipLevel).getPixel(fetchPos.x(), fetchPos.y(), fetchPos.z());
+}
+
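+// Renders a 2x2 grid; the color of each quadrant is fetched from the image descriptor(s) and compared against the reference values.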
+class ImageFetchRenderInstance : public SingleCmdRenderInstance
+{
+public:
+													ImageFetchRenderInstance	(vkt::Context&			context,
+																				 bool					isPrimaryCmdBuf,
+																				 vk::VkDescriptorType	descriptorType,
+																				 vk::VkShaderStageFlags	stageFlags,
+																				 ShaderInputInterface	shaderInterface,
+																				 vk::VkImageViewType	viewType,
+																				 deUint32				baseMipLevel,
+																				 deUint32				baseArraySlice);
+
+private:
+	static vk::Move<vk::VkDescriptorSetLayout>		createDescriptorSetLayout	(const vk::DeviceInterface&	vki,
+																				 vk::VkDevice				device,
+																				 vk::VkDescriptorType		descriptorType,
+																				 ShaderInputInterface		shaderInterface,
+																				 vk::VkShaderStageFlags		stageFlags);
+
+	static vk::Move<vk::VkPipelineLayout>			createPipelineLayout		(const vk::DeviceInterface&	vki,
+																				 vk::VkDevice				device,
+																				 vk::VkDescriptorSetLayout	descriptorSetLayout);
+
+	static vk::Move<vk::VkDescriptorPool>			createDescriptorPool		(const vk::DeviceInterface&	vki,
+																				 vk::VkDevice				device,
+																				 vk::VkDescriptorType		descriptorType,
+																				 ShaderInputInterface		shaderInterface);
+
+	static vk::Move<vk::VkDescriptorSet>			createDescriptorSet			(const vk::DeviceInterface&	vki,
+																				 vk::VkDevice				device,
+																				 vk::VkDescriptorType		descriptorType,
+																				 ShaderInputInterface		shaderInterface,
+																				 vk::VkDescriptorSetLayout	layout,
+																				 vk::VkDescriptorPool		pool,
+																				 vk::VkImageView			viewA,
+																				 vk::VkImageView			viewB);
+
+	void											logTestPlan					(void) const;
+	vk::VkPipelineLayout							getPipelineLayout			(void) const;
+	void											writeDrawCmdBuffer			(vk::VkCommandBuffer cmd) const;
+	tcu::TestStatus									verifyResultImage			(const tcu::ConstPixelBufferAccess& result) const;
+
+	enum
+	{
+		RENDER_SIZE = 128,
+	};
+
+	const vk::VkDescriptorType						m_descriptorType;
+	const vk::VkShaderStageFlags					m_stageFlags;
+	const ShaderInputInterface						m_shaderInterface;
+	const vk::VkImageViewType						m_viewType;
+	const deUint32									m_baseMipLevel;
+	const deUint32									m_baseArraySlice;
+
+	const vk::Unique<vk::VkDescriptorSetLayout>		m_descriptorSetLayout;
+	const vk::Unique<vk::VkPipelineLayout>			m_pipelineLayout;
+	const ImageFetchInstanceImages					m_images;
+	const vk::Unique<vk::VkDescriptorPool>			m_descriptorPool;
+	const vk::Unique<vk::VkDescriptorSet>			m_descriptorSet;
+};
+
+ImageFetchRenderInstance::ImageFetchRenderInstance	(vkt::Context&			context,
+													 bool					isPrimaryCmdBuf,
+													 vk::VkDescriptorType	descriptorType,
+													 vk::VkShaderStageFlags	stageFlags,
+													 ShaderInputInterface	shaderInterface,
+													 vk::VkImageViewType	viewType,
+													 deUint32				baseMipLevel,
+													 deUint32				baseArraySlice)
+	: SingleCmdRenderInstance	(context, isPrimaryCmdBuf, tcu::UVec2(RENDER_SIZE, RENDER_SIZE))
+	, m_descriptorType			(descriptorType)
+	, m_stageFlags				(stageFlags)
+	, m_shaderInterface			(shaderInterface)
+	, m_viewType				(viewType)
+	, m_baseMipLevel			(baseMipLevel)
+	, m_baseArraySlice			(baseArraySlice)
+	, m_descriptorSetLayout		(createDescriptorSetLayout(m_vki, m_device, m_descriptorType, m_shaderInterface, m_stageFlags))
+	, m_pipelineLayout			(createPipelineLayout(m_vki, m_device, *m_descriptorSetLayout))
+	, m_images					(m_vki, m_device, m_queueFamilyIndex, m_queue, m_allocator, m_descriptorType, m_shaderInterface, m_viewType, m_baseMipLevel, m_baseArraySlice)
+	, m_descriptorPool			(createDescriptorPool(m_vki, m_device, m_descriptorType, m_shaderInterface))
+	, m_descriptorSet			(createDescriptorSet(m_vki, m_device, m_descriptorType, m_shaderInterface, *m_descriptorSetLayout, *m_descriptorPool, m_images.getImageViewA(), m_images.getImageViewB()))
+{
+}
+
+vk::Move<vk::VkDescriptorSetLayout> ImageFetchRenderInstance::createDescriptorSetLayout (const vk::DeviceInterface&		vki,
+																						 vk::VkDevice					device,
+																						 vk::VkDescriptorType			descriptorType,
+																						 ShaderInputInterface			shaderInterface,
+																						 vk::VkShaderStageFlags			stageFlags)
+{
+	vk::DescriptorSetLayoutBuilder builder;
+
+	switch (shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.addSingleBinding(descriptorType, stageFlags);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.addSingleBinding(descriptorType, stageFlags);
+			builder.addSingleBinding(descriptorType, stageFlags);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.addArrayBinding(descriptorType, 2u, stageFlags);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	return builder.build(vki, device);
+}
+
+vk::Move<vk::VkPipelineLayout> ImageFetchRenderInstance::createPipelineLayout (const vk::DeviceInterface&	vki,
+																			   vk::VkDevice					device,
+																			   vk::VkDescriptorSetLayout	descriptorSetLayout)
+{
+	const vk::VkPipelineLayoutCreateInfo createInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineLayoutCreateFlags)0,
+		1,						// setLayoutCount
+		&descriptorSetLayout,	// pSetLayouts
+		0u,						// pushConstantRangeCount
+		DE_NULL,				// pPushConstantRanges
+	};
+	return vk::createPipelineLayout(vki, device, &createInfo);
+}
+
+vk::Move<vk::VkDescriptorPool> ImageFetchRenderInstance::createDescriptorPool (const vk::DeviceInterface&	vki,
+																			   vk::VkDevice					device,
+																			   vk::VkDescriptorType			descriptorType,
+																			   ShaderInputInterface			shaderInterface)
+{
+	return vk::DescriptorPoolBuilder()
+		.addType(descriptorType, getInterfaceNumResources(shaderInterface))
+		.build(vki, device, vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1);
+}
+
+vk::Move<vk::VkDescriptorSet> ImageFetchRenderInstance::createDescriptorSet (const vk::DeviceInterface&		vki,
+																			 vk::VkDevice					device,
+																			 vk::VkDescriptorType			descriptorType,
+																			 ShaderInputInterface			shaderInterface,
+																			 vk::VkDescriptorSetLayout		layout,
+																			 vk::VkDescriptorPool			pool,
+																			 vk::VkImageView				viewA,
+																			 vk::VkImageView				viewB)
+{
+	const vk::VkImageLayout					imageLayout		= getImageLayoutForDescriptorType(descriptorType);
+	const vk::VkDescriptorImageInfo			imageInfos[2]	=
+	{
+		makeDescriptorImageInfo(viewA, imageLayout),
+		makeDescriptorImageInfo(viewB, imageLayout),
+	};
+	const vk::VkDescriptorSetAllocateInfo	allocInfo		=
+	{
+		vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+		DE_NULL,
+		pool,
+		1u,
+		&layout
+	};
+
+	vk::Move<vk::VkDescriptorSet>			descriptorSet	= allocateDescriptorSet(vki, device, &allocInfo);
+	vk::DescriptorSetUpdateBuilder			builder;
+
+	switch (shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), descriptorType, &imageInfos[0]);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), descriptorType, &imageInfos[0]);
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), descriptorType, &imageInfos[1]);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.writeArray(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), descriptorType, 2u, imageInfos);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	builder.update(vki, device);
+	return descriptorSet;
+}
+
+void ImageFetchRenderInstance::logTestPlan (void) const
+{
+	std::ostringstream msg;
+
+	msg << "Rendering 2x2 grid.\n"
+		<< "Single descriptor set. Descriptor set contains "
+			<< ((m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR) ? "single" :
+			    (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS) ? "two" :
+			    (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY) ? "an array (size 2) of" :
+			    (const char*)DE_NULL)
+		<< " descriptor(s) of type " << vk::getDescriptorTypeName(m_descriptorType) << "\n"
+		<< "Image view type is " << vk::getImageViewTypeName(m_viewType) << "\n";
+
+	if (m_baseMipLevel)
+		msg << "Image view base mip level = " << m_baseMipLevel << "\n";
+	if (m_baseArraySlice)
+		msg << "Image view base array slice = " << m_baseArraySlice << "\n";
+
+	if (m_stageFlags == 0u)
+	{
+		msg << "Descriptors are not accessed in any shader stage.\n";
+	}
+	else
+	{
+		msg << "Color in each cell is fetched using the descriptor(s):\n";
+
+		for (int resultNdx = 0; resultNdx < 4; ++resultNdx)
+		{
+			msg << "Test sample " << resultNdx << ": fetching at position " << m_images.getFetchPos(m_viewType, m_baseMipLevel, m_baseArraySlice, resultNdx);
+
+			if (m_shaderInterface != SHADER_INPUT_SINGLE_DESCRIPTOR)
+			{
+				const int srcResourceNdx = (resultNdx % 2); // ABAB source
+				msg << " from descriptor " << srcResourceNdx;
+			}
+
+			msg << "\n";
+		}
+
+		msg << "Descriptors are accessed in {"
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_VERTEX_BIT) != 0)					? (" vertex")			: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) != 0)	? (" tess_control")		: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) != 0)	? (" tess_evaluation")	: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_GEOMETRY_BIT) != 0)				? (" geometry")			: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_FRAGMENT_BIT) != 0)				? (" fragment")			: (""))
+			<< " } stages.";
+	}
+
+	m_context.getTestContext().getLog()
+		<< tcu::TestLog::Message
+		<< msg.str()
+		<< tcu::TestLog::EndMessage;
+}
+
+vk::VkPipelineLayout ImageFetchRenderInstance::getPipelineLayout (void) const
+{
+	return *m_pipelineLayout;
+}
+
+void ImageFetchRenderInstance::writeDrawCmdBuffer (vk::VkCommandBuffer cmd) const
+{
+	m_vki.cmdBindDescriptorSets(cmd, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, getPipelineLayout(), 0, 1, &m_descriptorSet.get(), 0, DE_NULL);
+	m_vki.cmdDraw(cmd, 6 * 4, 1, 0, 0); // render four quads, each built from two separate triangles (6 vertices per quad)
+}
+
+tcu::TestStatus ImageFetchRenderInstance::verifyResultImage (const tcu::ConstPixelBufferAccess& result) const
+{
+	const tcu::Vec4		green		(0.0f, 1.0f, 0.0f, 1.0f);
+	const tcu::Vec4		yellow		(1.0f, 1.0f, 0.0f, 1.0f);
+	const bool			doFetch		= (m_stageFlags != 0u); // no active stages? Then don't fetch
+	const tcu::Vec4		sample0		= (!doFetch) ? (yellow)	: (m_images.fetchImageValue(0));
+	const tcu::Vec4		sample1		= (!doFetch) ? (green)	: (m_images.fetchImageValue(1));
+	const tcu::Vec4		sample2		= (!doFetch) ? (green)	: (m_images.fetchImageValue(2));
+	const tcu::Vec4		sample3		= (!doFetch) ? (yellow)	: (m_images.fetchImageValue(3));
+	tcu::Surface		reference	(m_targetSize.x(), m_targetSize.y());
+
+	drawQuadrantReferenceResult(reference.getAccess(), sample0, sample1, sample2, sample3);
+
+	if (!bilinearCompare(m_context.getTestContext().getLog(), "Compare", "Result comparison", reference.getAccess(), result, tcu::RGBA(1, 1, 1, 1), tcu::COMPARE_LOG_RESULT))
+		return tcu::TestStatus::fail("Image verification failed");
+	else
+		return tcu::TestStatus::pass("Pass");
+}
+
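+// Fetches four values from the image descriptor(s) in a compute shader and writes them to a result buffer for host-side verification.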
+class ImageFetchComputeInstance : public vkt::TestInstance
+{
+public:
+											ImageFetchComputeInstance	(vkt::Context&			context,
+																		 vk::VkDescriptorType	descriptorType,
+																		 ShaderInputInterface	shaderInterface,
+																		 vk::VkImageViewType	viewType,
+																		 deUint32				baseMipLevel,
+																		 deUint32				baseArraySlice);
+
+private:
+	vk::Move<vk::VkDescriptorSetLayout>		createDescriptorSetLayout	(void) const;
+	vk::Move<vk::VkDescriptorPool>			createDescriptorPool		(void) const;
+	vk::Move<vk::VkDescriptorSet>			createDescriptorSet			(vk::VkDescriptorPool pool, vk::VkDescriptorSetLayout layout) const;
+
+	tcu::TestStatus							iterate						(void);
+	void									logTestPlan					(void) const;
+	tcu::TestStatus							testResourceAccess			(void);
+
+	const vk::VkDescriptorType				m_descriptorType;
+	const ShaderInputInterface				m_shaderInterface;
+	const vk::VkImageViewType				m_viewType;
+	const deUint32							m_baseMipLevel;
+	const deUint32							m_baseArraySlice;
+
+	const vk::DeviceInterface&				m_vki;
+	const vk::VkDevice						m_device;
+	const vk::VkQueue						m_queue;
+	const deUint32							m_queueFamilyIndex;
+	vk::Allocator&							m_allocator;
+
+	const ComputeInstanceResultBuffer		m_result;
+	const ImageFetchInstanceImages			m_images;
+};
+
+ImageFetchComputeInstance::ImageFetchComputeInstance (vkt::Context&		context,
+													  vk::VkDescriptorType	descriptorType,
+													  ShaderInputInterface	shaderInterface,
+													  vk::VkImageViewType	viewType,
+													  deUint32				baseMipLevel,
+													  deUint32				baseArraySlice)
+	: vkt::TestInstance		(context)
+	, m_descriptorType		(descriptorType)
+	, m_shaderInterface		(shaderInterface)
+	, m_viewType			(viewType)
+	, m_baseMipLevel		(baseMipLevel)
+	, m_baseArraySlice		(baseArraySlice)
+	, m_vki					(context.getDeviceInterface())
+	, m_device				(context.getDevice())
+	, m_queue				(context.getUniversalQueue())
+	, m_queueFamilyIndex	(context.getUniversalQueueFamilyIndex())
+	, m_allocator			(context.getDefaultAllocator())
+	, m_result				(m_vki, m_device, m_allocator)
+	, m_images				(m_vki, m_device, m_queueFamilyIndex, m_queue, m_allocator, m_descriptorType, m_shaderInterface, m_viewType, m_baseMipLevel, m_baseArraySlice)
+{
+}
+
+vk::Move<vk::VkDescriptorSetLayout> ImageFetchComputeInstance::createDescriptorSetLayout (void) const
+{
+	vk::DescriptorSetLayoutBuilder builder;
+
+	builder.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+
+	switch (m_shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.addSingleBinding(m_descriptorType, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.addSingleBinding(m_descriptorType, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+			builder.addSingleBinding(m_descriptorType, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.addArrayBinding(m_descriptorType, 2u, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	return builder.build(m_vki, m_device);
+}
+
+vk::Move<vk::VkDescriptorPool> ImageFetchComputeInstance::createDescriptorPool (void) const
+{
+	return vk::DescriptorPoolBuilder()
+		.addType(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.addType(m_descriptorType, getInterfaceNumResources(m_shaderInterface))
+		.build(m_vki, m_device, vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1);
+}
+
+vk::Move<vk::VkDescriptorSet> ImageFetchComputeInstance::createDescriptorSet (vk::VkDescriptorPool pool, vk::VkDescriptorSetLayout layout) const
+{
+	const vk::VkDescriptorBufferInfo		resultInfo		= vk::makeDescriptorBufferInfo(m_result.getBuffer(), 0u, (vk::VkDeviceSize)ComputeInstanceResultBuffer::DATA_SIZE);
+	const vk::VkImageLayout					imageLayout		= getImageLayoutForDescriptorType(m_descriptorType);
+	const vk::VkDescriptorImageInfo			imageInfos[2]	=
+	{
+		makeDescriptorImageInfo(m_images.getImageViewA(), imageLayout),
+		makeDescriptorImageInfo(m_images.getImageViewB(), imageLayout),
+	};
+	const vk::VkDescriptorSetAllocateInfo	allocInfo		=
+	{
+		vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+		DE_NULL,
+		pool,
+		1u,
+		&layout
+	};
+
+	vk::Move<vk::VkDescriptorSet>			descriptorSet	= allocateDescriptorSet(m_vki, m_device, &allocInfo);
+	vk::DescriptorSetUpdateBuilder			builder;
+
+	// result
+	builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &resultInfo);
+
+	// images
+	switch (m_shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), m_descriptorType, &imageInfos[0]);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), m_descriptorType, &imageInfos[0]);
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(2u), m_descriptorType, &imageInfos[1]);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.writeArray(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), m_descriptorType, 2u, imageInfos);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	builder.update(m_vki, m_device);
+	return descriptorSet;
+}
+
+tcu::TestStatus ImageFetchComputeInstance::iterate (void)
+{
+	logTestPlan();
+	return testResourceAccess();
+}
+
+void ImageFetchComputeInstance::logTestPlan (void) const
+{
+	std::ostringstream msg;
+
+	msg << "Fetching 4 values from image in compute shader.\n"
+		<< "Single descriptor set. Descriptor set contains "
+			<< ((m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR) ? "single" :
+			    (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS) ? "two" :
+			    (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY) ? "an array (size 2) of" :
+			    (const char*)DE_NULL)
+		<< " descriptor(s) of type " << vk::getDescriptorTypeName(m_descriptorType) << "\n"
+		<< "Image view type is " << vk::getImageViewTypeName(m_viewType) << "\n";
+
+	if (m_baseMipLevel)
+		msg << "Image view base mip level = " << m_baseMipLevel << "\n";
+	if (m_baseArraySlice)
+		msg << "Image view base array slice = " << m_baseArraySlice << "\n";
+
+	for (int resultNdx = 0; resultNdx < 4; ++resultNdx)
+	{
+		msg << "Test sample " << resultNdx << ": fetch at position " << m_images.getFetchPos(m_viewType, m_baseMipLevel, m_baseArraySlice, resultNdx);
+
+		if (m_shaderInterface != SHADER_INPUT_SINGLE_DESCRIPTOR)
+		{
+			const int srcResourceNdx = (resultNdx % 2); // ABAB source
+			msg << " from descriptor " << srcResourceNdx;
+		}
+
+		msg << "\n";
+	}
+
+	m_context.getTestContext().getLog()
+		<< tcu::TestLog::Message
+		<< msg.str()
+		<< tcu::TestLog::EndMessage;
+}
+
+tcu::TestStatus ImageFetchComputeInstance::testResourceAccess (void)
+{
+	const vk::Unique<vk::VkDescriptorSetLayout>		descriptorSetLayout	(createDescriptorSetLayout());
+	const vk::Unique<vk::VkDescriptorPool>			descriptorPool		(createDescriptorPool());
+	const vk::Unique<vk::VkDescriptorSet>			descriptorSet		(createDescriptorSet(*descriptorPool, *descriptorSetLayout));
+	const ComputePipeline							pipeline			(m_vki, m_device, m_context.getBinaryCollection(), 1, &descriptorSetLayout.get());
+
+	const vk::VkDescriptorSet						descriptorSets[]	= { *descriptorSet };
+	const int										numDescriptorSets	= DE_LENGTH_OF_ARRAY(descriptorSets);
+	const deUint32* const							dynamicOffsets		= DE_NULL;
+	const int										numDynamicOffsets	= 0;
+	const vk::VkBufferMemoryBarrier* const			preBarriers			= DE_NULL;
+	const int										numPreBarriers		= 0;
+	const vk::VkBufferMemoryBarrier* const			postBarriers		= m_result.getResultReadBarrier();
+	const int										numPostBarriers		= 1;
+
+	const ComputeCommand							compute				(m_vki,
+																		 m_device,
+																		 pipeline.getPipeline(),
+																		 pipeline.getPipelineLayout(),
+																		 tcu::UVec3(4, 1, 1),
+																		 numDescriptorSets,	descriptorSets,
+																		 numDynamicOffsets,	dynamicOffsets,
+																		 numPreBarriers,	preBarriers,
+																		 numPostBarriers,	postBarriers);
+
+	tcu::Vec4										results[4];
+	bool											anyResultSet		= false;
+	bool											allResultsOk		= true;
+
+	compute.submitAndWait(m_queueFamilyIndex, m_queue);
+	m_result.readResultContentsTo(&results);
+
+	// verify
+	for (int resultNdx = 0; resultNdx < 4; ++resultNdx)
+	{
+		const tcu::Vec4	result				= results[resultNdx];
+		const tcu::Vec4	reference			= m_images.fetchImageValue(resultNdx);
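+		// tolerate one 8-bit quantization step (1/255) of error per channel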
+		const tcu::Vec4	conversionThreshold	= tcu::Vec4(1.0f / 255.0f);
+
+		if (result != tcu::Vec4(-1.0f))
+			anyResultSet = true;
+
+		if (tcu::boolAny(tcu::greaterThan(tcu::abs(result - reference), conversionThreshold)))
+		{
+			allResultsOk = false;
+
+			m_context.getTestContext().getLog()
+				<< tcu::TestLog::Message
+				<< "Test sample " << resultNdx << ": Expected " << reference << ", got " << result
+				<< tcu::TestLog::EndMessage;
+		}
+	}
+
+	// decide verdict
+	if (allResultsOk)
+		return tcu::TestStatus::pass("Pass");
+	else if (anyResultSet)
+		return tcu::TestStatus::fail("Invalid result values");
+	else
+	{
+		m_context.getTestContext().getLog()
+			<< tcu::TestLog::Message
+			<< "Result buffer was not written to."
+			<< tcu::TestLog::EndMessage;
+		return tcu::TestStatus::fail("Result buffer was not written to");
+	}
+}
+
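+// Source images, image views and samplers for sampler / combined image sampler cases; provides deterministic sample positions and reference sample values.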
+class ImageSampleInstanceImages : private ImageInstanceImages
+{
+public:
+										ImageSampleInstanceImages	(const vk::DeviceInterface&		vki,
+																	 vk::VkDevice					device,
+																	 deUint32						queueFamilyIndex,
+																	 vk::VkQueue					queue,
+																	 vk::Allocator&					allocator,
+																	 vk::VkDescriptorType			descriptorType,
+																	 ShaderInputInterface			shaderInterface,
+																	 vk::VkImageViewType			viewType,
+																	 deUint32						baseMipLevel,
+																	 deUint32						baseArraySlice,
+																	 bool							immutable);
+
+	static tcu::Vec4					getSamplePos				(vk::VkImageViewType viewType, deUint32 baseMipLevel, deUint32 baseArraySlice, int samplePosNdx);
+	tcu::Vec4							fetchSampleValue			(int samplePosNdx) const;
+
+	inline vk::VkImageView				getImageViewA				(void) const { return *m_imageViewA;	}
+	inline vk::VkImageView				getImageViewB				(void) const { return *m_imageViewB;	}
+	inline vk::VkSampler				getSamplerA					(void) const { return *m_samplerA;		}
+	inline vk::VkSampler				getSamplerB					(void) const { return *m_samplerB;		}
+	inline bool							isImmutable					(void) const { return m_isImmutable;	}
+
+private:
+	static int							getNumImages				(vk::VkDescriptorType descriptorType, ShaderInputInterface shaderInterface);
+	static tcu::Sampler					createRefSampler			(bool isFirst);
+	static vk::Move<vk::VkSampler>		createSampler				(const vk::DeviceInterface& vki, vk::VkDevice device, const tcu::Sampler& sampler, const tcu::TextureFormat& format);
+
+	static tcu::Texture1DArrayView		getRef1DView				(const tcu::TextureLevelPyramid& source, deUint32 baseMipLevel, deUint32 baseArraySlice, std::vector<tcu::ConstPixelBufferAccess>* levelStorage);
+	static tcu::Texture2DArrayView		getRef2DView				(const tcu::TextureLevelPyramid& source, deUint32 baseMipLevel, deUint32 baseArraySlice, std::vector<tcu::ConstPixelBufferAccess>* levelStorage);
+	static tcu::Texture3DView			getRef3DView				(const tcu::TextureLevelPyramid& source, deUint32 baseMipLevel, deUint32 baseArraySlice, std::vector<tcu::ConstPixelBufferAccess>* levelStorage);
+	static tcu::TextureCubeArrayView	getRefCubeView				(const tcu::TextureLevelPyramid& source, deUint32 baseMipLevel, deUint32 baseArraySlice, std::vector<tcu::ConstPixelBufferAccess>* levelStorage);
+
+	const vk::VkDescriptorType			m_descriptorType;
+	const ShaderInputInterface			m_shaderInterface;
+	const bool							m_isImmutable;
+
+	const tcu::Sampler					m_refSamplerA;
+	const tcu::Sampler					m_refSamplerB;
+	const vk::Unique<vk::VkSampler>		m_samplerA;
+	const vk::Unique<vk::VkSampler>		m_samplerB;
+};
+
+ImageSampleInstanceImages::ImageSampleInstanceImages (const vk::DeviceInterface&	vki,
+													  vk::VkDevice					device,
+													  deUint32						queueFamilyIndex,
+													  vk::VkQueue					queue,
+													  vk::Allocator&				allocator,
+													  vk::VkDescriptorType			descriptorType,
+													  ShaderInputInterface			shaderInterface,
+													  vk::VkImageViewType			viewType,
+													  deUint32						baseMipLevel,
+													  deUint32						baseArraySlice,
+													  bool							immutable)
+	: ImageInstanceImages	(vki,
+							 device,
+							 queueFamilyIndex,
+							 queue,
+							 allocator,
+							 descriptorType,
+							 viewType,
+							 getNumImages(descriptorType, shaderInterface),
+							 baseMipLevel,
+							 baseArraySlice)
+	, m_descriptorType		(descriptorType)
+	, m_shaderInterface		(shaderInterface)
+	, m_isImmutable			(immutable)
+	, m_refSamplerA			(createRefSampler(true))
+	, m_refSamplerB			(createRefSampler(false))
+	, m_samplerA			(createSampler(vki, device, m_refSamplerA, m_imageFormat))
+	, m_samplerB			((getInterfaceNumResources(m_shaderInterface) == 1u)
+								? vk::Move<vk::VkSampler>()
+								: createSampler(vki, device, m_refSamplerB, m_imageFormat))
+{
+}
+
+tcu::Vec4 ImageSampleInstanceImages::getSamplePos (vk::VkImageViewType viewType, deUint32 baseMipLevel, deUint32 baseArraySlice, int samplePosNdx)
+{
+	DE_ASSERT(de::inBounds(samplePosNdx, 0, 4));
+
+	const deUint32	imageSize	= (deUint32)IMAGE_SIZE >> baseMipLevel;
+	const deUint32	arraySize	= isImageViewTypeArray(viewType) ? ARRAY_SIZE - baseArraySlice : 1;
+
+	// choose arbitrary values that are not ambiguous with NEAREST filtering
+
+	switch (viewType)
+	{
+		case vk::VK_IMAGE_VIEW_TYPE_1D:
+		case vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+		case vk::VK_IMAGE_VIEW_TYPE_2D:
+		case vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+		case vk::VK_IMAGE_VIEW_TYPE_3D:
+		{
+			const tcu::Vec3	coords[4]	=
+			{
+				tcu::Vec3(0.75f,
+						  0.5f,
+						  (float)(12u % imageSize) + 0.25f),
+
+				tcu::Vec3((float)(23u % imageSize) + 0.25f,
+						  (float)(73u % imageSize) + 0.5f,
+						  (float)(16u % imageSize) + 0.5f + (float)imageSize),
+
+				tcu::Vec3(-(float)(43u % imageSize) + 0.25f,
+						  (float)(84u % imageSize) + 0.5f + (float)imageSize,
+						  (float)(117u % imageSize) + 0.75f),
+
+				tcu::Vec3((float)imageSize + 0.5f,
+						  (float)(75u % imageSize) + 0.25f,
+						  (float)(83u % imageSize) + 0.25f + (float)imageSize),
+			};
+			const deUint32	slices[4]	=
+			{
+				0u % arraySize,
+				4u % arraySize,
+				9u % arraySize,
+				2u % arraySize,
+			};
+
+			if (viewType == vk::VK_IMAGE_VIEW_TYPE_1D || viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY)
+				return tcu::Vec4(coords[samplePosNdx].x() / (float)imageSize,
+								 (float)slices[samplePosNdx],
+								 0.0f,
+								 0.0f);
+			else if (viewType == vk::VK_IMAGE_VIEW_TYPE_2D || viewType == vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY)
+				return tcu::Vec4(coords[samplePosNdx].x() / (float)imageSize,
+								 coords[samplePosNdx].y() / (float)imageSize,
+								 (float)slices[samplePosNdx],
+								 0.0f);
+			else if (viewType == vk::VK_IMAGE_VIEW_TYPE_3D)
+				return tcu::Vec4(coords[samplePosNdx].x() / (float)imageSize,
+								 coords[samplePosNdx].y() / (float)imageSize,
+								 coords[samplePosNdx].z() / (float)imageSize,
+								 0.0f);
+			else
+			{
+				DE_FATAL("Impossible");
+				return tcu::Vec4();
+			}
+		}
+
+		case vk::VK_IMAGE_VIEW_TYPE_CUBE:
+		case vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+		{
+			// \note these values are in [0, texSize]*3 space for convenience
+			const tcu::Vec3	coords[4]	=
+			{
+				tcu::Vec3(0.75f,
+						  0.5f,
+						  (float)imageSize),
+
+				tcu::Vec3((float)(13u % imageSize) + 0.25f,
+						  0.0f,
+						  (float)(16u % imageSize) + 0.5f),
+
+				tcu::Vec3(0.0f,
+						  (float)(84u % imageSize) + 0.5f,
+						  (float)(10u % imageSize) + 0.75f),
+
+				tcu::Vec3((float)imageSize,
+						  (float)(75u % imageSize) + 0.25f,
+						  (float)(83u % imageSize) + 0.75f),
+			};
+			const deUint32	slices[4]	=
+			{
+				1u % arraySize,
+				2u % arraySize,
+				9u % arraySize,
+				5u % arraySize,
+			};
+
+			DE_ASSERT(de::inRange(coords[samplePosNdx].x(), 0.0f, (float)imageSize));
+			DE_ASSERT(de::inRange(coords[samplePosNdx].y(), 0.0f, (float)imageSize));
+			DE_ASSERT(de::inRange(coords[samplePosNdx].z(), 0.0f, (float)imageSize));
+
+			// map to [-1, 1]*3 space
+			return tcu::Vec4(coords[samplePosNdx].x() / (float)imageSize * 2.0f - 1.0f,
+							 coords[samplePosNdx].y() / (float)imageSize * 2.0f - 1.0f,
+							 coords[samplePosNdx].z() / (float)imageSize * 2.0f - 1.0f,
+							 (float)slices[samplePosNdx]);
+		}
+
+		default:
+			DE_FATAL("Impossible");
+			return tcu::Vec4();
+	}
+}
+
+tcu::Vec4 ImageSampleInstanceImages::fetchSampleValue (int samplePosNdx) const
+{
+	DE_ASSERT(de::inBounds(samplePosNdx, 0, 4));
+
+	// texture order is ABAB
+	const bool									isSamplerCase	= (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER);
+	const tcu::TextureLevelPyramid&				sampleSrcA		= m_sourceImageA;
+	const tcu::TextureLevelPyramid&				sampleSrcB		= (m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR) ? (m_sourceImageA) : (m_sourceImageB);
+	const tcu::TextureLevelPyramid&				sampleSrc		= (isSamplerCase) ? (sampleSrcA) : ((samplePosNdx % 2) == 0) ? (sampleSrcA) : (sampleSrcB);
+
+	// sampler order is ABAB
+	const tcu::Sampler&							samplerA		= m_refSamplerA;
+	const tcu::Sampler&							samplerB		= (m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR) ? (m_refSamplerA) : (m_refSamplerB);
+	const tcu::Sampler&							sampler			= ((samplePosNdx % 2) == 0) ? (samplerA) : (samplerB);
+
+	const tcu::Vec4								samplePos		= getSamplePos(m_viewType, m_baseMipLevel, m_baseArraySlice, samplePosNdx);
+	const float									lod				= 0.0f;
+	std::vector<tcu::ConstPixelBufferAccess>	levelStorage;
+
+	switch (m_viewType)
+	{
+		case vk::VK_IMAGE_VIEW_TYPE_1D:
+		case vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY:	return getRef1DView(sampleSrc, m_baseMipLevel, m_baseArraySlice, &levelStorage).sample(sampler, samplePos.x(), samplePos.y(), lod);
+		case vk::VK_IMAGE_VIEW_TYPE_2D:
+		case vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY:	return getRef2DView(sampleSrc, m_baseMipLevel, m_baseArraySlice, &levelStorage).sample(sampler, samplePos.x(), samplePos.y(), samplePos.z(), lod);
+		case vk::VK_IMAGE_VIEW_TYPE_3D:			return getRef3DView(sampleSrc, m_baseMipLevel, m_baseArraySlice, &levelStorage).sample(sampler, samplePos.x(), samplePos.y(), samplePos.z(), lod);
+		case vk::VK_IMAGE_VIEW_TYPE_CUBE:
+		case vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:	return getRefCubeView(sampleSrc, m_baseMipLevel, m_baseArraySlice, &levelStorage).sample(sampler, samplePos.x(), samplePos.y(), samplePos.z(), samplePos.w(), lod);
+
+		default:
+		{
+			DE_FATAL("Impossible");
+			return tcu::Vec4();
+		}
+	}
+}
+
+int ImageSampleInstanceImages::getNumImages (vk::VkDescriptorType descriptorType, ShaderInputInterface shaderInterface)
+{
+	// If we are testing separate samplers, just one image is enough
+	if (descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER)
+		return 1;
+	else if (descriptorType == vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
+	{
+		// combined: numImages == numSamplers
+		return getInterfaceNumResources(shaderInterface);
+	}
+	else
+	{
+		DE_FATAL("Impossible");
+		return 0;
+	}
+}
+
+tcu::Sampler ImageSampleInstanceImages::createRefSampler (bool isFirst)
+{
+	if (isFirst)
+	{
+		// linear, wrapping
+		return tcu::Sampler(tcu::Sampler::REPEAT_GL, tcu::Sampler::REPEAT_GL, tcu::Sampler::REPEAT_GL, tcu::Sampler::LINEAR, tcu::Sampler::LINEAR);
+	}
+	else
+	{
+		// nearest, clamping
+		return tcu::Sampler(tcu::Sampler::CLAMP_TO_EDGE, tcu::Sampler::CLAMP_TO_EDGE, tcu::Sampler::CLAMP_TO_EDGE, tcu::Sampler::NEAREST, tcu::Sampler::NEAREST);
+	}
+}
+
+vk::Move<vk::VkSampler> ImageSampleInstanceImages::createSampler (const vk::DeviceInterface& vki, vk::VkDevice device, const tcu::Sampler& sampler, const tcu::TextureFormat& format)
+{
+	const vk::VkSamplerCreateInfo	createInfo		= vk::mapSampler(sampler, format);
+
+	return vk::createSampler(vki, device, &createInfo);
+}
+
+tcu::Texture1DArrayView ImageSampleInstanceImages::getRef1DView (const tcu::TextureLevelPyramid& source, deUint32 baseMipLevel, deUint32 baseArraySlice, std::vector<tcu::ConstPixelBufferAccess>* levelStorage)
+{
+	DE_ASSERT(levelStorage->empty());
+
+	const deUint32 numSlices = (deUint32)source.getLevel(0).getHeight();
+	const deUint32 numLevels = (deUint32)source.getNumLevels();
+
+	// cut pyramid from baseMipLevel
+	for (deUint32 level = baseMipLevel; level < numLevels; ++level)
+	{
+		// cut levels from baseArraySlice
+		const tcu::ConstPixelBufferAccess wholeLevel	= source.getLevel(level);
+		const tcu::ConstPixelBufferAccess cutLevel		= tcu::getSubregion(wholeLevel, 0, baseArraySlice, wholeLevel.getWidth(), numSlices - baseArraySlice);
+		levelStorage->push_back(cutLevel);
+	}
+
+	return tcu::Texture1DArrayView((int)levelStorage->size(), &levelStorage->front());
+}
+
+tcu::Texture2DArrayView ImageSampleInstanceImages::getRef2DView (const tcu::TextureLevelPyramid& source, deUint32 baseMipLevel, deUint32 baseArraySlice, std::vector<tcu::ConstPixelBufferAccess>* levelStorage)
+{
+	DE_ASSERT(levelStorage->empty());
+
+	const deUint32 numSlices = (deUint32)source.getLevel(0).getDepth();
+	const deUint32 numLevels = (deUint32)source.getNumLevels();
+
+	// cut pyramid from baseMipLevel
+	for (deUint32 level = baseMipLevel; level < numLevels; ++level)
+	{
+		// cut levels from baseArraySlice
+		const tcu::ConstPixelBufferAccess wholeLevel	= source.getLevel(level);
+		const tcu::ConstPixelBufferAccess cutLevel		= tcu::getSubregion(wholeLevel, 0, 0, baseArraySlice, wholeLevel.getWidth(), wholeLevel.getHeight(), numSlices - baseArraySlice);
+		levelStorage->push_back(cutLevel);
+	}
+
+	return tcu::Texture2DArrayView((int)levelStorage->size(), &levelStorage->front());
+}
+
+tcu::Texture3DView ImageSampleInstanceImages::getRef3DView (const tcu::TextureLevelPyramid& source, deUint32 baseMipLevel, deUint32 baseArraySlice, std::vector<tcu::ConstPixelBufferAccess>* levelStorage)
+{
+	DE_ASSERT(levelStorage->empty());
+	DE_ASSERT(baseArraySlice == 0);
+	DE_UNREF(baseArraySlice);
+
+	const deUint32 numLevels = (deUint32)source.getNumLevels();
+
+	// cut pyramid from baseMipLevel
+	for (deUint32 level = baseMipLevel; level < numLevels; ++level)
+		levelStorage->push_back(source.getLevel(level));
+
+	return tcu::Texture3DView((int)levelStorage->size(), &levelStorage->front());
+}
+
+tcu::TextureCubeArrayView ImageSampleInstanceImages::getRefCubeView (const tcu::TextureLevelPyramid& source, deUint32 baseMipLevel, deUint32 baseArraySlice, std::vector<tcu::ConstPixelBufferAccess>* levelStorage)
+{
+	DE_ASSERT(levelStorage->empty());
+
+	const deUint32 numSlices = (deUint32)source.getLevel(0).getDepth() / 6;
+	const deUint32 numLevels = (deUint32)source.getNumLevels();
+
+	// cut pyramid from baseMipLevel
+	for (deUint32 level = baseMipLevel; level < numLevels; ++level)
+	{
+		// cut levels from baseArraySlice
+		const tcu::ConstPixelBufferAccess wholeLevel	= source.getLevel(level);
+		const tcu::ConstPixelBufferAccess cutLevel		= tcu::getSubregion(wholeLevel, 0, 0, baseArraySlice * 6, wholeLevel.getWidth(), wholeLevel.getHeight(), (numSlices - baseArraySlice) * 6);
+		levelStorage->push_back(cutLevel);
+	}
+
+	return tcu::TextureCubeArrayView((int)levelStorage->size(), &levelStorage->front());
+}
+
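+// Renders a 2x2 grid; the color of each quadrant is sampled through the sampler or combined image sampler descriptor(s) and compared against the reference values.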
+class ImageSampleRenderInstance : public SingleCmdRenderInstance
+{
+public:
+													ImageSampleRenderInstance		(vkt::Context&			context,
+																					 bool					isPrimaryCmdBuf,
+																					 vk::VkDescriptorType	descriptorType,
+																					 vk::VkShaderStageFlags	stageFlags,
+																					 ShaderInputInterface	shaderInterface,
+																					 vk::VkImageViewType	viewType,
+																					 deUint32				baseMipLevel,
+																					 deUint32				baseArraySlice,
+																					 bool					isImmutable);
+
+private:
+	static vk::Move<vk::VkDescriptorSetLayout>		createDescriptorSetLayout		(const vk::DeviceInterface&			vki,
+																					 vk::VkDevice						device,
+																					 vk::VkDescriptorType				descriptorType,
+																					 ShaderInputInterface				shaderInterface,
+																					 vk::VkShaderStageFlags				stageFlags,
+																					 const ImageSampleInstanceImages&	images);
+
+	static vk::Move<vk::VkPipelineLayout>			createPipelineLayout			(const vk::DeviceInterface&	vki,
+																					 vk::VkDevice				device,
+																					 vk::VkDescriptorSetLayout	descriptorSetLayout);
+
+	static vk::Move<vk::VkDescriptorPool>			createDescriptorPool			(const vk::DeviceInterface&	vki,
+																					 vk::VkDevice				device,
+																					 vk::VkDescriptorType		descriptorType,
+																					 ShaderInputInterface		shaderInterface);
+
+	static vk::Move<vk::VkDescriptorSet>			createDescriptorSet				(const vk::DeviceInterface&			vki,
+																					 vk::VkDevice						device,
+																					 vk::VkDescriptorType				descriptorType,
+																					 ShaderInputInterface				shaderInterface,
+																					 vk::VkDescriptorSetLayout			layout,
+																					 vk::VkDescriptorPool				pool,
+																					 bool								isImmutable,
+																					 const ImageSampleInstanceImages&	images);
+
+	static void										writeSamplerDescriptorSet		(const vk::DeviceInterface&			vki,
+																					 vk::VkDevice						device,
+																					 ShaderInputInterface				shaderInterface,
+																					 bool								isImmutable,
+																					 const ImageSampleInstanceImages&	images,
+																					 vk::VkDescriptorSet				descriptorSet);
+
+	static void										writeImageSamplerDescriptorSet	(const vk::DeviceInterface&			vki,
+																					 vk::VkDevice						device,
+																					 ShaderInputInterface				shaderInterface,
+																					 bool								isImmutable,
+																					 const ImageSampleInstanceImages&	images,
+																					 vk::VkDescriptorSet				descriptorSet);
+
+	void											logTestPlan						(void) const;
+	vk::VkPipelineLayout							getPipelineLayout				(void) const;
+	void											writeDrawCmdBuffer				(vk::VkCommandBuffer cmd) const;
+	tcu::TestStatus									verifyResultImage				(const tcu::ConstPixelBufferAccess& result) const;
+
+	enum
+	{
+		RENDER_SIZE = 128,
+	};
+
+	const vk::VkDescriptorType						m_descriptorType;
+	const vk::VkShaderStageFlags					m_stageFlags;
+	const ShaderInputInterface						m_shaderInterface;
+	const vk::VkImageViewType						m_viewType;
+	const deUint32									m_baseMipLevel;
+	const deUint32									m_baseArraySlice;
+
+	const ImageSampleInstanceImages					m_images;
+	const vk::Unique<vk::VkDescriptorSetLayout>		m_descriptorSetLayout;
+	const vk::Unique<vk::VkPipelineLayout>			m_pipelineLayout;
+	const vk::Unique<vk::VkDescriptorPool>			m_descriptorPool;
+	const vk::Unique<vk::VkDescriptorSet>			m_descriptorSet;
+};
+
+ImageSampleRenderInstance::ImageSampleRenderInstance (vkt::Context&				context,
+													  bool						isPrimaryCmdBuf,
+													  vk::VkDescriptorType		descriptorType,
+													  vk::VkShaderStageFlags	stageFlags,
+													  ShaderInputInterface		shaderInterface,
+													  vk::VkImageViewType		viewType,
+													  deUint32					baseMipLevel,
+													  deUint32					baseArraySlice,
+													  bool						isImmutable)
+	: SingleCmdRenderInstance	(context, isPrimaryCmdBuf, tcu::UVec2(RENDER_SIZE, RENDER_SIZE))
+	, m_descriptorType			(descriptorType)
+	, m_stageFlags				(stageFlags)
+	, m_shaderInterface			(shaderInterface)
+	, m_viewType				(viewType)
+	, m_baseMipLevel			(baseMipLevel)
+	, m_baseArraySlice			(baseArraySlice)
+	, m_images					(m_vki, m_device, m_queueFamilyIndex, m_queue, m_allocator, m_descriptorType, m_shaderInterface, m_viewType, m_baseMipLevel, m_baseArraySlice, isImmutable)
+	, m_descriptorSetLayout		(createDescriptorSetLayout(m_vki, m_device, m_descriptorType, m_shaderInterface, m_stageFlags, m_images))
+	, m_pipelineLayout			(createPipelineLayout(m_vki, m_device, *m_descriptorSetLayout))
+	, m_descriptorPool			(createDescriptorPool(m_vki, m_device, m_descriptorType, m_shaderInterface))
+	, m_descriptorSet			(createDescriptorSet(m_vki, m_device, m_descriptorType, m_shaderInterface, *m_descriptorSetLayout, *m_descriptorPool, isImmutable, m_images))
+{
+}
+
+vk::Move<vk::VkDescriptorSetLayout> ImageSampleRenderInstance::createDescriptorSetLayout (const vk::DeviceInterface&		vki,
+																						  vk::VkDevice						device,
+																						  vk::VkDescriptorType				descriptorType,
+																						  ShaderInputInterface				shaderInterface,
+																						  vk::VkShaderStageFlags			stageFlags,
+																						  const ImageSampleInstanceImages&	images)
+{
+	const vk::VkSampler				samplers[2] =
+	{
+		images.getSamplerA(),
+		images.getSamplerB(),
+	};
+
+	vk::DescriptorSetLayoutBuilder	builder;
+
+	// with samplers, separate texture at binding 0
+	if (descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER)
+		builder.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, stageFlags);
+
+	// (combined) samplers follow
+	switch (shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.addSingleSamplerBinding(descriptorType, stageFlags, (images.isImmutable()) ? (&samplers[0]) : (DE_NULL));
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.addSingleSamplerBinding(descriptorType, stageFlags, (images.isImmutable()) ? (&samplers[0]) : (DE_NULL));
+			builder.addSingleSamplerBinding(descriptorType, stageFlags, (images.isImmutable()) ? (&samplers[1]) : (DE_NULL));
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.addArraySamplerBinding(descriptorType, 2u, stageFlags, (images.isImmutable()) ? (samplers) : (DE_NULL));
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	return builder.build(vki, device);
+}
+
+vk::Move<vk::VkPipelineLayout> ImageSampleRenderInstance::createPipelineLayout (const vk::DeviceInterface&	vki,
+																				vk::VkDevice				device,
+																				vk::VkDescriptorSetLayout	descriptorSetLayout)
+{
+	const vk::VkPipelineLayoutCreateInfo createInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineLayoutCreateFlags)0,
+		1,						// setLayoutCount
+		&descriptorSetLayout,	// pSetLayouts
+		0u,						// pushConstantRangeCount
+		DE_NULL,				// pPushConstantRanges
+	};
+	return vk::createPipelineLayout(vki, device, &createInfo);
+}
+
+vk::Move<vk::VkDescriptorPool> ImageSampleRenderInstance::createDescriptorPool (const vk::DeviceInterface&	vki,
+																				vk::VkDevice				device,
+																				vk::VkDescriptorType		descriptorType,
+																				ShaderInputInterface		shaderInterface)
+{
+	vk::DescriptorPoolBuilder builder;
+
+	if (descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER)
+	{
+		// separate samplers need image to sample
+		builder.addType(vk::VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
+
+		// also need the sampler(s) to use, regardless of whether they are immutable or not
+		builder.addType(vk::VK_DESCRIPTOR_TYPE_SAMPLER, getInterfaceNumResources(shaderInterface));
+	}
+	else if (descriptorType == vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
+	{
+		// combined image samplers
+		builder.addType(vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, getInterfaceNumResources(shaderInterface));
+	}
+	else
+		DE_FATAL("Impossible");
+
+	return builder.build(vki, device, vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1);
+}
+
+vk::Move<vk::VkDescriptorSet> ImageSampleRenderInstance::createDescriptorSet (const vk::DeviceInterface&		vki,
+																			  vk::VkDevice						device,
+																			  vk::VkDescriptorType				descriptorType,
+																			  ShaderInputInterface				shaderInterface,
+																			  vk::VkDescriptorSetLayout			layout,
+																			  vk::VkDescriptorPool				pool,
+																			  bool								isImmutable,
+																			  const ImageSampleInstanceImages&	images)
+{
+	const vk::VkDescriptorSetAllocateInfo	allocInfo		=
+	{
+		vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+		DE_NULL,
+		pool,
+		1u,
+		&layout
+	};
+
+	vk::Move<vk::VkDescriptorSet>			descriptorSet	= allocateDescriptorSet(vki, device, &allocInfo);
+
+	if (descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER)
+		writeSamplerDescriptorSet(vki, device, shaderInterface, isImmutable, images, *descriptorSet);
+	else if (descriptorType == vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
+		writeImageSamplerDescriptorSet(vki, device, shaderInterface, isImmutable, images, *descriptorSet);
+	else
+		DE_FATAL("Impossible");
+
+	return descriptorSet;
+}
+
+void ImageSampleRenderInstance::writeSamplerDescriptorSet (const vk::DeviceInterface&		vki,
+														   vk::VkDevice						device,
+														   ShaderInputInterface				shaderInterface,
+														   bool								isImmutable,
+														   const ImageSampleInstanceImages&	images,
+														   vk::VkDescriptorSet				descriptorSet)
+{
+	const vk::VkDescriptorImageInfo		imageInfo			= makeDescriptorImageInfo(images.getImageViewA(), vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
+	const vk::VkDescriptorImageInfo		samplersInfos[2]	=
+	{
+		makeDescriptorImageInfo(images.getSamplerA()),
+		makeDescriptorImageInfo(images.getSamplerB()),
+	};
+
+	vk::DescriptorSetUpdateBuilder		builder;
+
+	// stand alone texture
+	builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), vk::VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, &imageInfo);
+
+	// samplers
+	if (!isImmutable)
+	{
+		switch (shaderInterface)
+		{
+			case SHADER_INPUT_SINGLE_DESCRIPTOR:
+				builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), vk::VK_DESCRIPTOR_TYPE_SAMPLER, &samplersInfos[0]);
+				break;
+
+			case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+				builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), vk::VK_DESCRIPTOR_TYPE_SAMPLER, &samplersInfos[0]);
+				builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(2u), vk::VK_DESCRIPTOR_TYPE_SAMPLER, &samplersInfos[1]);
+				break;
+
+			case SHADER_INPUT_DESCRIPTOR_ARRAY:
+				builder.writeArray(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), vk::VK_DESCRIPTOR_TYPE_SAMPLER, 2u, samplersInfos);
+				break;
+
+			default:
+				DE_FATAL("Impossible");
+		}
+	}
+
+	builder.update(vki, device);
+}
+
+void ImageSampleRenderInstance::writeImageSamplerDescriptorSet (const vk::DeviceInterface&			vki,
+																vk::VkDevice						device,
+																ShaderInputInterface				shaderInterface,
+																bool								isImmutable,
+																const ImageSampleInstanceImages&	images,
+																vk::VkDescriptorSet					descriptorSet)
+{
+	const vk::VkSampler					samplers[2]			=
+	{
+		(isImmutable) ? (0) : (images.getSamplerA()),
+		(isImmutable) ? (0) : (images.getSamplerB()),
+	};
+	const vk::VkDescriptorImageInfo		imageSamplers[2]	=
+	{
+		vk::makeDescriptorImageInfo(samplers[0], images.getImageViewA(), vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL),
+		vk::makeDescriptorImageInfo(samplers[1], images.getImageViewB(), vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL),
+	};
+
+	vk::DescriptorSetUpdateBuilder		builder;
+
+	// combined image samplers
+	switch (shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &imageSamplers[0]);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &imageSamplers[0]);
+			builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &imageSamplers[1]);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.writeArray(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 2u, imageSamplers);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	builder.update(vki, device);
+}
+
+void ImageSampleRenderInstance::logTestPlan (void) const
+{
+	std::ostringstream msg;
+
+	msg << "Rendering 2x2 grid.\n";
+
+	if (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER)
+	{
+		msg << "Single descriptor set. Descriptor set contains "
+			<< ((m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR) ? "single" :
+			    (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS) ? "two" :
+			    (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY) ? "an array (size 2) of" :
+			    (const char*)DE_NULL)
+			<< " VK_DESCRIPTOR_TYPE_SAMPLER descriptor(s) and a single texture.\n";
+	}
+	else if (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
+	{
+		msg << "Single descriptor set. Descriptor set contains "
+			<< ((m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR) ? "single" :
+			    (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS) ? "two" :
+			    (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY) ? "an array (size 2) of" :
+			    (const char*)DE_NULL)
+			<< " VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER descriptor(s).\n";
+	}
+	else
+		DE_FATAL("Impossible");
+
+	msg << "Image view type is " << vk::getImageViewTypeName(m_viewType) << "\n";
+
+	if (m_baseMipLevel)
+		msg << "Image view base mip level = " << m_baseMipLevel << "\n";
+	if (m_baseArraySlice)
+		msg << "Image view base array slice = " << m_baseArraySlice << "\n";
+
+	if (m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR)
+		msg << "Sampler mode is LINEAR, with WRAP\n";
+	else
+		msg << "Sampler 0 mode is LINEAR, with WRAP\nSampler 1 mode is NEAREST with CLAMP\n";
+
+	if (m_stageFlags == 0u)
+	{
+		msg << "Descriptors are not accessed in any shader stage.\n";
+	}
+	else
+	{
+		msg << "Color in each cell is fetched using the descriptor(s):\n";
+
+		for (int resultNdx = 0; resultNdx < 4; ++resultNdx)
+		{
+			msg << "Test sample " << resultNdx << ": sample at position " << m_images.getSamplePos(m_viewType, m_baseMipLevel, m_baseArraySlice, resultNdx);
+
+			if (m_shaderInterface != SHADER_INPUT_SINGLE_DESCRIPTOR)
+			{
+				const int srcResourceNdx = (resultNdx % 2); // ABAB source
+
+				if (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER)
+					msg << " using sampler " << srcResourceNdx;
+				else if (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
+					msg << " from combined image sampler " << srcResourceNdx;
+				else
+					DE_FATAL("Impossible");
+			}
+			msg << "\n";
+		}
+
+		msg << "Descriptors are accessed in {"
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_VERTEX_BIT) != 0)					? (" vertex")			: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) != 0)	? (" tess_control")		: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) != 0)	? (" tess_evaluation")	: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_GEOMETRY_BIT) != 0)				? (" geometry")			: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_FRAGMENT_BIT) != 0)				? (" fragment")			: (""))
+			<< " } stages.";
+	}
+
+	m_context.getTestContext().getLog()
+		<< tcu::TestLog::Message
+		<< msg.str()
+		<< tcu::TestLog::EndMessage;
+}
+
+vk::VkPipelineLayout ImageSampleRenderInstance::getPipelineLayout (void) const
+{
+	return *m_pipelineLayout;
+}
+
+void ImageSampleRenderInstance::writeDrawCmdBuffer (vk::VkCommandBuffer cmd) const
+{
+	m_vki.cmdBindDescriptorSets(cmd, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, getPipelineLayout(), 0u, 1u, &m_descriptorSet.get(), 0u, DE_NULL);
+	m_vki.cmdDraw(cmd, 6u * 4u, 1u, 0u, 0u); // render four quads (two separate triangles each)
+}
+
+tcu::TestStatus ImageSampleRenderInstance::verifyResultImage (const tcu::ConstPixelBufferAccess& result) const
+{
+	const tcu::Vec4		green		(0.0f, 1.0f, 0.0f, 1.0f);
+	const tcu::Vec4		yellow		(1.0f, 1.0f, 0.0f, 1.0f);
+	const bool			doFetch		= (m_stageFlags != 0u); // no active stages? Then don't fetch
+	const tcu::Vec4		sample0		= (!doFetch) ? (yellow)	: (m_images.fetchSampleValue(0));
+	const tcu::Vec4		sample1		= (!doFetch) ? (green)	: (m_images.fetchSampleValue(1));
+	const tcu::Vec4		sample2		= (!doFetch) ? (green)	: (m_images.fetchSampleValue(2));
+	const tcu::Vec4		sample3		= (!doFetch) ? (yellow)	: (m_images.fetchSampleValue(3));
+	const tcu::RGBA		threshold	= tcu::RGBA(8, 8, 8, 8); // source image is high-frequency so the threshold is quite large to tolerate sampling errors
+	tcu::Surface		reference	(m_targetSize.x(), m_targetSize.y());
+
+	drawQuadrantReferenceResult(reference.getAccess(), sample0, sample1, sample2, sample3);
+
+	if (!bilinearCompare(m_context.getTestContext().getLog(), "Compare", "Result comparison", reference.getAccess(), result, threshold, tcu::COMPARE_LOG_RESULT))
+		return tcu::TestStatus::fail("Image verification failed");
+	else
+		return tcu::TestStatus::pass("Pass");
+}
+
+class ImageSampleComputeInstance : public vkt::TestInstance
+{
+public:
+											ImageSampleComputeInstance		(vkt::Context&			context,
+																			 vk::VkDescriptorType	descriptorType,
+																			 ShaderInputInterface	shaderInterface,
+																			 vk::VkImageViewType	viewType,
+																			 deUint32				baseMipLevel,
+																			 deUint32				baseArraySlice,
+																			 bool					isImmutableSampler);
+
+private:
+	vk::Move<vk::VkDescriptorSetLayout>		createDescriptorSetLayout		(void) const;
+	vk::Move<vk::VkDescriptorPool>			createDescriptorPool			(void) const;
+	vk::Move<vk::VkDescriptorSet>			createDescriptorSet				(vk::VkDescriptorPool pool, vk::VkDescriptorSetLayout layout) const;
+	void									writeImageSamplerDescriptorSet	(vk::VkDescriptorSet descriptorSet) const;
+	void									writeSamplerDescriptorSet		(vk::VkDescriptorSet descriptorSet) const;
+
+	tcu::TestStatus							iterate							(void);
+	void									logTestPlan						(void) const;
+	tcu::TestStatus							testResourceAccess				(void);
+
+	const vk::VkDescriptorType				m_descriptorType;
+	const ShaderInputInterface				m_shaderInterface;
+	const vk::VkImageViewType				m_viewType;
+	const deUint32							m_baseMipLevel;
+	const deUint32							m_baseArraySlice;
+	const bool								m_isImmutableSampler;
+
+	const vk::DeviceInterface&				m_vki;
+	const vk::VkDevice						m_device;
+	const vk::VkQueue						m_queue;
+	const deUint32							m_queueFamilyIndex;
+	vk::Allocator&							m_allocator;
+
+	const ComputeInstanceResultBuffer		m_result;
+	const ImageSampleInstanceImages			m_images;
+};
+
+ImageSampleComputeInstance::ImageSampleComputeInstance (Context&				context,
+														vk::VkDescriptorType	descriptorType,
+														ShaderInputInterface	shaderInterface,
+														vk::VkImageViewType		viewType,
+														deUint32				baseMipLevel,
+														deUint32				baseArraySlice,
+														bool					isImmutableSampler)
+	: vkt::TestInstance		(context)
+	, m_descriptorType		(descriptorType)
+	, m_shaderInterface		(shaderInterface)
+	, m_viewType			(viewType)
+	, m_baseMipLevel		(baseMipLevel)
+	, m_baseArraySlice		(baseArraySlice)
+	, m_isImmutableSampler	(isImmutableSampler)
+	, m_vki					(context.getDeviceInterface())
+	, m_device				(context.getDevice())
+	, m_queue				(context.getUniversalQueue())
+	, m_queueFamilyIndex	(context.getUniversalQueueFamilyIndex())
+	, m_allocator			(context.getDefaultAllocator())
+	, m_result				(m_vki, m_device, m_allocator)
+	, m_images				(m_vki, m_device, m_queueFamilyIndex, m_queue, m_allocator, m_descriptorType, m_shaderInterface, m_viewType, m_baseMipLevel, m_baseArraySlice, isImmutableSampler)
+{
+}
+
+vk::Move<vk::VkDescriptorSetLayout> ImageSampleComputeInstance::createDescriptorSetLayout (void) const
+{
+	const vk::VkSampler				samplers[2] =
+	{
+		m_images.getSamplerA(),
+		m_images.getSamplerB(),
+	};
+
+	vk::DescriptorSetLayoutBuilder	builder;
+
+	// result buffer
+	builder.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+
+	// with separate samplers, the shared sampled texture is bound right after the result buffer
+	if (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER)
+		builder.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+
+	// the (combined image) samplers follow
+	switch (m_shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.addSingleSamplerBinding(m_descriptorType, vk::VK_SHADER_STAGE_COMPUTE_BIT, (m_images.isImmutable()) ? (&samplers[0]) : (DE_NULL));
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.addSingleSamplerBinding(m_descriptorType, vk::VK_SHADER_STAGE_COMPUTE_BIT, (m_images.isImmutable()) ? (&samplers[0]) : (DE_NULL));
+			builder.addSingleSamplerBinding(m_descriptorType, vk::VK_SHADER_STAGE_COMPUTE_BIT, (m_images.isImmutable()) ? (&samplers[1]) : (DE_NULL));
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.addArraySamplerBinding(m_descriptorType, 2u, vk::VK_SHADER_STAGE_COMPUTE_BIT, (m_images.isImmutable()) ? (samplers) : (DE_NULL));
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	return builder.build(m_vki, m_device);
+}
+
+vk::Move<vk::VkDescriptorPool> ImageSampleComputeInstance::createDescriptorPool (void) const
+{
+	vk::DescriptorPoolBuilder builder;
+
+	builder.addType(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
+	builder.addType(m_descriptorType, getInterfaceNumResources(m_shaderInterface));
+
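+	// separate-sampler cases additionally consume one sampled image descriptor for the shared texture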
+	if (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER)
+		builder.addType(vk::VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
+
+	return builder.build(m_vki, m_device, vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1);
+}
+
+vk::Move<vk::VkDescriptorSet> ImageSampleComputeInstance::createDescriptorSet (vk::VkDescriptorPool pool, vk::VkDescriptorSetLayout layout) const
+{
+	const vk::VkDescriptorSetAllocateInfo	allocInfo		=
+	{
+		vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+		DE_NULL,
+		pool,
+		1u,
+		&layout
+	};
+
+	vk::Move<vk::VkDescriptorSet>			descriptorSet	= allocateDescriptorSet(m_vki, m_device, &allocInfo);
+
+	if (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER)
+		writeSamplerDescriptorSet(*descriptorSet);
+	else if (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
+		writeImageSamplerDescriptorSet(*descriptorSet);
+	else
+		DE_FATAL("Impossible");
+
+	return descriptorSet;
+}
+
+void ImageSampleComputeInstance::writeSamplerDescriptorSet (vk::VkDescriptorSet descriptorSet) const
+{
+	const vk::VkDescriptorBufferInfo	resultInfo			= vk::makeDescriptorBufferInfo(m_result.getBuffer(), 0u, (vk::VkDeviceSize)ComputeInstanceResultBuffer::DATA_SIZE);
+	const vk::VkDescriptorImageInfo		imageInfo			= makeDescriptorImageInfo(m_images.getImageViewA(), vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
+	const vk::VkDescriptorImageInfo		samplersInfos[2]	=
+	{
+		makeDescriptorImageInfo(m_images.getSamplerA()),
+		makeDescriptorImageInfo(m_images.getSamplerB()),
+	};
+
+	vk::DescriptorSetUpdateBuilder		builder;
+
+	// result
+	builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &resultInfo);
+
+	// stand-alone texture
+	builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), vk::VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, &imageInfo);
+
+	// samplers
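+	// immutable samplers are supplied at set layout creation, so sampler descriptors are written only in the mutable case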
+	if (!m_isImmutableSampler)
+	{
+		switch (m_shaderInterface)
+		{
+			case SHADER_INPUT_SINGLE_DESCRIPTOR:
+				builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(2u), vk::VK_DESCRIPTOR_TYPE_SAMPLER, &samplersInfos[0]);
+				break;
+
+			case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+				builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(2u), vk::VK_DESCRIPTOR_TYPE_SAMPLER, &samplersInfos[0]);
+				builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(3u), vk::VK_DESCRIPTOR_TYPE_SAMPLER, &samplersInfos[1]);
+				break;
+
+			case SHADER_INPUT_DESCRIPTOR_ARRAY:
+				builder.writeArray(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(2u), vk::VK_DESCRIPTOR_TYPE_SAMPLER, 2u, samplersInfos);
+				break;
+
+			default:
+				DE_FATAL("Impossible");
+		}
+	}
+
+	builder.update(m_vki, m_device);
+}
+
+void ImageSampleComputeInstance::writeImageSamplerDescriptorSet (vk::VkDescriptorSet descriptorSet) const
+{
+	const vk::VkDescriptorBufferInfo	resultInfo			= vk::makeDescriptorBufferInfo(m_result.getBuffer(), 0u, (vk::VkDeviceSize)ComputeInstanceResultBuffer::DATA_SIZE);
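+	// with immutable samplers the handles live in the set layout, so the image infos below use null sampler handles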
+	const vk::VkSampler					samplers[2]			=
+	{
+		(m_isImmutableSampler) ? (0) : (m_images.getSamplerA()),
+		(m_isImmutableSampler) ? (0) : (m_images.getSamplerB()),
+	};
+	const vk::VkDescriptorImageInfo		imageSamplers[2]	=
+	{
+		makeDescriptorImageInfo(samplers[0], m_images.getImageViewA(), vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL),
+		makeDescriptorImageInfo(samplers[1], m_images.getImageViewB(), vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL),
+	};
+
+	vk::DescriptorSetUpdateBuilder		builder;
+
+	// result
+	builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &resultInfo);
+
+	// combined image samplers
+	switch (m_shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &imageSamplers[0]);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &imageSamplers[0]);
+			builder.writeSingle(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(2u), vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &imageSamplers[1]);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.writeArray(descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 2u, imageSamplers);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	builder.update(m_vki, m_device);
+}
+
+tcu::TestStatus ImageSampleComputeInstance::iterate (void)
+{
+	logTestPlan();
+	return testResourceAccess();
+}
+
+void ImageSampleComputeInstance::logTestPlan (void) const
+{
+	std::ostringstream msg;
+
+	msg << "Accessing resource in a compute program.\n";
+
+	if (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER)
+	{
+		msg << "Single descriptor set. Descriptor set contains "
+			<< ((m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR) ? "single" :
+			    (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS) ? "two" :
+			    (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY) ? "an array (size 2) of" :
+			    (const char*)DE_NULL)
+			<< " VK_DESCRIPTOR_TYPE_SAMPLER descriptor(s) and a single texture.\n";
+	}
+	else if (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
+	{
+		msg << "Single descriptor set. Descriptor set contains "
+			<< ((m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR) ? "single" :
+			    (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS) ? "two" :
+			    (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY) ? "an array (size 2) of" :
+			    (const char*)DE_NULL)
+			<< " VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER descriptor(s).\n";
+	}
+	else
+		DE_FATAL("Impossible");
+
+	msg << "Image view type is " << vk::getImageViewTypeName(m_viewType) << "\n";
+
+	if (m_baseMipLevel)
+		msg << "Image view base mip level = " << m_baseMipLevel << "\n";
+	if (m_baseArraySlice)
+		msg << "Image view base array slice = " << m_baseArraySlice << "\n";
+
+	if (m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR)
+		msg << "Sampler mode is LINEAR, with WRAP\n";
+	else
+		msg << "Sampler 0 mode is LINEAR, with WRAP\nSampler 1 mode is NEAREST with CLAMP\n";
+
+	for (int resultNdx = 0; resultNdx < 4; ++resultNdx)
+	{
+		msg << "Test sample " << resultNdx << ": sample at position " << m_images.getSamplePos(m_viewType, m_baseMipLevel, m_baseArraySlice, resultNdx);
+
+		if (m_shaderInterface != SHADER_INPUT_SINGLE_DESCRIPTOR)
+		{
+			const int srcResourceNdx = (resultNdx % 2); // ABAB source
+
+			if (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER)
+				msg << " using sampler " << srcResourceNdx;
+			else if (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
+				msg << " from combined image sampler " << srcResourceNdx;
+			else
+				DE_FATAL("Impossible");
+		}
+		msg << "\n";
+	}
+
+	m_context.getTestContext().getLog()
+		<< tcu::TestLog::Message
+		<< msg.str()
+		<< tcu::TestLog::EndMessage;
+}
+
+tcu::TestStatus ImageSampleComputeInstance::testResourceAccess (void)
+{
+	const vk::Unique<vk::VkDescriptorSetLayout>		descriptorSetLayout	(createDescriptorSetLayout());
+	const vk::Unique<vk::VkDescriptorPool>			descriptorPool		(createDescriptorPool());
+	const vk::Unique<vk::VkDescriptorSet>			descriptorSet		(createDescriptorSet(*descriptorPool, *descriptorSetLayout));
+	const ComputePipeline							pipeline			(m_vki, m_device, m_context.getBinaryCollection(), 1, &descriptorSetLayout.get());
+
+	const vk::VkDescriptorSet						descriptorSets[]	= { *descriptorSet };
+	const int										numDescriptorSets	= DE_LENGTH_OF_ARRAY(descriptorSets);
+	const deUint32* const							dynamicOffsets		= DE_NULL;
+	const int										numDynamicOffsets	= 0;
+	const vk::VkBufferMemoryBarrier* const			preBarriers			= DE_NULL;
+	const int										numPreBarriers		= 0;
+	const vk::VkBufferMemoryBarrier* const			postBarriers		= m_result.getResultReadBarrier();
+	const int										numPostBarriers		= 1;
+
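+	// dispatch 4x1x1 workgroups; the shader writes four vec4 results, one per test sample, into the result buffer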
+	const ComputeCommand							compute				(m_vki,
+																		 m_device,
+																		 pipeline.getPipeline(),
+																		 pipeline.getPipelineLayout(),
+																		 tcu::UVec3(4, 1, 1),
+																		 numDescriptorSets,	descriptorSets,
+																		 numDynamicOffsets,	dynamicOffsets,
+																		 numPreBarriers,	preBarriers,
+																		 numPostBarriers,	postBarriers);
+
+	tcu::Vec4										results[4];
+	bool											anyResultSet		= false;
+	bool											allResultsOk		= true;
+
+	compute.submitAndWait(m_queueFamilyIndex, m_queue);
+	m_result.readResultContentsTo(&results);
+
+	// verify
+	for (int resultNdx = 0; resultNdx < 4; ++resultNdx)
+	{
+		const tcu::Vec4	result				= results[resultNdx];
+		const tcu::Vec4	reference			= m_images.fetchSampleValue(resultNdx);
+
+		// source image is high-frequency so the threshold is quite large to tolerate sampling errors
+		const tcu::Vec4	samplingThreshold	= tcu::Vec4(8.0f / 255.0f);
+
+		if (result != tcu::Vec4(-1.0f))
+			anyResultSet = true;
+
+		if (tcu::boolAny(tcu::greaterThan(tcu::abs(result - reference), samplingThreshold)))
+		{
+			allResultsOk = false;
+
+			m_context.getTestContext().getLog()
+				<< tcu::TestLog::Message
+				<< "Test sample " << resultNdx << ":\n"
+				<< "\tSampling at " << m_images.getSamplePos(m_viewType, m_baseMipLevel, m_baseArraySlice, resultNdx) << "\n"
+				<< "\tError expected " << reference << ", got " << result
+				<< tcu::TestLog::EndMessage;
+		}
+	}
+
+	// report result
+	if (allResultsOk)
+		return tcu::TestStatus::pass("Pass");
+	else if (anyResultSet)
+		return tcu::TestStatus::fail("Invalid result values");
+	else
+	{
+		m_context.getTestContext().getLog()
+			<< tcu::TestLog::Message
+			<< "Result buffer was not written to."
+			<< tcu::TestLog::EndMessage;
+		return tcu::TestStatus::fail("Result buffer was not written to");
+	}
+}
+
+class ImageDescriptorCase : public QuadrantRendederCase
+{
+public:
+	enum
+	{
+		FLAG_BASE_MIP	= (1u << 1u),
+		FLAG_BASE_SLICE	= (1u << 2u),
+	};
+	// enum continues where the resource flags end
+	DE_STATIC_ASSERT((deUint32)FLAG_BASE_MIP == (deUint32)RESOURCE_FLAG_LAST);
+
+								ImageDescriptorCase			(tcu::TestContext&		testCtx,
+															 const char*			name,
+															 const char*			description,
+															 bool					isPrimaryCmdBuf,
+															 vk::VkDescriptorType	descriptorType,
+															 vk::VkShaderStageFlags	exitingStages,
+															 vk::VkShaderStageFlags	activeStages,
+															 ShaderInputInterface	shaderInterface,
+															 vk::VkImageViewType	viewType,
+															 deUint32				flags);
+
+private:
+	std::string					genExtensionDeclarations	(vk::VkShaderStageFlagBits stage) const;
+	std::string					genResourceDeclarations		(vk::VkShaderStageFlagBits stage, int numUsedBindings) const;
+	std::string					genFetchCoordStr			(int fetchPosNdx) const;
+	std::string					genSampleCoordStr			(int samplePosNdx) const;
+	std::string					genResourceAccessSource		(vk::VkShaderStageFlagBits stage) const;
+	std::string					genNoAccessSource			(void) const;
+
+	vkt::TestInstance*			createInstance				(vkt::Context& context) const;
+
+private:
+	const bool					m_isPrimaryCmdBuf;
+	const vk::VkDescriptorType	m_descriptorType;
+	const ShaderInputInterface	m_shaderInterface;
+	const vk::VkImageViewType	m_viewType;
+	const deUint32				m_baseMipLevel;
+	const deUint32				m_baseArraySlice;
+	const bool					m_isImmutableSampler;
+};
+
+ImageDescriptorCase::ImageDescriptorCase (tcu::TestContext&			testCtx,
+										  const char*				name,
+										  const char*				description,
+										  bool						isPrimaryCmdBuf,
+										  vk::VkDescriptorType		descriptorType,
+										  vk::VkShaderStageFlags	exitingStages,
+										  vk::VkShaderStageFlags	activeStages,
+										  ShaderInputInterface		shaderInterface,
+										  vk::VkImageViewType		viewType,
+										  deUint32					flags)
+	: QuadrantRendederCase	(testCtx, name, description,
+							 // \note 1D textures are not supported in ES
+							 (viewType == vk::VK_IMAGE_VIEW_TYPE_1D || viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY) ? glu::GLSL_VERSION_440 : glu::GLSL_VERSION_310_ES,
+							 exitingStages, activeStages)
+	, m_isPrimaryCmdBuf		(isPrimaryCmdBuf)
+	, m_descriptorType		(descriptorType)
+	, m_shaderInterface		(shaderInterface)
+	, m_viewType			(viewType)
+	, m_baseMipLevel		(((flags & FLAG_BASE_MIP) != 0) ? (1u) : (0u))
+	, m_baseArraySlice		(((flags & FLAG_BASE_SLICE) != 0) ? (1u) : (0u))
+	, m_isImmutableSampler	((flags & RESOURCE_FLAG_IMMUTABLE_SAMPLER) != 0)
+{
+}
+
+std::string ImageDescriptorCase::genExtensionDeclarations (vk::VkShaderStageFlagBits stage) const
+{
+	DE_UNREF(stage);
+
+	if (m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)
+		return "#extension GL_OES_texture_cube_map_array : require\n";
+	else
+		return "";
+}
+
+std::string ImageDescriptorCase::genResourceDeclarations (vk::VkShaderStageFlagBits stage, int numUsedBindings) const
+{
+	DE_UNREF(stage);
+
+	// Vulkan-style resources are arrays implicitly, OpenGL-style are not
+	const std::string	dimensionBase	= (m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D || m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY)		? ("1D")
+										: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_2D || m_viewType == vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY)		? ("2D")
+										: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_3D)															? ("3D")
+										: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE || m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)	? ("Cube")
+										: (DE_NULL);
+	const std::string	dimensionArray	= (m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D || m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY)		? ("1DArray")
+										: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_2D || m_viewType == vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY)		? ("2DArray")
+										: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_3D)															? ("3D")
+										: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE || m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)	? ("CubeArray")
+										: (DE_NULL);
+	const std::string	dimension		= isImageViewTypeArray(m_viewType) ? dimensionArray : dimensionBase;
+
+	switch (m_shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+		{
+			switch (m_descriptorType)
+			{
+				case vk::VK_DESCRIPTOR_TYPE_SAMPLER:
+					return "layout(set = 0, binding = " + de::toString(numUsedBindings) + ") uniform highp texture" + dimension + " u_separateTexture;\n"
+						   "layout(set = 0, binding = " + de::toString(numUsedBindings+1) + ") uniform highp sampler u_separateSampler;\n";
+				case vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+					return "layout(set = 0, binding = " + de::toString(numUsedBindings) + ") uniform highp sampler" + dimension + " u_combinedTextureSampler;\n";
+				case vk::VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+					return "layout(set = 0, binding = " + de::toString(numUsedBindings) + ") uniform highp texture" + dimensionBase + " u_separateTexture;\n";
+				case vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+					return "layout(set = 0, binding = " + de::toString(numUsedBindings) + ", rgba8) readonly uniform highp image" + dimension + " u_image;\n";
+				default:
+					DE_FATAL("invalid descriptor");
+					return "";
+			}
+		}
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			switch (m_descriptorType)
+			{
+				case vk::VK_DESCRIPTOR_TYPE_SAMPLER:
+					return "layout(set = 0, binding = " + de::toString(numUsedBindings) + ") uniform highp texture" + dimension + " u_separateTexture;\n"
+						   "layout(set = 0, binding = " + de::toString(numUsedBindings+1) + ") uniform highp sampler u_separateSamplerA;\n"
+						   "layout(set = 0, binding = " + de::toString(numUsedBindings+2) + ") uniform highp sampler u_separateSamplerB;\n";
+				case vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+					return "layout(set = 0, binding = " + de::toString(numUsedBindings) + ") uniform highp sampler" + dimension + " u_combinedTextureSamplerA;\n"
+						   "layout(set = 0, binding = " + de::toString(numUsedBindings+1) + ") uniform highp sampler" + dimension + " u_combinedTextureSamplerB;\n";
+				case vk::VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+					return "layout(set = 0, binding = " + de::toString(numUsedBindings) + ") uniform highp texture" + dimensionBase + " u_separateTextureA;\n"
+						   "layout(set = 0, binding = " + de::toString(numUsedBindings+1) + ") uniform highp texture" + dimensionBase + " u_separateTextureB;\n";
+				case vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+					return "layout(set = 0, binding = " + de::toString(numUsedBindings) + ", rgba8) readonly uniform highp image" + dimension + " u_imageA;\n"
+						   "layout(set = 0, binding = " + de::toString(numUsedBindings+1) + ", rgba8) readonly uniform highp image" + dimension + " u_imageB;\n";
+				default:
+					DE_FATAL("invalid descriptor");
+					return "";
+			}
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			switch (m_descriptorType)
+			{
+				case vk::VK_DESCRIPTOR_TYPE_SAMPLER:
+					return "layout(set = 0, binding = " + de::toString(numUsedBindings) + ") uniform highp texture" + dimension + " u_separateTexture;\n"
+						   "layout(set = 0, binding = " + de::toString(numUsedBindings+1) + ") uniform highp sampler u_separateSampler[2];\n";
+				case vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+					return "layout(set = 0, binding = " + de::toString(numUsedBindings) + ") uniform highp sampler" + dimension + " u_combinedTextureSampler[2];\n";
+				case vk::VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+					return "layout(set = 0, binding = " + de::toString(numUsedBindings) + ") uniform highp texture" + dimensionBase + " u_separateTexture[2];\n";
+				case vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+					return "layout(set = 0, binding = " + de::toString(numUsedBindings) + ", rgba8) readonly uniform highp image" + dimension + " u_image[2];\n";
+				default:
+					DE_FATAL("invalid descriptor");
+					return "";
+			}
+
+		default:
+			DE_FATAL("Impossible");
+			return "";
+	}
+}
+
+std::string ImageDescriptorCase::genFetchCoordStr (int fetchPosNdx) const
+{
+	DE_ASSERT(m_descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE || m_descriptorType == vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE);
+	const tcu::IVec3 fetchPos = ImageFetchInstanceImages::getFetchPos(m_viewType, m_baseMipLevel, m_baseArraySlice, fetchPosNdx);
+
+	if (m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D)
+	{
+		return de::toString(fetchPos.x());
+	}
+	else if (m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY || m_viewType == vk::VK_IMAGE_VIEW_TYPE_2D)
+	{
+		std::ostringstream buf;
+		buf << "ivec2(" << fetchPos.x() << ", " << fetchPos.y() << ")";
+		return buf.str();
+	}
+	else
+	{
+		std::ostringstream buf;
+		buf << "ivec3(" << fetchPos.x() << ", " << fetchPos.y() << ", " << fetchPos.z() << ")";
+		return buf.str();
+	}
+}
+
+std::string ImageDescriptorCase::genSampleCoordStr (int samplePosNdx) const
+{
+	DE_ASSERT(m_descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER || m_descriptorType == vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
+	const tcu::Vec4 fetchPos = ImageSampleInstanceImages::getSamplePos(m_viewType, m_baseMipLevel, m_baseArraySlice, samplePosNdx);
+
+	if (m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D)
+	{
+		std::ostringstream buf;
+		buf << "float(" << fetchPos.x() << ")";
+		return buf.str();
+	}
+	else if (m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY || m_viewType == vk::VK_IMAGE_VIEW_TYPE_2D)
+	{
+		std::ostringstream buf;
+		buf << "vec2(float(" << fetchPos.x() << "), float(" << fetchPos.y() << "))";
+		return buf.str();
+	}
+	else if (m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)
+	{
+		std::ostringstream buf;
+		buf << "vec4(float(" << fetchPos.x() << "), float(" << fetchPos.y() << "), float(" << fetchPos.z() << "), float(" << fetchPos.w() << "))";
+		return buf.str();
+	}
+	else
+	{
+		std::ostringstream buf;
+		buf << "vec3(float(" << fetchPos.x() << "), float(" << fetchPos.y() << "), float(" << fetchPos.z() << "))";
+		return buf.str();
+	}
+}
+
+std::string ImageDescriptorCase::genResourceAccessSource (vk::VkShaderStageFlagBits stage) const
+{
+	DE_UNREF(stage);
+
+	const char* const	dimension		= (m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D)			? ("1D")
+										: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY)	? ("1DArray")
+										: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_2D)			? ("2D")
+										: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY)	? ("2DArray")
+										: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_3D)			? ("3D")
+										: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE)		? ("Cube")
+										: (m_viewType == vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)	? ("CubeArray")
+										: (DE_NULL);
+	const char* const	accessPostfixA	= (m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR)		? ("")
+										: (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS)	? ("A")
+										: (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY)		? ("[0]")
+										: (DE_NULL);
+	const char* const	accessPostfixB	= (m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR)		? ("")
+										: (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS)	? ("B")
+										: (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY)		? ("[1]")
+										: (DE_NULL);
+
+	switch (m_descriptorType)
+	{
+		case vk::VK_DESCRIPTOR_TYPE_SAMPLER:
+		case vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+		{
+			const std::string	coordStr[4]	=
+			{
+				genSampleCoordStr(0),
+				genSampleCoordStr(1),
+				genSampleCoordStr(2),
+				genSampleCoordStr(3),
+			};
+			std::ostringstream	buf;
+
+			if (m_descriptorType == vk::VK_DESCRIPTOR_TYPE_SAMPLER)
+			{
+				buf << "	if (quadrant_id == 0)\n"
+					<< "		result_color = textureLod(sampler" << dimension << "(u_separateTexture, u_separateSampler" << accessPostfixA << "), " << coordStr[0] << ", 0.0);\n"
+					<< "	else if (quadrant_id == 1)\n"
+					<< "		result_color = textureLod(sampler" << dimension << "(u_separateTexture, u_separateSampler" << accessPostfixB << "), " << coordStr[1] << ", 0.0);\n"
+					<< "	else if (quadrant_id == 2)\n"
+					<< "		result_color = textureLod(sampler" << dimension << "(u_separateTexture, u_separateSampler" << accessPostfixA << "), " << coordStr[2] << ", 0.0);\n"
+					<< "	else\n"
+					<< "		result_color = textureLod(sampler" << dimension << "(u_separateTexture, u_separateSampler" << accessPostfixB << "), " << coordStr[3] << ", 0.0);\n";
+			}
+			else
+			{
+				buf << "	if (quadrant_id == 0)\n"
+					<< "		result_color = textureLod(u_combinedTextureSampler" << accessPostfixA << ", " << coordStr[0] << ", 0.0);\n"
+					<< "	else if (quadrant_id == 1)\n"
+					<< "		result_color = textureLod(u_combinedTextureSampler" << accessPostfixB << ", " << coordStr[1] << ", 0.0);\n"
+					<< "	else if (quadrant_id == 2)\n"
+					<< "		result_color = textureLod(u_combinedTextureSampler" << accessPostfixA << ", " << coordStr[2] << ", 0.0);\n"
+					<< "	else\n"
+					<< "		result_color = textureLod(u_combinedTextureSampler" << accessPostfixB << ", " << coordStr[3] << ", 0.0);\n";
+			}
+
+			return buf.str();
+		}
+
+		case vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+		{
+			const std::string	coordStr[4]	=
+			{
+				genFetchCoordStr(0),
+				genFetchCoordStr(1),
+				genFetchCoordStr(2),
+				genFetchCoordStr(3),
+			};
+			std::ostringstream	buf;
+
+			buf << "	if (quadrant_id == 0)\n"
+				<< "		result_color = imageLoad(u_image" << accessPostfixA << ", " << coordStr[0] << ");\n"
+				<< "	else if (quadrant_id == 1)\n"
+				<< "		result_color = imageLoad(u_image" << accessPostfixB << ", " << coordStr[1] << ");\n"
+				<< "	else if (quadrant_id == 2)\n"
+				<< "		result_color = imageLoad(u_image" << accessPostfixA << ", " << coordStr[2] << ");\n"
+				<< "	else\n"
+				<< "		result_color = imageLoad(u_image" << accessPostfixB << ", " << coordStr[3] << ");\n";
+
+			return buf.str();
+		}
+
+		default:
+			DE_FATAL("invalid descriptor");
+			return "";
+	}
+}
+
+std::string ImageDescriptorCase::genNoAccessSource (void) const
+{
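+	// no descriptor access: emit the green/yellow quadrant pattern that result verification expects for inactive stages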
+	return "	if (quadrant_id == 1 || quadrant_id == 2)\n"
+			"		result_color = vec4(0.0, 1.0, 0.0, 1.0);\n"
+			"	else\n"
+			"		result_color = vec4(1.0, 1.0, 0.0, 1.0);\n";
+}
+
+vkt::TestInstance* ImageDescriptorCase::createInstance (vkt::Context& context) const
+{
+	switch (m_descriptorType)
+	{
+		case vk::VK_DESCRIPTOR_TYPE_SAMPLER:
+		case vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+			if (m_exitingStages == vk::VK_SHADER_STAGE_COMPUTE_BIT)
+			{
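+				// the compute path supports primary command buffers only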
+				DE_ASSERT(m_isPrimaryCmdBuf);
+				return new ImageSampleComputeInstance(context, m_descriptorType, m_shaderInterface, m_viewType, m_baseMipLevel, m_baseArraySlice, m_isImmutableSampler);
+			}
+			else
+				return new ImageSampleRenderInstance(context, m_isPrimaryCmdBuf, m_descriptorType, m_activeStages, m_shaderInterface, m_viewType, m_baseMipLevel, m_baseArraySlice, m_isImmutableSampler);
+
+		case vk::VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
+		case vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+			if (m_exitingStages == vk::VK_SHADER_STAGE_COMPUTE_BIT)
+			{
+				DE_ASSERT(m_isPrimaryCmdBuf);
+				return new ImageFetchComputeInstance(context, m_descriptorType, m_shaderInterface, m_viewType, m_baseMipLevel, m_baseArraySlice);
+			}
+			else
+				return new ImageFetchRenderInstance(context, m_isPrimaryCmdBuf, m_descriptorType, m_activeStages, m_shaderInterface, m_viewType, m_baseMipLevel, m_baseArraySlice);
+
+		default:
+			DE_FATAL("Impossible");
+			return DE_NULL;
+	}
+}
+
+class TexelBufferInstanceBuffers
+{
+public:
+											TexelBufferInstanceBuffers	(const vk::DeviceInterface&		vki,
+																		 vk::VkDevice					device,
+																		 vk::Allocator&					allocator,
+																		 vk::VkDescriptorType			descriptorType,
+																		 int							numTexelBuffers,
+																		 bool							hasViewOffset);
+
+private:
+	static vk::Move<vk::VkBuffer>			createBuffer				(const vk::DeviceInterface&		vki,
+																		 vk::VkDevice					device,
+																		 vk::Allocator&					allocator,
+																		 vk::VkDescriptorType			descriptorType,
+																		 de::MovePtr<vk::Allocation>	*outAllocation);
+
+	static vk::Move<vk::VkBufferView>		createBufferView			(const vk::DeviceInterface&		vki,
+																		 vk::VkDevice					device,
+																		 const tcu::TextureFormat&		textureFormat,
+																		 deUint32						offset,
+																		 vk::VkBuffer					buffer);
+
+	static vk::VkBufferMemoryBarrier		createBarrier				(vk::VkDescriptorType descriptorType, vk::VkBuffer buffer);
+
+	void									populateSourceBuffer		(const tcu::PixelBufferAccess& access);
+	void									uploadData					(const vk::DeviceInterface& vki, vk::VkDevice device, const vk::Allocation& memory, const de::ArrayBuffer<deUint8>& data);
+
+public:
+	static int								getFetchPos					(int fetchPosNdx);
+	tcu::Vec4								fetchTexelValue				(int fetchPosNdx) const;
+
+	inline int								getNumTexelBuffers			(void) const { return m_numTexelBuffers;	}
+	const tcu::TextureFormat&				getTextureFormat			(void) const { return m_imageFormat;		}
+	inline vk::VkBufferView					getBufferViewA				(void) const { return *m_bufferViewA;		}
+	inline vk::VkBufferView					getBufferViewB				(void) const { return *m_bufferViewB;		}
+	inline const vk::VkBufferMemoryBarrier*	getBufferInitBarriers		(void) const { return m_bufferBarriers;		}
+
+private:
+	enum
+	{
+		BUFFER_SIZE			= 512,
+		VIEW_OFFSET_VALUE	= 256,
+		VIEW_DATA_SIZE		= 256,	//!< size in bytes
+		VIEW_WIDTH			= 64,	//!< size in pixels
+	};
+	enum
+	{
+		// some arbitrary points
+		SAMPLE_POINT_0 = 6,
+		SAMPLE_POINT_1 = 51,
+		SAMPLE_POINT_2 = 42,
+		SAMPLE_POINT_3 = 25,
+	};
+
+	const deUint32							m_numTexelBuffers;
+	const tcu::TextureFormat				m_imageFormat;
+	const deUint32							m_viewOffset;
+
+	de::ArrayBuffer<deUint8>				m_sourceBufferA;
+	de::ArrayBuffer<deUint8>				m_sourceBufferB;
+	const tcu::ConstPixelBufferAccess		m_sourceViewA;
+	const tcu::ConstPixelBufferAccess		m_sourceViewB;
+
+	de::MovePtr<vk::Allocation>				m_bufferMemoryA;
+	de::MovePtr<vk::Allocation>				m_bufferMemoryB;
+	const vk::Unique<vk::VkBuffer>			m_bufferA;
+	const vk::Unique<vk::VkBuffer>			m_bufferB;
+	const vk::Unique<vk::VkBufferView>		m_bufferViewA;
+	const vk::Unique<vk::VkBufferView>		m_bufferViewB;
+	vk::VkBufferMemoryBarrier				m_bufferBarriers[2];
+};
+
+TexelBufferInstanceBuffers::TexelBufferInstanceBuffers (const vk::DeviceInterface&		vki,
+														vk::VkDevice					device,
+														vk::Allocator&					allocator,
+														vk::VkDescriptorType			descriptorType,
+														int								numTexelBuffers,
+														bool							hasViewOffset)
+	: m_numTexelBuffers	(numTexelBuffers)
+	, m_imageFormat		(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8)
+	, m_viewOffset		((hasViewOffset) ? ((deUint32)VIEW_OFFSET_VALUE) : (0u))
+	, m_sourceBufferA	(BUFFER_SIZE)
+	, m_sourceBufferB	((numTexelBuffers == 1)
+							? (0u)
+							: ((size_t)BUFFER_SIZE))
+	, m_sourceViewA		(m_imageFormat, tcu::IVec3(VIEW_WIDTH, 1, 1), m_sourceBufferA.getElementPtr(m_viewOffset))
+	, m_sourceViewB		(m_imageFormat, tcu::IVec3(VIEW_WIDTH, 1, 1), m_sourceBufferB.getElementPtr(m_viewOffset))
+	, m_bufferMemoryA	(DE_NULL)
+	, m_bufferMemoryB	(DE_NULL)
+	, m_bufferA			(createBuffer(vki, device, allocator, descriptorType, &m_bufferMemoryA))
+	, m_bufferB			((numTexelBuffers == 1)
+							? vk::Move<vk::VkBuffer>()
+							: createBuffer(vki, device, allocator, descriptorType, &m_bufferMemoryB))
+	, m_bufferViewA		(createBufferView(vki, device, m_imageFormat, m_viewOffset, *m_bufferA))
+	, m_bufferViewB		((numTexelBuffers == 1)
+							? vk::Move<vk::VkBufferView>()
+							: createBufferView(vki, device, m_imageFormat, m_viewOffset, *m_bufferB))
+{
+	DE_ASSERT(numTexelBuffers == 1 || numTexelBuffers == 2);
+	DE_ASSERT(VIEW_WIDTH * m_imageFormat.getPixelSize() == VIEW_DATA_SIZE);
+	DE_ASSERT(BUFFER_SIZE % m_imageFormat.getPixelSize() == 0);
+
+	// specify and upload
+
+	populateSourceBuffer(tcu::PixelBufferAccess(m_imageFormat, tcu::IVec3(BUFFER_SIZE / m_imageFormat.getPixelSize(), 1, 1), m_sourceBufferA.getPtr()));
+	uploadData(vki, device, *m_bufferMemoryA, m_sourceBufferA);
+
+	if (numTexelBuffers == 2)
+	{
+		populateSourceBuffer(tcu::PixelBufferAccess(m_imageFormat, tcu::IVec3(BUFFER_SIZE / m_imageFormat.getPixelSize(), 1, 1), m_sourceBufferB.getPtr()));
+		uploadData(vki, device, *m_bufferMemoryB, m_sourceBufferB);
+	}
+
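+	// make the host-written buffer contents visible to shader reads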
+	m_bufferBarriers[0] = createBarrier(descriptorType, *m_bufferA);
+	m_bufferBarriers[1] = createBarrier(descriptorType, *m_bufferB);
+}
+
+vk::Move<vk::VkBuffer> TexelBufferInstanceBuffers::createBuffer (const vk::DeviceInterface&		vki,
+																 vk::VkDevice					device,
+																 vk::Allocator&					allocator,
+																 vk::VkDescriptorType			descriptorType,
+																 de::MovePtr<vk::Allocation>	*outAllocation)
+{
+	const vk::VkBufferUsageFlags	usage		= (isUniformDescriptorType(descriptorType)) ? (vk::VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) : (vk::VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT);
+	const vk::VkBufferCreateInfo	createInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+		DE_NULL,
+		0u,									// flags
+		(vk::VkDeviceSize)BUFFER_SIZE,		// size
+		usage,								// usage
+		vk::VK_SHARING_MODE_EXCLUSIVE,		// sharingMode
+		0u,									// queueFamilyIndexCount
+		DE_NULL,							// pQueueFamilyIndices
+	};
+	vk::Move<vk::VkBuffer>			buffer		(vk::createBuffer(vki, device, &createInfo));
+	de::MovePtr<vk::Allocation>		allocation	(allocateAndBindObjectMemory(vki, device, allocator, *buffer, vk::MemoryRequirement::HostVisible));
+
+	*outAllocation = allocation;
+	return buffer;
+}
+
+vk::Move<vk::VkBufferView> TexelBufferInstanceBuffers::createBufferView (const vk::DeviceInterface&		vki,
+																		 vk::VkDevice					device,
+																		 const tcu::TextureFormat&		textureFormat,
+																		 deUint32						offset,
+																		 vk::VkBuffer					buffer)
+{
+	const vk::VkBufferViewCreateInfo createInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
+		DE_NULL,
+		(vk::VkBufferViewCreateFlags)0,
+		buffer,									// buffer
+		vk::mapTextureFormat(textureFormat),	// format
+		(vk::VkDeviceSize)offset,				// offset
+		(vk::VkDeviceSize)VIEW_DATA_SIZE		// range
+	};
+	return vk::createBufferView(vki, device, &createInfo);
+}
+
+vk::VkBufferMemoryBarrier TexelBufferInstanceBuffers::createBarrier (vk::VkDescriptorType descriptorType, vk::VkBuffer buffer)
+{
+	const vk::VkAccessFlags			inputBit	= (isUniformDescriptorType(descriptorType)) ? (vk::VK_ACCESS_UNIFORM_READ_BIT) : (vk::VK_ACCESS_SHADER_READ_BIT);
+	const vk::VkBufferMemoryBarrier	barrier		=
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+		DE_NULL,
+		vk::VK_ACCESS_HOST_WRITE_BIT,			// srcAccessMask
+		inputBit,								// dstAccessMask
+		vk::VK_QUEUE_FAMILY_IGNORED,			// srcQueueFamilyIndex
+		vk::VK_QUEUE_FAMILY_IGNORED,			// dstQueueFamilyIndex
+		buffer,									// buffer
+		0u,										// offset
+		(vk::VkDeviceSize)BUFFER_SIZE			// size
+	};
+	return barrier;
+}
+
+void TexelBufferInstanceBuffers::populateSourceBuffer (const tcu::PixelBufferAccess& access)
+{
+	DE_ASSERT(access.getHeight() == 1);
+	DE_ASSERT(access.getDepth() == 1);
+
+	const deInt32 width = access.getWidth();
+
+	for (int x = 0; x < width; ++x)
+	{
+		const int			red		= 255 * x / width;												//!< gradient from 0 -> max (detects large offset errors)
+		const int			green	= ((x % 2 == 0) ? (127) : (0)) + ((x % 4 < 3) ? (128) : (0));	//!< 3-level M pattern (detects small offset errors)
+		const int			blue	= 16 * (x % 16);												//!< 16-long triangle wave
+
+		DE_ASSERT(de::inRange(red, 0, 255));
+		DE_ASSERT(de::inRange(green, 0, 255));
+		DE_ASSERT(de::inRange(blue, 0, 255));
+
+		access.setPixel(tcu::IVec4(red, green, blue, 255), x, 0, 0);
+	}
+}
+
+void TexelBufferInstanceBuffers::uploadData (const vk::DeviceInterface& vki, vk::VkDevice device, const vk::Allocation& memory, const de::ArrayBuffer<deUint8>& data)
+{
+	deMemcpy(memory.getHostPtr(), data.getPtr(), data.size());
+	flushMappedMemoryRange(vki, device, memory.getMemory(), memory.getOffset(), data.size());
+}
+
+int TexelBufferInstanceBuffers::getFetchPos (int fetchPosNdx)
+{
+	static const int fetchPositions[4] =
+	{
+		SAMPLE_POINT_0,
+		SAMPLE_POINT_1,
+		SAMPLE_POINT_2,
+		SAMPLE_POINT_3,
+	};
+	return de::getSizedArrayElement<4>(fetchPositions, fetchPosNdx);
+}
+
+tcu::Vec4 TexelBufferInstanceBuffers::fetchTexelValue (int fetchPosNdx) const
+{
+	// source order is ABAB
+	const tcu::ConstPixelBufferAccess&	texelSrcA	= m_sourceViewA;
+	const tcu::ConstPixelBufferAccess&	texelSrcB	= (m_numTexelBuffers == 1) ? (m_sourceViewA) : (m_sourceViewB);
+	const tcu::ConstPixelBufferAccess&	texelSrc	= ((fetchPosNdx % 2) == 0) ? (texelSrcA) : (texelSrcB);
+
+	return texelSrc.getPixel(getFetchPos(fetchPosNdx), 0, 0);
+}
+
+class TexelBufferRenderInstance : public SingleCmdRenderInstance
+{
+public:
+													TexelBufferRenderInstance	(vkt::Context&			context,
+																				 bool					isPrimaryCmdBuf,
+																				 vk::VkDescriptorType	descriptorType,
+																				 vk::VkShaderStageFlags	stageFlags,
+																				 ShaderInputInterface	shaderInterface,
+																				 bool					nonzeroViewOffset);
+
+private:
+	static vk::Move<vk::VkDescriptorSetLayout>		createDescriptorSetLayout	(const vk::DeviceInterface&	vki,
+																				 vk::VkDevice				device,
+																				 vk::VkDescriptorType		descriptorType,
+																				 ShaderInputInterface		shaderInterface,
+																				 vk::VkShaderStageFlags		stageFlags);
+
+	static vk::Move<vk::VkPipelineLayout>			createPipelineLayout		(const vk::DeviceInterface&	vki,
+																				 vk::VkDevice				device,
+																				 vk::VkDescriptorSetLayout	descriptorSetLayout);
+
+	static vk::Move<vk::VkDescriptorPool>			createDescriptorPool		(const vk::DeviceInterface&	vki,
+																				 vk::VkDevice				device,
+																				 vk::VkDescriptorType		descriptorType,
+																				 ShaderInputInterface		shaderInterface);
+
+	static vk::Move<vk::VkDescriptorSet>			createDescriptorSet			(const vk::DeviceInterface&	vki,
+																				 vk::VkDevice				device,
+																				 vk::VkDescriptorType		descriptorType,
+																				 ShaderInputInterface		shaderInterface,
+																				 vk::VkDescriptorSetLayout	layout,
+																				 vk::VkDescriptorPool		pool,
+																				 vk::VkBufferView			viewA,
+																				 vk::VkBufferView			viewB);
+
+	void											logTestPlan					(void) const;
+	vk::VkPipelineLayout							getPipelineLayout			(void) const;
+	void											writeDrawCmdBuffer			(vk::VkCommandBuffer cmd) const;
+	tcu::TestStatus									verifyResultImage			(const tcu::ConstPixelBufferAccess& result) const;
+
+	enum
+	{
+		RENDER_SIZE = 128,
+	};
+
+	const vk::VkDescriptorType						m_descriptorType;
+	const vk::VkShaderStageFlags					m_stageFlags;
+	const ShaderInputInterface						m_shaderInterface;
+	const bool										m_nonzeroViewOffset;
+
+	const vk::Unique<vk::VkDescriptorSetLayout>		m_descriptorSetLayout;
+	const vk::Unique<vk::VkPipelineLayout>			m_pipelineLayout;
+	const TexelBufferInstanceBuffers				m_texelBuffers;
+	const vk::Unique<vk::VkDescriptorPool>			m_descriptorPool;
+	const vk::Unique<vk::VkDescriptorSet>			m_descriptorSet;
+};
+
+TexelBufferRenderInstance::TexelBufferRenderInstance (vkt::Context&				context,
+													  bool						isPrimaryCmdBuf,
+													  vk::VkDescriptorType		descriptorType,
+													  vk::VkShaderStageFlags	stageFlags,
+													  ShaderInputInterface		shaderInterface,
+													  bool						nonzeroViewOffset)
+	: SingleCmdRenderInstance	(context, isPrimaryCmdBuf, tcu::UVec2(RENDER_SIZE, RENDER_SIZE))
+	, m_descriptorType			(descriptorType)
+	, m_stageFlags				(stageFlags)
+	, m_shaderInterface			(shaderInterface)
+	, m_nonzeroViewOffset		(nonzeroViewOffset)
+	, m_descriptorSetLayout		(createDescriptorSetLayout(m_vki, m_device, m_descriptorType, m_shaderInterface, m_stageFlags))
+	, m_pipelineLayout			(createPipelineLayout(m_vki, m_device, *m_descriptorSetLayout))
+	, m_texelBuffers			(m_vki, m_device, m_allocator, m_descriptorType, getInterfaceNumResources(m_shaderInterface), m_nonzeroViewOffset)
+	, m_descriptorPool			(createDescriptorPool(m_vki, m_device, m_descriptorType, m_shaderInterface))
+	, m_descriptorSet			(createDescriptorSet(m_vki, m_device, m_descriptorType, m_shaderInterface, *m_descriptorSetLayout, *m_descriptorPool, m_texelBuffers.getBufferViewA(), m_texelBuffers.getBufferViewB()))
+{
+}
+
+vk::Move<vk::VkDescriptorSetLayout> TexelBufferRenderInstance::createDescriptorSetLayout (const vk::DeviceInterface&	vki,
+																						  vk::VkDevice					device,
+																						  vk::VkDescriptorType			descriptorType,
+																						  ShaderInputInterface			shaderInterface,
+																						  vk::VkShaderStageFlags		stageFlags)
+{
+	vk::DescriptorSetLayoutBuilder builder;
+
+	switch (shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.addSingleBinding(descriptorType, stageFlags);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.addSingleBinding(descriptorType, stageFlags);
+			builder.addSingleBinding(descriptorType, stageFlags);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.addArrayBinding(descriptorType, 2u, stageFlags);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	return builder.build(vki, device);
+}
+
+vk::Move<vk::VkPipelineLayout> TexelBufferRenderInstance::createPipelineLayout (const vk::DeviceInterface&	vki,
+																				vk::VkDevice				device,
+																				vk::VkDescriptorSetLayout	descriptorSetLayout)
+{
+	const vk::VkPipelineLayoutCreateInfo createInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+		DE_NULL,
+		(vk::VkPipelineLayoutCreateFlags)0,
+		1,						// setLayoutCount
+		&descriptorSetLayout,	// pSetLayouts
+		0u,						// pushConstantRangeCount
+		DE_NULL,				// pPushConstantRanges
+	};
+	return vk::createPipelineLayout(vki, device, &createInfo);
+}
+
+vk::Move<vk::VkDescriptorPool> TexelBufferRenderInstance::createDescriptorPool (const vk::DeviceInterface&	vki,
+																				vk::VkDevice					device,
+																				vk::VkDescriptorType			descriptorType,
+																				ShaderInputInterface			shaderInterface)
+{
+	return vk::DescriptorPoolBuilder()
+		.addType(descriptorType, getInterfaceNumResources(shaderInterface))
+		.build(vki, device, vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1);
+}
+
+vk::Move<vk::VkDescriptorSet> TexelBufferRenderInstance::createDescriptorSet (const vk::DeviceInterface&	vki,
+																			  vk::VkDevice					device,
+																			  vk::VkDescriptorType			descriptorType,
+																			  ShaderInputInterface			shaderInterface,
+																			  vk::VkDescriptorSetLayout		layout,
+																			  vk::VkDescriptorPool			pool,
+																			  vk::VkBufferView				viewA,
+																			  vk::VkBufferView				viewB)
+{
+	const vk::VkBufferView					texelBufferInfos[2]	=
+	{
+		viewA,
+		viewB,
+	};
+	const vk::VkDescriptorSetAllocateInfo	allocInfo			=
+	{
+		vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+		DE_NULL,
+		pool,
+		1u,
+		&layout
+	};
+
+	vk::Move<vk::VkDescriptorSet>			descriptorSet		= allocateDescriptorSet(vki, device, &allocInfo);
+	vk::DescriptorSetUpdateBuilder			builder;
+
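+	// texel buffer descriptors are written with VkBufferView handles (pTexelBufferView) rather than buffer/image info structs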
+	switch (shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), descriptorType, &texelBufferInfos[0]);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), descriptorType, &texelBufferInfos[0]);
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), descriptorType, &texelBufferInfos[1]);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.writeArray(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), descriptorType, 2u, texelBufferInfos);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	builder.update(vki, device);
+	return descriptorSet;
+}
+
+void TexelBufferRenderInstance::logTestPlan (void) const
+{
+	std::ostringstream msg;
+
+	msg << "Rendering 2x2 grid.\n"
+		<< "Single descriptor set. Descriptor set contains "
+			<< ((m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR) ? "single" :
+			    (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS) ? "two" :
+			    (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY) ? "an array (size 2) of" :
+			    (const char*)DE_NULL)
+		<< " descriptor(s) of type " << vk::getDescriptorTypeName(m_descriptorType) << "\n"
+		<< "Buffer view is created with a " << ((m_nonzeroViewOffset) ? ("non-zero") : ("zero")) << " offset.\n"
+		<< "Buffer format is " << vk::getFormatName(vk::mapTextureFormat(m_texelBuffers.getTextureFormat())) << ".\n";
+
+	if (m_stageFlags == 0u)
+	{
+		msg << "Descriptors are not accessed in any shader stage.\n";
+	}
+	else
+	{
+		msg << "Color in each cell is fetched using the descriptor(s):\n";
+
+		for (int resultNdx = 0; resultNdx < 4; ++resultNdx)
+		{
+			msg << "Test sample " << resultNdx << ": fetch at position " << m_texelBuffers.getFetchPos(resultNdx);
+
+			if (m_shaderInterface != SHADER_INPUT_SINGLE_DESCRIPTOR)
+			{
+				const int srcResourceNdx = (resultNdx % 2); // ABAB source
+				msg << " from texelBuffer " << srcResourceNdx;
+			}
+
+			msg << "\n";
+		}
+
+		msg << "Descriptors are accessed in {"
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_VERTEX_BIT) != 0)					? (" vertex")			: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) != 0)	? (" tess_control")		: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) != 0)	? (" tess_evaluation")	: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_GEOMETRY_BIT) != 0)				? (" geometry")			: (""))
+			<< (((m_stageFlags & vk::VK_SHADER_STAGE_FRAGMENT_BIT) != 0)				? (" fragment")			: (""))
+			<< " } stages.";
+	}
+
+	m_context.getTestContext().getLog()
+		<< tcu::TestLog::Message
+		<< msg.str()
+		<< tcu::TestLog::EndMessage;
+}
+
+vk::VkPipelineLayout TexelBufferRenderInstance::getPipelineLayout (void) const
+{
+	return *m_pipelineLayout;
+}
+
+void TexelBufferRenderInstance::writeDrawCmdBuffer (vk::VkCommandBuffer cmd) const
+{
+	m_vki.cmdBindDescriptorSets(cmd, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, getPipelineLayout(), 0, 1, &m_descriptorSet.get(), 0, DE_NULL);
+	m_vki.cmdDraw(cmd, 6 * 4, 1, 0, 0); // render four quads (two separate triangles each)
+}
+
+tcu::TestStatus TexelBufferRenderInstance::verifyResultImage (const tcu::ConstPixelBufferAccess& result) const
+{
+	const tcu::Vec4		green		(0.0f, 1.0f, 0.0f, 1.0f);
+	const tcu::Vec4		yellow		(1.0f, 1.0f, 0.0f, 1.0f);
+	const bool			doFetch		= (m_stageFlags != 0u); // no active stages? Then don't fetch
+	const tcu::Vec4		sample0		= (!doFetch) ? (yellow)	: (m_texelBuffers.fetchTexelValue(0));
+	const tcu::Vec4		sample1		= (!doFetch) ? (green)	: (m_texelBuffers.fetchTexelValue(1));
+	const tcu::Vec4		sample2		= (!doFetch) ? (green)	: (m_texelBuffers.fetchTexelValue(2));
+	const tcu::Vec4		sample3		= (!doFetch) ? (yellow)	: (m_texelBuffers.fetchTexelValue(3));
+	tcu::Surface		reference	(m_targetSize.x(), m_targetSize.y());
+
+	drawQuadrantReferenceResult(reference.getAccess(), sample0, sample1, sample2, sample3);
+
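+	// texel fetches are exact, so the comparison threshold only needs to absorb 8-bit rounding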
+	if (!bilinearCompare(m_context.getTestContext().getLog(), "Compare", "Result comparison", reference.getAccess(), result, tcu::RGBA(1, 1, 1, 1), tcu::COMPARE_LOG_RESULT))
+		return tcu::TestStatus::fail("Image verification failed");
+	else
+		return tcu::TestStatus::pass("Pass");
+}
+
+class TexelBufferComputeInstance : public vkt::TestInstance
+{
+public:
+											TexelBufferComputeInstance	(vkt::Context&			context,
+																		 vk::VkDescriptorType	descriptorType,
+																		 ShaderInputInterface	shaderInterface,
+																		 bool					nonzeroViewOffset);
+
+private:
+	vk::Move<vk::VkDescriptorSetLayout>		createDescriptorSetLayout	(void) const;
+	vk::Move<vk::VkDescriptorPool>			createDescriptorPool		(void) const;
+	vk::Move<vk::VkDescriptorSet>			createDescriptorSet			(vk::VkDescriptorPool pool, vk::VkDescriptorSetLayout layout) const;
+
+	tcu::TestStatus							iterate						(void);
+	void									logTestPlan					(void) const;
+	tcu::TestStatus							testResourceAccess			(void);
+
+	const vk::VkDescriptorType				m_descriptorType;
+	const ShaderInputInterface				m_shaderInterface;
+	const bool								m_nonzeroViewOffset;
+
+	const vk::DeviceInterface&				m_vki;
+	const vk::VkDevice						m_device;
+	const vk::VkQueue						m_queue;
+	const deUint32							m_queueFamilyIndex;
+	vk::Allocator&							m_allocator;
+
+	const ComputeInstanceResultBuffer		m_result;
+	const TexelBufferInstanceBuffers		m_texelBuffers;
+};
+
+TexelBufferComputeInstance::TexelBufferComputeInstance (Context&				context,
+														vk::VkDescriptorType	descriptorType,
+														ShaderInputInterface	shaderInterface,
+														bool					nonzeroViewOffset)
+	: vkt::TestInstance		(context)
+	, m_descriptorType		(descriptorType)
+	, m_shaderInterface		(shaderInterface)
+	, m_nonzeroViewOffset	(nonzeroViewOffset)
+	, m_vki					(context.getDeviceInterface())
+	, m_device				(context.getDevice())
+	, m_queue				(context.getUniversalQueue())
+	, m_queueFamilyIndex	(context.getUniversalQueueFamilyIndex())
+	, m_allocator			(context.getDefaultAllocator())
+	, m_result				(m_vki, m_device, m_allocator)
+	, m_texelBuffers		(m_vki, m_device, m_allocator, m_descriptorType, getInterfaceNumResources(m_shaderInterface), m_nonzeroViewOffset)
+{
+}
+
+vk::Move<vk::VkDescriptorSetLayout> TexelBufferComputeInstance::createDescriptorSetLayout (void) const
+{
+	vk::DescriptorSetLayoutBuilder builder;
+
+	builder.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+
+	switch (m_shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.addSingleBinding(m_descriptorType, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.addSingleBinding(m_descriptorType, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+			builder.addSingleBinding(m_descriptorType, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.addArrayBinding(m_descriptorType, 2u, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	return builder.build(m_vki, m_device);
+}
+
+vk::Move<vk::VkDescriptorPool> TexelBufferComputeInstance::createDescriptorPool (void) const
+{
+	return vk::DescriptorPoolBuilder()
+		.addType(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.addType(m_descriptorType, getInterfaceNumResources(m_shaderInterface))
+		.build(m_vki, m_device, vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1);
+}
+
+vk::Move<vk::VkDescriptorSet> TexelBufferComputeInstance::createDescriptorSet (vk::VkDescriptorPool pool, vk::VkDescriptorSetLayout layout) const
+{
+	const vk::VkDescriptorBufferInfo		resultInfo			= vk::makeDescriptorBufferInfo(m_result.getBuffer(), 0u, (vk::VkDeviceSize)ComputeInstanceResultBuffer::DATA_SIZE);
+	const vk::VkBufferView					texelBufferInfos[2]	=
+	{
+		m_texelBuffers.getBufferViewA(),
+		m_texelBuffers.getBufferViewB(),
+	};
+	const vk::VkDescriptorSetAllocateInfo	allocInfo			=
+	{
+		vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+		DE_NULL,
+		pool,
+		1u,
+		&layout
+	};
+
+	vk::Move<vk::VkDescriptorSet>			descriptorSet		= allocateDescriptorSet(m_vki, m_device, &allocInfo);
+	vk::DescriptorSetUpdateBuilder			builder;
+
+	// result
+	builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &resultInfo);
+
+	// texel buffers
+	switch (m_shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), m_descriptorType, &texelBufferInfos[0]);
+			break;
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), m_descriptorType, &texelBufferInfos[0]);
+			builder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(2u), m_descriptorType, &texelBufferInfos[1]);
+			break;
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			builder.writeArray(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(1u), m_descriptorType, 2u, texelBufferInfos);
+			break;
+
+		default:
+			DE_FATAL("Impossible");
+	}
+
+	builder.update(m_vki, m_device);
+	return descriptorSet;
+}
+
+tcu::TestStatus TexelBufferComputeInstance::iterate (void)
+{
+	logTestPlan();
+	return testResourceAccess();
+}
+
+void TexelBufferComputeInstance::logTestPlan (void) const
+{
+	std::ostringstream msg;
+
+	msg << "Fetching 4 values from texel buffer in compute shader.\n"
+		<< "Single descriptor set. Descriptor set contains "
+			<< ((m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR) ? "single" :
+			    (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS) ? "two" :
+			    (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY) ? "an array (size 2) of" :
+			    (const char*)DE_NULL)
+		<< " descriptor(s) of type " << vk::getDescriptorTypeName(m_descriptorType) << "\n"
+		<< "Buffer view is created with a " << ((m_nonzeroViewOffset) ? ("non-zero") : ("zero")) << " offset.\n"
+		<< "Buffer format is " << vk::getFormatName(vk::mapTextureFormat(m_texelBuffers.getTextureFormat())) << ".\n";
+
+	for (int resultNdx = 0; resultNdx < 4; ++resultNdx)
+	{
+		msg << "Test sample " << resultNdx << ": fetch at position " << m_texelBuffers.getFetchPos(resultNdx);
+
+		if (m_shaderInterface != SHADER_INPUT_SINGLE_DESCRIPTOR)
+		{
+			const int srcResourceNdx = (resultNdx % 2); // ABAB source
+			msg << " from texelBuffer " << srcResourceNdx;
+		}
+
+		msg << "\n";
+	}
+
+	m_context.getTestContext().getLog()
+		<< tcu::TestLog::Message
+		<< msg.str()
+		<< tcu::TestLog::EndMessage;
+}
+
+tcu::TestStatus TexelBufferComputeInstance::testResourceAccess (void)
+{
+	const vk::Unique<vk::VkDescriptorSetLayout>		descriptorSetLayout	(createDescriptorSetLayout());
+	const vk::Unique<vk::VkDescriptorPool>			descriptorPool		(createDescriptorPool());
+	const vk::Unique<vk::VkDescriptorSet>			descriptorSet		(createDescriptorSet(*descriptorPool, *descriptorSetLayout));
+	const ComputePipeline							pipeline			(m_vki, m_device, m_context.getBinaryCollection(), 1, &descriptorSetLayout.get());
+
+	const vk::VkDescriptorSet						descriptorSets[]	= { *descriptorSet };
+	const int										numDescriptorSets	= DE_LENGTH_OF_ARRAY(descriptorSets);
+	const deUint32* const							dynamicOffsets		= DE_NULL;
+	const int										numDynamicOffsets	= 0;
+	const vk::VkBufferMemoryBarrier* const			preBarriers			= m_texelBuffers.getBufferInitBarriers();
+	const int										numPreBarriers		= m_texelBuffers.getNumTexelBuffers();
+	const vk::VkBufferMemoryBarrier* const			postBarriers		= m_result.getResultReadBarrier();
+	const int										numPostBarriers		= 1;
+
+	const ComputeCommand							compute				(m_vki,
+																		 m_device,
+																		 pipeline.getPipeline(),
+																		 pipeline.getPipelineLayout(),
+																		 tcu::UVec3(4, 1, 1),
+																		 numDescriptorSets,	descriptorSets,
+																		 numDynamicOffsets,	dynamicOffsets,
+																		 numPreBarriers,	preBarriers,
+																		 numPostBarriers,	postBarriers);
+
+	tcu::Vec4										results[4];
+	bool											anyResultSet		= false;
+	bool											allResultsOk		= true;
+
+	compute.submitAndWait(m_queueFamilyIndex, m_queue);
+	m_result.readResultContentsTo(&results);
+
+	// verify
+	for (int resultNdx = 0; resultNdx < 4; ++resultNdx)
+	{
+		const tcu::Vec4	result				= results[resultNdx];
+		const tcu::Vec4	reference			= m_texelBuffers.fetchTexelValue(resultNdx);
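+		// the texel buffers use an 8-bit normalized format here, so allow one quantization step (1/255) of error per channel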
+		const tcu::Vec4	conversionThreshold	= tcu::Vec4(1.0f / 255.0f);
+
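+		// -1 is the result buffer's initial fill value, so anything else indicates the shader wrote to it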
+		if (result != tcu::Vec4(-1.0f))
+			anyResultSet = true;
+
+		if (tcu::boolAny(tcu::greaterThan(tcu::abs(result - reference), conversionThreshold)))
+		{
+			allResultsOk = false;
+
+			m_context.getTestContext().getLog()
+				<< tcu::TestLog::Message
+				<< "Test sample " << resultNdx << ": Expected " << reference << ", got " << result
+				<< tcu::TestLog::EndMessage;
+		}
+	}
+
+	// read back and verify
+	if (allResultsOk)
+		return tcu::TestStatus::pass("Pass");
+	else if (anyResultSet)
+		return tcu::TestStatus::fail("Invalid result values");
+	else
+	{
+		m_context.getTestContext().getLog()
+			<< tcu::TestLog::Message
+			<< "Result buffer was not written to."
+			<< tcu::TestLog::EndMessage;
+		return tcu::TestStatus::fail("Result buffer was not written to");
+	}
+}
+
+class TexelBufferDescriptorCase : public QuadrantRendederCase
+{
+public:
+	enum
+	{
+		FLAG_VIEW_OFFSET = (1u << 1u),
+	};
+	// enum continues where the resource flags end
+	DE_STATIC_ASSERT((deUint32)FLAG_VIEW_OFFSET == (deUint32)RESOURCE_FLAG_LAST);
+
+								TexelBufferDescriptorCase	(tcu::TestContext&		testCtx,
+															 const char*			name,
+															 const char*			description,
+															 bool					isPrimaryCmdBuf,
+															 vk::VkDescriptorType	descriptorType,
+															 vk::VkShaderStageFlags	exitingStages,
+															 vk::VkShaderStageFlags	activeStages,
+															 ShaderInputInterface	shaderInterface,
+															 deUint32				flags);
+
+private:
+	std::string					genExtensionDeclarations	(vk::VkShaderStageFlagBits stage) const;
+	std::string					genResourceDeclarations		(vk::VkShaderStageFlagBits stage, int numUsedBindings) const;
+	std::string					genResourceAccessSource		(vk::VkShaderStageFlagBits stage) const;
+	std::string					genNoAccessSource			(void) const;
+
+	vkt::TestInstance*			createInstance				(vkt::Context& context) const;
+
+	const bool					m_isPrimaryCmdBuf;
+	const vk::VkDescriptorType	m_descriptorType;
+	const ShaderInputInterface	m_shaderInterface;
+	const bool					m_nonzeroViewOffset;
+};
+
+TexelBufferDescriptorCase::TexelBufferDescriptorCase (tcu::TestContext&			testCtx,
+													  const char*				name,
+													  const char*				description,
+													  bool						isPrimaryCmdBuf,
+													  vk::VkDescriptorType		descriptorType,
+													  vk::VkShaderStageFlags	exitingStages,
+													  vk::VkShaderStageFlags	activeStages,
+													  ShaderInputInterface		shaderInterface,
+													  deUint32					flags)
+	: QuadrantRendederCase	(testCtx, name, description, glu::GLSL_VERSION_310_ES, exitingStages, activeStages)
+	, m_isPrimaryCmdBuf		(isPrimaryCmdBuf)
+	, m_descriptorType		(descriptorType)
+	, m_shaderInterface		(shaderInterface)
+	, m_nonzeroViewOffset	(((flags & FLAG_VIEW_OFFSET) != 0) ? (1u) : (0u))
+{
+}
+
+std::string TexelBufferDescriptorCase::genExtensionDeclarations (vk::VkShaderStageFlagBits stage) const
+{
+	DE_UNREF(stage);
+	return "#extension GL_EXT_texture_buffer : require\n";
+}
+
+std::string TexelBufferDescriptorCase::genResourceDeclarations (vk::VkShaderStageFlagBits stage, int numUsedBindings) const
+{
+	DE_UNREF(stage);
+
+	const bool			isUniform		= isUniformDescriptorType(m_descriptorType);
+	const char* const	storageType		= (isUniform) ? ("samplerBuffer ") : ("readonly imageBuffer ");
+	const char* const	formatQualifier	= (isUniform) ? ("") : (", rgba8");
+
+	switch (m_shaderInterface)
+	{
+		case SHADER_INPUT_SINGLE_DESCRIPTOR:
+			return "layout(set = 0, binding = " + de::toString(numUsedBindings) + formatQualifier + ") uniform highp " + storageType + " u_texelBuffer;\n";
+
+		case SHADER_INPUT_MULTIPLE_DESCRIPTORS:
+			return "layout(set = 0, binding = " + de::toString(numUsedBindings) + formatQualifier + ") uniform highp " + storageType + " u_texelBufferA;\n"
+				   "layout(set = 0, binding = " + de::toString(numUsedBindings+1) + formatQualifier + ") uniform highp " + storageType + " u_texelBufferB;\n";
+
+		case SHADER_INPUT_DESCRIPTOR_ARRAY:
+			return "layout(set = 0, binding = " + de::toString(numUsedBindings) + formatQualifier + ") uniform highp " + storageType + " u_texelBuffer[2];\n";
+
+		default:
+			DE_FATAL("Impossible");
+			return "";
+	}
+}
+
+std::string TexelBufferDescriptorCase::genResourceAccessSource (vk::VkShaderStageFlagBits stage) const
+{
+	DE_UNREF(stage);
+
+	const char* const	accessPostfixA	= (m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR)		? ("")
+										: (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS)	? ("A")
+										: (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY)		? ("[0]")
+										: (DE_NULL);
+	const char* const	accessPostfixB	= (m_shaderInterface == SHADER_INPUT_SINGLE_DESCRIPTOR)		? ("")
+										: (m_shaderInterface == SHADER_INPUT_MULTIPLE_DESCRIPTORS)	? ("B")
+										: (m_shaderInterface == SHADER_INPUT_DESCRIPTOR_ARRAY)		? ("[1]")
+										: (DE_NULL);
+	const char* const	fetchFunc		= (isUniformDescriptorType(m_descriptorType)) ? ("texelFetch") : ("imageLoad");
+	std::ostringstream	buf;
+
+	buf << "	if (quadrant_id == 0)\n"
+		<< "		result_color = " << fetchFunc << "(u_texelBuffer" << accessPostfixA << ", " << TexelBufferInstanceBuffers::getFetchPos(0) << ");\n"
+		<< "	else if (quadrant_id == 1)\n"
+		<< "		result_color = " << fetchFunc << "(u_texelBuffer" << accessPostfixB << ", " << TexelBufferInstanceBuffers::getFetchPos(1) << ");\n"
+		<< "	else if (quadrant_id == 2)\n"
+		<< "		result_color = " << fetchFunc << "(u_texelBuffer" << accessPostfixA << ", " << TexelBufferInstanceBuffers::getFetchPos(2) << ");\n"
+		<< "	else\n"
+		<< "		result_color = " << fetchFunc << "(u_texelBuffer" << accessPostfixB << ", " << TexelBufferInstanceBuffers::getFetchPos(3) << ");\n";
+
+	return buf.str();
+}
+
+std::string TexelBufferDescriptorCase::genNoAccessSource (void) const
+{
+	return "	if (quadrant_id == 1 || quadrant_id == 2)\n"
+			"		result_color = vec4(0.0, 1.0, 0.0, 1.0);\n"
+			"	else\n"
+			"		result_color = vec4(1.0, 1.0, 0.0, 1.0);\n";
+}
+
+vkt::TestInstance* TexelBufferDescriptorCase::createInstance (vkt::Context& context) const
+{
+	if (m_exitingStages == vk::VK_SHADER_STAGE_COMPUTE_BIT)
+	{
+		DE_ASSERT(m_isPrimaryCmdBuf); // secondaries are only valid within renderpass
+		return new TexelBufferComputeInstance(context, m_descriptorType, m_shaderInterface, m_nonzeroViewOffset);
+	}
+	else
+		return new TexelBufferRenderInstance(context, m_isPrimaryCmdBuf, m_descriptorType, m_activeStages, m_shaderInterface, m_nonzeroViewOffset);
+}
+
+void createShaderAccessImageTests (tcu::TestCaseGroup*		group,
+								   bool						isPrimaryCmdBuf,
+								   vk::VkDescriptorType		descriptorType,
+								   vk::VkShaderStageFlags	exitingStages,
+								   vk::VkShaderStageFlags	activeStages,
+								   ShaderInputInterface		dimension,
+								   deUint32					resourceFlags)
+{
+	static const struct
+	{
+		vk::VkImageViewType	viewType;
+		const char*			name;
+		const char*			description;
+		deUint32			flags;
+	} s_imageTypes[] =
+	{
+		{ vk::VK_IMAGE_VIEW_TYPE_1D,			"1d",						"1D image view",								0u										},
+		{ vk::VK_IMAGE_VIEW_TYPE_1D,			"1d_base_mip",				"1D image subview with base mip level",			ImageDescriptorCase::FLAG_BASE_MIP		},
+		{ vk::VK_IMAGE_VIEW_TYPE_1D,			"1d_base_slice",			"1D image subview with base array slice",		ImageDescriptorCase::FLAG_BASE_SLICE	},
+
+		{ vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY,		"1d_array",					"1D array image view",							0u										},
+		{ vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY,		"1d_array_base_mip",		"1D array image subview with base mip level",	ImageDescriptorCase::FLAG_BASE_MIP		},
+		{ vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY,		"1d_array_base_slice",		"1D array image subview with base array slice",	ImageDescriptorCase::FLAG_BASE_SLICE	},
+
+		{ vk::VK_IMAGE_VIEW_TYPE_2D,			"2d",						"2D image view",								0u										},
+		{ vk::VK_IMAGE_VIEW_TYPE_2D,			"2d_base_mip",				"2D image subview with base mip level",			ImageDescriptorCase::FLAG_BASE_MIP		},
+		{ vk::VK_IMAGE_VIEW_TYPE_2D,			"2d_base_slice",			"2D image subview with base array slice",		ImageDescriptorCase::FLAG_BASE_SLICE	},
+
+		{ vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY,		"2d_array",					"2D array image view",							0u										},
+		{ vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY,		"2d_array_base_mip",		"2D array image subview with base mip level",	ImageDescriptorCase::FLAG_BASE_MIP		},
+		{ vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY,		"2d_array_base_slice",		"2D array image subview with base array slice",	ImageDescriptorCase::FLAG_BASE_SLICE	},
+
+		{ vk::VK_IMAGE_VIEW_TYPE_3D,			"3d",						"3D image view",								0u										},
+		{ vk::VK_IMAGE_VIEW_TYPE_3D,			"3d_base_mip",				"3D image subview with base mip level",			ImageDescriptorCase::FLAG_BASE_MIP		},
+		// no 3d array textures
+
+		{ vk::VK_IMAGE_VIEW_TYPE_CUBE,			"cube",						"Cube image view",								0u										},
+		{ vk::VK_IMAGE_VIEW_TYPE_CUBE,			"cube_base_mip",			"Cube image subview with base mip level",		ImageDescriptorCase::FLAG_BASE_MIP		},
+		{ vk::VK_IMAGE_VIEW_TYPE_CUBE,			"cube_base_slice",			"Cube image subview with base array slice",		ImageDescriptorCase::FLAG_BASE_SLICE	},
+
+		{ vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,	"cube_array",				"Cube array image view",							0u										},
+		{ vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,	"cube_array_base_mip",		"Cube array image subview with base mip level",		ImageDescriptorCase::FLAG_BASE_MIP		},
+		{ vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,	"cube_array_base_slice",	"Cube array image subview with base array slice",	ImageDescriptorCase::FLAG_BASE_SLICE	},
+	};
+
+	for (int ndx = 0; ndx < DE_LENGTH_OF_ARRAY(s_imageTypes); ++ndx)
+	{
+		// never overlap
+		DE_ASSERT((s_imageTypes[ndx].flags & resourceFlags) == 0u);
+
+		group->addChild(new ImageDescriptorCase(group->getTestContext(),
+												s_imageTypes[ndx].name,
+												s_imageTypes[ndx].description,
+												isPrimaryCmdBuf,
+												descriptorType,
+												exitingStages,
+												activeStages,
+												dimension,
+												s_imageTypes[ndx].viewType,
+												s_imageTypes[ndx].flags | resourceFlags));
+	}
+}
+
+void createShaderAccessTexelBufferTests (tcu::TestCaseGroup*	group,
+										 bool					isPrimaryCmdBuf,
+										 vk::VkDescriptorType	descriptorType,
+										 vk::VkShaderStageFlags	exitingStages,
+										 vk::VkShaderStageFlags	activeStages,
+										 ShaderInputInterface	dimension,
+										 deUint32				resourceFlags)
+{
+	DE_ASSERT(resourceFlags == 0);
+	DE_UNREF(resourceFlags);
+
+	static const struct
+	{
+		const char*	name;
+		const char*	description;
+		deUint32	flags;
+	} s_texelBufferTypes[] =
+	{
+		{ "offset_zero",		"View offset is zero",		0u											},
+		{ "offset_nonzero",		"View offset is non-zero",	TexelBufferDescriptorCase::FLAG_VIEW_OFFSET	},
+	};
+
+	for (int ndx = 0; ndx < DE_LENGTH_OF_ARRAY(s_texelBufferTypes); ++ndx)
+	{
+		group->addChild(new TexelBufferDescriptorCase(group->getTestContext(),
+													  s_texelBufferTypes[ndx].name,
+													  s_texelBufferTypes[ndx].description,
+													  isPrimaryCmdBuf,
+													  descriptorType,
+													  exitingStages,
+													  activeStages,
+													  dimension,
+													  s_texelBufferTypes[ndx].flags));
+	}
+}
+
+void createShaderAccessBufferTests (tcu::TestCaseGroup*		group,
+									bool					isPrimaryCmdBuf,
+									vk::VkDescriptorType	descriptorType,
+									vk::VkShaderStageFlags	exitingStages,
+									vk::VkShaderStageFlags	activeStages,
+									ShaderInputInterface	dimension,
+									deUint32				resourceFlags)
+{
+	DE_ASSERT(resourceFlags == 0u);
+	DE_UNREF(resourceFlags);
+
+	static const struct
+	{
+		const char*	name;
+		const char*	description;
+		bool		isForDynamicCases;
+		deUint32	flags;
+	} s_bufferTypes[] =
+	{
+		{ "offset_view_zero",						"View offset is zero",									false,	0u																							},
+		{ "offset_view_nonzero",					"View offset is non-zero",								false,	BufferDescriptorCase::FLAG_VIEW_OFFSET														},
+
+		{ "offset_view_zero_dynamic_zero",			"View offset is zero, dynamic offset is zero",			true,	BufferDescriptorCase::FLAG_DYNAMIC_OFFSET_ZERO												},
+		{ "offset_view_zero_dynamic_nonzero",		"View offset is zero, dynamic offset is non-zero",		true,	BufferDescriptorCase::FLAG_DYNAMIC_OFFSET_NONZERO											},
+		{ "offset_view_nonzero_dynamic_zero",		"View offset is non-zero, dynamic offset is zero",		true,	BufferDescriptorCase::FLAG_VIEW_OFFSET | BufferDescriptorCase::FLAG_DYNAMIC_OFFSET_ZERO		},
+		{ "offset_view_nonzero_dynamic_nonzero",	"View offset is non-zero, dynamic offset is non-zero",	true,	BufferDescriptorCase::FLAG_VIEW_OFFSET | BufferDescriptorCase::FLAG_DYNAMIC_OFFSET_NONZERO	},
+	};
+
+	const bool isDynamicCase = isDynamicDescriptorType(descriptorType);
+
+	for (int ndx = 0; ndx < DE_LENGTH_OF_ARRAY(s_bufferTypes); ++ndx)
+	{
+		if (isDynamicCase == s_bufferTypes[ndx].isForDynamicCases)
+			group->addChild(new BufferDescriptorCase(group->getTestContext(),
+													 s_bufferTypes[ndx].name,
+													 s_bufferTypes[ndx].description,
+													 isPrimaryCmdBuf,
+													 descriptorType,
+													 exitingStages,
+													 activeStages,
+													 dimension,
+													 s_bufferTypes[ndx].flags));
+	}
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createShaderAccessTests (tcu::TestContext& testCtx)
+{
+	static const struct
+	{
+		const bool	isPrimary;
+		const char*	name;
+		const char*	description;
+	} s_bindTypes[] =
+	{
+		{ true,		"primary_cmd_buf",	"Bind in primary command buffer"	},
+		{ false,	"secondary_cmd_buf",	"Bind in secondary command buffer"	},
+	};
+	static const struct
+	{
+		const vk::VkDescriptorType	descriptorType;
+		const char*					name;
+		const char*					description;
+		deUint32					flags;
+	} s_descriptorTypes[] =
+	{
+		{ vk::VK_DESCRIPTOR_TYPE_SAMPLER,					"sampler_mutable",					"VK_DESCRIPTOR_TYPE_SAMPLER with mutable sampler",					0u								},
+		{ vk::VK_DESCRIPTOR_TYPE_SAMPLER,					"sampler_immutable",				"VK_DESCRIPTOR_TYPE_SAMPLER with immutable sampler",				RESOURCE_FLAG_IMMUTABLE_SAMPLER	},
+		{ vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,	"combined_image_sampler_mutable",	"VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER with mutable sampler",	0u								},
+		{ vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,	"combined_image_sampler_immutable",	"VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER with immutable sampler",	RESOURCE_FLAG_IMMUTABLE_SAMPLER	},
+		// \note No way to access SAMPLED_IMAGE without a sampler
+//		{ vk::VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,				"sampled_image",					"VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE",									0u								},
+		{ vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,				"storage_image",					"VK_DESCRIPTOR_TYPE_STORAGE_IMAGE",									0u								},
+		{ vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,		"uniform_texel_buffer",				"VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER",							0u								},
+		{ vk::VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,		"storage_texel_buffer",				"VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER",							0u								},
+		{ vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,			"uniform_buffer",					"VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER",								0u								},
+		{ vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,			"storage_buffer",					"VK_DESCRIPTOR_TYPE_STORAGE_BUFFER",								0u								},
+		{ vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,	"uniform_buffer_dynamic",			"VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC",						0u								},
+		{ vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,	"storage_buffer_dynamic",			"VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC",						0u								},
+	};
+	static const struct
+	{
+		const char*				name;
+		const char*				description;
+		vk::VkShaderStageFlags	existingStages;				//!< stages that exist
+		vk::VkShaderStageFlags	activeStages;				//!< stages that access the resource
+		bool					supportsSecondaryCmdBufs;
+	} s_shaderStages[] =
+	{
+		{
+			"no_access",
+			"No accessing stages",
+			vk::VK_SHADER_STAGE_VERTEX_BIT | vk::VK_SHADER_STAGE_FRAGMENT_BIT,
+			0u,
+			true,
+		},
+		{
+			"vertex",
+			"Vertex stage",
+			vk::VK_SHADER_STAGE_VERTEX_BIT | vk::VK_SHADER_STAGE_FRAGMENT_BIT,
+			vk::VK_SHADER_STAGE_VERTEX_BIT,
+			true,
+		},
+		{
+			"tess_ctrl",
+			"Tessellation control stage",
+			vk::VK_SHADER_STAGE_VERTEX_BIT | vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT | vk::VK_SHADER_STAGE_FRAGMENT_BIT,
+			vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
+			true,
+		},
+		{
+			"tess_eval",
+			"Tessellation evaluation stage",
+			vk::VK_SHADER_STAGE_VERTEX_BIT | vk::VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT | vk::VK_SHADER_STAGE_FRAGMENT_BIT,
+			vk::VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
+			true,
+		},
+		{
+			"geometry",
+			"Geometry stage",
+			vk::VK_SHADER_STAGE_VERTEX_BIT | vk::VK_SHADER_STAGE_GEOMETRY_BIT | vk::VK_SHADER_STAGE_FRAGMENT_BIT,
+			vk::VK_SHADER_STAGE_GEOMETRY_BIT,
+			true,
+		},
+		{
+			"fragment",
+			"Fragment stage",
+			vk::VK_SHADER_STAGE_VERTEX_BIT | vk::VK_SHADER_STAGE_FRAGMENT_BIT,
+			vk::VK_SHADER_STAGE_FRAGMENT_BIT,
+			true,
+		},
+		{
+			"compute",
+			"Compute stage",
+			vk::VK_SHADER_STAGE_COMPUTE_BIT,
+			vk::VK_SHADER_STAGE_COMPUTE_BIT,
+			false,
+		},
+		{
+			"vertex_fragment",
+			"Vertex and fragment stages",
+			vk::VK_SHADER_STAGE_VERTEX_BIT | vk::VK_SHADER_STAGE_FRAGMENT_BIT,
+			vk::VK_SHADER_STAGE_VERTEX_BIT | vk::VK_SHADER_STAGE_FRAGMENT_BIT,
+			true,
+		},
+	};
+	static const struct
+	{
+		ShaderInputInterface	dimension;
+		const char*				name;
+		const char*				description;
+	} s_variableDimensions[] =
+	{
+		{ SHADER_INPUT_SINGLE_DESCRIPTOR,		"single_descriptor",	"Single descriptor"		},
+		{ SHADER_INPUT_MULTIPLE_DESCRIPTORS,	"multiple_descriptors",	"Multiple descriptors"	},
+		{ SHADER_INPUT_DESCRIPTOR_ARRAY,		"descriptor_array",		"Descriptor array"		},
+	};
+
+	de::MovePtr<tcu::TestCaseGroup> group(new tcu::TestCaseGroup(testCtx, "shader_access", "Access resource via descriptor in a single descriptor set"));
+
+	// .primary_cmd_buf...
+	for (int bindTypeNdx = 0; bindTypeNdx < DE_LENGTH_OF_ARRAY(s_bindTypes); ++bindTypeNdx)
+	{
+		de::MovePtr<tcu::TestCaseGroup> bindGroup(new tcu::TestCaseGroup(testCtx, s_bindTypes[bindTypeNdx].name, s_bindTypes[bindTypeNdx].description));
+
+		// .sampler, .combined_image_sampler, other resource types ...
+		for (int descriptorNdx = 0; descriptorNdx < DE_LENGTH_OF_ARRAY(s_descriptorTypes); ++descriptorNdx)
+		{
+			de::MovePtr<tcu::TestCaseGroup> typeGroup(new tcu::TestCaseGroup(testCtx, s_descriptorTypes[descriptorNdx].name, s_descriptorTypes[descriptorNdx].description));
+
+			for (int stageNdx = 0; stageNdx < DE_LENGTH_OF_ARRAY(s_shaderStages); ++stageNdx)
+			{
+				if (s_bindTypes[bindTypeNdx].isPrimary || s_shaderStages[stageNdx].supportsSecondaryCmdBufs)
+				{
+					de::MovePtr<tcu::TestCaseGroup> stageGroup(new tcu::TestCaseGroup(testCtx, s_shaderStages[stageNdx].name, s_shaderStages[stageNdx].description));
+
+					for (int dimensionNdx = 0; dimensionNdx < DE_LENGTH_OF_ARRAY(s_variableDimensions); ++dimensionNdx)
+					{
+						de::MovePtr<tcu::TestCaseGroup>	dimensionGroup(new tcu::TestCaseGroup(testCtx, s_variableDimensions[dimensionNdx].name, s_variableDimensions[dimensionNdx].description));
+						void							(*createTestsFunc)(tcu::TestCaseGroup*		group,
+																		   bool						isPrimaryCmdBuf,
+																		   vk::VkDescriptorType		descriptorType,
+																		   vk::VkShaderStageFlags	existingStages,
+																		   vk::VkShaderStageFlags	activeStages,
+																		   ShaderInputInterface		dimension,
+																		   deUint32					resourceFlags);
+
+						switch (s_descriptorTypes[descriptorNdx].descriptorType)
+						{
+							case vk::VK_DESCRIPTOR_TYPE_SAMPLER:
+							case vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+							case vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
+								createTestsFunc = createShaderAccessImageTests;
+								break;
+
+							case vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
+							case vk::VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
+								createTestsFunc = createShaderAccessTexelBufferTests;
+								break;
+
+							case vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+							case vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+							case vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
+							case vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
+								createTestsFunc = createShaderAccessBufferTests;
+								break;
+
+							default:
+								createTestsFunc = DE_NULL;
+								DE_FATAL("Impossible");
+						}
+
+						if (createTestsFunc)
+						{
+							createTestsFunc(dimensionGroup.get(),
+											s_bindTypes[bindTypeNdx].isPrimary,
+											s_descriptorTypes[descriptorNdx].descriptorType,
+											s_shaderStages[stageNdx].existingStages,
+											s_shaderStages[stageNdx].activeStages,
+											s_variableDimensions[dimensionNdx].dimension,
+											s_descriptorTypes[descriptorNdx].flags);
+						}
+						else
+							DE_FATAL("Impossible");
+
+						stageGroup->addChild(dimensionGroup.release());
+					}
+
+					typeGroup->addChild(stageGroup.release());
+				}
+			}
+
+			bindGroup->addChild(typeGroup.release());
+		}
+
+		group->addChild(bindGroup.release());
+	}
+
+	return group.release();
+}
+
+} // BindingModel
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/binding_model/vktBindingShaderAccessTests.hpp b/external/vulkancts/modules/vulkan/binding_model/vktBindingShaderAccessTests.hpp
new file mode 100644
index 0000000..7ff1b1a
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/binding_model/vktBindingShaderAccessTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTBINDINGSHADERACCESSTESTS_HPP
+#define _VKTBINDINGSHADERACCESSTESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Binding shader access tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace BindingModel
+{
+
+tcu::TestCaseGroup* createShaderAccessTests (tcu::TestContext& testCtx);
+
+} // BindingModel
+} // vkt
+
+#endif // _VKTBINDINGSHADERACCESSTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/compute/CMakeLists.txt b/external/vulkancts/modules/vulkan/compute/CMakeLists.txt
new file mode 100644
index 0000000..6460db3
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/compute/CMakeLists.txt
@@ -0,0 +1,23 @@
+include_directories(..)
+
+set(DEQP_VK_COMPUTE_SRCS
+	vktComputeTests.cpp
+	vktComputeTests.hpp
+	vktComputeBasicComputeShaderTests.cpp
+	vktComputeBasicComputeShaderTests.hpp
+	vktComputeIndirectComputeDispatchTests.cpp
+	vktComputeIndirectComputeDispatchTests.hpp
+	vktComputeShaderBuiltinVarTests.cpp
+	vktComputeShaderBuiltinVarTests.hpp
+	vktComputeTestsUtil.cpp
+	vktComputeTestsUtil.hpp
+	)
+
+set(DEQP_VK_COMPUTE_LIBS
+	deqp-vk-common
+	tcutil
+	vkutil
+	)
+
+add_library(deqp-vk-compute STATIC ${DEQP_VK_COMPUTE_SRCS})
+target_link_libraries(deqp-vk-compute ${DEQP_VK_COMPUTE_LIBS})
diff --git a/external/vulkancts/modules/vulkan/compute/vktComputeBasicComputeShaderTests.cpp b/external/vulkancts/modules/vulkan/compute/vktComputeBasicComputeShaderTests.cpp
new file mode 100644
index 0000000..c7e58fe
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/compute/vktComputeBasicComputeShaderTests.cpp
@@ -0,0 +1,2464 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Compute Shader Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktComputeBasicComputeShaderTests.hpp"
+#include "vktTestCase.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vktComputeTestsUtil.hpp"
+
+#include "vkDefs.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkPrograms.hpp"
+#include "vkRefUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkBuilderUtil.hpp"
+#include "vkTypeUtil.hpp"
+
+#include "deStringUtil.hpp"
+#include "deUniquePtr.hpp"
+#include "deRandom.hpp"
+
+#include <vector>
+
+using namespace vk;
+
+namespace vkt
+{
+namespace compute
+{
+namespace
+{
+
+template<typename T, int size>
+T multiplyComponents (const tcu::Vector<T, size>& v)
+{
+	T accum = 1;
+	for (int i = 0; i < size; ++i)
+		accum *= v[i];
+	return accum;
+}
+
+template<typename T>
+inline T squared (const T& a)
+{
+	return a * a;
+}
+
+inline VkImageCreateInfo make2DImageCreateInfo (const tcu::IVec2& imageSize, const VkImageUsageFlags usage)
+{
+	const VkImageCreateInfo imageParams =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,				// VkStructureType			sType;
+		DE_NULL,											// const void*				pNext;
+		0u,													// VkImageCreateFlags		flags;
+		VK_IMAGE_TYPE_2D,									// VkImageType				imageType;
+		VK_FORMAT_R32_UINT,									// VkFormat					format;
+		vk::makeExtent3D(imageSize.x(), imageSize.y(), 1),	// VkExtent3D				extent;
+		1u,													// deUint32					mipLevels;
+		1u,													// deUint32					arrayLayers;
+		VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits	samples;
+		VK_IMAGE_TILING_OPTIMAL,							// VkImageTiling			tiling;
+		usage,												// VkImageUsageFlags		usage;
+		VK_SHARING_MODE_EXCLUSIVE,							// VkSharingMode			sharingMode;
+		0u,													// deUint32					queueFamilyIndexCount;
+		DE_NULL,											// const deUint32*			pQueueFamilyIndices;
+		VK_IMAGE_LAYOUT_UNDEFINED,							// VkImageLayout			initialLayout;
+	};
+	return imageParams;
+}
+
+inline VkBufferImageCopy makeBufferImageCopy (const tcu::IVec2& imageSize)
+{
+	return compute::makeBufferImageCopy(vk::makeExtent3D(imageSize.x(), imageSize.y(), 1), 1u);
+}
+
+enum BufferType
+{
+	BUFFER_TYPE_UNIFORM,
+	BUFFER_TYPE_SSBO,
+};
+
+class SharedVarTest : public vkt::TestCase
+{
+public:
+						SharedVarTest	(tcu::TestContext&		testCtx,
+										 const std::string&		name,
+										 const std::string&		description,
+										 const tcu::IVec3&		localSize,
+										 const tcu::IVec3&		workSize);
+
+	void				initPrograms	(SourceCollections&		sourceCollections) const;
+	TestInstance*		createInstance	(Context&				context) const;
+
+private:
+	const tcu::IVec3	m_localSize;
+	const tcu::IVec3	m_workSize;
+};
+
+class SharedVarTestInstance : public vkt::TestInstance
+{
+public:
+									SharedVarTestInstance	(Context&			context,
+															 const tcu::IVec3&	localSize,
+															 const tcu::IVec3&	workSize);
+
+	tcu::TestStatus					iterate					(void);
+
+private:
+	const tcu::IVec3				m_localSize;
+	const tcu::IVec3				m_workSize;
+};
+
+SharedVarTest::SharedVarTest (tcu::TestContext&		testCtx,
+							  const std::string&	name,
+							  const std::string&	description,
+							  const tcu::IVec3&		localSize,
+							  const tcu::IVec3&		workSize)
+	: TestCase		(testCtx, name, description)
+	, m_localSize	(localSize)
+	, m_workSize	(workSize)
+{
+}
+
+void SharedVarTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	const int workGroupSize = multiplyComponents(m_localSize);
+	const int workGroupCount = multiplyComponents(m_workSize);
+	const int numValues = workGroupSize * workGroupCount;
+
+	std::ostringstream src;
+	src << "#version 310 es\n"
+		<< "layout (local_size_x = " << m_localSize.x() << ", local_size_y = " << m_localSize.y() << ", local_size_z = " << m_localSize.z() << ") in;\n"
+		<< "layout(binding = 0) writeonly buffer Output {\n"
+		<< "    uint values[" << numValues << "];\n"
+		<< "} sb_out;\n\n"
+		<< "shared uint offsets[" << workGroupSize << "];\n\n"
+		<< "void main (void) {\n"
+		<< "    uint localSize  = gl_WorkGroupSize.x*gl_WorkGroupSize.y*gl_WorkGroupSize.z;\n"
+		<< "    uint globalNdx  = gl_NumWorkGroups.x*gl_NumWorkGroups.y*gl_WorkGroupID.z + gl_NumWorkGroups.x*gl_WorkGroupID.y + gl_WorkGroupID.x;\n"
+		<< "    uint globalOffs = localSize*globalNdx;\n"
+		<< "    uint localOffs  = gl_WorkGroupSize.x*gl_WorkGroupSize.y*gl_LocalInvocationID.z + gl_WorkGroupSize.x*gl_LocalInvocationID.y + gl_LocalInvocationID.x;\n"
+		<< "\n"
+		<< "    offsets[localSize-localOffs-1u] = globalOffs + localOffs*localOffs;\n"
+		<< "    memoryBarrierShared();\n"
+		<< "    barrier();\n"
+		<< "    sb_out.values[globalOffs + localOffs] = offsets[localOffs];\n"
+		<< "}\n";
+
+	sourceCollections.glslSources.add("comp") << glu::ComputeSource(src.str());
+}
+
+TestInstance* SharedVarTest::createInstance (Context& context) const
+{
+	return new SharedVarTestInstance(context, m_localSize, m_workSize);
+}
+
+SharedVarTestInstance::SharedVarTestInstance (Context& context, const tcu::IVec3& localSize, const tcu::IVec3& workSize)
+	: TestInstance	(context)
+	, m_localSize	(localSize)
+	, m_workSize	(workSize)
+{
+}
+
+tcu::TestStatus SharedVarTestInstance::iterate (void)
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	Allocator&				allocator			= m_context.getDefaultAllocator();
+
+	const int workGroupSize = multiplyComponents(m_localSize);
+	const int workGroupCount = multiplyComponents(m_workSize);
+
+	// Create a buffer and host-visible memory for it
+
+	const VkDeviceSize bufferSizeBytes = sizeof(deUint32) * workGroupSize * workGroupCount;
+	const Buffer buffer(vk, device, allocator, makeBufferCreateInfo(bufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible);
+
+	// Create descriptor set
+
+	const Unique<VkDescriptorSetLayout> descriptorSetLayout(
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device));
+
+	const Unique<VkDescriptorPool> descriptorPool(
+		DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+
+	const Unique<VkDescriptorSet> descriptorSet(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
+
+	const VkDescriptorBufferInfo descriptorInfo = makeDescriptorBufferInfo(*buffer, 0ull, bufferSizeBytes);
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &descriptorInfo)
+		.update(vk, device);
+
+	// Perform the computation
+
+	const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0u));
+	const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device, *descriptorSetLayout));
+	const Unique<VkPipeline> pipeline(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
+
+	const VkBufferMemoryBarrier computeFinishBarrier = makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, *buffer, 0ull, bufferSizeBytes);
+
+	const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+	const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+	// Start recording commands
+
+	beginCommandBuffer(vk, *cmdBuffer);
+
+	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+	vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
+
+	vk.cmdDispatch(*cmdBuffer, m_workSize.x(), m_workSize.y(), m_workSize.z());
+
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &computeFinishBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+	endCommandBuffer(vk, *cmdBuffer);
+
+	// Wait for completion
+
+	submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+
+	// Validate the results
+
+	const Allocation& bufferAllocation = buffer.getAllocation();
+	invalidateMappedMemoryRange(vk, device, bufferAllocation.getMemory(), bufferAllocation.getOffset(), bufferSizeBytes);
+
+	const deUint32* bufferPtr = static_cast<deUint32*>(bufferAllocation.getHostPtr());
+
+	for (int groupNdx = 0; groupNdx < workGroupCount; ++groupNdx)
+	{
+		const int globalOffset = groupNdx * workGroupSize;
+		for (int localOffset = 0; localOffset < workGroupSize; ++localOffset)
+		{
+			const deUint32 res = bufferPtr[globalOffset + localOffset];
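+			// each invocation stores (globalOffs + localOffs*localOffs) into offsets[localSize - localOffs - 1] and then copies offsets[localOffs] out,
+			// so element localOffset should hold globalOffset plus the square of its mirrored local index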
+			const deUint32 ref = globalOffset + squared(workGroupSize - localOffset - 1);
+
+			if (res != ref)
+			{
+				std::ostringstream msg;
+				msg << "Comparison failed for Output.values[" << (globalOffset + localOffset) << "]";
+				return tcu::TestStatus::fail(msg.str());
+			}
+		}
+	}
+	return tcu::TestStatus::pass("Compute succeeded");
+}
+
+class SharedVarAtomicOpTest : public vkt::TestCase
+{
+public:
+						SharedVarAtomicOpTest	(tcu::TestContext&	testCtx,
+												 const std::string&	name,
+												 const std::string&	description,
+												 const tcu::IVec3&	localSize,
+												 const tcu::IVec3&	workSize);
+
+	void				initPrograms			(SourceCollections& sourceCollections) const;
+	TestInstance*		createInstance			(Context&			context) const;
+
+private:
+	const tcu::IVec3	m_localSize;
+	const tcu::IVec3	m_workSize;
+};
+
+class SharedVarAtomicOpTestInstance : public vkt::TestInstance
+{
+public:
+									SharedVarAtomicOpTestInstance	(Context&			context,
+																	 const tcu::IVec3&	localSize,
+																	 const tcu::IVec3&	workSize);
+
+	tcu::TestStatus					iterate							(void);
+
+private:
+	const tcu::IVec3				m_localSize;
+	const tcu::IVec3				m_workSize;
+};
+
+SharedVarAtomicOpTest::SharedVarAtomicOpTest (tcu::TestContext&		testCtx,
+											  const std::string&	name,
+											  const std::string&	description,
+											  const tcu::IVec3&		localSize,
+											  const tcu::IVec3&		workSize)
+	: TestCase		(testCtx, name, description)
+	, m_localSize	(localSize)
+	, m_workSize	(workSize)
+{
+}
+
+void SharedVarAtomicOpTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	const int workGroupSize = multiplyComponents(m_localSize);
+	const int workGroupCount = multiplyComponents(m_workSize);
+	const int numValues = workGroupSize * workGroupCount;
+
+	std::ostringstream src;
+	src << "#version 310 es\n"
+		<< "layout (local_size_x = " << m_localSize.x() << ", local_size_y = " << m_localSize.y() << ", local_size_z = " << m_localSize.z() << ") in;\n"
+		<< "layout(binding = 0) writeonly buffer Output {\n"
+		<< "    uint values[" << numValues << "];\n"
+		<< "} sb_out;\n\n"
+		<< "shared uint count;\n\n"
+		<< "void main (void) {\n"
+		<< "    uint localSize  = gl_WorkGroupSize.x*gl_WorkGroupSize.y*gl_WorkGroupSize.z;\n"
+		<< "    uint globalNdx  = gl_NumWorkGroups.x*gl_NumWorkGroups.y*gl_WorkGroupID.z + gl_NumWorkGroups.x*gl_WorkGroupID.y + gl_WorkGroupID.x;\n"
+		<< "    uint globalOffs = localSize*globalNdx;\n"
+		<< "\n"
+		<< "    count = 0u;\n"
+		<< "    memoryBarrierShared();\n"
+		<< "    barrier();\n"
+		<< "    uint oldVal = atomicAdd(count, 1u);\n"
+		<< "    sb_out.values[globalOffs+oldVal] = oldVal+1u;\n"
+		<< "}\n";
+
+	sourceCollections.glslSources.add("comp") << glu::ComputeSource(src.str());
+}
+
+TestInstance* SharedVarAtomicOpTest::createInstance (Context& context) const
+{
+	return new SharedVarAtomicOpTestInstance(context, m_localSize, m_workSize);
+}
+
+SharedVarAtomicOpTestInstance::SharedVarAtomicOpTestInstance (Context& context, const tcu::IVec3& localSize, const tcu::IVec3& workSize)
+	: TestInstance	(context)
+	, m_localSize	(localSize)
+	, m_workSize	(workSize)
+{
+}
+
+tcu::TestStatus SharedVarAtomicOpTestInstance::iterate (void)
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	Allocator&				allocator			= m_context.getDefaultAllocator();
+
+	const int workGroupSize = multiplyComponents(m_localSize);
+	const int workGroupCount = multiplyComponents(m_workSize);
+
+	// Create a buffer and host-visible memory for it
+
+	const VkDeviceSize bufferSizeBytes = sizeof(deUint32) * workGroupSize * workGroupCount;
+	const Buffer buffer(vk, device, allocator, makeBufferCreateInfo(bufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible);
+
+	// Create descriptor set
+
+	const Unique<VkDescriptorSetLayout> descriptorSetLayout(
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device));
+
+	const Unique<VkDescriptorPool> descriptorPool(
+		DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+
+	const Unique<VkDescriptorSet> descriptorSet(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
+
+	const VkDescriptorBufferInfo descriptorInfo = makeDescriptorBufferInfo(*buffer, 0ull, bufferSizeBytes);
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &descriptorInfo)
+		.update(vk, device);
+
+	// Perform the computation
+
+	const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0u));
+	const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device, *descriptorSetLayout));
+	const Unique<VkPipeline> pipeline(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
+
+	const VkBufferMemoryBarrier computeFinishBarrier = makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, *buffer, 0ull, bufferSizeBytes);
+
+	const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+	const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+	// Start recording commands
+
+	beginCommandBuffer(vk, *cmdBuffer);
+
+	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+	vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
+
+	vk.cmdDispatch(*cmdBuffer, m_workSize.x(), m_workSize.y(), m_workSize.z());
+
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &computeFinishBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+	endCommandBuffer(vk, *cmdBuffer);
+
+	// Wait for completion
+
+	submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+
+	// Validate the results
+
+	const Allocation& bufferAllocation = buffer.getAllocation();
+	invalidateMappedMemoryRange(vk, device, bufferAllocation.getMemory(), bufferAllocation.getOffset(), bufferSizeBytes);
+
+	const deUint32* bufferPtr = static_cast<deUint32*>(bufferAllocation.getHostPtr());
+
+	for (int groupNdx = 0; groupNdx < workGroupCount; ++groupNdx)
+	{
+		const int globalOffset = groupNdx * workGroupSize;
+		for (int localOffset = 0; localOffset < workGroupSize; ++localOffset)
+		{
+			const deUint32 res = bufferPtr[globalOffset + localOffset];
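+			// atomicAdd hands out old counter values 0..localSize-1 exactly once, and each invocation writes oldVal+1 to element oldVal,
+			// so element localOffset should hold localOffset + 1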
+			const deUint32 ref = localOffset + 1;
+
+			if (res != ref)
+			{
+				std::ostringstream msg;
+				msg << "Comparison failed for Output.values[" << (globalOffset + localOffset) << "]";
+				return tcu::TestStatus::fail(msg.str());
+			}
+		}
+	}
+	return tcu::TestStatus::pass("Compute succeeded");
+}
+
+class SSBOLocalBarrierTest : public vkt::TestCase
+{
+public:
+						SSBOLocalBarrierTest	(tcu::TestContext&	testCtx,
+												 const std::string& name,
+												 const std::string&	description,
+												 const tcu::IVec3&	localSize,
+												 const tcu::IVec3&	workSize);
+
+	void				initPrograms			(SourceCollections& sourceCollections) const;
+	TestInstance*		createInstance			(Context&			context) const;
+
+private:
+	const tcu::IVec3	m_localSize;
+	const tcu::IVec3	m_workSize;
+};
+
+class SSBOLocalBarrierTestInstance : public vkt::TestInstance
+{
+public:
+									SSBOLocalBarrierTestInstance	(Context&			context,
+																	 const tcu::IVec3&	localSize,
+																	 const tcu::IVec3&	workSize);
+
+	tcu::TestStatus					iterate							(void);
+
+private:
+	const tcu::IVec3				m_localSize;
+	const tcu::IVec3				m_workSize;
+};
+
+SSBOLocalBarrierTest::SSBOLocalBarrierTest (tcu::TestContext&	testCtx,
+											const std::string&	name,
+											const std::string&	description,
+											const tcu::IVec3&	localSize,
+											const tcu::IVec3&	workSize)
+	: TestCase		(testCtx, name, description)
+	, m_localSize	(localSize)
+	, m_workSize	(workSize)
+{
+}
+
+void SSBOLocalBarrierTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	const int workGroupSize = multiplyComponents(m_localSize);
+	const int workGroupCount = multiplyComponents(m_workSize);
+	const int numValues = workGroupSize * workGroupCount;
+
+	std::ostringstream src;
+	src << "#version 310 es\n"
+		<< "layout (local_size_x = " << m_localSize.x() << ", local_size_y = " << m_localSize.y() << ", local_size_z = " << m_localSize.z() << ") in;\n"
+		<< "layout(binding = 0) coherent buffer Output {\n"
+		<< "    uint values[" << numValues << "];\n"
+		<< "} sb_out;\n\n"
+		<< "void main (void) {\n"
+		<< "    uint localSize  = gl_WorkGroupSize.x*gl_WorkGroupSize.y*gl_WorkGroupSize.z;\n"
+		<< "    uint globalNdx  = gl_NumWorkGroups.x*gl_NumWorkGroups.y*gl_WorkGroupID.z + gl_NumWorkGroups.x*gl_WorkGroupID.y + gl_WorkGroupID.x;\n"
+		<< "    uint globalOffs = localSize*globalNdx;\n"
+		<< "    uint localOffs  = gl_WorkGroupSize.x*gl_WorkGroupSize.y*gl_LocalInvocationID.z + gl_WorkGroupSize.x*gl_LocalInvocationID.y + gl_LocalInvocationID.x;\n"
+		<< "\n"
+		<< "    sb_out.values[globalOffs + localOffs] = globalOffs;\n"
+		<< "    memoryBarrierBuffer();\n"
+		<< "    barrier();\n"
+		<< "    sb_out.values[globalOffs + ((localOffs+1u)%localSize)] += localOffs;\n"		// += so we read and write
+		<< "    memoryBarrierBuffer();\n"
+		<< "    barrier();\n"
+		<< "    sb_out.values[globalOffs + ((localOffs+2u)%localSize)] += localOffs;\n"
+		<< "}\n";
+
+	sourceCollections.glslSources.add("comp") << glu::ComputeSource(src.str());
+}
+
+TestInstance* SSBOLocalBarrierTest::createInstance (Context& context) const
+{
+	return new SSBOLocalBarrierTestInstance(context, m_localSize, m_workSize);
+}
+
+SSBOLocalBarrierTestInstance::SSBOLocalBarrierTestInstance (Context& context, const tcu::IVec3& localSize, const tcu::IVec3& workSize)
+	: TestInstance	(context)
+	, m_localSize	(localSize)
+	, m_workSize	(workSize)
+{
+}
+
+tcu::TestStatus SSBOLocalBarrierTestInstance::iterate (void)
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	Allocator&				allocator			= m_context.getDefaultAllocator();
+
+	const int workGroupSize = multiplyComponents(m_localSize);
+	const int workGroupCount = multiplyComponents(m_workSize);
+
+	// Create a buffer and host-visible memory for it
+
+	const VkDeviceSize bufferSizeBytes = sizeof(deUint32) * workGroupSize * workGroupCount;
+	const Buffer buffer(vk, device, allocator, makeBufferCreateInfo(bufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible);
+
+	// Create descriptor set
+
+	const Unique<VkDescriptorSetLayout> descriptorSetLayout(
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device));
+
+	const Unique<VkDescriptorPool> descriptorPool(
+		DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+
+	const Unique<VkDescriptorSet> descriptorSet(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
+
+	const VkDescriptorBufferInfo descriptorInfo = makeDescriptorBufferInfo(*buffer, 0ull, bufferSizeBytes);
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &descriptorInfo)
+		.update(vk, device);
+
+	// Perform the computation
+
+	const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0u));
+	const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device, *descriptorSetLayout));
+	const Unique<VkPipeline> pipeline(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
+
+	const VkBufferMemoryBarrier computeFinishBarrier = makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, *buffer, 0ull, bufferSizeBytes);
+
+	const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+	const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+	// Start recording commands
+
+	beginCommandBuffer(vk, *cmdBuffer);
+
+	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+	vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
+
+	vk.cmdDispatch(*cmdBuffer, m_workSize.x(), m_workSize.y(), m_workSize.z());
+
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &computeFinishBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+	endCommandBuffer(vk, *cmdBuffer);
+
+	// Wait for completion
+
+	submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+
+	// Validate the results
+
+	const Allocation& bufferAllocation = buffer.getAllocation();
+	invalidateMappedMemoryRange(vk, device, bufferAllocation.getMemory(), bufferAllocation.getOffset(), bufferSizeBytes);
+
+	const deUint32* bufferPtr = static_cast<deUint32*>(bufferAllocation.getHostPtr());
+
+	for (int groupNdx = 0; groupNdx < workGroupCount; ++groupNdx)
+	{
+		const int globalOffset = groupNdx * workGroupSize;
+		for (int localOffset = 0; localOffset < workGroupSize; ++localOffset)
+		{
+			const deUint32	res		= bufferPtr[globalOffset + localOffset];
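+			// element k is first set to globalOffs, then invocations (k-1) and (k-2) (mod localSize) add their local indices to it,
+			// hence the two wrapped offsets added to the reference below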
+			const int		offs0	= localOffset - 1 < 0 ? ((localOffset + workGroupSize - 1) % workGroupSize) : ((localOffset - 1) % workGroupSize);
+			const int		offs1	= localOffset - 2 < 0 ? ((localOffset + workGroupSize - 2) % workGroupSize) : ((localOffset - 2) % workGroupSize);
+			const deUint32	ref		= static_cast<deUint32>(globalOffset + offs0 + offs1);
+
+			if (res != ref)
+			{
+				std::ostringstream msg;
+				msg << "Comparison failed for Output.values[" << (globalOffset + localOffset) << "]";
+				return tcu::TestStatus::fail(msg.str());
+			}
+		}
+	}
+	return tcu::TestStatus::pass("Compute succeeded");
+}
+
+class CopyImageToSSBOTest : public vkt::TestCase
+{
+public:
+						CopyImageToSSBOTest		(tcu::TestContext&	testCtx,
+												 const std::string&	name,
+												 const std::string&	description,
+												 const tcu::IVec2&	localSize,
+												 const tcu::IVec2&	imageSize);
+
+	void				initPrograms			(SourceCollections& sourceCollections) const;
+	TestInstance*		createInstance			(Context&			context) const;
+
+private:
+	const tcu::IVec2	m_localSize;
+	const tcu::IVec2	m_imageSize;
+};
+
+class CopyImageToSSBOTestInstance : public vkt::TestInstance
+{
+public:
+									CopyImageToSSBOTestInstance		(Context&			context,
+																	 const tcu::IVec2&	localSize,
+																	 const tcu::IVec2&	imageSize);
+
+	tcu::TestStatus					iterate							(void);
+
+private:
+	const tcu::IVec2				m_localSize;
+	const tcu::IVec2				m_imageSize;
+};
+
+CopyImageToSSBOTest::CopyImageToSSBOTest (tcu::TestContext&		testCtx,
+										  const std::string&	name,
+										  const std::string&	description,
+										  const tcu::IVec2&		localSize,
+										  const tcu::IVec2&		imageSize)
+	: TestCase		(testCtx, name, description)
+	, m_localSize	(localSize)
+	, m_imageSize	(imageSize)
+{
+	DE_ASSERT(m_imageSize.x() % m_localSize.x() == 0);
+	DE_ASSERT(m_imageSize.y() % m_localSize.y() == 0);
+}
+
+void CopyImageToSSBOTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	std::ostringstream src;
+	src << "#version 310 es\n"
+		<< "layout (local_size_x = " << m_localSize.x() << ", local_size_y = " << m_localSize.y() << ") in;\n"
+		<< "layout(binding = 1, r32ui) readonly uniform highp uimage2D u_srcImg;\n"
+		<< "layout(binding = 0) writeonly buffer Output {\n"
+		<< "    uint values[" << (m_imageSize.x() * m_imageSize.y()) << "];\n"
+		<< "} sb_out;\n\n"
+		<< "void main (void) {\n"
+		<< "    uint stride = gl_NumWorkGroups.x*gl_WorkGroupSize.x;\n"
+		<< "    uint value  = imageLoad(u_srcImg, ivec2(gl_GlobalInvocationID.xy)).x;\n"
+		<< "    sb_out.values[gl_GlobalInvocationID.y*stride + gl_GlobalInvocationID.x] = value;\n"
+		<< "}\n";
+
+	sourceCollections.glslSources.add("comp") << glu::ComputeSource(src.str());
+}
+
+TestInstance* CopyImageToSSBOTest::createInstance (Context& context) const
+{
+	return new CopyImageToSSBOTestInstance(context, m_localSize, m_imageSize);
+}
+
+CopyImageToSSBOTestInstance::CopyImageToSSBOTestInstance (Context& context, const tcu::IVec2& localSize, const tcu::IVec2& imageSize)
+	: TestInstance	(context)
+	, m_localSize	(localSize)
+	, m_imageSize	(imageSize)
+{
+}
+
+tcu::TestStatus CopyImageToSSBOTestInstance::iterate (void)
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	Allocator&				allocator			= m_context.getDefaultAllocator();
+
+	// Create an image
+
+	const VkImageCreateInfo imageParams = make2DImageCreateInfo(m_imageSize, VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_STORAGE_BIT);
+	const Image image(vk, device, allocator, imageParams, MemoryRequirement::Any);
+
+	const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
+	const Unique<VkImageView> imageView(makeImageView(vk, device, *image, VK_IMAGE_VIEW_TYPE_2D, VK_FORMAT_R32_UINT, subresourceRange));
+
+	// Staging buffer (source data for image)
+
+	const deUint32 imageArea = multiplyComponents(m_imageSize);
+	const VkDeviceSize bufferSizeBytes = sizeof(deUint32) * imageArea;
+
+	const Buffer stagingBuffer(vk, device, allocator, makeBufferCreateInfo(bufferSizeBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT), MemoryRequirement::HostVisible);
+
+	// Populate the staging buffer with test data
+	{
+		de::Random rnd(0xab2c7);
+		const Allocation& stagingBufferAllocation = stagingBuffer.getAllocation();
+		deUint32* bufferPtr = static_cast<deUint32*>(stagingBufferAllocation.getHostPtr());
+		for (deUint32 i = 0; i < imageArea; ++i)
+			*bufferPtr++ = rnd.getUint32();
+
+		flushMappedMemoryRange(vk, device, stagingBufferAllocation.getMemory(), stagingBufferAllocation.getOffset(), bufferSizeBytes);
+	}
+
+	// Create a buffer to store shader output
+
+	const Buffer outputBuffer(vk, device, allocator, makeBufferCreateInfo(bufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible);
+
+	// Create descriptor set
+
+	const Unique<VkDescriptorSetLayout> descriptorSetLayout(
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device));
+
+	const Unique<VkDescriptorPool> descriptorPool(
+		DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+
+	const Unique<VkDescriptorSet> descriptorSet(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
+
+	// Set the bindings
+
+	const VkDescriptorImageInfo imageDescriptorInfo = makeDescriptorImageInfo(DE_NULL, *imageView, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
+	const VkDescriptorBufferInfo bufferDescriptorInfo = makeDescriptorBufferInfo(*outputBuffer, 0ull, bufferSizeBytes);
+
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferDescriptorInfo)
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &imageDescriptorInfo)
+		.update(vk, device);
+
+	// Perform the computation
+	{
+		const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0u));
+		const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device, *descriptorSetLayout));
+		const Unique<VkPipeline> pipeline(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
+
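+		// Barrier chain: host writes to the staging buffer become visible to the transfer,
+		// the image is transitioned UNDEFINED -> TRANSFER_DST for the copy, then
+		// TRANSFER_DST -> SHADER_READ_ONLY for the compute shader, and finally the
+		// shader's SSBO writes are made visible to the host for validation.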
+		const VkBufferMemoryBarrier stagingBufferPostHostWriteBarrier = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, *stagingBuffer, 0ull, bufferSizeBytes);
+
+		const VkImageMemoryBarrier imagePreCopyBarrier = makeImageMemoryBarrier(
+			0u, 0u,
+			VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+			*image, subresourceRange);
+
+		const VkImageMemoryBarrier imagePostCopyBarrier = makeImageMemoryBarrier(
+			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
+			VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+			*image, subresourceRange);
+
+		const VkBufferMemoryBarrier computeFinishBarrier = makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, *outputBuffer, 0ull, bufferSizeBytes);
+
+		const VkBufferImageCopy copyParams = makeBufferImageCopy(m_imageSize);
+		const tcu::IVec2 workSize = m_imageSize / m_localSize;
+
+		// Prepare the command buffer
+
+		const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+		const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+		// Start recording commands
+
+		beginCommandBuffer(vk, *cmdBuffer);
+
+		vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+		vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
+
+		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &stagingBufferPostHostWriteBarrier, 1, &imagePreCopyBarrier);
+		vk.cmdCopyBufferToImage(*cmdBuffer, *stagingBuffer, *image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyParams);
+		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &imagePostCopyBarrier);
+
+		vk.cmdDispatch(*cmdBuffer, workSize.x(), workSize.y(), 1u);
+		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &computeFinishBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+		endCommandBuffer(vk, *cmdBuffer);
+
+		// Wait for completion
+
+		submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+	}
+
+	// Validate the results
+
+	const Allocation& outputBufferAllocation = outputBuffer.getAllocation();
+	invalidateMappedMemoryRange(vk, device, outputBufferAllocation.getMemory(), outputBufferAllocation.getOffset(), bufferSizeBytes);
+
+	const deUint32* bufferPtr = static_cast<deUint32*>(outputBufferAllocation.getHostPtr());
+	const deUint32* refBufferPtr = static_cast<deUint32*>(stagingBuffer.getAllocation().getHostPtr());
+
+	for (deUint32 ndx = 0; ndx < imageArea; ++ndx)
+	{
+		const deUint32 res = *(bufferPtr + ndx);
+		const deUint32 ref = *(refBufferPtr + ndx);
+
+		if (res != ref)
+		{
+			std::ostringstream msg;
+			msg << "Comparison failed for Output.values[" << ndx << "]";
+			return tcu::TestStatus::fail(msg.str());
+		}
+	}
+	return tcu::TestStatus::pass("Compute succeeded");
+}
+
+class CopySSBOToImageTest : public vkt::TestCase
+{
+public:
+						CopySSBOToImageTest	(tcu::TestContext&	testCtx,
+											 const std::string&	name,
+											 const std::string&	description,
+											 const tcu::IVec2&	localSize,
+											 const tcu::IVec2&	imageSize);
+
+	void				initPrograms		(SourceCollections& sourceCollections) const;
+	TestInstance*		createInstance		(Context&			context) const;
+
+private:
+	const tcu::IVec2	m_localSize;
+	const tcu::IVec2	m_imageSize;
+};
+
+class CopySSBOToImageTestInstance : public vkt::TestInstance
+{
+public:
+									CopySSBOToImageTestInstance	(Context&			context,
+																 const tcu::IVec2&	localSize,
+																 const tcu::IVec2&	imageSize);
+
+	tcu::TestStatus					iterate						(void);
+
+private:
+	const tcu::IVec2				m_localSize;
+	const tcu::IVec2				m_imageSize;
+};
+
+CopySSBOToImageTest::CopySSBOToImageTest (tcu::TestContext&		testCtx,
+										  const std::string&	name,
+										  const std::string&	description,
+										  const tcu::IVec2&		localSize,
+										  const tcu::IVec2&		imageSize)
+	: TestCase		(testCtx, name, description)
+	, m_localSize	(localSize)
+	, m_imageSize	(imageSize)
+{
+	DE_ASSERT(m_imageSize.x() % m_localSize.x() == 0);
+	DE_ASSERT(m_imageSize.y() % m_localSize.y() == 0);
+}
+
+void CopySSBOToImageTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	std::ostringstream src;
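+	// Inverse of the copy above: each invocation reads one value from the SSBO and
+	// stores it into the r32ui image at its global invocation coordinate.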
+	src << "#version 310 es\n"
+		<< "layout (local_size_x = " << m_localSize.x() << ", local_size_y = " << m_localSize.y() << ") in;\n"
+		<< "layout(binding = 1, r32ui) writeonly uniform highp uimage2D u_dstImg;\n"
+		<< "layout(binding = 0) readonly buffer Input {\n"
+		<< "    uint values[" << (m_imageSize.x() * m_imageSize.y()) << "];\n"
+		<< "} sb_in;\n\n"
+		<< "void main (void) {\n"
+		<< "    uint stride = gl_NumWorkGroups.x*gl_WorkGroupSize.x;\n"
+		<< "    uint value  = sb_in.values[gl_GlobalInvocationID.y*stride + gl_GlobalInvocationID.x];\n"
+		<< "    imageStore(u_dstImg, ivec2(gl_GlobalInvocationID.xy), uvec4(value, 0, 0, 0));\n"
+		<< "}\n";
+
+	sourceCollections.glslSources.add("comp") << glu::ComputeSource(src.str());
+}
+
+TestInstance* CopySSBOToImageTest::createInstance (Context& context) const
+{
+	return new CopySSBOToImageTestInstance(context, m_localSize, m_imageSize);
+}
+
+CopySSBOToImageTestInstance::CopySSBOToImageTestInstance (Context& context, const tcu::IVec2& localSize, const tcu::IVec2& imageSize)
+	: TestInstance	(context)
+	, m_localSize	(localSize)
+	, m_imageSize	(imageSize)
+{
+}
+
+tcu::TestStatus CopySSBOToImageTestInstance::iterate (void)
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	Allocator&				allocator			= m_context.getDefaultAllocator();
+
+	// Create an image
+
+	const VkImageCreateInfo imageParams = make2DImageCreateInfo(m_imageSize, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_STORAGE_BIT);
+	const Image image(vk, device, allocator, imageParams, MemoryRequirement::Any);
+
+	const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
+	const Unique<VkImageView> imageView(makeImageView(vk, device, *image, VK_IMAGE_VIEW_TYPE_2D, VK_FORMAT_R32_UINT, subresourceRange));
+
+	// Create an input buffer (data to be read in the shader)
+
+	const deUint32 imageArea = multiplyComponents(m_imageSize);
+	const VkDeviceSize bufferSizeBytes = sizeof(deUint32) * imageArea;
+
+	const Buffer inputBuffer(vk, device, allocator, makeBufferCreateInfo(bufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible);
+
+	// Populate the buffer with test data
+	{
+		de::Random rnd(0x77238ac2);
+		const Allocation& inputBufferAllocation = inputBuffer.getAllocation();
+		deUint32* bufferPtr = static_cast<deUint32*>(inputBufferAllocation.getHostPtr());
+		for (deUint32 i = 0; i < imageArea; ++i)
+			*bufferPtr++ = rnd.getUint32();
+
+		flushMappedMemoryRange(vk, device, inputBufferAllocation.getMemory(), inputBufferAllocation.getOffset(), bufferSizeBytes);
+	}
+
+	// Create a buffer to store shader output (copied from image data)
+
+	const Buffer outputBuffer(vk, device, allocator, makeBufferCreateInfo(bufferSizeBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
+
+	// Create descriptor set
+
+	const Unique<VkDescriptorSetLayout> descriptorSetLayout(
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device));
+
+	const Unique<VkDescriptorPool> descriptorPool(
+		DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+
+	const Unique<VkDescriptorSet> descriptorSet(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
+
+	// Set the bindings
+
+	const VkDescriptorImageInfo imageDescriptorInfo = makeDescriptorImageInfo(DE_NULL, *imageView, VK_IMAGE_LAYOUT_GENERAL);
+	const VkDescriptorBufferInfo bufferDescriptorInfo = makeDescriptorBufferInfo(*inputBuffer, 0ull, bufferSizeBytes);
+
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferDescriptorInfo)
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &imageDescriptorInfo)
+		.update(vk, device);
+
+	// Perform the computation
+	{
+		const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0u));
+		const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device, *descriptorSetLayout));
+		const Unique<VkPipeline> pipeline(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
+
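+		// Barrier chain: host writes to the input SSBO become visible to the compute shader,
+		// the image is transitioned UNDEFINED -> GENERAL for imageStore, then
+		// GENERAL -> TRANSFER_SRC for the copy back to the output buffer, and the copied
+		// data is made visible to the host for validation.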
+		const VkBufferMemoryBarrier inputBufferPostHostWriteBarrier = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, *inputBuffer, 0ull, bufferSizeBytes);
+
+		const VkImageMemoryBarrier imageLayoutBarrier = makeImageMemoryBarrier(
+			0u, 0u,
+			VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
+			*image, subresourceRange);
+
+		const VkImageMemoryBarrier imagePreCopyBarrier = makeImageMemoryBarrier(
+			VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
+			VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+			*image, subresourceRange);
+
+		const VkBufferMemoryBarrier outputBufferPostCopyBarrier = makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, *outputBuffer, 0ull, bufferSizeBytes);
+
+		const VkBufferImageCopy copyParams = makeBufferImageCopy(m_imageSize);
+		const tcu::IVec2 workSize = m_imageSize / m_localSize;
+
+		// Prepare the command buffer
+
+		const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+		const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+		// Start recording commands
+
+		beginCommandBuffer(vk, *cmdBuffer);
+
+		vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+		vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
+
+		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &inputBufferPostHostWriteBarrier, 1, &imageLayoutBarrier);
+		vk.cmdDispatch(*cmdBuffer, workSize.x(), workSize.y(), 1u);
+
+		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &imagePreCopyBarrier);
+		vk.cmdCopyImageToBuffer(*cmdBuffer, *image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *outputBuffer, 1u, &copyParams);
+		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &outputBufferPostCopyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+		endCommandBuffer(vk, *cmdBuffer);
+
+		// Wait for completion
+
+		submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+	}
+
+	// Validate the results
+
+	const Allocation& outputBufferAllocation = outputBuffer.getAllocation();
+	invalidateMappedMemoryRange(vk, device, outputBufferAllocation.getMemory(), outputBufferAllocation.getOffset(), bufferSizeBytes);
+
+	const deUint32* bufferPtr = static_cast<deUint32*>(outputBufferAllocation.getHostPtr());
+	const deUint32* refBufferPtr = static_cast<deUint32*>(inputBuffer.getAllocation().getHostPtr());
+
+	for (deUint32 ndx = 0; ndx < imageArea; ++ndx)
+	{
+		const deUint32 res = *(bufferPtr + ndx);
+		const deUint32 ref = *(refBufferPtr + ndx);
+
+		if (res != ref)
+		{
+			std::ostringstream msg;
+			msg << "Comparison failed for pixel " << ndx;
+			return tcu::TestStatus::fail(msg.str());
+		}
+	}
+	return tcu::TestStatus::pass("Compute succeeded");
+}
+
+class BufferToBufferInvertTest : public vkt::TestCase
+{
+public:
+	void								initPrograms				(SourceCollections&	sourceCollections) const;
+	TestInstance*						createInstance				(Context&			context) const;
+
+	static BufferToBufferInvertTest*	UBOToSSBOInvertCase			(tcu::TestContext&	testCtx,
+																	 const std::string& name,
+																	 const std::string& description,
+																	 const deUint32		numValues,
+																	 const tcu::IVec3&	localSize,
+																	 const tcu::IVec3&	workSize);
+
+	static BufferToBufferInvertTest*	CopyInvertSSBOCase			(tcu::TestContext&	testCtx,
+																	 const std::string& name,
+																	 const std::string& description,
+																	 const deUint32		numValues,
+																	 const tcu::IVec3&	localSize,
+																	 const tcu::IVec3&	workSize);
+
+private:
+										BufferToBufferInvertTest	(tcu::TestContext&	testCtx,
+																	 const std::string& name,
+																	 const std::string& description,
+																	 const deUint32		numValues,
+																	 const tcu::IVec3&	localSize,
+																	 const tcu::IVec3&	workSize,
+																	 const BufferType	bufferType);
+
+	const BufferType					m_bufferType;
+	const deUint32						m_numValues;
+	const tcu::IVec3					m_localSize;
+	const tcu::IVec3					m_workSize;
+};
+
+class BufferToBufferInvertTestInstance : public vkt::TestInstance
+{
+public:
+									BufferToBufferInvertTestInstance	(Context&			context,
+																		 const deUint32		numValues,
+																		 const tcu::IVec3&	localSize,
+																		 const tcu::IVec3&	workSize,
+																		 const BufferType	bufferType);
+
+	tcu::TestStatus					iterate								(void);
+
+private:
+	const BufferType				m_bufferType;
+	const deUint32					m_numValues;
+	const tcu::IVec3				m_localSize;
+	const tcu::IVec3				m_workSize;
+};
+
+BufferToBufferInvertTest::BufferToBufferInvertTest (tcu::TestContext&	testCtx,
+													const std::string&	name,
+													const std::string&	description,
+													const deUint32		numValues,
+													const tcu::IVec3&	localSize,
+													const tcu::IVec3&	workSize,
+													const BufferType	bufferType)
+	: TestCase		(testCtx, name, description)
+	, m_bufferType	(bufferType)
+	, m_numValues	(numValues)
+	, m_localSize	(localSize)
+	, m_workSize	(workSize)
+{
+	DE_ASSERT(m_numValues % (multiplyComponents(m_workSize) * multiplyComponents(m_localSize)) == 0);
+	DE_ASSERT(m_bufferType == BUFFER_TYPE_UNIFORM || m_bufferType == BUFFER_TYPE_SSBO);
+}
+
+BufferToBufferInvertTest* BufferToBufferInvertTest::UBOToSSBOInvertCase (tcu::TestContext&	testCtx,
+																		 const std::string&	name,
+																		 const std::string&	description,
+																		 const deUint32		numValues,
+																		 const tcu::IVec3&	localSize,
+																		 const tcu::IVec3&	workSize)
+{
+	return new BufferToBufferInvertTest(testCtx, name, description, numValues, localSize, workSize, BUFFER_TYPE_UNIFORM);
+}
+
+BufferToBufferInvertTest* BufferToBufferInvertTest::CopyInvertSSBOCase (tcu::TestContext&	testCtx,
+																		const std::string&	name,
+																		const std::string&	description,
+																		const deUint32		numValues,
+																		const tcu::IVec3&	localSize,
+																		const tcu::IVec3&	workSize)
+{
+	return new BufferToBufferInvertTest(testCtx, name, description, numValues, localSize, workSize, BUFFER_TYPE_SSBO);
+}
+
+void BufferToBufferInvertTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	std::ostringstream src;
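+	// Two variants of the same kernel: read from a UBO or from an SSBO, and write the
+	// bitwise complement of each value to the output SSBO. Every invocation handles a
+	// contiguous range of numValuesPerInv values.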
+	if (m_bufferType == BUFFER_TYPE_UNIFORM)
+	{
+		src << "#version 310 es\n"
+			<< "layout (local_size_x = " << m_localSize.x() << ", local_size_y = " << m_localSize.y() << ", local_size_z = " << m_localSize.z() << ") in;\n"
+			<< "layout(binding = 0) readonly uniform Input {\n"
+			<< "    uint values[" << m_numValues << "];\n"
+			<< "} ub_in;\n"
+			<< "layout(binding = 1, std140) writeonly buffer Output {\n"
+			<< "    uint values[" << m_numValues << "];\n"
+			<< "} sb_out;\n"
+			<< "void main (void) {\n"
+			<< "    uvec3 size           = gl_NumWorkGroups * gl_WorkGroupSize;\n"
+			<< "    uint numValuesPerInv = uint(ub_in.values.length()) / (size.x*size.y*size.z);\n"
+			<< "    uint groupNdx        = size.x*size.y*gl_GlobalInvocationID.z + size.x*gl_GlobalInvocationID.y + gl_GlobalInvocationID.x;\n"
+			<< "    uint offset          = numValuesPerInv*groupNdx;\n"
+			<< "\n"
+			<< "    for (uint ndx = 0u; ndx < numValuesPerInv; ndx++)\n"
+			<< "        sb_out.values[offset + ndx] = ~ub_in.values[offset + ndx];\n"
+			<< "}\n";
+	}
+	else if (m_bufferType == BUFFER_TYPE_SSBO)
+	{
+		src << "#version 310 es\n"
+			<< "layout (local_size_x = " << m_localSize.x() << ", local_size_y = " << m_localSize.y() << ", local_size_z = " << m_localSize.z() << ") in;\n"
+			<< "layout(binding = 0, std140) readonly buffer Input {\n"
+			<< "    uint values[" << m_numValues << "];\n"
+			<< "} sb_in;\n"
+			<< "layout (binding = 1, std140) writeonly buffer Output {\n"
+			<< "    uint values[" << m_numValues << "];\n"
+			<< "} sb_out;\n"
+			<< "void main (void) {\n"
+			<< "    uvec3 size           = gl_NumWorkGroups * gl_WorkGroupSize;\n"
+			<< "    uint numValuesPerInv = uint(sb_in.values.length()) / (size.x*size.y*size.z);\n"
+			<< "    uint groupNdx        = size.x*size.y*gl_GlobalInvocationID.z + size.x*gl_GlobalInvocationID.y + gl_GlobalInvocationID.x;\n"
+			<< "    uint offset          = numValuesPerInv*groupNdx;\n"
+			<< "\n"
+			<< "    for (uint ndx = 0u; ndx < numValuesPerInv; ndx++)\n"
+			<< "        sb_out.values[offset + ndx] = ~sb_in.values[offset + ndx];\n"
+			<< "}\n";
+	}
+
+	sourceCollections.glslSources.add("comp") << glu::ComputeSource(src.str());
+}
+
+TestInstance* BufferToBufferInvertTest::createInstance (Context& context) const
+{
+	return new BufferToBufferInvertTestInstance(context, m_numValues, m_localSize, m_workSize, m_bufferType);
+}
+
+BufferToBufferInvertTestInstance::BufferToBufferInvertTestInstance (Context&			context,
+																	const deUint32		numValues,
+																	const tcu::IVec3&	localSize,
+																	const tcu::IVec3&	workSize,
+																	const BufferType	bufferType)
+	: TestInstance	(context)
+	, m_bufferType	(bufferType)
+	, m_numValues	(numValues)
+	, m_localSize	(localSize)
+	, m_workSize	(workSize)
+{
+}
+
+tcu::TestStatus BufferToBufferInvertTestInstance::iterate (void)
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	Allocator&				allocator			= m_context.getDefaultAllocator();
+
+	// Customize the test based on buffer type
+
+	const VkBufferUsageFlags inputBufferUsageFlags		= (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
+	const VkDescriptorType inputBufferDescriptorType	= (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER : VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
+	const deUint32 randomSeed							= (m_bufferType == BUFFER_TYPE_UNIFORM ? 0x111223f : 0x124fef);
+
+	// Create an input buffer
+
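+	// std140 layout gives the uint arrays a 16-byte element stride, so the host mirrors
+	// each element as a tcu::UVec4 and only the .x component carries data.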
+	const VkDeviceSize bufferSizeBytes = sizeof(tcu::UVec4) * m_numValues;
+	const Buffer inputBuffer(vk, device, allocator, makeBufferCreateInfo(bufferSizeBytes, inputBufferUsageFlags), MemoryRequirement::HostVisible);
+
+	// Fill the input buffer with data
+	{
+		de::Random rnd(randomSeed);
+		const Allocation& inputBufferAllocation = inputBuffer.getAllocation();
+		tcu::UVec4* bufferPtr = static_cast<tcu::UVec4*>(inputBufferAllocation.getHostPtr());
+		for (deUint32 i = 0; i < m_numValues; ++i)
+			bufferPtr[i].x() = rnd.getUint32();
+
+		flushMappedMemoryRange(vk, device, inputBufferAllocation.getMemory(), inputBufferAllocation.getOffset(), bufferSizeBytes);
+	}
+
+	// Create an output buffer
+
+	const Buffer outputBuffer(vk, device, allocator, makeBufferCreateInfo(bufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible);
+
+	// Create descriptor set
+
+	const Unique<VkDescriptorSetLayout> descriptorSetLayout(
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(inputBufferDescriptorType, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device));
+
+	const Unique<VkDescriptorPool> descriptorPool(
+		DescriptorPoolBuilder()
+		.addType(inputBufferDescriptorType)
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+
+	const Unique<VkDescriptorSet> descriptorSet(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
+
+	const VkDescriptorBufferInfo inputBufferDescriptorInfo = makeDescriptorBufferInfo(*inputBuffer, 0ull, bufferSizeBytes);
+	const VkDescriptorBufferInfo outputBufferDescriptorInfo = makeDescriptorBufferInfo(*outputBuffer, 0ull, bufferSizeBytes);
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), inputBufferDescriptorType, &inputBufferDescriptorInfo)
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outputBufferDescriptorInfo)
+		.update(vk, device);
+
+	// Perform the computation
+
+	const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0u));
+	const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device, *descriptorSetLayout));
+	const Unique<VkPipeline> pipeline(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
+
+	const VkBufferMemoryBarrier hostWriteBarrier = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, *inputBuffer, 0ull, bufferSizeBytes);
+
+	const VkBufferMemoryBarrier shaderWriteBarrier = makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, *outputBuffer, 0ull, bufferSizeBytes);
+
+	const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+	const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+	// Start recording commands
+
+	beginCommandBuffer(vk, *cmdBuffer);
+
+	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+	vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
+
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &hostWriteBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+	vk.cmdDispatch(*cmdBuffer, m_workSize.x(), m_workSize.y(), m_workSize.z());
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &shaderWriteBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+	endCommandBuffer(vk, *cmdBuffer);
+
+	// Wait for completion
+
+	submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+
+	// Validate the results
+
+	const Allocation& outputBufferAllocation = outputBuffer.getAllocation();
+	invalidateMappedMemoryRange(vk, device, outputBufferAllocation.getMemory(), outputBufferAllocation.getOffset(), bufferSizeBytes);
+
+	const tcu::UVec4* bufferPtr = static_cast<tcu::UVec4*>(outputBufferAllocation.getHostPtr());
+	const tcu::UVec4* refBufferPtr = static_cast<tcu::UVec4*>(inputBuffer.getAllocation().getHostPtr());
+
+	for (deUint32 ndx = 0; ndx < m_numValues; ++ndx)
+	{
+		const deUint32 res = bufferPtr[ndx].x();
+		const deUint32 ref = ~refBufferPtr[ndx].x();
+
+		if (res != ref)
+		{
+			std::ostringstream msg;
+			msg << "Comparison failed for Output.values[" << ndx << "]";
+			return tcu::TestStatus::fail(msg.str());
+		}
+	}
+	return tcu::TestStatus::pass("Compute succeeded");
+}
+
+class InvertSSBOInPlaceTest : public vkt::TestCase
+{
+public:
+						InvertSSBOInPlaceTest	(tcu::TestContext&	testCtx,
+												 const std::string&	name,
+												 const std::string&	description,
+												 const deUint32		numValues,
+												 const bool			sized,
+												 const tcu::IVec3&	localSize,
+												 const tcu::IVec3&	workSize);
+
+	void				initPrograms			(SourceCollections& sourceCollections) const;
+	TestInstance*		createInstance			(Context&			context) const;
+
+private:
+	const deUint32		m_numValues;
+	const bool			m_sized;
+	const tcu::IVec3	m_localSize;
+	const tcu::IVec3	m_workSize;
+};
+
+class InvertSSBOInPlaceTestInstance : public vkt::TestInstance
+{
+public:
+									InvertSSBOInPlaceTestInstance	(Context&			context,
+																	 const deUint32		numValues,
+																	 const tcu::IVec3&	localSize,
+																	 const tcu::IVec3&	workSize);
+
+	tcu::TestStatus					iterate							(void);
+
+private:
+	const deUint32					m_numValues;
+	const tcu::IVec3				m_localSize;
+	const tcu::IVec3				m_workSize;
+};
+
+InvertSSBOInPlaceTest::InvertSSBOInPlaceTest (tcu::TestContext&		testCtx,
+											  const std::string&	name,
+											  const std::string&	description,
+											  const deUint32		numValues,
+											  const bool			sized,
+											  const tcu::IVec3&		localSize,
+											  const tcu::IVec3&		workSize)
+	: TestCase		(testCtx, name, description)
+	, m_numValues	(numValues)
+	, m_sized		(sized)
+	, m_localSize	(localSize)
+	, m_workSize	(workSize)
+{
+	DE_ASSERT(m_numValues % (multiplyComponents(m_workSize) * multiplyComponents(m_localSize)) == 0);
+}
+
+void InvertSSBOInPlaceTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	std::ostringstream src;
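+	// When m_sized is false the array is declared unsized; values.length() is then
+	// derived from the buffer range bound to the descriptor.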
+	src << "#version 310 es\n"
+		<< "layout (local_size_x = " << m_localSize.x() << ", local_size_y = " << m_localSize.y() << ", local_size_z = " << m_localSize.z() << ") in;\n"
+		<< "layout(binding = 0) buffer InOut {\n"
+		<< "    uint values[" << (m_sized ? de::toString(m_numValues) : "") << "];\n"
+		<< "} sb_inout;\n"
+		<< "void main (void) {\n"
+		<< "    uvec3 size           = gl_NumWorkGroups * gl_WorkGroupSize;\n"
+		<< "    uint numValuesPerInv = uint(sb_inout.values.length()) / (size.x*size.y*size.z);\n"
+		<< "    uint groupNdx        = size.x*size.y*gl_GlobalInvocationID.z + size.x*gl_GlobalInvocationID.y + gl_GlobalInvocationID.x;\n"
+		<< "    uint offset          = numValuesPerInv*groupNdx;\n"
+		<< "\n"
+		<< "    for (uint ndx = 0u; ndx < numValuesPerInv; ndx++)\n"
+		<< "        sb_inout.values[offset + ndx] = ~sb_inout.values[offset + ndx];\n"
+		<< "}\n";
+
+	sourceCollections.glslSources.add("comp") << glu::ComputeSource(src.str());
+}
+
+TestInstance* InvertSSBOInPlaceTest::createInstance (Context& context) const
+{
+	return new InvertSSBOInPlaceTestInstance(context, m_numValues, m_localSize, m_workSize);
+}
+
+InvertSSBOInPlaceTestInstance::InvertSSBOInPlaceTestInstance (Context&			context,
+															  const deUint32	numValues,
+															  const tcu::IVec3&	localSize,
+															  const tcu::IVec3&	workSize)
+	: TestInstance	(context)
+	, m_numValues	(numValues)
+	, m_localSize	(localSize)
+	, m_workSize	(workSize)
+{
+}
+
+tcu::TestStatus InvertSSBOInPlaceTestInstance::iterate (void)
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	Allocator&				allocator			= m_context.getDefaultAllocator();
+
+	// Create an input/output buffer
+
+	const VkDeviceSize bufferSizeBytes = sizeof(deUint32) * m_numValues;
+	const Buffer buffer(vk, device, allocator, makeBufferCreateInfo(bufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible);
+
+	// Fill the buffer with data
+
+	typedef std::vector<deUint32> data_vector_t;
+	data_vector_t inputData(m_numValues);
+
+	{
+		de::Random rnd(0x82ce7f);
+		const Allocation& bufferAllocation = buffer.getAllocation();
+		deUint32* bufferPtr = static_cast<deUint32*>(bufferAllocation.getHostPtr());
+		for (deUint32 i = 0; i < m_numValues; ++i)
+			inputData[i] = *bufferPtr++ = rnd.getUint32();
+
+		flushMappedMemoryRange(vk, device, bufferAllocation.getMemory(), bufferAllocation.getOffset(), bufferSizeBytes);
+	}
+
+	// Create descriptor set
+
+	const Unique<VkDescriptorSetLayout> descriptorSetLayout(
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device));
+
+	const Unique<VkDescriptorPool> descriptorPool(
+		DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+
+	const Unique<VkDescriptorSet> descriptorSet(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
+
+	const VkDescriptorBufferInfo bufferDescriptorInfo = makeDescriptorBufferInfo(*buffer, 0ull, bufferSizeBytes);
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferDescriptorInfo)
+		.update(vk, device);
+
+	// Perform the computation
+
+	const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0u));
+	const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device, *descriptorSetLayout));
+	const Unique<VkPipeline> pipeline(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
+
+	const VkBufferMemoryBarrier hostWriteBarrier = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, *buffer, 0ull, bufferSizeBytes);
+
+	const VkBufferMemoryBarrier shaderWriteBarrier = makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, *buffer, 0ull, bufferSizeBytes);
+
+	const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+	const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+	// Start recording commands
+
+	beginCommandBuffer(vk, *cmdBuffer);
+
+	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+	vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
+
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &hostWriteBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+	vk.cmdDispatch(*cmdBuffer, m_workSize.x(), m_workSize.y(), m_workSize.z());
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &shaderWriteBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+	endCommandBuffer(vk, *cmdBuffer);
+
+	// Wait for completion
+
+	submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+
+	// Validate the results
+
+	const Allocation& bufferAllocation = buffer.getAllocation();
+	invalidateMappedMemoryRange(vk, device, bufferAllocation.getMemory(), bufferAllocation.getOffset(), bufferSizeBytes);
+
+	const deUint32* bufferPtr = static_cast<deUint32*>(bufferAllocation.getHostPtr());
+
+	for (deUint32 ndx = 0; ndx < m_numValues; ++ndx)
+	{
+		const deUint32 res = bufferPtr[ndx];
+		const deUint32 ref = ~inputData[ndx];
+
+		if (res != ref)
+		{
+			std::ostringstream msg;
+			msg << "Comparison failed for InOut.values[" << ndx << "]";
+			return tcu::TestStatus::fail(msg.str());
+		}
+	}
+	return tcu::TestStatus::pass("Compute succeeded");
+}
+
+class WriteToMultipleSSBOTest : public vkt::TestCase
+{
+public:
+						WriteToMultipleSSBOTest	(tcu::TestContext&	testCtx,
+												 const std::string&	name,
+												 const std::string&	description,
+												 const deUint32		numValues,
+												 const bool			sized,
+												 const tcu::IVec3&	localSize,
+												 const tcu::IVec3&	workSize);
+
+	void				initPrograms			(SourceCollections& sourceCollections) const;
+	TestInstance*		createInstance			(Context&			context) const;
+
+private:
+	const deUint32		m_numValues;
+	const bool			m_sized;
+	const tcu::IVec3	m_localSize;
+	const tcu::IVec3	m_workSize;
+};
+
+class WriteToMultipleSSBOTestInstance : public vkt::TestInstance
+{
+public:
+									WriteToMultipleSSBOTestInstance	(Context&			context,
+																	 const deUint32		numValues,
+																	 const tcu::IVec3&	localSize,
+																	 const tcu::IVec3&	workSize);
+
+	tcu::TestStatus					iterate							(void);
+
+private:
+	const deUint32					m_numValues;
+	const tcu::IVec3				m_localSize;
+	const tcu::IVec3				m_workSize;
+};
+
+WriteToMultipleSSBOTest::WriteToMultipleSSBOTest (tcu::TestContext&		testCtx,
+												  const std::string&	name,
+												  const std::string&	description,
+												  const deUint32		numValues,
+												  const bool			sized,
+												  const tcu::IVec3&		localSize,
+												  const tcu::IVec3&		workSize)
+	: TestCase		(testCtx, name, description)
+	, m_numValues	(numValues)
+	, m_sized		(sized)
+	, m_localSize	(localSize)
+	, m_workSize	(workSize)
+{
+	DE_ASSERT(m_numValues % (multiplyComponents(m_workSize) * multiplyComponents(m_localSize)) == 0);
+}
+
+void WriteToMultipleSSBOTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	std::ostringstream src;
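+	// Each invocation writes ascending indices (offset + ndx) to Out0 and the mirrored
+	// values (length - offset - ndx) to Out1, exercising two SSBO bindings in one dispatch.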
+	src << "#version 310 es\n"
+		<< "layout (local_size_x = " << m_localSize.x() << ", local_size_y = " << m_localSize.y() << ", local_size_z = " << m_localSize.z() << ") in;\n"
+		<< "layout(binding = 0) writeonly buffer Out0 {\n"
+		<< "    uint values[" << (m_sized ? de::toString(m_numValues) : "") << "];\n"
+		<< "} sb_out0;\n"
+		<< "layout(binding = 1) writeonly buffer Out1 {\n"
+		<< "    uint values[" << (m_sized ? de::toString(m_numValues) : "") << "];\n"
+		<< "} sb_out1;\n"
+		<< "void main (void) {\n"
+		<< "    uvec3 size      = gl_NumWorkGroups * gl_WorkGroupSize;\n"
+		<< "    uint groupNdx   = size.x*size.y*gl_GlobalInvocationID.z + size.x*gl_GlobalInvocationID.y + gl_GlobalInvocationID.x;\n"
+		<< "\n"
+		<< "    {\n"
+		<< "        uint numValuesPerInv = uint(sb_out0.values.length()) / (size.x*size.y*size.z);\n"
+		<< "        uint offset          = numValuesPerInv*groupNdx;\n"
+		<< "\n"
+		<< "        for (uint ndx = 0u; ndx < numValuesPerInv; ndx++)\n"
+		<< "            sb_out0.values[offset + ndx] = offset + ndx;\n"
+		<< "    }\n"
+		<< "    {\n"
+		<< "        uint numValuesPerInv = uint(sb_out1.values.length()) / (size.x*size.y*size.z);\n"
+		<< "        uint offset          = numValuesPerInv*groupNdx;\n"
+		<< "\n"
+		<< "        for (uint ndx = 0u; ndx < numValuesPerInv; ndx++)\n"
+		<< "            sb_out1.values[offset + ndx] = uint(sb_out1.values.length()) - offset - ndx;\n"
+		<< "    }\n"
+		<< "}\n";
+
+	sourceCollections.glslSources.add("comp") << glu::ComputeSource(src.str());
+}
+
+TestInstance* WriteToMultipleSSBOTest::createInstance (Context& context) const
+{
+	return new WriteToMultipleSSBOTestInstance(context, m_numValues, m_localSize, m_workSize);
+}
+
+WriteToMultipleSSBOTestInstance::WriteToMultipleSSBOTestInstance (Context&			context,
+																  const deUint32	numValues,
+																  const tcu::IVec3&	localSize,
+																  const tcu::IVec3&	workSize)
+	: TestInstance	(context)
+	, m_numValues	(numValues)
+	, m_localSize	(localSize)
+	, m_workSize	(workSize)
+{
+}
+
+tcu::TestStatus WriteToMultipleSSBOTestInstance::iterate (void)
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	Allocator&				allocator			= m_context.getDefaultAllocator();
+
+	// Create two output buffers
+
+	const VkDeviceSize bufferSizeBytes = sizeof(deUint32) * m_numValues;
+	const Buffer buffer0(vk, device, allocator, makeBufferCreateInfo(bufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible);
+	const Buffer buffer1(vk, device, allocator, makeBufferCreateInfo(bufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible);
+
+	// Create descriptor set
+
+	const Unique<VkDescriptorSetLayout> descriptorSetLayout(
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device));
+
+	const Unique<VkDescriptorPool> descriptorPool(
+		DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 2u)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+
+	const Unique<VkDescriptorSet> descriptorSet(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
+
+	const VkDescriptorBufferInfo buffer0DescriptorInfo = makeDescriptorBufferInfo(*buffer0, 0ull, bufferSizeBytes);
+	const VkDescriptorBufferInfo buffer1DescriptorInfo = makeDescriptorBufferInfo(*buffer1, 0ull, bufferSizeBytes);
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &buffer0DescriptorInfo)
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &buffer1DescriptorInfo)
+		.update(vk, device);
+
+	// Perform the computation
+
+	const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0u));
+	const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device, *descriptorSetLayout));
+	const Unique<VkPipeline> pipeline(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
+
+	const VkBufferMemoryBarrier shaderWriteBarriers[] =
+	{
+		makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, *buffer0, 0ull, bufferSizeBytes),
+		makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, *buffer1, 0ull, bufferSizeBytes)
+	};
+
+	const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+	const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+	// Start recording commands
+
+	beginCommandBuffer(vk, *cmdBuffer);
+
+	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+	vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
+
+	vk.cmdDispatch(*cmdBuffer, m_workSize.x(), m_workSize.y(), m_workSize.z());
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, DE_LENGTH_OF_ARRAY(shaderWriteBarriers), shaderWriteBarriers, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+	endCommandBuffer(vk, *cmdBuffer);
+
+	// Wait for completion
+
+	submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+
+	// Validate the results
+	{
+		const Allocation& buffer0Allocation = buffer0.getAllocation();
+		invalidateMappedMemoryRange(vk, device, buffer0Allocation.getMemory(), buffer0Allocation.getOffset(), bufferSizeBytes);
+		const deUint32* buffer0Ptr = static_cast<deUint32*>(buffer0Allocation.getHostPtr());
+
+		for (deUint32 ndx = 0; ndx < m_numValues; ++ndx)
+		{
+			const deUint32 res = buffer0Ptr[ndx];
+			const deUint32 ref = ndx;
+
+			if (res != ref)
+			{
+				std::ostringstream msg;
+				msg << "Comparison failed for Out0.values[" << ndx << "] res=" << res << " ref=" << ref;
+				return tcu::TestStatus::fail(msg.str());
+			}
+		}
+	}
+	{
+		const Allocation& buffer1Allocation = buffer1.getAllocation();
+		invalidateMappedMemoryRange(vk, device, buffer1Allocation.getMemory(), buffer1Allocation.getOffset(), bufferSizeBytes);
+		const deUint32* buffer1Ptr = static_cast<deUint32*>(buffer1Allocation.getHostPtr());
+
+		for (deUint32 ndx = 0; ndx < m_numValues; ++ndx)
+		{
+			const deUint32 res = buffer1Ptr[ndx];
+			const deUint32 ref = m_numValues - ndx;
+
+			if (res != ref)
+			{
+				std::ostringstream msg;
+				msg << "Comparison failed for Out1.values[" << ndx << "] res=" << res << " ref=" << ref;
+				return tcu::TestStatus::fail(msg.str());
+			}
+		}
+	}
+	return tcu::TestStatus::pass("Compute succeeded");
+}
+
+class SSBOBarrierTest : public vkt::TestCase
+{
+public:
+						SSBOBarrierTest		(tcu::TestContext&	testCtx,
+											 const std::string&	name,
+											 const std::string&	description,
+											 const tcu::IVec3&	workSize);
+
+	void				initPrograms		(SourceCollections& sourceCollections) const;
+	TestInstance*		createInstance		(Context&			context) const;
+
+private:
+	const tcu::IVec3	m_workSize;
+};
+
+class SSBOBarrierTestInstance : public vkt::TestInstance
+{
+public:
+									SSBOBarrierTestInstance		(Context&			context,
+																 const tcu::IVec3&	workSize);
+
+	tcu::TestStatus					iterate						(void);
+
+private:
+	const tcu::IVec3				m_workSize;
+};
+
+SSBOBarrierTest::SSBOBarrierTest (tcu::TestContext&		testCtx,
+								  const std::string&	name,
+								  const std::string&	description,
+								  const tcu::IVec3&		workSize)
+	: TestCase		(testCtx, name, description)
+	, m_workSize	(workSize)
+{
+}
+
+void SSBOBarrierTest::initPrograms (SourceCollections& sourceCollections) const
+{
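+	// Two-pass test: comp0 writes u_baseVal + workGroupOffset into the work buffer,
+	// comp1 reads those values back and accumulates them into a single sum with atomicAdd.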
+	sourceCollections.glslSources.add("comp0") << glu::ComputeSource(
+		"#version 310 es\n"
+		"layout (local_size_x = 1) in;\n"
+		"layout(binding = 2) readonly uniform Constants {\n"
+		"    uint u_baseVal;\n"
+		"};\n"
+		"layout(binding = 1) writeonly buffer Output {\n"
+		"    uint values[];\n"
+		"};\n"
+		"void main (void) {\n"
+		"    uint offset = gl_NumWorkGroups.x*gl_NumWorkGroups.y*gl_WorkGroupID.z + gl_NumWorkGroups.x*gl_WorkGroupID.y + gl_WorkGroupID.x;\n"
+		"    values[offset] = u_baseVal + offset;\n"
+		"}\n");
+
+	sourceCollections.glslSources.add("comp1") << glu::ComputeSource(
+		"#version 310 es\n"
+		"layout (local_size_x = 1) in;\n"
+		"layout(binding = 1) readonly buffer Input {\n"
+		"    uint values[];\n"
+		"};\n"
+		"layout(binding = 0) coherent buffer Output {\n"
+		"    uint sum;\n"
+		"};\n"
+		"void main (void) {\n"
+		"    uint offset = gl_NumWorkGroups.x*gl_NumWorkGroups.y*gl_WorkGroupID.z + gl_NumWorkGroups.x*gl_WorkGroupID.y + gl_WorkGroupID.x;\n"
+		"    uint value  = values[offset];\n"
+		"    atomicAdd(sum, value);\n"
+		"}\n");
+}
+
+TestInstance* SSBOBarrierTest::createInstance (Context& context) const
+{
+	return new SSBOBarrierTestInstance(context, m_workSize);
+}
+
+SSBOBarrierTestInstance::SSBOBarrierTestInstance (Context& context, const tcu::IVec3& workSize)
+	: TestInstance	(context)
+	, m_workSize	(workSize)
+{
+}
+
+tcu::TestStatus SSBOBarrierTestInstance::iterate (void)
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	Allocator&				allocator			= m_context.getDefaultAllocator();
+
+	// Create a work buffer used by both shaders
+
+	const int workGroupCount = multiplyComponents(m_workSize);
+	const VkDeviceSize workBufferSizeBytes = sizeof(deUint32) * workGroupCount;
+	const Buffer workBuffer(vk, device, allocator, makeBufferCreateInfo(workBufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::Any);
+
+	// Create an output buffer
+
+	const VkDeviceSize outputBufferSizeBytes = sizeof(deUint32);
+	const Buffer outputBuffer(vk, device, allocator, makeBufferCreateInfo(outputBufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible);
+
+	// Initialize atomic counter value to zero
+	{
+		const Allocation& outputBufferAllocation = outputBuffer.getAllocation();
+		deUint32* outputBufferPtr = static_cast<deUint32*>(outputBufferAllocation.getHostPtr());
+		*outputBufferPtr = 0;
+		flushMappedMemoryRange(vk, device, outputBufferAllocation.getMemory(), outputBufferAllocation.getOffset(), outputBufferSizeBytes);
+	}
+
+	// Create a uniform buffer (to pass uniform constants)
+
+	const VkDeviceSize uniformBufferSizeBytes = sizeof(deUint32);
+	const Buffer uniformBuffer(vk, device, allocator, makeBufferCreateInfo(uniformBufferSizeBytes, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT), MemoryRequirement::HostVisible);
+
+	// Set the constants in the uniform buffer
+
+	const deUint32	baseValue = 127;
+	{
+		const Allocation& uniformBufferAllocation = uniformBuffer.getAllocation();
+		deUint32* uniformBufferPtr = static_cast<deUint32*>(uniformBufferAllocation.getHostPtr());
+		uniformBufferPtr[0] = baseValue;
+
+		flushMappedMemoryRange(vk, device, uniformBufferAllocation.getMemory(), uniformBufferAllocation.getOffset(), uniformBufferSizeBytes);
+	}
+
+	// Create descriptor set
+
+	const Unique<VkDescriptorSetLayout> descriptorSetLayout(
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device));
+
+	const Unique<VkDescriptorPool> descriptorPool(
+		DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 2u)
+		.addType(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+
+	const Unique<VkDescriptorSet> descriptorSet(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
+
+	const VkDescriptorBufferInfo workBufferDescriptorInfo = makeDescriptorBufferInfo(*workBuffer, 0ull, workBufferSizeBytes);
+	const VkDescriptorBufferInfo outputBufferDescriptorInfo = makeDescriptorBufferInfo(*outputBuffer, 0ull, outputBufferSizeBytes);
+	const VkDescriptorBufferInfo uniformBufferDescriptorInfo = makeDescriptorBufferInfo(*uniformBuffer, 0ull, uniformBufferSizeBytes);
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outputBufferDescriptorInfo)
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &workBufferDescriptorInfo)
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, &uniformBufferDescriptorInfo)
+		.update(vk, device);
+
+	// Perform the computation
+
+	const Unique<VkShaderModule> shaderModule0(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp0"), 0));
+	const Unique<VkShaderModule> shaderModule1(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp1"), 0));
+
+	const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device, *descriptorSetLayout));
+	const Unique<VkPipeline> pipeline0(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule0));
+	const Unique<VkPipeline> pipeline1(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule1));
+
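+	// Barriers: host-written uniform constants become visible to the first dispatch,
+	// the work buffer written by comp0 becomes visible to comp1, and the final sum is
+	// made visible to the host for validation.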
+	const VkBufferMemoryBarrier writeUniformConstantsBarrier = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_UNIFORM_READ_BIT, *uniformBuffer, 0ull, uniformBufferSizeBytes);
+
+	const VkBufferMemoryBarrier betweenShadersBarrier = makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, *workBuffer, 0ull, workBufferSizeBytes);
+
+	const VkBufferMemoryBarrier afterComputeBarrier = makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, *outputBuffer, 0ull, outputBufferSizeBytes);
+
+	const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+	const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+	// Start recording commands
+
+	beginCommandBuffer(vk, *cmdBuffer);
+
+	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline0);
+	vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
+
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &writeUniformConstantsBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+	vk.cmdDispatch(*cmdBuffer, m_workSize.x(), m_workSize.y(), m_workSize.z());
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &betweenShadersBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+	// Switch to the second shader program
+	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline1);
+
+	vk.cmdDispatch(*cmdBuffer, m_workSize.x(), m_workSize.y(), m_workSize.z());
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &afterComputeBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+	endCommandBuffer(vk, *cmdBuffer);
+
+	// Wait for completion
+
+	submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+
+	// Validate the results
+
+	const Allocation& outputBufferAllocation = outputBuffer.getAllocation();
+	invalidateMappedMemoryRange(vk, device, outputBufferAllocation.getMemory(), outputBufferAllocation.getOffset(), outputBufferSizeBytes);
+
+	const deUint32* bufferPtr = static_cast<deUint32*>(outputBufferAllocation.getHostPtr());
+	const deUint32	res = *bufferPtr;
+	deUint32		ref = 0;
+
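+	// comp0 stores baseValue + offset for every work group, so the expected atomic sum is
+	// workGroupCount * baseValue + (0 + 1 + ... + workGroupCount - 1).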
+	for (int ndx = 0; ndx < workGroupCount; ++ndx)
+		ref += baseValue + ndx;
+
+	if (res != ref)
+	{
+		std::ostringstream msg;
+		msg << "ERROR: comparison failed, expected " << ref << ", got " << res;
+		return tcu::TestStatus::fail(msg.str());
+	}
+	return tcu::TestStatus::pass("Compute succeeded");
+}
+
+class ImageAtomicOpTest : public vkt::TestCase
+{
+public:
+						ImageAtomicOpTest		(tcu::TestContext&	testCtx,
+												 const std::string& name,
+												 const std::string& description,
+												 const deUint32		localSize,
+												 const tcu::IVec2&	imageSize);
+
+	void				initPrograms			(SourceCollections& sourceCollections) const;
+	TestInstance*		createInstance			(Context&			context) const;
+
+private:
+	const deUint32		m_localSize;
+	const tcu::IVec2	m_imageSize;
+};
+
+class ImageAtomicOpTestInstance : public vkt::TestInstance
+{
+public:
+									ImageAtomicOpTestInstance		(Context&			context,
+																	 const deUint32		localSize,
+																	 const tcu::IVec2&	imageSize);
+
+	tcu::TestStatus					iterate							(void);
+
+private:
+	const deUint32					m_localSize;
+	const tcu::IVec2				m_imageSize;
+};
+
+ImageAtomicOpTest::ImageAtomicOpTest (tcu::TestContext&		testCtx,
+									  const std::string&	name,
+									  const std::string&	description,
+									  const deUint32		localSize,
+									  const tcu::IVec2&		imageSize)
+	: TestCase		(testCtx, name, description)
+	, m_localSize	(localSize)
+	, m_imageSize	(imageSize)
+{
+}
+
+void ImageAtomicOpTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	std::ostringstream src;
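+	// Each work group owns one texel: invocation 0 clears it, the group synchronizes, and
+	// then every invocation atomically adds its input value, so the texel ends up holding
+	// the sum of m_localSize input values.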
+	src << "#version 310 es\n"
+		<< "#extension GL_OES_shader_image_atomic : require\n"
+		<< "layout (local_size_x = " << m_localSize << ") in;\n"
+		<< "layout(binding = 1, r32ui) coherent uniform highp uimage2D u_dstImg;\n"
+		<< "layout(binding = 0) readonly buffer Input {\n"
+		<< "    uint values[" << (multiplyComponents(m_imageSize) * m_localSize) << "];\n"
+		<< "} sb_in;\n\n"
+		<< "void main (void) {\n"
+		<< "    uint stride = gl_NumWorkGroups.x*gl_WorkGroupSize.x;\n"
+		<< "    uint value  = sb_in.values[gl_GlobalInvocationID.y*stride + gl_GlobalInvocationID.x];\n"
+		<< "\n"
+		<< "    if (gl_LocalInvocationIndex == 0u)\n"
+		<< "        imageStore(u_dstImg, ivec2(gl_WorkGroupID.xy), uvec4(0));\n"
+		<< "    memoryBarrierImage();\n"
+		<< "    barrier();\n"
+		<< "    imageAtomicAdd(u_dstImg, ivec2(gl_WorkGroupID.xy), value);\n"
+		<< "}\n";
+
+	sourceCollections.glslSources.add("comp") << glu::ComputeSource(src.str());
+}
+
+TestInstance* ImageAtomicOpTest::createInstance (Context& context) const
+{
+	return new ImageAtomicOpTestInstance(context, m_localSize, m_imageSize);
+}
+
+ImageAtomicOpTestInstance::ImageAtomicOpTestInstance (Context& context, const deUint32 localSize, const tcu::IVec2& imageSize)
+	: TestInstance	(context)
+	, m_localSize	(localSize)
+	, m_imageSize	(imageSize)
+{
+}
+
+tcu::TestStatus ImageAtomicOpTestInstance::iterate (void)
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	Allocator&				allocator			= m_context.getDefaultAllocator();
+
+	// Create an image
+
+	const VkImageCreateInfo imageParams = make2DImageCreateInfo(m_imageSize, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_STORAGE_BIT);
+	const Image image(vk, device, allocator, imageParams, MemoryRequirement::Any);
+
+	const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
+	const Unique<VkImageView> imageView(makeImageView(vk, device, *image, VK_IMAGE_VIEW_TYPE_2D, VK_FORMAT_R32_UINT, subresourceRange));
+
+	// Input buffer
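+	// Each pixel of the image receives the sum of m_localSize input values, one value per invocation in its work group.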
+
+	const deUint32 numInputValues = multiplyComponents(m_imageSize) * m_localSize;
+	const VkDeviceSize inputBufferSizeBytes = sizeof(deUint32) * numInputValues;
+
+	const Buffer inputBuffer(vk, device, allocator, makeBufferCreateInfo(inputBufferSizeBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT), MemoryRequirement::HostVisible);
+
+	// Populate the input buffer with test data
+	{
+		de::Random rnd(0x77238ac2);
+		const Allocation& inputBufferAllocation = inputBuffer.getAllocation();
+		deUint32* bufferPtr = static_cast<deUint32*>(inputBufferAllocation.getHostPtr());
+		for (deUint32 i = 0; i < numInputValues; ++i)
+			*bufferPtr++ = rnd.getUint32();
+
+		flushMappedMemoryRange(vk, device, inputBufferAllocation.getMemory(), inputBufferAllocation.getOffset(), inputBufferSizeBytes);
+	}
+
+	// Create a buffer to store shader output (copied from image data)
+
+	const deUint32 imageArea = multiplyComponents(m_imageSize);
+	const VkDeviceSize outputBufferSizeBytes = sizeof(deUint32) * imageArea;
+	const Buffer outputBuffer(vk, device, allocator, makeBufferCreateInfo(outputBufferSizeBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
+
+	// Create descriptor set
+
+	const Unique<VkDescriptorSetLayout> descriptorSetLayout(
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device));
+
+	const Unique<VkDescriptorPool> descriptorPool(
+		DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+
+	const Unique<VkDescriptorSet> descriptorSet(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
+
+	// Set the bindings
+
+	const VkDescriptorImageInfo imageDescriptorInfo = makeDescriptorImageInfo(DE_NULL, *imageView, VK_IMAGE_LAYOUT_GENERAL);
+	const VkDescriptorBufferInfo bufferDescriptorInfo = makeDescriptorBufferInfo(*inputBuffer, 0ull, inputBufferSizeBytes);
+
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferDescriptorInfo)
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &imageDescriptorInfo)
+		.update(vk, device);
+
+	// Perform the computation
+	{
+		const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0u));
+		const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device, *descriptorSetLayout));
+		const Unique<VkPipeline> pipeline(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
+
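+		// Barriers: make the host-written input visible to the shader, transition the image for the copy-out, and make the copied results visible to the host.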
+		const VkBufferMemoryBarrier inputBufferPostHostWriteBarrier = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, *inputBuffer, 0ull, inputBufferSizeBytes);
+
+		const VkImageMemoryBarrier imagePreCopyBarrier = makeImageMemoryBarrier(
+			VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
+			VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+			*image, subresourceRange);
+
+		const VkBufferMemoryBarrier outputBufferPostCopyBarrier = makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, *outputBuffer, 0ull, outputBufferSizeBytes);
+
+		const VkBufferImageCopy copyParams = makeBufferImageCopy(m_imageSize);
+
+		// Prepare the command buffer
+
+		const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+		const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+		// Start recording commands
+
+		beginCommandBuffer(vk, *cmdBuffer);
+
+		vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+		vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
+
+		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &inputBufferPostHostWriteBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+		vk.cmdDispatch(*cmdBuffer, m_imageSize.x(), m_imageSize.y(), 1u);
+
+		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &imagePreCopyBarrier);
+		vk.cmdCopyImageToBuffer(*cmdBuffer, *image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *outputBuffer, 1u, &copyParams);
+		vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &outputBufferPostCopyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+		endCommandBuffer(vk, *cmdBuffer);
+
+		// Wait for completion
+
+		submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+	}
+
+	// Validate the results
+
+	const Allocation& outputBufferAllocation = outputBuffer.getAllocation();
+	invalidateMappedMemoryRange(vk, device, outputBufferAllocation.getMemory(), outputBufferAllocation.getOffset(), outputBufferSizeBytes);
+
+	const deUint32* bufferPtr = static_cast<deUint32*>(outputBufferAllocation.getHostPtr());
+	const deUint32* refBufferPtr = static_cast<deUint32*>(inputBuffer.getAllocation().getHostPtr());
+
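+	// Each output value must equal the sum of the m_localSize random input values its work group accumulated with imageAtomicAdd().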
+	for (deUint32 pixelNdx = 0; pixelNdx < imageArea; ++pixelNdx)
+	{
+		const deUint32	res = bufferPtr[pixelNdx];
+		deUint32		ref = 0;
+
+		for (deUint32 offs = 0; offs < m_localSize; ++offs)
+			ref += refBufferPtr[pixelNdx * m_localSize + offs];
+
+		if (res != ref)
+		{
+			std::ostringstream msg;
+			msg << "Comparison failed for pixel " << pixelNdx;
+			return tcu::TestStatus::fail(msg.str());
+		}
+	}
+	return tcu::TestStatus::pass("Compute succeeded");
+}
+
+class ImageBarrierTest : public vkt::TestCase
+{
+public:
+						ImageBarrierTest	(tcu::TestContext&	testCtx,
+											const std::string&	name,
+											const std::string&	description,
+											const tcu::IVec2&	imageSize);
+
+	void				initPrograms		(SourceCollections& sourceCollections) const;
+	TestInstance*		createInstance		(Context&			context) const;
+
+private:
+	const tcu::IVec2	m_imageSize;
+};
+
+class ImageBarrierTestInstance : public vkt::TestInstance
+{
+public:
+									ImageBarrierTestInstance	(Context&			context,
+																 const tcu::IVec2&	imageSize);
+
+	tcu::TestStatus					iterate						(void);
+
+private:
+	const tcu::IVec2				m_imageSize;
+};
+
+ImageBarrierTest::ImageBarrierTest (tcu::TestContext&	testCtx,
+									const std::string&	name,
+									const std::string&	description,
+									const tcu::IVec2&	imageSize)
+	: TestCase		(testCtx, name, description)
+	, m_imageSize	(imageSize)
+{
+}
+
+void ImageBarrierTest::initPrograms (SourceCollections& sourceCollections) const
+{
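+	// The first program writes a unique value (linear work group index plus u_baseVal) to each pixel of the image.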
+	sourceCollections.glslSources.add("comp0") << glu::ComputeSource(
+		"#version 310 es\n"
+		"layout (local_size_x = 1) in;\n"
+		"layout(binding = 2) readonly uniform Constants {\n"
+		"    uint u_baseVal;\n"
+		"};\n"
+		"layout(binding = 1, r32ui) writeonly uniform highp uimage2D u_img;\n"
+		"void main (void) {\n"
+		"    uint offset = gl_NumWorkGroups.x*gl_NumWorkGroups.y*gl_WorkGroupID.z + gl_NumWorkGroups.x*gl_WorkGroupID.y + gl_WorkGroupID.x;\n"
+		"    imageStore(u_img, ivec2(gl_WorkGroupID.xy), uvec4(offset + u_baseVal, 0, 0, 0));\n"
+		"}\n");
+
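+	// The second program reads back each per-pixel value written by the first program and accumulates it into the output SSBO with atomicAdd().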
+	sourceCollections.glslSources.add("comp1") << glu::ComputeSource(
+		"#version 310 es\n"
+		"layout (local_size_x = 1) in;\n"
+		"layout(binding = 1, r32ui) readonly uniform highp uimage2D u_img;\n"
+		"layout(binding = 0) coherent buffer Output {\n"
+		"    uint sum;\n"
+		"};\n"
+		"void main (void) {\n"
+		"    uint value = imageLoad(u_img, ivec2(gl_WorkGroupID.xy)).x;\n"
+		"    atomicAdd(sum, value);\n"
+		"}\n");
+}
+
+TestInstance* ImageBarrierTest::createInstance (Context& context) const
+{
+	return new ImageBarrierTestInstance(context, m_imageSize);
+}
+
+ImageBarrierTestInstance::ImageBarrierTestInstance (Context& context, const tcu::IVec2& imageSize)
+	: TestInstance	(context)
+	, m_imageSize	(imageSize)
+{
+}
+
+tcu::TestStatus ImageBarrierTestInstance::iterate (void)
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	Allocator&				allocator			= m_context.getDefaultAllocator();
+
+	// Create an image used by both shaders
+
+	const VkImageCreateInfo imageParams = make2DImageCreateInfo(m_imageSize, VK_IMAGE_USAGE_STORAGE_BIT);
+	const Image image(vk, device, allocator, imageParams, MemoryRequirement::Any);
+
+	const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
+	const Unique<VkImageView> imageView(makeImageView(vk, device, *image, VK_IMAGE_VIEW_TYPE_2D, VK_FORMAT_R32_UINT, subresourceRange));
+
+	// Create an output buffer
+
+	const VkDeviceSize outputBufferSizeBytes = sizeof(deUint32);
+	const Buffer outputBuffer(vk, device, allocator, makeBufferCreateInfo(outputBufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible);
+
+	// Initialize atomic counter value to zero
+	{
+		const Allocation& outputBufferAllocation = outputBuffer.getAllocation();
+		deUint32* outputBufferPtr = static_cast<deUint32*>(outputBufferAllocation.getHostPtr());
+		*outputBufferPtr = 0;
+		flushMappedMemoryRange(vk, device, outputBufferAllocation.getMemory(), outputBufferAllocation.getOffset(), outputBufferSizeBytes);
+	}
+
+	// Create a uniform buffer (to pass uniform constants)
+
+	const VkDeviceSize uniformBufferSizeBytes = sizeof(deUint32);
+	const Buffer uniformBuffer(vk, device, allocator, makeBufferCreateInfo(uniformBufferSizeBytes, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT), MemoryRequirement::HostVisible);
+
+	// Set the constants in the uniform buffer
+
+	const deUint32	baseValue = 127;
+	{
+		const Allocation& uniformBufferAllocation = uniformBuffer.getAllocation();
+		deUint32* uniformBufferPtr = static_cast<deUint32*>(uniformBufferAllocation.getHostPtr());
+		uniformBufferPtr[0] = baseValue;
+
+		flushMappedMemoryRange(vk, device, uniformBufferAllocation.getMemory(), uniformBufferAllocation.getOffset(), uniformBufferSizeBytes);
+	}
+
+	// Create descriptor set
+
+	const Unique<VkDescriptorSetLayout> descriptorSetLayout(
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device));
+
+	const Unique<VkDescriptorPool> descriptorPool(
+		DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
+		.addType(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+
+	const Unique<VkDescriptorSet> descriptorSet(makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
+
+	const VkDescriptorImageInfo imageDescriptorInfo = makeDescriptorImageInfo(DE_NULL, *imageView, VK_IMAGE_LAYOUT_GENERAL);
+	const VkDescriptorBufferInfo outputBufferDescriptorInfo = makeDescriptorBufferInfo(*outputBuffer, 0ull, outputBufferSizeBytes);
+	const VkDescriptorBufferInfo uniformBufferDescriptorInfo = makeDescriptorBufferInfo(*uniformBuffer, 0ull, uniformBufferSizeBytes);
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outputBufferDescriptorInfo)
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &imageDescriptorInfo)
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, &uniformBufferDescriptorInfo)
+		.update(vk, device);
+
+	// Perform the computation
+
+	const Unique<VkShaderModule>	shaderModule0(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp0"), 0));
+	const Unique<VkShaderModule>	shaderModule1(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp1"), 0));
+
+	const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device, *descriptorSetLayout));
+	const Unique<VkPipeline> pipeline0(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule0));
+	const Unique<VkPipeline> pipeline1(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule1));
+
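+	// Barriers: make the host-written uniform constants visible, transition the image to GENERAL before the first dispatch, order the image write/read between the two dispatches, and make the final sum visible to the host.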
+	const VkBufferMemoryBarrier writeUniformConstantsBarrier = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_UNIFORM_READ_BIT, *uniformBuffer, 0ull, uniformBufferSizeBytes);
+
+	const VkImageMemoryBarrier imageLayoutBarrier = makeImageMemoryBarrier(
+		0u, 0u,
+		VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
+		*image, subresourceRange);
+
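+	// Keep the image in GENERAL layout between the dispatches: storage image descriptors require VK_IMAGE_LAYOUT_GENERAL for the imageLoad() in the second shader.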
+	const VkImageMemoryBarrier imageBarrierBetweenShaders = makeImageMemoryBarrier(
+		VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
+		VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
+		*image, subresourceRange);
+
+	const VkBufferMemoryBarrier afterComputeBarrier = makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, *outputBuffer, 0ull, outputBufferSizeBytes);
+
+	const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+	const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+	// Start recording commands
+
+	beginCommandBuffer(vk, *cmdBuffer);
+
+	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline0);
+	vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
+
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &writeUniformConstantsBarrier, 1, &imageLayoutBarrier);
+
+	vk.cmdDispatch(*cmdBuffer, m_imageSize.x(), m_imageSize.y(), 1u);
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrierBetweenShaders);
+
+	// Switch to the second shader program
+	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline1);
+
+	vk.cmdDispatch(*cmdBuffer, m_imageSize.x(), m_imageSize.y(), 1u);
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &afterComputeBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+	endCommandBuffer(vk, *cmdBuffer);
+
+	// Wait for completion
+
+	submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+
+	// Validate the results
+
+	const Allocation& outputBufferAllocation = outputBuffer.getAllocation();
+	invalidateMappedMemoryRange(vk, device, outputBufferAllocation.getMemory(), outputBufferAllocation.getOffset(), outputBufferSizeBytes);
+
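+	// The expected result is the sum of (baseValue + workGroupIndex) over all imageSize.x * imageSize.y work groups.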
+	const int		numValues = multiplyComponents(m_imageSize);
+	const deUint32* bufferPtr = static_cast<deUint32*>(outputBufferAllocation.getHostPtr());
+	const deUint32	res = *bufferPtr;
+	deUint32		ref = 0;
+
+	for (int ndx = 0; ndx < numValues; ++ndx)
+		ref += baseValue + ndx;
+
+	if (res != ref)
+	{
+		std::ostringstream msg;
+		msg << "ERROR: comparison failed, expected " << ref << ", got " << res;
+		return tcu::TestStatus::fail(msg.str());
+	}
+	return tcu::TestStatus::pass("Compute succeeded");
+}
+
+namespace EmptyShaderTest
+{
+
+void createProgram (SourceCollections& dst)
+{
+	dst.glslSources.add("comp") << glu::ComputeSource(
+		"#version 310 es\n"
+		"layout (local_size_x = 1) in;\n"
+		"void main (void) {}\n"
+	);
+}
+
+tcu::TestStatus createTest (Context& context)
+{
+	const DeviceInterface&	vk					= context.getDeviceInterface();
+	const VkDevice			device				= context.getDevice();
+	const VkQueue			queue				= context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+
+	const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, context.getBinaryCollection().get("comp"), 0u));
+
+	const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device));
+	const Unique<VkPipeline> pipeline(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
+
+	const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+	const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+	// Start recording commands
+
+	beginCommandBuffer(vk, *cmdBuffer);
+
+	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+
+	const tcu::IVec3 workGroups(1, 1, 1);
+	vk.cmdDispatch(*cmdBuffer, workGroups.x(), workGroups.y(), workGroups.z());
+
+	endCommandBuffer(vk, *cmdBuffer);
+
+	submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+
+	return tcu::TestStatus::pass("Compute succeeded");
+}
+
+} // EmptyShaderTest
+} // anonymous
+
+tcu::TestCaseGroup* createBasicComputeShaderTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> basicComputeTests(new tcu::TestCaseGroup(testCtx, "basic", "Basic compute tests"));
+
+	addFunctionCaseWithPrograms(basicComputeTests.get(), "empty_shader", "Shader that does nothing", EmptyShaderTest::createProgram, EmptyShaderTest::createTest);
+
+	basicComputeTests->addChild(BufferToBufferInvertTest::UBOToSSBOInvertCase(testCtx,	"ubo_to_ssbo_single_invocation",	"Copy from UBO to SSBO, inverting bits",	256,	tcu::IVec3(1,1,1),	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(BufferToBufferInvertTest::UBOToSSBOInvertCase(testCtx,	"ubo_to_ssbo_single_group",			"Copy from UBO to SSBO, inverting bits",	1024,	tcu::IVec3(2,1,4),	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(BufferToBufferInvertTest::UBOToSSBOInvertCase(testCtx,	"ubo_to_ssbo_multiple_invocations",	"Copy from UBO to SSBO, inverting bits",	1024,	tcu::IVec3(1,1,1),	tcu::IVec3(2,4,1)));
+	basicComputeTests->addChild(BufferToBufferInvertTest::UBOToSSBOInvertCase(testCtx,	"ubo_to_ssbo_multiple_groups",		"Copy from UBO to SSBO, inverting bits",	1024,	tcu::IVec3(1,4,2),	tcu::IVec3(2,2,4)));
+
+	basicComputeTests->addChild(BufferToBufferInvertTest::CopyInvertSSBOCase(testCtx,	"copy_ssbo_single_invocation",		"Copy between SSBOs, inverting bits",	256,	tcu::IVec3(1,1,1),	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(BufferToBufferInvertTest::CopyInvertSSBOCase(testCtx,	"copy_ssbo_multiple_invocations",	"Copy between SSBOs, inverting bits",	1024,	tcu::IVec3(1,1,1),	tcu::IVec3(2,4,1)));
+	basicComputeTests->addChild(BufferToBufferInvertTest::CopyInvertSSBOCase(testCtx,	"copy_ssbo_multiple_groups",		"Copy between SSBOs, inverting bits",	1024,	tcu::IVec3(1,4,2),	tcu::IVec3(2,2,4)));
+
+	basicComputeTests->addChild(new InvertSSBOInPlaceTest(testCtx,	"ssbo_rw_single_invocation",			"Read and write same SSBO",		256,	true,	tcu::IVec3(1,1,1),	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(new InvertSSBOInPlaceTest(testCtx,	"ssbo_rw_multiple_groups",				"Read and write same SSBO",		1024,	true,	tcu::IVec3(1,4,2),	tcu::IVec3(2,2,4)));
+	basicComputeTests->addChild(new InvertSSBOInPlaceTest(testCtx,	"ssbo_unsized_arr_single_invocation",	"Read and write same SSBO",		256,	false,	tcu::IVec3(1,1,1),	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(new InvertSSBOInPlaceTest(testCtx,	"ssbo_unsized_arr_multiple_groups",		"Read and write same SSBO",		1024,	false,	tcu::IVec3(1,4,2),	tcu::IVec3(2,2,4)));
+
+	basicComputeTests->addChild(new WriteToMultipleSSBOTest(testCtx,	"write_multiple_arr_single_invocation",			"Write to multiple SSBOs",	256,	true,	tcu::IVec3(1,1,1),	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(new WriteToMultipleSSBOTest(testCtx,	"write_multiple_arr_multiple_groups",			"Write to multiple SSBOs",	1024,	true,	tcu::IVec3(1,4,2),	tcu::IVec3(2,2,4)));
+	basicComputeTests->addChild(new WriteToMultipleSSBOTest(testCtx,	"write_multiple_unsized_arr_single_invocation",	"Write to multiple SSBOs",	256,	false,	tcu::IVec3(1,1,1),	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(new WriteToMultipleSSBOTest(testCtx,	"write_multiple_unsized_arr_multiple_groups",	"Write to multiple SSBOs",	1024,	false,	tcu::IVec3(1,4,2),	tcu::IVec3(2,2,4)));
+
+	basicComputeTests->addChild(new SSBOLocalBarrierTest(testCtx,	"ssbo_local_barrier_single_invocation",	"SSBO local barrier usage",	tcu::IVec3(1,1,1),	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(new SSBOLocalBarrierTest(testCtx,	"ssbo_local_barrier_single_group",		"SSBO local barrier usage",	tcu::IVec3(3,2,5),	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(new SSBOLocalBarrierTest(testCtx,	"ssbo_local_barrier_multiple_groups",	"SSBO local barrier usage",	tcu::IVec3(3,4,1),	tcu::IVec3(2,7,3)));
+
+	basicComputeTests->addChild(new SSBOBarrierTest(testCtx,	"ssbo_cmd_barrier_single",		"SSBO memory barrier usage",	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(new SSBOBarrierTest(testCtx,	"ssbo_cmd_barrier_multiple",	"SSBO memory barrier usage",	tcu::IVec3(11,5,7)));
+
+	basicComputeTests->addChild(new SharedVarTest(testCtx,	"shared_var_single_invocation",		"Basic shared variable usage",	tcu::IVec3(1,1,1),	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(new SharedVarTest(testCtx,	"shared_var_single_group",			"Basic shared variable usage",	tcu::IVec3(3,2,5),	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(new SharedVarTest(testCtx,	"shared_var_multiple_invocations",	"Basic shared variable usage",	tcu::IVec3(1,1,1),	tcu::IVec3(2,5,4)));
+	basicComputeTests->addChild(new SharedVarTest(testCtx,	"shared_var_multiple_groups",		"Basic shared variable usage",	tcu::IVec3(3,4,1),	tcu::IVec3(2,7,3)));
+
+	basicComputeTests->addChild(new SharedVarAtomicOpTest(testCtx,	"shared_atomic_op_single_invocation",		"Atomic operation with shared var",		tcu::IVec3(1,1,1),	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(new SharedVarAtomicOpTest(testCtx,	"shared_atomic_op_single_group",			"Atomic operation with shared var",		tcu::IVec3(3,2,5),	tcu::IVec3(1,1,1)));
+	basicComputeTests->addChild(new SharedVarAtomicOpTest(testCtx,	"shared_atomic_op_multiple_invocations",	"Atomic operation with shared var",		tcu::IVec3(1,1,1),	tcu::IVec3(2,5,4)));
+	basicComputeTests->addChild(new SharedVarAtomicOpTest(testCtx,	"shared_atomic_op_multiple_groups",			"Atomic operation with shared var",		tcu::IVec3(3,4,1),	tcu::IVec3(2,7,3)));
+
+	basicComputeTests->addChild(new CopyImageToSSBOTest(testCtx,	"copy_image_to_ssbo_small",	"Image to SSBO copy",	tcu::IVec2(1,1),	tcu::IVec2(64,64)));
+	basicComputeTests->addChild(new CopyImageToSSBOTest(testCtx,	"copy_image_to_ssbo_large",	"Image to SSBO copy",	tcu::IVec2(2,4),	tcu::IVec2(512,512)));
+
+	basicComputeTests->addChild(new CopySSBOToImageTest(testCtx,	"copy_ssbo_to_image_small",	"SSBO to image copy",	tcu::IVec2(1, 1),	tcu::IVec2(64, 64)));
+	basicComputeTests->addChild(new CopySSBOToImageTest(testCtx,	"copy_ssbo_to_image_large",	"SSBO to image copy",	tcu::IVec2(2, 4),	tcu::IVec2(512, 512)));
+
+	basicComputeTests->addChild(new ImageAtomicOpTest(testCtx,	"image_atomic_op_local_size_1",	"Atomic operation with image",	1,	tcu::IVec2(64,64)));
+	basicComputeTests->addChild(new ImageAtomicOpTest(testCtx,	"image_atomic_op_local_size_8",	"Atomic operation with image",	8,	tcu::IVec2(64,64)));
+
+	basicComputeTests->addChild(new ImageBarrierTest(testCtx,	"image_barrier_single",		"Image barrier",	tcu::IVec2(1,1)));
+	basicComputeTests->addChild(new ImageBarrierTest(testCtx,	"image_barrier_multiple",	"Image barrier",	tcu::IVec2(64,64)));
+
+	return basicComputeTests.release();
+}
+
+} // compute
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/compute/vktComputeBasicComputeShaderTests.hpp b/external/vulkancts/modules/vulkan/compute/vktComputeBasicComputeShaderTests.hpp
new file mode 100644
index 0000000..3449908
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/compute/vktComputeBasicComputeShaderTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTCOMPUTEBASICCOMPUTESHADERTESTS_HPP
+#define _VKTCOMPUTEBASICCOMPUTESHADERTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Compute Shader Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace compute
+{
+
+tcu::TestCaseGroup* createBasicComputeShaderTests (tcu::TestContext& testCtx);
+
+} // compute
+} // vkt
+
+#endif // _VKTCOMPUTEBASICCOMPUTESHADERTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/compute/vktComputeIndirectComputeDispatchTests.cpp b/external/vulkancts/modules/vulkan/compute/vktComputeIndirectComputeDispatchTests.cpp
new file mode 100644
index 0000000..15b361a
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/compute/vktComputeIndirectComputeDispatchTests.cpp
@@ -0,0 +1,694 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Indirect Compute Dispatch tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktComputeIndirectComputeDispatchTests.hpp"
+#include "vktComputeTestsUtil.hpp"
+
+#include <string>
+#include <map>
+#include <vector>
+
+#include "vkDefs.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vktTestCase.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkPrograms.hpp"
+#include "vkMemUtil.hpp"
+#include "vkBuilderUtil.hpp"
+#include "vkQueryUtil.hpp"
+
+#include "tcuVector.hpp"
+#include "tcuVectorUtil.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuRGBA.hpp"
+#include "tcuStringTemplate.hpp"
+
+#include "deUniquePtr.hpp"
+#include "deSharedPtr.hpp"
+#include "deStringUtil.hpp"
+#include "deArrayUtil.hpp"
+
+#include "gluShaderUtil.hpp"
+
+namespace vkt
+{
+namespace compute
+{
+namespace
+{
+
+enum
+{
+	RESULT_BLOCK_BASE_SIZE			= 4 * (int)sizeof(deUint32), // uvec3 + uint
+	RESULT_BLOCK_NUM_PASSED_OFFSET	= 3 * (int)sizeof(deUint32),
+	INDIRECT_COMMAND_OFFSET			= 3 * (int)sizeof(deUint32),
+};
+
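+// Round the result block size up to the device's minStorageBufferOffsetAlignment so each command's result block starts at a valid storage buffer offset.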
+vk::VkDeviceSize getResultBlockAlignedSize (const vk::InstanceInterface&	instance_interface,
+											const vk::VkPhysicalDevice		physicalDevice,
+											const vk::VkDeviceSize			baseSize)
+{
+	// TODO getPhysicalDeviceProperties() was added to vkQueryUtil in 41-image-load-store-tests. Use it once it's merged.
+	vk::VkPhysicalDeviceProperties deviceProperties;
+	instance_interface.getPhysicalDeviceProperties(physicalDevice, &deviceProperties);
+	vk::VkDeviceSize alignment = deviceProperties.limits.minStorageBufferOffsetAlignment;
+
+	if (alignment == 0 || (baseSize % alignment == 0))
+		return baseSize;
+	else
+		return (baseSize / alignment + 1)*alignment;
+}
+
+struct DispatchCommand
+{
+				DispatchCommand (const deIntptr		offset,
+								 const tcu::UVec3&	numWorkGroups)
+					: m_offset			(offset)
+					, m_numWorkGroups	(numWorkGroups) {}
+
+	deIntptr	m_offset;
+	tcu::UVec3	m_numWorkGroups;
+};
+
+typedef std::vector<DispatchCommand> DispatchCommandsVec;
+
+struct DispatchCaseDesc
+{
+								DispatchCaseDesc (const char*					name,
+												  const char*					description,
+												  const deUintptr				bufferSize,
+												  const tcu::UVec3				workGroupSize,
+												  const DispatchCommandsVec&	dispatchCommands)
+									: m_name				(name)
+									, m_description			(description)
+									, m_bufferSize			(bufferSize)
+									, m_workGroupSize		(workGroupSize)
+									, m_dispatchCommands	(dispatchCommands) {}
+
+	const char*					m_name;
+	const char*					m_description;
+	const deUintptr				m_bufferSize;
+	const tcu::UVec3			m_workGroupSize;
+	const DispatchCommandsVec	m_dispatchCommands;
+};
+
+class IndirectDispatchInstanceBufferUpload : public vkt::TestInstance
+{
+public:
+									IndirectDispatchInstanceBufferUpload	(Context&					context,
+																			 const std::string&			name,
+																			 const deUintptr			bufferSize,
+																			 const tcu::UVec3&			workGroupSize,
+																			 const DispatchCommandsVec& dispatchCommands);
+
+	virtual							~IndirectDispatchInstanceBufferUpload	(void) {}
+
+	virtual tcu::TestStatus			iterate									(void);
+
+protected:
+	virtual void					fillIndirectBufferData					(const vk::VkCommandBuffer	commandBuffer,
+																			 const Buffer&				indirectBuffer);
+
+	deBool							verifyResultBuffer						(const Buffer&				resultBuffer,
+																			 const vk::VkDeviceSize		resultBlockSize,
+																			 const vk::VkDeviceSize		resultBufferSize) const;
+
+	Context&						m_context;
+	const std::string				m_name;
+
+	const vk::DeviceInterface&		m_device_interface;
+	const vk::VkDevice				m_device;
+
+	const vk::VkQueue				m_queue;
+	const deUint32					m_queueFamilyIndex;
+
+	const deUintptr					m_bufferSize;
+	const tcu::UVec3				m_workGroupSize;
+	const DispatchCommandsVec		m_dispatchCommands;
+
+	vk::Allocator&					m_allocator;
+
+private:
+	IndirectDispatchInstanceBufferUpload (const vkt::TestInstance&);
+	IndirectDispatchInstanceBufferUpload& operator= (const vkt::TestInstance&);
+};
+
+IndirectDispatchInstanceBufferUpload::IndirectDispatchInstanceBufferUpload (Context&					context,
+																			const std::string&			name,
+																			const deUintptr				bufferSize,
+																			const tcu::UVec3&			workGroupSize,
+																			const DispatchCommandsVec&	dispatchCommands)
+	: vkt::TestInstance		(context)
+	, m_context				(context)
+	, m_name				(name)
+	, m_device_interface	(context.getDeviceInterface())
+	, m_device				(context.getDevice())
+	, m_queue				(context.getUniversalQueue())
+	, m_queueFamilyIndex	(context.getUniversalQueueFamilyIndex())
+	, m_bufferSize			(bufferSize)
+	, m_workGroupSize		(workGroupSize)
+	, m_dispatchCommands	(dispatchCommands)
+	, m_allocator			(context.getDefaultAllocator())
+{
+}
+
+void IndirectDispatchInstanceBufferUpload::fillIndirectBufferData (const vk::VkCommandBuffer commandBuffer, const Buffer& indirectBuffer)
+{
+	DE_UNREF(commandBuffer);
+
+	const vk::Allocation& alloc = indirectBuffer.getAllocation();
+	deUint8* indirectDataPtr = reinterpret_cast<deUint8*>(alloc.getHostPtr());
+
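+	// Write each indirect command's (x, y, z) work group counts directly at its requested offset in the host-visible buffer.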
+	for (DispatchCommandsVec::const_iterator cmdIter = m_dispatchCommands.begin(); cmdIter != m_dispatchCommands.end(); ++cmdIter)
+	{
+		DE_ASSERT(cmdIter->m_offset >= 0);
+		DE_ASSERT(cmdIter->m_offset % sizeof(deUint32) == 0);
+		DE_ASSERT(cmdIter->m_offset + INDIRECT_COMMAND_OFFSET <= (deIntptr)m_bufferSize);
+
+		deUint32* const dstPtr = (deUint32*)&indirectDataPtr[cmdIter->m_offset];
+
+		dstPtr[0] = cmdIter->m_numWorkGroups[0];
+		dstPtr[1] = cmdIter->m_numWorkGroups[1];
+		dstPtr[2] = cmdIter->m_numWorkGroups[2];
+	}
+
+	vk::flushMappedMemoryRange(m_device_interface, m_device, alloc.getMemory(), alloc.getOffset(), m_bufferSize);
+}
+
+tcu::TestStatus IndirectDispatchInstanceBufferUpload::iterate (void)
+{
+	tcu::TestContext& testCtx = m_context.getTestContext();
+
+	testCtx.getLog() << tcu::TestLog::Message << "Indirect dispatch buffer size = " << m_bufferSize << tcu::TestLog::EndMessage;
+	{
+		tcu::ScopedLogSection section(testCtx.getLog(), "Commands", "Indirect Dispatch Commands (" + de::toString(m_dispatchCommands.size()) + " in total)");
+
+		for (deUint32 cmdNdx = 0; cmdNdx < m_dispatchCommands.size(); ++cmdNdx)
+		{
+			testCtx.getLog()
+				<< tcu::TestLog::Message
+				<< cmdNdx << ": " << "offset = " << m_dispatchCommands[cmdNdx].m_offset << ", numWorkGroups = " << m_dispatchCommands[cmdNdx].m_numWorkGroups
+				<< tcu::TestLog::EndMessage;
+		}
+	}
+
+	// Create result buffer
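+	// Each result block holds the expected uvec3 group count followed by an atomic numPassed counter (see the verify shader).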
+	const vk::VkDeviceSize resultBlockSize = getResultBlockAlignedSize(m_context.getInstanceInterface(), m_context.getPhysicalDevice(), RESULT_BLOCK_BASE_SIZE);
+	const vk::VkDeviceSize resultBufferSize = resultBlockSize * (deUint32)m_dispatchCommands.size();
+
+	Buffer resultBuffer(
+		m_device_interface, m_device, m_allocator,
+		makeBufferCreateInfo(resultBufferSize, vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT),
+		vk::MemoryRequirement::HostVisible);
+
+	{
+		const vk::Allocation& alloc = resultBuffer.getAllocation();
+		deUint8* resultDataPtr = reinterpret_cast<deUint8*>(alloc.getHostPtr());
+
+		for (deUint32 cmdNdx = 0; cmdNdx < m_dispatchCommands.size(); ++cmdNdx)
+		{
+			deUint8* const	dstPtr = &resultDataPtr[resultBlockSize*cmdNdx];
+
+			*(deUint32*)(dstPtr + 0 * sizeof(deUint32)) = m_dispatchCommands[cmdNdx].m_numWorkGroups[0];
+			*(deUint32*)(dstPtr + 1 * sizeof(deUint32)) = m_dispatchCommands[cmdNdx].m_numWorkGroups[1];
+			*(deUint32*)(dstPtr + 2 * sizeof(deUint32)) = m_dispatchCommands[cmdNdx].m_numWorkGroups[2];
+			*(deUint32*)(dstPtr + RESULT_BLOCK_NUM_PASSED_OFFSET) = 0;
+		}
+
+		vk::flushMappedMemoryRange(m_device_interface, m_device, alloc.getMemory(), alloc.getOffset(), resultBufferSize);
+	}
+
+	// Create verify compute shader
+	const vk::Unique<vk::VkShaderModule> verifyShader(createShaderModule(
+		m_device_interface, m_device, m_context.getBinaryCollection().get("indirect_dispatch_" + m_name + "_verify"), 0u));
+
+	// Create descriptorSetLayout
+	vk::DescriptorSetLayoutBuilder layoutBuilder;
+	layoutBuilder.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+	vk::Unique<vk::VkDescriptorSetLayout> descriptorSetLayout(layoutBuilder.build(m_device_interface, m_device));
+
+	// Create compute pipeline
+	const vk::Unique<vk::VkPipelineLayout> pipelineLayout(makePipelineLayout(m_device_interface, m_device, *descriptorSetLayout));
+	const vk::Unique<vk::VkPipeline> computePipeline(makeComputePipeline(m_device_interface, m_device, *pipelineLayout, *verifyShader));
+
+	// Create descriptor pool
+	const vk::Unique<vk::VkDescriptorPool> descriptorPool(
+		vk::DescriptorPoolBuilder()
+		.addType(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, (deUint32)m_dispatchCommands.size())
+		.build(m_device_interface, m_device, vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, static_cast<deUint32>(m_dispatchCommands.size())));
+
+	const vk::VkBufferMemoryBarrier ssboPostBarrier = makeBufferMemoryBarrier(
+		vk::VK_ACCESS_SHADER_WRITE_BIT, vk::VK_ACCESS_HOST_READ_BIT, *resultBuffer, 0ull, resultBufferSize);
+
+	// Create command buffer
+	const vk::Unique<vk::VkCommandPool> cmdPool(makeCommandPool(m_device_interface, m_device, m_queueFamilyIndex));
+	const vk::Unique<vk::VkCommandBuffer> cmdBuffer(makeCommandBuffer(m_device_interface, m_device, *cmdPool));
+
+	// Begin recording commands
+	beginCommandBuffer(m_device_interface, *cmdBuffer);
+
+	// Create indirect buffer
+	Buffer indirectBuffer(
+		m_device_interface, m_device, m_allocator,
+		makeBufferCreateInfo(m_bufferSize, vk::VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT | vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT),
+		vk::MemoryRequirement::HostVisible);
+	fillIndirectBufferData(*cmdBuffer, indirectBuffer);
+
+	// Bind compute pipeline
+	m_device_interface.cmdBindPipeline(*cmdBuffer, vk::VK_PIPELINE_BIND_POINT_COMPUTE, *computePipeline);
+
+	// Allocate descriptor sets
+	DynArray< vk::Move<vk::VkDescriptorSet> > descriptorSets(m_dispatchCommands.size());
+
+	vk::VkDeviceSize curOffset = 0;
+
+	// Create descriptor sets
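+	// Each command gets its own descriptor set pointing at its result block, then dispatches indirectly from its offset in the indirect buffer.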
+	for (deUint32 cmdNdx = 0; cmdNdx < m_dispatchCommands.size(); ++cmdNdx)
+	{
+		descriptorSets[cmdNdx] = makeDescriptorSet(m_device_interface, m_device, *descriptorPool, *descriptorSetLayout);
+
+		const vk::VkDescriptorBufferInfo resultDescriptorInfo = makeDescriptorBufferInfo(*resultBuffer, curOffset, resultBlockSize);
+
+		vk::DescriptorSetUpdateBuilder descriptorSetBuilder;
+		descriptorSetBuilder.writeSingle(*descriptorSets[cmdNdx], vk::DescriptorSetUpdateBuilder::Location::binding(0u), vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &resultDescriptorInfo);
+		descriptorSetBuilder.update(m_device_interface, m_device);
+
+		// Bind descriptor set
+		m_device_interface.cmdBindDescriptorSets(*cmdBuffer, vk::VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSets[cmdNdx].get(), 0u, DE_NULL);
+
+		// Dispatch indirect compute command
+		m_device_interface.cmdDispatchIndirect(*cmdBuffer, *indirectBuffer, m_dispatchCommands[cmdNdx].m_offset);
+
+		curOffset += resultBlockSize;
+	}
+
+	// Insert memory barrier
+	m_device_interface.cmdPipelineBarrier(*cmdBuffer, vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0,
+										  0, (const vk::VkMemoryBarrier*)DE_NULL,
+										  1, &ssboPostBarrier,
+										  0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+
+	// End recording commands
+	endCommandBuffer(m_device_interface, *cmdBuffer);
+
+	// Wait for command buffer execution finish
+	submitCommandsAndWait(m_device_interface, m_device, m_queue, *cmdBuffer);
+
+	// Check if result buffer contains valid values
+	if (verifyResultBuffer(resultBuffer, resultBlockSize, resultBufferSize))
+		return tcu::TestStatus::pass("Pass");
+	else
+		return tcu::TestStatus::fail("Invalid values in result buffer");
+}
+
+deBool IndirectDispatchInstanceBufferUpload::verifyResultBuffer (const Buffer&			resultBuffer,
+																 const vk::VkDeviceSize	resultBlockSize,
+																 const vk::VkDeviceSize	resultBufferSize) const
+{
+	deBool allOk = true;
+	const vk::Allocation& alloc = resultBuffer.getAllocation();
+	vk::invalidateMappedMemoryRange(m_device_interface, m_device, alloc.getMemory(), alloc.getOffset(), resultBufferSize);
+
+	const deUint8* const resultDataPtr = reinterpret_cast<deUint8*>(alloc.getHostPtr());
+
+	for (deUint32 cmdNdx = 0; cmdNdx < m_dispatchCommands.size(); cmdNdx++)
+	{
+		const DispatchCommand&	cmd = m_dispatchCommands[cmdNdx];
+		const deUint8* const	srcPtr = (const deUint8*)resultDataPtr + cmdNdx*resultBlockSize;
+		const deUint32			numPassed = *(const deUint32*)(srcPtr + RESULT_BLOCK_NUM_PASSED_OFFSET);
+		const deUint32			numInvocationsPerGroup = m_workGroupSize[0] * m_workGroupSize[1] * m_workGroupSize[2];
+		const deUint32			numGroups = cmd.m_numWorkGroups[0] * cmd.m_numWorkGroups[1] * cmd.m_numWorkGroups[2];
+		const deUint32			expectedCount = numInvocationsPerGroup * numGroups;
+
+		if (numPassed != expectedCount)
+		{
+			tcu::TestContext& testCtx = m_context.getTestContext();
+
+			testCtx.getLog()
+				<< tcu::TestLog::Message
+				<< "ERROR: got invalid result for invocation " << cmdNdx
+				<< ": got numPassed = " << numPassed << ", expected " << expectedCount
+				<< tcu::TestLog::EndMessage;
+
+			allOk = false;
+		}
+	}
+
+	return allOk;
+}
+
+class IndirectDispatchCaseBufferUpload : public vkt::TestCase
+{
+public:
+								IndirectDispatchCaseBufferUpload	(tcu::TestContext&			testCtx,
+																	 const DispatchCaseDesc&	caseDesc,
+																	 const glu::GLSLVersion		glslVersion);
+
+	virtual						~IndirectDispatchCaseBufferUpload	(void) {}
+
+	virtual void				initPrograms						(vk::SourceCollections&		programCollection) const;
+	virtual TestInstance*		createInstance						(Context&					context) const;
+
+protected:
+	const deUintptr				m_bufferSize;
+	const tcu::UVec3			m_workGroupSize;
+	const DispatchCommandsVec	m_dispatchCommands;
+	const glu::GLSLVersion		m_glslVersion;
+
+private:
+	IndirectDispatchCaseBufferUpload (const vkt::TestCase&);
+	IndirectDispatchCaseBufferUpload& operator= (const vkt::TestCase&);
+};
+
+IndirectDispatchCaseBufferUpload::IndirectDispatchCaseBufferUpload (tcu::TestContext&		testCtx,
+																	const DispatchCaseDesc& caseDesc,
+																	const glu::GLSLVersion	glslVersion)
+	: vkt::TestCase			(testCtx, caseDesc.m_name, caseDesc.m_description)
+	, m_bufferSize			(caseDesc.m_bufferSize)
+	, m_workGroupSize		(caseDesc.m_workGroupSize)
+	, m_dispatchCommands	(caseDesc.m_dispatchCommands)
+	, m_glslVersion			(glslVersion)
+{
+}
+
+void IndirectDispatchCaseBufferUpload::initPrograms (vk::SourceCollections& programCollection) const
+{
+	const char* const	versionDecl = glu::getGLSLVersionDeclaration(m_glslVersion);
+
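+	// The verify shader increments numPassed once per invocation whose gl_NumWorkGroups matches the expected group count stored in the result block.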
+	std::ostringstream	verifyBuffer;
+
+	verifyBuffer
+		<< versionDecl << "\n"
+		<< "layout(local_size_x = ${LOCAL_SIZE_X}, local_size_y = ${LOCAL_SIZE_Y}, local_size_z = ${LOCAL_SIZE_Z}) in;\n"
+		<< "layout(set = 0, binding = 0, std430) buffer Result\n"
+		<< "{\n"
+		<< "    uvec3           expectedGroupCount;\n"
+		<< "    coherent uint   numPassed;\n"
+		<< "} result;\n"
+		<< "void main (void)\n"
+		<< "{\n"
+		<< "    if (all(equal(result.expectedGroupCount, gl_NumWorkGroups)))\n"
+		<< "        atomicAdd(result.numPassed, 1u);\n"
+		<< "}\n";
+
+	std::map<std::string, std::string> args;
+
+	args["LOCAL_SIZE_X"] = de::toString(m_workGroupSize.x());
+	args["LOCAL_SIZE_Y"] = de::toString(m_workGroupSize.y());
+	args["LOCAL_SIZE_Z"] = de::toString(m_workGroupSize.z());
+
+	std::string verifyProgramString = tcu::StringTemplate(verifyBuffer.str()).specialize(args);
+
+	programCollection.glslSources.add("indirect_dispatch_" + m_name + "_verify") << glu::ComputeSource(verifyProgramString);
+}
+
+TestInstance* IndirectDispatchCaseBufferUpload::createInstance (Context& context) const
+{
+	return new IndirectDispatchInstanceBufferUpload(context, m_name, m_bufferSize, m_workGroupSize, m_dispatchCommands);
+}
+
+class IndirectDispatchInstanceBufferGenerate : public IndirectDispatchInstanceBufferUpload
+{
+public:
+									IndirectDispatchInstanceBufferGenerate	(Context&					context,
+																			 const std::string&			name,
+																			 const deUintptr			bufferSize,
+																			 const tcu::UVec3&			workGroupSize,
+																			 const DispatchCommandsVec&	dispatchCommands)
+										: IndirectDispatchInstanceBufferUpload(context, name, bufferSize, workGroupSize, dispatchCommands) {}
+
+	virtual							~IndirectDispatchInstanceBufferGenerate	(void) {}
+
+protected:
+	virtual void					fillIndirectBufferData					(const vk::VkCommandBuffer	commandBuffer,
+																			 const Buffer&				indirectBuffer);
+
+	vk::Move<vk::VkDescriptorPool>	m_descriptorPool;
+	vk::Move<vk::VkDescriptorSet>	m_descriptorSet;
+	vk::Move<vk::VkPipelineLayout>	m_pipelineLayout;
+	vk::Move<vk::VkPipeline>		m_computePipeline;
+
+private:
+	IndirectDispatchInstanceBufferGenerate (const vkt::TestInstance&);
+	IndirectDispatchInstanceBufferGenerate& operator= (const vkt::TestInstance&);
+};
+
+void IndirectDispatchInstanceBufferGenerate::fillIndirectBufferData (const vk::VkCommandBuffer commandBuffer, const Buffer& indirectBuffer)
+{
+	// Create compute shader that generates data for indirect buffer
+	const vk::Unique<vk::VkShaderModule> genIndirectBufferDataShader(createShaderModule(
+		m_device_interface, m_device, m_context.getBinaryCollection().get("indirect_dispatch_" + m_name + "_generate"), 0u));
+
+	// Create descriptorSetLayout
+	vk::DescriptorSetLayoutBuilder layoutBuilder;
+	layoutBuilder.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+	vk::Unique<vk::VkDescriptorSetLayout> descriptorSetLayout(layoutBuilder.build(m_device_interface, m_device));
+
+	// Create compute pipeline
+	m_pipelineLayout = makePipelineLayout(m_device_interface, m_device, *descriptorSetLayout);
+	m_computePipeline = makeComputePipeline(m_device_interface, m_device, *m_pipelineLayout, *genIndirectBufferDataShader);
+
+	// Create descriptor pool
+	m_descriptorPool = vk::DescriptorPoolBuilder()
+		.addType(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.build(m_device_interface, m_device, vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+
+	// Create descriptor set
+	m_descriptorSet = makeDescriptorSet(m_device_interface, m_device, *m_descriptorPool, *descriptorSetLayout);
+
+	const vk::VkDescriptorBufferInfo indirectDescriptorInfo = makeDescriptorBufferInfo(*indirectBuffer, 0ull, m_bufferSize);
+
+	vk::DescriptorSetUpdateBuilder	descriptorSetBuilder;
+	descriptorSetBuilder.writeSingle(*m_descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &indirectDescriptorInfo);
+	descriptorSetBuilder.update(m_device_interface, m_device);
+
+	const vk::VkBufferMemoryBarrier bufferBarrier = makeBufferMemoryBarrier(
+		vk::VK_ACCESS_SHADER_WRITE_BIT, vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT, *indirectBuffer, 0ull, m_bufferSize);
+
+	// Bind compute pipeline
+	m_device_interface.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_COMPUTE, *m_computePipeline);
+
+	// Bind descriptor set
+	m_device_interface.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &m_descriptorSet.get(), 0u, DE_NULL);
+
+	// Dispatch compute command
+	m_device_interface.cmdDispatch(commandBuffer, 1u, 1u, 1u);
+
+	// Insert memory barrier
+	m_device_interface.cmdPipelineBarrier(commandBuffer, vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT, (vk::VkDependencyFlags)0,
+										  0, (const vk::VkMemoryBarrier*)DE_NULL,
+										  1, &bufferBarrier,
+										  0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+}
+
+class IndirectDispatchCaseBufferGenerate : public IndirectDispatchCaseBufferUpload
+{
+public:
+							IndirectDispatchCaseBufferGenerate	(tcu::TestContext&			testCtx,
+																 const DispatchCaseDesc&	caseDesc,
+																 const glu::GLSLVersion		glslVersion)
+								: IndirectDispatchCaseBufferUpload(testCtx, caseDesc, glslVersion) {}
+
+	virtual					~IndirectDispatchCaseBufferGenerate	(void) {}
+
+	virtual void			initPrograms						(vk::SourceCollections&		programCollection) const;
+	virtual TestInstance*	createInstance						(Context&					context) const;
+
+private:
+	IndirectDispatchCaseBufferGenerate (const vkt::TestCase&);
+	IndirectDispatchCaseBufferGenerate& operator= (const vkt::TestCase&);
+};
+
+void IndirectDispatchCaseBufferGenerate::initPrograms (vk::SourceCollections& programCollection) const
+{
+	IndirectDispatchCaseBufferUpload::initPrograms(programCollection);
+
+	const char* const	versionDecl = glu::getGLSLVersionDeclaration(m_glslVersion);
+
+	std::ostringstream computeBuffer;
+
+	// Header
+	computeBuffer
+		<< versionDecl << "\n"
+		<< "layout(local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n"
+		<< "layout(set = 0, binding = 0, std430) buffer Out\n"
+		<< "{\n"
+		<< "	highp uint data[];\n"
+		<< "};\n"
+		<< "void writeCmd (uint offset, uvec3 numWorkGroups)\n"
+		<< "{\n"
+		<< "	data[offset+0u] = numWorkGroups.x;\n"
+		<< "	data[offset+1u] = numWorkGroups.y;\n"
+		<< "	data[offset+2u] = numWorkGroups.z;\n"
+		<< "}\n"
+		<< "void main (void)\n"
+		<< "{\n";
+
+	// Dispatch commands
+	for (DispatchCommandsVec::const_iterator cmdIter = m_dispatchCommands.begin(); cmdIter != m_dispatchCommands.end(); ++cmdIter)
+	{
+		const deUint32 offs = (deUint32)(cmdIter->m_offset / sizeof(deUint32));
+		DE_ASSERT((size_t)offs * sizeof(deUint32) == (size_t)cmdIter->m_offset);
+
+		computeBuffer
+			<< "\twriteCmd(" << offs << "u, uvec3("
+			<< cmdIter->m_numWorkGroups.x() << "u, "
+			<< cmdIter->m_numWorkGroups.y() << "u, "
+			<< cmdIter->m_numWorkGroups.z() << "u));\n";
+	}
+
+	// Ending
+	computeBuffer << "}\n";
+
+	std::string computeString = computeBuffer.str();
+
+	programCollection.glslSources.add("indirect_dispatch_" + m_name + "_generate") << glu::ComputeSource(computeString);
+}
+
+TestInstance* IndirectDispatchCaseBufferGenerate::createInstance (Context& context) const
+{
+	return new IndirectDispatchInstanceBufferGenerate(context, m_name, m_bufferSize, m_workGroupSize, m_dispatchCommands);
+}
+
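+// Convenience builders for DispatchCommandsVec literals used by the case table below.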
+DispatchCommandsVec commandsVec (const DispatchCommand& cmd)
+{
+	DispatchCommandsVec vec;
+	vec.push_back(cmd);
+	return vec;
+}
+
+DispatchCommandsVec commandsVec (const DispatchCommand& cmd0,
+								 const DispatchCommand& cmd1,
+								 const DispatchCommand& cmd2,
+								 const DispatchCommand& cmd3,
+								 const DispatchCommand& cmd4)
+{
+	DispatchCommandsVec vec;
+	vec.push_back(cmd0);
+	vec.push_back(cmd1);
+	vec.push_back(cmd2);
+	vec.push_back(cmd3);
+	vec.push_back(cmd4);
+	return vec;
+}
+
+DispatchCommandsVec commandsVec (const DispatchCommand& cmd0,
+								 const DispatchCommand& cmd1,
+								 const DispatchCommand& cmd2,
+								 const DispatchCommand& cmd3,
+								 const DispatchCommand& cmd4,
+								 const DispatchCommand& cmd5,
+								 const DispatchCommand& cmd6)
+{
+	DispatchCommandsVec vec;
+	vec.push_back(cmd0);
+	vec.push_back(cmd1);
+	vec.push_back(cmd2);
+	vec.push_back(cmd3);
+	vec.push_back(cmd4);
+	vec.push_back(cmd5);
+	vec.push_back(cmd6);
+	return vec;
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createIndirectComputeDispatchTests (tcu::TestContext& testCtx)
+{
+	static const DispatchCaseDesc s_dispatchCases[] =
+	{
+		DispatchCaseDesc("single_invocation", "Single invocation only from offset 0", INDIRECT_COMMAND_OFFSET, tcu::UVec3(1, 1, 1),
+			commandsVec(DispatchCommand(0, tcu::UVec3(1, 1, 1)))
+		),
+		DispatchCaseDesc("multiple_groups", "Multiple groups dispatched from offset 0", INDIRECT_COMMAND_OFFSET, tcu::UVec3(1, 1, 1),
+			commandsVec(DispatchCommand(0, tcu::UVec3(2, 3, 5)))
+		),
+		DispatchCaseDesc("multiple_groups_multiple_invocations", "Multiple groups of size 2x3x1 from offset 0", INDIRECT_COMMAND_OFFSET, tcu::UVec3(2, 3, 1),
+			commandsVec(DispatchCommand(0, tcu::UVec3(1, 2, 3)))
+		),
+		DispatchCaseDesc("small_offset", "Small offset", 16 + INDIRECT_COMMAND_OFFSET, tcu::UVec3(1, 1, 1),
+			commandsVec(DispatchCommand(16, tcu::UVec3(1, 1, 1)))
+		),
+		DispatchCaseDesc("large_offset", "Large offset", (2 << 20), tcu::UVec3(1, 1, 1),
+			commandsVec(DispatchCommand((1 << 20) + 12, tcu::UVec3(1, 1, 1)))
+		),
+		DispatchCaseDesc("large_offset_multiple_invocations", "Large offset, multiple invocations", (2 << 20), tcu::UVec3(2, 3, 1),
+			commandsVec(DispatchCommand((1 << 20) + 12, tcu::UVec3(1, 2, 3)))
+		),
+		DispatchCaseDesc("empty_command", "Empty command", INDIRECT_COMMAND_OFFSET, tcu::UVec3(1, 1, 1),
+			commandsVec(DispatchCommand(0, tcu::UVec3(0, 0, 0)))
+		),
+		DispatchCaseDesc("multi_dispatch", "Dispatch multiple compute commands from single buffer", 1 << 10, tcu::UVec3(3, 1, 2),
+			commandsVec(DispatchCommand(0, tcu::UVec3(1, 1, 1)),
+						DispatchCommand(INDIRECT_COMMAND_OFFSET, tcu::UVec3(2, 1, 1)),
+						DispatchCommand(104, tcu::UVec3(1, 3, 1)),
+						DispatchCommand(40, tcu::UVec3(1, 1, 7)),
+						DispatchCommand(52, tcu::UVec3(1, 1, 4)))
+		),
+		DispatchCaseDesc("multi_dispatch_reuse_command", "Dispatch multiple compute commands from single buffer", 1 << 10, tcu::UVec3(3, 1, 2),
+			commandsVec(DispatchCommand(0, tcu::UVec3(1, 1, 1)),
+						DispatchCommand(0, tcu::UVec3(1, 1, 1)),
+						DispatchCommand(0, tcu::UVec3(1, 1, 1)),
+						DispatchCommand(104, tcu::UVec3(1, 3, 1)),
+						DispatchCommand(104, tcu::UVec3(1, 3, 1)),
+						DispatchCommand(52, tcu::UVec3(1, 1, 4)),
+						DispatchCommand(52, tcu::UVec3(1, 1, 4)))
+		),
+	};
+
+	de::MovePtr<tcu::TestCaseGroup> indirectComputeDispatchTests(new tcu::TestCaseGroup(testCtx, "indirect_dispatch", "Indirect dispatch tests"));
+
+	tcu::TestCaseGroup* const	groupBufferUpload = new tcu::TestCaseGroup(testCtx, "upload_buffer", "");
+	indirectComputeDispatchTests->addChild(groupBufferUpload);
+
+	for (deUint32 ndx = 0; ndx < DE_LENGTH_OF_ARRAY(s_dispatchCases); ndx++)
+	{
+		groupBufferUpload->addChild(new IndirectDispatchCaseBufferUpload(testCtx, s_dispatchCases[ndx], glu::GLSL_VERSION_310_ES));
+	}
+
+	tcu::TestCaseGroup* const	groupBufferGenerate = new tcu::TestCaseGroup(testCtx, "gen_in_compute", "");
+	indirectComputeDispatchTests->addChild(groupBufferGenerate);
+
+	for (deUint32 ndx = 0; ndx < DE_LENGTH_OF_ARRAY(s_dispatchCases); ndx++)
+	{
+		groupBufferGenerate->addChild(new IndirectDispatchCaseBufferGenerate(testCtx, s_dispatchCases[ndx], glu::GLSL_VERSION_310_ES));
+	}
+
+	return indirectComputeDispatchTests.release();
+}
+
+} // compute
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/compute/vktComputeIndirectComputeDispatchTests.hpp b/external/vulkancts/modules/vulkan/compute/vktComputeIndirectComputeDispatchTests.hpp
new file mode 100644
index 0000000..33ea518
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/compute/vktComputeIndirectComputeDispatchTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTCOMPUTEINDIRECTCOMPUTEDISPATCHTESTS_HPP
+#define _VKTCOMPUTEINDIRECTCOMPUTEDISPATCHTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Indirect Compute Dispatch tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace compute
+{
+
+tcu::TestCaseGroup* createIndirectComputeDispatchTests (tcu::TestContext& testCtx);
+
+} // compute
+} // vkt
+
+#endif // _VKTCOMPUTEINDIRECTCOMPUTEDISPATCHTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/compute/vktComputeShaderBuiltinVarTests.cpp b/external/vulkancts/modules/vulkan/compute/vktComputeShaderBuiltinVarTests.cpp
new file mode 100644
index 0000000..ec7345c
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/compute/vktComputeShaderBuiltinVarTests.cpp
@@ -0,0 +1,575 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Compute Shader Built-in variable tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktComputeShaderBuiltinVarTests.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vktComputeTestsUtil.hpp"
+
+#include "vkDefs.hpp"
+#include "vkPlatform.hpp"
+#include "vkRef.hpp"
+#include "vkPrograms.hpp"
+#include "vkStrUtil.hpp"
+#include "vkRefUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkDeviceUtil.hpp"
+#include "vkTypeUtil.hpp"
+#include "vkBuilderUtil.hpp"
+
+#include "tcuTestLog.hpp"
+#include "tcuFormatUtil.hpp"
+#include "tcuVectorUtil.hpp"
+
+#include "gluShaderUtil.hpp"
+
+#include "deUniquePtr.hpp"
+#include "deSharedPtr.hpp"
+
+#include <map>
+#include <string>
+#include <vector>
+
+namespace vkt
+{
+namespace compute
+{
+namespace
+{
+
+using namespace vk;
+using std::string;
+using std::vector;
+using std::map;
+using tcu::TestLog;
+using tcu::UVec3;
+using tcu::IVec3;
+
+class ComputeBuiltinVarInstance;
+class ComputeBuiltinVarCase;
+
+static const string s_prefixProgramName = "compute_";
+
+static inline bool compareNumComponents (const UVec3& a, const UVec3& b, const int numComps)
+{
+	DE_ASSERT(numComps == 1 || numComps == 3);
+	return numComps == 3 ? tcu::allEqual(a, b) : a.x() == b.x();
+}
+
+static inline UVec3 readResultVec (const deUint32* ptr, const int numComps)
+{
+	UVec3 res;
+	for (int ndx = 0; ndx < numComps; ndx++)
+		res[ndx] = ptr[ndx];
+	return res;
+}
+
+struct LogComps
+{
+	const UVec3&	v;
+	int				numComps;
+
+					LogComps	(const UVec3 &v_, int numComps_) : v(v_), numComps(numComps_) {}
+};
+
+static inline std::ostream& operator<< (std::ostream& str, const LogComps& c)
+{
+	DE_ASSERT(c.numComps == 1 || c.numComps == 3);
+	return c.numComps == 3 ? str << c.v : str << c.v.x();
+}
+
+class SubCase
+{
+public:
+	// Use getters instead of public const members, because SubCase must be assignable
+	// in order to be stored in a vector.
+
+	const UVec3&	localSize		(void) const { return m_localSize; }
+	const UVec3&	numWorkGroups	(void) const { return m_numWorkGroups; }
+
+					SubCase			(void) {}
+					SubCase			(const UVec3& localSize_, const UVec3& numWorkGroups_)
+						: m_localSize		(localSize_)
+						, m_numWorkGroups	(numWorkGroups_) {}
+
+private:
+	UVec3	m_localSize;
+	UVec3	m_numWorkGroups;
+};
+
+
+class ComputeBuiltinVarInstance : public vkt::TestInstance
+{
+public:
+									ComputeBuiltinVarInstance	(Context&						context,
+																 const vector<SubCase>&			subCases,
+																 const glu::DataType			varType,
+																 const ComputeBuiltinVarCase*	builtinVarCase);
+
+	virtual tcu::TestStatus			iterate						(void);
+
+private:
+	const VkDevice					m_device;
+	const DeviceInterface&			m_vki;
+	const VkQueue					m_queue;
+	const deUint32					m_queueFamilyIndex;
+	vector<SubCase>					m_subCases;
+	const ComputeBuiltinVarCase*	m_builtin_var_case;
+	int								m_subCaseNdx;
+	const glu::DataType				m_varType;
+};
+
+class ComputeBuiltinVarCase : public vkt::TestCase
+{
+public:
+							ComputeBuiltinVarCase	(tcu::TestContext& context, const char* name, const char* varName, glu::DataType varType);
+							~ComputeBuiltinVarCase	(void);
+
+	TestInstance*			createInstance			(Context& context) const
+	{
+		return new ComputeBuiltinVarInstance(context, m_subCases, m_varType, this);
+	}
+
+	virtual void			initPrograms			(SourceCollections& programCollection) const;
+	virtual UVec3			computeReference		(const UVec3& numWorkGroups, const UVec3& workGroupSize, const UVec3& workGroupID, const UVec3& localInvocationID) const = 0;
+
+protected:
+	string					genBuiltinVarSource		(const string& varName, glu::DataType varType, const UVec3& localSize) const;
+	vector<SubCase>			m_subCases;
+
+private:
+	deUint32				getProgram				(const tcu::UVec3& localSize);
+
+	const string			m_varName;
+	const glu::DataType		m_varType;
+	int						m_subCaseNdx;
+
+	ComputeBuiltinVarCase (const ComputeBuiltinVarCase& other);
+	ComputeBuiltinVarCase& operator= (const ComputeBuiltinVarCase& other);
+};
+
+ComputeBuiltinVarCase::ComputeBuiltinVarCase (tcu::TestContext& context, const char* name, const char* varName, glu::DataType varType)
+	: TestCase		(context, name, varName)
+	, m_varName		(varName)
+	, m_varType		(varType)
+	, m_subCaseNdx	(0)
+{
+}
+
+ComputeBuiltinVarCase::~ComputeBuiltinVarCase (void)
+{
+	ComputeBuiltinVarCase::deinit();
+}
+
+void ComputeBuiltinVarCase::initPrograms (SourceCollections& programCollection) const
+{
+	for (std::size_t i = 0; i < m_subCases.size(); i++)
+	{
+		const SubCase&	subCase = m_subCases[i];
+		std::ostringstream name;
+		name << s_prefixProgramName << i;
+		programCollection.glslSources.add(name.str()) << glu::ComputeSource(genBuiltinVarSource(m_varName, m_varType, subCase.localSize()).c_str());
+	}
+}
+
+string ComputeBuiltinVarCase::genBuiltinVarSource (const string& varName, glu::DataType varType, const UVec3& localSize) const
+{
+	std::ostringstream src;
+
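+	// Build a compute shader that writes the value of the tested built-in variable for every invocation
+	// into the output SSBO, indexed by the linearized gl_GlobalInvocationID (strides come from the uniform block).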
+	src << "#version 310 es\n"
+		<< "layout (local_size_x = " << localSize.x() << ", local_size_y = " << localSize.y() << ", local_size_z = " << localSize.z() << ") in;\n"
+		<< "layout(set = 0, binding = 0) uniform Stride\n"
+		<< "{\n"
+		<< "	uvec2 u_stride;\n"
+		<< "} stride;\n"
+		<< "layout(set = 0, binding = 1, std430) buffer Output\n"
+		<< "{\n"
+		<< "	" << glu::getDataTypeName(varType) << " result[];\n"
+		<< "} sb_out;\n"
+		<< "\n"
+		<< "void main (void)\n"
+		<< "{\n"
+		<< "	highp uint offset = stride.u_stride.x*gl_GlobalInvocationID.z + stride.u_stride.y*gl_GlobalInvocationID.y + gl_GlobalInvocationID.x;\n"
+		<< "	sb_out.result[offset] = " << varName << ";\n"
+		<< "}\n";
+
+	return src.str();
+}
+
+class NumWorkGroupsCase : public ComputeBuiltinVarCase
+{
+public:
+	NumWorkGroupsCase (tcu::TestContext& context)
+		: ComputeBuiltinVarCase(context, "num_work_groups", "gl_NumWorkGroups", glu::TYPE_UINT_VEC3)
+	{
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(52, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(1, 39, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(1, 1, 78)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(4, 7, 11)));
+		m_subCases.push_back(SubCase(UVec3(2, 3, 4), UVec3(4, 7, 11)));
+	}
+
+	UVec3 computeReference (const UVec3& numWorkGroups, const UVec3& workGroupSize, const UVec3& workGroupID, const UVec3& localInvocationID) const
+	{
+		DE_UNREF(numWorkGroups);
+		DE_UNREF(workGroupSize);
+		DE_UNREF(workGroupID);
+		DE_UNREF(localInvocationID);
+		return numWorkGroups;
+	}
+};
+
+class WorkGroupSizeCase : public ComputeBuiltinVarCase
+{
+public:
+	WorkGroupSizeCase (tcu::TestContext& context)
+		: ComputeBuiltinVarCase(context, "work_group_size", "gl_WorkGroupSize", glu::TYPE_UINT_VEC3)
+	{
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(2, 7, 3)));
+		m_subCases.push_back(SubCase(UVec3(2, 1, 1), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(2, 1, 1), UVec3(1, 3, 5)));
+		m_subCases.push_back(SubCase(UVec3(1, 3, 1), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 7), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 7), UVec3(3, 3, 1)));
+		m_subCases.push_back(SubCase(UVec3(10, 3, 4), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(10, 3, 4), UVec3(3, 1, 2)));
+	}
+
+	UVec3 computeReference (const UVec3& numWorkGroups, const UVec3& workGroupSize, const UVec3& workGroupID, const UVec3& localInvocationID) const
+	{
+		DE_UNREF(numWorkGroups);
+		DE_UNREF(workGroupID);
+		DE_UNREF(localInvocationID);
+		return workGroupSize;
+	}
+};
+
+//-----------------------------------------------------------------------
+class WorkGroupIDCase : public ComputeBuiltinVarCase
+{
+public:
+	WorkGroupIDCase (tcu::TestContext& context)
+		: ComputeBuiltinVarCase(context, "work_group_id", "gl_WorkGroupID", glu::TYPE_UINT_VEC3)
+	{
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(52, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(1, 39, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(1, 1, 78)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(4, 7, 11)));
+		m_subCases.push_back(SubCase(UVec3(2, 3, 4), UVec3(4, 7, 11)));
+	}
+
+	UVec3 computeReference (const UVec3& numWorkGroups, const UVec3& workGroupSize, const UVec3& workGroupID, const UVec3& localInvocationID) const
+	{
+		DE_UNREF(numWorkGroups);
+		DE_UNREF(workGroupSize);
+		DE_UNREF(localInvocationID);
+		return workGroupID;
+	}
+};
+
+class LocalInvocationIDCase : public ComputeBuiltinVarCase
+{
+public:
+	LocalInvocationIDCase (tcu::TestContext& context)
+		: ComputeBuiltinVarCase(context, "local_invocation_id", "gl_LocalInvocationID", glu::TYPE_UINT_VEC3)
+	{
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(2, 7, 3)));
+		m_subCases.push_back(SubCase(UVec3(2, 1, 1), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(2, 1, 1), UVec3(1, 3, 5)));
+		m_subCases.push_back(SubCase(UVec3(1, 3, 1), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 7), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 7), UVec3(3, 3, 1)));
+		m_subCases.push_back(SubCase(UVec3(10, 3, 4), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(10, 3, 4), UVec3(3, 1, 2)));
+	}
+
+	UVec3 computeReference (const UVec3& numWorkGroups, const UVec3& workGroupSize, const UVec3& workGroupID, const UVec3& localInvocationID) const
+	{
+		DE_UNREF(numWorkGroups);
+		DE_UNREF(workGroupSize);
+		DE_UNREF(workGroupID);
+		return localInvocationID;
+	}
+};
+
+class GlobalInvocationIDCase : public ComputeBuiltinVarCase
+{
+public:
+	GlobalInvocationIDCase (tcu::TestContext& context)
+		: ComputeBuiltinVarCase(context, "global_invocation_id", "gl_GlobalInvocationID", glu::TYPE_UINT_VEC3)
+	{
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(52, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(1, 39, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(1, 1, 78)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(4, 7, 11)));
+		m_subCases.push_back(SubCase(UVec3(2, 3, 4), UVec3(4, 7, 11)));
+		m_subCases.push_back(SubCase(UVec3(10, 3, 4), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(10, 3, 4), UVec3(3, 1, 2)));
+	}
+
+	UVec3 computeReference (const UVec3& numWorkGroups, const UVec3& workGroupSize, const UVec3& workGroupID, const UVec3& localInvocationID) const
+	{
+		DE_UNREF(numWorkGroups);
+		return workGroupID * workGroupSize + localInvocationID;
+	}
+};
+
+class LocalInvocationIndexCase : public ComputeBuiltinVarCase
+{
+public:
+	LocalInvocationIndexCase (tcu::TestContext& context)
+		: ComputeBuiltinVarCase(context, "local_invocation_index", "gl_LocalInvocationIndex", glu::TYPE_UINT)
+	{
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(1, 39, 1)));
+		m_subCases.push_back(SubCase(UVec3(1, 1, 1), UVec3(4, 7, 11)));
+		m_subCases.push_back(SubCase(UVec3(2, 3, 4), UVec3(4, 7, 11)));
+		m_subCases.push_back(SubCase(UVec3(10, 3, 4), UVec3(1, 1, 1)));
+		m_subCases.push_back(SubCase(UVec3(10, 3, 4), UVec3(3, 1, 2)));
+	}
+
+	UVec3 computeReference (const UVec3& numWorkGroups, const UVec3& workGroupSize, const UVec3& workGroupID, const UVec3& localInvocationID) const
+	{
+		DE_UNREF(workGroupID);
+		DE_UNREF(numWorkGroups);
+		return UVec3(localInvocationID.z()*workGroupSize.x()*workGroupSize.y() + localInvocationID.y()*workGroupSize.x() + localInvocationID.x(), 0, 0);
+	}
+};
+
+ComputeBuiltinVarInstance::ComputeBuiltinVarInstance (Context&						context,
+													  const vector<SubCase>&		subCases,
+													  const glu::DataType			varType,
+													  const ComputeBuiltinVarCase*	builtinVarCase)
+	: vkt::TestInstance		(context)
+	, m_device				(m_context.getDevice())
+	, m_vki					(m_context.getDeviceInterface())
+	, m_queue				(context.getUniversalQueue())
+	, m_queueFamilyIndex	(context.getUniversalQueueFamilyIndex())
+	, m_subCases			(subCases)
+	, m_builtin_var_case	(builtinVarCase)
+	, m_subCaseNdx			(0)
+	, m_varType				(varType)
+{
+}
+
+tcu::TestStatus	ComputeBuiltinVarInstance::iterate (void)
+{
+	std::ostringstream program_name;
+	program_name << s_prefixProgramName << m_subCaseNdx;
+
+	const SubCase&				subCase				= m_subCases[m_subCaseNdx];
+	const tcu::UVec3			globalSize			= subCase.localSize()*subCase.numWorkGroups();
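+	// Linearization strides for gl_GlobalInvocationID: x = invocations per Z-slice, y = invocations per row.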
+	const tcu::UVec2			stride				(globalSize[0] * globalSize[1], globalSize[0]);
+	const deUint32				sizeOfUniformBuffer	= sizeof(stride);
+	const int					numScalars			= glu::getDataTypeScalarSize(m_varType);
+	const deUint32				numInvocations		= subCase.localSize()[0] * subCase.localSize()[1] * subCase.localSize()[2] * subCase.numWorkGroups()[0] * subCase.numWorkGroups()[1] * subCase.numWorkGroups()[2];
+
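+	// Result elements use the std430 array stride of the type; a uvec3 element therefore occupies 16 bytes, same as uvec4.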
+	deUint32					resultBufferStride = 0;
+	switch (m_varType)
+	{
+		case glu::TYPE_UINT:
+			resultBufferStride = sizeof(deUint32);
+			break;
+		case glu::TYPE_UINT_VEC2:
+			resultBufferStride = sizeof(tcu::UVec2);
+			break;
+		case glu::TYPE_UINT_VEC3:
+		case glu::TYPE_UINT_VEC4:
+			resultBufferStride = sizeof(tcu::UVec4);
+			break;
+		default:
+			DE_FATAL("Illegal data type");
+	}
+
+	const deUint32				resultBufferSize	= numInvocations * resultBufferStride;
+
+	// Create the uniform (stride) and result buffers
+	Buffer uniformBuffer(m_vki, m_device, m_context.getDefaultAllocator(), makeBufferCreateInfo(sizeOfUniformBuffer, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT), MemoryRequirement::HostVisible);
+	Buffer resultBuffer(m_vki, m_device, m_context.getDefaultAllocator(), makeBufferCreateInfo(resultBufferSize, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible);
+
+	{
+		const Allocation& alloc = uniformBuffer.getAllocation();
+		memcpy(alloc.getHostPtr(), &stride, sizeOfUniformBuffer);
+		flushMappedMemoryRange(m_vki, m_device, alloc.getMemory(), alloc.getOffset(), sizeOfUniformBuffer);
+	}
+
+	// Create descriptorSetLayout
+	const Unique<VkDescriptorSetLayout>	descriptorSetLayout(
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(m_vki, m_device));
+
+	const Unique<VkShaderModule> shaderModule(createShaderModule(m_vki, m_device, m_context.getBinaryCollection().get(program_name.str()), 0u));
+	const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(m_vki, m_device, *descriptorSetLayout));
+	const Unique<VkPipeline> pipeline(makeComputePipeline(m_vki, m_device, *pipelineLayout, *shaderModule));
+
+	const Unique<VkDescriptorPool> descriptorPool(
+		DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER)
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.build(m_vki, m_device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+
+	const VkBufferMemoryBarrier bufferBarrier = makeBufferMemoryBarrier(
+		VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT, *resultBuffer, 0ull, resultBufferSize);
+
+	const Unique<VkCommandPool> cmdPool(makeCommandPool(m_vki, m_device, m_queueFamilyIndex));
+	const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(m_vki, m_device, *cmdPool));
+
+	// Start recording commands
+	beginCommandBuffer(m_vki, *cmdBuffer);
+
+	m_vki.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+
+	// Create descriptor set
+	const Unique<VkDescriptorSet> descriptorSet(makeDescriptorSet(m_vki, m_device, *descriptorPool, *descriptorSetLayout));
+
+	const VkDescriptorBufferInfo resultDescriptorInfo = makeDescriptorBufferInfo(*resultBuffer, 0ull, resultBufferSize);
+	const VkDescriptorBufferInfo uniformDescriptorInfo = makeDescriptorBufferInfo(*uniformBuffer, 0ull, sizeOfUniformBuffer);
+
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, &uniformDescriptorInfo)
+		.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &resultDescriptorInfo)
+		.update(m_vki, m_device);
+
+	m_vki.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
+
+	// Dispatch compute command
+	m_vki.cmdDispatch(*cmdBuffer, subCase.numWorkGroups()[0], subCase.numWorkGroups()[1], subCase.numWorkGroups()[2]);
+
+	m_vki.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0,
+							 0, (const VkMemoryBarrier*)DE_NULL,
+							 1, &bufferBarrier,
+							 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+	// End recording commands
+	endCommandBuffer(m_vki, *cmdBuffer);
+
+	// Wait for command buffer execution finish
+	submitCommandsAndWait(m_vki, m_device, m_queue, *cmdBuffer);
+
+	const Allocation& resultAlloc = resultBuffer.getAllocation();
+	invalidateMappedMemoryRange(m_vki, m_device, resultAlloc.getMemory(), resultAlloc.getOffset(), resultBufferSize);
+
+	const deUint8*	 ptr = reinterpret_cast<deUint8*>(resultAlloc.getHostPtr());
+
+	int			numFailed		= 0;
+	const int	maxLogPrints	= 10;
+
+	tcu::TestContext& testCtx	= m_context.getTestContext();
+
+	for (deUint32 groupZ = 0; groupZ < subCase.numWorkGroups().z(); groupZ++)
+	for (deUint32 groupY = 0; groupY < subCase.numWorkGroups().y(); groupY++)
+	for (deUint32 groupX = 0; groupX < subCase.numWorkGroups().x(); groupX++)
+	for (deUint32 localZ = 0; localZ < subCase.localSize().z(); localZ++)
+	for (deUint32 localY = 0; localY < subCase.localSize().y(); localY++)
+	for (deUint32 localX = 0; localX < subCase.localSize().x(); localX++)
+	{
+		const UVec3			refGroupID(groupX, groupY, groupZ);
+		const UVec3			refLocalID(localX, localY, localZ);
+		const UVec3			refGlobalID = refGroupID * subCase.localSize() + refLocalID;
+
+		const deUint32		refOffset = stride.x()*refGlobalID.z() + stride.y()*refGlobalID.y() + refGlobalID.x();
+
+		const UVec3			refValue = m_builtin_var_case->computeReference(subCase.numWorkGroups(), subCase.localSize(), refGroupID, refLocalID);
+
+		const deUint32*		resPtr = (const deUint32*)(ptr + refOffset * resultBufferStride);
+		const UVec3			resValue = readResultVec(resPtr, numScalars);
+
+		if (!compareNumComponents(refValue, resValue, numScalars))
+		{
+			if (numFailed < maxLogPrints)
+				testCtx.getLog()
+				<< TestLog::Message
+				<< "ERROR: comparison failed at offset " << refOffset
+				<< ": expected " << LogComps(refValue, numScalars)
+				<< ", got " << LogComps(resValue, numScalars)
+				<< TestLog::EndMessage;
+			else if (numFailed == maxLogPrints)
+				testCtx.getLog() << TestLog::Message << "..." << TestLog::EndMessage;
+
+			numFailed += 1;
+		}
+	}
+
+	testCtx.getLog() << TestLog::Message << (numInvocations - numFailed) << " / " << numInvocations << " values passed" << TestLog::EndMessage;
+
+	if (numFailed > 0)
+		return tcu::TestStatus::fail("Comparison failed");
+
+	m_subCaseNdx += 1;
+	return (m_subCaseNdx < (int)m_subCases.size()) ? tcu::TestStatus::incomplete() : tcu::TestStatus::pass("Comparison succeeded");
+}
+
+class ComputeShaderBuiltinVarTests : public tcu::TestCaseGroup
+{
+public:
+			ComputeShaderBuiltinVarTests	(tcu::TestContext& context);
+
+	void	init							(void);
+
+private:
+	ComputeShaderBuiltinVarTests (const ComputeShaderBuiltinVarTests& other);
+	ComputeShaderBuiltinVarTests& operator= (const ComputeShaderBuiltinVarTests& other);
+};
+
+ComputeShaderBuiltinVarTests::ComputeShaderBuiltinVarTests (tcu::TestContext& context)
+	: TestCaseGroup(context, "builtin_var", "Shader builtin var tests")
+{
+}
+
+void ComputeShaderBuiltinVarTests::init (void)
+{
+	addChild(new NumWorkGroupsCase(this->getTestContext()));
+	addChild(new WorkGroupSizeCase(this->getTestContext()));
+	addChild(new WorkGroupIDCase(this->getTestContext()));
+	addChild(new LocalInvocationIDCase(this->getTestContext()));
+	addChild(new GlobalInvocationIDCase(this->getTestContext()));
+	addChild(new LocalInvocationIndexCase(this->getTestContext()));
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createComputeShaderBuiltinVarTests (tcu::TestContext& testCtx)
+{
+	return new ComputeShaderBuiltinVarTests(testCtx);
+}
+
+} // compute
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/compute/vktComputeShaderBuiltinVarTests.hpp b/external/vulkancts/modules/vulkan/compute/vktComputeShaderBuiltinVarTests.hpp
new file mode 100644
index 0000000..3e284ae
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/compute/vktComputeShaderBuiltinVarTests.hpp
@@ -0,0 +1,48 @@
+#ifndef _VKTCOMPUTESHADERBUILTINVARTESTS_HPP
+#define _VKTCOMPUTESHADERBUILTINVARTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Compute Shader Built-in variable tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace compute
+{
+tcu::TestCaseGroup* createComputeShaderBuiltinVarTests (tcu::TestContext& testCtx);
+} // compute
+} // vkt
+
+#endif // _VKTCOMPUTESHADERBUILTINVARTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/compute/vktComputeTests.cpp b/external/vulkancts/modules/vulkan/compute/vktComputeTests.cpp
new file mode 100644
index 0000000..5e10f89
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/compute/vktComputeTests.cpp
@@ -0,0 +1,66 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Compute Shader Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktComputeTests.hpp"
+#include "vktComputeBasicComputeShaderTests.hpp"
+#include "vktComputeIndirectComputeDispatchTests.hpp"
+#include "vktComputeShaderBuiltinVarTests.hpp"
+#include "vktTestGroupUtil.hpp"
+
+namespace vkt
+{
+namespace compute
+{
+
+namespace
+{
+
+void createChildren (tcu::TestCaseGroup* computeTests)
+{
+	tcu::TestContext&	testCtx		= computeTests->getTestContext();
+
+	computeTests->addChild(createBasicComputeShaderTests(testCtx));
+	computeTests->addChild(createIndirectComputeDispatchTests(testCtx));
+	computeTests->addChild(createComputeShaderBuiltinVarTests(testCtx));
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx)
+{
+	return createTestGroup(testCtx, "compute", "Compute shader tests", createChildren);
+}
+
+} // compute
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/compute/vktComputeTests.hpp b/external/vulkancts/modules/vulkan/compute/vktComputeTests.hpp
new file mode 100644
index 0000000..045653b
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/compute/vktComputeTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTCOMPUTETESTS_HPP
+#define _VKTCOMPUTETESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Compute Shader Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace compute
+{
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx);
+
+} // compute
+} // vkt
+
+#endif // _VKTCOMPUTETESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/compute/vktComputeTestsUtil.cpp b/external/vulkancts/modules/vulkan/compute/vktComputeTestsUtil.cpp
new file mode 100644
index 0000000..51d936b
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/compute/vktComputeTestsUtil.cpp
@@ -0,0 +1,335 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Compute tests utility classes
+ *//*--------------------------------------------------------------------*/
+
+#include "vktComputeTestsUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkTypeUtil.hpp"
+
+using namespace vk;
+
+namespace vkt
+{
+namespace compute
+{
+
+Buffer::Buffer (const DeviceInterface&		vk,
+				const VkDevice				device,
+				Allocator&					allocator,
+				const VkBufferCreateInfo&	bufferCreateInfo,
+				const MemoryRequirement		memoryRequirement)
+{
+	m_buffer = createBuffer(vk, device, &bufferCreateInfo);
+	m_allocation = allocator.allocate(getBufferMemoryRequirements(vk, device, *m_buffer), memoryRequirement);
+	VK_CHECK(vk.bindBufferMemory(device, *m_buffer, m_allocation->getMemory(), m_allocation->getOffset()));
+}
+
+Image::Image (const DeviceInterface&	vk,
+			  const VkDevice			device,
+			  Allocator&				allocator,
+			  const VkImageCreateInfo&	imageCreateInfo,
+			  const MemoryRequirement	memoryRequirement)
+{
+	m_image = createImage(vk, device, &imageCreateInfo);
+	m_allocation = allocator.allocate(getImageMemoryRequirements(vk, device, *m_image), memoryRequirement);
+	VK_CHECK(vk.bindImageMemory(device, *m_image, m_allocation->getMemory(), m_allocation->getOffset()));
+}
+
+VkBufferCreateInfo makeBufferCreateInfo (const VkDeviceSize			bufferSize,
+										 const VkBufferUsageFlags	usage)
+{
+	const VkBufferCreateInfo bufferCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,	// VkStructureType		sType;
+		DE_NULL,								// const void*			pNext;
+		0u,										// VkBufferCreateFlags	flags;
+		bufferSize,								// VkDeviceSize			size;
+		usage,									// VkBufferUsageFlags	usage;
+		VK_SHARING_MODE_EXCLUSIVE,				// VkSharingMode		sharingMode;
+		0u,										// deUint32				queueFamilyIndexCount;
+		DE_NULL,								// const deUint32*		pQueueFamilyIndices;
+	};
+	return bufferCreateInfo;
+}
+
+VkBufferImageCopy makeBufferImageCopy (const VkExtent3D extent,
+									   const deUint32	arraySize)
+{
+	const VkBufferImageCopy copyParams =
+	{
+		0ull,																		//	VkDeviceSize				bufferOffset;
+		0u,																			//	deUint32					bufferRowLength;
+		0u,																			//	deUint32					bufferImageHeight;
+		makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, arraySize),	//	VkImageSubresourceLayers	imageSubresource;
+		makeOffset3D(0, 0, 0),														//	VkOffset3D					imageOffset;
+		extent,																		//	VkExtent3D					imageExtent;
+	};
+	return copyParams;
+}
+
+Move<VkCommandPool> makeCommandPool (const DeviceInterface& vk, const VkDevice device, const deUint32 queueFamilyIndex)
+{
+	const VkCommandPoolCreateInfo commandPoolParams =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,			// VkStructureType			sType;
+		DE_NULL,											// const void*				pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,	// VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,									// deUint32					queueFamilyIndex;
+	};
+	return createCommandPool(vk, device, &commandPoolParams);
+}
+
+Move<VkCommandBuffer> makeCommandBuffer (const DeviceInterface& vk, const VkDevice device, const VkCommandPool commandPool)
+{
+	const VkCommandBufferAllocateInfo bufferAllocateParams =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,		// VkStructureType			sType;
+		DE_NULL,											// const void*				pNext;
+		commandPool,										// VkCommandPool			commandPool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,					// VkCommandBufferLevel		level;
+		1u,													// deUint32					commandBufferCount;
+	};
+	return allocateCommandBuffer(vk, device, &bufferAllocateParams);
+}
+
+Move<VkPipelineLayout> makePipelineLayout (const DeviceInterface&		vk,
+										   const VkDevice				device)
+{
+	const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType					sType;
+		DE_NULL,											// const void*						pNext;
+		0u,													// VkPipelineLayoutCreateFlags		flags;
+		0u,													// deUint32							setLayoutCount;
+		DE_NULL,											// const VkDescriptorSetLayout*		pSetLayouts;
+		0u,													// deUint32							pushConstantRangeCount;
+		DE_NULL,											// const VkPushConstantRange*		pPushConstantRanges;
+	};
+	return createPipelineLayout(vk, device, &pipelineLayoutParams);
+}
+
+Move<VkPipelineLayout> makePipelineLayout (const DeviceInterface&		vk,
+										   const VkDevice				device,
+										   const VkDescriptorSetLayout	descriptorSetLayout)
+{
+	const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType					sType;
+		DE_NULL,											// const void*						pNext;
+		0u,													// VkPipelineLayoutCreateFlags		flags;
+		1u,													// deUint32							setLayoutCount;
+		&descriptorSetLayout,								// const VkDescriptorSetLayout*		pSetLayouts;
+		0u,													// deUint32							pushConstantRangeCount;
+		DE_NULL,											// const VkPushConstantRange*		pPushConstantRanges;
+	};
+	return createPipelineLayout(vk, device, &pipelineLayoutParams);
+}
+
+Move<VkPipeline> makeComputePipeline (const DeviceInterface&	vk,
+									  const VkDevice			device,
+									  const VkPipelineLayout	pipelineLayout,
+									  const VkShaderModule		shaderModule)
+{
+	const VkPipelineShaderStageCreateInfo pipelineShaderStageParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// VkStructureType						sType;
+		DE_NULL,												// const void*							pNext;
+		0u,														// VkPipelineShaderStageCreateFlags		flags;
+		VK_SHADER_STAGE_COMPUTE_BIT,							// VkShaderStageFlagBits				stage;
+		shaderModule,											// VkShaderModule						module;
+		"main",													// const char*							pName;
+		DE_NULL,												// const VkSpecializationInfo*			pSpecializationInfo;
+	};
+	const VkComputePipelineCreateInfo pipelineCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,		// VkStructureType					sType;
+		DE_NULL,											// const void*						pNext;
+		0u,													// VkPipelineCreateFlags			flags;
+		pipelineShaderStageParams,							// VkPipelineShaderStageCreateInfo	stage;
+		pipelineLayout,										// VkPipelineLayout					layout;
+		DE_NULL,											// VkPipeline						basePipelineHandle;
+		0,													// deInt32							basePipelineIndex;
+	};
+	return createComputePipeline(vk, device, DE_NULL, &pipelineCreateInfo);
+}
+
+Move<VkBufferView> makeBufferView (const DeviceInterface&	vk,
+								   const VkDevice			vkDevice,
+								   const VkBuffer			buffer,
+								   const VkFormat			format,
+								   const VkDeviceSize		offset,
+								   const VkDeviceSize		size)
+{
+	const VkBufferViewCreateInfo bufferViewParams =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,	// VkStructureType			sType;
+		DE_NULL,									// const void*				pNext;
+		0u,											// VkBufferViewCreateFlags	flags;
+		buffer,										// VkBuffer					buffer;
+		format,										// VkFormat					format;
+		offset,										// VkDeviceSize				offset;
+		size,										// VkDeviceSize				range;
+	};
+	return createBufferView(vk, vkDevice, &bufferViewParams);
+}
+
+Move<VkImageView> makeImageView (const DeviceInterface&			vk,
+								 const VkDevice					vkDevice,
+								 const VkImage					image,
+								 const VkImageViewType			imageViewType,
+								 const VkFormat					format,
+								 const VkImageSubresourceRange	subresourceRange)
+{
+	const VkImageViewCreateInfo imageViewParams =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,		// VkStructureType			sType;
+		DE_NULL,										// const void*				pNext;
+		0u,												// VkImageViewCreateFlags	flags;
+		image,											// VkImage					image;
+		imageViewType,									// VkImageViewType			viewType;
+		format,											// VkFormat					format;
+		makeComponentMappingRGBA(),						// VkComponentMapping		components;
+		subresourceRange,								// VkImageSubresourceRange	subresourceRange;
+	};
+	return createImageView(vk, vkDevice, &imageViewParams);
+}
+
+Move<VkDescriptorSet> makeDescriptorSet (const DeviceInterface&			vk,
+										 const VkDevice					device,
+										 const VkDescriptorPool			descriptorPool,
+										 const VkDescriptorSetLayout	setLayout)
+{
+	const VkDescriptorSetAllocateInfo allocateParams =
+	{
+		VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,		// VkStructureType				sType;
+		DE_NULL,											// const void*					pNext;
+		descriptorPool,										// VkDescriptorPool				descriptorPool;
+		1u,													// deUint32						descriptorSetCount;
+		&setLayout,											// const VkDescriptorSetLayout*	pSetLayouts;
+	};
+	return allocateDescriptorSet(vk, device, &allocateParams);
+}
+
+VkBufferMemoryBarrier makeBufferMemoryBarrier (const VkAccessFlags	srcAccessMask,
+											   const VkAccessFlags	dstAccessMask,
+											   const VkBuffer		buffer,
+											   const VkDeviceSize	offset,
+											   const VkDeviceSize	bufferSizeBytes)
+{
+	const VkBufferMemoryBarrier barrier =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,	// VkStructureType	sType;
+		DE_NULL,									// const void*		pNext;
+		srcAccessMask,								// VkAccessFlags	srcAccessMask;
+		dstAccessMask,								// VkAccessFlags	dstAccessMask;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			destQueueFamilyIndex;
+		buffer,										// VkBuffer			buffer;
+		offset,										// VkDeviceSize		offset;
+		bufferSizeBytes,							// VkDeviceSize		size;
+	};
+	return barrier;
+}
+
+VkImageMemoryBarrier makeImageMemoryBarrier	(const VkAccessFlags			srcAccessMask,
+											 const VkAccessFlags			dstAccessMask,
+											 const VkImageLayout			oldLayout,
+											 const VkImageLayout			newLayout,
+											 const VkImage					image,
+											 const VkImageSubresourceRange	subresourceRange)
+{
+	const VkImageMemoryBarrier barrier =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,			// VkStructureType			sType;
+		DE_NULL,										// const void*				pNext;
+		srcAccessMask,									// VkAccessFlags			srcAccessMask;
+		dstAccessMask,									// VkAccessFlags			dstAccessMask;
+		oldLayout,										// VkImageLayout			oldLayout;
+		newLayout,										// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					destQueueFamilyIndex;
+		image,											// VkImage					image;
+		subresourceRange,								// VkImageSubresourceRange	subresourceRange;
+	};
+	return barrier;
+}
+
+void beginCommandBuffer (const DeviceInterface& vk, const VkCommandBuffer commandBuffer)
+{
+	const VkCommandBufferBeginInfo commandBufBeginParams =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+		DE_NULL,										// const void*						pNext;
+		0u,												// VkCommandBufferUsageFlags		flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,	// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+	};
+	VK_CHECK(vk.beginCommandBuffer(commandBuffer, &commandBufBeginParams));
+}
+
+void endCommandBuffer (const DeviceInterface& vk, const VkCommandBuffer commandBuffer)
+{
+	VK_CHECK(vk.endCommandBuffer(commandBuffer));
+}
+
+void submitCommandsAndWait (const DeviceInterface&	vk,
+							const VkDevice			device,
+							const VkQueue			queue,
+							const VkCommandBuffer	commandBuffer)
+{
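+	// Submit the command buffer and block on a fence until execution has completed.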
+	const VkFenceCreateInfo	fenceParams =
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+		DE_NULL,								// const void*			pNext;
+		0u,										// VkFenceCreateFlags	flags;
+	};
+	const Unique<VkFence> fence(createFence(vk, device, &fenceParams));
+
+	const VkSubmitInfo submitInfo =
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,		// VkStructureType			sType;
+		DE_NULL,							// const void*				pNext;
+		0u,									// deUint32					waitSemaphoreCount;
+		DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,									// deUint32					commandBufferCount;
+		&commandBuffer,						// const VkCommandBuffer*	pCommandBuffers;
+		0u,									// deUint32					signalSemaphoreCount;
+		DE_NULL,							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+	VK_CHECK(vk.waitForFences(device, 1u, &fence.get(), DE_TRUE, ~0ull));
+}
+
+} // compute
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/compute/vktComputeTestsUtil.hpp b/external/vulkancts/modules/vulkan/compute/vktComputeTestsUtil.hpp
new file mode 100644
index 0000000..6b1ffeb
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/compute/vktComputeTestsUtil.hpp
@@ -0,0 +1,191 @@
+#ifndef _VKTCOMPUTETESTSUTIL_HPP
+#define _VKTCOMPUTETESTSUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Compute tests utility classes
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vkMemUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkTypeUtil.hpp"
+#include "vkImageUtil.hpp"
+
+namespace vkt
+{
+namespace compute
+{
+
+class Buffer
+{
+public:
+									Buffer			(const vk::DeviceInterface&		vk,
+													 const vk::VkDevice				device,
+													 vk::Allocator&					allocator,
+													 const vk::VkBufferCreateInfo&	bufferCreateInfo,
+													 const vk::MemoryRequirement	memoryRequirement);
+
+	vk::VkBuffer					get				(void) const { return *m_buffer; }
+	vk::VkBuffer					operator*		(void) const { return get(); }
+	vk::Allocation&					getAllocation	(void) const { return *m_allocation; }
+
+private:
+	de::MovePtr<vk::Allocation>		m_allocation;
+	vk::Move<vk::VkBuffer>			m_buffer;
+
+									Buffer			(const Buffer&);  // "deleted"
+	Buffer&							operator=		(const Buffer&);
+};
+
+class Image
+{
+public:
+									Image			(const vk::DeviceInterface&		vk,
+													 const vk::VkDevice				device,
+													 vk::Allocator&					allocator,
+													 const vk::VkImageCreateInfo&	imageCreateInfo,
+													 const vk::MemoryRequirement	memoryRequirement);
+
+	vk::VkImage						get				(void) const { return *m_image; }
+	vk::VkImage						operator*		(void) const { return get(); }
+	vk::Allocation&					getAllocation	(void) const { return *m_allocation; }
+
+private:
+	de::MovePtr<vk::Allocation>		m_allocation;
+	vk::Move<vk::VkImage>			m_image;
+
+									Image			(const Image&);  // "deleted"
+	Image&							operator=		(const Image&);
+};
+
+template<typename T>
+class DynArray
+{
+public:
+				DynArray	(std::size_t size)			{ data = new T[size]; }
+				~DynArray	()							{ delete [] data; }
+
+	T&			operator[]	(std::size_t idx)			{ return data[idx]; }
+	const T&	operator[]	(std::size_t idx) const		{ return data[idx]; }
+
+private:
+	T* data;
+
+				DynArray	(const DynArray&);  // "deleted"
+	DynArray&	operator=	(const DynArray&);
+};
+
+vk::Move<vk::VkCommandPool>			makeCommandPool				(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const deUint32						queueFamilyIndex);
+
+vk::Move<vk::VkCommandBuffer>		makeCommandBuffer			(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkCommandPool			commandPool);
+
+vk::Move<vk::VkPipelineLayout>	makePipelineLayout				(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device);
+
+vk::Move<vk::VkPipelineLayout>	makePipelineLayout				(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkDescriptorSetLayout	descriptorSetLayout);
+
+vk::Move<vk::VkPipeline>		makeComputePipeline				(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkPipelineLayout			pipelineLayout,
+																 const vk::VkShaderModule			shaderModule);
+
+vk::Move<vk::VkBufferView>		makeBufferView					(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkBuffer					buffer,
+																 const vk::VkFormat					format,
+																 const vk::VkDeviceSize				offset,
+																 const vk::VkDeviceSize				size);
+
+vk::Move<vk::VkImageView>		makeImageView					(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkImage					image,
+																 const vk::VkImageViewType			imageViewType,
+																 const vk::VkFormat					format,
+																 const vk::VkImageSubresourceRange	subresourceRange);
+
+vk::Move<vk::VkDescriptorSet>	makeDescriptorSet				(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkDescriptorPool			descriptorPool,
+																 const vk::VkDescriptorSetLayout	setLayout);
+
+vk::VkBufferCreateInfo			makeBufferCreateInfo			(const vk::VkDeviceSize				bufferSize,
+																 const vk::VkBufferUsageFlags		usage);
+
+vk::VkBufferImageCopy			makeBufferImageCopy				(const vk::VkExtent3D				extent,
+																 const deUint32						arraySize);
+
+vk::VkBufferMemoryBarrier		makeBufferMemoryBarrier			(const vk::VkAccessFlags			srcAccessMask,
+																 const vk::VkAccessFlags			dstAccessMask,
+																 const vk::VkBuffer					buffer,
+																 const vk::VkDeviceSize				offset,
+																 const vk::VkDeviceSize				bufferSizeBytes);
+
+vk::VkImageMemoryBarrier		makeImageMemoryBarrier			(const vk::VkAccessFlags			srcAccessMask,
+																 const vk::VkAccessFlags			dstAccessMask,
+																 const vk::VkImageLayout			oldLayout,
+																 const vk::VkImageLayout			newLayout,
+																 const vk::VkImage					image,
+																 const vk::VkImageSubresourceRange	subresourceRange);
+
+void							beginCommandBuffer				(const vk::DeviceInterface&			vk,
+																 const vk::VkCommandBuffer			cmdBuffer);
+
+void							endCommandBuffer				(const vk::DeviceInterface&			vk,
+																 const vk::VkCommandBuffer			cmdBuffer);
+
+void							submitCommandsAndWait			(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkQueue					queue,
+																 const vk::VkCommandBuffer			cmdBuffer);
+
+inline vk::VkExtent3D makeExtent3D (const tcu::IVec3& vec)
+{
+	return vk::makeExtent3D(vec.x(), vec.y(), vec.z());
+}
+
+inline vk::VkDeviceSize getImageSizeBytes (const tcu::IVec3& imageSize, const vk::VkFormat format)
+{
+	return tcu::getPixelSize(vk::mapVkFormat(format)) * imageSize.x() * imageSize.y() * imageSize.z();
+}
+
+} // compute
+} // vkt
+
+#endif // _VKTCOMPUTETESTSUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/draw/CMakeLists.txt b/external/vulkancts/modules/vulkan/draw/CMakeLists.txt
new file mode 100644
index 0000000..57a223e
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/CMakeLists.txt
@@ -0,0 +1,30 @@
+include_directories(..)
+
+set(DEQP_VK_DRAW_SRCS
+	vktDrawTests.hpp
+	vktDrawTests.cpp
+	vktDrawIndexedTest.hpp
+	vktDrawIndexedTest.cpp
+	vktDrawIndirectTest.hpp
+	vktDrawIndirectTest.cpp
+	vktDrawSimpleTest.hpp
+	vktDrawSimpleTest.cpp
+	vktDrawBaseClass.hpp
+	vktDrawBaseClass.cpp
+	vktDrawCreateInfoUtil.hpp
+	vktDrawCreateInfoUtil.cpp
+	vktDrawImageObjectUtil.hpp
+	vktDrawImageObjectUtil.cpp
+	vktDrawBufferObjectUtil.hpp
+	vktDrawBufferObjectUtil.cpp
+	vktDrawTestCaseUtil.hpp
+)
+
+set(DEQP_VK_DRAW_LIBS
+	deqp-vk-common
+	tcutil
+	vkutil
+)
+
+add_library(deqp-vk-draw STATIC ${DEQP_VK_DRAW_SRCS})
+target_link_libraries(deqp-vk-draw ${DEQP_VK_DRAW_LIBS})
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawBaseClass.cpp b/external/vulkancts/modules/vulkan/draw/vktDrawBaseClass.cpp
new file mode 100644
index 0000000..0e0e62f
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawBaseClass.cpp
@@ -0,0 +1,219 @@
+/*------------------------------------------------------------------------
+* Vulkan Conformance Tests
+* ------------------------
+*
+* Copyright (c) 2015 The Khronos Group Inc.
+* Copyright (c) 2015 Intel Corporation
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and/or associated documentation files (the
+* "Materials"), to deal in the Materials without restriction, including
+* without limitation the rights to use, copy, modify, merge, publish,
+* distribute, sublicense, and/or sell copies of the Materials, and to
+* permit persons to whom the Materials are furnished to do so, subject to
+* the following conditions:
+*
+* The above copyright notice(s) and this permission notice shall be included
+* in all copies or substantial portions of the Materials.
+*
+* The Materials are Confidential Information as defined by the
+* Khronos Membership Agreement until designated non-confidential by Khronos,
+* at which point this condition clause shall be removed.
+*
+* THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+* MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+*
+*//*!
+* \file
+* \brief Command draw Tests - Base Class
+*//*--------------------------------------------------------------------*/
+
+#include "vktDrawBaseClass.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+
+DrawTestsBaseClass::DrawTestsBaseClass (Context& context, const char* vertexShaderName, const char* fragmentShaderName)
+	: TestInstance				(context)
+	, m_colorAttachmentFormat	(vk::VK_FORMAT_R8G8B8A8_UNORM)
+	, m_topology				(vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP)
+	, m_vk						(context.getDeviceInterface())
+	, m_vertexShaderName		(vertexShaderName)
+	, m_fragmentShaderName		(fragmentShaderName)
+{
+}
+
+void DrawTestsBaseClass::initialize (void)
+{
+	const vk::VkDevice device				= m_context.getDevice();
+	const deUint32 queueFamilyIndex			= m_context.getUniversalQueueFamilyIndex();
+
+	const PipelineLayoutCreateInfo pipelineLayoutCreateInfo;
+	m_pipelineLayout						= vk::createPipelineLayout(m_vk, device, &pipelineLayoutCreateInfo);
+
+	const vk::VkExtent3D targetImageExtent	= { WIDTH, HEIGHT, 1 };
+	const ImageCreateInfo targetImageCreateInfo(vk::VK_IMAGE_TYPE_2D, m_colorAttachmentFormat, targetImageExtent, 1, 1, vk::VK_SAMPLE_COUNT_1_BIT,
+		vk::VK_IMAGE_TILING_OPTIMAL, vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT | vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT);
+
+	m_colorTargetImage						= Image::createAndAlloc(m_vk, device, targetImageCreateInfo, m_context.getDefaultAllocator());
+
+	const ImageViewCreateInfo colorTargetViewInfo(m_colorTargetImage->object(), vk::VK_IMAGE_VIEW_TYPE_2D, m_colorAttachmentFormat);
+	m_colorTargetView						= vk::createImageView(m_vk, device, &colorTargetViewInfo);
+
+	RenderPassCreateInfo renderPassCreateInfo;
+	renderPassCreateInfo.addAttachment(AttachmentDescription(m_colorAttachmentFormat,
+															 vk::VK_SAMPLE_COUNT_1_BIT,
+															 vk::VK_ATTACHMENT_LOAD_OP_LOAD,
+															 vk::VK_ATTACHMENT_STORE_OP_STORE,
+															 vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+															 vk::VK_ATTACHMENT_STORE_OP_STORE,
+															 vk::VK_IMAGE_LAYOUT_GENERAL,
+															 vk::VK_IMAGE_LAYOUT_GENERAL));
+
+
+	const vk::VkAttachmentReference colorAttachmentReference =
+	{
+		0,
+		vk::VK_IMAGE_LAYOUT_GENERAL
+	};
+
+	renderPassCreateInfo.addSubpass(SubpassDescription(vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
+													   0,
+													   0,
+													   DE_NULL,
+													   1,
+													   &colorAttachmentReference,
+													   DE_NULL,
+													   AttachmentReference(),
+													   0,
+													   DE_NULL));
+
+	m_renderPass		= vk::createRenderPass(m_vk, device, &renderPassCreateInfo);
+
+	std::vector<vk::VkImageView> colorAttachments(1);
+	colorAttachments[0] = *m_colorTargetView;
+
+	const FramebufferCreateInfo framebufferCreateInfo(*m_renderPass, colorAttachments, WIDTH, HEIGHT, 1);
+
+	m_framebuffer		= vk::createFramebuffer(m_vk, device, &framebufferCreateInfo);
+
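+	// One vertex buffer binding with interleaved position and color, each a vec4 (stride = 2 * sizeof(Vec4)).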
+	const vk::VkVertexInputBindingDescription vertexInputBindingDescription =
+	{
+		0,
+		(deUint32)sizeof(tcu::Vec4) * 2,
+		vk::VK_VERTEX_INPUT_RATE_VERTEX,
+	};
+
+	const vk::VkVertexInputAttributeDescription vertexInputAttributeDescriptions[2] =
+	{
+		{
+			0u,
+			0u,
+			vk::VK_FORMAT_R32G32B32A32_SFLOAT,
+			0u
+		},
+		{
+			1u,
+			0u,
+			vk::VK_FORMAT_R32G32B32A32_SFLOAT,
+			(deUint32)(sizeof(float) * 4),
+		}
+	};
+
+	m_vertexInputState = PipelineCreateInfo::VertexInputState(1,
+															  &vertexInputBindingDescription,
+															  2,
+															  vertexInputAttributeDescriptions);
+
+	const vk::VkDeviceSize dataSize = m_data.size() * sizeof(PositionColorVertex);
+	m_vertexBuffer = Buffer::createAndAlloc(m_vk, device, BufferCreateInfo(dataSize,
+		vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), m_context.getDefaultAllocator(), vk::MemoryRequirement::HostVisible);
+
+	deUint8* ptr = reinterpret_cast<deUint8*>(m_vertexBuffer->getBoundMemory().getHostPtr());
+	deMemcpy(ptr, &m_data[0], static_cast<size_t>(dataSize));
+
+	vk::flushMappedMemoryRange(m_vk,
+							   device,
+							   m_vertexBuffer->getBoundMemory().getMemory(),
+							   m_vertexBuffer->getBoundMemory().getOffset(),
+							   dataSize);
+
+	const CmdPoolCreateInfo cmdPoolCreateInfo(queueFamilyIndex);
+	m_cmdPool = vk::createCommandPool(m_vk, device, &cmdPoolCreateInfo);
+
+	const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+		DE_NULL,											// const void*				pNext;
+		*m_cmdPool,											// VkCommandPool			commandPool;
+		vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+		1u,													// deUint32					bufferCount;
+	};
+	m_cmdBuffer = vk::allocateCommandBuffer(m_vk, device, &cmdBufferAllocateInfo);
+
+	initPipeline(device);
+}
+
+void DrawTestsBaseClass::initPipeline (const vk::VkDevice device)
+{
+	const vk::Unique<vk::VkShaderModule> vs(createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_vertexShaderName), 0));
+	const vk::Unique<vk::VkShaderModule> fs(createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_fragmentShaderName), 0));
+
+	const PipelineCreateInfo::ColorBlendState::Attachment vkCbAttachmentState;
+
+	vk::VkViewport viewport;
+	viewport.x				= 0;
+	viewport.y				= 0;
+	viewport.width			= static_cast<float>(WIDTH);
+	viewport.height			= static_cast<float>(HEIGHT);
+	viewport.minDepth		= 0.0f;
+	viewport.maxDepth		= 1.0f;
+
+	vk::VkRect2D scissor;
+	scissor.offset.x		= 0;
+	scissor.offset.y		= 0;
+	scissor.extent.width	= WIDTH;
+	scissor.extent.height	= HEIGHT;
+
+	PipelineCreateInfo pipelineCreateInfo(*m_pipelineLayout, *m_renderPass, 0, 0);
+	pipelineCreateInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*vs, "main", vk::VK_SHADER_STAGE_VERTEX_BIT));
+	pipelineCreateInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*fs, "main", vk::VK_SHADER_STAGE_FRAGMENT_BIT));
+	pipelineCreateInfo.addState(PipelineCreateInfo::VertexInputState(m_vertexInputState));
+	pipelineCreateInfo.addState(PipelineCreateInfo::InputAssemblerState(m_topology));
+	pipelineCreateInfo.addState(PipelineCreateInfo::ColorBlendState(1, &vkCbAttachmentState));
+	pipelineCreateInfo.addState(PipelineCreateInfo::ViewportState(1, std::vector<vk::VkViewport>(1, viewport), std::vector<vk::VkRect2D>(1, scissor)));
+	pipelineCreateInfo.addState(PipelineCreateInfo::DepthStencilState());
+	pipelineCreateInfo.addState(PipelineCreateInfo::RasterizerState());
+	pipelineCreateInfo.addState(PipelineCreateInfo::MultiSampleState());
+
+	m_pipeline = vk::createGraphicsPipeline(m_vk, device, DE_NULL, &pipelineCreateInfo);
+}
+
+void DrawTestsBaseClass::beginRenderPass (void)
+{
+	const vk::VkClearColorValue clearColor = { { 0.0f, 0.0f, 0.0f, 1.0f } };
+	const CmdBufferBeginInfo beginInfo;
+
+	m_vk.beginCommandBuffer(*m_cmdBuffer, &beginInfo);
+
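+	// Transition the color target to GENERAL layout and clear it outside the render pass;
+	// the attachment uses LOAD_OP_LOAD, so the cleared contents are visible inside the pass.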
+	initialTransitionColor2DImage(m_vk, *m_cmdBuffer, m_colorTargetImage->object(), vk::VK_IMAGE_LAYOUT_GENERAL);
+
+	const ImageSubresourceRange subresourceRange(vk::VK_IMAGE_ASPECT_COLOR_BIT);
+	m_vk.cmdClearColorImage(*m_cmdBuffer, m_colorTargetImage->object(),
+		vk::VK_IMAGE_LAYOUT_GENERAL, &clearColor, 1, &subresourceRange);
+
+	const vk::VkRect2D renderArea = { { 0, 0 }, { WIDTH, HEIGHT } };
+	const RenderPassBeginInfo renderPassBegin(*m_renderPass, *m_framebuffer, renderArea);
+
+	m_vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBegin, vk::VK_SUBPASS_CONTENTS_INLINE);
+}
+
+}	// Draw
+}	// vkt
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawBaseClass.hpp b/external/vulkancts/modules/vulkan/draw/vktDrawBaseClass.hpp
new file mode 100644
index 0000000..76c284b
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawBaseClass.hpp
@@ -0,0 +1,153 @@
+#ifndef _VKTDRAWBASECLASS_HPP
+#define _VKTDRAWBASECLASS_HPP
+/*------------------------------------------------------------------------
+* Vulkan Conformance Tests
+* ------------------------
+*
+* Copyright (c) 2015 The Khronos Group Inc.
+* Copyright (c) 2015 Intel Corporation
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and/or associated documentation files (the
+* "Materials"), to deal in the Materials without restriction, including
+* without limitation the rights to use, copy, modify, merge, publish,
+* distribute, sublicense, and/or sell copies of the Materials, and to
+* permit persons to whom the Materials are furnished to do so, subject to
+* the following conditions:
+*
+* The above copyright notice(s) and this permission notice shall be included
+* in all copies or substantial portions of the Materials.
+*
+* The Materials are Confidential Information as defined by the
+* Khronos Membership Agreement until designated non-confidential by Khronos,
+* at which point this condition clause shall be removed.
+*
+* THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+* MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+*
+*//*!
+* \file
+* \brief Command draw Tests - Base Class
+*//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vktTestCase.hpp"
+
+#include "tcuTestLog.hpp"
+#include "tcuResource.hpp"
+#include "tcuImageCompare.hpp"
+#include "tcuCommandLine.hpp"
+
+#include "vkRefUtil.hpp"
+#include "vkImageUtil.hpp"
+
+#include "deSharedPtr.hpp"
+
+#include "vkPrograms.hpp"
+
+#include "vktDrawCreateInfoUtil.hpp"
+#include "vktDrawImageObjectUtil.hpp"
+#include "vktDrawBufferObjectUtil.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+
+struct PositionColorVertex
+{
+	PositionColorVertex(tcu::Vec4 position_, tcu::Vec4 color_)
+		: position(position_)
+		, color(color_)
+	{}
+	tcu::Vec4 position;
+	tcu::Vec4 color;
+};
+
+struct ReferenceImageCoordinates
+{
+	ReferenceImageCoordinates (void)
+		: left		(-0.3)
+		, right		(0.3)
+		, top		(0.3)
+		, bottom	(-0.3)
+	{
+	}
+
+	double left;
+	double right;
+	double top;
+	double bottom;
+};
+
+struct ReferenceImageInstancedCoordinates
+{
+	ReferenceImageInstancedCoordinates (void)
+		: left		(-0.3)
+		, right		(0.6)
+		, top		(0.3)
+		, bottom	(-0.6)
+	{
+	}
+
+	double left;
+	double right;
+	double top;
+	double bottom;
+};
+
+class DrawTestsBaseClass : public TestInstance
+{
+public:
+								DrawTestsBaseClass	(Context& context, const char* vertexShaderName, const char* fragmentShaderName);
+
+protected:
+	void						initialize			(void);
+	virtual void				initPipeline		(const vk::VkDevice device);
+	void						beginRenderPass		(void);
+	virtual tcu::TestStatus		iterate				(void)						{ TCU_FAIL("Implement iterate() method!");	}
+
+	enum
+	{
+		WIDTH = 256,
+		HEIGHT = 256
+	};
+
+	vk::VkFormat									m_colorAttachmentFormat;
+
+	vk::VkPrimitiveTopology							m_topology;
+
+	const vk::DeviceInterface&						m_vk;
+
+	vk::Move<vk::VkPipeline>						m_pipeline;
+	vk::Move<vk::VkPipelineLayout>					m_pipelineLayout;
+
+	de::SharedPtr<Image>							m_colorTargetImage;
+	vk::Move<vk::VkImageView>						m_colorTargetView;
+
+	de::SharedPtr<Buffer>							m_vertexBuffer;
+	PipelineCreateInfo::VertexInputState			m_vertexInputState;
+
+	vk::Move<vk::VkCommandPool>						m_cmdPool;
+	vk::Move<vk::VkCommandBuffer>					m_cmdBuffer;
+
+	vk::Move<vk::VkFramebuffer>						m_framebuffer;
+	vk::Move<vk::VkRenderPass>						m_renderPass;
+
+	const std::string								m_vertexShaderName;
+	const std::string								m_fragmentShaderName;
+
+	std::vector<PositionColorVertex>				m_data;
+	std::vector<deUint32>							m_indexes;
+	de::SharedPtr<Buffer>							m_indexBuffer;
+};
+
+}	// Draw
+}	// vkt
+
+#endif // _VKTDRAWBASECLASS_HPP
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawBufferObjectUtil.cpp b/external/vulkancts/modules/vulkan/draw/vktDrawBufferObjectUtil.cpp
new file mode 100644
index 0000000..25f9a21
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawBufferObjectUtil.cpp
@@ -0,0 +1,83 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Buffer Object Util
+ *//*--------------------------------------------------------------------*/
+
+#include "vktDrawBufferObjectUtil.hpp"
+
+#include "vkQueryUtil.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+
+Buffer::Buffer (const vk::DeviceInterface& vk, vk::VkDevice device, vk::Move<vk::VkBuffer> object_)
+	: m_allocation  (DE_NULL)
+	, m_object		(object_)
+	, m_vk			(vk)
+	, m_device		(device)
+{
+}
+
+void Buffer::bindMemory (de::MovePtr<vk::Allocation> allocation)
+{
+	DE_ASSERT(allocation);
+	VK_CHECK(m_vk.bindBufferMemory(m_device, *m_object, allocation->getMemory(), allocation->getOffset()));
+
+	DE_ASSERT(!m_allocation);
+	m_allocation = allocation;
+}
+
+de::SharedPtr<Buffer> Buffer::createAndAlloc (const vk::DeviceInterface& vk,
+											  vk::VkDevice device,
+											  const vk::VkBufferCreateInfo &createInfo,
+											  vk::Allocator &allocator,
+											  vk::MemoryRequirement memoryRequirement)
+{
+	de::SharedPtr<Buffer> ret = create(vk, device, createInfo);
+
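+	// Allocate device memory that satisfies the buffer's requirements and the requested memory properties, then bind it.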
+	vk::VkMemoryRequirements bufferRequirements = vk::getBufferMemoryRequirements(vk, device, ret->object());
+	ret->bindMemory(allocator.allocate(bufferRequirements, memoryRequirement));
+	return ret;
+}
+
+de::SharedPtr<Buffer> Buffer::create (const vk::DeviceInterface& vk,
+									  vk::VkDevice device,
+									  const vk::VkBufferCreateInfo& createInfo)
+{
+	return de::SharedPtr<Buffer>(new Buffer(vk, device, vk::createBuffer(vk, device, &createInfo)));
+}
+
+} // Draw
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawBufferObjectUtil.hpp b/external/vulkancts/modules/vulkan/draw/vktDrawBufferObjectUtil.hpp
new file mode 100644
index 0000000..f52e77a
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawBufferObjectUtil.hpp
@@ -0,0 +1,82 @@
+#ifndef _VKTDRAWBUFFEROBJECTUTIL_HPP
+#define _VKTDRAWBUFFEROBJECTUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Buffer Object Util
+ *//*--------------------------------------------------------------------*/
+
+#include "vkMemUtil.hpp"
+#include "vkRefUtil.hpp"
+
+#include "deSharedPtr.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+
+class Buffer
+{
+public:
+
+	static de::SharedPtr<Buffer> create			(const vk::DeviceInterface& vk, vk::VkDevice device, const vk::VkBufferCreateInfo &createInfo);
+
+	static de::SharedPtr<Buffer> createAndAlloc (const vk::DeviceInterface&		vk,
+												 vk::VkDevice					device,
+												 const vk::VkBufferCreateInfo&	createInfo,
+												 vk::Allocator&					allocator,
+												 vk::MemoryRequirement			allocationMemoryProperties = vk::MemoryRequirement::Any);
+
+								Buffer			(const vk::DeviceInterface &vk, vk::VkDevice device, vk::Move<vk::VkBuffer> object);
+
+	void						bindMemory		(de::MovePtr<vk::Allocation> allocation);
+
+	vk::VkBuffer				object			(void) const								{ return *m_object;		}
+	vk::Allocation				getBoundMemory	(void) const								{ return *m_allocation;	}
+
+private:
+
+	Buffer										(const Buffer& other);	// Not allowed!
+	Buffer&						operator=		(const Buffer& other);	// Not allowed!
+
+	de::MovePtr<vk::Allocation>		m_allocation;
+	vk::Unique<vk::VkBuffer>		m_object;
+
+	const vk::DeviceInterface&		m_vk;
+	vk::VkDevice					m_device;
+};
+
+} // Draw
+} // vkt
+
+#endif // _VKTDRAWBUFFEROBJECTUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawCreateInfoUtil.cpp b/external/vulkancts/modules/vulkan/draw/vktDrawCreateInfoUtil.cpp
new file mode 100644
index 0000000..52ea403
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawCreateInfoUtil.cpp
@@ -0,0 +1,1195 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief CreateInfo utilities
+ *//*--------------------------------------------------------------------*/
+
+#include "vktDrawCreateInfoUtil.hpp"
+
+#include "vkImageUtil.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+
+ImageSubresourceRange::ImageSubresourceRange (vk::VkImageAspectFlags	_aspectMask,
+											  deUint32					_baseMipLevel,
+											  deUint32					_levelCount,
+											  deUint32					_baseArrayLayer,
+											  deUint32					_layerCount)
+{
+	aspectMask		= _aspectMask;
+	baseMipLevel	= _baseMipLevel;
+	levelCount		= _levelCount;
+	baseArrayLayer	= _baseArrayLayer;
+	layerCount		= _layerCount;
+}
+
+ComponentMapping::ComponentMapping (vk::VkComponentSwizzle _r,
+									vk::VkComponentSwizzle _g,
+									vk::VkComponentSwizzle _b,
+									vk::VkComponentSwizzle _a)
+{
+	r = _r;
+	g = _g;
+	b = _b;
+	a = _a;
+}
+
+ImageViewCreateInfo::ImageViewCreateInfo (vk::VkImage							_image,
+										  vk::VkImageViewType					_viewType,
+										  vk::VkFormat							_format,
+										  const vk::VkImageSubresourceRange&	_subresourceRange,
+										  const vk::VkComponentMapping&			_components,
+										  vk::VkImageViewCreateFlags			_flags)
+{
+	sType = vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+	pNext = DE_NULL;
+	flags				= 0u;
+	image				= _image;
+	viewType			= _viewType;
+	format				= _format;
+	components.r		= _components.r;
+	components.g		= _components.g;
+	components.b		= _components.b;
+	components.a		= _components.a;
+	subresourceRange	= _subresourceRange;
+	flags				= _flags;
+}
+
+ImageViewCreateInfo::ImageViewCreateInfo (vk::VkImage					_image,
+										  vk::VkImageViewType			_viewType,
+										  vk::VkFormat					_format,
+										  const vk::VkComponentMapping&	_components,
+										  vk::VkImageViewCreateFlags	_flags)
+{
+	sType = vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+	pNext = DE_NULL;
+	flags			= 0u;
+	image			= _image;
+	viewType		= _viewType;
+	format			= _format;
+	components.r	= _components.r;
+	components.g	= _components.g;
+	components.b	= _components.b;
+	components.a	= _components.a;
+
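+	// Derive the aspect mask from the format: depth and/or stencil aspects for depth/stencil formats, color otherwise.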
+	vk::VkImageAspectFlags aspectFlags;
+	const tcu::TextureFormat tcuFormat = vk::mapVkFormat(_format);
+
+	switch (tcuFormat.order)
+	{
+		case tcu::TextureFormat::D:
+			aspectFlags = vk::VK_IMAGE_ASPECT_DEPTH_BIT;
+			break;
+		case tcu::TextureFormat::S:
+			aspectFlags = vk::VK_IMAGE_ASPECT_STENCIL_BIT;
+			break;
+		case tcu::TextureFormat::DS:
+			aspectFlags = vk::VK_IMAGE_ASPECT_STENCIL_BIT | vk::VK_IMAGE_ASPECT_DEPTH_BIT;
+			break;
+		default:
+			aspectFlags = vk::VK_IMAGE_ASPECT_COLOR_BIT;
+			break;
+	}
+
+	subresourceRange = ImageSubresourceRange(aspectFlags);
+	flags = _flags;
+}
+
+BufferViewCreateInfo::BufferViewCreateInfo (vk::VkBuffer	_buffer,
+											vk::VkFormat		_format,
+											vk::VkDeviceSize _offset,
+											vk::VkDeviceSize _range)
+{
+	sType = vk::VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+	pNext = DE_NULL;
+
+	flags	= 0;
+	buffer	= _buffer;
+	format	= _format;
+	offset	= _offset;
+	range	= _range;
+}
+
+BufferCreateInfo::BufferCreateInfo (vk::VkDeviceSize		_size,
+									vk::VkBufferUsageFlags	_usage,
+									vk::VkSharingMode		_sharingMode,
+									deUint32				_queueFamilyIndexCount,
+									const deUint32*			_pQueueFamilyIndices,
+									vk::VkBufferCreateFlags _flags)
+{
+	sType = vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+	pNext = DE_NULL;
+	size					= _size;
+	usage					= _usage;
+	flags					= _flags;
+	sharingMode				= _sharingMode;
+	queueFamilyIndexCount	= _queueFamilyIndexCount;
+
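+	// Keep an internal copy of the queue family indices so pQueueFamilyIndices stays valid after the caller's array goes away.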
+	if (_queueFamilyIndexCount)
+	{
+		m_queueFamilyIndices = std::vector<deUint32>(
+			_pQueueFamilyIndices, _pQueueFamilyIndices + _queueFamilyIndexCount);
+		pQueueFamilyIndices = &m_queueFamilyIndices[0];
+	}
+	else
+	{
+		pQueueFamilyIndices = _pQueueFamilyIndices;
+	}
+}
+
+BufferCreateInfo::BufferCreateInfo (const BufferCreateInfo &other)
+{
+	sType					= other.sType;
+	pNext					= other.pNext;
+	size					= other.size;
+	usage					= other.usage;
+	flags					= other.flags;
+	sharingMode				= other.sharingMode;
+	queueFamilyIndexCount	= other.queueFamilyIndexCount;
+
+	m_queueFamilyIndices	= other.m_queueFamilyIndices;
+	DE_ASSERT(m_queueFamilyIndices.size() == queueFamilyIndexCount);
+
+	if (m_queueFamilyIndices.size())
+	{
+		pQueueFamilyIndices = &m_queueFamilyIndices[0];
+	}
+	else
+	{
+		pQueueFamilyIndices = DE_NULL;
+	}
+}
+
+BufferCreateInfo & BufferCreateInfo::operator= (const BufferCreateInfo &other)
+{
+	sType						= other.sType;
+	pNext						= other.pNext;
+	size						= other.size;
+	usage						= other.usage;
+	flags						= other.flags;
+	sharingMode					= other.sharingMode;
+	queueFamilyIndexCount		= other.queueFamilyIndexCount;
+
+	m_queueFamilyIndices		= other.m_queueFamilyIndices;
+
+	DE_ASSERT(m_queueFamilyIndices.size() == queueFamilyIndexCount);
+
+	if (m_queueFamilyIndices.size())
+	{
+		pQueueFamilyIndices = &m_queueFamilyIndices[0];
+	}
+	else
+	{
+		pQueueFamilyIndices = DE_NULL;
+	}
+
+	return *this;
+}
+
+ImageCreateInfo::ImageCreateInfo (vk::VkImageType			_imageType,
+								  vk::VkFormat				_format,
+								  vk::VkExtent3D			_extent,
+								  deUint32					_mipLevels,
+								  deUint32					_arrayLayers,
+								  vk::VkSampleCountFlagBits	_samples,
+								  vk::VkImageTiling			_tiling,
+								  vk::VkImageUsageFlags		_usage,
+								  vk::VkSharingMode			_sharingMode,
+								  deUint32					_queueFamilyIndexCount,
+								  const deUint32*			_pQueueFamilyIndices,
+								  vk::VkImageCreateFlags	_flags,
+								  vk::VkImageLayout			_initialLayout)
+{
+	if (_queueFamilyIndexCount)
+	{
+		m_queueFamilyIndices = std::vector<deUint32>(_pQueueFamilyIndices, _pQueueFamilyIndices + _queueFamilyIndexCount);
+	}
+
+	sType = vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= _flags;
+	imageType				= _imageType;
+	format					= _format;
+	extent					= _extent;
+	mipLevels				= _mipLevels;
+	arrayLayers				= _arrayLayers;
+	samples					= _samples;
+	tiling					= _tiling;
+	usage					= _usage;
+	sharingMode				= _sharingMode;
+	queueFamilyIndexCount	= _queueFamilyIndexCount;
+
+	if (m_queueFamilyIndices.size())
+	{
+		pQueueFamilyIndices = &m_queueFamilyIndices[0];
+	}
+	else
+	{
+		pQueueFamilyIndices = DE_NULL;
+	}
+	initialLayout	= _initialLayout;
+}
+
+FramebufferCreateInfo::FramebufferCreateInfo (vk::VkRenderPass						_renderPass,
+											  const std::vector<vk::VkImageView>&	attachments,
+											  deUint32								_width,
+											  deUint32								_height,
+											  deUint32								_layers)
+{
+	sType = vk::VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0u;
+
+	renderPass		= _renderPass;
+	attachmentCount	= static_cast<deUint32>(attachments.size());
+
+	if (attachmentCount)
+	{
+		pAttachments = const_cast<vk::VkImageView *>(&attachments[0]);
+	}
+
+	width	= _width;
+	height	= _height;
+	layers	= _layers;
+}
+
+RenderPassCreateInfo::RenderPassCreateInfo (const std::vector<vk::VkAttachmentDescription>&	attachments,
+											const std::vector<vk::VkSubpassDescription>&	subpasses,
+											const std::vector<vk::VkSubpassDependency>&		dependiences)
+
+	: m_attachments			(attachments.begin(), attachments.end())
+	, m_subpasses			(subpasses.begin(), subpasses.end())
+	, m_dependiences		(dependiences.begin(), dependiences.end())
+	, m_attachmentsStructs	(m_attachments.begin(), m_attachments.end())
+	, m_subpassesStructs	(m_subpasses.begin(), m_subpasses.end())
+	, m_dependiencesStructs	(m_dependiences.begin(), m_dependiences.end())
+{
+	sType = vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0;
+
+	attachmentCount = static_cast<deUint32>(m_attachments.size());
+	pAttachments	= &m_attachmentsStructs[0];
+	subpassCount	= static_cast<deUint32>(m_subpasses.size());
+	pSubpasses		= &m_subpassesStructs[0];
+	dependencyCount = static_cast<deUint32>(m_dependiences.size());
+	pDependencies	= &m_dependiencesStructs[0];
+}
+
+RenderPassCreateInfo::RenderPassCreateInfo (deUint32							_attachmentCount,
+											const vk::VkAttachmentDescription*	_pAttachments,
+											deUint32							_subpassCount,
+											const vk::VkSubpassDescription*		_pSubpasses,
+											deUint32							_dependencyCount,
+											const vk::VkSubpassDependency*		_pDependiences)
+{
+
+	m_attachments	= std::vector<AttachmentDescription>(_pAttachments, _pAttachments + _attachmentCount);
+	m_subpasses		= std::vector<SubpassDescription>(_pSubpasses, _pSubpasses + _subpassCount);
+	m_dependiences	= std::vector<SubpassDependency>(_pDependiences, _pDependiences + _dependencyCount);
+
+	m_attachmentsStructs	= std::vector<vk::VkAttachmentDescription>	(m_attachments.begin(),		m_attachments.end());
+	m_subpassesStructs		= std::vector<vk::VkSubpassDescription>		(m_subpasses.begin(),		m_subpasses.end());
+	m_dependiencesStructs	= std::vector<vk::VkSubpassDependency>		(m_dependiences.begin(),	m_dependiences.end());
+
+	sType = vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0;
+
+	attachmentCount = static_cast<deUint32>(m_attachments.size());
+
+	if (attachmentCount)
+	{
+		pAttachments = &m_attachmentsStructs[0];
+	}
+	else
+	{
+		pAttachments = DE_NULL;
+	}
+
+	subpassCount = static_cast<deUint32>(m_subpasses.size());
+
+	if (subpassCount)
+	{
+		pSubpasses = &m_subpassesStructs[0];
+	}
+	else
+	{
+		pSubpasses = DE_NULL;
+	}
+
+	dependencyCount = static_cast<deUint32>(m_dependiences.size());
+
+	if (dependencyCount)
+	{
+		pDependencies = &m_dependiencesStructs[0];
+	}
+	else
+	{
+		pDependencies = DE_NULL;
+	}
+}
+
+void
+RenderPassCreateInfo::addAttachment (vk::VkAttachmentDescription attachment)
+{
+
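+	// Rebuild the plain VkAttachmentDescription array and re-point pAttachments; growing the vector may have reallocated its storage.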
+	m_attachments.push_back(attachment);
+	m_attachmentsStructs	= std::vector<vk::VkAttachmentDescription>(m_attachments.begin(), m_attachments.end());
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachmentsStructs[0];
+}
+
+void
+RenderPassCreateInfo::addSubpass (vk::VkSubpassDescription subpass)
+{
+
+	m_subpasses.push_back(subpass);
+	m_subpassesStructs	= std::vector<vk::VkSubpassDescription>(m_subpasses.begin(), m_subpasses.end());
+	subpassCount		= static_cast<deUint32>(m_subpasses.size());
+	pSubpasses			= &m_subpassesStructs[0];
+}
+
+void
+RenderPassCreateInfo::addDependency (vk::VkSubpassDependency dependency)
+{
+
+	m_dependiences.push_back(dependency);
+	m_dependiencesStructs	= std::vector<vk::VkSubpassDependency>(m_dependiences.begin(), m_dependiences.end());
+
+	dependencyCount			= static_cast<deUint32>(m_dependiences.size());
+	pDependencies			= &m_dependiencesStructs[0];
+}
+
+RenderPassBeginInfo::RenderPassBeginInfo (vk::VkRenderPass						_renderPass,
+										  vk::VkFramebuffer						_framebuffer,
+										  vk::VkRect2D							_renderArea,
+										  const std::vector<vk::VkClearValue>&	_clearValues)
+{
+
+	m_clearValues	= _clearValues;
+
+	sType			= vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+	pNext			= DE_NULL;
+	renderPass		= _renderPass;
+	framebuffer		= _framebuffer;
+	renderArea		= _renderArea;
+	clearValueCount = static_cast<deUint32>(m_clearValues.size());
+	pClearValues	= m_clearValues.size() ? &m_clearValues[0] : DE_NULL;
+}
+
+CmdPoolCreateInfo::CmdPoolCreateInfo (deUint32 _queueFamilyIndex, unsigned int _flags)
+{
+	sType = vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+	pNext = DE_NULL;
+
+	queueFamilyIndex = _queueFamilyIndex;
+	flags				= _flags;
+}
+
+AttachmentDescription::AttachmentDescription (vk::VkFormat				_format,
+											  vk::VkSampleCountFlagBits	_samples,
+											  vk::VkAttachmentLoadOp	_loadOp,
+											  vk::VkAttachmentStoreOp	_storeOp,
+											  vk::VkAttachmentLoadOp	_stencilLoadOp,
+											  vk::VkAttachmentStoreOp	_stencilStoreOp,
+											  vk::VkImageLayout			_initialLayout,
+											  vk::VkImageLayout			_finalLayout)
+{
+	flags = 0;
+	format			= _format;
+	samples			= _samples;
+	loadOp			= _loadOp;
+	storeOp			= _storeOp;
+	stencilLoadOp	= _stencilLoadOp;
+	stencilStoreOp	= _stencilStoreOp;
+	initialLayout	= _initialLayout;
+	finalLayout		= _finalLayout;
+}
+
+AttachmentDescription::AttachmentDescription (const vk::VkAttachmentDescription& rhs)
+{
+	flags			= rhs.flags;
+	format			= rhs.format;
+	samples			= rhs.samples;
+	loadOp			= rhs.loadOp;
+	storeOp			= rhs.storeOp;
+	stencilLoadOp	= rhs.stencilLoadOp;
+	stencilStoreOp	= rhs.stencilStoreOp;
+	initialLayout	= rhs.initialLayout;
+	finalLayout		= rhs.finalLayout;
+}
+
+AttachmentReference::AttachmentReference (deUint32 _attachment, vk::VkImageLayout _layout)
+{
+	attachment	= _attachment;
+	layout		= _layout;
+}
+
+AttachmentReference::AttachmentReference (void)
+{
+	attachment = vk::VK_ATTACHMENT_UNUSED;
+	layout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
+}
+
+SubpassDescription::SubpassDescription (vk::VkPipelineBindPoint				_pipelineBindPoint,
+										vk::VkSubpassDescriptionFlags		_flags,
+										deUint32							_inputAttachmentCount,
+										const vk::VkAttachmentReference*	_inputAttachments,
+										deUint32							_colorAttachmentCount,
+										const vk::VkAttachmentReference*	_colorAttachments,
+										const vk::VkAttachmentReference*	_resolveAttachments,
+										vk::VkAttachmentReference			depthStencilAttachment,
+										deUint32							_preserveAttachmentCount,
+										const deUint32*						_preserveAttachments)
+{
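+	// Deep-copy the attachment reference arrays so this wrapper owns its storage; the VkSubpassDescription pointers are re-targeted at these copies below.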
+	m_inputAttachments = std::vector<vk::VkAttachmentReference>(_inputAttachments, _inputAttachments + _inputAttachmentCount);
+	m_colorAttachments = std::vector<vk::VkAttachmentReference>(_colorAttachments, _colorAttachments + _colorAttachmentCount);
+
+	if (_resolveAttachments)
+		m_resolveAttachments = std::vector<vk::VkAttachmentReference>(_resolveAttachments, _resolveAttachments + _colorAttachmentCount);
+
+	m_preserveAttachments = std::vector<deUint32>(_preserveAttachments, _preserveAttachments + _preserveAttachmentCount);
+
+	m_depthStencilAttachment = depthStencilAttachment;
+
+	flags					= _flags;
+	pipelineBindPoint		= _pipelineBindPoint;
+	inputAttachmentCount	= _inputAttachmentCount;
+	pInputAttachments		= DE_NULL;
+	colorAttachmentCount	= _colorAttachmentCount;
+	pColorAttachments		= DE_NULL;
+	pResolveAttachments		= DE_NULL;
+	pDepthStencilAttachment	= &m_depthStencilAttachment;
+	pPreserveAttachments	= DE_NULL;
+	preserveAttachmentCount	= _preserveAttachmentCount;
+
+	if (!m_inputAttachments.empty())
+		pInputAttachments = &m_inputAttachments[0];
+
+	if (!m_colorAttachments.empty())
+		pColorAttachments = &m_colorAttachments[0];
+
+	if (!m_resolveAttachments.empty())
+		pResolveAttachments = &m_resolveAttachments[0];
+
+	if (!m_preserveAttachments.empty())
+		pPreserveAttachments = &m_preserveAttachments[0];
+}
+
+SubpassDescription::SubpassDescription (const vk::VkSubpassDescription& rhs)
+{
+	*static_cast<vk::VkSubpassDescription*>(this) = rhs;
+
+	m_inputAttachments = std::vector<vk::VkAttachmentReference>(
+		rhs.pInputAttachments, rhs.pInputAttachments + rhs.inputAttachmentCount);
+
+	m_colorAttachments = std::vector<vk::VkAttachmentReference>(
+		rhs.pColorAttachments, rhs.pColorAttachments + rhs.colorAttachmentCount);
+
+	if (rhs.pResolveAttachments)
+		m_resolveAttachments = std::vector<vk::VkAttachmentReference>(
+			rhs.pResolveAttachments, rhs.pResolveAttachments + rhs.colorAttachmentCount);
+
+	m_preserveAttachments = std::vector<deUint32>(
+		rhs.pPreserveAttachments, rhs.pPreserveAttachments + rhs.preserveAttachmentCount);
+
+	if (rhs.pDepthStencilAttachment)
+		m_depthStencilAttachment = *rhs.pDepthStencilAttachment;
+
+	if (!m_inputAttachments.empty())
+		pInputAttachments = &m_inputAttachments[0];
+
+	if (!m_colorAttachments.empty())
+		pColorAttachments = &m_colorAttachments[0];
+
+	if (!m_resolveAttachments.empty())
+		pResolveAttachments = &m_resolveAttachments[0];
+
+	pDepthStencilAttachment = &m_depthStencilAttachment;
+
+	if (!m_preserveAttachments.empty())
+		pPreserveAttachments = &m_preserveAttachments[0];
+}
+
+SubpassDescription::SubpassDescription (const SubpassDescription& rhs)
+{
+	*this = rhs;
+}
+
+SubpassDescription& SubpassDescription::operator= (const SubpassDescription& rhs)
+{
+	*static_cast<vk::VkSubpassDescription*>(this) = rhs;
+
+	m_inputAttachments		= rhs.m_inputAttachments;
+	m_colorAttachments		= rhs.m_colorAttachments;
+	m_resolveAttachments	= rhs.m_resolveAttachments;
+	m_preserveAttachments	= rhs.m_preserveAttachments;
+	m_depthStencilAttachment = rhs.m_depthStencilAttachment;
+
+	if (!m_inputAttachments.empty())
+		pInputAttachments = &m_inputAttachments[0];
+
+	if (!m_colorAttachments.empty())
+		pColorAttachments = &m_colorAttachments[0];
+
+	if (!m_resolveAttachments.empty())
+		pResolveAttachments = &m_resolveAttachments[0];
+
+	pDepthStencilAttachment = &m_depthStencilAttachment;
+
+	if (!m_preserveAttachments.empty())
+		pPreserveAttachments = &m_preserveAttachments[0];
+
+	return *this;
+}
+
+SubpassDependency::SubpassDependency (deUint32					_srcSubpass,
+									  deUint32					_dstSubpass,
+									  vk::VkPipelineStageFlags	_srcStageMask,
+									  vk::VkPipelineStageFlags	_dstStageMask,
+									  vk::VkAccessFlags			_srcAccessMask,
+									  vk::VkAccessFlags			_dstAccessMask,
+									  vk::VkDependencyFlags		_dependencyFlags)
+{
+	srcSubpass		= _srcSubpass;
+	dstSubpass		= _dstSubpass;
+	srcStageMask	= _srcStageMask;
+	dstStageMask	= _dstStageMask;
+	srcAccessMask	= _srcAccessMask;
+	dstAccessMask	= _dstAccessMask;
+	dependencyFlags	= _dependencyFlags;
+}
+
+SubpassDependency::SubpassDependency (const vk::VkSubpassDependency& rhs)
+{
+	srcSubpass		= rhs.srcSubpass;
+	dstSubpass		= rhs.dstSubpass;
+	srcStageMask	= rhs.srcStageMask;
+	dstStageMask	= rhs.dstStageMask;
+	srcAccessMask	= rhs.srcAccessMask;
+	dstAccessMask	= rhs.dstAccessMask;
+	dependencyFlags	= rhs.dependencyFlags;
+}
+
+CmdBufferBeginInfo::CmdBufferBeginInfo (vk::VkCommandBufferUsageFlags _flags)
+{
+	sType				= vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+	pNext				= DE_NULL;
+	flags				= _flags;
+	pInheritanceInfo	= DE_NULL;
+}
+
+DescriptorPoolCreateInfo::DescriptorPoolCreateInfo (const std::vector<vk::VkDescriptorPoolSize>&	poolSizeCounts,
+													vk::VkDescriptorPoolCreateFlags					_flags,
+													deUint32										_maxSets)
+	: m_poolSizeCounts(poolSizeCounts)
+{
+	sType = vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+	pNext = DE_NULL;
+	flags			= _flags;
+	maxSets			= _maxSets;
+	poolSizeCount	= static_cast<deUint32>(m_poolSizeCounts.size());
+	pPoolSizes		= &m_poolSizeCounts[0];
+}
+
+DescriptorPoolCreateInfo& DescriptorPoolCreateInfo::addDescriptors (vk::VkDescriptorType type, deUint32 count)
+{
+	vk::VkDescriptorPoolSize descriptorTypeCount = { type, count };
+	m_poolSizeCounts.push_back(descriptorTypeCount);
+
+	poolSizeCount	= static_cast<deUint32>(m_poolSizeCounts.size());
+	pPoolSizes		= &m_poolSizeCounts[0];
+
+	return *this;
+}
+
+DescriptorSetLayoutCreateInfo::DescriptorSetLayoutCreateInfo (deUint32 _bindingCount, const vk::VkDescriptorSetLayoutBinding* _pBindings)
+{
+	sType = vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0;
+	bindingCount = _bindingCount;
+	pBindings	 = _pBindings;
+}
+
+PipelineLayoutCreateInfo::PipelineLayoutCreateInfo (deUint32							_descriptorSetCount,
+													const vk::VkDescriptorSetLayout*	_pSetLayouts,
+													deUint32							_pushConstantRangeCount,
+													const vk::VkPushConstantRange*		_pPushConstantRanges)
+	: m_pushConstantRanges(_pPushConstantRanges, _pPushConstantRanges + _pushConstantRangeCount)
+{
+	for (unsigned int i = 0; i < _descriptorSetCount; i++)
+	{
+		m_setLayouts.push_back(_pSetLayouts[i]);
+	}
+
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0;
+	setLayoutCount			= static_cast<deUint32>(m_setLayouts.size());
+	pSetLayouts				= setLayoutCount > 0 ? &m_setLayouts[0] : DE_NULL;
+	pushConstantRangeCount	= static_cast<deUint32>(m_pushConstantRanges.size());
+
+	if (m_pushConstantRanges.size())
+	{
+		pPushConstantRanges = &m_pushConstantRanges[0];
+	}
+	else
+	{
+		pPushConstantRanges = DE_NULL;
+	}
+}
+
+PipelineLayoutCreateInfo::PipelineLayoutCreateInfo (const std::vector<vk::VkDescriptorSetLayout>&	setLayouts,
+													deUint32										_pushConstantRangeCount,
+													const vk::VkPushConstantRange*					_pPushConstantRanges)
+	: m_setLayouts			(setLayouts)
+	, m_pushConstantRanges	(_pPushConstantRanges, _pPushConstantRanges + _pushConstantRangeCount)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+	pNext = DE_NULL;
+
+	flags			= 0;
+	setLayoutCount	= static_cast<deUint32>(m_setLayouts.size());
+
+	if (setLayoutCount)
+	{
+		pSetLayouts = &m_setLayouts[0];
+	}
+	else
+	{
+		pSetLayouts = DE_NULL;
+	}
+
+	pushConstantRangeCount = static_cast<deUint32>(m_pushConstantRanges.size());
+	if (pushConstantRangeCount)
+	{
+		pPushConstantRanges = &m_pushConstantRanges[0];
+	}
+	else
+	{
+		pPushConstantRanges = DE_NULL;
+	}
+}
+
+PipelineCreateInfo::PipelineShaderStage::PipelineShaderStage (vk::VkShaderModule _module, const char* _pName, vk::VkShaderStageFlagBits _stage)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0u;
+	stage				= _stage;
+	module				= _module;
+	pName				= _pName;
+	pSpecializationInfo = DE_NULL;
+}
+
+PipelineCreateInfo::VertexInputState::VertexInputState (deUint32										_vertexBindingDescriptionCount,
+														const vk::VkVertexInputBindingDescription*		_pVertexBindingDescriptions,
+														deUint32										_vertexAttributeDescriptionCount,
+														const vk::VkVertexInputAttributeDescription*	_pVertexAttributeDescriptions)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags							= 0u;
+	vertexBindingDescriptionCount	= _vertexBindingDescriptionCount;
+	pVertexBindingDescriptions		= _pVertexBindingDescriptions;
+	vertexAttributeDescriptionCount	= _vertexAttributeDescriptionCount;
+	pVertexAttributeDescriptions	= _pVertexAttributeDescriptions;
+}
+
+PipelineCreateInfo::InputAssemblerState::InputAssemblerState (vk::VkPrimitiveTopology	_topology,
+															  vk::VkBool32				_primitiveRestartEnable)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	topology				= _topology;
+	primitiveRestartEnable	= _primitiveRestartEnable;
+}
+
+PipelineCreateInfo::TessellationState::TessellationState (deUint32 _patchControlPoints)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags				= 0;
+	patchControlPoints	= _patchControlPoints;
+}
+
+PipelineCreateInfo::ViewportState::ViewportState (deUint32						_viewportCount,
+												  std::vector<vk::VkViewport>	_viewports,
+												  std::vector<vk::VkRect2D>		_scissors)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags			= 0u;
+	viewportCount	= _viewportCount;
+	scissorCount	= _viewportCount;
+
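+	// If no viewports or scissors are supplied, fall back to zero-initialized placeholders so pViewports and pScissors still point at valid storage.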
+	if (!_viewports.size())
+	{
+		m_viewports.resize(viewportCount);
+		deMemset(&m_viewports[0], 0, sizeof(m_viewports[0]) * m_viewports.size());
+	}
+	else
+	{
+		m_viewports = _viewports;
+	}
+
+	if (!_scissors.size())
+	{
+		m_scissors.resize(scissorCount);
+		deMemset(&m_scissors[0], 0, sizeof(m_scissors[0]) * m_scissors.size());
+	}
+	else
+	{
+		m_scissors = _scissors;
+	}
+
+	pViewports	= &m_viewports[0];
+	pScissors	= &m_scissors[0];
+}
+
+PipelineCreateInfo::ViewportState::ViewportState (const ViewportState& other)
+{
+	sType			= other.sType;
+	pNext			= other.pNext;
+	flags			= other.flags;
+	viewportCount	= other.viewportCount;
+	scissorCount	= other.scissorCount;
+
+	m_viewports = std::vector<vk::VkViewport>(other.pViewports, other.pViewports + viewportCount);
+	m_scissors	= std::vector<vk::VkRect2D>(other.pScissors, other.pScissors + scissorCount);
+
+	pViewports	= &m_viewports[0];
+	pScissors	= &m_scissors[0];
+}
+
+PipelineCreateInfo::ViewportState& PipelineCreateInfo::ViewportState::operator= (const ViewportState& other)
+{
+	sType			= other.sType;
+	pNext			= other.pNext;
+	flags			= other.flags;
+	viewportCount	= other.viewportCount;
+	scissorCount	= other.scissorCount;
+
+	m_viewports		= std::vector<vk::VkViewport>(other.pViewports, other.pViewports + viewportCount);
+	m_scissors		= std::vector<vk::VkRect2D>(other.pScissors, other.pScissors + scissorCount);
+
+	pViewports		= &m_viewports[0];
+	pScissors		= &m_scissors[0];
+	return *this;
+}
+
+PipelineCreateInfo::RasterizerState::RasterizerState (vk::VkBool32			_depthClampEnable,
+													  vk::VkBool32			_rasterizerDiscardEnable,
+													  vk::VkPolygonMode		_polygonMode,
+													  vk::VkCullModeFlags	_cullMode,
+													  vk::VkFrontFace		_frontFace,
+													  vk::VkBool32			_depthBiasEnable,
+													  float					_depthBiasConstantFactor,
+													  float					_depthBiasClamp,
+													  float					_depthBiasSlopeFactor,
+													  float					_lineWidth)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	depthClampEnable		= _depthClampEnable;
+	rasterizerDiscardEnable = _rasterizerDiscardEnable;
+	polygonMode				= _polygonMode;
+	cullMode				= _cullMode;
+	frontFace				= _frontFace;
+
+	depthBiasEnable			= _depthBiasEnable;
+	depthBiasConstantFactor	= _depthBiasConstantFactor;
+	depthBiasClamp			= _depthBiasClamp;
+	depthBiasSlopeFactor	= _depthBiasSlopeFactor;
+	lineWidth				= _lineWidth;
+}
+
+PipelineCreateInfo::MultiSampleState::MultiSampleState (vk::VkSampleCountFlagBits				_rasterizationSamples,
+														vk::VkBool32							_sampleShadingEnable,
+														float									_minSampleShading,
+														const std::vector<vk::VkSampleMask>&	_sampleMask,
+														bool									_alphaToCoverageEnable,
+														bool									_alphaToOneEnable)
+	: m_sampleMask(_sampleMask)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	rasterizationSamples	= _rasterizationSamples;
+	sampleShadingEnable		= _sampleShadingEnable;
+	minSampleShading		= _minSampleShading;
+	pSampleMask				= &m_sampleMask[0];
+	alphaToCoverageEnable   = _alphaToCoverageEnable;
+	alphaToOneEnable		= _alphaToOneEnable;
+}
+
+PipelineCreateInfo::MultiSampleState::MultiSampleState (const MultiSampleState& other)
+{
+	sType					= other.sType;
+	pNext					= other.pNext;
+	flags					= other.flags;
+	rasterizationSamples	= other.rasterizationSamples;
+	sampleShadingEnable		= other.sampleShadingEnable;
+	minSampleShading		= other.minSampleShading;
+
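+	// pSampleMask is an array of 32-bit words, one bit per sample; copy ceil(rasterizationSamples / 32) words.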
+	const size_t sampleMaskArrayLen = (other.rasterizationSamples + sizeof(vk::VkSampleMask) * 8 - 1) / (sizeof(vk::VkSampleMask) * 8);
+
+	m_sampleMask	= std::vector<vk::VkSampleMask>(other.pSampleMask, other.pSampleMask + sampleMaskArrayLen);
+	pSampleMask		= &m_sampleMask[0];
+}
+
+PipelineCreateInfo::MultiSampleState& PipelineCreateInfo::MultiSampleState::operator= (const MultiSampleState& other)
+{
+	sType = other.sType;
+	pNext = other.pNext;
+	flags					= other.flags;
+	rasterizationSamples	= other.rasterizationSamples;
+	sampleShadingEnable		= other.sampleShadingEnable;
+	minSampleShading		= other.minSampleShading;
+
+	const size_t sampleMaskArrayLen = (other.rasterizationSamples + sizeof(vk::VkSampleMask) * 8 - 1) / (sizeof(vk::VkSampleMask) * 8);
+
+	m_sampleMask	= std::vector<vk::VkSampleMask>(other.pSampleMask, other.pSampleMask + sampleMaskArrayLen);
+	pSampleMask		= &m_sampleMask[0];
+
+	return *this;
+}
+
+PipelineCreateInfo::ColorBlendState::ColorBlendState (const std::vector<vk::VkPipelineColorBlendAttachmentState>&	_attachments,
+													  vk::VkBool32													_logicOpEnable,
+													  vk::VkLogicOp													_logicOp)
+	: m_attachments(_attachments)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	logicOpEnable			= _logicOpEnable;
+	logicOp					= _logicOp;
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachments[0];
+}
+
+PipelineCreateInfo::ColorBlendState::ColorBlendState (deUint32											_attachmentCount,
+													  const vk::VkPipelineColorBlendAttachmentState*	_attachments,
+													  vk::VkBool32										_logicOpEnable,
+													  vk::VkLogicOp										_logicOp)
+	: m_attachments(_attachments, _attachments + _attachmentCount)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+	pNext	= DE_NULL;
+	flags					= 0;
+	logicOpEnable			= _logicOpEnable;
+	logicOp					= _logicOp;
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachments[0];
+}
+
+PipelineCreateInfo::ColorBlendState::ColorBlendState (const vk::VkPipelineColorBlendStateCreateInfo& createInfo)
+	: m_attachments (createInfo.pAttachments, createInfo.pAttachments + createInfo.attachmentCount)
+{
+	sType = createInfo.sType;
+	pNext = createInfo.pNext;
+	flags					= createInfo.flags;
+	logicOpEnable			= createInfo.logicOpEnable;
+	logicOp					= createInfo.logicOp;
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachments[0];
+}
+
+PipelineCreateInfo::ColorBlendState::ColorBlendState (const ColorBlendState& createInfo, std::vector<float> _blendConstants)
+	: m_attachments (createInfo.pAttachments, createInfo.pAttachments + createInfo.attachmentCount)
+{
+	sType = createInfo.sType;
+	pNext = createInfo.pNext;
+	flags					= createInfo.flags;
+	logicOpEnable			= createInfo.logicOpEnable;
+	logicOp					= createInfo.logicOp;
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachments[0];
+	deMemcpy(blendConstants, &_blendConstants[0], 4 * sizeof(float));
+}
+
+PipelineCreateInfo::ColorBlendState::Attachment::Attachment (vk::VkBool32		_blendEnable,
+															 vk::VkBlendFactor	_srcColorBlendFactor,
+															 vk::VkBlendFactor	_dstColorBlendFactor,
+															 vk::VkBlendOp		_colorBlendOp,
+															 vk::VkBlendFactor	_srcAlphaBlendFactor,
+															 vk::VkBlendFactor	_dstAlphaBlendFactor,
+															 vk::VkBlendOp		_alphaBlendOp,
+															 deUint8			_colorWriteMask)
+{
+	blendEnable			= _blendEnable;
+	srcColorBlendFactor	= _srcColorBlendFactor;
+	dstColorBlendFactor	= _dstColorBlendFactor;
+	colorBlendOp		= _colorBlendOp;
+	srcAlphaBlendFactor	= _srcAlphaBlendFactor;
+	dstAlphaBlendFactor	= _dstAlphaBlendFactor;
+	alphaBlendOp		= _alphaBlendOp;
+	colorWriteMask	= _colorWriteMask;
+}
+
+PipelineCreateInfo::DepthStencilState::StencilOpState::StencilOpState (vk::VkStencilOp	_failOp,
+																	   vk::VkStencilOp	_passOp,
+																	   vk::VkStencilOp	_depthFailOp,
+																	   vk::VkCompareOp	_compareOp,
+																	   deUint32			_compareMask,
+																	   deUint32			_writeMask,
+																	   deUint32			_reference)
+{
+	failOp		= _failOp;
+	passOp		= _passOp;
+	depthFailOp	= _depthFailOp;
+	compareOp	= _compareOp;
+
+	compareMask	= _compareMask;
+	writeMask	= _writeMask;
+	reference	= _reference;
+}
+
+PipelineCreateInfo::DepthStencilState::DepthStencilState (vk::VkBool32		_depthTestEnable,
+														  vk::VkBool32		_depthWriteEnable,
+														  vk::VkCompareOp	_depthCompareOp,
+														  vk::VkBool32		_depthBoundsTestEnable,
+														  vk::VkBool32		_stencilTestEnable,
+														  StencilOpState	_front,
+														  StencilOpState	_back,
+														  float				_minDepthBounds,
+														  float				_maxDepthBounds)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	depthTestEnable			= _depthTestEnable;
+	depthWriteEnable		= _depthWriteEnable;
+	depthCompareOp			= _depthCompareOp;
+	depthBoundsTestEnable	= _depthBoundsTestEnable;
+	stencilTestEnable		= _stencilTestEnable;
+	front	= _front;
+	back	= _back;
+
+	minDepthBounds = _minDepthBounds;
+	maxDepthBounds = _maxDepthBounds;
+}
+
+PipelineCreateInfo::DynamicState::DynamicState (const std::vector<vk::VkDynamicState>& _dynamicStates)
+{
+	sType	= vk::VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+	pNext	= DE_NULL;
+	flags	= 0;
+
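+	// An empty input list enables every dynamic state known to the framework.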
+	if (!_dynamicStates.size())
+	{
+		for (size_t i = 0; i < vk::VK_DYNAMIC_STATE_LAST; ++i)
+		{
+			m_dynamicStates.push_back(static_cast<vk::VkDynamicState>(i));
+		}
+	}
+	else
+		m_dynamicStates = _dynamicStates;
+
+	dynamicStateCount = static_cast<deUint32>(m_dynamicStates.size());
+	pDynamicStates = &m_dynamicStates[0];
+}
+
+PipelineCreateInfo::DynamicState::DynamicState (const DynamicState &other)
+{
+	sType = other.sType;
+	pNext = other.pNext;
+
+	flags				= other.flags;
+	dynamicStateCount	= other.dynamicStateCount;
+	m_dynamicStates		= std::vector<vk::VkDynamicState>(other.pDynamicStates, other.pDynamicStates + dynamicStateCount);
+	pDynamicStates		= &m_dynamicStates[0];
+}
+
+PipelineCreateInfo::DynamicState& PipelineCreateInfo::DynamicState::operator= (const DynamicState& other)
+{
+	sType = other.sType;
+	pNext = other.pNext;
+
+	flags				= other.flags;
+	dynamicStateCount	= other.dynamicStateCount;
+	m_dynamicStates		= std::vector<vk::VkDynamicState>(other.pDynamicStates, other.pDynamicStates + dynamicStateCount);
+	pDynamicStates		= &m_dynamicStates[0];
+
+	return *this;
+}
+
+PipelineCreateInfo::PipelineCreateInfo (vk::VkPipelineLayout		_layout,
+										vk::VkRenderPass			_renderPass,
+										int							_subpass,
+										vk::VkPipelineCreateFlags	_flags)
+{
+	deMemset(static_cast<vk::VkGraphicsPipelineCreateInfo *>(this), 0,
+		sizeof(vk::VkGraphicsPipelineCreateInfo));
+
+	sType = vk::VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags				= _flags;
+	renderPass			= _renderPass;
+	subpass				= _subpass;
+	layout				= _layout;
+	basePipelineHandle	= DE_NULL;
+	basePipelineIndex	= 0;
+	pDynamicState		= DE_NULL;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addShader (const vk::VkPipelineShaderStageCreateInfo& shader)
+{
+	m_shaders.push_back(shader);
+
+	stageCount	= static_cast<deUint32>(m_shaders.size());
+	pStages		= &m_shaders[0];
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineVertexInputStateCreateInfo& state)
+{
+	m_vertexInputState	= state;
+	pVertexInputState	= &m_vertexInputState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineInputAssemblyStateCreateInfo& state)
+{
+	m_inputAssemblyState = state;
+	pInputAssemblyState = &m_inputAssemblyState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineColorBlendStateCreateInfo& state)
+{
+	m_colorBlendStateAttachments	= std::vector<vk::VkPipelineColorBlendAttachmentState>(state.pAttachments, state.pAttachments + state.attachmentCount);
+	m_colorBlendState				= state;
+	m_colorBlendState.pAttachments	= &m_colorBlendStateAttachments[0];
+	pColorBlendState				= &m_colorBlendState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineViewportStateCreateInfo& state)
+{
+	m_viewports					= std::vector<vk::VkViewport>(state.pViewports, state.pViewports + state.viewportCount);
+	m_scissors					= std::vector<vk::VkRect2D>(state.pScissors, state.pScissors + state.scissorCount);
+	m_viewportState				= state;
+	m_viewportState.pViewports	= &m_viewports[0];
+	m_viewportState.pScissors	= &m_scissors[0];
+	pViewportState				= &m_viewportState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineDepthStencilStateCreateInfo& state)
+{
+	m_dynamicDepthStencilState	= state;
+	pDepthStencilState			= &m_dynamicDepthStencilState;
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineTessellationStateCreateInfo& state)
+{
+	m_tessState			= state;
+	pTessellationState	= &m_tessState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineRasterizationStateCreateInfo& state)
+{
+	m_rasterState		= state;
+	pRasterizationState	= &m_rasterState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineMultisampleStateCreateInfo& state)
+{
+
+	const size_t sampleMaskArrayLen = (state.rasterizationSamples + sizeof(vk::VkSampleMask) * 8 - 1) / (sizeof(vk::VkSampleMask) * 8);
+	m_multisampleStateSampleMask	= std::vector<vk::VkSampleMask>(state.pSampleMask, state.pSampleMask + sampleMaskArrayLen);
+	m_multisampleState				= state;
+	m_multisampleState.pSampleMask	= &m_multisampleStateSampleMask[0];
+	pMultisampleState				= &m_multisampleState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineDynamicStateCreateInfo& state)
+{
+	m_dynamicStates					= std::vector<vk::VkDynamicState>(state.pDynamicStates, state.pDynamicStates + state.dynamicStateCount);
+	m_dynamicState					= state;
+	m_dynamicState.pDynamicStates	= &m_dynamicStates[0];
+	pDynamicState					= &m_dynamicState;
+
+	return *this;
+}
+
+SamplerCreateInfo::SamplerCreateInfo (vk::VkFilter				_magFilter,
+									  vk::VkFilter				_minFilter,
+									  vk::VkSamplerMipmapMode	_mipmapMode,
+									  vk::VkSamplerAddressMode	_addressModeU,
+									  vk::VkSamplerAddressMode	_addressModeV,
+									  vk::VkSamplerAddressMode	_addressModeW,
+									  float						_mipLodBias,
+									  vk::VkBool32				_anisotropyEnable,
+									  float						_maxAnisotropy,
+									  vk::VkBool32				_compareEnable,
+									  vk::VkCompareOp			_compareOp,
+									  float						_minLod,
+									  float						_maxLod,
+									  vk::VkBorderColor			_borderColor,
+									  vk::VkBool32				_unnormalizedCoordinates)
+{
+	sType					= vk::VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
+	pNext					= DE_NULL;
+	flags					= 0u;
+	magFilter				= _magFilter;
+	minFilter				= _minFilter;
+	mipmapMode				= _mipmapMode;
+	addressModeU			= _addressModeU;
+	addressModeV			= _addressModeV;
+	addressModeW			= _addressModeW;
+	mipLodBias				= _mipLodBias;
+	anisotropyEnable		= _anisotropyEnable;
+	maxAnisotropy			= _maxAnisotropy;
+	compareEnable			= _compareEnable;
+	compareOp				= _compareOp;
+	minLod					= _minLod;
+	maxLod					= _maxLod;
+	borderColor				= _borderColor;
+	unnormalizedCoordinates = _unnormalizedCoordinates;
+}
+} // Draw
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawCreateInfoUtil.hpp b/external/vulkancts/modules/vulkan/draw/vktDrawCreateInfoUtil.hpp
new file mode 100644
index 0000000..b7ae712
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawCreateInfoUtil.hpp
@@ -0,0 +1,520 @@
+#ifndef _VKTDRAWCREATEINFOUTIL_HPP
+#define _VKTDRAWCREATEINFOUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief CreateInfo utilities
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "tcuVector.hpp"
+#include "deSharedPtr.hpp"
+#include <vector>
+
+namespace vkt
+{
+namespace Draw
+{
+
+class ImageSubresourceRange : public vk::VkImageSubresourceRange
+{
+public:
+	ImageSubresourceRange		(vk::VkImageAspectFlags	aspectMask,
+								 deUint32				baseMipLevel	= 0,
+								 deUint32				levelCount		= 1,
+								 deUint32				baseArrayLayer	= 0,
+								 deUint32				layerCount		= 1);
+};
+
+class ComponentMapping : public vk::VkComponentMapping
+{
+public:
+	ComponentMapping			(vk::VkComponentSwizzle r = vk::VK_COMPONENT_SWIZZLE_R,
+								 vk::VkComponentSwizzle g = vk::VK_COMPONENT_SWIZZLE_G,
+								 vk::VkComponentSwizzle b = vk::VK_COMPONENT_SWIZZLE_B,
+								 vk::VkComponentSwizzle a = vk::VK_COMPONENT_SWIZZLE_A);
+};
+
+class ImageViewCreateInfo : public vk::VkImageViewCreateInfo
+{
+public:
+	ImageViewCreateInfo			(vk::VkImage						image,
+								 vk::VkImageViewType				viewType,
+								 vk::VkFormat						format,
+								 const vk::VkImageSubresourceRange&	subresourceRange,
+								 const vk::VkComponentMapping&		components			= ComponentMapping(),
+								 vk::VkImageViewCreateFlags			flags				= 0);
+
+	ImageViewCreateInfo			(vk::VkImage						image,
+								 vk::VkImageViewType				viewType,
+								 vk::VkFormat						format,
+								 const vk::VkComponentMapping&		components			= ComponentMapping(),
+								 vk::VkImageViewCreateFlags			flags				= 0);
+};
+
+class BufferViewCreateInfo : public vk::VkBufferViewCreateInfo
+{
+public:
+	BufferViewCreateInfo		 (vk::VkBuffer		buffer,
+								  vk::VkFormat		format,
+								  vk::VkDeviceSize	offset,
+								  vk::VkDeviceSize	range);
+};
+
+class BufferCreateInfo : public vk::VkBufferCreateInfo
+{
+public:
+	BufferCreateInfo			(vk::VkDeviceSize			size,
+								 vk::VkBufferCreateFlags	usage,
+								 vk::VkSharingMode			sharingMode				= vk::VK_SHARING_MODE_EXCLUSIVE,
+								 deUint32					queueFamilyIndexCount	= 0,
+								 const deUint32*			pQueueFamilyIndices		= DE_NULL,
+								 vk::VkBufferCreateFlags	flags					= 0);
+
+	BufferCreateInfo			(const BufferCreateInfo&	other);
+	BufferCreateInfo& operator=	(const BufferCreateInfo&	other);
+
+private:
+	std::vector<deUint32> m_queueFamilyIndices;
+};
+
+class ImageCreateInfo : public vk::VkImageCreateInfo
+{
+public:
+	ImageCreateInfo				(vk::VkImageType			imageType,
+								 vk::VkFormat				format,
+								 vk::VkExtent3D				extent,
+								 deUint32					mipLevels,
+								 deUint32					arrayLayers,
+								 vk::VkSampleCountFlagBits	samples,
+								 vk::VkImageTiling			tiling,
+								 vk::VkImageUsageFlags		usage,
+								 vk::VkSharingMode			sharingMode				= vk::VK_SHARING_MODE_EXCLUSIVE,
+								 deUint32					queueFamilyIndexCount	= 0,
+								 const deUint32*			pQueueFamilyIndices		= DE_NULL,
+								 vk::VkImageCreateFlags		flags					= 0,
+								 vk::VkImageLayout			initialLayout			= vk::VK_IMAGE_LAYOUT_UNDEFINED);
+
+private:
+	ImageCreateInfo				(const ImageCreateInfo&		other);
+	ImageCreateInfo& operator=	(const ImageCreateInfo&		other);
+
+	std::vector<deUint32> m_queueFamilyIndices;
+};
+
+class FramebufferCreateInfo : public vk::VkFramebufferCreateInfo
+{
+public:
+	FramebufferCreateInfo		(vk::VkRenderPass						renderPass,
+								 const std::vector<vk::VkImageView>&	attachments,
+								 deUint32								width,
+								 deUint32								height,
+								 deUint32								layers);
+};
+
+class AttachmentDescription : public vk::VkAttachmentDescription
+{
+public:
+	AttachmentDescription	(vk::VkFormat				format,
+							 vk::VkSampleCountFlagBits	samples,
+							 vk::VkAttachmentLoadOp		loadOp,
+							 vk::VkAttachmentStoreOp	storeOp,
+							 vk::VkAttachmentLoadOp		stencilLoadOp,
+							 vk::VkAttachmentStoreOp	stencilStoreOp,
+							 vk::VkImageLayout			initialLayout,
+							 vk::VkImageLayout			finalLayout);
+
+	AttachmentDescription	(const vk::VkAttachmentDescription &);
+};
+
+class AttachmentReference : public vk::VkAttachmentReference
+{
+public:
+	AttachmentReference		(deUint32 attachment, vk::VkImageLayout layout);
+	AttachmentReference		(void);
+};
+
+class SubpassDescription : public vk::VkSubpassDescription
+{
+public:
+	SubpassDescription				(vk::VkPipelineBindPoint			pipelineBindPoint,
+									 vk::VkSubpassDescriptionFlags		flags,
+									 deUint32							inputAttachmentCount,
+									 const vk::VkAttachmentReference*	inputAttachments,
+									 deUint32							colorAttachmentCount,
+									 const vk::VkAttachmentReference*	colorAttachments,
+									 const vk::VkAttachmentReference*	resolveAttachments,
+									 vk::VkAttachmentReference			depthStencilAttachment,
+									 deUint32							preserveAttachmentCount,
+									 const deUint32*					preserveAttachments);
+
+	SubpassDescription				(const vk::VkSubpassDescription&	other);
+	SubpassDescription				(const SubpassDescription&			other);
+	SubpassDescription& operator=	(const SubpassDescription&			other);
+
+private:
+	std::vector<vk::VkAttachmentReference>	m_inputAttachments;
+	std::vector<vk::VkAttachmentReference>	m_colorAttachments;
+	std::vector<vk::VkAttachmentReference>	m_resolveAttachments;
+	std::vector<deUint32>					m_preserveAttachments;
+
+	vk::VkAttachmentReference				m_depthStencilAttachment;
+};
+
+class SubpassDependency : public vk::VkSubpassDependency
+{
+public:
+	SubpassDependency (	deUint32					srcSubpass,
+						deUint32					dstSubpass,
+						vk::VkPipelineStageFlags	srcStageMask,
+						vk::VkPipelineStageFlags	dstStageMask,
+						vk::VkAccessFlags			srcAccessMask,
+						vk::VkAccessFlags			dstAccessMask,
+						vk::VkDependencyFlags		dependencyFlags);
+
+	SubpassDependency (const vk::VkSubpassDependency& other);
+};
+
+class RenderPassCreateInfo : public vk::VkRenderPassCreateInfo
+{
+public:
+	RenderPassCreateInfo (const std::vector<vk::VkAttachmentDescription>&	attachments,
+						  const std::vector<vk::VkSubpassDescription>&		subpasses,
+						  const std::vector<vk::VkSubpassDependency>&		dependiences		= std::vector<vk::VkSubpassDependency>());
+
+	RenderPassCreateInfo (deUint32											attachmentCount	= 0,
+						  const vk::VkAttachmentDescription*				pAttachments	= DE_NULL,
+						  deUint32											subpassCount	= 0,
+						  const vk::VkSubpassDescription*					pSubpasses		= DE_NULL,
+						  deUint32											dependencyCount	= 0,
+						  const vk::VkSubpassDependency*					pDependiences	= DE_NULL);
+
+	void addAttachment	(vk::VkAttachmentDescription						attachment);
+	void addSubpass		(vk::VkSubpassDescription							subpass);
+	void addDependency	(vk::VkSubpassDependency							dependency);
+
+private:
+	std::vector<AttachmentDescription>			m_attachments;
+	std::vector<SubpassDescription>				m_subpasses;
+	std::vector<SubpassDependency>				m_dependiences;
+
+	std::vector<vk::VkAttachmentDescription>	m_attachmentsStructs;
+	std::vector<vk::VkSubpassDescription>		m_subpassesStructs;
+	std::vector<vk::VkSubpassDependency>		m_dependiencesStructs;
+
+	RenderPassCreateInfo			(const RenderPassCreateInfo &other); //Not allowed!
+	RenderPassCreateInfo& operator= (const RenderPassCreateInfo &other); //Not allowed!
+};
+
+class RenderPassBeginInfo : public vk::VkRenderPassBeginInfo
+{
+public:
+	RenderPassBeginInfo (vk::VkRenderPass						renderPass,
+						 vk::VkFramebuffer						framebuffer,
+						 vk::VkRect2D							renderArea,
+						 const std::vector<vk::VkClearValue>&	clearValues = std::vector<vk::VkClearValue>());
+
+private:
+	std::vector<vk::VkClearValue> m_clearValues;
+
+	RenderPassBeginInfo				(const RenderPassBeginInfo&	other); //Not allowed!
+	RenderPassBeginInfo& operator=	(const RenderPassBeginInfo&	other); //Not allowed!
+};
+
+class CmdPoolCreateInfo : public vk::VkCommandPoolCreateInfo
+{
+public:
+	CmdPoolCreateInfo (deUint32						queueFamilyIndex,
+					   vk::VkCommandPoolCreateFlags flags				= vk::VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+};
+
+class CmdBufferBeginInfo : public vk::VkCommandBufferBeginInfo
+{
+public:
+	CmdBufferBeginInfo (vk::VkCommandBufferUsageFlags		flags					= 0);
+};
+
+class DescriptorPoolSize : public vk::VkDescriptorPoolSize
+{
+public:
+	DescriptorPoolSize (vk::VkDescriptorType _type, deUint32 _descriptorCount)
+	{
+		type			= _type;
+		descriptorCount = _descriptorCount;
+	}
+};
+
+class DescriptorPoolCreateInfo : public vk::VkDescriptorPoolCreateInfo
+{
+public:
+	DescriptorPoolCreateInfo (const std::vector<vk::VkDescriptorPoolSize>&	poolSizeCounts,
+							  vk::VkDescriptorPoolCreateFlags				flags,
+							  deUint32										maxSets);
+
+	DescriptorPoolCreateInfo& addDescriptors (vk::VkDescriptorType type, deUint32 count);
+
+private:
+	std::vector<vk::VkDescriptorPoolSize> m_poolSizeCounts;
+};
+
+class DescriptorSetLayoutCreateInfo : public vk::VkDescriptorSetLayoutCreateInfo
+{
+public:
+	DescriptorSetLayoutCreateInfo (deUint32 bindingCount, const vk::VkDescriptorSetLayoutBinding* pBindings);
+};
+
+class PipelineLayoutCreateInfo : public vk::VkPipelineLayoutCreateInfo
+{
+public:
+	PipelineLayoutCreateInfo (deUint32										descriptorSetCount,
+							  const vk::VkDescriptorSetLayout*				pSetLayouts,
+							  deUint32										pushConstantRangeCount	= 0,
+							  const vk::VkPushConstantRange*				pPushConstantRanges		= DE_NULL);
+
+	PipelineLayoutCreateInfo (const std::vector<vk::VkDescriptorSetLayout>&	setLayouts				= std::vector<vk::VkDescriptorSetLayout>(),
+							  deUint32										pushConstantRangeCount	= 0,
+							  const vk::VkPushConstantRange*				pPushConstantRanges		= DE_NULL);
+
+private:
+	std::vector<vk::VkDescriptorSetLayout>	m_setLayouts;
+	std::vector<vk::VkPushConstantRange>	m_pushConstantRanges;
+};
+
+class PipelineCreateInfo : public vk::VkGraphicsPipelineCreateInfo
+{
+public:
+	class VertexInputState : public vk::VkPipelineVertexInputStateCreateInfo
+	{
+	public:
+		VertexInputState (deUint32										vertexBindingDescriptionCount	= 0,
+						  const vk::VkVertexInputBindingDescription*	pVertexBindingDescriptions		= NULL,
+						  deUint32										vertexAttributeDescriptionCount	= 0,
+						  const vk::VkVertexInputAttributeDescription*	pVertexAttributeDescriptions	= NULL);
+	};
+
+	class InputAssemblerState : public vk::VkPipelineInputAssemblyStateCreateInfo
+	{
+	public:
+		InputAssemblerState (vk::VkPrimitiveTopology topology, vk::VkBool32 primitiveRestartEnable = false);
+	};
+
+	class TessellationState : public vk::VkPipelineTessellationStateCreateInfo
+	{
+	public:
+		TessellationState (deUint32 patchControlPoints = 0);
+	};
+
+	class ViewportState : public vk::VkPipelineViewportStateCreateInfo
+	{
+	public:
+		ViewportState				(deUint32						viewportCount,
+									 std::vector<vk::VkViewport>	viewports		= std::vector<vk::VkViewport>(0),
+									 std::vector<vk::VkRect2D>		scissors		= std::vector<vk::VkRect2D>(0));
+
+		ViewportState				(const ViewportState&			other);
+		ViewportState& operator=	(const ViewportState&			other);
+
+		std::vector<vk::VkViewport> m_viewports;
+		std::vector<vk::VkRect2D>	m_scissors;
+	};
+
+	class RasterizerState : public vk::VkPipelineRasterizationStateCreateInfo
+	{
+	public:
+		RasterizerState (vk::VkBool32			depthClampEnable		= false,
+						 vk::VkBool32			rasterizerDiscardEnable = false,
+						 vk::VkPolygonMode		polygonMode				= vk::VK_POLYGON_MODE_FILL,
+						 vk::VkCullModeFlags	cullMode				= vk::VK_CULL_MODE_NONE,
+						 vk::VkFrontFace		frontFace				= vk::VK_FRONT_FACE_CLOCKWISE,
+						 vk::VkBool32			depthBiasEnable			= true,
+						 float					depthBiasConstantFactor	= 0.0f,
+						 float					depthBiasClamp			= 0.0f,
+						 float					depthBiasSlopeFactor	= 0.0f,
+						 float					lineWidth				= 1.0f);
+	};
+
+	class MultiSampleState : public vk::VkPipelineMultisampleStateCreateInfo
+	{
+	public:
+		MultiSampleState			(vk::VkSampleCountFlagBits				rasterizationSamples		= vk::VK_SAMPLE_COUNT_1_BIT,
+									 vk::VkBool32							sampleShadingEnable			= false,
+									 float									minSampleShading			= 0.0f,
+									 const std::vector<vk::VkSampleMask>&	sampleMask					= std::vector<vk::VkSampleMask>(1, 0xffffffff),
+									 bool									alphaToCoverageEnable		= false,
+									 bool									alphaToOneEnable			= false);
+
+		MultiSampleState			(const MultiSampleState&				other);
+		MultiSampleState& operator= (const MultiSampleState&				other);
+
+	private:
+		std::vector<vk::VkSampleMask> m_sampleMask;
+	};
+
+	class ColorBlendState : public vk::VkPipelineColorBlendStateCreateInfo
+	{
+	public:
+		class Attachment : public vk::VkPipelineColorBlendAttachmentState
+		{
+		public:
+			Attachment (vk::VkBool32		blendEnable			= false,
+						vk::VkBlendFactor	srcColorBlendFactor	= vk::VK_BLEND_FACTOR_SRC_COLOR,
+						vk::VkBlendFactor	dstColorBlendFactor	= vk::VK_BLEND_FACTOR_DST_COLOR,
+						vk::VkBlendOp		colorBlendOp		= vk::VK_BLEND_OP_ADD,
+						vk::VkBlendFactor	srcAlphaBlendFactor	= vk::VK_BLEND_FACTOR_SRC_COLOR,
+						vk::VkBlendFactor	dstAlphaBlendFactor	= vk::VK_BLEND_FACTOR_DST_COLOR,
+						vk::VkBlendOp		alphaBlendOp		= vk::VK_BLEND_OP_ADD,
+						deUint8				colorWriteMask		= 0xff);
+		};
+
+		ColorBlendState (const std::vector<vk::VkPipelineColorBlendAttachmentState>&	attachments,
+						 vk::VkBool32													logicOpEnable			= false,
+						 vk::VkLogicOp													logicOp					= vk::VK_LOGIC_OP_COPY);
+
+		ColorBlendState (deUint32														attachmentCount,
+						 const vk::VkPipelineColorBlendAttachmentState*					attachments,
+						 vk::VkBool32													logicOpEnable			= false,
+						 vk::VkLogicOp													logicOp					= vk::VK_LOGIC_OP_COPY);
+
+		ColorBlendState (const vk::VkPipelineColorBlendStateCreateInfo&					createInfo);
+		ColorBlendState (const ColorBlendState&											createInfo,
+						 std::vector<float>												blendConstants			= std::vector<float>(4));
+
+	private:
+		std::vector<vk::VkPipelineColorBlendAttachmentState> m_attachments;
+	};
+
+	class DepthStencilState : public vk::VkPipelineDepthStencilStateCreateInfo
+	{
+	public:
+		class StencilOpState : public vk::VkStencilOpState
+		{
+		public:
+			StencilOpState (vk::VkStencilOp failOp					= vk::VK_STENCIL_OP_REPLACE,
+							vk::VkStencilOp passOp					= vk::VK_STENCIL_OP_REPLACE,
+							vk::VkStencilOp depthFailOp				= vk::VK_STENCIL_OP_REPLACE,
+							vk::VkCompareOp compareOp				= vk::VK_COMPARE_OP_ALWAYS,
+							deUint32		compareMask				= 0xffffffffu,
+							deUint32		writeMask				= 0xffffffffu,
+							deUint32		reference				= 0);
+		};
+
+		DepthStencilState (vk::VkBool32		depthTestEnable			= false,
+						   vk::VkBool32		depthWriteEnable		= false,
+						   vk::VkCompareOp	depthCompareOp			= vk::VK_COMPARE_OP_ALWAYS,
+						   vk::VkBool32		depthBoundsTestEnable	= false,
+						   vk::VkBool32		stencilTestEnable		= false,
+						   StencilOpState	front					= StencilOpState(),
+						   StencilOpState	back					= StencilOpState(),
+						   float			minDepthBounds			= -1.0f,
+						   float			maxDepthBounds			= 1.0f);
+	};
+
+	class PipelineShaderStage : public vk::VkPipelineShaderStageCreateInfo
+	{
+	public:
+		PipelineShaderStage (vk::VkShaderModule shaderModule, const char* pName, vk::VkShaderStageFlagBits stage);
+	};
+
+	class DynamicState : public vk::VkPipelineDynamicStateCreateInfo
+	{
+	public:
+		DynamicState			(const std::vector<vk::VkDynamicState>& dynamicStates = std::vector<vk::VkDynamicState>(0));
+
+		DynamicState			(const DynamicState& other);
+		DynamicState& operator= (const DynamicState& other);
+
+		std::vector<vk::VkDynamicState> m_dynamicStates;
+	};
+
+	PipelineCreateInfo				(vk::VkPipelineLayout								layout,
+									 vk::VkRenderPass									renderPass,
+									 int												subpass,
+									 vk::VkPipelineCreateFlags							flags);
+
+	PipelineCreateInfo& addShader	(const vk::VkPipelineShaderStageCreateInfo&			shader);
+
+	PipelineCreateInfo& addState	(const vk::VkPipelineVertexInputStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineInputAssemblyStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineColorBlendStateCreateInfo&		state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineViewportStateCreateInfo&		state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineDepthStencilStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineTessellationStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineRasterizationStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineMultisampleStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineDynamicStateCreateInfo&		state);
+
+private:
+	std::vector<vk::VkPipelineShaderStageCreateInfo>		m_shaders;
+
+	vk::VkPipelineVertexInputStateCreateInfo				m_vertexInputState;
+	vk::VkPipelineInputAssemblyStateCreateInfo				m_inputAssemblyState;
+	std::vector<vk::VkPipelineColorBlendAttachmentState>	m_colorBlendStateAttachments;
+	vk::VkPipelineColorBlendStateCreateInfo					m_colorBlendState;
+	vk::VkPipelineViewportStateCreateInfo					m_viewportState;
+	vk::VkPipelineDepthStencilStateCreateInfo				m_dynamicDepthStencilState;
+	vk::VkPipelineTessellationStateCreateInfo				m_tessState;
+	vk::VkPipelineRasterizationStateCreateInfo				m_rasterState;
+	vk::VkPipelineMultisampleStateCreateInfo				m_multisampleState;
+	vk::VkPipelineDynamicStateCreateInfo					m_dynamicState;
+
+	std::vector<vk::VkDynamicState>							m_dynamicStates;
+
+	std::vector<vk::VkViewport>								m_viewports;
+	std::vector<vk::VkRect2D>								m_scissors;
+
+	std::vector<vk::VkSampleMask>							m_multisampleStateSampleMask;
+};
+
+class SamplerCreateInfo : public vk::VkSamplerCreateInfo
+{
+public:
+	SamplerCreateInfo (vk::VkFilter				magFilter				= vk::VK_FILTER_NEAREST,
+					   vk::VkFilter				minFilter				= vk::VK_FILTER_NEAREST,
+					   vk::VkSamplerMipmapMode	mipmapMode				= vk::VK_SAMPLER_MIPMAP_MODE_NEAREST,
+					   vk::VkSamplerAddressMode	addressU				= vk::VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+					   vk::VkSamplerAddressMode	addressV				= vk::VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+					   vk::VkSamplerAddressMode	addressW				= vk::VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+					   float					mipLodBias				= 0.0f,
+					   vk::VkBool32				anisotropyEnable		= vk::VK_FALSE,
+					   float					maxAnisotropy			= 1.0f,
+					   vk::VkBool32				compareEnable			= false,
+					   vk::VkCompareOp			compareOp				= vk::VK_COMPARE_OP_ALWAYS,
+					   float					minLod					= 0.0f,
+					   float					maxLod					= 16.0f,
+					   vk::VkBorderColor		borderColor				= vk::VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
+					   vk::VkBool32				unnormalizedCoordinates	= false);
+};
+
+} // Draw
+} // vkt
+
+#endif // _VKTDRAWCREATEINFOUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawImageObjectUtil.cpp b/external/vulkancts/modules/vulkan/draw/vktDrawImageObjectUtil.cpp
new file mode 100644
index 0000000..472fd1e
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawImageObjectUtil.cpp
@@ -0,0 +1,939 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image Object Util
+ *//*--------------------------------------------------------------------*/
+
+#include "vktDrawImageObjectUtil.hpp"
+
+#include "tcuSurface.hpp"
+#include "tcuVectorUtil.hpp"
+
+#include "vkRefUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "vktDrawCreateInfoUtil.hpp"
+#include "vktDrawBufferObjectUtil.hpp"
+
+#include "tcuTextureUtil.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+
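+// MemoryOp::pack copies a tightly packed source buffer into a (possibly padded) subresource
+// layout; MemoryOp::unpack performs the reverse. A pitch of zero means "tightly packed" and
+// is replaced by the minimal pitch computed from the other parameters.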
+void MemoryOp::pack (int				pixelSize,
+					 int				width,
+					 int				height,
+					 int				depth,
+					 vk::VkDeviceSize	rowPitchOrZero,
+					 vk::VkDeviceSize	depthPitchOrZero,
+					 const void *		srcBuffer,
+					 void *				destBuffer)
+{
+	vk::VkDeviceSize rowPitch	= rowPitchOrZero;
+	vk::VkDeviceSize depthPitch	= depthPitchOrZero;
+
+	if (rowPitch == 0)
+		rowPitch = width * pixelSize;
+
+	if (depthPitch == 0)
+		depthPitch = rowPitch * height;
+
+	const vk::VkDeviceSize size = depthPitch * depth;
+
+	const deUint8*	srcRow		= reinterpret_cast<const deUint8*>(srcBuffer);
+	const deUint8*	srcStart	= srcRow;
+	deUint8*		dstRow		= reinterpret_cast<deUint8*>(destBuffer);
+	deUint8*		dstStart	= dstRow;
+
+	if (rowPitch == static_cast<vk::VkDeviceSize>(width * pixelSize) &&
+		depthPitch == static_cast<vk::VkDeviceSize>(rowPitch * height))
+	{
+		// fast path
+		deMemcpy(dstRow, srcRow, static_cast<size_t>(size));
+	}
+	else
+	{
+		// slower, per row path
+		for (int d = 0; d < depth; d++)
+		{
+			vk::VkDeviceSize offsetDepthDst = d * depthPitch;
+			vk::VkDeviceSize offsetDepthSrc = d * (pixelSize * width * height);
+			srcRow = srcStart + offsetDepthSrc;
+			dstRow = dstStart + offsetDepthDst;
+			for (int r = 0; r < height; ++r)
+			{
+				deMemcpy(dstRow, srcRow, static_cast<size_t>(pixelSize * width));
+				srcRow += pixelSize * width;
+				dstRow += rowPitch;
+			}
+		}
+	}
+}
+
+void MemoryOp::unpack (int					pixelSize,
+					   int					width,
+					   int					height,
+					   int					depth,
+					   vk::VkDeviceSize		rowPitchOrZero,
+					   vk::VkDeviceSize		depthPitchOrZero,
+					   const void *			srcBuffer,
+					   void *				destBuffer)
+{
+	vk::VkDeviceSize rowPitch	= rowPitchOrZero;
+	vk::VkDeviceSize depthPitch = depthPitchOrZero;
+
+	if (rowPitch == 0)
+		rowPitch = width * pixelSize;
+
+	if (depthPitch == 0)
+		depthPitch = rowPitch * height;
+
+	const vk::VkDeviceSize size = depthPitch * depth;
+
+	const deUint8*	srcRow		= reinterpret_cast<const deUint8*>(srcBuffer);
+	const deUint8*	srcStart	= srcRow;
+	deUint8*		dstRow		= reinterpret_cast<deUint8*>(destBuffer);
+	deUint8*		dstStart	= dstRow;
+
+	if (rowPitch == static_cast<vk::VkDeviceSize>(width * pixelSize) &&
+		depthPitch == static_cast<vk::VkDeviceSize>(rowPitch * height))
+	{
+		// fast path
+		deMemcpy(dstRow, srcRow, static_cast<size_t>(size));
+	}
+	else
+	{
+		// slower, per row path
+		for (size_t d = 0; d < (size_t)depth; d++)
+		{
+			vk::VkDeviceSize offsetDepthDst = d * (pixelSize * width * height);
+			vk::VkDeviceSize offsetDepthSrc = d * depthPitch;
+			srcRow = srcStart + offsetDepthSrc;
+			dstRow = dstStart + offsetDepthDst;
+			for (int r = 0; r < height; ++r)
+			{
+				deMemcpy(dstRow, srcRow, static_cast<size_t>(pixelSize * width));
+				srcRow += rowPitch;
+				dstRow += pixelSize * width;
+			}
+		}
+	}
+}
+
+Image::Image (const vk::DeviceInterface& vk,
+			  vk::VkDevice				device,
+			  vk::VkFormat				format,
+			  const vk::VkExtent3D&		extent,
+			  deUint32					levelCount,
+			  deUint32					layerCount,
+			  vk::Move<vk::VkImage>		object_)
+	: m_allocation		(DE_NULL)
+	, m_object			(object_)
+	, m_format			(format)
+	, m_extent			(extent)
+	, m_levelCount		(levelCount)
+	, m_layerCount		(layerCount)
+	, m_vk(vk)
+	, m_device(device)
+{
+}
+
+tcu::ConstPixelBufferAccess Image::readSurface (vk::VkQueue					queue,
+												vk::Allocator&				allocator,
+												vk::VkImageLayout			layout,
+												vk::VkOffset3D				offset,
+												int							width,
+												int							height,
+												vk::VkImageAspectFlagBits	aspect,
+												unsigned int				mipLevel,
+												unsigned int				arrayElement)
+{
+	m_pixelAccessData.resize(width * height * vk::mapVkFormat(m_format).getPixelSize());
+	deMemset(m_pixelAccessData.data(), 0, m_pixelAccessData.size());
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		read(queue, allocator, layout, offset, width, height, 1, mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_2D,
+		m_pixelAccessData.data());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		readUsingBuffer(queue, allocator, layout, offset, width, height, 1, mipLevel, arrayElement, aspect, m_pixelAccessData.data());
+	}
+	return tcu::ConstPixelBufferAccess(vk::mapVkFormat(m_format), width, height, 1, m_pixelAccessData.data());
+}
+
+tcu::ConstPixelBufferAccess Image::readVolume (vk::VkQueue					queue,
+											   vk::Allocator&				allocator,
+											   vk::VkImageLayout			layout,
+											   vk::VkOffset3D				offset,
+											   int							width,
+											   int							height,
+											   int							depth,
+											   vk::VkImageAspectFlagBits	aspect,
+											   unsigned int					mipLevel,
+											   unsigned int					arrayElement)
+{
+	m_pixelAccessData.resize(width * height * depth * vk::mapVkFormat(m_format).getPixelSize());
+	deMemset(m_pixelAccessData.data(), 0, m_pixelAccessData.size());
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		read(queue, allocator, layout, offset, width, height, depth, mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_3D,
+		m_pixelAccessData.data());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		readUsingBuffer(queue, allocator, layout, offset, width, height, depth, mipLevel, arrayElement, aspect, m_pixelAccessData.data());
+	}
+	return tcu::ConstPixelBufferAccess(vk::mapVkFormat(m_format), width, height, depth, m_pixelAccessData.data());
+}
+
+tcu::ConstPixelBufferAccess Image::readSurface1D(vk::VkQueue				queue,
+												 vk::Allocator&				allocator,
+												 vk::VkImageLayout			layout,
+												 vk::VkOffset3D				offset,
+												 int						width,
+												 vk::VkImageAspectFlagBits	aspect,
+												 unsigned int				mipLevel,
+												 unsigned int				arrayElement)
+{
+	m_pixelAccessData.resize(width * vk::mapVkFormat(m_format).getPixelSize());
+	deMemset(m_pixelAccessData.data(), 0, m_pixelAccessData.size());
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		read(queue, allocator, layout, offset, width, 1, 1, mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_1D,
+		m_pixelAccessData.data());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		readUsingBuffer(queue, allocator, layout, offset, width, 1, 1, mipLevel, arrayElement, aspect,
+		m_pixelAccessData.data());
+	}
+	return tcu::ConstPixelBufferAccess(vk::mapVkFormat(m_format), width, 1, 1, m_pixelAccessData.data());
+}
+
+void Image::read (vk::VkQueue					queue,
+				  vk::Allocator&				allocator,
+				  vk::VkImageLayout				layout,
+				  vk::VkOffset3D				offset,
+				  int							width,
+				  int							height,
+				  int							depth,
+				  unsigned int					mipLevel,
+				  unsigned int					arrayElement,
+				  vk::VkImageAspectFlagBits		aspect,
+				  vk::VkImageType				type,
+				  void *						data)
+{
+	DE_ASSERT(layout == vk::VK_IMAGE_LAYOUT_GENERAL || layout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+	de::SharedPtr<Image> stagingResource = copyToLinearImage(queue, allocator, layout, offset, width,
+															 height, depth, mipLevel, arrayElement, aspect, type);
+	const vk::VkOffset3D zeroOffset = {0, 0, 0};
+	stagingResource->readLinear(zeroOffset, width, height, depth, 0, 0, aspect, data);
+}
+
+void Image::readUsingBuffer (vk::VkQueue				queue,
+							 vk::Allocator&				allocator,
+							 vk::VkImageLayout			layout,
+							 vk::VkOffset3D				offset,
+							 int						width,
+							 int						height,
+							 int						depth,
+							 unsigned int				mipLevel,
+							 unsigned int				arrayElement,
+							 vk::VkImageAspectFlagBits	aspect,
+							 void *						data)
+{
+	DE_ASSERT(layout == vk::VK_IMAGE_LAYOUT_GENERAL || layout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+	de::SharedPtr<Buffer> stagingResource;
+
+	bool isCombinedType = isCombinedDepthStencilType(vk::mapVkFormat(m_format).type);
+	vk::VkDeviceSize bufferSize = 0;
+
+	if (!isCombinedType)
+		bufferSize = vk::mapVkFormat(m_format).getPixelSize() * width * height * depth;
+
+	if (isCombinedType)
+	{
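+		// Only one aspect of a combined depth/stencil format is read at a time, so the
+		// staging buffer is sized for that aspect's per-texel footprint.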
+		int pixelSize = 0;
+		switch (m_format)
+		{
+			case vk::VK_FORMAT_D16_UNORM_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 2 : 1;
+				break;
+			case  vk::VK_FORMAT_D32_SFLOAT_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 4 : 1;
+				break;
+			case vk::VK_FORMAT_X8_D24_UNORM_PACK32:
+			case vk::VK_FORMAT_D24_UNORM_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 3 : 1;
+				break;
+
+			default:
+				DE_FATAL("Not implemented");
+		}
+		bufferSize = pixelSize*width*height*depth;
+	}
+
+	BufferCreateInfo stagingBufferResourceCreateInfo(bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT | vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
+	stagingResource = Buffer::createAndAlloc(m_vk, m_device, stagingBufferResourceCreateInfo, allocator, vk::MemoryRequirement::HostVisible);
+
+	{
+		//todo [scygan] get proper queueFamilyIndex
+		CmdPoolCreateInfo copyCmdPoolCreateInfo(0);
+		vk::Unique<vk::VkCommandPool> copyCmdPool(vk::createCommandPool(m_vk, m_device, &copyCmdPoolCreateInfo));
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*copyCmdPool,										// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					bufferCount;
+		};
+		vk::Unique<vk::VkCommandBuffer> copyCmdBuffer(vk::allocateCommandBuffer(m_vk, m_device, &cmdBufferAllocateInfo));
+
+		CmdBufferBeginInfo beginInfo;
+		VK_CHECK(m_vk.beginCommandBuffer(*copyCmdBuffer, &beginInfo));
+
+		if (layout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
+		{
+			layout = vk::VK_IMAGE_LAYOUT_GENERAL;
+
+			vk::VkImageMemoryBarrier barrier;
+			barrier.sType = vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+			barrier.pNext = DE_NULL;
+			barrier.srcAccessMask = 0;
+			barrier.dstAccessMask = 0;
+			barrier.oldLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
+			barrier.newLayout = vk::VK_IMAGE_LAYOUT_GENERAL;
+			barrier.srcQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.dstQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.image = object();
+
+			barrier.subresourceRange.aspectMask = aspect;
+			barrier.subresourceRange.baseMipLevel = 0;
+			barrier.subresourceRange.levelCount = m_levelCount;
+			barrier.subresourceRange.baseArrayLayer = 0;
+			barrier.subresourceRange.layerCount = m_layerCount;
+
+			m_vk.cmdPipelineBarrier(*copyCmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+									0, (const vk::VkMemoryBarrier*)DE_NULL,
+									0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+									1, &barrier);
+		}
+
+		vk::VkBufferImageCopy region =
+		{
+			0, 0, 0,
+			{ aspect, mipLevel, arrayElement, 1 },
+			offset,
+			{ (deUint32)width, (deUint32)height, (deUint32)depth }
+		};
+
+		m_vk.cmdCopyImageToBuffer(*copyCmdBuffer, object(), layout, stagingResource->object(), 1, &region);
+		VK_CHECK(m_vk.endCommandBuffer(*copyCmdBuffer));
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,			// VkStructureType				sType;
+			DE_NULL,									// const void*					pNext;
+			0,											// deUint32						waitSemaphoreCount;
+			DE_NULL,									// const VkSemaphore*			pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,											// deUint32						commandBufferCount;
+			&copyCmdBuffer.get(),						// const VkCommandBuffer*		pCommandBuffers;
+			0,											// deUint32						signalSemaphoreCount;
+			DE_NULL										// const VkSemaphore*			pSignalSemaphores;
+		};
+		VK_CHECK(m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL));
+
+		// TODO: make this less intrusive
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+	}
+
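+	// Make the device writes visible to the host before reading the staging buffer back
+	// (counterpart of the flushMappedMemoryRange() used on the upload path).
+	vk::invalidateMappedMemoryRange(m_vk, m_device, stagingResource->getBoundMemory().getMemory(), stagingResource->getBoundMemory().getOffset(), bufferSize);
+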
+	deUint8* destPtr = reinterpret_cast<deUint8*>(stagingResource->getBoundMemory().getHostPtr());
+	deMemcpy(data, destPtr, static_cast<size_t>(bufferSize));
+}
+
+tcu::ConstPixelBufferAccess Image::readSurfaceLinear (vk::VkOffset3D				offset,
+													  int							width,
+													  int							height,
+													  int							depth,
+													  vk::VkImageAspectFlagBits		aspect,
+													  unsigned int					mipLevel,
+													  unsigned int					arrayElement)
+{
+	m_pixelAccessData.resize(width * height * vk::mapVkFormat(m_format).getPixelSize());
+	readLinear(offset, width, height, depth, mipLevel, arrayElement, aspect, m_pixelAccessData.data());
+	return tcu::ConstPixelBufferAccess(vk::mapVkFormat(m_format), width, height, 1, m_pixelAccessData.data());
+}
+
+void Image::readLinear (vk::VkOffset3D				offset,
+						int							width,
+						int							height,
+						int							depth,
+						unsigned int				mipLevel,
+						unsigned int				arrayElement,
+						vk::VkImageAspectFlagBits	aspect,
+						void *						data)
+{
+	vk::VkImageSubresource imageSubResource = { aspect, mipLevel, arrayElement };
+
+	vk::VkSubresourceLayout imageLayout;
+
+	deMemset(&imageLayout, 0, sizeof(imageLayout));
+	m_vk.getImageSubresourceLayout(m_device, object(), &imageSubResource, &imageLayout);
+
+	const deUint8* srcPtr = reinterpret_cast<const deUint8*>(getBoundMemory().getHostPtr());
+	srcPtr += imageLayout.offset + getPixelOffset(offset, imageLayout.rowPitch, imageLayout.depthPitch, mipLevel, arrayElement);
+
+	MemoryOp::unpack(vk::mapVkFormat(m_format).getPixelSize(), width, height, depth,
+		imageLayout.rowPitch, imageLayout.depthPitch, srcPtr, data);
+}
+
+de::SharedPtr<Image> Image::copyToLinearImage (vk::VkQueue					queue,
+											   vk::Allocator&				allocator,
+											   vk::VkImageLayout			layout,
+											   vk::VkOffset3D				offset,
+											   int							width,
+											   int							height,
+											   int							depth,
+											   unsigned int					mipLevel,
+											   unsigned int					arrayElement,
+											   vk::VkImageAspectFlagBits	aspect,
+											   vk::VkImageType				type)
+{
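+	// Copy the requested subresource region into a host-visible, linearly tiled staging
+	// image that can then be mapped and read back with readLinear().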
+	de::SharedPtr<Image> stagingResource;
+	{
+		vk::VkExtent3D stagingExtent = {(deUint32)width, (deUint32)height, (deUint32)depth};
+		ImageCreateInfo stagingResourceCreateInfo(type, m_format, stagingExtent, 1, 1, vk::VK_SAMPLE_COUNT_1_BIT,
+												  vk::VK_IMAGE_TILING_LINEAR, vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT);
+
+		stagingResource = Image::createAndAlloc(m_vk, m_device, stagingResourceCreateInfo, allocator,
+												vk::MemoryRequirement::HostVisible);
+
+		//todo [scygan] get proper queueFamilyIndex
+		CmdPoolCreateInfo copyCmdPoolCreateInfo(0);
+		vk::Unique<vk::VkCommandPool> copyCmdPool(vk::createCommandPool(m_vk, m_device, &copyCmdPoolCreateInfo));
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*copyCmdPool,										// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					bufferCount;
+		};
+		vk::Unique<vk::VkCommandBuffer> copyCmdBuffer(vk::allocateCommandBuffer(m_vk, m_device, &cmdBufferAllocateInfo));
+
+		CmdBufferBeginInfo beginInfo;
+		VK_CHECK(m_vk.beginCommandBuffer(*copyCmdBuffer, &beginInfo));
+
+		transition2DImage(m_vk, *copyCmdBuffer, stagingResource->object(), aspect, vk::VK_IMAGE_LAYOUT_UNDEFINED, vk::VK_IMAGE_LAYOUT_GENERAL);
+
+		const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+		vk::VkImageCopy region = { {aspect, mipLevel, arrayElement, 1}, offset, {aspect, 0, 0, 1}, zeroOffset, {(deUint32)width, (deUint32)height, (deUint32)depth} };
+
+		m_vk.cmdCopyImage(*copyCmdBuffer, object(), layout, stagingResource->object(), vk::VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+		VK_CHECK(m_vk.endCommandBuffer(*copyCmdBuffer));
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,			// VkStructureType				sType;
+			DE_NULL,									// const void*					pNext;
+			0,											// deUint32						waitSemaphoreCount;
+			DE_NULL,									// const VkSemaphore*			pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,											// deUint32						commandBufferCount;
+			&copyCmdBuffer.get(),						// const VkCommandBuffer*		pCommandBuffers;
+			0,											// deUint32						signalSemaphoreCount;
+			DE_NULL										// const VkSemaphore*			pSignalSemaphores;
+		};
+		VK_CHECK(m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL));
+
+		// TODO: make this less intrusive
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+	}
+	return stagingResource;
+}
+
+void Image::uploadVolume(const tcu::ConstPixelBufferAccess&	access,
+						 vk::VkQueue						queue,
+						 vk::Allocator&							allocator,
+						 vk::VkImageLayout					layout,
+						 vk::VkOffset3D						offset,
+						 vk::VkImageAspectFlagBits			aspect,
+						 unsigned int						mipLevel,
+						 unsigned int						arrayElement)
+{
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		upload(queue, allocator, layout, offset, access.getWidth(),
+		access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_3D,
+		access.getDataPtr());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		uploadUsingBuffer(queue, allocator, layout, offset, access.getWidth(),
+		access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, access.getDataPtr());
+	}
+}
+
+void Image::uploadSurface (const tcu::ConstPixelBufferAccess&	access,
+						   vk::VkQueue							queue,
+						   vk::Allocator&							allocator,
+						   vk::VkImageLayout					layout,
+						   vk::VkOffset3D						offset,
+						   vk::VkImageAspectFlagBits			aspect,
+						   unsigned int							mipLevel,
+						   unsigned int							arrayElement)
+{
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		upload(queue, allocator, layout, offset, access.getWidth(),
+			access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_2D,
+			access.getDataPtr());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		uploadUsingBuffer(queue, allocator, layout, offset, access.getWidth(),
+			access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, access.getDataPtr());
+	}
+}
+
+void Image::uploadSurface1D (const tcu::ConstPixelBufferAccess&	access,
+							 vk::VkQueue						queue,
+							 vk::Allocator&							allocator,
+							 vk::VkImageLayout					layout,
+							 vk::VkOffset3D						offset,
+							 vk::VkImageAspectFlagBits			aspect,
+							 unsigned int						mipLevel,
+							 unsigned int						arrayElement)
+{
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		upload(queue, allocator, layout, offset, access.getWidth(),
+			access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_1D,
+			access.getDataPtr());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		uploadUsingBuffer(queue, allocator, layout, offset, access.getWidth(),
+			access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, access.getDataPtr());
+	}
+}
+
+void Image::uploadSurfaceLinear (const tcu::ConstPixelBufferAccess&	access,
+								 vk::VkOffset3D						offset,
+								 int								width,
+								 int								height,
+								 int								depth,
+								 vk::VkImageAspectFlagBits			aspect,
+								 unsigned int						mipLevel,
+								 unsigned int						arrayElement)
+{
+	uploadLinear(offset, width, height, depth, mipLevel, arrayElement, aspect, access.getDataPtr());
+}
+
+void Image::upload (vk::VkQueue					queue,
+					vk::Allocator&					allocator,
+					vk::VkImageLayout			layout,
+					vk::VkOffset3D				offset,
+					int							width,
+					int							height,
+					int							depth,
+					unsigned int				mipLevel,
+					unsigned int				arrayElement,
+					vk::VkImageAspectFlagBits	aspect,
+					vk::VkImageType				type,
+					const void *				data)
+{
+	DE_ASSERT(layout == vk::VK_IMAGE_LAYOUT_GENERAL || layout == vk::VK_IMAGE_LAYOUT_UNDEFINED || layout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
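+	// Fill a host-visible linear staging image with uploadLinear() and then copy it into
+	// this image with vkCmdCopyImage.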
+	de::SharedPtr<Image> stagingResource;
+	vk::VkExtent3D extent = {(deUint32)width, (deUint32)height, (deUint32)depth};
+	ImageCreateInfo stagingResourceCreateInfo(
+		type, m_format, extent, 1, 1, vk::VK_SAMPLE_COUNT_1_BIT,
+		vk::VK_IMAGE_TILING_LINEAR, vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+
+	stagingResource = Image::createAndAlloc(m_vk, m_device, stagingResourceCreateInfo, allocator,
+								vk::MemoryRequirement::HostVisible);
+
+	const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+	stagingResource->uploadLinear(zeroOffset, width, height, depth, 0, 0, aspect, data);
+
+	{
+		//todo [scygan] get proper queueFamilyIndex
+		CmdPoolCreateInfo copyCmdPoolCreateInfo(0);
+		vk::Unique<vk::VkCommandPool> copyCmdPool(vk::createCommandPool(m_vk, m_device, &copyCmdPoolCreateInfo));
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*copyCmdPool,										// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					bufferCount;
+		};
+
+		vk::Unique<vk::VkCommandBuffer> copyCmdBuffer(vk::allocateCommandBuffer(m_vk, m_device, &cmdBufferAllocateInfo));
+
+		CmdBufferBeginInfo beginInfo;
+		VK_CHECK(m_vk.beginCommandBuffer(*copyCmdBuffer, &beginInfo));
+
+		if (layout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
+		{
+			layout = vk::VK_IMAGE_LAYOUT_GENERAL;
+
+			vk::VkImageMemoryBarrier barrier;
+			barrier.sType = vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+			barrier.pNext = DE_NULL;
+			barrier.srcAccessMask = 0;
+			barrier.dstAccessMask = 0;
+			barrier.oldLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
+			barrier.newLayout = vk::VK_IMAGE_LAYOUT_GENERAL;
+			barrier.srcQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.dstQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.image = object();
+
+			barrier.subresourceRange.aspectMask = aspect;
+			barrier.subresourceRange.baseMipLevel = 0;
+			barrier.subresourceRange.levelCount = m_levelCount;
+			barrier.subresourceRange.baseArrayLayer = 0;
+			barrier.subresourceRange.layerCount = m_layerCount;
+
+			m_vk.cmdPipelineBarrier(*copyCmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+									0, (const vk::VkMemoryBarrier*)DE_NULL,
+									0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+									1, &barrier);
+		}
+
+		transition2DImage(m_vk, *copyCmdBuffer, stagingResource->object(), aspect, vk::VK_IMAGE_LAYOUT_UNDEFINED, vk::VK_IMAGE_LAYOUT_GENERAL);
+
+		vk::VkImageCopy region = {{aspect, 0, 0, 1},
+									zeroOffset,
+									{aspect, mipLevel, arrayElement, 1},
+									offset,
+									{(deUint32)width, (deUint32)height, (deUint32)depth}};
+
+		m_vk.cmdCopyImage(*copyCmdBuffer, stagingResource->object(),
+								vk::VK_IMAGE_LAYOUT_GENERAL, object(), layout, 1, &region);
+		VK_CHECK(m_vk.endCommandBuffer(*copyCmdBuffer));
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,			// VkStructureType				sType;
+			DE_NULL,									// const void*					pNext;
+			0,											// deUint32						waitSemaphoreCount;
+			DE_NULL,									// const VkSemaphore*			pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,											// deUint32						commandBufferCount;
+			&copyCmdBuffer.get(),						// const VkCommandBuffer*		pCommandBuffers;
+			0,											// deUint32						signalSemaphoreCount;
+			DE_NULL										// const VkSemaphore*			pSignalSemaphores;
+		};
+		VK_CHECK(m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL));
+
+		// TODO: make this less intrusive
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+	}
+}
+
+void Image::uploadUsingBuffer (vk::VkQueue					queue,
+							   vk::Allocator&					allocator,
+							   vk::VkImageLayout			layout,
+							   vk::VkOffset3D				offset,
+							   int							width,
+							   int							height,
+							   int							depth,
+							   unsigned int					mipLevel,
+							   unsigned int					arrayElement,
+							   vk::VkImageAspectFlagBits	aspect,
+							   const void *					data)
+{
+	DE_ASSERT(layout == vk::VK_IMAGE_LAYOUT_GENERAL || layout == vk::VK_IMAGE_LAYOUT_UNDEFINED || layout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+	de::SharedPtr<Buffer> stagingResource;
+	bool isCombinedType = isCombinedDepthStencilType(vk::mapVkFormat(m_format).type);
+	vk::VkDeviceSize bufferSize = 0;
+	if (!isCombinedType)
+		bufferSize = vk::mapVkFormat(m_format).getPixelSize() *width*height*depth;
+	if (isCombinedType)
+	{
+		int pixelSize = 0;
+		switch (m_format)
+		{
+			case vk::VK_FORMAT_D16_UNORM_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 2 : 1;
+				break;
+			case  vk::VK_FORMAT_D32_SFLOAT_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 4 : 1;
+				break;
+			case vk::VK_FORMAT_X8_D24_UNORM_PACK32:
+			case vk::VK_FORMAT_D24_UNORM_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 3 : 1;
+				break;
+
+			default:
+				DE_FATAL("Not implemented");
+		}
+		bufferSize = pixelSize*width*height*depth;
+	}
+	BufferCreateInfo stagingBufferResourceCreateInfo(bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT | vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
+	stagingResource = Buffer::createAndAlloc(m_vk, m_device, stagingBufferResourceCreateInfo, allocator, vk::MemoryRequirement::HostVisible);
+	deUint8* destPtr = reinterpret_cast<deUint8*>(stagingResource->getBoundMemory().getHostPtr());
+	deMemcpy(destPtr, data, static_cast<size_t>(bufferSize));
+	vk::flushMappedMemoryRange(m_vk, m_device, stagingResource->getBoundMemory().getMemory(), stagingResource->getBoundMemory().getOffset(), bufferSize);
+	{
+		//todo [scygan] get proper queueFamilyIndex
+		CmdPoolCreateInfo copyCmdPoolCreateInfo(0);
+		vk::Unique<vk::VkCommandPool> copyCmdPool(vk::createCommandPool(m_vk, m_device, &copyCmdPoolCreateInfo));
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*copyCmdPool,										// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					bufferCount;
+		};
+		vk::Unique<vk::VkCommandBuffer> copyCmdBuffer(vk::allocateCommandBuffer(m_vk, m_device, &cmdBufferAllocateInfo));
+
+		CmdBufferBeginInfo beginInfo;
+		VK_CHECK(m_vk.beginCommandBuffer(*copyCmdBuffer, &beginInfo));
+
+		if (layout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
+		{
+			layout = vk::VK_IMAGE_LAYOUT_GENERAL;
+
+			vk::VkImageMemoryBarrier barrier;
+			barrier.sType = vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+			barrier.pNext = DE_NULL;
+			barrier.srcAccessMask = 0;
+			barrier.dstAccessMask = 0;
+			barrier.oldLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
+			barrier.newLayout = vk::VK_IMAGE_LAYOUT_GENERAL;
+			barrier.srcQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.dstQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.image = object();
+
+			barrier.subresourceRange.aspectMask = aspect;
+			barrier.subresourceRange.baseMipLevel = 0;
+			barrier.subresourceRange.levelCount = m_levelCount;
+			barrier.subresourceRange.baseArrayLayer = 0;
+			barrier.subresourceRange.layerCount = m_layerCount;
+
+			m_vk.cmdPipelineBarrier(*copyCmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+									0, (const vk::VkMemoryBarrier*)DE_NULL,
+									0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+									1, &barrier);
+		}
+
+		vk::VkBufferImageCopy region = {
+			0, 0, 0,
+			{ aspect, mipLevel, arrayElement, 1 },
+			offset,
+			{ (deUint32)width, (deUint32)height, (deUint32)depth }
+		};
+
+		m_vk.cmdCopyBufferToImage(*copyCmdBuffer, stagingResource->object(),
+			object(), layout, 1, &region);
+		VK_CHECK(m_vk.endCommandBuffer(*copyCmdBuffer));
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,			// VkStructureType				sType;
+			DE_NULL,									// const void*					pNext;
+			0,											// deUint32						waitSemaphoreCount;
+			DE_NULL,									// const VkSemaphore*			pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,											// deUint32						commandBufferCount;
+			&copyCmdBuffer.get(),						// const VkCommandBuffer*		pCommandBuffers;
+			0,											// deUint32						signalSemaphoreCount;
+			DE_NULL										// const VkSemaphore*			pSignalSemaphores;
+		};
+		VK_CHECK(m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL));
+
+		// TODO: make this less intrusive
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+	}
+}
+
+void Image::uploadLinear (vk::VkOffset3D			offset,
+						  int						width,
+						  int						height,
+						  int						depth,
+						  unsigned int				mipLevel,
+						  unsigned int				arrayElement,
+						  vk::VkImageAspectFlagBits	aspect,
+						  const void *				data)
+{
+	vk::VkSubresourceLayout imageLayout;
+
+	vk::VkImageSubresource imageSubResource = {aspect, mipLevel, arrayElement};
+
+	m_vk.getImageSubresourceLayout(m_device, object(), &imageSubResource,
+													&imageLayout);
+
+	deUint8* destPtr = reinterpret_cast<deUint8*>(getBoundMemory().getHostPtr());
+
+	destPtr += imageLayout.offset + getPixelOffset(offset, imageLayout.rowPitch, imageLayout.depthPitch, mipLevel, arrayElement);
+
+	MemoryOp::pack(vk::mapVkFormat(m_format).getPixelSize(), width, height, depth,
+		imageLayout.rowPitch, imageLayout.depthPitch, data, destPtr);
+}
+
+vk::VkDeviceSize Image::getPixelOffset (vk::VkOffset3D		offset,
+										vk::VkDeviceSize	rowPitch,
+										vk::VkDeviceSize	depthPitch,
+										unsigned int		level,
+										unsigned int		layer)
+{
+	DE_ASSERT(level < m_levelCount);
+	DE_ASSERT(layer < m_layerCount);
+
+	vk::VkDeviceSize mipLevelSizes[32];
+	vk::VkDeviceSize mipLevelRectSizes[32];
+	tcu::IVec3 mipExtend = tcu::IVec3(m_extent.width, m_extent.height, m_extent.depth);
+
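+	// The same row/depth pitch is used for every level below, so the computed offset is
+	// only approximate for mipmapped images.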
+	vk::VkDeviceSize arrayElemSize = 0;
+	for (unsigned int i = 0; i < m_levelCount && (mipExtend[0] > 1 || mipExtend[1] > 1 || mipExtend[2] > 1); ++i)
+	{
+		// Rect size is a single depth slice; level size is the whole 3D mip level.
+		mipLevelRectSizes[i]	= mipExtend[1] * rowPitch;
+		mipLevelSizes[i]		= mipExtend[2] * depthPitch;
+
+		arrayElemSize += mipLevelSizes[i];
+
+		mipExtend = tcu::max(mipExtend / 2, tcu::IVec3(1));
+	}
+
+	vk::VkDeviceSize pixelOffset = layer * arrayElemSize;
+	for (size_t i = 0; i < level; ++i)
+	{
+		pixelOffset += mipLevelSizes[i];
+	}
+	pixelOffset += offset.z * mipLevelRectSizes[level];
+	pixelOffset += offset.y * rowPitch;
+	pixelOffset += offset.x;
+
+	return pixelOffset;
+}
+
+void Image::bindMemory (de::MovePtr<vk::Allocation> allocation)
+{
+	DE_ASSERT(allocation);
+	VK_CHECK(m_vk.bindImageMemory(m_device, *m_object, allocation->getMemory(), allocation->getOffset()));
+
+	DE_ASSERT(!m_allocation);
+	m_allocation = allocation;
+}
+
+de::SharedPtr<Image> Image::createAndAlloc(const vk::DeviceInterface&	vk,
+										   vk::VkDevice					device,
+										   const vk::VkImageCreateInfo& createInfo,
+										   vk::Allocator&					allocator,
+										   vk::MemoryRequirement		memoryRequirement)
+{
+	de::SharedPtr<Image> ret = create(vk, device, createInfo);
+
+	vk::VkMemoryRequirements imageRequirements = vk::getImageMemoryRequirements(vk, device, ret->object());
+	ret->bindMemory(allocator.allocate(imageRequirements, memoryRequirement));
+	return ret;
+}
+
+de::SharedPtr<Image> Image::create(const vk::DeviceInterface&	vk,
+								   vk::VkDevice					device,
+								   const vk::VkImageCreateInfo	&createInfo)
+{
+	return de::SharedPtr<Image>(new Image(vk, device, createInfo.format, createInfo.extent,
+								createInfo.mipLevels, createInfo.arrayLayers,
+								vk::createImage(vk, device, &createInfo)));
+}
+
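+// Records a layout-transition barrier covering the first mip level and array layer of a 2D image.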
+void transition2DImage (const vk::DeviceInterface&	vk,
+						vk::VkCommandBuffer				cmdBuffer,
+						vk::VkImage					image,
+						vk::VkImageAspectFlags		aspectMask,
+						vk::VkImageLayout			oldLayout,
+						vk::VkImageLayout			newLayout)
+{
+	vk::VkImageMemoryBarrier barrier;
+	barrier.sType							= vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+	barrier.pNext							= DE_NULL;
+	barrier.srcAccessMask					= 0;
+	barrier.dstAccessMask					= 0;
+	barrier.oldLayout						= oldLayout;
+	barrier.newLayout						= newLayout;
+	barrier.srcQueueFamilyIndex				= vk::VK_QUEUE_FAMILY_IGNORED;
+	barrier.dstQueueFamilyIndex				= vk::VK_QUEUE_FAMILY_IGNORED;
+	barrier.image							= image;
+	barrier.subresourceRange.aspectMask		= aspectMask;
+	barrier.subresourceRange.baseMipLevel	= 0;
+	barrier.subresourceRange.levelCount		= 1;
+	barrier.subresourceRange.baseArrayLayer = 0;
+	barrier.subresourceRange.layerCount		= 1;
+
+	vk.cmdPipelineBarrier(cmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+						  0, (const vk::VkMemoryBarrier*)DE_NULL,
+						  0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+						  1, &barrier);
+}
+
+void initialTransitionColor2DImage (const vk::DeviceInterface &vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout)
+{
+	transition2DImage(vk, cmdBuffer, image, vk::VK_IMAGE_ASPECT_COLOR_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, layout);
+}
+
+void initialTransitionDepth2DImage (const vk::DeviceInterface &vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout)
+{
+	transition2DImage(vk, cmdBuffer, image, vk::VK_IMAGE_ASPECT_DEPTH_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, layout);
+}
+
+void initialTransitionStencil2DImage (const vk::DeviceInterface &vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout)
+{
+	transition2DImage(vk, cmdBuffer, image, vk::VK_IMAGE_ASPECT_STENCIL_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, layout);
+}
+
+void initialTransitionDepthStencil2DImage (const vk::DeviceInterface &vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout)
+{
+	transition2DImage(vk, cmdBuffer, image, vk::VK_IMAGE_ASPECT_DEPTH_BIT | vk::VK_IMAGE_ASPECT_STENCIL_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, layout);
+}
+
+} // Draw
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawImageObjectUtil.hpp b/external/vulkancts/modules/vulkan/draw/vktDrawImageObjectUtil.hpp
new file mode 100644
index 0000000..58cd0c5
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawImageObjectUtil.hpp
@@ -0,0 +1,287 @@
+#ifndef _VKTDRAWIMAGEOBJECTUTIL_HPP
+#define _VKTDRAWIMAGEOBJECTUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image Object Util
+ *//*--------------------------------------------------------------------*/
+
+#include "vkMemUtil.hpp"
+#include "vkRefUtil.hpp"
+
+#include "deSharedPtr.hpp"
+
+#include "tcuTexture.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+
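+//! Helpers for packing pixel data into, and unpacking it from, linearly laid out image memory.
+//! A rowPitchOrZero/depthPitchOrZero of zero presumably means tightly packed rows/slices.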
+class MemoryOp
+{
+public:
+	static void pack	(int					pixelSize,
+						 int					width,
+						 int					height,
+						 int					depth,
+						 vk::VkDeviceSize		rowPitchOrZero,
+						 vk::VkDeviceSize		depthPitchOrZero,
+						 const void *			srcBuffer,
+						 void *					destBuffer);
+
+	static void unpack	(int					pixelSize,
+						 int					width,
+						 int					height,
+						 int					depth,
+						 vk::VkDeviceSize		rowPitchOrZero,
+						 vk::VkDeviceSize		depthPitchOrZero,
+						 const void *			srcBuffer,
+						 void *					destBuffer);
+};
+
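+//! Test utility that owns a VkImage together with its backing allocation and provides upload/readback helpers.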
+class Image
+{
+public:
+	static de::SharedPtr<Image> create				(const vk::DeviceInterface& vk, vk::VkDevice device, const vk::VkImageCreateInfo& createInfo);
+
+	static de::SharedPtr<Image> createAndAlloc		(const vk::DeviceInterface&				vk,
+													 vk::VkDevice							device,
+													 const vk::VkImageCreateInfo&			createInfo,
+													 vk::Allocator&							allocator,
+													 vk::MemoryRequirement					memoryRequirement = vk::MemoryRequirement::Any);
+
+	tcu::ConstPixelBufferAccess readSurface			(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	tcu::ConstPixelBufferAccess readSurface1D		(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	tcu::ConstPixelBufferAccess readVolume			(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	tcu::ConstPixelBufferAccess readSurfaceLinear	(vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	void						read				(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 vk::VkImageType						type,
+													 void *									data);
+
+	void						readUsingBuffer		(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 void *									data);
+
+	void						readLinear			(vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 void *									data);
+
+	void						uploadVolume		(const tcu::ConstPixelBufferAccess&		access,
+													 vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	void						uploadSurface		 (const tcu::ConstPixelBufferAccess&	access,
+														vk::VkQueue							queue,
+														vk::Allocator&						allocator,
+														vk::VkImageLayout					layout,
+														vk::VkOffset3D						offset,
+														vk::VkImageAspectFlagBits			aspect,
+														unsigned int						mipLevel = 0,
+														unsigned int						arrayElement = 0);
+
+	void						uploadSurface1D		(const tcu::ConstPixelBufferAccess&		access,
+													 vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	void						uploadSurfaceLinear	(const tcu::ConstPixelBufferAccess&		access,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	void						upload				(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 vk::VkImageType						type,
+													 const void *							data);
+
+	void						uploadUsingBuffer	(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 const void *							data);
+
+	void						uploadLinear		(vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 const void *							data);
+
+	de::SharedPtr<Image>		copyToLinearImage	(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 vk::VkImageType						type);
+
+	const vk::VkFormat&			getFormat			(void) const											{ return m_format;		}
+	vk::VkImage					object				(void) const											{ return *m_object;		}
+	void						bindMemory			(de::MovePtr<vk::Allocation>			allocation);
+	vk::Allocation				getBoundMemory		(void) const											{ return *m_allocation; }
+
+private:
+	vk::VkDeviceSize			getPixelOffset		(vk::VkOffset3D							offset,
+													 vk::VkDeviceSize						rowPitch,
+													 vk::VkDeviceSize						depthPitch,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement);
+
+								Image				(const vk::DeviceInterface&				vk,
+													 vk::VkDevice							device,
+													 vk::VkFormat							format,
+													 const vk::VkExtent3D&					extend,
+													 deUint32								levelCount,
+													 deUint32								layerCount,
+													 vk::Move<vk::VkImage>					object);
+
+	Image											(const Image& other);	// Not allowed!
+	Image&						operator=			(const Image& other);	// Not allowed!
+
+	de::MovePtr<vk::Allocation>	m_allocation;
+	vk::Unique<vk::VkImage>		m_object;
+
+	vk::VkFormat				m_format;
+	vk::VkExtent3D				m_extent;
+	deUint32					m_levelCount;
+	deUint32					m_layerCount;
+
+	std::vector<deUint8>		m_pixelAccessData;
+
+	const vk::DeviceInterface&	m_vk;
+	vk::VkDevice				m_device;
+};
+
+void transition2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageAspectFlags aspectMask, vk::VkImageLayout oldLayout, vk::VkImageLayout newLayout);
+
+void initialTransitionColor2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout);
+
+void initialTransitionDepth2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout);
+
+void initialTransitionStencil2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout);
+
+void initialTransitionDepthStencil2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout);
+
+} // Draw
+} // vkt
+
+#endif // _VKTDRAWIMAGEOBJECTUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawIndexedTest.cpp b/external/vulkancts/modules/vulkan/draw/vktDrawIndexedTest.cpp
new file mode 100644
index 0000000..8a5cd53
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawIndexedTest.cpp
@@ -0,0 +1,375 @@
+/*------------------------------------------------------------------------
+* Vulkan Conformance Tests
+* ------------------------
+*
+* Copyright (c) 2015 The Khronos Group Inc.
+* Copyright (c) 2015 Intel Corporation
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and/or associated documentation files (the
+* "Materials"), to deal in the Materials without restriction, including
+* without limitation the rights to use, copy, modify, merge, publish,
+* distribute, sublicense, and/or sell copies of the Materials, and to
+* permit persons to whom the Materials are furnished to do so, subject to
+* the following conditions:
+*
+* The above copyright notice(s) and this permission notice shall be included
+* in all copies or substantial portions of the Materials.
+*
+* The Materials are Confidential Information as defined by the
+* Khronos Membership Agreement until designated non-confidential by Khronos,
+* at which point this condition clause shall be removed.
+*
+* THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+* MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+*
+*//*!
+* \file
+* \brief Draw Indexed Tests
+*//*--------------------------------------------------------------------*/
+
+#include "vktDrawIndexedTest.hpp"
+
+#include "vktTestCaseUtil.hpp"
+#include "vktDrawTestCaseUtil.hpp"
+
+#include "vktDrawBaseClass.hpp"
+
+#include "tcuTestLog.hpp"
+#include "tcuResource.hpp"
+#include "tcuImageCompare.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuRGBA.hpp"
+
+#include "vkDefs.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+namespace
+{
+class DrawIndexed : public DrawTestsBaseClass
+{
+public:
+								DrawIndexed				(Context &context, ShaderMap shaders, vk::VkPrimitiveTopology topology);
+	virtual		tcu::TestStatus iterate					(void);
+
+protected:
+	std::vector<deUint32>		m_indexes;
+	de::SharedPtr<Buffer>		m_indexBuffer;
+};
+
+class DrawInstancedIndexed : public DrawIndexed
+{
+public:
+								DrawInstancedIndexed	(Context &context, ShaderMap shaders, vk::VkPrimitiveTopology topology);
+	virtual		tcu::TestStatus	iterate					(void);
+};
+
+DrawIndexed::DrawIndexed (Context &context, ShaderMap shaders, vk::VkPrimitiveTopology topology)
+		: DrawTestsBaseClass(context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+{
+	m_topology = topology;
+
+	/*0*/ m_data.push_back(PositionColorVertex(tcu::Vec4(	-0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+	/*1*/ m_data.push_back(PositionColorVertex(tcu::Vec4(	-1.0f,	 1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+	/*2*/ m_data.push_back(PositionColorVertex(tcu::Vec4(	-0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+	/*3*/ m_data.push_back(PositionColorVertex(tcu::Vec4(	 1.0f,	-1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+	/*4*/ m_data.push_back(PositionColorVertex(tcu::Vec4(	-0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+	/*5*/ m_data.push_back(PositionColorVertex(tcu::Vec4(	 0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+	/*6*/ m_data.push_back(PositionColorVertex(tcu::Vec4(	 0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+	/*7*/ m_data.push_back(PositionColorVertex(tcu::Vec4(	 0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+	/*8*/ m_data.push_back(PositionColorVertex(tcu::Vec4(	-1.0f,	 1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+
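+	// iterate() issues the draw with firstIndex = 2, so the first two indices per topology act as padding that exercises the index offset.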
+	switch (m_topology)
+	{
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
+			m_indexes.push_back(0);
+			m_indexes.push_back(0);
+			m_indexes.push_back(2);
+			m_indexes.push_back(0);
+			m_indexes.push_back(6);
+			m_indexes.push_back(6);
+			m_indexes.push_back(0);
+			m_indexes.push_back(7);
+			break;
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
+			m_indexes.push_back(0);
+			m_indexes.push_back(0);
+			m_indexes.push_back(2);
+			m_indexes.push_back(0);
+			m_indexes.push_back(6);
+			m_indexes.push_back(5);
+			m_indexes.push_back(0);
+			m_indexes.push_back(7);
+			break;
+
+		case vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LAST:
+			DE_FATAL("Topology not implemented");
+			break;
+		default:
+			DE_FATAL("Unknown topology");
+			break;
+	}
+	initialize();
+}
+
+tcu::TestStatus DrawIndexed::iterate (void)
+{
+	tcu::TestLog &log			= m_context.getTestContext().getLog();
+	const vk::VkQueue queue		= m_context.getUniversalQueue();
+
+	beginRenderPass();
+
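+	// Create a host-visible index buffer, copy the indices in and flush the range so the device sees the data.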
+	const vk::VkDeviceSize dataSize = m_indexes.size() * sizeof(deUint32);
+	m_indexBuffer = Buffer::createAndAlloc(	m_vk, m_context.getDevice(),
+											BufferCreateInfo(dataSize,
+															 vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT),
+											m_context.getDefaultAllocator(),
+											vk::MemoryRequirement::HostVisible);
+
+	deUint8* ptr = reinterpret_cast<deUint8*>(m_indexBuffer->getBoundMemory().getHostPtr());
+
+	deMemcpy(ptr, &m_indexes[0], static_cast<size_t>(dataSize));
+
+	vk::flushMappedMemoryRange(m_vk, m_context.getDevice(),
+							   m_indexBuffer->getBoundMemory().getMemory(),
+							   m_indexBuffer->getBoundMemory().getOffset(),
+							   dataSize);
+
+	const vk::VkDeviceSize vertexBufferOffset = 0;
+	const vk::VkBuffer vertexBuffer = m_vertexBuffer->object();
+	const vk::VkBuffer indexBuffer = m_indexBuffer->object();
+
+	m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+	m_vk.cmdBindIndexBuffer(*m_cmdBuffer, indexBuffer, 0, vk::VK_INDEX_TYPE_UINT32);
+
+	m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
+	m_vk.cmdDrawIndexed(*m_cmdBuffer, 6, 1, 2, 0, 0);
+
+	m_vk.cmdEndRenderPass(*m_cmdBuffer);
+	m_vk.endCommandBuffer(*m_cmdBuffer);
+
+	vk::VkSubmitInfo submitInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,							// const void*				pNext;
+		0,										// deUint32					waitSemaphoreCount;
+		DE_NULL,								// const VkSemaphore*		pWaitSemaphores;
+		(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1,										// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+		0,										// deUint32					signalSemaphoreCount;
+		DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL));
+
+	VK_CHECK(m_vk.queueWaitIdle(queue));
+
+	// Validation
+	tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+	referenceFrame.allocLevel(0);
+
+	const deInt32 frameWidth	= referenceFrame.getWidth();
+	const deInt32 frameHeight	= referenceFrame.getHeight();
+
+	tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
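+	// Build the expected image on the CPU: pixels whose normalized device coordinates fall inside the reference rectangle are painted blue.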
+	ReferenceImageCoordinates refCoords;
+
+	for (int y = 0; y < frameHeight; y++)
+	{
+		const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+		for (int x = 0; x < frameWidth; x++)
+		{
+			const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+			if ((yCoord >= refCoords.bottom &&
+				 yCoord <= refCoords.top	&&
+				 xCoord >= refCoords.left	&&
+				 xCoord <= refCoords.right))
+				referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f), x, y);
+		}
+	}
+
+	const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+	const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+		vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+	qpTestResult res = QP_TEST_RESULT_PASS;
+
+	if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+		referenceFrame.getLevel(0), renderedFrame, 0.05f,
+		tcu::COMPARE_LOG_RESULT)) {
+		res = QP_TEST_RESULT_FAIL;
+	}
+
+	return tcu::TestStatus(res, qpGetTestResultName(res));
+}
+
+DrawInstancedIndexed::DrawInstancedIndexed (Context &context, ShaderMap shaders, vk::VkPrimitiveTopology topology)
+	: DrawIndexed	(context, shaders, topology)
+{
+}
+
+tcu::TestStatus DrawInstancedIndexed::iterate (void)
+{
+	tcu::TestLog &log		= m_context.getTestContext().getLog();
+	const vk::VkQueue queue = m_context.getUniversalQueue();
+
+	beginRenderPass();
+
+	const vk::VkDeviceSize dataSize = m_indexes.size() * sizeof(deUint32);
+	m_indexBuffer = Buffer::createAndAlloc(	m_vk, m_context.getDevice(),
+											BufferCreateInfo(dataSize,
+															 vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT),
+											m_context.getDefaultAllocator(),
+											vk::MemoryRequirement::HostVisible);
+
+	deUint8* ptr = reinterpret_cast<deUint8*>(m_indexBuffer->getBoundMemory().getHostPtr());
+
+	deMemcpy(ptr, &m_indexes[0], static_cast<size_t>(dataSize));
+	vk::flushMappedMemoryRange(m_vk, m_context.getDevice(),
+							   m_indexBuffer->getBoundMemory().getMemory(),
+							   m_indexBuffer->getBoundMemory().getOffset(),
+							   dataSize);
+
+	const vk::VkDeviceSize vertexBufferOffset = 0;
+	const vk::VkBuffer vertexBuffer = m_vertexBuffer->object();
+	const vk::VkBuffer indexBuffer = m_indexBuffer->object();
+
+	m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+	m_vk.cmdBindIndexBuffer(*m_cmdBuffer, indexBuffer, 0, vk::VK_INDEX_TYPE_UINT32);
+	m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
+	switch (m_topology)
+	{
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
+			m_vk.cmdDrawIndexed(*m_cmdBuffer, 6, 4, 2, 0, 2);
+			break;
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
+			m_vk.cmdDrawIndexed(*m_cmdBuffer, 4, 4, 2, 0, 2);
+			break;
+		case vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LAST:
+			DE_FATAL("Topology not implemented");
+			break;
+		default:
+			DE_FATAL("Unknown topology");
+			break;
+	}
+
+	m_vk.cmdEndRenderPass(*m_cmdBuffer);
+	m_vk.endCommandBuffer(*m_cmdBuffer);
+
+	vk::VkSubmitInfo submitInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,							// const void*				pNext;
+		0,										// deUint32					waitSemaphoreCount;
+		DE_NULL,								// const VkSemaphore*		pWaitSemaphores;
+		(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1,										// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+		0,										// deUint32					signalSemaphoreCount;
+		DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+	};
+	VK_CHECK(m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL));
+
+	VK_CHECK(m_vk.queueWaitIdle(queue));
+
+	// Validation
+	VK_CHECK(m_vk.queueWaitIdle(queue));
+
+	tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+	referenceFrame.allocLevel(0);
+
+	const deInt32 frameWidth = referenceFrame.getWidth();
+	const deInt32 frameHeight = referenceFrame.getHeight();
+
+	tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
+	ReferenceImageInstancedCoordinates refInstancedCoords;
+
+	for (int y = 0; y < frameHeight; y++)
+	{
+		const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+		for (int x = 0; x < frameWidth; x++)
+		{
+			const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+			if ((yCoord >= refInstancedCoords.bottom	&&
+				 yCoord <= refInstancedCoords.top		&&
+				 xCoord >= refInstancedCoords.left		&&
+				 xCoord <= refInstancedCoords.right))
+				referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f), x, y);
+		}
+	}
+
+	const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+	const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+		vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+	qpTestResult res = QP_TEST_RESULT_PASS;
+
+	if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+		referenceFrame.getLevel(0), renderedFrame, 0.05f,
+		tcu::COMPARE_LOG_RESULT)) {
+		res = QP_TEST_RESULT_FAIL;
+	}
+
+	return tcu::TestStatus(res, qpGetTestResultName(res));
+
+}
+
+}	// anonymous
+
+DrawIndexedTests::DrawIndexedTests (tcu::TestContext &testCtx)
+	: TestCaseGroup	(testCtx, "indexed_draw", "drawing indexed geometry")
+{
+	/* Left blank on purpose */
+}
+
+DrawIndexedTests::~DrawIndexedTests (void) {}
+
+void DrawIndexedTests::init (void)
+{
+	ShaderMap shaderPaths;
+	shaderPaths[glu::SHADERTYPE_VERTEX] = "vulkan/draw/VertexFetch.vert";
+	shaderPaths[glu::SHADERTYPE_FRAGMENT] = "vulkan/draw/VertexFetch.frag";
+
+	addChild(new InstanceFactory<DrawIndexed>(m_testCtx, "draw_indexed_triangle_list", "Draws indexed triangle list", shaderPaths, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST));
+	addChild(new InstanceFactory<DrawIndexed>(m_testCtx, "draw_indexed_triangle_strip", "Draws indexed triangle strip", shaderPaths, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP));
+
+	shaderPaths[glu::SHADERTYPE_VERTEX] = "vulkan/draw/VertexFetchWithInstance.vert";
+	shaderPaths[glu::SHADERTYPE_FRAGMENT] = "vulkan/draw/VertexFetch.frag";
+
+	addChild(new InstanceFactory<DrawInstancedIndexed>(m_testCtx, "draw_instanced_indexed_triangle_list", "Draws indexed triangle list", shaderPaths, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST));
+	addChild(new InstanceFactory<DrawInstancedIndexed>(m_testCtx, "draw_instanced_indexed_triangle_strip", "Draws indexed triangle strip", shaderPaths, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP));
+}
+
+}	// Draw
+}	// vkt
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawIndexedTest.hpp b/external/vulkancts/modules/vulkan/draw/vktDrawIndexedTest.hpp
new file mode 100644
index 0000000..b00fea0
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawIndexedTest.hpp
@@ -0,0 +1,59 @@
+#ifndef _VKTDRAWINDEXEDTEST_HPP
+#define _VKTDRAWINDEXEDTEST_HPP
+/*------------------------------------------------------------------------
+* Vulkan Conformance Tests
+* ------------------------
+*
+* Copyright (c) 2015 The Khronos Group Inc.
+* Copyright (c) 2015 Intel Corporation
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and/or associated documentation files (the
+* "Materials"), to deal in the Materials without restriction, including
+* without limitation the rights to use, copy, modify, merge, publish,
+* distribute, sublicense, and/or sell copies of the Materials, and to
+* permit persons to whom the Materials are furnished to do so, subject to
+* the following conditions:
+*
+* The above copyright notice(s) and this permission notice shall be included
+* in all copies or substantial portions of the Materials.
+*
+* The Materials are Confidential Information as defined by the
+* Khronos Membership Agreement until designated non-confidential by Khronos,
+* at which point this condition clause shall be removed.
+*
+* THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+* MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+*
+*//*!
+* \file
+* \brief Draw Indexed Test
+*//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+class DrawIndexedTests : public tcu::TestCaseGroup
+{
+public:
+							DrawIndexedTests		(tcu::TestContext &testCtx);
+							~DrawIndexedTests		(void);
+	void					init					(void);
+
+private:
+	DrawIndexedTests								(const DrawIndexedTests &other);
+	DrawIndexedTests&		operator=				(const DrawIndexedTests &other);
+
+};
+} // Draw
+} // vkt
+
+#endif // _VKTDRAWINDEXEDTEST_HPP
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawIndirectTest.cpp b/external/vulkancts/modules/vulkan/draw/vktDrawIndirectTest.cpp
new file mode 100644
index 0000000..394bb8a
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawIndirectTest.cpp
@@ -0,0 +1,538 @@
+/*------------------------------------------------------------------------
+* Vulkan Conformance Tests
+* ------------------------
+*
+* Copyright (c) 2015 The Khronos Group Inc.
+* Copyright (c) 2015 Intel Corporation
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and/or associated documentation files (the
+* "Materials"), to deal in the Materials without restriction, including
+* without limitation the rights to use, copy, modify, merge, publish,
+* distribute, sublicense, and/or sell copies of the Materials, and to
+* permit persons to whom the Materials are furnished to do so, subject to
+* the following conditions:
+*
+* The above copyright notice(s) and this permission notice shall be included
+* in all copies or substantial portions of the Materials.
+*
+* The Materials are Confidential Information as defined by the
+* Khronos Membership Agreement until designated non-confidential by Khronos,
+* at which point this condition clause shall be removed.
+*
+* THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+* MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+*
+*//*!
+* \file
+* \brief Draw Indirect Test
+*//*--------------------------------------------------------------------*/
+
+#include "vktDrawIndirectTest.hpp"
+
+#include "vktTestCaseUtil.hpp"
+#include "vktDrawTestCaseUtil.hpp"
+
+#include "vktDrawBaseClass.hpp"
+
+#include "tcuTestLog.hpp"
+#include "tcuResource.hpp"
+#include "tcuImageCompare.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuRGBA.hpp"
+
+#include "vkDefs.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+namespace
+{
+struct JunkData
+{
+	JunkData()
+		: varA	(0xcd)
+		, varB	(0xcd)
+	{
+	}
+	const deUint16	varA;
+	const deUint32	varB;
+};
+
+class IndirectDraw : public DrawTestsBaseClass
+{
+public:
+								IndirectDraw	(Context &context, ShaderMap shaders, vk::VkPrimitiveTopology topology);
+	virtual	tcu::TestStatus		iterate			(void);
+private:
+	de::SharedPtr<Buffer>					m_indirectBuffer;
+	std::vector<vk::VkDrawIndirectCommand>	m_indirectDrawCmd;
+	vk::VkDeviceSize						m_offsetInBuffer;
+	deUint32								m_strideInBuffer;
+	deUint32								m_drawCount;
+	JunkData								m_junkData;
+protected:
+	deBool									m_isMultiDrawEnabled;
+	deUint32								m_drawIndirectMaxCount;
+};
+
+class IndirectDrawInstanced : public IndirectDraw
+{
+public:
+								IndirectDrawInstanced	(Context &context, ShaderMap shaders, vk::VkPrimitiveTopology topology);
+	virtual tcu::TestStatus		iterate					(void);
+private:
+	de::SharedPtr<Buffer>					m_indirectBuffer;
+	std::vector<vk::VkDrawIndirectCommand>	m_indirectDrawCmd;
+	vk::VkDeviceSize						m_offsetInBuffer;
+	deUint32								m_strideInBuffer;
+	deUint32								m_drawCount;
+	JunkData								m_junkData;
+};
+
+IndirectDraw::IndirectDraw (Context &context, ShaderMap shaders, vk::VkPrimitiveTopology topology)
+		: DrawTestsBaseClass(context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+{
+	m_topology = topology;
+
+	switch (m_topology)
+	{
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	 1.0f,	-1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-1.0f,	 1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	 0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	 0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	 0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-1.0f,	 1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			break;
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
+			m_data.push_back(PositionColorVertex(tcu::Vec4( 1.0f,	-1.0f,	 1.0f,	 1.0f),	 tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f,	 1.0f,	 1.0f,	 1.0f),	 tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(-0.3f,	 0.0f,	 1.0f,	 1.0f),	 tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4( 0.3f,	 0.0f,	 1.0f,	 1.0f),	 tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(-0.3f,	-0.3f,	 1.0f,	 1.0f),	 tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4( 0.3f,	-0.3f,	 1.0f,	 1.0f),	 tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(-0.3f,	 0.3f,	 1.0f,	 1.0f),	 tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4( 0.3f,	 0.3f,	 1.0f,	 1.0f),	 tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(-0.3f,	 0.0f,	 1.0f,	 1.0f),	 tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4( 0.3f,	 0.0f,	 1.0f,	 1.0f),	 tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f,	 1.0f,	 1.0f,	 1.0f),	 tcu::RGBA::blue().toVec()));
+			break;
+		case vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LAST:
+			DE_FATAL("Topology not implemented");
+			break;
+		default:
+			DE_FATAL("Unknown topology");
+			break;
+	}
+	initialize();
+
+	// Check device for multidraw support:
+	if (m_context.getDeviceFeatures().multiDrawIndirect)
+		m_isMultiDrawEnabled = true;
+	else
+		m_isMultiDrawEnabled = false;
+
+	m_drawIndirectMaxCount = m_context.getDeviceProperties().limits.maxDrawIndirectCount;
+
+}
+
+tcu::TestStatus IndirectDraw::iterate (void)
+{
+	tcu::TestLog &log = m_context.getTestContext().getLog();
+	const vk::VkQueue queue = m_context.getUniversalQueue();
+
+	switch (m_topology)
+	{
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
+		{
+			vk::VkDrawIndirectCommand drawCommands[] =
+			{
+				{
+					3,		//vertexCount
+					1,		//instanceCount
+					2,		//firstVertex
+					0		//firstInstance
+				},
+				{ (deUint32)-4, (deUint32)-2, (deUint32)-11, (deUint32)-9 }, // junk (stride)
+				{
+					3,		//vertexCount
+					1,		//instanceCount
+					5,		//firstVertex
+					0		//firstInstance
+				}
+			};
+			m_indirectDrawCmd.push_back(drawCommands[0]);
+			m_indirectDrawCmd.push_back(drawCommands[1]);
+			m_indirectDrawCmd.push_back(drawCommands[2]);
+			break;
+		}
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
+		{
+			vk::VkDrawIndirectCommand drawCommands[] =
+			{
+				{
+					4,		//vertexCount
+					1,		//instanceCount
+					2,		//firstVertex
+					0		//firstInstance
+				},
+				{ (deUint32)-4, (deUint32)-2, (deUint32)-11, (deUint32)-9 }, // junk (stride)
+				{
+					4,		//vertexCount
+					1,		//instanceCount
+					6,		//firstVertex
+					0		//firstInstance
+				}
+			};
+			m_indirectDrawCmd.push_back(drawCommands[0]);
+			m_indirectDrawCmd.push_back(drawCommands[1]);
+			m_indirectDrawCmd.push_back(drawCommands[2]);
+			break;
+		}
+		case vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LAST:
+			DE_FATAL("Topology not implemented");
+			break;
+		default:
+			DE_FATAL("Unknown topology");
+			break;
+	}
+
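+	// Buffer layout: a junk header of m_offsetInBuffer bytes, then real commands interleaved with junk records; the two-record stride makes the draw skip the junk entries.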
+	m_strideInBuffer	= 2 * (deUint32)sizeof(m_indirectDrawCmd[0]);
+	m_drawCount			= 2;
+	m_offsetInBuffer	= sizeof(m_junkData);
+
+	beginRenderPass();
+
+	const vk::VkDeviceSize vertexBufferOffset	= 0;
+	const vk::VkBuffer vertexBuffer				= m_vertexBuffer->object();
+
+	m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+	const vk::VkDeviceSize dataSize = m_indirectDrawCmd.size()*sizeof(m_indirectDrawCmd[0]);
+
+	m_indirectBuffer = Buffer::createAndAlloc(	m_vk,
+												m_context.getDevice(),
+												BufferCreateInfo(dataSize,
+																 vk::VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT),
+												m_context.getDefaultAllocator(),
+												vk::MemoryRequirement::HostVisible);
+
+	deUint8* ptr = reinterpret_cast<deUint8*>(m_indirectBuffer->getBoundMemory().getHostPtr());
+
+	deMemcpy(ptr, &m_junkData, static_cast<size_t>(m_offsetInBuffer));
+	deMemcpy((ptr+m_offsetInBuffer), &m_indirectDrawCmd[0], static_cast<size_t>(dataSize));
+
+	vk::flushMappedMemoryRange(m_vk,
+							   m_context.getDevice(),
+							   m_indirectBuffer->getBoundMemory().getMemory(),
+							   m_indirectBuffer->getBoundMemory().getOffset(),
+							   dataSize);
+
+	m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
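+	// Prefer a single multi-draw call when the device supports multiDrawIndirect and the count fits maxDrawIndirectCount; otherwise issue one vkCmdDrawIndirect per record.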
+	if (m_isMultiDrawEnabled && m_drawCount <= m_drawIndirectMaxCount)
+		m_vk.cmdDrawIndirect(*m_cmdBuffer, m_indirectBuffer->object(), m_offsetInBuffer, m_drawCount, m_strideInBuffer);
+	else
+	{
+		for(deUint32 drawNdx = 0; drawNdx < m_drawCount; drawNdx++){
+			m_vk.cmdDrawIndirect(*m_cmdBuffer, m_indirectBuffer->object(), m_offsetInBuffer + drawNdx*m_strideInBuffer, 1, m_strideInBuffer);
+		}
+	}
+	m_vk.cmdEndRenderPass(*m_cmdBuffer);
+	m_vk.endCommandBuffer(*m_cmdBuffer);
+
+	vk::VkSubmitInfo submitInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,							// const void*				pNext;
+		0,										// deUint32					waitSemaphoreCount;
+		DE_NULL,								// const VkSemaphore*		pWaitSemaphores;
+		(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1,										// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+		0,										// deUint32					signalSemaphoreCount;
+		DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+	};
+	VK_CHECK(m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL));
+
+	VK_CHECK(m_vk.queueWaitIdle(queue));
+
+	// Validation
+	tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+	referenceFrame.allocLevel(0);
+
+	const deInt32 frameWidth	= referenceFrame.getWidth();
+	const deInt32 frameHeight	= referenceFrame.getHeight();
+
+	tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
+	ReferenceImageCoordinates refCoords;
+
+	for (int y = 0; y < frameHeight; y++)
+	{
+		const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+		for (int x = 0; x < frameWidth; x++)
+		{
+			const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+			if ((yCoord >= refCoords.bottom	&&
+				 yCoord <= refCoords.top	&&
+				 xCoord >= refCoords.left	&&
+				 xCoord <= refCoords.right))
+				referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f), x, y);
+		}
+	}
+
+	const vk::VkOffset3D zeroOffset					= { 0, 0, 0 };
+	const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+		vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+	qpTestResult res = QP_TEST_RESULT_PASS;
+
+	if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+		referenceFrame.getLevel(0), renderedFrame, 0.05f,
+		tcu::COMPARE_LOG_RESULT)) {
+		res = QP_TEST_RESULT_FAIL;
+	}
+
+	return tcu::TestStatus(res, qpGetTestResultName(res));
+
+}
+
+IndirectDrawInstanced::IndirectDrawInstanced (Context &context, ShaderMap shaders, vk::VkPrimitiveTopology topology)
+	: IndirectDraw	(context, shaders, topology)
+{
+}
+
+tcu::TestStatus IndirectDrawInstanced::iterate (void)
+{
+	tcu::TestLog &log = m_context.getTestContext().getLog();
+	const vk::VkQueue queue = m_context.getUniversalQueue();
+
+	switch (m_topology)
+	{
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
+		{
+			vk::VkDrawIndirectCommand drawCmd[] =
+			{
+				{
+					3,		//vertexCount
+					4,		//instanceCount
+					2,		//firstVertex
+					2		//firstInstance
+				},
+				{ (deUint32)-4, (deUint32)-2, (deUint32)-11, (deUint32)-9 }, // junk (stride)
+				{
+					3,		//vertexCount
+					4,		//instanceCount
+					5,		//firstVertex
+					2		//firstInstance
+				}
+			};
+			m_indirectDrawCmd.push_back(drawCmd[0]);
+			m_indirectDrawCmd.push_back(drawCmd[1]);
+			m_indirectDrawCmd.push_back(drawCmd[2]);
+			break;
+		}
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
+		{
+			vk::VkDrawIndirectCommand drawCmd[] =
+			{
+				{
+					4,		//vertexCount
+					4,		//instanceCount
+					2,		//firstVertex
+					2		//firstInstance
+				},
+				{ (deUint32)-4, (deUint32)-2, (deUint32)-11, (deUint32)-9 },
+				{
+					4,		//vertexCount
+					4,		//instanceCount
+					6,		//firstVertex
+					2		//firstInstance
+				}
+			};
+			m_indirectDrawCmd.push_back(drawCmd[0]);
+			m_indirectDrawCmd.push_back(drawCmd[1]);
+			m_indirectDrawCmd.push_back(drawCmd[2]);
+			break;
+		}
+		case vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LAST:
+			DE_FATAL("Topology not implemented");
+			break;
+		default:
+			DE_FATAL("Unknown topology");
+			break;
+	}
+
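+	// Same buffer layout as IndirectDraw::iterate: a junk header followed by real commands interleaved with junk records that the stride skips.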
+	m_strideInBuffer	= 2 * (deUint32)sizeof(m_indirectDrawCmd[0]);
+	m_drawCount			= 2;
+	m_offsetInBuffer	= sizeof(m_junkData);
+
+	beginRenderPass();
+
+	const vk::VkDeviceSize vertexBufferOffset = 0;
+	const vk::VkBuffer vertexBuffer = m_vertexBuffer->object();
+
+	m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+	const vk::VkDeviceSize dataSize = m_indirectDrawCmd.size()*sizeof(m_indirectDrawCmd[0]);
+
+	m_indirectBuffer = Buffer::createAndAlloc(	m_vk,
+												m_context.getDevice(),
+												BufferCreateInfo(dataSize,
+																 vk::VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT),
+												m_context.getDefaultAllocator(),
+												vk::MemoryRequirement::HostVisible);
+
+	deUint8* ptr = reinterpret_cast<deUint8*>(m_indirectBuffer->getBoundMemory().getHostPtr());
+
+	deMemcpy(ptr, &m_junkData, static_cast<size_t>(m_offsetInBuffer));
+	deMemcpy((ptr + m_offsetInBuffer), &m_indirectDrawCmd[0], static_cast<size_t>(dataSize));
+
+	vk::flushMappedMemoryRange(m_vk,
+							   m_context.getDevice(),
+							   m_indirectBuffer->getBoundMemory().getMemory(),
+							   m_indirectBuffer->getBoundMemory().getOffset(),
+							   dataSize);
+
+	m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
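+	// As above: a single multi-draw call when supported, otherwise one indirect draw per record.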
+	if (m_isMultiDrawEnabled && m_drawCount <= m_drawIndirectMaxCount)
+		m_vk.cmdDrawIndirect(*m_cmdBuffer, m_indirectBuffer->object(), m_offsetInBuffer, m_drawCount, m_strideInBuffer);
+	else
+	{
+		for (deUint32 drawNdx = 0; drawNdx < m_drawCount; drawNdx++){
+			m_vk.cmdDrawIndirect(*m_cmdBuffer, m_indirectBuffer->object(), m_offsetInBuffer + drawNdx*m_strideInBuffer, 1, m_strideInBuffer);
+		}
+	}
+	m_vk.cmdEndRenderPass(*m_cmdBuffer);
+	m_vk.endCommandBuffer(*m_cmdBuffer);
+
+	vk::VkSubmitInfo submitInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,							// const void*				pNext;
+		0,										// deUint32					waitSemaphoreCount;
+		DE_NULL,								// const VkSemaphore*		pWaitSemaphores;
+		(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1,										// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+		0,										// deUint32					signalSemaphoreCount;
+		DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+	};
+	VK_CHECK(m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL));
+
+	VK_CHECK(m_vk.queueWaitIdle(queue));
+
+	// Validation
+	VK_CHECK(m_vk.queueWaitIdle(queue));
+
+	tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+
+	referenceFrame.allocLevel(0);
+
+	const deInt32 frameWidth	= referenceFrame.getWidth();
+	const deInt32 frameHeight	= referenceFrame.getHeight();
+
+	tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
+	ReferenceImageInstancedCoordinates refInstancedCoords;
+
+	for (int y = 0; y < frameHeight; y++)
+	{
+		const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+		for (int x = 0; x < frameWidth; x++)
+		{
+			const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+			if ((yCoord >= refInstancedCoords.bottom	&&
+				 yCoord <= refInstancedCoords.top		&&
+				 xCoord >= refInstancedCoords.left		&&
+				 xCoord <= refInstancedCoords.right))
+				referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f), x, y);
+		}
+	}
+
+	const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+	const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+		vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+	qpTestResult res = QP_TEST_RESULT_PASS;
+
+	if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+		referenceFrame.getLevel(0), renderedFrame, 0.05f,
+		tcu::COMPARE_LOG_RESULT)) {
+		res = QP_TEST_RESULT_FAIL;
+	}
+
+	return tcu::TestStatus(res, qpGetTestResultName(res));
+
+}
+
+}	// anonymous
+
+IndirectDrawTests::IndirectDrawTests (tcu::TestContext &testCtx)
+	: TestCaseGroup(testCtx, "indirect_draw", "indirect drawing simple geometry")
+{
+	/* Left blank on purpose */
+}
+
+IndirectDrawTests::~IndirectDrawTests (void) {}
+
+
+void IndirectDrawTests::init (void)
+{
+	ShaderMap shaderPaths;
+	shaderPaths[glu::SHADERTYPE_VERTEX]		= "vulkan/draw/VertexFetch.vert";
+	shaderPaths[glu::SHADERTYPE_FRAGMENT]	= "vulkan/draw/VertexFetch.frag";
+
+	addChild(new InstanceFactory<IndirectDraw>(m_testCtx, "indirect_draw_triangle_list", "Draws triangle list", shaderPaths, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST));
+	addChild(new InstanceFactory<IndirectDraw>(m_testCtx, "indirect_draw_triangle_strip", "Draws triangle strip", shaderPaths, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP));
+
+	shaderPaths[glu::SHADERTYPE_VERTEX]		= "vulkan/draw/VertexFetchWithInstance.vert";
+	shaderPaths[glu::SHADERTYPE_FRAGMENT]	= "vulkan/draw/VertexFetch.frag";
+
+	addChild(new InstanceFactory<IndirectDrawInstanced>(m_testCtx, "indirect_draw_instanced_triangle_list", "Draws an instanced triangle list", shaderPaths, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST));
+	addChild(new InstanceFactory<IndirectDrawInstanced>(m_testCtx, "indirect_draw_instanced_triangle_strip", "Draws an instanced triangle strip", shaderPaths, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP));
+}
+
+}	// Draw
+}	// vkt
\ No newline at end of file
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawIndirectTest.hpp b/external/vulkancts/modules/vulkan/draw/vktDrawIndirectTest.hpp
new file mode 100644
index 0000000..d51d7d5
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawIndirectTest.hpp
@@ -0,0 +1,59 @@
+#ifndef _VKTDRAWINDIRECTTEST_HPP
+#define _VKTDRAWINDIRECTTEST_HPP
+/*------------------------------------------------------------------------
+* Vulkan Conformance Tests
+* ------------------------
+*
+* Copyright (c) 2015 The Khronos Group Inc.
+* Copyright (c) 2015 Intel Corporation
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and/or associated documentation files (the
+* "Materials"), to deal in the Materials without restriction, including
+* without limitation the rights to use, copy, modify, merge, publish,
+* distribute, sublicense, and/or sell copies of the Materials, and to
+* permit persons to whom the Materials are furnished to do so, subject to
+* the following conditions:
+*
+* The above copyright notice(s) and this permission notice shall be included
+* in all copies or substantial portions of the Materials.
+*
+* The Materials are Confidential Information as defined by the
+* Khronos Membership Agreement until designated non-confidential by Khronos,
+* at which point this condition clause shall be removed.
+*
+* THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+* MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+*
+*//*!
+* \file
+* \brief Draw Indirect Test
+*//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+class IndirectDrawTests : public tcu::TestCaseGroup
+{
+public:
+						IndirectDrawTests		(tcu::TestContext &testCtx);
+						~IndirectDrawTests		(void);
+	void				init					(void);
+
+private:
+	IndirectDrawTests							(const IndirectDrawTests &other);
+	IndirectDrawTests&	operator=				(const IndirectDrawTests &other);
+
+};
+} // Draw
+} // vkt
+
+#endif // _VKTDRAWINDIRECTTEST_HPP
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawSimpleTest.cpp b/external/vulkancts/modules/vulkan/draw/vktDrawSimpleTest.cpp
new file mode 100644
index 0000000..4a43913
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawSimpleTest.cpp
@@ -0,0 +1,354 @@
+/*------------------------------------------------------------------------
+* Vulkan Conformance Tests
+* ------------------------
+*
+* Copyright (c) 2015 The Khronos Group Inc.
+* Copyright (c) 2015 Intel Corporation
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and/or associated documentation files (the
+* "Materials"), to deal in the Materials without restriction, including
+* without limitation the rights to use, copy, modify, merge, publish,
+* distribute, sublicense, and/or sell copies of the Materials, and to
+* permit persons to whom the Materials are furnished to do so, subject to
+* the following conditions:
+*
+* The above copyright notice(s) and this permission notice shall be included
+* in all copies or substantial portions of the Materials.
+*
+* The Materials are Confidential Information as defined by the
+* Khronos Membership Agreement until designated non-confidential by Khronos,
+* at which point this condition clause shall be removed.
+*
+* THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+* MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+*
+*//*!
+* \file
+* \brief Simple Draw Tests
+*//*--------------------------------------------------------------------*/
+
+#include "vktDrawSimpleTest.hpp"
+
+#include "vktTestCaseUtil.hpp"
+#include "vktDrawTestCaseUtil.hpp"
+
+#include "vktDrawBaseClass.hpp"
+
+#include "tcuTestLog.hpp"
+#include "tcuResource.hpp"
+#include "tcuImageCompare.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuRGBA.hpp"
+
+#include "vkDefs.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+namespace
+{
+class SimpleDraw : public DrawTestsBaseClass
+{
+public:
+							SimpleDraw				(Context &context, ShaderMap shaders, vk::VkPrimitiveTopology topology);
+	virtual tcu::TestStatus iterate					(void);
+};
+
+class SimpleDrawInstanced : public SimpleDraw
+{
+public:
+							SimpleDrawInstanced		(Context &context, ShaderMap shaders, vk::VkPrimitiveTopology topology);
+	tcu::TestStatus			iterate					(void);
+};
+
+SimpleDraw::SimpleDraw (Context &context, ShaderMap shaders, vk::VkPrimitiveTopology topology)
+	: DrawTestsBaseClass	(context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+{
+	m_topology = topology;
+
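+	// iterate() draws with firstVertex = 2, so the first two vertices per topology are padding that exercises the vertex offset.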
+	switch (m_topology)
+	{
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	 1.0f,	-1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-1.0f,	 1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	 0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	 0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	 0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-1.0f,	 1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			break;
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	 1.0f,	-1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-1.0f,	 1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	 0.3f,	-0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	 0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-0.3f,	 0.3f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(	-1.0f,	 1.0f,	1.0f,	1.0f), tcu::RGBA::blue().toVec()));
+			break;
+		case vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LAST:
+			DE_FATAL("Topology not implemented");
+			break;
+		default:
+			DE_FATAL("Unknown topology");
+			break;
+	}
+	initialize();
+}
+
+tcu::TestStatus SimpleDraw::iterate (void)
+{
+	tcu::TestLog &log							= m_context.getTestContext().getLog();
+	const vk::VkQueue queue						= m_context.getUniversalQueue();
+
+	beginRenderPass();
+
+	const vk::VkDeviceSize vertexBufferOffset	= 0;
+	const vk::VkBuffer vertexBuffer				= m_vertexBuffer->object();
+
+	m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+	m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
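+	// Draw the quad set up in the constructor: 6 vertices for the triangle list, 4 for the strip, starting at vertex 2 to skip the padding vertices.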
+	switch (m_topology)
+	{
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
+			m_vk.cmdDraw(*m_cmdBuffer, 6, 1, 2, 0);
+			break;
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
+			m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 2, 0);
+			break;
+		case vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LAST:
+			DE_FATAL("Topology not implemented");
+			break;
+		default:
+			DE_FATAL("Unknown topology");
+			break;
+	}
+
+	m_vk.cmdEndRenderPass(*m_cmdBuffer);
+	m_vk.endCommandBuffer(*m_cmdBuffer);
+
+	vk::VkSubmitInfo submitInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,							// const void*				pNext;
+		0,										// deUint32					waitSemaphoreCount;
+		DE_NULL,								// const VkSemaphore*		pWaitSemaphores;
+		(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1,										// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+		0,										// deUint32					signalSemaphoreCount;
+		DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+	};
+	VK_CHECK(m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL));
+
+	VK_CHECK(m_vk.queueWaitIdle(queue));
+
+	// Validation
+	tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+
+	referenceFrame.allocLevel(0);
+
+	const deInt32 frameWidth	= referenceFrame.getWidth();
+	const deInt32 frameHeight	= referenceFrame.getHeight();
+
+	tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
+	ReferenceImageCoordinates refCoords;
+
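+	// Build the expected image on the CPU: every pixel whose normalized device coordinates fall inside the reference rectangle is painted blue to match the vertex color.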
+	for (int y = 0; y < frameHeight; y++)
+	{
+		const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+		for (int x = 0; x < frameWidth; x++)
+		{
+			const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+			if ((yCoord >= refCoords.bottom	&&
+				 yCoord <= refCoords.top	&&
+				 xCoord >= refCoords.left	&&
+				 xCoord <= refCoords.right))
+				referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f), x, y);
+		}
+	}
+
+	const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+	const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+		vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+	qpTestResult res = QP_TEST_RESULT_PASS;
+
+	if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+		referenceFrame.getLevel(0), renderedFrame, 0.05f,
+		tcu::COMPARE_LOG_RESULT)) {
+		res = QP_TEST_RESULT_FAIL;
+	}
+
+	return tcu::TestStatus(res, qpGetTestResultName(res));
+
+}
+
+SimpleDrawInstanced::SimpleDrawInstanced (Context &context, ShaderMap shaders, vk::VkPrimitiveTopology topology)
+	: SimpleDraw	(context, shaders, topology) {}
+
+tcu::TestStatus SimpleDrawInstanced::iterate (void)
+{
+	tcu::TestLog &log		= m_context.getTestContext().getLog();
+
+	const vk::VkQueue queue = m_context.getUniversalQueue();
+
+	beginRenderPass();
+
+	const vk::VkDeviceSize vertexBufferOffset = 0;
+	const vk::VkBuffer vertexBuffer = m_vertexBuffer->object();
+
+	m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+	m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
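+	// Instanced variant: draw 4 instances starting at instance 2; the instance-aware vertex shader (VertexFetchWithInstance.vert) is expected to offset each instance so that together they cover the reference area.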
+	switch (m_topology)
+	{
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
+			m_vk.cmdDraw(*m_cmdBuffer, 6, 4, 2, 2);
+			break;
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
+			m_vk.cmdDraw(*m_cmdBuffer, 4, 4, 2, 2);
+			break;
+		case vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+		case vk::VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
+		case vk::VK_PRIMITIVE_TOPOLOGY_LAST:
+			DE_FATAL("Topology not implemented");
+			break;
+		default:
+			DE_FATAL("Unknown topology");
+			break;
+	}
+
+	m_vk.cmdEndRenderPass(*m_cmdBuffer);
+	m_vk.endCommandBuffer(*m_cmdBuffer);
+
+	vk::VkSubmitInfo submitInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,							// const void*				pNext;
+		0,										// deUint32					waitSemaphoreCount;
+		DE_NULL,								// const VkSemaphore*		pWaitSemaphores;
+		(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1,										// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+		0,										// deUint32					signalSemaphoreCount;
+		DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+	};
+	VK_CHECK(m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL));
+
+	VK_CHECK(m_vk.queueWaitIdle(queue));
+
+	// Validation
+	tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+
+	referenceFrame.allocLevel(0);
+
+	const deInt32 frameWidth	= referenceFrame.getWidth();
+	const deInt32 frameHeight	= referenceFrame.getHeight();
+
+	tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
+	ReferenceImageInstancedCoordinates refInstancedCoords;
+
+	for (int y = 0; y < frameHeight; y++)
+	{
+		const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+		for (int x = 0; x < frameWidth; x++)
+		{
+			const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+			if ((yCoord >= refInstancedCoords.bottom	&&
+				 yCoord <= refInstancedCoords.top		&&
+				 xCoord >= refInstancedCoords.left		&&
+				 xCoord <= refInstancedCoords.right))
+				referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f), x, y);
+		}
+	}
+
+	const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+	const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+		vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+	qpTestResult res = QP_TEST_RESULT_PASS;
+
+	if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+		referenceFrame.getLevel(0), renderedFrame, 0.05f,
+		tcu::COMPARE_LOG_RESULT)) {
+		res = QP_TEST_RESULT_FAIL;
+	}
+
+	return tcu::TestStatus(res, qpGetTestResultName(res));
+}
+
+}	// anonymous
+
+SimpleDrawTests::SimpleDrawTests (tcu::TestContext &testCtx)
+: TestCaseGroup	(testCtx, "simple_draw", "drawing simple geometry")
+{
+	/* Left blank on purpose */
+}
+
+SimpleDrawTests::~SimpleDrawTests (void) {}
+
+
+void SimpleDrawTests::init (void)
+{
+	ShaderMap shaderPaths;
+	shaderPaths[glu::SHADERTYPE_VERTEX]		= "vulkan/draw/VertexFetch.vert";
+	shaderPaths[glu::SHADERTYPE_FRAGMENT]	= "vulkan/draw/VertexFetch.frag";
+
+	addChild(new InstanceFactory<SimpleDraw>(m_testCtx, "simple_draw_triangle_list", "Draws triangle list", shaderPaths, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST));
+	addChild(new InstanceFactory<SimpleDraw>(m_testCtx, "simple_draw_triangle_strip", "Draws triangle strip", shaderPaths, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP));
+
+	shaderPaths[glu::SHADERTYPE_VERTEX]		= "vulkan/draw/VertexFetchWithInstance.vert";
+	shaderPaths[glu::SHADERTYPE_FRAGMENT]	= "vulkan/draw/VertexFetch.frag";
+
+	addChild(new InstanceFactory<SimpleDrawInstanced>(m_testCtx, "simple_draw_instanced_triangle_list", "Draws an instanced triangle list", shaderPaths, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST));
+	addChild(new InstanceFactory<SimpleDrawInstanced>(m_testCtx, "simple_draw_instanced_triangle_strip", "Draws an instanced triangle strip", shaderPaths, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP));
+}
+
+}	// Draw
+}	// vkt
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawSimpleTest.hpp b/external/vulkancts/modules/vulkan/draw/vktDrawSimpleTest.hpp
new file mode 100644
index 0000000..c13e503
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawSimpleTest.hpp
@@ -0,0 +1,59 @@
+#ifndef _VKTDRAWSIMPLETEST_HPP
+#define _VKTDRAWSIMPLETEST_HPP
+/*------------------------------------------------------------------------
+* Vulkan Conformance Tests
+* ------------------------
+*
+* Copyright (c) 2015 The Khronos Group Inc.
+* Copyright (c) 2015 Intel Corporation
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and/or associated documentation files (the
+* "Materials"), to deal in the Materials without restriction, including
+* without limitation the rights to use, copy, modify, merge, publish,
+* distribute, sublicense, and/or sell copies of the Materials, and to
+* permit persons to whom the Materials are furnished to do so, subject to
+* the following conditions:
+*
+* The above copyright notice(s) and this permission notice shall be included
+* in all copies or substantial portions of the Materials.
+*
+* The Materials are Confidential Information as defined by the
+* Khronos Membership Agreement until designated non-confidential by Khronos,
+* at which point this condition clause shall be removed.
+*
+* THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+* MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+*
+*//*!
+* \file
+* \brief Draw Simple Test
+*//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+class SimpleDrawTests : public tcu::TestCaseGroup
+{
+public:
+						SimpleDrawTests			(tcu::TestContext &testCtx);
+						~SimpleDrawTests		(void);
+	void				init					(void);
+
+private:
+	SimpleDrawTests								(const SimpleDrawTests &other);
+	SimpleDrawTests&	operator=				(const SimpleDrawTests &other);
+
+};
+} // Draw
+} // vkt
+
+#endif // _VKTDRAWSIMPLETEST_HPP
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawTestCaseUtil.hpp b/external/vulkancts/modules/vulkan/draw/vktDrawTestCaseUtil.hpp
new file mode 100644
index 0000000..6484fe5
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawTestCaseUtil.hpp
@@ -0,0 +1,105 @@
+#ifndef _VKTDRAWTESTCASEUTIL_HPP
+#define _VKTDRAWTESTCASEUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Draw Test Case Utils
+ *//*--------------------------------------------------------------------*/
+
+
+#include "tcuDefs.hpp"
+#include "tcuResource.hpp"
+
+#include "vktTestCase.hpp"
+
+#include "gluShaderUtil.hpp"
+#include "vkPrograms.hpp"
+
+#include "deUniquePtr.hpp"
+
+#include <map>
+
+namespace vkt
+{
+namespace Draw
+{
+
+class ShaderSourceProvider
+{
+public:
+	static std::string getSource (tcu::Archive& archive, const char* path)
+	{
+		const de::UniquePtr<tcu::Resource> resource(archive.getResource(path));
+
+		std::vector<deUint8> readBuffer(resource->getSize() + 1);
+		resource->read(&readBuffer[0], resource->getSize());
+		readBuffer[readBuffer.size() - 1] = 0;
+
+		return reinterpret_cast<const char*>(&readBuffer[0]);
+	}
+};
+
+typedef std::map<glu::ShaderType, const char*> ShaderMap;
+
+template<typename Instance>
+class InstanceFactory : public TestCase
+{
+public:
+	InstanceFactory (tcu::TestContext& testCtx, const std::string& name, const std::string& desc,
+		const std::map<glu::ShaderType, const char*> shaderPaths, const vk::VkPrimitiveTopology topology)
+		: TestCase		(testCtx, name, desc)
+		, m_shaderPaths (shaderPaths)
+		, m_topology	(topology)
+	{
+	}
+
+	TestInstance* createInstance (Context& context) const
+	{
+		return new Instance(context, m_shaderPaths, m_topology);
+	}
+
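+	// Register each GLSL source under its path name; the framework is expected to build SPIR-V binaries from these before createInstance() runs.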
+	virtual void initPrograms (vk::SourceCollections& programCollection) const
+	{
+		for (ShaderMap::const_iterator i = m_shaderPaths.begin(); i != m_shaderPaths.end(); ++i)
+		{
+			programCollection.glslSources.add(i->second) <<
+				glu::ShaderSource(i->first, ShaderSourceProvider::getSource(m_testCtx.getArchive(), i->second));
+		}
+	}
+
+private:
+	const ShaderMap m_shaderPaths;
+	const vk::VkPrimitiveTopology m_topology;
+};
+
+} // Draw
+} // vkt
+
+#endif // _VKTDRAWTESTCASEUTIL_HPP
\ No newline at end of file
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawTests.cpp b/external/vulkancts/modules/vulkan/draw/vktDrawTests.cpp
new file mode 100644
index 0000000..5dbfa9a
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawTests.cpp
@@ -0,0 +1,68 @@
+/*------------------------------------------------------------------------
+* Vulkan Conformance Tests
+* ------------------------
+*
+* Copyright (c) 2015 The Khronos Group Inc.
+* Copyright (c) 2015 Intel Corporation
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and/or associated documentation files (the
+* "Materials"), to deal in the Materials without restriction, including
+* without limitation the rights to use, copy, modify, merge, publish,
+* distribute, sublicense, and/or sell copies of the Materials, and to
+* permit persons to whom the Materials are furnished to do so, subject to
+* the following conditions:
+*
+* The above copyright notice(s) and this permission notice shall be included
+* in all copies or substantial portions of the Materials.
+*
+* The Materials are Confidential Information as defined by the
+* Khronos Membership Agreement until designated non-confidential by Khronos,
+* at which point this condition clause shall be removed.
+*
+* THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+* MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+*
+*//*!
+* \file
+* \brief Draw Tests
+*//*--------------------------------------------------------------------*/
+
+#include "vktDrawTests.hpp"
+
+#include "vktDrawSimpleTest.hpp"
+#include "vktDrawIndexedTest.hpp"
+#include "vktDrawIndirectTest.hpp"
+#include "vktTestGroupUtil.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+
+namespace
+{
+
+void createChildren (tcu::TestCaseGroup* group)
+{
+	tcu::TestContext&	testCtx		= group->getTestContext();
+
+	group->addChild(new SimpleDrawTests(testCtx));
+	group->addChild(new DrawIndexedTests(testCtx));
+	group->addChild(new IndirectDrawTests(testCtx));
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx)
+{
+	return createTestGroup(testCtx, "draw", "Draw tests", createChildren);
+}
+
+} // Draw
+} // vkt
\ No newline at end of file
diff --git a/external/vulkancts/modules/vulkan/draw/vktDrawTests.hpp b/external/vulkancts/modules/vulkan/draw/vktDrawTests.hpp
new file mode 100644
index 0000000..dd8b56f
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/draw/vktDrawTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTDRAWTESTS_HPP
+#define _VKTDRAWTESTS_HPP
+/*------------------------------------------------------------------------
+* Vulkan Conformance Tests
+* ------------------------
+*
+* Copyright (c) 2015 The Khronos Group Inc.
+* Copyright (c) 2015 Intel Corporation
+*
+* Permission is hereby granted, free of charge, to any person obtaining a
+* copy of this software and/or associated documentation files (the
+* "Materials"), to deal in the Materials without restriction, including
+* without limitation the rights to use, copy, modify, merge, publish,
+* distribute, sublicense, and/or sell copies of the Materials, and to
+* permit persons to whom the Materials are furnished to do so, subject to
+* the following conditions:
+*
+* The above copyright notice(s) and this permission notice shall be included
+* in all copies or substantial portions of the Materials.
+*
+* The Materials are Confidential Information as defined by the
+* Khronos Membership Agreement until designated non-confidential by Khronos,
+* at which point this condition clause shall be removed.
+*
+* THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+* MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+*
+*//*!
+* \file
+* \brief Draw Tests
+*//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace Draw
+{
+
+tcu::TestCaseGroup*		createTests (tcu::TestContext& testCtx);
+
+} // Draw
+} // vkt
+
+#endif // _VKTDRAWTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/CMakeLists.txt b/external/vulkancts/modules/vulkan/dynamic_state/CMakeLists.txt
new file mode 100644
index 0000000..b4493d1
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/CMakeLists.txt
@@ -0,0 +1,34 @@
+include_directories(..)
+
+set(DEQP_VK_DYNAMIC_STATE_SRCS
+	vktDynamicStateTestCaseUtil.hpp
+	vktDynamicStateBaseClass.hpp
+	vktDynamicStateBaseClass.cpp
+	vktDynamicStateCBTests.cpp
+	vktDynamicStateCBTests.hpp
+	vktDynamicStateDSTests.cpp
+	vktDynamicStateDSTests.hpp
+	vktDynamicStateGeneralTests.cpp
+	vktDynamicStateGeneralTests.hpp
+	vktDynamicStateRSTests.cpp
+	vktDynamicStateRSTests.hpp
+	vktDynamicStateTests.cpp
+	vktDynamicStateTests.hpp
+	vktDynamicStateVPTests.cpp
+	vktDynamicStateVPTests.hpp
+	vktDynamicStateCreateInfoUtil.hpp
+	vktDynamicStateCreateInfoUtil.cpp
+	vktDynamicStateBufferObjectUtil.hpp
+	vktDynamicStateBufferObjectUtil.cpp
+	vktDynamicStateImageObjectUtil.hpp
+	vktDynamicStateImageObjectUtil.cpp
+)
+
+set(DEQP_VK_DYNAMIC_STATE_LIBS
+	deqp-vk-common
+	tcutil
+	vkutil
+)
+
+add_library(deqp-vk-dynamic-state STATIC ${DEQP_VK_DYNAMIC_STATE_SRCS})
+target_link_libraries(deqp-vk-dynamic-state ${DEQP_VK_DYNAMIC_STATE_LIBS})
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBaseClass.cpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBaseClass.cpp
new file mode 100644
index 0000000..9afd2fb
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBaseClass.cpp
@@ -0,0 +1,279 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic State Tests - Base Class
+ *//*--------------------------------------------------------------------*/
+
+#include "vktDynamicStateBaseClass.hpp"
+
+#include "vkPrograms.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+DynamicStateBaseClass::DynamicStateBaseClass (Context& context, const char* vertexShaderName, const char* fragmentShaderName)
+	: TestInstance				(context)
+	, m_colorAttachmentFormat   (vk::VK_FORMAT_R8G8B8A8_UNORM)
+	, m_topology				(vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP)
+	, m_vk						(context.getDeviceInterface())
+	, m_vertexShaderName		(vertexShaderName)
+	, m_fragmentShaderName		(fragmentShaderName)
+{
+}
+
+void DynamicStateBaseClass::initialize (void)
+{
+	const vk::VkDevice device		= m_context.getDevice();
+	const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
+
+	const PipelineLayoutCreateInfo pipelineLayoutCreateInfo;
+	m_pipelineLayout = vk::createPipelineLayout(m_vk, device, &pipelineLayoutCreateInfo);
+
+	const vk::VkExtent3D targetImageExtent = { WIDTH, HEIGHT, 1 };
+	const ImageCreateInfo targetImageCreateInfo(vk::VK_IMAGE_TYPE_2D, m_colorAttachmentFormat, targetImageExtent, 1, 1, vk::VK_SAMPLE_COUNT_1_BIT,
+												vk::VK_IMAGE_TILING_OPTIMAL, vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+
+	m_colorTargetImage = Image::createAndAlloc(m_vk, device, targetImageCreateInfo, m_context.getDefaultAllocator());
+
+	const ImageViewCreateInfo colorTargetViewInfo(m_colorTargetImage->object(), vk::VK_IMAGE_VIEW_TYPE_2D, m_colorAttachmentFormat);
+	m_colorTargetView = vk::createImageView(m_vk, device, &colorTargetViewInfo);
+
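+	// The color attachment uses LOAD_OP_LOAD rather than CLEAR because the image is cleared explicitly with vkCmdClearColorImage before the render pass begins (see beginRenderPassWithClearColor).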
+	RenderPassCreateInfo renderPassCreateInfo;
+	renderPassCreateInfo.addAttachment(AttachmentDescription(m_colorAttachmentFormat,
+															 vk::VK_SAMPLE_COUNT_1_BIT,
+															 vk::VK_ATTACHMENT_LOAD_OP_LOAD,
+															 vk::VK_ATTACHMENT_STORE_OP_STORE,
+															 vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+															 vk::VK_ATTACHMENT_STORE_OP_STORE,
+															 vk::VK_IMAGE_LAYOUT_GENERAL,
+															 vk::VK_IMAGE_LAYOUT_GENERAL));
+
+	const vk::VkAttachmentReference colorAttachmentReference =
+	{
+		0,
+		vk::VK_IMAGE_LAYOUT_GENERAL
+	};
+
+	renderPassCreateInfo.addSubpass(SubpassDescription(
+		vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
+		0,
+		0,
+		DE_NULL,
+		1,
+		&colorAttachmentReference,
+		DE_NULL,
+		AttachmentReference(),
+		0,
+		DE_NULL));
+
+	m_renderPass = vk::createRenderPass(m_vk, device, &renderPassCreateInfo);
+
+	std::vector<vk::VkImageView> colorAttachments(1);
+	colorAttachments[0] = *m_colorTargetView;
+
+	const FramebufferCreateInfo framebufferCreateInfo(*m_renderPass, colorAttachments, WIDTH, HEIGHT, 1);
+
+	m_framebuffer = vk::createFramebuffer(m_vk, device, &framebufferCreateInfo);
+
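+	// Single interleaved vertex binding: a vec4 position followed by a vec4 color, matching the PositionColorVertex entries stored in m_data.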
+	const vk::VkVertexInputBindingDescription vertexInputBindingDescription =
+	{
+		0,
+		(deUint32)sizeof(tcu::Vec4) * 2,
+		vk::VK_VERTEX_INPUT_RATE_VERTEX,
+	};
+
+	const vk::VkVertexInputAttributeDescription vertexInputAttributeDescriptions[2] =
+	{
+		{
+			0u,
+			0u,
+			vk::VK_FORMAT_R32G32B32A32_SFLOAT,
+			0u
+		},
+		{
+			1u,
+			0u,
+			vk::VK_FORMAT_R32G32B32A32_SFLOAT,
+			(deUint32)(sizeof(float)* 4),
+		}
+	};
+
+	m_vertexInputState = PipelineCreateInfo::VertexInputState(
+		1,
+		&vertexInputBindingDescription,
+		2,
+		vertexInputAttributeDescriptions);
+
+	const vk::VkDeviceSize dataSize = m_data.size() * sizeof(PositionColorVertex);
+	m_vertexBuffer = Buffer::createAndAlloc(m_vk, device, BufferCreateInfo(dataSize, vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT),
+											m_context.getDefaultAllocator(), vk::MemoryRequirement::HostVisible);
+
+	deUint8* ptr = reinterpret_cast<deUint8*>(m_vertexBuffer->getBoundMemory().getHostPtr());
+	deMemcpy(ptr, &m_data[0], (size_t)dataSize);
+
+	vk::flushMappedMemoryRange(m_vk, device,
+		m_vertexBuffer->getBoundMemory().getMemory(),
+		m_vertexBuffer->getBoundMemory().getOffset(),
+		dataSize);
+
+	const CmdPoolCreateInfo cmdPoolCreateInfo(queueFamilyIndex);
+	m_cmdPool = vk::createCommandPool(m_vk, device, &cmdPoolCreateInfo);
+
+	const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+		DE_NULL,											// const void*				pNext;
+		*m_cmdPool,											// VkCommandPool			commandPool;
+		vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+		1u,													// deUint32					bufferCount;
+	};
+	m_cmdBuffer = vk::allocateCommandBuffer(m_vk, device, &cmdBufferAllocateInfo);
+
+	initPipeline(device);
+}
+
+void DynamicStateBaseClass::initPipeline (const vk::VkDevice device)
+{
+	const vk::Unique<vk::VkShaderModule> vs(createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_vertexShaderName), 0));
+	const vk::Unique<vk::VkShaderModule> fs(createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_fragmentShaderName), 0));
+
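+	// The pipeline is created once here; the viewport/scissor, rasterization, blend-constant and depth/stencil values are meant to be supplied at command-recording time through the setDynamic*State helpers below.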
+	const PipelineCreateInfo::ColorBlendState::Attachment vkCbAttachmentState;
+
+	PipelineCreateInfo pipelineCreateInfo(*m_pipelineLayout, *m_renderPass, 0, 0);
+	pipelineCreateInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*vs, "main", vk::VK_SHADER_STAGE_VERTEX_BIT));
+	pipelineCreateInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*fs, "main", vk::VK_SHADER_STAGE_FRAGMENT_BIT));
+	pipelineCreateInfo.addState(PipelineCreateInfo::VertexInputState(m_vertexInputState));
+	pipelineCreateInfo.addState(PipelineCreateInfo::InputAssemblerState(m_topology));
+	pipelineCreateInfo.addState(PipelineCreateInfo::ColorBlendState(1, &vkCbAttachmentState));
+	pipelineCreateInfo.addState(PipelineCreateInfo::ViewportState(1));
+	pipelineCreateInfo.addState(PipelineCreateInfo::DepthStencilState());
+	pipelineCreateInfo.addState(PipelineCreateInfo::RasterizerState());
+	pipelineCreateInfo.addState(PipelineCreateInfo::MultiSampleState());
+	pipelineCreateInfo.addState(PipelineCreateInfo::DynamicState());
+
+	m_pipeline = vk::createGraphicsPipeline(m_vk, device, DE_NULL, &pipelineCreateInfo);
+}
+
+tcu::TestStatus DynamicStateBaseClass::iterate (void)
+{
+	DE_ASSERT(false);
+	return tcu::TestStatus::fail("Implement iterate() method!");
+}
+
+void DynamicStateBaseClass::beginRenderPass (void)
+{
+	const vk::VkClearColorValue clearColor = { { 0.0f, 0.0f, 0.0f, 1.0f } };
+	beginRenderPassWithClearColor(clearColor);
+}
+
+void DynamicStateBaseClass::beginRenderPassWithClearColor (const vk::VkClearColorValue& clearColor)
+{
+	const CmdBufferBeginInfo beginInfo;
+	m_vk.beginCommandBuffer(*m_cmdBuffer, &beginInfo);
+
+	initialTransitionColor2DImage(m_vk, *m_cmdBuffer, m_colorTargetImage->object(), vk::VK_IMAGE_LAYOUT_GENERAL);
+
+	const ImageSubresourceRange subresourceRange(vk::VK_IMAGE_ASPECT_COLOR_BIT);
+	m_vk.cmdClearColorImage(*m_cmdBuffer, m_colorTargetImage->object(),
+		vk::VK_IMAGE_LAYOUT_GENERAL, &clearColor, 1, &subresourceRange);
+
+	const vk::VkRect2D renderArea = { { 0, 0 }, { WIDTH, HEIGHT } };
+	const RenderPassBeginInfo renderPassBegin(*m_renderPass, *m_framebuffer, renderArea);
+
+	m_vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBegin, vk::VK_SUBPASS_CONTENTS_INLINE);
+}
+
+void DynamicStateBaseClass::setDynamicViewportState (const deUint32 width, const deUint32 height)
+{
+	vk::VkViewport viewport;
+	viewport.x = 0;
+	viewport.y = 0;
+	viewport.width = static_cast<float>(width);
+	viewport.height = static_cast<float>(height);
+	viewport.minDepth = 0.0f;
+	viewport.maxDepth = 1.0f;
+
+	m_vk.cmdSetViewport(*m_cmdBuffer, 0, 1, &viewport);
+
+	vk::VkRect2D scissor;
+	scissor.offset.x = 0;
+	scissor.offset.y = 0;
+	scissor.extent.width = width;
+	scissor.extent.height = height;
+	m_vk.cmdSetScissor(*m_cmdBuffer, 0, 1, &scissor);
+}
+
+void DynamicStateBaseClass::setDynamicViewportState (deUint32 viewportCount, const vk::VkViewport* pViewports, const vk::VkRect2D* pScissors)
+{
+	m_vk.cmdSetViewport(*m_cmdBuffer, 0, viewportCount, pViewports);
+	m_vk.cmdSetScissor(*m_cmdBuffer, 0, viewportCount, pScissors);
+}
+
+void DynamicStateBaseClass::setDynamicRasterizationState (const float lineWidth,
+														 const float depthBiasConstantFactor,
+														 const float depthBiasClamp,
+														 const float depthBiasSlopeFactor)
+{
+	m_vk.cmdSetLineWidth(*m_cmdBuffer, lineWidth);
+	m_vk.cmdSetDepthBias(*m_cmdBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+}
+
+void DynamicStateBaseClass::setDynamicBlendState (const float const1, const float const2, const float const3, const float const4)
+{
+	const float blendConstants[4] = { const1, const2, const3, const4 };
+	m_vk.cmdSetBlendConstants(*m_cmdBuffer, blendConstants);
+}
+
+void DynamicStateBaseClass::setDynamicDepthStencilState (const float	minDepthBounds,
+														 const float	maxDepthBounds,
+														 const deUint32 stencilFrontCompareMask,
+														 const deUint32 stencilFrontWriteMask,
+														 const deUint32 stencilFrontReference,
+														 const deUint32 stencilBackCompareMask,
+														 const deUint32 stencilBackWriteMask,
+														 const deUint32 stencilBackReference)
+{
+	m_vk.cmdSetDepthBounds(*m_cmdBuffer, minDepthBounds, maxDepthBounds);
+	m_vk.cmdSetStencilCompareMask(*m_cmdBuffer, vk::VK_STENCIL_FACE_FRONT_BIT, stencilFrontCompareMask);
+	m_vk.cmdSetStencilWriteMask(*m_cmdBuffer, vk::VK_STENCIL_FACE_FRONT_BIT, stencilFrontWriteMask);
+	m_vk.cmdSetStencilReference(*m_cmdBuffer, vk::VK_STENCIL_FACE_FRONT_BIT, stencilFrontReference);
+	m_vk.cmdSetStencilCompareMask(*m_cmdBuffer, vk::VK_STENCIL_FACE_BACK_BIT, stencilBackCompareMask);
+	m_vk.cmdSetStencilWriteMask(*m_cmdBuffer, vk::VK_STENCIL_FACE_BACK_BIT, stencilBackWriteMask);
+	m_vk.cmdSetStencilReference(*m_cmdBuffer, vk::VK_STENCIL_FACE_BACK_BIT, stencilBackReference);
+}
+
+} // DynamicState
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBaseClass.hpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBaseClass.hpp
new file mode 100644
index 0000000..4a4f35d
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBaseClass.hpp
@@ -0,0 +1,125 @@
+#ifndef _VKTDYNAMICSTATEBASECLASS_HPP
+#define _VKTDYNAMICSTATEBASECLASS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic State Tests - Base Class
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktTestCase.hpp"
+
+#include "vktDynamicStateTestCaseUtil.hpp"
+#include "vktDynamicStateImageObjectUtil.hpp"
+#include "vktDynamicStateBufferObjectUtil.hpp"
+#include "vktDynamicStateCreateInfoUtil.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+class DynamicStateBaseClass : public TestInstance
+{
+public:
+	DynamicStateBaseClass (Context& context, const char* vertexShaderName, const char* fragmentShaderName);
+
+protected:
+	void					initialize						(void);
+
+	virtual void			initPipeline					(const vk::VkDevice				device);
+
+	virtual tcu::TestStatus iterate							(void);
+
+	void					beginRenderPass					(void);
+
+	void					beginRenderPassWithClearColor	(const vk::VkClearColorValue&	clearColor);
+
+	void					setDynamicViewportState			(const deUint32					width,
+															const deUint32					height);
+
+	void					setDynamicViewportState			(deUint32						viewportCount,
+															 const vk::VkViewport*			pViewports,
+															 const vk::VkRect2D*			pScissors);
+
+	void					setDynamicRasterizationState	(const float					lineWidth = 1.0f,
+															 const float					depthBiasConstantFactor = 0.0f,
+															 const float					depthBiasClamp = 0.0f,
+															 const float					depthBiasSlopeFactor = 0.0f);
+
+	void					setDynamicBlendState			(const float					const1 = 0.0f, const float const2 = 0.0f,
+															 const float					const3 = 0.0f, const float const4 = 0.0f);
+
+	void					setDynamicDepthStencilState		(const float					minDepthBounds = -1.0f,
+															 const float					maxDepthBounds = 1.0f,
+															 const deUint32					stencilFrontCompareMask = 0xffffffffu,
+															 const deUint32					stencilFrontWriteMask = 0xffffffffu,
+															 const deUint32					stencilFrontReference = 0,
+															 const deUint32					stencilBackCompareMask = 0xffffffffu,
+															 const deUint32					stencilBackWriteMask = 0xffffffffu,
+															 const deUint32					stencilBackReference = 0);
+	enum
+	{
+		WIDTH       = 128,
+		HEIGHT      = 128
+	};
+
+	vk::VkFormat									m_colorAttachmentFormat;
+
+	vk::VkPrimitiveTopology							m_topology;
+
+	const vk::DeviceInterface&						m_vk;
+
+	vk::Move<vk::VkPipeline>						m_pipeline;
+	vk::Move<vk::VkPipelineLayout>					m_pipelineLayout;
+
+	de::SharedPtr<Image>							m_colorTargetImage;
+	vk::Move<vk::VkImageView>						m_colorTargetView;
+
+	PipelineCreateInfo::VertexInputState			m_vertexInputState;
+	de::SharedPtr<Buffer>							m_vertexBuffer;
+
+	vk::Move<vk::VkCommandPool>						m_cmdPool;
+	vk::Move<vk::VkCommandBuffer>					m_cmdBuffer;
+
+	vk::Move<vk::VkFramebuffer>						m_framebuffer;
+	vk::Move<vk::VkRenderPass>						m_renderPass;
+
+	const std::string								m_vertexShaderName;
+	const std::string								m_fragmentShaderName;
+	std::vector<PositionColorVertex>				m_data;
+};
+
+} // DynamicState
+} // vkt
+
+#endif // _VKTDYNAMICSTATEBASECLASS_HPP
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBufferObjectUtil.cpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBufferObjectUtil.cpp
new file mode 100644
index 0000000..c315de5
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBufferObjectUtil.cpp
@@ -0,0 +1,83 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Buffer Object Util
+ *//*--------------------------------------------------------------------*/
+
+#include "vktDynamicStateBufferObjectUtil.hpp"
+
+#include "vkQueryUtil.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+Buffer::Buffer (const vk::DeviceInterface& vk, vk::VkDevice device, vk::Move<vk::VkBuffer> object_)
+	: m_allocation  (DE_NULL)
+	, m_object		(object_)
+	, m_vk			(vk)
+	, m_device		(device)
+{
+}
+
+void Buffer::bindMemory (de::MovePtr<vk::Allocation> allocation)
+{
+	DE_ASSERT(allocation);
+	VK_CHECK(m_vk.bindBufferMemory(m_device, *m_object, allocation->getMemory(), allocation->getOffset()));
+
+	DE_ASSERT(!m_allocation);
+	m_allocation = allocation;
+}
+
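+// Convenience helper: create the buffer, query its memory requirements and bind a fresh allocation in a single step.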
+de::SharedPtr<Buffer> Buffer::createAndAlloc (const vk::DeviceInterface& vk,
+											  vk::VkDevice device,
+											  const vk::VkBufferCreateInfo &createInfo,
+											  vk::Allocator &allocator,
+											  vk::MemoryRequirement memoryRequirement)
+{
+	de::SharedPtr<Buffer> ret = create(vk, device, createInfo);
+
+	vk::VkMemoryRequirements bufferRequirements = vk::getBufferMemoryRequirements(vk, device, ret->object());
+	ret->bindMemory(allocator.allocate(bufferRequirements, memoryRequirement));
+	return ret;
+}
+
+de::SharedPtr<Buffer> Buffer::create (const vk::DeviceInterface& vk,
+									  vk::VkDevice device,
+									  const vk::VkBufferCreateInfo& createInfo)
+{
+	return de::SharedPtr<Buffer>(new Buffer(vk, device, vk::createBuffer(vk, device, &createInfo)));
+}
+
+} // DynamicState
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBufferObjectUtil.hpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBufferObjectUtil.hpp
new file mode 100644
index 0000000..b01d459
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateBufferObjectUtil.hpp
@@ -0,0 +1,83 @@
+#ifndef _VKTDYNAMICSTATEBUFFEROBJECTUTIL_HPP
+#define _VKTDYNAMICSTATEBUFFEROBJECTUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Buffer Object Util
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vkMemUtil.hpp"
+#include "vkRefUtil.hpp"
+
+#include "deSharedPtr.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+class Buffer
+{
+public:
+
+	static de::SharedPtr<Buffer> create			(const vk::DeviceInterface& vk, vk::VkDevice device, const vk::VkBufferCreateInfo &createInfo);
+
+	static de::SharedPtr<Buffer> createAndAlloc (const vk::DeviceInterface&		vk,
+												 vk::VkDevice					device,
+												 const vk::VkBufferCreateInfo&	createInfo,
+												 vk::Allocator&					allocator,
+												 vk::MemoryRequirement			allocationMemoryProperties = vk::MemoryRequirement::Any);
+
+								Buffer			(const vk::DeviceInterface &vk, vk::VkDevice device, vk::Move<vk::VkBuffer> object);
+
+	void						bindMemory		(de::MovePtr<vk::Allocation> allocation);
+
+	vk::VkBuffer				object			(void) const								{ return *m_object;		}
+	vk::Allocation				getBoundMemory	(void) const								{ return *m_allocation;	}
+
+private:
+
+	Buffer										(const Buffer& other);	// Not allowed!
+	Buffer&						operator=		(const Buffer& other);	// Not allowed!
+
+	de::MovePtr<vk::Allocation>		m_allocation;
+	vk::Unique<vk::VkBuffer>		m_object;
+
+	const vk::DeviceInterface&		m_vk;
+	vk::VkDevice					m_device;
+};
+
+} // DynamicState
+} // vkt
+
+#endif // _VKTDYNAMICSTATEBUFFEROBJECTUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCBTests.cpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCBTests.cpp
new file mode 100644
index 0000000..a508a4f
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCBTests.cpp
@@ -0,0 +1,193 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic CB State Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktDynamicStateCBTests.hpp"
+
+#include "vktDynamicStateBaseClass.hpp"
+#include "vktDynamicStateTestCaseUtil.hpp"
+
+#include "vkImageUtil.hpp"
+
+#include "tcuImageCompare.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuRGBA.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+namespace
+{
+
+class BlendConstantsTestInstance : public DynamicStateBaseClass
+{
+public:
+	BlendConstantsTestInstance (Context& context, ShaderMap shaders)
+		: DynamicStateBaseClass	(context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+	{
+		m_topology = vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+
+		DynamicStateBaseClass::initialize();
+	}
+
+	virtual void initPipeline (const vk::VkDevice device)
+	{
+		const vk::Unique<vk::VkShaderModule> vs (createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_vertexShaderName), 0));
+		const vk::Unique<vk::VkShaderModule> fs (createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_fragmentShaderName), 0));
+
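+		// Blend against the constant color/alpha so that the rendered output depends on the values set later with vkCmdSetBlendConstants.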
+		const vk::VkPipelineColorBlendAttachmentState cbAttachmentState =
+			PipelineCreateInfo::ColorBlendState::Attachment(vk::VK_TRUE,
+															vk::VK_BLEND_FACTOR_SRC_ALPHA, vk::VK_BLEND_FACTOR_CONSTANT_COLOR, vk::VK_BLEND_OP_ADD,
+															vk::VK_BLEND_FACTOR_SRC_ALPHA, vk::VK_BLEND_FACTOR_CONSTANT_ALPHA, vk::VK_BLEND_OP_ADD);
+
+		PipelineCreateInfo pipelineCreateInfo(*m_pipelineLayout, *m_renderPass, 0, 0);
+		pipelineCreateInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*vs, "main", vk::VK_SHADER_STAGE_VERTEX_BIT));
+		pipelineCreateInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*fs, "main", vk::VK_SHADER_STAGE_FRAGMENT_BIT));
+		pipelineCreateInfo.addState(PipelineCreateInfo::VertexInputState(m_vertexInputState));
+		pipelineCreateInfo.addState(PipelineCreateInfo::InputAssemblerState(m_topology));
+		pipelineCreateInfo.addState(PipelineCreateInfo::ColorBlendState(1, &cbAttachmentState));
+		pipelineCreateInfo.addState(PipelineCreateInfo::ViewportState(1));
+		pipelineCreateInfo.addState(PipelineCreateInfo::DepthStencilState());
+		pipelineCreateInfo.addState(PipelineCreateInfo::RasterizerState());
+		pipelineCreateInfo.addState(PipelineCreateInfo::MultiSampleState());
+		pipelineCreateInfo.addState(PipelineCreateInfo::DynamicState());
+
+		m_pipeline = vk::createGraphicsPipeline(m_vk, device, DE_NULL, &pipelineCreateInfo);
+	}
+
+	virtual tcu::TestStatus iterate (void)
+	{
+		tcu::TestLog &log = m_context.getTestContext().getLog();
+		const vk::VkQueue queue = m_context.getUniversalQueue();
+
+		const vk::VkClearColorValue clearColor = { { 1.0f, 1.0f, 1.0f, 1.0f } };
+		beginRenderPassWithClearColor(clearColor);
+
+		// bind states here
+		setDynamicViewportState(WIDTH, HEIGHT);
+		setDynamicRasterizationState();
+		setDynamicDepthStencilState();
+		setDynamicBlendState(0.33f, 0.1f, 0.66f, 0.5f);
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
+		const vk::VkDeviceSize vertexBufferOffset = 0;
+		const vk::VkBuffer vertexBuffer = m_vertexBuffer->object();
+		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+		m_vk.cmdDraw(*m_cmdBuffer, static_cast<deUint32>(m_data.size()), 1, 0, 0);
+
+		m_vk.cmdEndRenderPass(*m_cmdBuffer);
+		m_vk.endCommandBuffer(*m_cmdBuffer);
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,									// deUint32					commandBufferCount;
+			&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		VK_CHECK(m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL));
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+
+		//validation
+		{
+			tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+			referenceFrame.allocLevel(0);
+
+			const deInt32 frameWidth = referenceFrame.getWidth();
+			const deInt32 frameHeight = referenceFrame.getHeight();
+
+			tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
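+			// Expected color: src (green, alpha 1.0) * SRC_ALPHA + dst (white clear color) * blend constants (0.33, 0.1, 0.66) gives (0.33, 1.1, 0.66), which the UNORM target clamps to (0.33, 1.0, 0.66).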
+			for (int y = 0; y < frameHeight; y++)
+			{
+				const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+				for (int x = 0; x < frameWidth; x++)
+				{
+					const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+					if ((yCoord >= -1.0f && yCoord <= 1.0f && xCoord >= -1.0f && xCoord <= 1.0f))
+						referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.33f, 1.0f, 0.66f, 1.0f), x, y);
+				}
+			}
+
+			const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+			const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+																							  vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+			if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+				referenceFrame.getLevel(0), renderedFrame, 0.05f,
+				tcu::COMPARE_LOG_RESULT))
+			{
+				return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Image verification failed");
+			}
+
+			return tcu::TestStatus(QP_TEST_RESULT_PASS, "Image verification passed");
+		}
+	}
+};
+
+} //anonymous
+
+DynamicStateCBTests::DynamicStateCBTests (tcu::TestContext& testCtx)
+	: TestCaseGroup (testCtx, "cb_state", "Tests for color blend state")
+{
+	/* Left blank on purpose */
+}
+
+DynamicStateCBTests::~DynamicStateCBTests (void) {}
+
+void DynamicStateCBTests::init (void)
+{
+	ShaderMap shaderPaths;
+	shaderPaths[glu::SHADERTYPE_VERTEX] = "vulkan/dynamic_state/VertexFetch.vert";
+	shaderPaths[glu::SHADERTYPE_FRAGMENT] = "vulkan/dynamic_state/VertexFetch.frag";
+	addChild(new InstanceFactory<BlendConstantsTestInstance>(m_testCtx, "blend_constants", "Check if blend constants are working properly", shaderPaths));
+}
+
+} // DynamicState
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCBTests.hpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCBTests.hpp
new file mode 100644
index 0000000..56263c2
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCBTests.hpp
@@ -0,0 +1,61 @@
+#ifndef _VKTDYNAMICSTATECBTESTS_HPP
+#define _VKTDYNAMICSTATECBTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic CB State Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+class DynamicStateCBTests : public tcu::TestCaseGroup
+{
+public:
+							DynamicStateCBTests			(tcu::TestContext& testCtx);
+							~DynamicStateCBTests		(void);
+	void					init						(void);
+
+private:
+	DynamicStateCBTests									(const DynamicStateCBTests &other);
+	DynamicStateCBTests&	operator=					(const DynamicStateCBTests &other);
+};
+
+} // DynamicState
+} // vkt
+
+#endif // _VKTDYNAMICSTATECBTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCreateInfoUtil.cpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCreateInfoUtil.cpp
new file mode 100644
index 0000000..203f152
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCreateInfoUtil.cpp
@@ -0,0 +1,1196 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief CreateInfo utilities
+ *//*--------------------------------------------------------------------*/
+
+#include "vktDynamicStateCreateInfoUtil.hpp"
+
+#include "vkImageUtil.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+ImageSubresourceRange::ImageSubresourceRange (vk::VkImageAspectFlags	_aspectMask,
+											  deUint32					_baseMipLevel,
+											  deUint32					_levelCount,
+											  deUint32					_baseArrayLayer,
+											  deUint32					_layerCount)
+{
+	aspectMask		= _aspectMask;
+	baseMipLevel	= _baseMipLevel;
+	levelCount		= _levelCount;
+	baseArrayLayer	= _baseArrayLayer;
+	layerCount		= _layerCount;
+}
+
+ComponentMapping::ComponentMapping (vk::VkComponentSwizzle _r,
+									vk::VkComponentSwizzle _g,
+									vk::VkComponentSwizzle _b,
+									vk::VkComponentSwizzle _a)
+{
+	r = _r;
+	g = _g;
+	b = _b;
+	a = _a;
+}
+
+ImageViewCreateInfo::ImageViewCreateInfo (vk::VkImage							_image,
+										  vk::VkImageViewType					_viewType,
+										  vk::VkFormat							_format,
+										  const vk::VkImageSubresourceRange&	_subresourceRange,
+										  const vk::VkComponentMapping&			_components,
+										  vk::VkImageViewCreateFlags			_flags)
+{
+	sType = vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+	pNext = DE_NULL;
+	image				= _image;
+	viewType			= _viewType;
+	format				= _format;
+	components.r		= _components.r;
+	components.g		= _components.g;
+	components.b		= _components.b;
+	components.a		= _components.a;
+	subresourceRange	= _subresourceRange;
+	flags				= _flags;
+}
+
+ImageViewCreateInfo::ImageViewCreateInfo (vk::VkImage					_image,
+										  vk::VkImageViewType			_viewType,
+										  vk::VkFormat					_format,
+										  const vk::VkComponentMapping&	_components,
+										  vk::VkImageViewCreateFlags	_flags)
+{
+	sType = vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+	pNext = DE_NULL;
+	image			= _image;
+	viewType		= _viewType;
+	format			= _format;
+	components.r	= _components.r;
+	components.g	= _components.g;
+	components.b	= _components.b;
+	components.a	= _components.a;
+
+	vk::VkImageAspectFlags aspectFlags;
+	const tcu::TextureFormat tcuFormat = vk::mapVkFormat(_format);
+
+	switch (tcuFormat.order)
+	{
+		case tcu::TextureFormat::D:
+			aspectFlags = vk::VK_IMAGE_ASPECT_DEPTH_BIT;
+			break;
+		case tcu::TextureFormat::S:
+			aspectFlags = vk::VK_IMAGE_ASPECT_STENCIL_BIT;
+			break;
+		case tcu::TextureFormat::DS:
+			aspectFlags = vk::VK_IMAGE_ASPECT_STENCIL_BIT | vk::VK_IMAGE_ASPECT_DEPTH_BIT;
+			break;
+		default:
+			aspectFlags = vk::VK_IMAGE_ASPECT_COLOR_BIT;
+			break;
+	}
+
+	subresourceRange	= ImageSubresourceRange(aspectFlags);
+	flags				= _flags;
+}
+
+BufferViewCreateInfo::BufferViewCreateInfo (vk::VkBuffer		_buffer,
+											vk::VkFormat		_format,
+											vk::VkDeviceSize	_offset,
+											vk::VkDeviceSize	_range)
+{
+	sType = vk::VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+	pNext = DE_NULL;
+
+	flags	= 0u;
+	buffer	= _buffer;
+	format	= _format;
+	offset	= _offset;
+	range	= _range;
+}
+
+BufferCreateInfo::BufferCreateInfo (vk::VkDeviceSize		_size,
+									vk::VkBufferUsageFlags	_usage,
+									vk::VkSharingMode		_sharingMode,
+									deUint32				_queueFamilyIndexCount,
+									const deUint32*			_pQueueFamilyIndices,
+									vk::VkBufferCreateFlags _flags)
+{
+	sType = vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+	pNext = DE_NULL;
+	size					= _size;
+	usage					= _usage;
+	flags					= _flags;
+	sharingMode				= _sharingMode;
+	queueFamilyIndexCount	= _queueFamilyIndexCount;
+
+	if (_queueFamilyIndexCount)
+	{
+		m_queueFamilyIndices = std::vector<deUint32>(
+			_pQueueFamilyIndices, _pQueueFamilyIndices + _queueFamilyIndexCount);
+		pQueueFamilyIndices = &m_queueFamilyIndices[0];
+	}
+	else
+	{
+		pQueueFamilyIndices = _pQueueFamilyIndices;
+	}
+}
+
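+// The copy constructor and assignment operator below re-point pQueueFamilyIndices at this
+// object's own m_queueFamilyIndices vector so the struct never references the source's storage.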
+BufferCreateInfo::BufferCreateInfo (const BufferCreateInfo &other)
+{
+	sType					= other.sType;
+	pNext					= other.pNext;
+	size					= other.size;
+	usage					= other.usage;
+	flags					= other.flags;
+	sharingMode				= other.sharingMode;
+	queueFamilyIndexCount	= other.queueFamilyIndexCount;
+
+	m_queueFamilyIndices	= other.m_queueFamilyIndices;
+	DE_ASSERT(m_queueFamilyIndices.size() == queueFamilyIndexCount);
+
+	if (m_queueFamilyIndices.size())
+	{
+		pQueueFamilyIndices = &m_queueFamilyIndices[0];
+	}
+	else
+	{
+		pQueueFamilyIndices = DE_NULL;
+	}
+}
+
+BufferCreateInfo & BufferCreateInfo::operator= (const BufferCreateInfo &other)
+{
+	sType						= other.sType;
+	pNext						= other.pNext;
+	size						= other.size;
+	usage						= other.usage;
+	flags						= other.flags;
+	sharingMode					= other.sharingMode;
+	queueFamilyIndexCount		= other.queueFamilyIndexCount;
+
+	m_queueFamilyIndices		= other.m_queueFamilyIndices;
+
+	DE_ASSERT(m_queueFamilyIndices.size() == queueFamilyIndexCount);
+
+	if (m_queueFamilyIndices.size())
+	{
+		pQueueFamilyIndices = &m_queueFamilyIndices[0];
+	}
+	else
+	{
+		pQueueFamilyIndices = DE_NULL;
+	}
+
+	return *this;
+}
+
+ImageCreateInfo::ImageCreateInfo (vk::VkImageType			_imageType,
+								  vk::VkFormat				_format,
+								  vk::VkExtent3D			_extent,
+								  deUint32					_mipLevels,
+								  deUint32					_arrayLayers,
+								  vk::VkSampleCountFlagBits	_samples,
+								  vk::VkImageTiling			_tiling,
+								  vk::VkImageUsageFlags		_usage,
+								  vk::VkSharingMode			_sharingMode,
+								  deUint32					_queueFamilyIndexCount,
+								  const deUint32*			_pQueueFamilyIndices,
+								  vk::VkImageCreateFlags	_flags,
+								  vk::VkImageLayout			_initialLayout)
+{
+	if (_queueFamilyIndexCount)
+	{
+		m_queueFamilyIndices = std::vector<deUint32>(_pQueueFamilyIndices, _pQueueFamilyIndices + _queueFamilyIndexCount);
+	}
+
+	sType = vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= _flags;
+	imageType				= _imageType;
+	format					= _format;
+	extent					= _extent;
+	mipLevels				= _mipLevels;
+	arrayLayers				= _arrayLayers;
+	samples					= _samples;
+	tiling					= _tiling;
+	usage					= _usage;
+	sharingMode				= _sharingMode;
+	queueFamilyIndexCount	= _queueFamilyIndexCount;
+
+	if (m_queueFamilyIndices.size())
+	{
+		pQueueFamilyIndices = &m_queueFamilyIndices[0];
+	}
+	else
+	{
+		pQueueFamilyIndices = DE_NULL;
+	}
+	initialLayout	= _initialLayout;
+}
+
+FramebufferCreateInfo::FramebufferCreateInfo (vk::VkRenderPass						_renderPass,
+											  const std::vector<vk::VkImageView>&	attachments,
+											  deUint32								_width,
+											  deUint32								_height,
+											  deUint32								_layers)
+{
+	sType = vk::VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0u;
+
+	renderPass		= _renderPass;
+	attachmentCount	= static_cast<deUint32>(attachments.size());
+
+	if (attachmentCount)
+	{
+		pAttachments = &attachments[0];
+	}
+
+	width	= _width;
+	height	= _height;
+	layers	= _layers;
+}
+
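+// RenderPassCreateInfo stores the attachments, subpasses and dependencies twice: as wrapper
+// objects that own any nested arrays, and as plain Vulkan structs that provide the storage
+// pAttachments, pSubpasses and pDependencies point at.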
+RenderPassCreateInfo::RenderPassCreateInfo (const std::vector<vk::VkAttachmentDescription>&	attachments,
+											const std::vector<vk::VkSubpassDescription>&	subpasses,
+											const std::vector<vk::VkSubpassDependency>&		dependiences)
+
+	: m_attachments			(attachments.begin(), attachments.end())
+	, m_subpasses			(subpasses.begin(), subpasses.end())
+	, m_dependiences		(dependiences.begin(), dependiences.end())
+	, m_attachmentsStructs	(m_attachments.begin(), m_attachments.end())
+	, m_subpassesStructs	(m_subpasses.begin(), m_subpasses.end())
+	, m_dependiencesStructs	(m_dependiences.begin(), m_dependiences.end())
+{
+	sType = vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0;
+
+	attachmentCount = static_cast<deUint32>(m_attachments.size());
+	pAttachments	= &m_attachmentsStructs[0];
+	subpassCount	= static_cast<deUint32>(m_subpasses.size());
+	pSubpasses		= &m_subpassesStructs[0];
+	dependencyCount = static_cast<deUint32>(m_dependiences.size());
+	pDependencies	= &m_dependiencesStructs[0];
+}
+
+RenderPassCreateInfo::RenderPassCreateInfo (deUint32							_attachmentCount,
+											const vk::VkAttachmentDescription*	_pAttachments,
+											deUint32							_subpassCount,
+											const vk::VkSubpassDescription*		_pSubpasses,
+											deUint32							_dependencyCount,
+											const vk::VkSubpassDependency*		_pDependiences)
+{
+
+	m_attachments	= std::vector<AttachmentDescription>(_pAttachments, _pAttachments + _attachmentCount);
+	m_subpasses		= std::vector<SubpassDescription>(_pSubpasses, _pSubpasses + _subpassCount);
+	m_dependiences	= std::vector<SubpassDependency>(_pDependiences, _pDependiences + _dependencyCount);
+
+	m_attachmentsStructs	= std::vector<vk::VkAttachmentDescription>	(m_attachments.begin(),		m_attachments.end());
+	m_subpassesStructs		= std::vector<vk::VkSubpassDescription>		(m_subpasses.begin(),		m_subpasses.end());
+	m_dependiencesStructs	= std::vector<vk::VkSubpassDependency>		(m_dependiences.begin(),	m_dependiences.end());
+
+	sType = vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0;
+
+	attachmentCount = static_cast<deUint32>(m_attachments.size());
+
+	if (attachmentCount) {
+		pAttachments = &m_attachmentsStructs[0];
+	}
+	else
+	{
+		pAttachments = DE_NULL;
+	}
+
+	subpassCount = static_cast<deUint32>(m_subpasses.size());
+
+	if (subpassCount) {
+		pSubpasses = &m_subpassesStructs[0];
+	}
+	else
+	{
+		pSubpasses = DE_NULL;
+	}
+
+	dependencyCount = static_cast<deUint32>(m_dependiences.size());
+
+	if (dependencyCount) {
+		pDependencies = &m_dependiencesStructs[0];
+	}
+	else
+	{
+		pDependencies = DE_NULL;
+	}
+}
+
+void
+RenderPassCreateInfo::addAttachment (vk::VkAttachmentDescription attachment)
+{
+
+	m_attachments.push_back(attachment);
+	m_attachmentsStructs	= std::vector<vk::VkAttachmentDescription>(m_attachments.begin(), m_attachments.end());
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachmentsStructs[0];
+}
+
+void
+RenderPassCreateInfo::addSubpass (vk::VkSubpassDescription subpass)
+{
+
+	m_subpasses.push_back(subpass);
+	m_subpassesStructs	= std::vector<vk::VkSubpassDescription>(m_subpasses.begin(), m_subpasses.end());
+	subpassCount		= static_cast<deUint32>(m_subpasses.size());
+	pSubpasses			= &m_subpassesStructs[0];
+}
+
+void
+RenderPassCreateInfo::addDependency (vk::VkSubpassDependency dependency)
+{
+
+	m_dependiences.push_back(dependency);
+	m_dependiencesStructs	= std::vector<vk::VkSubpassDependency>(m_dependiences.begin(), m_dependiences.end());
+
+	dependencyCount			= static_cast<deUint32>(m_dependiences.size());
+	pDependencies			= &m_dependiencesStructs[0];
+}
+
+RenderPassBeginInfo::RenderPassBeginInfo (vk::VkRenderPass						_renderPass,
+										  vk::VkFramebuffer						_framebuffer,
+										  vk::VkRect2D							_renderArea,
+										  const std::vector<vk::VkClearValue>&	_clearValues)
+{
+
+	m_clearValues	= _clearValues;
+
+	sType			= vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+	pNext			= DE_NULL;
+	renderPass		= _renderPass;
+	framebuffer		= _framebuffer;
+	renderArea		= _renderArea;
+	clearValueCount = static_cast<deUint32>(m_clearValues.size());
+	pClearValues	= m_clearValues.size() ? &m_clearValues[0] : DE_NULL;
+}
+
+CmdPoolCreateInfo::CmdPoolCreateInfo (deUint32 _queueFamilyIndex, vk::VkCommandPoolCreateFlags _flags)
+{
+	sType = vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+	pNext = DE_NULL;
+
+	queueFamilyIndex	= _queueFamilyIndex;
+	flags				= _flags;
+}
+
+AttachmentDescription::AttachmentDescription (vk::VkFormat				_format,
+											  vk::VkSampleCountFlagBits	_samples,
+											  vk::VkAttachmentLoadOp	_loadOp,
+											  vk::VkAttachmentStoreOp	_storeOp,
+											  vk::VkAttachmentLoadOp	_stencilLoadOp,
+											  vk::VkAttachmentStoreOp	_stencilStoreOp,
+											  vk::VkImageLayout			_initialLayout,
+											  vk::VkImageLayout			_finalLayout)
+{
+	flags = 0;
+	format			= _format;
+	samples			= _samples;
+	loadOp			= _loadOp;
+	storeOp			= _storeOp;
+	stencilLoadOp	= _stencilLoadOp;
+	stencilStoreOp	= _stencilStoreOp;
+	initialLayout	= _initialLayout;
+	finalLayout		= _finalLayout;
+}
+
+AttachmentDescription::AttachmentDescription (const vk::VkAttachmentDescription& rhs)
+{
+	flags			= rhs.flags;
+	format			= rhs.format;
+	samples			= rhs.samples;
+	loadOp			= rhs.loadOp;
+	storeOp			= rhs.storeOp;
+	stencilLoadOp	= rhs.stencilLoadOp;
+	stencilStoreOp	= rhs.stencilStoreOp;
+	initialLayout	= rhs.initialLayout;
+	finalLayout		= rhs.finalLayout;
+}
+
+AttachmentReference::AttachmentReference (deUint32 _attachment, vk::VkImageLayout _layout)
+{
+	attachment	= _attachment;
+	layout		= _layout;
+}
+
+AttachmentReference::AttachmentReference (void)
+{
+	attachment = vk::VK_ATTACHMENT_UNUSED;
+	layout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
+}
+
+SubpassDescription::SubpassDescription (vk::VkPipelineBindPoint				_pipelineBindPoint,
+										vk::VkSubpassDescriptionFlags		_flags,
+										deUint32							_inputAttachmentCount,
+										const vk::VkAttachmentReference*	_inputAttachments,
+										deUint32							_colorAttachmentCount,
+										const vk::VkAttachmentReference*	_colorAttachments,
+										const vk::VkAttachmentReference*	_resolveAttachments,
+										vk::VkAttachmentReference			depthStencilAttachment,
+										deUint32							_preserveAttachmentCount,
+										const deUint32*						_preserveAttachments)
+{
+	m_inputAttachments = std::vector<vk::VkAttachmentReference>(_inputAttachments, _inputAttachments + _inputAttachmentCount);
+	m_colorAttachments = std::vector<vk::VkAttachmentReference>(_colorAttachments, _colorAttachments + _colorAttachmentCount);
+
+	if (_resolveAttachments)
+		m_resolveAttachments = std::vector<vk::VkAttachmentReference>(_resolveAttachments, _resolveAttachments + _colorAttachmentCount);
+
+	m_preserveAttachments = std::vector<deUint32>(_preserveAttachments, _preserveAttachments + _preserveAttachmentCount);
+
+	m_depthStencilAttachment = depthStencilAttachment;
+
+	flags					= _flags;
+	pipelineBindPoint		= _pipelineBindPoint;
+	inputAttachmentCount	= _inputAttachmentCount;
+	pInputAttachments		= DE_NULL;
+	colorAttachmentCount	= _colorAttachmentCount;
+	pColorAttachments		= DE_NULL;
+	pResolveAttachments		= DE_NULL;
+	pDepthStencilAttachment	= &m_depthStencilAttachment;
+	pPreserveAttachments	= DE_NULL;
+	preserveAttachmentCount	= _preserveAttachmentCount;
+
+	if (!m_inputAttachments.empty())
+		pInputAttachments = &m_inputAttachments[0];
+
+	if (!m_colorAttachments.empty())
+		pColorAttachments = &m_colorAttachments[0];
+
+	if (!m_resolveAttachments.empty())
+		pResolveAttachments = &m_resolveAttachments[0];
+
+	if (!m_preserveAttachments.empty())
+		pPreserveAttachments = &m_preserveAttachments[0];
+}
+
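+// Constructing from a plain VkSubpassDescription deep-copies the attachment arrays into member
+// vectors and re-points the inherited pointers at them, so the copy remains valid after the
+// source structure goes out of scope.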
+SubpassDescription::SubpassDescription (const vk::VkSubpassDescription& rhs)
+{
+	*static_cast<vk::VkSubpassDescription*>(this) = rhs;
+
+	m_inputAttachments = std::vector<vk::VkAttachmentReference>(
+		rhs.pInputAttachments, rhs.pInputAttachments + rhs.inputAttachmentCount);
+
+	m_colorAttachments = std::vector<vk::VkAttachmentReference>(
+		rhs.pColorAttachments, rhs.pColorAttachments + rhs.colorAttachmentCount);
+
+	if (rhs.pResolveAttachments)
+		m_resolveAttachments = std::vector<vk::VkAttachmentReference>(
+			rhs.pResolveAttachments, rhs.pResolveAttachments + rhs.colorAttachmentCount);
+
+	m_preserveAttachments = std::vector<deUint32>(
+		rhs.pPreserveAttachments, rhs.pPreserveAttachments + rhs.preserveAttachmentCount);
+
+	if (rhs.pDepthStencilAttachment)
+		m_depthStencilAttachment = *rhs.pDepthStencilAttachment;
+
+	if (!m_inputAttachments.empty())
+		pInputAttachments = &m_inputAttachments[0];
+
+	if (!m_colorAttachments.empty())
+		pColorAttachments = &m_colorAttachments[0];
+
+	if (!m_resolveAttachments.empty())
+		pResolveAttachments = &m_resolveAttachments[0];
+
+	pDepthStencilAttachment = &m_depthStencilAttachment;
+
+	if (!m_preserveAttachments.empty())
+		pPreserveAttachments = &m_preserveAttachments[0];
+}
+
+SubpassDescription::SubpassDescription (const SubpassDescription& rhs)
+{
+	*this = rhs;
+}
+
+SubpassDescription& SubpassDescription::operator= (const SubpassDescription& rhs)
+{
+	*static_cast<vk::VkSubpassDescription*>(this) = rhs;
+
+	m_inputAttachments		= rhs.m_inputAttachments;
+	m_colorAttachments		= rhs.m_colorAttachments;
+	m_resolveAttachments	= rhs.m_resolveAttachments;
+	m_preserveAttachments	= rhs.m_preserveAttachments;
+	m_depthStencilAttachment = rhs.m_depthStencilAttachment;
+
+	if (!m_inputAttachments.empty())
+		pInputAttachments = &m_inputAttachments[0];
+
+	if (!m_colorAttachments.empty())
+		pColorAttachments = &m_colorAttachments[0];
+
+	if (!m_resolveAttachments.empty())
+		pResolveAttachments = &m_resolveAttachments[0];
+
+	pDepthStencilAttachment = &m_depthStencilAttachment;
+
+	if (!m_preserveAttachments.empty())
+		pPreserveAttachments = &m_preserveAttachments[0];
+
+	return *this;
+}
+
+SubpassDependency::SubpassDependency (deUint32					_srcSubpass,
+									  deUint32					_dstSubpass,
+									  vk::VkPipelineStageFlags	_srcStageMask,
+									  vk::VkPipelineStageFlags	_dstStageMask,
+									  vk::VkAccessFlags			_srcAccessMask,
+									  vk::VkAccessFlags			_dstAccessMask,
+									  vk::VkDependencyFlags		_dependencyFlags)
+{
+	srcSubpass		= _srcSubpass;
+	dstSubpass		= _dstSubpass;
+	srcStageMask	= _srcStageMask;
+	dstStageMask	= _dstStageMask;
+	srcAccessMask	= _srcAccessMask;
+	dstAccessMask	= _dstAccessMask;
+	dependencyFlags	= _dependencyFlags;
+}
+
+SubpassDependency::SubpassDependency (const vk::VkSubpassDependency& rhs)
+{
+	srcSubpass		= rhs.srcSubpass;
+	dstSubpass		= rhs.dstSubpass;
+	srcStageMask	= rhs.srcStageMask;
+	dstStageMask	= rhs.dstStageMask;
+	srcAccessMask	= rhs.srcAccessMask;
+	dstAccessMask	= rhs.dstAccessMask;
+	dependencyFlags	= rhs.dependencyFlags;
+}
+
+CmdBufferBeginInfo::CmdBufferBeginInfo (vk::VkCommandBufferUsageFlags _flags)
+{
+	sType				= vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+	pNext				= DE_NULL;
+	flags				= _flags;
+	pInheritanceInfo	= DE_NULL;
+}
+
+DescriptorPoolCreateInfo::DescriptorPoolCreateInfo (const std::vector<vk::VkDescriptorPoolSize>&	poolSizeCounts,
+													vk::VkDescriptorPoolCreateFlags					_flags,
+													deUint32										_maxSets)
+	: m_poolSizeCounts(poolSizeCounts)
+{
+	sType = vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+	pNext = DE_NULL;
+	flags			= _flags;
+	maxSets			= _maxSets;
+	poolSizeCount	= static_cast<deUint32>(m_poolSizeCounts.size());
+	pPoolSizes		= &m_poolSizeCounts[0];
+}
+
+DescriptorPoolCreateInfo& DescriptorPoolCreateInfo::addDescriptors (vk::VkDescriptorType type, deUint32 count)
+{
+	vk::VkDescriptorPoolSize descriptorTypeCount = { type, count };
+	m_poolSizeCounts.push_back(descriptorTypeCount);
+
+	poolSizeCount	= static_cast<deUint32>(m_poolSizeCounts.size());
+	pPoolSizes		= &m_poolSizeCounts[0];
+
+	return *this;
+}
+
+DescriptorSetLayoutCreateInfo::DescriptorSetLayoutCreateInfo (deUint32 _bindingCount, const vk::VkDescriptorSetLayoutBinding* _pBindings)
+{
+	sType = vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0;
+	bindingCount = _bindingCount;
+	pBindings	 = _pBindings;
+}
+
+PipelineLayoutCreateInfo::PipelineLayoutCreateInfo (deUint32							_descriptorSetCount,
+													const vk::VkDescriptorSetLayout*	_pSetLayouts,
+													deUint32							_pushConstantRangeCount,
+													const vk::VkPushConstantRange*		_pPushConstantRanges)
+	: m_pushConstantRanges(_pPushConstantRanges, _pPushConstantRanges + _pushConstantRangeCount)
+{
+	for (unsigned int i = 0; i < _descriptorSetCount; i++)
+	{
+		m_setLayouts.push_back(_pSetLayouts[i]);
+	}
+
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+	pNext = DE_NULL;
+	setLayoutCount			= static_cast<deUint32>(m_setLayouts.size());
+	pSetLayouts				= setLayoutCount > 0 ? &m_setLayouts[0] : DE_NULL;
+	pushConstantRangeCount	= static_cast<deUint32>(m_pushConstantRanges.size());
+
+	if (m_pushConstantRanges.size()) {
+		pPushConstantRanges = &m_pushConstantRanges[0];
+	}
+	else
+	{
+		pPushConstantRanges = DE_NULL;
+	}
+}
+
+PipelineLayoutCreateInfo::PipelineLayoutCreateInfo (const std::vector<vk::VkDescriptorSetLayout>&	setLayouts,
+													deUint32										_pushConstantRangeCount,
+													const vk::VkPushConstantRange*					_pPushConstantRanges)
+	: m_setLayouts			(setLayouts)
+	, m_pushConstantRanges	(_pPushConstantRanges, _pPushConstantRanges + _pushConstantRangeCount)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+	pNext = DE_NULL;
+
+	flags			= 0u;
+	setLayoutCount	= static_cast<deUint32>(m_setLayouts.size());
+
+	if (setLayoutCount)
+	{
+		pSetLayouts = &m_setLayouts[0];
+	}
+	else
+	{
+		pSetLayouts = DE_NULL;
+	}
+
+	pushConstantRangeCount = static_cast<deUint32>(m_pushConstantRanges.size());
+	if (pushConstantRangeCount) {
+		pPushConstantRanges = &m_pushConstantRanges[0];
+	}
+	else
+	{
+		pPushConstantRanges = DE_NULL;
+	}
+}
+
+PipelineCreateInfo::PipelineShaderStage::PipelineShaderStage (vk::VkShaderModule _module, const char* _pName, vk::VkShaderStageFlagBits _stage)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0u;
+	stage				= _stage;
+	module				= _module;
+	pName				= _pName;
+	pSpecializationInfo = DE_NULL;
+}
+
+PipelineCreateInfo::VertexInputState::VertexInputState (deUint32										_vertexBindingDescriptionCount,
+														const vk::VkVertexInputBindingDescription*		_pVertexBindingDescriptions,
+														deUint32										_vertexAttributeDescriptionCount,
+														const vk::VkVertexInputAttributeDescription*	_pVertexAttributeDescriptions)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags							= 0u;
+	vertexBindingDescriptionCount	= _vertexBindingDescriptionCount;
+	pVertexBindingDescriptions		= _pVertexBindingDescriptions;
+	vertexAttributeDescriptionCount	= _vertexAttributeDescriptionCount;
+	pVertexAttributeDescriptions	= _pVertexAttributeDescriptions;
+}
+
+PipelineCreateInfo::InputAssemblerState::InputAssemblerState (vk::VkPrimitiveTopology	_topology,
+															  vk::VkBool32				_primitiveRestartEnable)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	topology				= _topology;
+	primitiveRestartEnable	= _primitiveRestartEnable;
+}
+
+PipelineCreateInfo::TessellationState::TessellationState (deUint32 _patchControlPoints)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags				= 0u;
+	patchControlPoints	= _patchControlPoints;
+}
+
+PipelineCreateInfo::ViewportState::ViewportState (deUint32						_viewportCount,
+												  std::vector<vk::VkViewport>	_viewports,
+												  std::vector<vk::VkRect2D>		_scissors)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags			= 0u;
+	viewportCount	= _viewportCount;
+	scissorCount	= _viewportCount;
+
+	if (!_viewports.size())
+	{
+		m_viewports.resize(viewportCount);
+		deMemset(&m_viewports[0], 0, sizeof(m_viewports[0]) * m_viewports.size());
+	}
+	else
+	{
+		m_viewports = _viewports;
+	}
+
+	if (!_scissors.size())
+	{
+		m_scissors.resize(scissorCount);
+		deMemset(&m_scissors[0], 0, sizeof(m_scissors[0]) * m_scissors.size());
+	}
+	else
+	{
+		m_scissors = _scissors;
+	}
+
+	pViewports	= &m_viewports[0];
+	pScissors	= &m_scissors[0];
+}
+
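+// Copying a ViewportState duplicates the viewport and scissor arrays and re-points
+// pViewports and pScissors at the new object's own vectors.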
+PipelineCreateInfo::ViewportState::ViewportState (const ViewportState& other)
+{
+	sType			= other.sType;
+	pNext			= other.pNext;
+	flags			= other.flags;
+	viewportCount	= other.viewportCount;
+	scissorCount	= other.scissorCount;
+
+	m_viewports = std::vector<vk::VkViewport>(other.pViewports, other.pViewports + viewportCount);
+	m_scissors	= std::vector<vk::VkRect2D>(other.pScissors, other.pScissors + scissorCount);
+
+	pViewports	= &m_viewports[0];
+	pScissors	= &m_scissors[0];
+}
+
+PipelineCreateInfo::ViewportState& PipelineCreateInfo::ViewportState::operator= (const ViewportState& other)
+{
+	sType			= other.sType;
+	pNext			= other.pNext;
+	flags			= other.flags;
+	viewportCount	= other.viewportCount;
+	scissorCount	= other.scissorCount;
+
+	m_viewports		= std::vector<vk::VkViewport>(other.pViewports, other.pViewports + viewportCount);
+	m_scissors		= std::vector<vk::VkRect2D>(other.pScissors, other.pScissors + scissorCount);
+
+	pViewports		= &m_viewports[0];
+	pScissors		= &m_scissors[0];
+	return *this;
+}
+
+PipelineCreateInfo::RasterizerState::RasterizerState (vk::VkBool32			_depthClampEnable,
+													  vk::VkBool32			_rasterizerDiscardEnable,
+													  vk::VkPolygonMode		_polygonMode,
+													  vk::VkCullModeFlags	_cullMode,
+													  vk::VkFrontFace		_frontFace,
+													  vk::VkBool32			_depthBiasEnable,
+													  float					_depthBiasConstantFactor,
+													  float					_depthBiasClamp,
+													  float					_depthBiasSlopeFactor,
+													  float					_lineWidth)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	depthClampEnable		= _depthClampEnable;
+	rasterizerDiscardEnable = _rasterizerDiscardEnable;
+	polygonMode				= _polygonMode;
+	cullMode				= _cullMode;
+	frontFace				= _frontFace;
+
+	depthBiasEnable			= _depthBiasEnable;
+	depthBiasConstantFactor	= _depthBiasConstantFactor;
+	depthBiasClamp			= _depthBiasClamp;
+	depthBiasSlopeFactor	= _depthBiasSlopeFactor;
+	lineWidth				= _lineWidth;
+}
+
+PipelineCreateInfo::MultiSampleState::MultiSampleState (vk::VkSampleCountFlagBits				_rasterizationSamples,
+														vk::VkBool32							_sampleShadingEnable,
+														float									_minSampleShading,
+														const std::vector<vk::VkSampleMask>&	_sampleMask,
+														bool									_alphaToCoverageEnable,
+														bool									_alphaToOneEnable)
+	: m_sampleMask(_sampleMask)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	rasterizationSamples	= _rasterizationSamples;
+	sampleShadingEnable		= _sampleShadingEnable;
+	minSampleShading		= _minSampleShading;
+	pSampleMask				= &m_sampleMask[0];
+	alphaToCoverageEnable   = _alphaToCoverageEnable;
+	alphaToOneEnable		= _alphaToOneEnable;
+}
+
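+// The sample mask stores one 32-bit word per 32 rasterization samples; the copy operations
+// below recompute that length so the whole mask is duplicated into m_sampleMask.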
+PipelineCreateInfo::MultiSampleState::MultiSampleState (const MultiSampleState& other)
+{
+	sType					= other.sType;
+	pNext					= other.pNext;
+	flags					= other.flags;
+	rasterizationSamples	= other.rasterizationSamples;
+	sampleShadingEnable		= other.sampleShadingEnable;
+	minSampleShading		= other.minSampleShading;
+
+	const size_t sampleMaskArrayLen = (other.rasterizationSamples + sizeof(vk::VkSampleMask) * 8 - 1) / (sizeof(vk::VkSampleMask) * 8);
+
+	m_sampleMask	= std::vector<vk::VkSampleMask>(other.pSampleMask, other.pSampleMask + sampleMaskArrayLen);
+	pSampleMask		= &m_sampleMask[0];
+}
+
+PipelineCreateInfo::MultiSampleState& PipelineCreateInfo::MultiSampleState::operator= (const MultiSampleState& other)
+{
+	sType = other.sType;
+	pNext = other.pNext;
+	flags					= other.flags;
+	rasterizationSamples	= other.rasterizationSamples;
+	sampleShadingEnable		= other.sampleShadingEnable;
+	minSampleShading		= other.minSampleShading;
+
+	const size_t sampleMaskArrayLen = (other.rasterizationSamples + sizeof(vk::VkSampleMask) * 8 - 1) / (sizeof(vk::VkSampleMask) * 8);
+
+	m_sampleMask	= std::vector<vk::VkSampleMask>(other.pSampleMask, other.pSampleMask + sampleMaskArrayLen);
+	pSampleMask		= &m_sampleMask[0];
+
+	return *this;
+}
+
+PipelineCreateInfo::ColorBlendState::ColorBlendState (const std::vector<vk::VkPipelineColorBlendAttachmentState>&	_attachments,
+													  vk::VkBool32													_logicOpEnable,
+													  vk::VkLogicOp													_logicOp)
+	: m_attachments(_attachments)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	logicOpEnable			= _logicOpEnable;
+	logicOp					= _logicOp;
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachments[0];
+}
+
+PipelineCreateInfo::ColorBlendState::ColorBlendState (deUint32											_attachmentCount,
+													  const vk::VkPipelineColorBlendAttachmentState*	_attachments,
+													  vk::VkBool32										_logicOpEnable,
+													  vk::VkLogicOp										_logicOp)
+	: m_attachments(_attachments, _attachments + _attachmentCount)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+	pNext	= DE_NULL;
+	flags					= 0u;
+	logicOpEnable			= _logicOpEnable;
+	logicOp					= _logicOp;
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachments[0];
+}
+
+PipelineCreateInfo::ColorBlendState::ColorBlendState (const vk::VkPipelineColorBlendStateCreateInfo& createInfo)
+	: m_attachments (createInfo.pAttachments, createInfo.pAttachments + createInfo.attachmentCount)
+{
+	sType = createInfo.sType;
+	pNext = createInfo.pNext;
+	flags					= createInfo.flags;
+	logicOpEnable			= createInfo.logicOpEnable;
+	logicOp					= createInfo.logicOp;
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachments[0];
+}
+
+PipelineCreateInfo::ColorBlendState::ColorBlendState (const ColorBlendState& createInfo, std::vector<float> _blendConstants)
+	: m_attachments (createInfo.pAttachments, createInfo.pAttachments + createInfo.attachmentCount)
+{
+	sType = createInfo.sType;
+	pNext = createInfo.pNext;
+	flags					= createInfo.flags;
+	logicOpEnable			= createInfo.logicOpEnable;
+	logicOp					= createInfo.logicOp;
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachments[0];
+	deMemcpy(blendConstants, &_blendConstants[0], 4 * sizeof(float));
+}
+
+PipelineCreateInfo::ColorBlendState::Attachment::Attachment (vk::VkBool32		_blendEnable,
+															 vk::VkBlendFactor	_srcColorBlendFactor,
+															 vk::VkBlendFactor	_dstColorBlendFactor,
+															 vk::VkBlendOp		_colorBlendOp,
+															 vk::VkBlendFactor	_srcAlphaBlendFactor,
+															 vk::VkBlendFactor	_dstAlphaBlendFactor,
+															 vk::VkBlendOp		_alphaBlendOp,
+															 deUint8			_colorWriteMask)
+{
+	blendEnable			= _blendEnable;
+	srcColorBlendFactor	= _srcColorBlendFactor;
+	dstColorBlendFactor	= _dstColorBlendFactor;
+	colorBlendOp		= _colorBlendOp;
+	srcAlphaBlendFactor	= _srcAlphaBlendFactor;
+	dstAlphaBlendFactor	= _dstAlphaBlendFactor;
+	alphaBlendOp		= _alphaBlendOp;
+	colorWriteMask	= _colorWriteMask;
+}
+
+PipelineCreateInfo::DepthStencilState::StencilOpState::StencilOpState (vk::VkStencilOp	_failOp,
+																	   vk::VkStencilOp	_passOp,
+																	   vk::VkStencilOp	_depthFailOp,
+																	   vk::VkCompareOp	_compareOp,
+																	   deUint32			_compareMask,
+																	   deUint32			_writeMask,
+																	   deUint32			_reference)
+{
+	failOp		= _failOp;
+	passOp		= _passOp;
+	depthFailOp	= _depthFailOp;
+	compareOp	= _compareOp;
+
+	compareMask	= _compareMask;
+	writeMask	= _writeMask;
+	reference	= _reference;
+}
+
+PipelineCreateInfo::DepthStencilState::DepthStencilState (vk::VkBool32		_depthTestEnable,
+														  vk::VkBool32		_depthWriteEnable,
+														  vk::VkCompareOp	_depthCompareOp,
+														  vk::VkBool32		_depthBoundsTestEnable,
+														  vk::VkBool32		_stencilTestEnable,
+														  StencilOpState	_front,
+														  StencilOpState	_back,
+														  float				_minDepthBounds,
+														  float				_maxDepthBounds)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	depthTestEnable			= _depthTestEnable;
+	depthWriteEnable		= _depthWriteEnable;
+	depthCompareOp			= _depthCompareOp;
+	depthBoundsTestEnable	= _depthBoundsTestEnable;
+	stencilTestEnable		= _stencilTestEnable;
+	front	= _front;
+	back	= _back;
+
+	minDepthBounds = _minDepthBounds;
+	maxDepthBounds = _maxDepthBounds;
+}
+
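+// When no explicit list is given, every dynamic state known to the framework is enabled.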
+PipelineCreateInfo::DynamicState::DynamicState (const std::vector<vk::VkDynamicState>& _dynamicStates)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0u;
+
+	if (!_dynamicStates.size())
+	{
+		for (size_t i = 0; i < vk::VK_DYNAMIC_STATE_LAST; ++i)
+		{
+			m_dynamicStates.push_back(static_cast<vk::VkDynamicState>(i));
+		}
+	}
+	else
+		m_dynamicStates = _dynamicStates;
+
+	dynamicStateCount = static_cast<deUint32>(m_dynamicStates.size());
+	pDynamicStates = &m_dynamicStates[0];
+}
+
+PipelineCreateInfo::DynamicState::DynamicState (const DynamicState &other)
+{
+	sType = other.sType;
+	pNext = other.pNext;
+	flags = other.flags;
+
+	dynamicStateCount = other.dynamicStateCount;
+
+	m_dynamicStates = std::vector<vk::VkDynamicState>(other.pDynamicStates, other.pDynamicStates + dynamicStateCount);
+	pDynamicStates = &m_dynamicStates[0];
+}
+
+PipelineCreateInfo::DynamicState& PipelineCreateInfo::DynamicState::operator= (const DynamicState& other)
+{
+	sType = other.sType;
+	pNext = other.pNext;
+	flags = other.flags;
+
+	dynamicStateCount = other.dynamicStateCount;
+
+	m_dynamicStates = std::vector<vk::VkDynamicState>(other.pDynamicStates, other.pDynamicStates + dynamicStateCount);
+	pDynamicStates = &m_dynamicStates[0];
+
+	return *this;
+}
+
+PipelineCreateInfo::PipelineCreateInfo (vk::VkPipelineLayout		_layout,
+										vk::VkRenderPass			_renderPass,
+										int							_subpass,
+										vk::VkPipelineCreateFlags	_flags)
+{
+	deMemset(static_cast<vk::VkGraphicsPipelineCreateInfo *>(this), 0,
+		sizeof(vk::VkGraphicsPipelineCreateInfo));
+
+	sType = vk::VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags				= _flags;
+	renderPass			= _renderPass;
+	subpass				= _subpass;
+	layout				= _layout;
+	basePipelineHandle	= DE_NULL;
+	basePipelineIndex	= 0;
+	pDynamicState		= DE_NULL;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addShader (const vk::VkPipelineShaderStageCreateInfo& shader)
+{
+	m_shaders.push_back(shader);
+
+	stageCount	= static_cast<deUint32>(m_shaders.size());
+	pStages		= &m_shaders[0];
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineVertexInputStateCreateInfo& state)
+{
+	m_vertexInputState	= state;
+	pVertexInputState	= &m_vertexInputState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineInputAssemblyStateCreateInfo& state)
+{
+	m_inputAssemblyState = state;
+	pInputAssemblyState = &m_inputAssemblyState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineColorBlendStateCreateInfo& state)
+{
+	m_colorBlendStateAttachments	= std::vector<vk::VkPipelineColorBlendAttachmentState>(state.pAttachments, state.pAttachments + state.attachmentCount);
+	m_colorBlendState				= state;
+	m_colorBlendState.pAttachments	= &m_colorBlendStateAttachments[0];
+	pColorBlendState				= &m_colorBlendState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineViewportStateCreateInfo& state)
+{
+	m_viewports					= std::vector<vk::VkViewport>(state.pViewports, state.pViewports + state.viewportCount);
+	m_scissors					= std::vector<vk::VkRect2D>(state.pScissors, state.pScissors + state.scissorCount);
+	m_viewportState				= state;
+	m_viewportState.pViewports	= &m_viewports[0];
+	m_viewportState.pScissors	= &m_scissors[0];
+	pViewportState				= &m_viewportState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineDepthStencilStateCreateInfo& state)
+{
+	m_dynamicDepthStencilState	= state;
+	pDepthStencilState			= &m_dynamicDepthStencilState;
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineTessellationStateCreateInfo& state)
+{
+	m_tessState			= state;
+	pTessellationState	= &m_tessState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineRasterizationStateCreateInfo& state)
+{
+	m_rasterState		= state;
+	pRasterizationState	= &m_rasterState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineMultisampleStateCreateInfo& state)
+{
+
+	const size_t sampleMaskArrayLen = (state.rasterizationSamples + sizeof(vk::VkSampleMask) * 8 - 1) / (sizeof(vk::VkSampleMask) * 8);
+	m_multisampleStateSampleMask	= std::vector<vk::VkSampleMask>(state.pSampleMask, state.pSampleMask + sampleMaskArrayLen);
+	m_multisampleState				= state;
+	m_multisampleState.pSampleMask	= &m_multisampleStateSampleMask[0];
+	pMultisampleState				= &m_multisampleState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineDynamicStateCreateInfo& state)
+{
+	m_dynamicStates					= std::vector<vk::VkDynamicState>(state.pDynamicStates, state.pDynamicStates + state.dynamicStateCount);
+	m_dynamicState					= state;
+	m_dynamicState.pDynamicStates	= &m_dynamicStates[0];
+	pDynamicState					= &m_dynamicState;
+
+	return *this;
+}
+
+SamplerCreateInfo::SamplerCreateInfo (vk::VkFilter				_magFilter,
+									  vk::VkFilter				_minFilter,
+									  vk::VkSamplerMipmapMode	_mipmapMode,
+									  vk::VkSamplerAddressMode	_addressModeU,
+									  vk::VkSamplerAddressMode	_addressModeV,
+									  vk::VkSamplerAddressMode	_addressModeW,
+									  float						_mipLodBias,
+									  vk::VkBool32				_anisotropyEnable,
+									  float						_maxAnisotropy,
+									  vk::VkBool32				_compareEnable,
+									  vk::VkCompareOp			_compareOp,
+									  float						_minLod,
+									  float						_maxLod,
+									  vk::VkBorderColor			_borderColor,
+									  vk::VkBool32				_unnormalizedCoordinates)
+{
+	sType					= vk::VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
+	pNext					= DE_NULL;
+	flags					= 0u;
+	magFilter				= _magFilter;
+	minFilter				= _minFilter;
+	mipmapMode				= _mipmapMode;
+	addressModeU			= _addressModeU;
+	addressModeV			= _addressModeV;
+	addressModeW			= _addressModeW;
+	mipLodBias				= _mipLodBias;
+	anisotropyEnable		= _anisotropyEnable;
+	maxAnisotropy			= _maxAnisotropy;
+	compareEnable			= _compareEnable;
+	compareOp				= _compareOp;
+	minLod					= _minLod;
+	maxLod					= _maxLod;
+	borderColor				= _borderColor;
+	unnormalizedCoordinates = _unnormalizedCoordinates;
+}
+} // DynamicState
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCreateInfoUtil.hpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCreateInfoUtil.hpp
new file mode 100644
index 0000000..cd61d66
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateCreateInfoUtil.hpp
@@ -0,0 +1,522 @@
+#ifndef _VKTDYNAMICSTATECREATEINFOUTIL_HPP
+#define _VKTDYNAMICSTATECREATEINFOUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief CreateInfo utilities
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "tcuVector.hpp"
+
+#include "deSharedPtr.hpp"
+
+#include <vector>
+
+namespace vkt
+{
+namespace DynamicState
+{
+
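+// The classes below derive from the corresponding Vulkan create-info and description structures
+// and fill in sType, pNext and sensible defaults in their constructors. Wrappers that carry
+// array data keep it in member vectors and point the inherited raw pointers at that storage;
+// copy operations, where provided, re-point those pointers at the new object.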
+class ImageSubresourceRange : public vk::VkImageSubresourceRange
+{
+public:
+	ImageSubresourceRange		(vk::VkImageAspectFlags	aspectMask,
+								 deUint32				baseMipLevel	= 0,
+								 deUint32				levelCount		= 1,
+								 deUint32				baseArrayLayer	= 0,
+								 deUint32				layerCount		= 1);
+};
+
+class ComponentMapping : public vk::VkComponentMapping
+{
+public:
+	ComponentMapping			(vk::VkComponentSwizzle r = vk::VK_COMPONENT_SWIZZLE_R,
+								 vk::VkComponentSwizzle g = vk::VK_COMPONENT_SWIZZLE_G,
+								 vk::VkComponentSwizzle b = vk::VK_COMPONENT_SWIZZLE_B,
+								 vk::VkComponentSwizzle a = vk::VK_COMPONENT_SWIZZLE_A);
+};
+
+class ImageViewCreateInfo : public vk::VkImageViewCreateInfo
+{
+public:
+	ImageViewCreateInfo			(vk::VkImage						image,
+								 vk::VkImageViewType				viewType,
+								 vk::VkFormat						format,
+								 const vk::VkImageSubresourceRange&	subresourceRange,
+								 const vk::VkComponentMapping&		components			= ComponentMapping(),
+								 vk::VkImageViewCreateFlags			flags				= 0);
+
+	ImageViewCreateInfo			(vk::VkImage						image,
+								 vk::VkImageViewType				viewType,
+								 vk::VkFormat						format,
+								 const vk::VkComponentMapping&		components			= ComponentMapping(),
+								 vk::VkImageViewCreateFlags			flags				= 0);
+};
+
+class BufferViewCreateInfo : public vk::VkBufferViewCreateInfo
+{
+public:
+	BufferViewCreateInfo		 (vk::VkBuffer		buffer,
+								  vk::VkFormat		format,
+								  vk::VkDeviceSize	offset,
+								  vk::VkDeviceSize	range);
+};
+
+class BufferCreateInfo : public vk::VkBufferCreateInfo
+{
+public:
+	BufferCreateInfo			(vk::VkDeviceSize			size,
+								 vk::VkBufferUsageFlags		usage,
+								 vk::VkSharingMode			sharingMode				= vk::VK_SHARING_MODE_EXCLUSIVE,
+								 deUint32					queueFamilyIndexCount	= 0,
+								 const deUint32*			pQueueFamilyIndices		= DE_NULL,
+								 vk::VkBufferCreateFlags	flags					= 0);
+
+	BufferCreateInfo			(const BufferCreateInfo&	other);
+	BufferCreateInfo& operator=	(const BufferCreateInfo&	other);
+
+private:
+	std::vector<deUint32> m_queueFamilyIndices;
+};
+
+class ImageCreateInfo : public vk::VkImageCreateInfo
+{
+public:
+	ImageCreateInfo				(vk::VkImageType			imageType,
+								 vk::VkFormat				format,
+								 vk::VkExtent3D				extent,
+								 deUint32					mipLevels,
+								 deUint32					arrayLayers,
+								 vk::VkSampleCountFlagBits	samples,
+								 vk::VkImageTiling			tiling,
+								 vk::VkImageUsageFlags		usage,
+								 vk::VkSharingMode			sharingMode				= vk::VK_SHARING_MODE_EXCLUSIVE,
+								 deUint32					queueFamilyIndexCount	= 0,
+								 const deUint32*			pQueueFamilyIndices		= DE_NULL,
+								 vk::VkImageCreateFlags		flags					= 0,
+								 vk::VkImageLayout			initialLayout			= vk::VK_IMAGE_LAYOUT_UNDEFINED);
+
+private:
+	ImageCreateInfo				(const ImageCreateInfo&		other);
+	ImageCreateInfo& operator=	(const ImageCreateInfo&		other);
+
+	std::vector<deUint32> m_queueFamilyIndices;
+};
+
+class FramebufferCreateInfo : public vk::VkFramebufferCreateInfo
+{
+public:
+	FramebufferCreateInfo		(vk::VkRenderPass						renderPass,
+								 const std::vector<vk::VkImageView>&	attachments,
+								 deUint32								width,
+								 deUint32								height,
+								 deUint32								layers);
+};
+
+class AttachmentDescription : public vk::VkAttachmentDescription
+{
+public:
+	AttachmentDescription	(vk::VkFormat				format,
+							 vk::VkSampleCountFlagBits	samples,
+							 vk::VkAttachmentLoadOp		loadOp,
+							 vk::VkAttachmentStoreOp	storeOp,
+							 vk::VkAttachmentLoadOp		stencilLoadOp,
+							 vk::VkAttachmentStoreOp	stencilStoreOp,
+							 vk::VkImageLayout			initialLayout,
+							 vk::VkImageLayout			finalLayout);
+
+	AttachmentDescription	(const vk::VkAttachmentDescription &);
+};
+
+class AttachmentReference : public vk::VkAttachmentReference
+{
+public:
+	AttachmentReference		(deUint32 attachment, vk::VkImageLayout layout);
+	AttachmentReference		(void);
+};
+
+class SubpassDescription : public vk::VkSubpassDescription
+{
+public:
+	SubpassDescription				(vk::VkPipelineBindPoint			pipelineBindPoint,
+									 vk::VkSubpassDescriptionFlags		flags,
+									 deUint32							inputAttachmentCount,
+									 const vk::VkAttachmentReference*	inputAttachments,
+									 deUint32							colorAttachmentCount,
+									 const vk::VkAttachmentReference*	colorAttachments,
+									 const vk::VkAttachmentReference*	resolveAttachments,
+									 vk::VkAttachmentReference			depthStencilAttachment,
+									 deUint32							preserveAttachmentCount,
+									 const deUint32*					preserveAttachments);
+
+	SubpassDescription				(const vk::VkSubpassDescription&	other);
+	SubpassDescription				(const SubpassDescription&			other);
+	SubpassDescription& operator=	(const SubpassDescription&			other);
+
+private:
+	std::vector<vk::VkAttachmentReference>	m_inputAttachments;
+	std::vector<vk::VkAttachmentReference>	m_colorAttachments;
+	std::vector<vk::VkAttachmentReference>	m_resolveAttachments;
+	std::vector<deUint32>					m_preserveAttachments;
+
+	vk::VkAttachmentReference				m_depthStencilAttachment;
+};
+
+class SubpassDependency : public vk::VkSubpassDependency
+{
+public:
+	SubpassDependency (	deUint32					srcSubpass,
+						deUint32					dstSubpass,
+						vk::VkPipelineStageFlags	srcStageMask,
+						vk::VkPipelineStageFlags	dstStageMask,
+						vk::VkAccessFlags			srcAccessMask,
+						vk::VkAccessFlags			dstAccessMask,
+						vk::VkDependencyFlags		dependencyFlags);
+
+	SubpassDependency (const vk::VkSubpassDependency& other);
+};
+
+class RenderPassCreateInfo : public vk::VkRenderPassCreateInfo
+{
+public:
+	RenderPassCreateInfo (const std::vector<vk::VkAttachmentDescription>&	attachments,
+						  const std::vector<vk::VkSubpassDescription>&		subpasses,
+						  const std::vector<vk::VkSubpassDependency>&		dependiences		= std::vector<vk::VkSubpassDependency>());
+
+	RenderPassCreateInfo (deUint32											attachmentCount	= 0,
+						  const vk::VkAttachmentDescription*				pAttachments	= DE_NULL,
+						  deUint32											subpassCount	= 0,
+						  const vk::VkSubpassDescription*					pSubpasses		= DE_NULL,
+						  deUint32											dependencyCount	= 0,
+						  const vk::VkSubpassDependency*					pDependiences	= DE_NULL);
+
+	void addAttachment	(vk::VkAttachmentDescription						attachment);
+	void addSubpass		(vk::VkSubpassDescription							subpass);
+	void addDependency	(vk::VkSubpassDependency							dependency);
+
+private:
+	std::vector<AttachmentDescription>			m_attachments;
+	std::vector<SubpassDescription>				m_subpasses;
+	std::vector<SubpassDependency>				m_dependiences;
+
+	std::vector<vk::VkAttachmentDescription>	m_attachmentsStructs;
+	std::vector<vk::VkSubpassDescription>		m_subpassesStructs;
+	std::vector<vk::VkSubpassDependency>		m_dependiencesStructs;
+
+	RenderPassCreateInfo			(const RenderPassCreateInfo &other); //Not allowed!
+	RenderPassCreateInfo& operator= (const RenderPassCreateInfo &other); //Not allowed!
+};
+
+class RenderPassBeginInfo : public vk::VkRenderPassBeginInfo
+{
+public:
+	RenderPassBeginInfo (vk::VkRenderPass						renderPass,
+						 vk::VkFramebuffer						framebuffer,
+						 vk::VkRect2D							renderArea,
+						 const std::vector<vk::VkClearValue>&	clearValues = std::vector<vk::VkClearValue>());
+
+private:
+	std::vector<vk::VkClearValue> m_clearValues;
+
+	RenderPassBeginInfo				(const RenderPassBeginInfo&	other); //Not allowed!
+	RenderPassBeginInfo& operator=	(const RenderPassBeginInfo&	other); //Not allowed!
+};
+
+class CmdPoolCreateInfo : public vk::VkCommandPoolCreateInfo
+{
+public:
+	CmdPoolCreateInfo (deUint32						queueFamilyIndex,
+					   vk::VkCommandPoolCreateFlags flags				= vk::VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+};
+
+class CmdBufferBeginInfo : public vk::VkCommandBufferBeginInfo
+{
+public:
+	CmdBufferBeginInfo (vk::VkCommandBufferUsageFlags		flags					= 0);
+};
+
+class DescriptorPoolSize : public vk::VkDescriptorPoolSize
+{
+public:
+	DescriptorPoolSize (vk::VkDescriptorType _type, deUint32 _descriptorCount)
+	{
+		type			= _type;
+		descriptorCount = _descriptorCount;
+	}
+};
+
+class DescriptorPoolCreateInfo : public vk::VkDescriptorPoolCreateInfo
+{
+public:
+	DescriptorPoolCreateInfo (const std::vector<vk::VkDescriptorPoolSize>&	poolSizeCounts,
+							  vk::VkDescriptorPoolCreateFlags				flags,
+							  deUint32										maxSets);
+
+	DescriptorPoolCreateInfo& addDescriptors (vk::VkDescriptorType type, deUint32 count);
+
+private:
+	std::vector<vk::VkDescriptorPoolSize> m_poolSizeCounts;
+};
+
+class DescriptorSetLayoutCreateInfo : public vk::VkDescriptorSetLayoutCreateInfo
+{
+public:
+	DescriptorSetLayoutCreateInfo (deUint32 bindingCount, const vk::VkDescriptorSetLayoutBinding* pBindings);
+};
+
+class PipelineLayoutCreateInfo : public vk::VkPipelineLayoutCreateInfo
+{
+public:
+	PipelineLayoutCreateInfo (deUint32										descriptorSetCount,
+							  const vk::VkDescriptorSetLayout*				pSetLayouts,
+							  deUint32										pushConstantRangeCount	= 0,
+							  const vk::VkPushConstantRange*				pPushConstantRanges		= DE_NULL);
+
+	PipelineLayoutCreateInfo (const std::vector<vk::VkDescriptorSetLayout>&	setLayouts				= std::vector<vk::VkDescriptorSetLayout>(),
+							  deUint32										pushConstantRangeCount	= 0,
+							  const vk::VkPushConstantRange*				pPushConstantRanges		= DE_NULL);
+
+private:
+	std::vector<vk::VkDescriptorSetLayout>	m_setLayouts;
+	std::vector<vk::VkPushConstantRange>	m_pushConstantRanges;
+};
+
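+// PipelineCreateInfo is a small builder around VkGraphicsPipelineCreateInfo: addShader() and the
+// addState() overloads copy the given state into the members below and point the corresponding
+// pointer in the base struct at that copy, so calls can be chained when assembling a pipeline.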
+class PipelineCreateInfo : public vk::VkGraphicsPipelineCreateInfo
+{
+public:
+	class VertexInputState : public vk::VkPipelineVertexInputStateCreateInfo
+	{
+	public:
+		VertexInputState (deUint32										vertexBindingDescriptionCount	= 0,
+						  const vk::VkVertexInputBindingDescription*	pVertexBindingDescriptions		= NULL,
+						  deUint32										vertexAttributeDescriptionCount	= 0,
+						  const vk::VkVertexInputAttributeDescription*	pVertexAttributeDescriptions	= NULL);
+	};
+
+	class InputAssemblerState : public vk::VkPipelineInputAssemblyStateCreateInfo
+	{
+	public:
+		InputAssemblerState (vk::VkPrimitiveTopology topology, vk::VkBool32 primitiveRestartEnable = false);
+	};
+
+	class TessellationState : public vk::VkPipelineTessellationStateCreateInfo
+	{
+	public:
+		TessellationState (deUint32 patchControlPoints = 0);
+	};
+
+	class ViewportState : public vk::VkPipelineViewportStateCreateInfo
+	{
+	public:
+		ViewportState				(deUint32						viewportCount,
+									 std::vector<vk::VkViewport>	viewports		= std::vector<vk::VkViewport>(0),
+									 std::vector<vk::VkRect2D>		scissors		= std::vector<vk::VkRect2D>(0));
+
+		ViewportState				(const ViewportState&			other);
+		ViewportState& operator=	(const ViewportState&			other);
+
+		std::vector<vk::VkViewport> m_viewports;
+		std::vector<vk::VkRect2D>	m_scissors;
+	};
+
+	class RasterizerState : public vk::VkPipelineRasterizationStateCreateInfo
+	{
+	public:
+		RasterizerState (vk::VkBool32			depthClampEnable		= false,
+						 vk::VkBool32			rasterizerDiscardEnable = false,
+						 vk::VkPolygonMode		polygonMode				= vk::VK_POLYGON_MODE_FILL,
+						 vk::VkCullModeFlags	cullMode				= vk::VK_CULL_MODE_NONE,
+						 vk::VkFrontFace		frontFace				= vk::VK_FRONT_FACE_CLOCKWISE,
+						 vk::VkBool32			depthBiasEnable			= true,
+						 float					depthBiasConstantFactor	= 0.0f,
+						 float					depthBiasClamp			= 0.0f,
+						 float					depthBiasSlopeFactor	= 0.0f,
+						 float					lineWidth				= 1.0f);
+	};
+
+	class MultiSampleState : public vk::VkPipelineMultisampleStateCreateInfo
+	{
+	public:
+		MultiSampleState			(vk::VkSampleCountFlagBits				rasterizationSamples		= vk::VK_SAMPLE_COUNT_1_BIT,
+									 vk::VkBool32							sampleShadingEnable			= false,
+									 float									minSampleShading			= 0.0f,
+									 const std::vector<vk::VkSampleMask>&	sampleMask					= std::vector<vk::VkSampleMask>(1, 0xffffffff),
+									 bool									alphaToCoverageEnable		= false,
+									 bool									alphaToOneEnable			= false);
+
+		MultiSampleState			(const MultiSampleState&				other);
+		MultiSampleState& operator= (const MultiSampleState&				other);
+
+	private:
+		std::vector<vk::VkSampleMask> m_sampleMask;
+	};
+
+	class ColorBlendState : public vk::VkPipelineColorBlendStateCreateInfo
+	{
+	public:
+		class Attachment : public vk::VkPipelineColorBlendAttachmentState
+		{
+		public:
+			Attachment (vk::VkBool32		blendEnable			= false,
+						vk::VkBlendFactor	srcColorBlendFactor	= vk::VK_BLEND_FACTOR_SRC_COLOR,
+						vk::VkBlendFactor	dstColorBlendFactor	= vk::VK_BLEND_FACTOR_DST_COLOR,
+						vk::VkBlendOp		colorBlendOp		= vk::VK_BLEND_OP_ADD,
+						vk::VkBlendFactor	srcAlphaBlendFactor	= vk::VK_BLEND_FACTOR_SRC_COLOR,
+						vk::VkBlendFactor	dstAlphaBlendFactor	= vk::VK_BLEND_FACTOR_DST_COLOR,
+						vk::VkBlendOp		alphaBlendOp		= vk::VK_BLEND_OP_ADD,
+						deUint8				colorWriteMask		= 0xff);
+		};
+
+		ColorBlendState (const std::vector<vk::VkPipelineColorBlendAttachmentState>&	attachments,
+						 vk::VkBool32													logicOpEnable			= false,
+						 vk::VkLogicOp													logicOp					= vk::VK_LOGIC_OP_COPY);
+
+		ColorBlendState (deUint32														attachmentCount,
+						 const vk::VkPipelineColorBlendAttachmentState*					attachments,
+						 vk::VkBool32													logicOpEnable			= false,
+						 vk::VkLogicOp													logicOp					= vk::VK_LOGIC_OP_COPY);
+
+		ColorBlendState (const vk::VkPipelineColorBlendStateCreateInfo&					createInfo);
+		ColorBlendState (const ColorBlendState&											createInfo,
+						 std::vector<float>												blendConstants			= std::vector<float>(4));
+
+	private:
+		std::vector<vk::VkPipelineColorBlendAttachmentState> m_attachments;
+	};
+
+	class DepthStencilState : public vk::VkPipelineDepthStencilStateCreateInfo
+	{
+	public:
+		class StencilOpState : public vk::VkStencilOpState
+		{
+		public:
+			StencilOpState (vk::VkStencilOp failOp					= vk::VK_STENCIL_OP_REPLACE,
+							vk::VkStencilOp passOp					= vk::VK_STENCIL_OP_REPLACE,
+							vk::VkStencilOp depthFailOp				= vk::VK_STENCIL_OP_REPLACE,
+							vk::VkCompareOp compareOp				= vk::VK_COMPARE_OP_ALWAYS,
+							deUint32		compareMask				= 0xffffffffu,
+							deUint32		writeMask				= 0xffffffffu,
+							deUint32		reference				= 0);
+		};
+
+		DepthStencilState (vk::VkBool32		depthTestEnable			= false,
+						   vk::VkBool32		depthWriteEnable		= false,
+						   vk::VkCompareOp	depthCompareOp			= vk::VK_COMPARE_OP_ALWAYS,
+						   vk::VkBool32		depthBoundsTestEnable	= false,
+						   vk::VkBool32		stencilTestEnable		= false,
+						   StencilOpState	front					= StencilOpState(),
+						   StencilOpState	back					= StencilOpState(),
+						   float			minDepthBounds			= -1.0f,
+						   float			maxDepthBounds			= 1.0f);
+	};
+
+	class PipelineShaderStage : public vk::VkPipelineShaderStageCreateInfo
+	{
+	public:
+		PipelineShaderStage (vk::VkShaderModule shaderModule, const char* pName, vk::VkShaderStageFlagBits stage);
+	};
+
+	class DynamicState : public vk::VkPipelineDynamicStateCreateInfo
+	{
+	public:
+		DynamicState			(const std::vector<vk::VkDynamicState>& dynamicStates = std::vector<vk::VkDynamicState>(0));
+
+		DynamicState			(const DynamicState& other);
+		DynamicState& operator= (const DynamicState& other);
+
+		std::vector<vk::VkDynamicState> m_dynamicStates;
+	};
+
+	PipelineCreateInfo				(vk::VkPipelineLayout								layout,
+								     vk::VkRenderPass									renderPass,
+									 int												subpass,
+									 vk::VkPipelineCreateFlags							flags);
+
+	PipelineCreateInfo& addShader	(const vk::VkPipelineShaderStageCreateInfo&			shader);
+
+	PipelineCreateInfo& addState	(const vk::VkPipelineVertexInputStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineInputAssemblyStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineColorBlendStateCreateInfo&		state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineViewportStateCreateInfo&		state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineDepthStencilStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineTessellationStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineRasterizationStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineMultisampleStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineDynamicStateCreateInfo&		state);
+
+private:
+	std::vector<vk::VkPipelineShaderStageCreateInfo>		m_shaders;
+
+	vk::VkPipelineVertexInputStateCreateInfo				m_vertexInputState;
+	vk::VkPipelineInputAssemblyStateCreateInfo				m_inputAssemblyState;
+	std::vector<vk::VkPipelineColorBlendAttachmentState>	m_colorBlendStateAttachments;
+	vk::VkPipelineColorBlendStateCreateInfo					m_colorBlendState;
+	vk::VkPipelineViewportStateCreateInfo					m_viewportState;
+	vk::VkPipelineDepthStencilStateCreateInfo				m_dynamicDepthStencilState;
+	vk::VkPipelineTessellationStateCreateInfo				m_tessState;
+	vk::VkPipelineRasterizationStateCreateInfo				m_rasterState;
+	vk::VkPipelineMultisampleStateCreateInfo				m_multisampleState;
+	vk::VkPipelineDynamicStateCreateInfo					m_dynamicState;
+
+	std::vector<vk::VkDynamicState>							m_dynamicStates;
+
+	std::vector<vk::VkViewport>								m_viewports;
+	std::vector<vk::VkRect2D>								m_scissors;
+
+	std::vector<vk::VkSampleMask>							m_multisampleStateSampleMask;
+};
+
+class SamplerCreateInfo : public vk::VkSamplerCreateInfo
+{
+public:
+	SamplerCreateInfo (vk::VkFilter				magFilter				= vk::VK_FILTER_NEAREST,
+					   vk::VkFilter				minFilter				= vk::VK_FILTER_NEAREST,
+					   vk::VkSamplerMipmapMode	mipmapMode				= vk::VK_SAMPLER_MIPMAP_MODE_NEAREST,
+					   vk::VkSamplerAddressMode	addressU				= vk::VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+					   vk::VkSamplerAddressMode	addressV				= vk::VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+					   vk::VkSamplerAddressMode	addressW				= vk::VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+					   float					mipLodBias				= 0.0f,
+					   vk::VkBool32				anisotropyEnable		= vk::VK_FALSE,
+					   float					maxAnisotropy			= 1.0f,
+					   vk::VkBool32				compareEnable			= false,
+					   vk::VkCompareOp			compareOp				= vk::VK_COMPARE_OP_ALWAYS,
+					   float					minLod					= 0.0f,
+					   float					maxLod					= 16.0f,
+					   vk::VkBorderColor		borderColor				= vk::VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
+					   vk::VkBool32				unnormalizedCoordinates	= false);
+};
+
+} // DynamicState
+} // vkt
+
+#endif // _VKTDYNAMICSTATECREATEINFOUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateDSTests.cpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateDSTests.cpp
new file mode 100644
index 0000000..752493e
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateDSTests.cpp
@@ -0,0 +1,868 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic State Depth Stencil Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktDynamicStateDSTests.hpp"
+
+#include "vktTestCaseUtil.hpp"
+#include "vktDynamicStateTestCaseUtil.hpp"
+
+#include "tcuTestLog.hpp"
+#include "tcuResource.hpp"
+#include "tcuImageCompare.hpp"
+#include "tcuCommandLine.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuRGBA.hpp"
+
+#include "vkRefUtil.hpp"
+#include "vkImageUtil.hpp"
+
+#include "vktDynamicStateCreateInfoUtil.hpp"
+#include "vktDynamicStateImageObjectUtil.hpp"
+#include "vktDynamicStateBufferObjectUtil.hpp"
+#include "vkPrograms.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+namespace
+{
+
+class DepthStencilBaseCase : public TestInstance
+{
+public:
+	DepthStencilBaseCase (Context& context, const char* vertexShaderName, const char* fragmentShaderName)
+		: TestInstance						(context)
+		, m_colorAttachmentFormat			(vk::VK_FORMAT_R8G8B8A8_UNORM)
+		, m_topology						(vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP)
+		, m_vk								(context.getDeviceInterface())
+		, m_vertexShaderName				(vertexShaderName)
+		, m_fragmentShaderName				(fragmentShaderName)
+	{
+	}
+
+protected:
+
+	enum
+	{
+		WIDTH   = 128,
+		HEIGHT  = 128
+	};
+
+	vk::VkFormat									m_colorAttachmentFormat;
+	vk::VkFormat									m_depthStencilAttachmentFormat;
+
+	vk::VkPrimitiveTopology							m_topology;
+
+	const vk::DeviceInterface&						m_vk;
+
+	vk::Move<vk::VkPipeline>						m_pipeline_1;
+	vk::Move<vk::VkPipeline>						m_pipeline_2;
+	vk::Move<vk::VkPipelineLayout>					m_pipelineLayout;
+
+	de::SharedPtr<Image>							m_colorTargetImage;
+	vk::Move<vk::VkImageView>						m_colorTargetView;
+
+	de::SharedPtr<Image>							m_depthStencilImage;
+	vk::Move<vk::VkImageView>						m_attachmentView;
+
+	PipelineCreateInfo::VertexInputState			m_vertexInputState;
+	de::SharedPtr<Buffer>							m_vertexBuffer;
+
+	vk::Move<vk::VkCommandPool>						m_cmdPool;
+	vk::Move<vk::VkCommandBuffer>					m_cmdBuffer;
+
+	vk::Move<vk::VkFramebuffer>						m_framebuffer;
+	vk::Move<vk::VkRenderPass>						m_renderPass;
+
+	const std::string								m_vertexShaderName;
+	const std::string								m_fragmentShaderName;
+
+	std::vector<PositionColorVertex>				m_data;
+
+	PipelineCreateInfo::DepthStencilState			m_depthStencilState_1;
+	PipelineCreateInfo::DepthStencilState			m_depthStencilState_2;
+
+	void initialize (void)
+	{
+		const vk::VkDevice device = m_context.getDevice();
+
+		vk::VkFormatProperties formatProperties;
+		// check for VK_FORMAT_D24_UNORM_S8_UINT support
+		m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), vk::VK_FORMAT_D24_UNORM_S8_UINT, &formatProperties);
+		if (formatProperties.optimalTilingFeatures & vk::VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
+		{
+			m_depthStencilAttachmentFormat = vk::VK_FORMAT_D24_UNORM_S8_UINT;
+		}
+		else
+		{
+			// check for VK_FORMAT_D32_SFLOAT_S8_UINT support
+			m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), vk::VK_FORMAT_D32_SFLOAT_S8_UINT, &formatProperties);
+			if (formatProperties.optimalTilingFeatures & vk::VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
+			{
+				m_depthStencilAttachmentFormat = vk::VK_FORMAT_D32_SFLOAT_S8_UINT;
+			}
+			else
+				throw tcu::NotSupportedError("No valid depth stencil attachment available");
+		}
+
+		const PipelineLayoutCreateInfo pipelineLayoutCreateInfo;
+		m_pipelineLayout = vk::createPipelineLayout(m_vk, device, &pipelineLayoutCreateInfo);
+
+		const vk::Unique<vk::VkShaderModule> vs(createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_vertexShaderName), 0));
+		const vk::Unique<vk::VkShaderModule> fs(createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_fragmentShaderName), 0));
+
+		const vk::VkExtent3D imageExtent = { WIDTH, HEIGHT, 1 };
+		const ImageCreateInfo targetImageCreateInfo(vk::VK_IMAGE_TYPE_2D, m_colorAttachmentFormat, imageExtent, 1, 1, vk::VK_SAMPLE_COUNT_1_BIT,
+													vk::VK_IMAGE_TILING_OPTIMAL, vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+
+		m_colorTargetImage = Image::createAndAlloc(m_vk, device, targetImageCreateInfo, m_context.getDefaultAllocator());
+
+		const ImageCreateInfo depthStencilImageCreateInfo(vk::VK_IMAGE_TYPE_2D, m_depthStencilAttachmentFormat, imageExtent,
+														  1, 1, vk::VK_SAMPLE_COUNT_1_BIT, vk::VK_IMAGE_TILING_OPTIMAL,
+														  vk::VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT);
+
+		m_depthStencilImage = Image::createAndAlloc(m_vk, device, depthStencilImageCreateInfo, m_context.getDefaultAllocator());
+
+		const ImageViewCreateInfo colorTargetViewInfo(m_colorTargetImage->object(), vk::VK_IMAGE_VIEW_TYPE_2D, m_colorAttachmentFormat);
+		m_colorTargetView = vk::createImageView(m_vk, device, &colorTargetViewInfo);
+
+		const ImageViewCreateInfo attachmentViewInfo(m_depthStencilImage->object(), vk::VK_IMAGE_VIEW_TYPE_2D, m_depthStencilAttachmentFormat);
+		m_attachmentView = vk::createImageView(m_vk, device, &attachmentViewInfo);
+
+		RenderPassCreateInfo renderPassCreateInfo;
+		renderPassCreateInfo.addAttachment(AttachmentDescription(m_colorAttachmentFormat,
+																 vk::VK_SAMPLE_COUNT_1_BIT,
+																 vk::VK_ATTACHMENT_LOAD_OP_LOAD,
+																 vk::VK_ATTACHMENT_STORE_OP_STORE,
+																 vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+																 vk::VK_ATTACHMENT_STORE_OP_STORE,
+																 vk::VK_IMAGE_LAYOUT_GENERAL,
+																 vk::VK_IMAGE_LAYOUT_GENERAL));
+
+		renderPassCreateInfo.addAttachment(AttachmentDescription(m_depthStencilAttachmentFormat,
+																 vk::VK_SAMPLE_COUNT_1_BIT,
+																 vk::VK_ATTACHMENT_LOAD_OP_LOAD,
+																 vk::VK_ATTACHMENT_STORE_OP_STORE,
+																 vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+																 vk::VK_ATTACHMENT_STORE_OP_STORE,
+																 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+																 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL));
+
+		const vk::VkAttachmentReference colorAttachmentReference =
+		{
+			0,
+			vk::VK_IMAGE_LAYOUT_GENERAL
+		};
+
+		const vk::VkAttachmentReference depthAttachmentReference =
+		{
+			1,
+			vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
+		};
+
+		renderPassCreateInfo.addSubpass(SubpassDescription(
+			vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
+			0,
+			0,
+			DE_NULL,
+			1,
+			&colorAttachmentReference,
+			DE_NULL,
+			depthAttachmentReference,
+			0,
+			DE_NULL));
+
+		m_renderPass = vk::createRenderPass(m_vk, device, &renderPassCreateInfo);
+
+		const vk::VkVertexInputBindingDescription vertexInputBindingDescription =
+		{
+			0,
+			(deUint32)sizeof(tcu::Vec4) * 2,
+			vk::VK_VERTEX_INPUT_RATE_VERTEX,
+		};
+
+		const vk::VkVertexInputAttributeDescription vertexInputAttributeDescriptions[2] =
+		{
+			{
+				0u,
+				0u,
+				vk::VK_FORMAT_R32G32B32A32_SFLOAT,
+				0u
+			},
+			{
+				1u,
+				0u,
+				vk::VK_FORMAT_R32G32B32A32_SFLOAT,
+				(deUint32)(sizeof(float)* 4),
+			}
+		};
+
+		m_vertexInputState = PipelineCreateInfo::VertexInputState(
+			1,
+			&vertexInputBindingDescription,
+			2,
+			vertexInputAttributeDescriptions);
+
+		const PipelineCreateInfo::ColorBlendState::Attachment vkCbAttachmentState;
+
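+		// Both pipelines are built identically; they differ only in the depth/stencil
+		// state (m_depthStencilState_1 vs m_depthStencilState_2), which the derived
+		// test fills in before calling initialize().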
+		PipelineCreateInfo pipelineCreateInfo_1(*m_pipelineLayout, *m_renderPass, 0, 0);
+		pipelineCreateInfo_1.addShader(PipelineCreateInfo::PipelineShaderStage(*vs, "main", vk::VK_SHADER_STAGE_VERTEX_BIT));
+		pipelineCreateInfo_1.addShader(PipelineCreateInfo::PipelineShaderStage(*fs, "main", vk::VK_SHADER_STAGE_FRAGMENT_BIT));
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::VertexInputState(m_vertexInputState));
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::InputAssemblerState(m_topology));
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::ColorBlendState(1, &vkCbAttachmentState));
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::ViewportState(1));
+		pipelineCreateInfo_1.addState(m_depthStencilState_1);
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::RasterizerState());
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::MultiSampleState());
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::DynamicState());
+
+		PipelineCreateInfo pipelineCreateInfo_2(*m_pipelineLayout, *m_renderPass, 0, 0);
+		pipelineCreateInfo_2.addShader(PipelineCreateInfo::PipelineShaderStage(*vs, "main", vk::VK_SHADER_STAGE_VERTEX_BIT));
+		pipelineCreateInfo_2.addShader(PipelineCreateInfo::PipelineShaderStage(*fs, "main", vk::VK_SHADER_STAGE_FRAGMENT_BIT));
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::VertexInputState(m_vertexInputState));
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::InputAssemblerState(m_topology));
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::ColorBlendState(1, &vkCbAttachmentState));
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::ViewportState(1));
+		pipelineCreateInfo_2.addState(m_depthStencilState_2);
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::RasterizerState());
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::MultiSampleState());
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::DynamicState());
+
+		m_pipeline_1 = vk::createGraphicsPipeline(m_vk, device, DE_NULL, &pipelineCreateInfo_1);
+		m_pipeline_2 = vk::createGraphicsPipeline(m_vk, device, DE_NULL, &pipelineCreateInfo_2);
+
+		std::vector<vk::VkImageView> attachments(2);
+		attachments[0] = *m_colorTargetView;
+		attachments[1] = *m_attachmentView;
+
+		const FramebufferCreateInfo framebufferCreateInfo(*m_renderPass, attachments, WIDTH, HEIGHT, 1);
+
+		m_framebuffer = vk::createFramebuffer(m_vk, device, &framebufferCreateInfo);
+
+		const vk::VkDeviceSize dataSize = m_data.size() * sizeof(PositionColorVertex);
+		m_vertexBuffer = Buffer::createAndAlloc(m_vk, device, BufferCreateInfo(dataSize, vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT),
+												m_context.getDefaultAllocator(), vk::MemoryRequirement::HostVisible);
+
+		deUint8* ptr = reinterpret_cast<deUint8*>(m_vertexBuffer->getBoundMemory().getHostPtr());
+		deMemcpy(ptr, &m_data[0], (size_t)dataSize);
+
+		vk::flushMappedMemoryRange(m_vk, device,
+			m_vertexBuffer->getBoundMemory().getMemory(),
+			m_vertexBuffer->getBoundMemory().getOffset(),
+			dataSize);	// flush the whole copied vertex data range
+
+		const CmdPoolCreateInfo cmdPoolCreateInfo(m_context.getUniversalQueueFamilyIndex());
+		m_cmdPool = vk::createCommandPool(m_vk, device, &cmdPoolCreateInfo);
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*m_cmdPool,											// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					bufferCount;
+		};
+		m_cmdBuffer = vk::allocateCommandBuffer(m_vk, device, &cmdBufferAllocateInfo);
+	}
+
+	virtual tcu::TestStatus iterate (void)
+	{
+		DE_ASSERT(false);
+		return tcu::TestStatus::fail("Implement iterate() method!");
+	}
+
+	void beginRenderPass (void)
+	{
+		const vk::VkClearColorValue clearColor = { { 0.0f, 0.0f, 0.0f, 1.0f } };
+		beginRenderPassWithClearColor(clearColor);
+	}
+
+	void beginRenderPassWithClearColor (const vk::VkClearColorValue &clearColor)
+	{
+		const CmdBufferBeginInfo beginInfo;
+		m_vk.beginCommandBuffer(*m_cmdBuffer, &beginInfo);
+
+		initialTransitionColor2DImage(m_vk, *m_cmdBuffer, m_colorTargetImage->object(), vk::VK_IMAGE_LAYOUT_GENERAL);
+		initialTransitionDepthStencil2DImage(m_vk, *m_cmdBuffer, m_depthStencilImage->object(), vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+		const ImageSubresourceRange subresourceRangeImage(vk::VK_IMAGE_ASPECT_COLOR_BIT);
+		m_vk.cmdClearColorImage(*m_cmdBuffer, m_colorTargetImage->object(),
+			vk::VK_IMAGE_LAYOUT_GENERAL, &clearColor, 1, &subresourceRangeImage);
+
+		const vk::VkClearDepthStencilValue depthStencilClearValue = { 0.0f, 0 };
+
+		const ImageSubresourceRange subresourceRangeDepthStencil[2] = { vk::VK_IMAGE_ASPECT_DEPTH_BIT, vk::VK_IMAGE_ASPECT_STENCIL_BIT };
+		m_vk.cmdClearDepthStencilImage(*m_cmdBuffer, m_depthStencilImage->object(),
+			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &depthStencilClearValue, 2, subresourceRangeDepthStencil);
+
+		const vk::VkRect2D renderArea = { { 0, 0 }, { WIDTH, HEIGHT } };
+		const RenderPassBeginInfo renderPassBegin(*m_renderPass, *m_framebuffer, renderArea);
+
+		m_vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBegin, vk::VK_SUBPASS_CONTENTS_INLINE);
+	}
+
+	void setDynamicViewportState (const deUint32 width, const deUint32 height)
+	{
+		vk::VkViewport viewport;
+		viewport.x = 0;
+		viewport.y = 0;
+		viewport.width = static_cast<float>(width);
+		viewport.height = static_cast<float>(height);
+		viewport.minDepth = 0.0f;
+		viewport.maxDepth = 1.0f;
+
+		m_vk.cmdSetViewport(*m_cmdBuffer, 0, 1, &viewport);
+
+		vk::VkRect2D scissor;
+		scissor.offset.x = 0;
+		scissor.offset.y = 0;
+		scissor.extent.width = width;
+		scissor.extent.height = height;
+		m_vk.cmdSetScissor(*m_cmdBuffer, 0, 1, &scissor);
+	}
+
+	void setDynamicViewportState(const deUint32 viewportCount, const vk::VkViewport* pViewports, const vk::VkRect2D* pScissors)
+	{
+		m_vk.cmdSetViewport(*m_cmdBuffer, 0, viewportCount, pViewports);
+		m_vk.cmdSetScissor(*m_cmdBuffer, 0, viewportCount, pScissors);
+	}
+
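+	// Sets the rasterization-related dynamic state: line width and depth bias.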
+	void setDynamicRasterizationState(const float lineWidth = 1.0f,
+							   const float depthBiasConstantFactor = 0.0f,
+							   const float depthBiasClamp = 0.0f,
+							   const float depthBiasSlopeFactor = 0.0f)
+	{
+		m_vk.cmdSetLineWidth(*m_cmdBuffer, lineWidth);
+		m_vk.cmdSetDepthBias(*m_cmdBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+	}
+
+	void setDynamicBlendState(const float const1 = 0.0f, const float const2 = 0.0f,
+							  const float const3 = 0.0f, const float const4 = 0.0f)
+	{
+		float blendConstants[4] = { const1, const2, const3, const4 };
+		m_vk.cmdSetBlendConstants(*m_cmdBuffer, blendConstants);
+	}
+
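+	// Sets all depth/stencil-related dynamic state: depth bounds plus compare mask,
+	// write mask and reference for both the front and back stencil faces.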
+	void setDynamicDepthStencilState(const float minDepthBounds = -1.0f,
+									 const float maxDepthBounds = 1.0f,
+									 const deUint32 stencilFrontCompareMask = 0xffffffffu,
+									 const deUint32 stencilFrontWriteMask = 0xffffffffu,
+									 const deUint32 stencilFrontReference = 0,
+									 const deUint32 stencilBackCompareMask = 0xffffffffu,
+									 const deUint32 stencilBackWriteMask = 0xffffffffu,
+									 const deUint32 stencilBackReference = 0)
+	{
+		m_vk.cmdSetDepthBounds(*m_cmdBuffer, minDepthBounds, maxDepthBounds);
+		m_vk.cmdSetStencilCompareMask(*m_cmdBuffer, vk::VK_STENCIL_FACE_FRONT_BIT, stencilFrontCompareMask);
+		m_vk.cmdSetStencilWriteMask(*m_cmdBuffer, vk::VK_STENCIL_FACE_FRONT_BIT, stencilFrontWriteMask);
+		m_vk.cmdSetStencilReference(*m_cmdBuffer, vk::VK_STENCIL_FACE_FRONT_BIT, stencilFrontReference);
+		m_vk.cmdSetStencilCompareMask(*m_cmdBuffer, vk::VK_STENCIL_FACE_BACK_BIT, stencilBackCompareMask);
+		m_vk.cmdSetStencilWriteMask(*m_cmdBuffer, vk::VK_STENCIL_FACE_BACK_BIT, stencilBackWriteMask);
+		m_vk.cmdSetStencilReference(*m_cmdBuffer, vk::VK_STENCIL_FACE_BACK_BIT, stencilBackReference);
+	}
+};
+
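+// Depth bounds test: two green quads are first drawn with depth writes enabled,
+// filling the left half of the depth buffer with 0.375 and the right half with
+// 0.625. A full-screen blue quad is then drawn with the depth bounds test enabled
+// and dynamic bounds [0.5, 0.75], so only the right half (stored depth inside the
+// bounds) turns blue.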
+class DepthBoundsParamTestInstance : public DepthStencilBaseCase
+{
+public:
+	DepthBoundsParamTestInstance (Context &context, ShaderMap shaders)
+		: DepthStencilBaseCase (context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+	{
+		// Check if depth bounds test is supported
+		{
+			const vk::VkPhysicalDeviceFeatures& deviceFeatures = m_context.getDeviceFeatures();
+
+			if (!deviceFeatures.depthBounds)
+				throw tcu::NotSupportedError("Depth bounds test is unsupported");
+		}
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, 0.375f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(0.0f, 1.0f, 0.375f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 0.375f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(0.0f, -1.0f, 0.375f, 1.0f), tcu::RGBA::green().toVec()));
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(0.0f, 1.0f, 0.625f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 0.625f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(0.0f, -1.0f, 0.625f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, 0.625f, 1.0f), tcu::RGBA::green().toVec()));
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+
+		m_depthStencilState_1 = PipelineCreateInfo::DepthStencilState(
+			vk::VK_TRUE, vk::VK_TRUE, vk::VK_COMPARE_OP_ALWAYS, vk::VK_FALSE);
+
+		// enable depth bounds test
+		m_depthStencilState_2 = PipelineCreateInfo::DepthStencilState(
+			vk::VK_FALSE, vk::VK_FALSE, vk::VK_COMPARE_OP_NEVER, vk::VK_TRUE);
+
+		DepthStencilBaseCase::initialize();
+	}
+
+	virtual tcu::TestStatus iterate (void)
+	{
+		tcu::TestLog &log = m_context.getTestContext().getLog();
+		const vk::VkQueue queue = m_context.getUniversalQueue();
+
+		beginRenderPass();
+
+		// set states here
+		setDynamicViewportState(WIDTH, HEIGHT);
+		setDynamicRasterizationState();
+		setDynamicBlendState();
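+		// dynamic depth bounds [0.5, 0.75]: the second pipeline discards fragments
+		// whose stored depth lies outside this range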
+		setDynamicDepthStencilState(0.5f, 0.75f);
+
+		const vk::VkDeviceSize vertexBufferOffset = 0;
+		const vk::VkBuffer vertexBuffer = m_vertexBuffer->object();
+		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline_1);
+		m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 0, 0);
+		m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 4, 0);
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline_2);
+		m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 8, 0);
+
+		m_vk.cmdEndRenderPass(*m_cmdBuffer);
+		m_vk.endCommandBuffer(*m_cmdBuffer);
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,
+			1,									// deUint32					commandBufferCount;
+			&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// validation
+		{
+			VK_CHECK(m_vk.queueWaitIdle(queue));
+
+			tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+			referenceFrame.allocLevel(0);
+
+			const deInt32 frameWidth = referenceFrame.getWidth();
+			const deInt32 frameHeight = referenceFrame.getHeight();
+
+			tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
+			for (int y = 0; y < frameHeight; y++)
+			{
+				const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+				for (int x = 0; x < frameWidth; x++)
+				{
+					const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+					if (xCoord >= 0.0f && xCoord <= 1.0f && yCoord >= -1.0f && yCoord <= 1.0f)
+						referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f), x, y);
+					else
+						referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f), x, y);
+				}
+			}
+
+			const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+			const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+				vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+			if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+				referenceFrame.getLevel(0), renderedFrame, 0.05f,
+				tcu::COMPARE_LOG_RESULT))
+			{
+				return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Image verification failed");
+			}
+
+			return tcu::TestStatus(QP_TEST_RESULT_PASS, "Image verification passed");
+		}
+	}
+};
+
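+// Basic stencil test: the first draw writes (reference 0x0F through writeMask) into
+// the stencil buffer with VK_COMPARE_OP_ALWAYS; the second draw uses
+// VK_COMPARE_OP_EQUAL with compare mask readMask and reference expectedValue, so the
+// blue quad is kept only where the masked values match. The expected colour passed
+// to the test tells whether the masked compare should pass (blue) or fail (green).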
+class StencilParamsBasicTestInstance : public DepthStencilBaseCase
+{
+protected:
+	deUint32 m_writeMask;
+	deUint32 m_readMask;
+	deUint32 m_expectedValue;
+	tcu::Vec4 m_expectedColor;
+
+public:
+	StencilParamsBasicTestInstance (Context& context, const char* vertexShaderName, const char* fragmentShaderName,
+									const deUint32 writeMask, const deUint32 readMask,
+									const deUint32 expectedValue, const tcu::Vec4 expectedColor)
+		: DepthStencilBaseCase	(context, vertexShaderName, fragmentShaderName)
+		, m_writeMask			(writeMask)
+		, m_readMask			(readMask)
+		, m_expectedValue		(expectedValue)
+		, m_expectedColor		(expectedColor)
+	{
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+
+		const PipelineCreateInfo::DepthStencilState::StencilOpState frontState_1 =
+			PipelineCreateInfo::DepthStencilState::StencilOpState(
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_COMPARE_OP_ALWAYS);
+
+		const PipelineCreateInfo::DepthStencilState::StencilOpState backState_1 =
+			PipelineCreateInfo::DepthStencilState::StencilOpState(
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_COMPARE_OP_ALWAYS);
+
+		const PipelineCreateInfo::DepthStencilState::StencilOpState frontState_2 =
+			PipelineCreateInfo::DepthStencilState::StencilOpState(
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_COMPARE_OP_EQUAL);
+
+		const PipelineCreateInfo::DepthStencilState::StencilOpState backState_2 =
+			PipelineCreateInfo::DepthStencilState::StencilOpState(
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_COMPARE_OP_EQUAL);
+
+		// enable stencil test
+		m_depthStencilState_1 = PipelineCreateInfo::DepthStencilState(
+			vk::VK_FALSE, vk::VK_FALSE, vk::VK_COMPARE_OP_NEVER, vk::VK_FALSE, vk::VK_TRUE, frontState_1, backState_1);
+
+		m_depthStencilState_2 = PipelineCreateInfo::DepthStencilState(
+			vk::VK_FALSE, vk::VK_FALSE, vk::VK_COMPARE_OP_NEVER, vk::VK_FALSE, vk::VK_TRUE, frontState_2, backState_2);
+
+		DepthStencilBaseCase::initialize();
+	}
+
+	virtual tcu::TestStatus iterate (void)
+	{
+		tcu::TestLog &log = m_context.getTestContext().getLog();
+		const vk::VkQueue queue = m_context.getUniversalQueue();
+
+		beginRenderPass();
+
+		// set states here
+		setDynamicViewportState(WIDTH, HEIGHT);
+		setDynamicRasterizationState();
+		setDynamicBlendState();
+
+		const vk::VkDeviceSize vertexBufferOffset = 0;
+		const vk::VkBuffer vertexBuffer = m_vertexBuffer->object();
+		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline_1);
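+		// first draw: stencil test always passes and REPLACE stores (0x0F & m_writeMask),
+		// since the stencil buffer was cleared to 0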
+		setDynamicDepthStencilState(-1.0f, 1.0f, 0xFF, m_writeMask, 0x0F, 0xFF, m_writeMask, 0x0F);
+		m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 0, 0);
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline_2);
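+		// second draw: passes only where (m_expectedValue & m_readMask) equals
+		// (stored stencil value & m_readMask)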
+		setDynamicDepthStencilState(-1.0f, 1.0f, m_readMask, 0xFF, m_expectedValue, m_readMask, 0xFF, m_expectedValue);
+		m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 4, 0);
+
+		m_vk.cmdEndRenderPass(*m_cmdBuffer);
+		m_vk.endCommandBuffer(*m_cmdBuffer);
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,
+			1,									// deUint32					commandBufferCount;
+			&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// validation
+		{
+			VK_CHECK(m_vk.queueWaitIdle(queue));
+
+			tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+			referenceFrame.allocLevel(0);
+
+			const deInt32 frameWidth = referenceFrame.getWidth();
+			const deInt32 frameHeight = referenceFrame.getHeight();
+
+			for (int y = 0; y < frameHeight; y++)
+			{
+				const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+				for (int x = 0; x < frameWidth; x++)
+				{
+					const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+					if (xCoord >= -1.0f && xCoord <= 1.0f && yCoord >= -1.0f && yCoord <= 1.0f)
+						referenceFrame.getLevel(0).setPixel(m_expectedColor, x, y);
+				}
+			}
+
+			const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+			const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+				vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+			if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+				referenceFrame.getLevel(0), renderedFrame, 0.05f,
+				tcu::COMPARE_LOG_RESULT))
+			{
+				return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Image verification failed");
+			}
+
+			return tcu::TestStatus(QP_TEST_RESULT_PASS, "Image verification passed");
+		}
+	}
+};
+
+class StencilParamsBasicTestCase : public TestCase
+{
+protected:
+	TestInstance* createInstance(Context& context) const
+	{
+		return new StencilParamsBasicTestInstance(context, "VertexFetch.vert", "VertexFetch.frag",
+			m_writeMask, m_readMask, m_expectedValue, m_expectedColor);
+	}
+
+	virtual void initPrograms(vk::SourceCollections& programCollection) const
+	{
+		programCollection.glslSources.add("VertexFetch.vert") <<
+			glu::VertexSource(ShaderSourceProvider::getSource(m_testCtx.getArchive(), "vulkan/dynamic_state/VertexFetch.vert"));
+
+		programCollection.glslSources.add("VertexFetch.frag") <<
+			glu::FragmentSource(ShaderSourceProvider::getSource(m_testCtx.getArchive(), "vulkan/dynamic_state/VertexFetch.frag"));
+	}
+
+	deUint32 m_writeMask;
+	deUint32 m_readMask;
+	deUint32 m_expectedValue;
+	tcu::Vec4 m_expectedColor;
+
+public:
+	StencilParamsBasicTestCase (tcu::TestContext& context, const char *name, const char *description,
+								const deUint32 writeMask, const deUint32 readMask,
+								const deUint32 expectedValue, const tcu::Vec4 expectedColor)
+		: TestCase				(context, name, description)
+		, m_writeMask			(writeMask)
+		, m_readMask			(readMask)
+		, m_expectedValue		(expectedValue)
+		, m_expectedColor		(expectedColor)
+	{
+	}
+};
+
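+// Advanced stencil test: a centred quad tags the stencil buffer with 0x0E
+// (reference 0x0F written through write mask 0x0E); a full-screen quad drawn with
+// VK_COMPARE_OP_NOT_EQUAL and reference 0x0E then covers everything except that
+// centre region, giving a green centre on a blue background.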
+class StencilParamsAdvancedTestInstance : public DepthStencilBaseCase
+{
+public:
+	StencilParamsAdvancedTestInstance (Context& context, ShaderMap shaders)
+		: DepthStencilBaseCase (context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+	{
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-0.5f, 0.5f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(0.5f, 0.5f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-0.5f, -0.5f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(0.5f, -0.5f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+
+		const PipelineCreateInfo::DepthStencilState::StencilOpState frontState_1 =
+			PipelineCreateInfo::DepthStencilState::StencilOpState(
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_COMPARE_OP_ALWAYS);
+
+		const PipelineCreateInfo::DepthStencilState::StencilOpState backState_1 =
+			PipelineCreateInfo::DepthStencilState::StencilOpState(
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_COMPARE_OP_ALWAYS);
+
+		const PipelineCreateInfo::DepthStencilState::StencilOpState frontState_2 =
+			PipelineCreateInfo::DepthStencilState::StencilOpState(
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_COMPARE_OP_NOT_EQUAL);
+
+		const PipelineCreateInfo::DepthStencilState::StencilOpState backState_2 =
+			PipelineCreateInfo::DepthStencilState::StencilOpState(
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_STENCIL_OP_REPLACE,
+			vk::VK_COMPARE_OP_NOT_EQUAL);
+
+		// enable stencil test
+		m_depthStencilState_1 = PipelineCreateInfo::DepthStencilState(
+			vk::VK_FALSE, vk::VK_FALSE, vk::VK_COMPARE_OP_NEVER, vk::VK_FALSE, vk::VK_TRUE, frontState_1, backState_1);
+
+		m_depthStencilState_2 = PipelineCreateInfo::DepthStencilState(
+			vk::VK_FALSE, vk::VK_FALSE, vk::VK_COMPARE_OP_NEVER, vk::VK_FALSE, vk::VK_TRUE, frontState_2, backState_2);
+
+		DepthStencilBaseCase::initialize();
+	}
+
+	virtual tcu::TestStatus iterate (void)
+	{
+		tcu::TestLog &log = m_context.getTestContext().getLog();
+		const vk::VkQueue queue = m_context.getUniversalQueue();
+
+		beginRenderPass();
+
+		// set states here
+		setDynamicViewportState(WIDTH, HEIGHT);
+		setDynamicRasterizationState();
+		setDynamicBlendState();
+
+		const vk::VkDeviceSize vertexBufferOffset = 0;
+		const vk::VkBuffer vertexBuffer = m_vertexBuffer->object();
+		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline_1);
+		setDynamicDepthStencilState(-1.0f, 1.0f, 0xFF, 0x0E, 0x0F, 0xFF, 0x0E, 0x0F);
+		m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 0, 0);
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline_2);
+		setDynamicDepthStencilState(-1.0f, 1.0f, 0xFF, 0xFF, 0x0E, 0xFF, 0xFF, 0x0E);
+		m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 4, 0);
+
+		m_vk.cmdEndRenderPass(*m_cmdBuffer);
+		m_vk.endCommandBuffer(*m_cmdBuffer);
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,
+			1,									// deUint32					commandBufferCount;
+			&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// validation
+		{
+			VK_CHECK(m_vk.queueWaitIdle(queue));
+
+			tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+			referenceFrame.allocLevel(0);
+
+			const deInt32 frameWidth = referenceFrame.getWidth();
+			const deInt32 frameHeight = referenceFrame.getHeight();
+
+			for (int y = 0; y < frameHeight; y++)
+			{
+				const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+				for (int x = 0; x < frameWidth; x++)
+				{
+					const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+					if (xCoord >= -0.5f && xCoord <= 0.5f && yCoord >= -0.5f && yCoord <= 0.5f)
+						referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f), x, y);
+					else
+						referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f), x, y);
+				}
+			}
+
+			const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+			const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+				vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+			if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+				referenceFrame.getLevel(0), renderedFrame, 0.05f,
+				tcu::COMPARE_LOG_RESULT))
+			{
+				return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Image verification failed");
+			}
+
+			return tcu::TestStatus(QP_TEST_RESULT_PASS, "Image verification passed");
+		}
+	}
+};
+
+} //anonymous
+
+DynamicStateDSTests::DynamicStateDSTests (tcu::TestContext& testCtx)
+	: TestCaseGroup (testCtx, "ds_state", "Tests for depth stencil state")
+{
+	/* Left blank on purpose */
+}
+
+DynamicStateDSTests::~DynamicStateDSTests ()
+{
+}
+
+void DynamicStateDSTests::init (void)
+{
+	ShaderMap shaderPaths;
+	shaderPaths[glu::SHADERTYPE_VERTEX] = "vulkan/dynamic_state/VertexFetch.vert";
+	shaderPaths[glu::SHADERTYPE_FRAGMENT] = "vulkan/dynamic_state/VertexFetch.frag";
+
+	addChild(new InstanceFactory<DepthBoundsParamTestInstance>(m_testCtx, "depth_bounds", "Perform depth bounds test", shaderPaths));
+	addChild(new StencilParamsBasicTestCase(m_testCtx, "stencil_params_basic_1", "Perform basic stencil test 1", 0x0D, 0x06, 0x05, tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f)));
+	addChild(new StencilParamsBasicTestCase(m_testCtx, "stencil_params_basic_2", "Perform basic stencil test 2", 0x06, 0x02, 0x05, tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f)));
+	addChild(new InstanceFactory<StencilParamsAdvancedTestInstance>(m_testCtx, "stencil_params_advanced", "Perform advanced stencil test", shaderPaths));
+}
+
+} // DynamicState
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateDSTests.hpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateDSTests.hpp
new file mode 100644
index 0000000..e0652ad
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateDSTests.hpp
@@ -0,0 +1,61 @@
+#ifndef _VKTDYNAMICSTATEDSTESTS_HPP
+#define _VKTDYNAMICSTATEDSTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic State Depth Stencil Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+class DynamicStateDSTests : public tcu::TestCaseGroup
+{
+public:
+							DynamicStateDSTests		(tcu::TestContext& testCtx);
+							~DynamicStateDSTests	(void);
+	void					init					(void);
+
+private:
+	DynamicStateDSTests								(const DynamicStateDSTests& other);
+	DynamicStateDSTests&	operator=				(const DynamicStateDSTests& other);
+};
+
+} // DynamicState
+} // vkt
+
+#endif // _VKTDYNAMICSTATEDSTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateGeneralTests.cpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateGeneralTests.cpp
new file mode 100644
index 0000000..7da5f06
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateGeneralTests.cpp
@@ -0,0 +1,448 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic State Tests - General
+ *//*--------------------------------------------------------------------*/
+
+#include "vktDynamicStateGeneralTests.hpp"
+
+#include "vktTestCaseUtil.hpp"
+#include "vktDynamicStateTestCaseUtil.hpp"
+#include "vktDynamicStateBaseClass.hpp"
+#include "vktDynamicStateCreateInfoUtil.hpp"
+#include "vktDynamicStateImageObjectUtil.hpp"
+#include "vktDynamicStateBufferObjectUtil.hpp"
+
+#include "vkImageUtil.hpp"
+
+#include "tcuTestLog.hpp"
+#include "tcuResource.hpp"
+#include "tcuImageCompare.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuRGBA.hpp"
+
+#include "vkDefs.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+namespace
+{
+
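+// Draws the same full-screen quad twice, changing only the dynamic scissor between
+// the draws (top-left quarter, then bottom-right quarter); only those two quarters
+// should end up green.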
+class StateSwitchTestInstance : public DynamicStateBaseClass
+{
+public:
+	StateSwitchTestInstance (Context &context, ShaderMap shaders)
+		: DynamicStateBaseClass (context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+	{
+		m_topology = vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+
+		DynamicStateBaseClass::initialize();
+	}
+
+	virtual tcu::TestStatus iterate (void)
+	{
+		tcu::TestLog& log		= m_context.getTestContext().getLog();
+		const vk::VkQueue queue = m_context.getUniversalQueue();
+
+		beginRenderPass();
+
+		// bind states here
+		vk::VkViewport viewport = { 0, 0, (float)WIDTH, (float)HEIGHT, 0.0f, 0.0f };
+		vk::VkRect2D scissor_1	= { { 0, 0 }, { WIDTH / 2, HEIGHT / 2 } };
+		vk::VkRect2D scissor_2	= { { WIDTH / 2, HEIGHT / 2 }, { WIDTH / 2, HEIGHT / 2 } };
+
+		setDynamicRasterizationState();
+		setDynamicBlendState();
+		setDynamicDepthStencilState();
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
+		const vk::VkDeviceSize vertexBufferOffset	= 0;
+		const vk::VkBuffer vertexBuffer				= m_vertexBuffer->object();
+		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+		// bind first state
+		setDynamicViewportState(1, &viewport, &scissor_1);
+		m_vk.cmdDraw(*m_cmdBuffer, static_cast<deUint32>(m_data.size()), 1, 0, 0);
+
+		// bind second state
+		setDynamicViewportState(1, &viewport, &scissor_2);
+		m_vk.cmdDraw(*m_cmdBuffer, static_cast<deUint32>(m_data.size()), 1, 0, 0);
+
+		m_vk.cmdEndRenderPass(*m_cmdBuffer);
+		m_vk.endCommandBuffer(*m_cmdBuffer);
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,
+			1,									// deUint32					commandBufferCount;
+			&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		//validation
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+
+		tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+		referenceFrame.allocLevel(0);
+
+		const deInt32 frameWidth	= referenceFrame.getWidth();
+		const deInt32 frameHeight	= referenceFrame.getHeight();
+
+		tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
+		for (int y = 0; y < frameHeight; y++)
+		{
+			const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+			for (int x = 0; x < frameWidth; x++)
+			{
+				const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+				if ((yCoord >= -1.0f && yCoord <= 0.0f && xCoord >= -1.0f && xCoord <= 0.0f) ||
+					(yCoord > 0.0f && yCoord <= 1.0f && xCoord > 0.0f && xCoord < 1.0f))
+					referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f), x, y);
+			}
+		}
+
+		const vk::VkOffset3D zeroOffset					= { 0, 0, 0 };
+		const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+																						  vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT,
+																						  vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+		if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+			referenceFrame.getLevel(0), renderedFrame, 0.05f,
+			tcu::COMPARE_LOG_RESULT))
+		{
+			return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Image verification failed");
+		}
+
+		return tcu::TestStatus(QP_TEST_RESULT_PASS, "Image verification passed");
+	}
+};
+
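+// Same rendering as the state switch test, but the dynamic states are set in a
+// different order and re-set after the pipeline bind, to check that the order of
+// binding does not affect the result.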
+class BindOrderTestInstance : public DynamicStateBaseClass
+{
+public:
+	BindOrderTestInstance (Context& context, ShaderMap shaders)
+		: DynamicStateBaseClass (context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+	{
+		m_topology = vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+
+		DynamicStateBaseClass::initialize();
+	}
+
+	virtual tcu::TestStatus iterate (void)
+	{
+		tcu::TestLog &log		= m_context.getTestContext().getLog();
+		const vk::VkQueue queue = m_context.getUniversalQueue();
+
+		beginRenderPass();
+
+		// bind states here
+		vk::VkViewport viewport = { 0.0f, 0.0f, (float)WIDTH, (float)HEIGHT, 0.0f, 0.0f };
+		vk::VkRect2D scissor_1	= { { 0, 0 }, { WIDTH / 2, HEIGHT / 2 } };
+		vk::VkRect2D scissor_2	= { { WIDTH / 2, HEIGHT / 2 }, { WIDTH / 2, HEIGHT / 2 } };
+
+		setDynamicRasterizationState();
+		setDynamicBlendState();
+		setDynamicDepthStencilState();
+		setDynamicViewportState(1, &viewport, &scissor_1);
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
+		const vk::VkDeviceSize vertexBufferOffset = 0;
+		const vk::VkBuffer vertexBuffer = m_vertexBuffer->object();
+		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+		// rebind in different order
+		setDynamicBlendState();
+		setDynamicRasterizationState();
+		setDynamicDepthStencilState();
+
+		// bind first state
+		setDynamicViewportState(1, &viewport, &scissor_1);
+		m_vk.cmdDraw(*m_cmdBuffer, static_cast<deUint32>(m_data.size()), 1, 0, 0);
+
+		setDynamicViewportState(1, &viewport, &scissor_2);
+		m_vk.cmdDraw(*m_cmdBuffer, static_cast<deUint32>(m_data.size()), 1, 0, 0);
+
+		m_vk.cmdEndRenderPass(*m_cmdBuffer);
+		m_vk.endCommandBuffer(*m_cmdBuffer);
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,
+			1,									// deUint32					commandBufferCount;
+			&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		//validation
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+
+		tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+		referenceFrame.allocLevel(0);
+
+		const deInt32 frameWidth = referenceFrame.getWidth();
+		const deInt32 frameHeight = referenceFrame.getHeight();
+
+		tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
+		for (int y = 0; y < frameHeight; y++)
+		{
+			const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+			for (int x = 0; x < frameWidth; x++)
+			{
+				const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+				if ((yCoord >= -1.0f && yCoord <= 0.0f && xCoord >= -1.0f && xCoord <= 0.0f) ||
+					(yCoord > 0.0f && yCoord <= 1.0f && xCoord > 0.0f && xCoord < 1.0f))
+					referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f), x, y);
+			}
+		}
+
+		const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+		const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+			vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+		if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+			referenceFrame.getLevel(0), renderedFrame, 0.05f,
+			tcu::COMPARE_LOG_RESULT))
+		{
+			return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Image verification failed");
+		}
+
+		return tcu::TestStatus(QP_TEST_RESULT_PASS, "Image verification passed");
+	}
+};
+
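+// Checks that dynamic state persists across pipeline binds: two pipelines
+// (triangle strip and triangle list) declare the same dynamic states; only the
+// viewport/scissor is re-set between the draws, so the blend, rasterization and
+// depth/stencil state set before the first draw must still apply to the second.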
+class StatePersistenceTestInstance : public DynamicStateBaseClass
+{
+protected:
+	vk::Move<vk::VkPipeline> m_pipelineAdditional;
+
+public:
+	StatePersistenceTestInstance (Context& context, ShaderMap shaders)
+		: DynamicStateBaseClass (context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+	{
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, 1.0f, 1.0f), tcu::RGBA::blue().toVec()));
+
+		DynamicStateBaseClass::initialize();
+	}
+	virtual void initPipeline (const vk::VkDevice device)
+	{
+		// shaders
+		const vk::Unique<vk::VkShaderModule> vs (createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_vertexShaderName), 0));
+		const vk::Unique<vk::VkShaderModule> fs (createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_fragmentShaderName), 0));
+
+		const PipelineCreateInfo::ColorBlendState::Attachment vkCbAttachmentState;
+
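+		// Create two pipelines that differ only in primitive topology (strip vs. list);
+		// both declare the same dynamic states, so bound dynamic state should carry over.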
+		PipelineCreateInfo pipelineCreateInfo_1(*m_pipelineLayout, *m_renderPass, 0, 0);
+		pipelineCreateInfo_1.addShader(PipelineCreateInfo::PipelineShaderStage(*vs, "main", vk::VK_SHADER_STAGE_VERTEX_BIT));
+		pipelineCreateInfo_1.addShader(PipelineCreateInfo::PipelineShaderStage(*fs, "main", vk::VK_SHADER_STAGE_FRAGMENT_BIT));
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::VertexInputState(m_vertexInputState));
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::InputAssemblerState(vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP));
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::ColorBlendState(1, &vkCbAttachmentState));
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::ViewportState(1));
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::DepthStencilState());
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::RasterizerState());
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::MultiSampleState());
+		pipelineCreateInfo_1.addState(PipelineCreateInfo::DynamicState());
+
+		PipelineCreateInfo pipelineCreateInfo_2(*m_pipelineLayout, *m_renderPass, 0, 0);
+		pipelineCreateInfo_2.addShader(PipelineCreateInfo::PipelineShaderStage(*vs, "main", vk::VK_SHADER_STAGE_VERTEX_BIT));
+		pipelineCreateInfo_2.addShader(PipelineCreateInfo::PipelineShaderStage(*fs, "main", vk::VK_SHADER_STAGE_FRAGMENT_BIT));
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::VertexInputState(m_vertexInputState));
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::InputAssemblerState(vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST));
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::ColorBlendState(1, &vkCbAttachmentState));
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::ViewportState(1));
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::DepthStencilState());
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::RasterizerState());
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::MultiSampleState());
+		pipelineCreateInfo_2.addState(PipelineCreateInfo::DynamicState());
+
+		m_pipeline = vk::createGraphicsPipeline(m_vk, device, DE_NULL, &pipelineCreateInfo_1);
+		m_pipelineAdditional = vk::createGraphicsPipeline(m_vk, device, DE_NULL, &pipelineCreateInfo_2);
+	}
+
+	virtual tcu::TestStatus iterate(void)
+	{
+		tcu::TestLog &log				= m_context.getTestContext().getLog();
+		const vk::VkQueue queue			= m_context.getUniversalQueue();
+
+		beginRenderPass();
+
+		// bind states here
+		const vk::VkViewport viewport	= { 0.0f, 0.0f, (float)WIDTH, (float)HEIGHT, 0.0f, 0.0f };
+		const vk::VkRect2D scissor_1	= { { 0, 0 }, { WIDTH / 2, HEIGHT / 2 } };
+		const vk::VkRect2D scissor_2	= { { WIDTH / 2, HEIGHT / 2 }, { WIDTH / 2, HEIGHT / 2 } };
+
+		setDynamicRasterizationState();
+		setDynamicBlendState();
+		setDynamicDepthStencilState();
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
+		const vk::VkDeviceSize vertexBufferOffset = 0;
+		const vk::VkBuffer vertexBuffer = m_vertexBuffer->object();
+		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+		// bind first state
+		setDynamicViewportState(1, &viewport, &scissor_1);
+		// draw quad using vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP
+		m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 0, 0);
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineAdditional);
+
+		// bind second state
+		setDynamicViewportState(1, &viewport, &scissor_2);
+		// draw quad using vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST
+		m_vk.cmdDraw(*m_cmdBuffer, 6, 1, 4, 0);
+
+		m_vk.cmdEndRenderPass(*m_cmdBuffer);
+		m_vk.endCommandBuffer(*m_cmdBuffer);
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,									// deUint32					commandBufferCount;
+			&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// wait for completion before validating the rendered image
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+
+		tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+		referenceFrame.allocLevel(0);
+
+		const deInt32 frameWidth	= referenceFrame.getWidth();
+		const deInt32 frameHeight	= referenceFrame.getHeight();
+
+		tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
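+		// Reference: the strip draw covers the top-left quadrant in green, the list draw
+		// covers the bottom-right quadrant in blue.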
+		for (int y = 0; y < frameHeight; y++)
+		{
+			const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+			for (int x = 0; x < frameWidth; x++)
+			{
+				const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+				if (yCoord >= -1.0f && yCoord <= 0.0f && xCoord >= -1.0f && xCoord <= 0.0f)
+					referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f), x, y);
+				else if (yCoord > 0.0f && yCoord <= 1.0f && xCoord > 0.0f && xCoord < 1.0f)
+					referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f), x, y);
+			}
+		}
+
+		const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+		const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+			vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+		if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+			referenceFrame.getLevel(0), renderedFrame, 0.05f,
+			tcu::COMPARE_LOG_RESULT))
+		{
+			return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Image verification failed");
+		}
+
+		return tcu::TestStatus(QP_TEST_RESULT_PASS, "Image verification passed");
+	}
+};
+
+} //anonymous
+
+DynamicStateGeneralTests::DynamicStateGeneralTests (tcu::TestContext& testCtx)
+	: TestCaseGroup (testCtx, "general_state", "General tests for dynamic states")
+{
+	/* Left blank on purpose */
+}
+
+DynamicStateGeneralTests::~DynamicStateGeneralTests (void) {}
+
+void DynamicStateGeneralTests::init (void)
+{
+	ShaderMap shaderPaths;
+	shaderPaths[glu::SHADERTYPE_VERTEX] = "vulkan/dynamic_state/VertexFetch.vert";
+	shaderPaths[glu::SHADERTYPE_FRAGMENT] = "vulkan/dynamic_state/VertexFetch.frag";
+
+	addChild(new InstanceFactory<StateSwitchTestInstance>(m_testCtx, "state_switch", "Perform multiple draws with different VP states (scissor test)", shaderPaths));
+	addChild(new InstanceFactory<BindOrderTestInstance>(m_testCtx, "bind_order", "Check that pipeline configuration does not depend on the order in which states are bound", shaderPaths));
+	addChild(new InstanceFactory<StatePersistenceTestInstance>(m_testCtx, "state_persistence", "Check that bound dynamic state persists across pipeline binds", shaderPaths));
+}
+
+} // DynamicState
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateGeneralTests.hpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateGeneralTests.hpp
new file mode 100644
index 0000000..da0ca99
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateGeneralTests.hpp
@@ -0,0 +1,61 @@
+#ifndef _VKTDYNAMICSTATEGENERALTESTS_HPP
+#define _VKTDYNAMICSTATEGENERALTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic State Tests - General
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+class DynamicStateGeneralTests : public tcu::TestCaseGroup
+{
+public:
+								DynamicStateGeneralTests	(tcu::TestContext& testCtx);
+								~DynamicStateGeneralTests	(void);
+	void						init						(void);
+
+private:
+	DynamicStateGeneralTests								(const DynamicStateGeneralTests& other);
+	DynamicStateGeneralTests&	operator=					(const DynamicStateGeneralTests& other);
+};
+
+} // DynamicState
+} // vkt
+
+#endif // _VKTDYNAMICSTATEGENERALTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateImageObjectUtil.cpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateImageObjectUtil.cpp
new file mode 100644
index 0000000..ba3ee95
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateImageObjectUtil.cpp
@@ -0,0 +1,939 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image Object Util
+ *//*--------------------------------------------------------------------*/
+
+#include "vktDynamicStateImageObjectUtil.hpp"
+
+#include "tcuSurface.hpp"
+#include "tcuVectorUtil.hpp"
+
+#include "vkRefUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "vktDynamicStateCreateInfoUtil.hpp"
+#include "vktDynamicStateBufferObjectUtil.hpp"
+
+#include "tcuTextureUtil.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
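+// MemoryOp::pack copies tightly packed pixel data into a pitched (linear-tiled) layout,
+// and MemoryOp::unpack does the reverse. A row or depth pitch of zero means "tightly
+// packed" and is replaced with the minimal pitch for the given extent.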
+void MemoryOp::pack (int				pixelSize,
+					 int				width,
+					 int				height,
+					 int				depth,
+					 vk::VkDeviceSize	rowPitchOrZero,
+					 vk::VkDeviceSize	depthPitchOrZero,
+					 const void *		srcBuffer,
+					 void *				destBuffer)
+{
+	vk::VkDeviceSize rowPitch	= rowPitchOrZero;
+	vk::VkDeviceSize depthPitch	= depthPitchOrZero;
+
+	if (rowPitch == 0)
+		rowPitch = width * pixelSize;
+
+	if (depthPitch == 0)
+		depthPitch = rowPitch * height;
+
+	const vk::VkDeviceSize size = depthPitch * depth;
+
+	const char* srcRow			= reinterpret_cast<const char*>(srcBuffer);
+	const char* const srcStart	= srcRow;
+	char* dstRow				= reinterpret_cast<char*>(destBuffer);
+	char* const dstStart		= dstRow;
+
+	if (rowPitch == static_cast<vk::VkDeviceSize>(width * pixelSize) &&
+		depthPitch == static_cast<vk::VkDeviceSize>(rowPitch * height))
+	{
+		// fast path
+		deMemcpy(dstRow, srcRow, static_cast<size_t>(size));
+	}
+	else
+	{
+		// slower, per row path
+		for (int d = 0; d < depth; d++)
+		{
+			vk::VkDeviceSize offsetDepthDst = d * depthPitch;
+			vk::VkDeviceSize offsetDepthSrc = d * (pixelSize * width * height);
+			srcRow = srcStart + offsetDepthSrc;
+			dstRow = dstStart + offsetDepthDst;
+			for (int r = 0; r < height; ++r)
+			{
+				deMemcpy(dstRow, srcRow, static_cast<size_t>(pixelSize * width));
+				srcRow += pixelSize * width;
+				dstRow += rowPitch;
+			}
+		}
+	}
+}
+
+void MemoryOp::unpack (int					pixelSize,
+					   int					width,
+					   int					height,
+					   int					depth,
+					   vk::VkDeviceSize		rowPitchOrZero,
+					   vk::VkDeviceSize		depthPitchOrZero,
+					   const void *			srcBuffer,
+					   void *				destBuffer)
+{
+	vk::VkDeviceSize rowPitch	= rowPitchOrZero;
+	vk::VkDeviceSize depthPitch = depthPitchOrZero;
+
+	if (rowPitch == 0)
+		rowPitch = width * pixelSize;
+
+	if (depthPitch == 0)
+		depthPitch = rowPitch * height;
+
+	const vk::VkDeviceSize size = depthPitch * depth;
+
+	const char* srcRow			= reinterpret_cast<const char*>(srcBuffer);
+	const char* const srcStart	= srcRow;
+	char* dstRow				= reinterpret_cast<char*>(destBuffer);
+	char* const dstStart		= dstRow;
+
+	if (rowPitch == static_cast<vk::VkDeviceSize>(width * pixelSize) &&
+		depthPitch == static_cast<vk::VkDeviceSize>(rowPitch * height))
+	{
+		// fast path
+		deMemcpy(dstRow, srcRow, static_cast<size_t>(size));
+	}
+	else
+	{
+		// slower, per row path
+		for (size_t d = 0; d < (size_t)depth; d++)
+		{
+			vk::VkDeviceSize offsetDepthDst = d * (pixelSize * width * height);
+			vk::VkDeviceSize offsetDepthSrc = d * depthPitch;
+			srcRow = srcStart + offsetDepthSrc;
+			dstRow = dstStart + offsetDepthDst;
+			for (int r = 0; r < height; ++r)
+			{
+				deMemcpy(dstRow, srcRow, static_cast<size_t>(pixelSize * width));
+				srcRow += rowPitch;
+				dstRow += pixelSize * width;
+			}
+		}
+	}
+}
+
+Image::Image (const vk::DeviceInterface& vk,
+			  vk::VkDevice				device,
+			  vk::VkFormat				format,
+			  const vk::VkExtent3D&		extend,
+			  deUint32					levelCount,
+			  deUint32					layerCount,
+			  vk::Move<vk::VkImage>		object_)
+	: m_allocation		(DE_NULL)
+	, m_object			(object_)
+	, m_format			(format)
+	, m_extent			(extend)
+	, m_levelCount		(levelCount)
+	, m_layerCount		(layerCount)
+	, m_vk				(vk)
+	, m_device			(device)
+{
+}
+
+tcu::ConstPixelBufferAccess Image::readSurface (vk::VkQueue					queue,
+												vk::Allocator&				allocator,
+												vk::VkImageLayout			layout,
+												vk::VkOffset3D				offset,
+												int							width,
+												int							height,
+												vk::VkImageAspectFlagBits	aspect,
+												unsigned int				mipLevel,
+												unsigned int				arrayElement)
+{
+	m_pixelAccessData.resize(width * height * vk::mapVkFormat(m_format).getPixelSize());
+	deMemset(m_pixelAccessData.data(), 0, m_pixelAccessData.size());
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		read(queue, allocator, layout, offset, width, height, 1, mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_2D,
+		m_pixelAccessData.data());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		readUsingBuffer(queue, allocator, layout, offset, width, height, 1, mipLevel, arrayElement, aspect, m_pixelAccessData.data());
+	}
+	return tcu::ConstPixelBufferAccess(vk::mapVkFormat(m_format), width, height, 1, m_pixelAccessData.data());
+}
+
+tcu::ConstPixelBufferAccess Image::readVolume (vk::VkQueue					queue,
+											   vk::Allocator&				allocator,
+											   vk::VkImageLayout			layout,
+											   vk::VkOffset3D				offset,
+											   int							width,
+											   int							height,
+											   int							depth,
+											   vk::VkImageAspectFlagBits	aspect,
+											   unsigned int					mipLevel,
+											   unsigned int					arrayElement)
+{
+	m_pixelAccessData.resize(width * height * depth * vk::mapVkFormat(m_format).getPixelSize());
+	deMemset(m_pixelAccessData.data(), 0, m_pixelAccessData.size());
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		read(queue, allocator, layout, offset, width, height, depth, mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_3D,
+		m_pixelAccessData.data());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		readUsingBuffer(queue, allocator, layout, offset, width, height, depth, mipLevel, arrayElement, aspect, m_pixelAccessData.data());
+	}
+	return tcu::ConstPixelBufferAccess(vk::mapVkFormat(m_format), width, height, depth, m_pixelAccessData.data());
+}
+
+tcu::ConstPixelBufferAccess Image::readSurface1D(vk::VkQueue				queue,
+												 vk::Allocator&				allocator,
+												 vk::VkImageLayout			layout,
+												 vk::VkOffset3D				offset,
+												 int						width,
+												 vk::VkImageAspectFlagBits	aspect,
+												 unsigned int				mipLevel,
+												 unsigned int				arrayElement)
+{
+	m_pixelAccessData.resize(width * vk::mapVkFormat(m_format).getPixelSize());
+	deMemset(m_pixelAccessData.data(), 0, m_pixelAccessData.size());
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		read(queue, allocator, layout, offset, width, 1, 1, mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_1D,
+		m_pixelAccessData.data());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		readUsingBuffer(queue, allocator, layout, offset, width, 1, 1, mipLevel, arrayElement, aspect,
+		m_pixelAccessData.data());
+	}
+	return tcu::ConstPixelBufferAccess(vk::mapVkFormat(m_format), width, 1, 1, m_pixelAccessData.data());
+}
+
+void Image::read (vk::VkQueue					queue,
+				  vk::Allocator&				allocator,
+				  vk::VkImageLayout				layout,
+				  vk::VkOffset3D				offset,
+				  int							width,
+				  int							height,
+				  int							depth,
+				  unsigned int					mipLevel,
+				  unsigned int					arrayElement,
+				  vk::VkImageAspectFlagBits		aspect,
+				  vk::VkImageType				type,
+				  void *						data)
+{
+	DE_ASSERT(layout == vk::VK_IMAGE_LAYOUT_GENERAL || layout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
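+	// Copy the requested region into a host-visible, linear-tiled staging image and
+	// read it back through the CPU mapping.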
+	de::SharedPtr<Image> stagingResource = copyToLinearImage(queue, allocator, layout, offset, width,
+															 height, depth, mipLevel, arrayElement, aspect, type);
+	const vk::VkOffset3D zeroOffset = {0, 0, 0};
+	stagingResource->readLinear(zeroOffset, width, height, depth, 0, 0, aspect, data);
+}
+
+void Image::readUsingBuffer (vk::VkQueue				queue,
+							 vk::Allocator&				allocator,
+							 vk::VkImageLayout			layout,
+							 vk::VkOffset3D				offset,
+							 int						width,
+							 int						height,
+							 int						depth,
+							 unsigned int				mipLevel,
+							 unsigned int				arrayElement,
+							 vk::VkImageAspectFlagBits	aspect,
+							 void *						data)
+{
+	DE_ASSERT(layout == vk::VK_IMAGE_LAYOUT_GENERAL || layout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+	de::SharedPtr<Buffer> stagingResource;
+
+	bool isCombinedType = isCombinedDepthStencilType(vk::mapVkFormat(m_format).type);
+	vk::VkDeviceSize bufferSize = 0;
+
+	if (!isCombinedType)
+		bufferSize = vk::mapVkFormat(m_format).getPixelSize() * width * height * depth;
+
+	if (isCombinedType)
+	{
+		int pixelSize = 0;
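+		// Only one aspect of a combined depth/stencil format is copied at a time,
+		// so size the staging buffer for that aspect's per-pixel byte count.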
+		switch (m_format)
+		{
+			case vk::VK_FORMAT_D16_UNORM_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 2 : 1;
+				break;
+			case  vk::VK_FORMAT_D32_SFLOAT_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 4 : 1;
+				break;
+			case vk::VK_FORMAT_X8_D24_UNORM_PACK32:
+			case vk::VK_FORMAT_D24_UNORM_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 3 : 1;
+				break;
+
+			default:
+				DE_FATAL("Not implemented");
+		}
+		bufferSize = pixelSize*width*height*depth;
+	}
+
+	BufferCreateInfo stagingBufferResourceCreateInfo(bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT | vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
+	stagingResource = Buffer::createAndAlloc(m_vk, m_device, stagingBufferResourceCreateInfo, allocator, vk::MemoryRequirement::HostVisible);
+
+	{
+		//todo [scygan] get proper queueFamilyIndex
+		CmdPoolCreateInfo copyCmdPoolCreateInfo(0);
+		vk::Unique<vk::VkCommandPool> copyCmdPool(vk::createCommandPool(m_vk, m_device, &copyCmdPoolCreateInfo));
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*copyCmdPool,										// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					commandBufferCount;
+		};
+		vk::Unique<vk::VkCommandBuffer> copyCmdBuffer(vk::allocateCommandBuffer(m_vk, m_device, &cmdBufferAllocateInfo));
+
+		CmdBufferBeginInfo beginInfo;
+		VK_CHECK(m_vk.beginCommandBuffer(*copyCmdBuffer, &beginInfo));
+
+		if (layout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
+		{
+			layout = vk::VK_IMAGE_LAYOUT_GENERAL;
+
+			vk::VkImageMemoryBarrier barrier;
+			barrier.sType = vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+			barrier.pNext = DE_NULL;
+			barrier.srcAccessMask = 0;
+			barrier.dstAccessMask = 0;
+			barrier.oldLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
+			barrier.newLayout = vk::VK_IMAGE_LAYOUT_GENERAL;
+			barrier.srcQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.dstQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.image = object();
+
+			barrier.subresourceRange.aspectMask = aspect;
+			barrier.subresourceRange.baseMipLevel = 0;
+			barrier.subresourceRange.levelCount = m_levelCount;
+			barrier.subresourceRange.baseArrayLayer = 0;
+			barrier.subresourceRange.layerCount = m_layerCount;
+
+			m_vk.cmdPipelineBarrier(*copyCmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+									0, (const vk::VkMemoryBarrier*)DE_NULL,
+									0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+									1, &barrier);
+		}
+
+		vk::VkBufferImageCopy region =
+		{
+			0, 0, 0,
+			{ aspect, mipLevel, arrayElement, 1 },
+			offset,
+			{ (deUint32)width, (deUint32)height, (deUint32)depth }
+		};
+
+		m_vk.cmdCopyImageToBuffer(*copyCmdBuffer, object(), layout, stagingResource->object(), 1, &region);
+		VK_CHECK(m_vk.endCommandBuffer(*copyCmdBuffer));
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,									// deUint32					commandBufferCount;
+			&copyCmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// TODO: make this less intrusive
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+	}
+
+	char* destPtr = reinterpret_cast<char*>(stagingResource->getBoundMemory().getHostPtr());
+	deMemcpy(data, destPtr, static_cast<size_t>(bufferSize));
+}
+
+tcu::ConstPixelBufferAccess Image::readSurfaceLinear (vk::VkOffset3D				offset,
+													  int							width,
+													  int							height,
+													  int							depth,
+													  vk::VkImageAspectFlagBits		aspect,
+													  unsigned int					mipLevel,
+													  unsigned int					arrayElement)
+{
+	m_pixelAccessData.resize(width * height * vk::mapVkFormat(m_format).getPixelSize());
+	readLinear(offset, width, height, depth, mipLevel, arrayElement, aspect, m_pixelAccessData.data());
+	return tcu::ConstPixelBufferAccess(vk::mapVkFormat(m_format), width, height, 1, m_pixelAccessData.data());
+}
+
+void Image::readLinear (vk::VkOffset3D				offset,
+						int							width,
+						int							height,
+						int							depth,
+						unsigned int				mipLevel,
+						unsigned int				arrayElement,
+						vk::VkImageAspectFlagBits	aspect,
+						void *						data)
+{
+	vk::VkImageSubresource imageSubResource = { aspect, mipLevel, arrayElement };
+
+	vk::VkSubresourceLayout imageLayout;
+	deMemset(&imageLayout, 0, sizeof(imageLayout));
+
+	m_vk.getImageSubresourceLayout(m_device, object(), &imageSubResource, &imageLayout);
+
+	const char* srcPtr = reinterpret_cast<const char*>(getBoundMemory().getHostPtr());
+	srcPtr += imageLayout.offset + getPixelOffset(offset, imageLayout.rowPitch, imageLayout.depthPitch, mipLevel, arrayElement);
+
+	MemoryOp::unpack(vk::mapVkFormat(m_format).getPixelSize(), width, height, depth,
+		imageLayout.rowPitch, imageLayout.depthPitch, srcPtr, data);
+}
+
+de::SharedPtr<Image> Image::copyToLinearImage (vk::VkQueue					queue,
+											   vk::Allocator&				allocator,
+											   vk::VkImageLayout			layout,
+											   vk::VkOffset3D				offset,
+											   int							width,
+											   int							height,
+											   int							depth,
+											   unsigned int					mipLevel,
+											   unsigned int					arrayElement,
+											   vk::VkImageAspectFlagBits	aspect,
+											   vk::VkImageType				type)
+{
+	de::SharedPtr<Image> stagingResource;
+	{
+		vk::VkExtent3D stagingExtent = {(deUint32)width, (deUint32)height, (deUint32)depth};
+		ImageCreateInfo stagingResourceCreateInfo(type, m_format, stagingExtent, 1, 1, vk::VK_SAMPLE_COUNT_1_BIT,
+												  vk::VK_IMAGE_TILING_LINEAR, vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT);
+
+		stagingResource = Image::createAndAlloc(m_vk, m_device, stagingResourceCreateInfo, allocator,
+												vk::MemoryRequirement::HostVisible);
+
+		//todo [scygan] get proper queueFamilyIndex
+		CmdPoolCreateInfo copyCmdPoolCreateInfo(0);
+		vk::Unique<vk::VkCommandPool> copyCmdPool(vk::createCommandPool(m_vk, m_device, &copyCmdPoolCreateInfo));
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*copyCmdPool,										// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					commandBufferCount;
+		};
+		vk::Unique<vk::VkCommandBuffer> copyCmdBuffer(vk::allocateCommandBuffer(m_vk, m_device, &cmdBufferAllocateInfo));
+
+		CmdBufferBeginInfo beginInfo;
+		VK_CHECK(m_vk.beginCommandBuffer(*copyCmdBuffer, &beginInfo));
+
+		transition2DImage(m_vk, *copyCmdBuffer, stagingResource->object(), aspect, vk::VK_IMAGE_LAYOUT_UNDEFINED, vk::VK_IMAGE_LAYOUT_GENERAL);
+
+		const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+		vk::VkImageCopy region = { {aspect, mipLevel, arrayElement, 1}, offset, {aspect, 0, 0, 1}, zeroOffset, {(deUint32)width, (deUint32)height, (deUint32)depth} };
+
+		m_vk.cmdCopyImage(*copyCmdBuffer, object(), layout, stagingResource->object(), vk::VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+		VK_CHECK(m_vk.endCommandBuffer(*copyCmdBuffer));
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,									// deUint32					commandBufferCount;
+			&copyCmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// TODO: make this less intrusive
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+	}
+	return stagingResource;
+}
+
+void Image::uploadVolume(const tcu::ConstPixelBufferAccess&	access,
+						 vk::VkQueue						queue,
+						 vk::Allocator&						allocator,
+						 vk::VkImageLayout					layout,
+						 vk::VkOffset3D						offset,
+						 vk::VkImageAspectFlagBits			aspect,
+						 unsigned int						mipLevel,
+						 unsigned int						arrayElement)
+{
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		upload(queue, allocator, layout, offset, access.getWidth(),
+		access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_3D,
+		access.getDataPtr());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		uploadUsingBuffer(queue, allocator, layout, offset, access.getWidth(),
+		access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, access.getDataPtr());
+	}
+}
+
+void Image::uploadSurface (const tcu::ConstPixelBufferAccess&	access,
+						   vk::VkQueue							queue,
+						   vk::Allocator&						allocator,
+						   vk::VkImageLayout					layout,
+						   vk::VkOffset3D						offset,
+						   vk::VkImageAspectFlagBits			aspect,
+						   unsigned int							mipLevel,
+						   unsigned int							arrayElement)
+{
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		upload(queue, allocator, layout, offset, access.getWidth(),
+			access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_2D,
+			access.getDataPtr());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		uploadUsingBuffer(queue, allocator, layout, offset, access.getWidth(),
+			access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, access.getDataPtr());
+	}
+}
+
+void Image::uploadSurface1D (const tcu::ConstPixelBufferAccess&	access,
+							 vk::VkQueue						queue,
+							 vk::Allocator&						allocator,
+							 vk::VkImageLayout					layout,
+							 vk::VkOffset3D						offset,
+							 vk::VkImageAspectFlagBits			aspect,
+							 unsigned int						mipLevel,
+							 unsigned int						arrayElement)
+{
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		upload(queue, allocator, layout, offset, access.getWidth(),
+			access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_1D,
+			access.getDataPtr());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		uploadUsingBuffer(queue, allocator, layout, offset, access.getWidth(),
+			access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, access.getDataPtr());
+	}
+}
+
+void Image::uploadSurfaceLinear (const tcu::ConstPixelBufferAccess&	access,
+								 vk::VkOffset3D						offset,
+								 int								width,
+								 int								height,
+								 int								depth,
+								 vk::VkImageAspectFlagBits			aspect,
+								 unsigned int						mipLevel,
+								 unsigned int						arrayElement)
+{
+	uploadLinear(offset, width, height, depth, mipLevel, arrayElement, aspect, access.getDataPtr());
+}
+
+void Image::upload (vk::VkQueue					queue,
+					vk::Allocator&				allocator,
+					vk::VkImageLayout			layout,
+					vk::VkOffset3D				offset,
+					int							width,
+					int							height,
+					int							depth,
+					unsigned int				mipLevel,
+					unsigned int				arrayElement,
+					vk::VkImageAspectFlagBits	aspect,
+					vk::VkImageType				type,
+					const void *				data)
+{
+	DE_ASSERT(layout == vk::VK_IMAGE_LAYOUT_GENERAL || layout == vk::VK_IMAGE_LAYOUT_UNDEFINED || layout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
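+	// Stage the data in a host-visible, linear-tiled image and copy it into the
+	// destination image on the given queue.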
+	de::SharedPtr<Image> stagingResource;
+	vk::VkExtent3D extent = {(deUint32)width, (deUint32)height, (deUint32)depth};
+	ImageCreateInfo stagingResourceCreateInfo(
+		type, m_format, extent, 1, 1, vk::VK_SAMPLE_COUNT_1_BIT,
+		vk::VK_IMAGE_TILING_LINEAR, vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+
+	stagingResource = Image::createAndAlloc(m_vk, m_device, stagingResourceCreateInfo, allocator,
+								vk::MemoryRequirement::HostVisible);
+
+	const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+	stagingResource->uploadLinear(zeroOffset, width, height, depth, 0, 0, aspect, data);
+
+	{
+		//todo [scygan] get proper queueFamilyIndex
+		CmdPoolCreateInfo copyCmdPoolCreateInfo(0);
+		vk::Unique<vk::VkCommandPool> copyCmdPool(vk::createCommandPool(m_vk, m_device, &copyCmdPoolCreateInfo));
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*copyCmdPool,										// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					commandBufferCount;
+		};
+
+		vk::Unique<vk::VkCommandBuffer> copyCmdBuffer(vk::allocateCommandBuffer(m_vk, m_device, &cmdBufferAllocateInfo));
+
+		CmdBufferBeginInfo beginInfo;
+		VK_CHECK(m_vk.beginCommandBuffer(*copyCmdBuffer, &beginInfo));
+
+		if (layout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
+		{
+			layout = vk::VK_IMAGE_LAYOUT_GENERAL;
+
+			vk::VkImageMemoryBarrier barrier;
+			barrier.sType = vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+			barrier.pNext = DE_NULL;
+			barrier.srcAccessMask = 0;
+			barrier.dstAccessMask = 0;
+			barrier.oldLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
+			barrier.newLayout = vk::VK_IMAGE_LAYOUT_GENERAL;
+			barrier.srcQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.dstQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.image = object();
+
+			barrier.subresourceRange.aspectMask = aspect;
+			barrier.subresourceRange.baseMipLevel = 0;
+			barrier.subresourceRange.levelCount = m_levelCount;
+			barrier.subresourceRange.baseArrayLayer = 0;
+			barrier.subresourceRange.layerCount = m_layerCount;
+
+			m_vk.cmdPipelineBarrier(*copyCmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+									0, (const vk::VkMemoryBarrier*)DE_NULL,
+									0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+									1, &barrier);
+		}
+
+		transition2DImage(m_vk, *copyCmdBuffer, stagingResource->object(), aspect, vk::VK_IMAGE_LAYOUT_UNDEFINED, vk::VK_IMAGE_LAYOUT_GENERAL);
+
+		vk::VkImageCopy region = {{aspect, 0, 0, 1},
+									zeroOffset,
+									{aspect, mipLevel, arrayElement, 1},
+									offset,
+									{(deUint32)width, (deUint32)height, (deUint32)depth}};
+
+		m_vk.cmdCopyImage(*copyCmdBuffer, stagingResource->object(),
+								vk::VK_IMAGE_LAYOUT_GENERAL, object(), layout, 1, &region);
+		VK_CHECK(m_vk.endCommandBuffer(*copyCmdBuffer));
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,									// deUint32					commandBufferCount;
+			&copyCmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// TODO: make this less intrusive
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+	}
+}
+
+void Image::uploadUsingBuffer (vk::VkQueue					queue,
+							   vk::Allocator&				allocator,
+							   vk::VkImageLayout			layout,
+							   vk::VkOffset3D				offset,
+							   int							width,
+							   int							height,
+							   int							depth,
+							   unsigned int					mipLevel,
+							   unsigned int					arrayElement,
+							   vk::VkImageAspectFlagBits	aspect,
+							   const void *					data)
+{
+	DE_ASSERT(layout == vk::VK_IMAGE_LAYOUT_GENERAL || layout == vk::VK_IMAGE_LAYOUT_UNDEFINED || layout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+	de::SharedPtr<Buffer> stagingResource;
+	bool isCombinedType = isCombinedDepthStencilType(vk::mapVkFormat(m_format).type);
+	vk::VkDeviceSize bufferSize = 0;
+	if (!isCombinedType)
+		bufferSize = vk::mapVkFormat(m_format).getPixelSize() * width * height * depth;
+	if (isCombinedType)
+	{
+		int pixelSize = 0;
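+		// Only one aspect of a combined depth/stencil format is uploaded at a time,
+		// so size the staging buffer for that aspect's per-pixel byte count.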
+		switch (m_format)
+		{
+			case vk::VK_FORMAT_D16_UNORM_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 2 : 1;
+				break;
+			case  vk::VK_FORMAT_D32_SFLOAT_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 4 : 1;
+				break;
+			case vk::VK_FORMAT_X8_D24_UNORM_PACK32:
+			case vk::VK_FORMAT_D24_UNORM_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 3 : 1;
+				break;
+
+			default:
+				DE_FATAL("Not implemented");
+		}
+		bufferSize = pixelSize*width*height*depth;
+	}
+	BufferCreateInfo stagingBufferResourceCreateInfo(bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT | vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
+	stagingResource = Buffer::createAndAlloc(m_vk, m_device, stagingBufferResourceCreateInfo, allocator, vk::MemoryRequirement::HostVisible);
+	char* destPtr = reinterpret_cast<char*>(stagingResource->getBoundMemory().getHostPtr());
+	deMemcpy(destPtr, data, static_cast<size_t>(bufferSize));
+	vk::flushMappedMemoryRange(m_vk, m_device, stagingResource->getBoundMemory().getMemory(), stagingResource->getBoundMemory().getOffset(), bufferSize);
+	{
+		//todo [scygan] get proper queueFamilyIndex
+		CmdPoolCreateInfo copyCmdPoolCreateInfo(0);
+		vk::Unique<vk::VkCommandPool> copyCmdPool(vk::createCommandPool(m_vk, m_device, &copyCmdPoolCreateInfo));
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*copyCmdPool,										// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					commandBufferCount;
+		};
+		vk::Unique<vk::VkCommandBuffer> copyCmdBuffer(vk::allocateCommandBuffer(m_vk, m_device, &cmdBufferAllocateInfo));
+
+		CmdBufferBeginInfo beginInfo;
+		VK_CHECK(m_vk.beginCommandBuffer(*copyCmdBuffer, &beginInfo));
+
+		if (layout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
+		{
+			layout = vk::VK_IMAGE_LAYOUT_GENERAL;
+
+			vk::VkImageMemoryBarrier barrier;
+			barrier.sType = vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+			barrier.pNext = DE_NULL;
+			barrier.srcAccessMask = 0;
+			barrier.dstAccessMask = 0;
+			barrier.oldLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
+			barrier.newLayout = vk::VK_IMAGE_LAYOUT_GENERAL;
+			barrier.srcQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.dstQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.image = object();
+
+			barrier.subresourceRange.aspectMask = aspect;
+			barrier.subresourceRange.baseMipLevel = 0;
+			barrier.subresourceRange.levelCount = m_levelCount;
+			barrier.subresourceRange.baseArrayLayer = 0;
+			barrier.subresourceRange.layerCount = m_layerCount;
+
+			m_vk.cmdPipelineBarrier(*copyCmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+									0, (const vk::VkMemoryBarrier*)DE_NULL,
+									0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+									1, &barrier);
+		}
+
+		vk::VkBufferImageCopy region = {
+			0, 0, 0,
+			{ aspect, mipLevel, arrayElement, 1 },
+			offset,
+			{ (deUint32)width, (deUint32)height, (deUint32)depth }
+		};
+
+		m_vk.cmdCopyBufferToImage(*copyCmdBuffer, stagingResource->object(),
+			object(), layout, 1, &region);
+		VK_CHECK(m_vk.endCommandBuffer(*copyCmdBuffer));
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,									// deUint32					commandBufferCount;
+			&copyCmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// TODO: make this less intrusive
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+	}
+}
+
+void Image::uploadLinear (vk::VkOffset3D			offset,
+						  int						width,
+						  int						height,
+						  int						depth,
+						  unsigned int				mipLevel,
+						  unsigned int				arrayElement,
+						  vk::VkImageAspectFlagBits	aspect,
+						  const void *				data)
+{
+	vk::VkSubresourceLayout imageLayout;
+
+	vk::VkImageSubresource imageSubResource = {aspect, mipLevel, arrayElement};
+
+	m_vk.getImageSubresourceLayout(m_device, object(), &imageSubResource,
+													&imageLayout);
+
+	char* destPtr = reinterpret_cast<char*>(getBoundMemory().getHostPtr());
+
+	destPtr += imageLayout.offset + getPixelOffset(offset, imageLayout.rowPitch, imageLayout.depthPitch, mipLevel, arrayElement);
+
+	MemoryOp::pack(vk::mapVkFormat(m_format).getPixelSize(), width, height, depth,
+		imageLayout.rowPitch, imageLayout.depthPitch, data, destPtr);
+}
+
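+// Computes the byte offset of (offset, level, layer) within a linear-tiled image,
+// given the row and depth pitches reported for that subresource.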
+vk::VkDeviceSize Image::getPixelOffset (vk::VkOffset3D		offset,
+										vk::VkDeviceSize	rowPitch,
+										vk::VkDeviceSize	depthPitch,
+										unsigned int		level,
+										unsigned int		layer)
+{
+	DE_ASSERT(level < m_levelCount);
+	DE_ASSERT(layer < m_layerCount);
+
+	vk::VkDeviceSize mipLevelSizes[32];
+	vk::VkDeviceSize mipLevelRectSizes[32];
+	tcu::IVec3 mipExtend = tcu::IVec3(m_extent.width, m_extent.height, m_extent.depth);
+
+	vk::VkDeviceSize arrayElemSize = 0;
+	for (unsigned int i = 0; i < m_levelCount && (mipExtend[0] > 1 || mipExtend[1] > 1 || mipExtend[2] > 1); ++i)
+	{
+		// Level size covers the full 3D mip level; rect size covers a single depth slice.
+		mipLevelSizes[i]		= mipExtend[2] * depthPitch;
+		mipLevelRectSizes[i]	= mipExtend[1] * rowPitch;
+
+		arrayElemSize += mipLevelSizes[0];
+
+		mipExtend = tcu::max(mipExtend / 2, tcu::IVec3(1));
+	}
+
+	vk::VkDeviceSize pixelOffset = layer * arrayElemSize;
+	for (size_t i = 0; i < level; ++i)
+	{
+		pixelOffset += mipLevelSizes[i];
+	}
+	pixelOffset += offset.z * mipLevelRectSizes[level];
+	pixelOffset += offset.y * rowPitch;
+	pixelOffset += offset.x;
+
+	return pixelOffset;
+}
+
+void Image::bindMemory (de::MovePtr<vk::Allocation> allocation)
+{
+	DE_ASSERT(allocation);
+	VK_CHECK(m_vk.bindImageMemory(m_device, *m_object, allocation->getMemory(), allocation->getOffset()));
+
+	DE_ASSERT(!m_allocation);
+	m_allocation = allocation;
+}
+
+de::SharedPtr<Image> Image::createAndAlloc(const vk::DeviceInterface&	vk,
+										   vk::VkDevice					device,
+										   const vk::VkImageCreateInfo& createInfo,
+										   vk::Allocator&				allocator,
+										   vk::MemoryRequirement		memoryRequirement)
+{
+	de::SharedPtr<Image> ret = create(vk, device, createInfo);
+
+	vk::VkMemoryRequirements imageRequirements = vk::getImageMemoryRequirements(vk, device, ret->object());
+	ret->bindMemory(allocator.allocate(imageRequirements, memoryRequirement));
+	return ret;
+}
+
+de::SharedPtr<Image> Image::create(const vk::DeviceInterface&	vk,
+								   vk::VkDevice					device,
+								   const vk::VkImageCreateInfo	&createInfo)
+{
+	return de::SharedPtr<Image>(new Image(vk, device, createInfo.format, createInfo.extent,
+								createInfo.mipLevels, createInfo.arrayLayers,
+								vk::createImage(vk, device, &createInfo)));
+}
+
+void transition2DImage (const vk::DeviceInterface&	vk,
+						vk::VkCommandBuffer				cmdBuffer,
+						vk::VkImage					image,
+						vk::VkImageAspectFlags		aspectMask,
+						vk::VkImageLayout			oldLayout,
+						vk::VkImageLayout			newLayout)
+{
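+	// Layout-transition barrier for a single-level, single-layer 2D image; access masks
+	// are left at zero, the callers in these utilities synchronize via queueWaitIdle().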
+	vk::VkImageMemoryBarrier barrier;
+	barrier.sType					= vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+	barrier.pNext					= DE_NULL;
+	barrier.srcAccessMask				= 0;
+	barrier.dstAccessMask				= 0;
+	barrier.oldLayout				= oldLayout;
+	barrier.newLayout				= newLayout;
+	barrier.srcQueueFamilyIndex		= vk::VK_QUEUE_FAMILY_IGNORED;
+	barrier.dstQueueFamilyIndex	= vk::VK_QUEUE_FAMILY_IGNORED;
+	barrier.image					= image;
+	barrier.subresourceRange.aspectMask		= aspectMask;
+	barrier.subresourceRange.baseMipLevel	= 0;
+	barrier.subresourceRange.levelCount		= 1;
+	barrier.subresourceRange.baseArrayLayer = 0;
+	barrier.subresourceRange.layerCount		= 1;
+
+	vk.cmdPipelineBarrier(cmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+						  0, (const vk::VkMemoryBarrier*)DE_NULL,
+						  0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+						  1, &barrier);
+}
+
+void initialTransitionColor2DImage (const vk::DeviceInterface &vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout)
+{
+	transition2DImage(vk, cmdBuffer, image, vk::VK_IMAGE_ASPECT_COLOR_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, layout);
+}
+
+void initialTransitionDepth2DImage (const vk::DeviceInterface &vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout)
+{
+	transition2DImage(vk, cmdBuffer, image, vk::VK_IMAGE_ASPECT_DEPTH_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, layout);
+}
+
+void initialTransitionStencil2DImage (const vk::DeviceInterface &vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout)
+{
+	transition2DImage(vk, cmdBuffer, image, vk::VK_IMAGE_ASPECT_STENCIL_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, layout);
+}
+
+void initialTransitionDepthStencil2DImage (const vk::DeviceInterface &vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout)
+{
+	transition2DImage(vk, cmdBuffer, image, vk::VK_IMAGE_ASPECT_DEPTH_BIT | vk::VK_IMAGE_ASPECT_STENCIL_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, layout);
+}
+
+} // DynamicState
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateImageObjectUtil.hpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateImageObjectUtil.hpp
new file mode 100644
index 0000000..fbf78ae
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateImageObjectUtil.hpp
@@ -0,0 +1,288 @@
+#ifndef _VKTDYNAMICSTATEIMAGEOBJECTUTIL_HPP
+#define _VKTDYNAMICSTATEIMAGEOBJECTUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image Object Util
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vkMemUtil.hpp"
+#include "vkRefUtil.hpp"
+
+#include "deSharedPtr.hpp"
+
+#include "tcuTexture.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+class MemoryOp
+{
+public:
+	static void pack	(int					pixelSize,
+						 int					width,
+						 int					height,
+						 int					depth,
+						 vk::VkDeviceSize		rowPitchOrZero,
+						 vk::VkDeviceSize		depthPitchOrZero,
+						 const void *			srcBuffer,
+						 void *					destBuffer);
+
+	static void unpack	(int					pixelSize,
+						 int					width,
+						 int					height,
+						 int					depth,
+						 vk::VkDeviceSize		rowPitchOrZero,
+						 vk::VkDeviceSize		depthPitchOrZero,
+						 const void *			srcBuffer,
+						 void *					destBuffer);
+};
+
+class Image
+{
+public:
+	static de::SharedPtr<Image> create				(const vk::DeviceInterface& vk, vk::VkDevice device, const vk::VkImageCreateInfo& createInfo);
+
+	static de::SharedPtr<Image> createAndAlloc		(const vk::DeviceInterface&				vk,
+													 vk::VkDevice							device,
+													 const vk::VkImageCreateInfo&			createInfo,
+													 vk::Allocator&							allocator,
+													 vk::MemoryRequirement					memoryRequirement = vk::MemoryRequirement::Any);
+
+	tcu::ConstPixelBufferAccess readSurface			(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	tcu::ConstPixelBufferAccess readSurface1D		(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	tcu::ConstPixelBufferAccess readVolume			(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	tcu::ConstPixelBufferAccess readSurfaceLinear	(vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	void						read				(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 vk::VkImageType						type,
+													 void *									data);
+
+	void						readUsingBuffer		(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 void *									data);
+
+	void						readLinear			(vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 void *									data);
+
+	void						uploadVolume		(const tcu::ConstPixelBufferAccess&		access,
+													 vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	void						uploadSurface		(const tcu::ConstPixelBufferAccess&		access,
+													 vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	void						uploadSurface1D		(const tcu::ConstPixelBufferAccess&		access,
+													 vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	void						uploadSurfaceLinear	(const tcu::ConstPixelBufferAccess&		access,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	void						upload				(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 vk::VkImageType						type,
+													 const void *							data);
+
+	void						uploadUsingBuffer	(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 const void *							data);
+
+	void						uploadLinear		(vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 const void *							data);
+
+	de::SharedPtr<Image>		copyToLinearImage	(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 vk::VkImageType						type);
+
+	const vk::VkFormat&			getFormat			(void) const											{ return m_format;		}
+	vk::VkImage					object				(void) const											{ return *m_object;		}
+	void						bindMemory			(de::MovePtr<vk::Allocation>			allocation);
+	vk::Allocation				getBoundMemory		(void) const											{ return *m_allocation; }
+
+private:
+	vk::VkDeviceSize			getPixelOffset		(vk::VkOffset3D							offset,
+													 vk::VkDeviceSize						rowPitch,
+													 vk::VkDeviceSize						depthPitch,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement);
+
+								Image				(const vk::DeviceInterface&				vk,
+													 vk::VkDevice							device,
+													 vk::VkFormat							format,
+													 const vk::VkExtent3D&					extend,
+													 deUint32								levelCount,
+													 deUint32								layerCount,
+													 vk::Move<vk::VkImage>					object);
+
+	Image											(const Image& other);	// Not allowed!
+	Image&						operator=			(const Image& other);	// Not allowed!
+
+	de::MovePtr<vk::Allocation>	m_allocation;
+	vk::Unique<vk::VkImage>		m_object;
+
+	vk::VkFormat				m_format;
+	vk::VkExtent3D				m_extent;
+	deUint32					m_levelCount;
+	deUint32					m_layerCount;
+
+	std::vector<deUint8>		m_pixelAccessData;
+
+	const vk::DeviceInterface&	m_vk;
+	vk::VkDevice				m_device;
+};
+
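+// Helpers that record image memory barriers transitioning the given image between the specified layouts.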
+void transition2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageAspectFlags aspectMask, vk::VkImageLayout oldLayout, vk::VkImageLayout newLayout);
+
+void initialTransitionColor2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout);
+
+void initialTransitionDepth2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout);
+
+void initialTransitionStencil2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout);
+
+void initialTransitionDepthStencil2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout);
+
+} // DynamicState
+} // vkt
+
+#endif // _VKTDYNAMICSTATEIMAGEOBJECTUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateRSTests.cpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateRSTests.cpp
new file mode 100644
index 0000000..61a663f
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateRSTests.cpp
@@ -0,0 +1,721 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic Raster State Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktDynamicStateRSTests.hpp"
+
+#include "vktDynamicStateBaseClass.hpp"
+#include "vktDynamicStateTestCaseUtil.hpp"
+
+#include "vkImageUtil.hpp"
+
+#include "tcuTextureUtil.hpp"
+#include "tcuImageCompare.hpp"
+#include "tcuRGBA.hpp"
+
+#include "deMath.h"
+
+namespace vkt
+{
+namespace DynamicState
+{
+namespace
+{
+
+class DepthBiasBaseCase : public TestInstance
+{
+public:
+	DepthBiasBaseCase (Context& context, const char* vertexShaderName, const char* fragmentShaderName)
+		: TestInstance						(context)
+		, m_colorAttachmentFormat			(vk::VK_FORMAT_R8G8B8A8_UNORM)
+		, m_topology						(vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP)
+		, m_vk								(context.getDeviceInterface())
+		, m_vertexShaderName				(vertexShaderName)
+		, m_fragmentShaderName				(fragmentShaderName)
+	{
+	}
+
+protected:
+
+	enum
+	{
+		WIDTH	= 128,
+		HEIGHT	= 128
+	};
+
+	vk::VkFormat									m_colorAttachmentFormat;
+	vk::VkFormat									m_depthStencilAttachmentFormat;
+
+	vk::VkPrimitiveTopology							m_topology;
+
+	const vk::DeviceInterface&						m_vk;
+
+	vk::Move<vk::VkPipeline>						m_pipeline;
+	vk::Move<vk::VkPipelineLayout>					m_pipelineLayout;
+
+	de::SharedPtr<Image>							m_colorTargetImage;
+	vk::Move<vk::VkImageView>						m_colorTargetView;
+
+	de::SharedPtr<Image>							m_depthStencilImage;
+	vk::Move<vk::VkImageView>						m_attachmentView;
+
+	PipelineCreateInfo::VertexInputState			m_vertexInputState;
+	de::SharedPtr<Buffer>							m_vertexBuffer;
+
+	vk::Move<vk::VkCommandPool>						m_cmdPool;
+	vk::Move<vk::VkCommandBuffer>					m_cmdBuffer;
+
+	vk::Move<vk::VkFramebuffer>						m_framebuffer;
+	vk::Move<vk::VkRenderPass>						m_renderPass;
+
+	std::string										m_vertexShaderName;
+	std::string										m_fragmentShaderName;
+
+	std::vector<PositionColorVertex>				m_data;
+
+	PipelineCreateInfo::DepthStencilState			m_depthStencilState;
+
+	void initialize (void)
+	{
+		const vk::VkDevice device	= m_context.getDevice();
+
+		vk::VkFormatProperties formatProperties;
+		// check for VK_FORMAT_D24_UNORM_S8_UINT support
+		m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), vk::VK_FORMAT_D24_UNORM_S8_UINT, &formatProperties);
+		if (formatProperties.optimalTilingFeatures & vk::VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
+		{
+			m_depthStencilAttachmentFormat = vk::VK_FORMAT_D24_UNORM_S8_UINT;
+		}
+		else
+		{
+			// check for VK_FORMAT_D32_SFLOAT_S8_UINT support
+			m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), vk::VK_FORMAT_D32_SFLOAT_S8_UINT, &formatProperties);
+			if (formatProperties.optimalTilingFeatures & vk::VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
+			{
+				m_depthStencilAttachmentFormat = vk::VK_FORMAT_D32_SFLOAT_S8_UINT;
+			}
+			else
+				throw tcu::NotSupportedError("No valid depth stencil attachment available");
+		}
+
+		const PipelineLayoutCreateInfo pipelineLayoutCreateInfo;
+		m_pipelineLayout			= vk::createPipelineLayout(m_vk, device, &pipelineLayoutCreateInfo);
+
+		const vk::Unique<vk::VkShaderModule> vs(createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_vertexShaderName), 0));
+		const vk::Unique<vk::VkShaderModule> fs(createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_fragmentShaderName), 0));
+
+		const vk::VkExtent3D imageExtent = { WIDTH, HEIGHT, 1 };
+		ImageCreateInfo targetImageCreateInfo(vk::VK_IMAGE_TYPE_2D, m_colorAttachmentFormat, imageExtent, 1, 1, vk::VK_SAMPLE_COUNT_1_BIT, vk::VK_IMAGE_TILING_OPTIMAL,
+											  vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+
+		m_colorTargetImage = Image::createAndAlloc(m_vk, device, targetImageCreateInfo, m_context.getDefaultAllocator());
+
+		const ImageCreateInfo depthStencilImageCreateInfo(vk::VK_IMAGE_TYPE_2D, m_depthStencilAttachmentFormat, imageExtent,
+														  1, 1, vk::VK_SAMPLE_COUNT_1_BIT, vk::VK_IMAGE_TILING_OPTIMAL,
+														  vk::VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT);
+
+		m_depthStencilImage = Image::createAndAlloc(m_vk, device, depthStencilImageCreateInfo, m_context.getDefaultAllocator());
+
+		const ImageViewCreateInfo colorTargetViewInfo(m_colorTargetImage->object(), vk::VK_IMAGE_VIEW_TYPE_2D, m_colorAttachmentFormat);
+		m_colorTargetView = vk::createImageView(m_vk, device, &colorTargetViewInfo);
+
+		const ImageViewCreateInfo attachmentViewInfo(m_depthStencilImage->object(), vk::VK_IMAGE_VIEW_TYPE_2D, m_depthStencilAttachmentFormat);
+		m_attachmentView = vk::createImageView(m_vk, device, &attachmentViewInfo);
+
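+		// Render pass with a color attachment kept in GENERAL layout (it is cleared and read back outside the
+		// pass) and a depth/stencil attachment used in DEPTH_STENCIL_ATTACHMENT_OPTIMAL layout.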
+		RenderPassCreateInfo renderPassCreateInfo;
+		renderPassCreateInfo.addAttachment(AttachmentDescription(m_colorAttachmentFormat,
+																 vk::VK_SAMPLE_COUNT_1_BIT,
+																 vk::VK_ATTACHMENT_LOAD_OP_LOAD,
+																 vk::VK_ATTACHMENT_STORE_OP_STORE,
+																 vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+																 vk::VK_ATTACHMENT_STORE_OP_STORE,
+																 vk::VK_IMAGE_LAYOUT_GENERAL,
+																 vk::VK_IMAGE_LAYOUT_GENERAL));
+
+		renderPassCreateInfo.addAttachment(AttachmentDescription(m_depthStencilAttachmentFormat,
+																 vk::VK_SAMPLE_COUNT_1_BIT,
+																 vk::VK_ATTACHMENT_LOAD_OP_LOAD,
+																 vk::VK_ATTACHMENT_STORE_OP_STORE,
+																 vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+																 vk::VK_ATTACHMENT_STORE_OP_STORE,
+																 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+																 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL));
+
+		const vk::VkAttachmentReference colorAttachmentReference =
+		{
+			0,
+			vk::VK_IMAGE_LAYOUT_GENERAL
+		};
+
+		const vk::VkAttachmentReference depthAttachmentReference =
+		{
+			1,
+			vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
+		};
+
+		renderPassCreateInfo.addSubpass(SubpassDescription(vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
+														   0,
+														   0,
+														   DE_NULL,
+														   1,
+														   &colorAttachmentReference,
+														   DE_NULL,
+														   depthAttachmentReference,
+														   0,
+														   DE_NULL));
+
+		m_renderPass = vk::createRenderPass(m_vk, device, &renderPassCreateInfo);
+
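+		// Vertices are interleaved position/color pairs (two vec4s) fetched from a single binding.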
+		const vk::VkVertexInputBindingDescription vertexInputBindingDescription =
+		{
+			0,
+			(deUint32)sizeof(tcu::Vec4) * 2,
+			vk::VK_VERTEX_INPUT_RATE_VERTEX,
+		};
+
+		const vk::VkVertexInputAttributeDescription vertexInputAttributeDescriptions[2] =
+		{
+			{
+				0u,
+				0u,
+				vk::VK_FORMAT_R32G32B32A32_SFLOAT,
+				0u
+			},
+			{
+				1u,
+				0u,
+				vk::VK_FORMAT_R32G32B32A32_SFLOAT,
+				(deUint32)(sizeof(float)* 4),
+			}
+		};
+
+		m_vertexInputState = PipelineCreateInfo::VertexInputState(1,
+																  &vertexInputBindingDescription,
+																  2,
+																  vertexInputAttributeDescriptions);
+
+		const PipelineCreateInfo::ColorBlendState::Attachment vkCbAttachmentState;
+
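+		// PipelineCreateInfo::DynamicState() leaves viewport, scissor, line width, depth bias, blend constants,
+		// depth bounds and stencil parameters dynamic; their values are supplied while recording the command
+		// buffer through the setDynamic*() helpers below.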
+		PipelineCreateInfo pipelineCreateInfo(*m_pipelineLayout, *m_renderPass, 0, 0);
+		pipelineCreateInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*vs, "main", vk::VK_SHADER_STAGE_VERTEX_BIT));
+		pipelineCreateInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*fs, "main", vk::VK_SHADER_STAGE_FRAGMENT_BIT));
+		pipelineCreateInfo.addState(PipelineCreateInfo::VertexInputState(m_vertexInputState));
+		pipelineCreateInfo.addState(PipelineCreateInfo::InputAssemblerState(m_topology));
+		pipelineCreateInfo.addState(PipelineCreateInfo::ColorBlendState(1, &vkCbAttachmentState));
+		pipelineCreateInfo.addState(PipelineCreateInfo::ViewportState(1));
+		pipelineCreateInfo.addState(m_depthStencilState);
+		pipelineCreateInfo.addState(PipelineCreateInfo::RasterizerState());
+		pipelineCreateInfo.addState(PipelineCreateInfo::MultiSampleState());
+		pipelineCreateInfo.addState(PipelineCreateInfo::DynamicState());
+
+		m_pipeline = vk::createGraphicsPipeline(m_vk, device, DE_NULL, &pipelineCreateInfo);
+
+		std::vector<vk::VkImageView> attachments(2);
+		attachments[0] = *m_colorTargetView;
+		attachments[1] = *m_attachmentView;
+
+		const FramebufferCreateInfo framebufferCreateInfo(*m_renderPass, attachments, WIDTH, HEIGHT, 1);
+
+		m_framebuffer = vk::createFramebuffer(m_vk, device, &framebufferCreateInfo);
+
+		const vk::VkDeviceSize dataSize = m_data.size() * sizeof(PositionColorVertex);
+		m_vertexBuffer = Buffer::createAndAlloc(m_vk, device, BufferCreateInfo(dataSize,
+			vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT),
+			m_context.getDefaultAllocator(), vk::MemoryRequirement::HostVisible);
+
+		deUint8* ptr = reinterpret_cast<deUint8*>(m_vertexBuffer->getBoundMemory().getHostPtr());
+		deMemcpy(ptr, &m_data[0], static_cast<size_t>(dataSize));
+
+		// Flush the whole vertex data range, not just sizeof(VkDeviceSize) bytes.
+		vk::flushMappedMemoryRange(m_vk, device,
+								   m_vertexBuffer->getBoundMemory().getMemory(),
+								   m_vertexBuffer->getBoundMemory().getOffset(),
+								   dataSize);
+
+		const CmdPoolCreateInfo cmdPoolCreateInfo(m_context.getUniversalQueueFamilyIndex());
+		m_cmdPool = vk::createCommandPool(m_vk, device, &cmdPoolCreateInfo);
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*m_cmdPool,											// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					bufferCount;
+		};
+		m_cmdBuffer = vk::allocateCommandBuffer(m_vk, device, &cmdBufferAllocateInfo);
+	}
+
+	virtual tcu::TestStatus iterate (void)
+	{
+		DE_ASSERT(false);
+		return tcu::TestStatus::fail("Should reimplement iterate() method");
+	}
+
+	void beginRenderPass (void)
+	{
+		const vk::VkClearColorValue clearColor = { { 0.0f, 0.0f, 0.0f, 1.0f } };
+		beginRenderPassWithClearColor(clearColor);
+	}
+
+	void beginRenderPassWithClearColor (const vk::VkClearColorValue &clearColor)
+	{
+		const CmdBufferBeginInfo beginInfo;
+		m_vk.beginCommandBuffer(*m_cmdBuffer, &beginInfo);
+
+		initialTransitionColor2DImage(m_vk, *m_cmdBuffer, m_colorTargetImage->object(), vk::VK_IMAGE_LAYOUT_GENERAL);
+		initialTransitionDepthStencil2DImage(m_vk, *m_cmdBuffer, m_depthStencilImage->object(), vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
+
+		const ImageSubresourceRange subresourceRangeImage(vk::VK_IMAGE_ASPECT_COLOR_BIT);
+		m_vk.cmdClearColorImage(*m_cmdBuffer, m_colorTargetImage->object(),
+								vk::VK_IMAGE_LAYOUT_GENERAL, &clearColor, 1, &subresourceRangeImage);
+
+		const vk::VkClearDepthStencilValue depthStencilClearValue = { 0.0f, 0 };
+
+		const ImageSubresourceRange subresourceRangeDepthStencil[2] = { vk::VK_IMAGE_ASPECT_DEPTH_BIT, vk::VK_IMAGE_ASPECT_STENCIL_BIT };
+		m_vk.cmdClearDepthStencilImage(*m_cmdBuffer, m_depthStencilImage->object(),
+									   vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &depthStencilClearValue, 2, subresourceRangeDepthStencil);
+
+		const vk::VkRect2D renderArea = { { 0, 0 }, { WIDTH, HEIGHT } };
+		const RenderPassBeginInfo renderPassBegin(*m_renderPass, *m_framebuffer, renderArea);
+
+		m_vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBegin, vk::VK_SUBPASS_CONTENTS_INLINE);
+	}
+
+	void setDynamicViewportState (const deUint32 width, const deUint32 height)
+	{
+		vk::VkViewport viewport;
+		viewport.x = 0;
+		viewport.y = 0;
+		viewport.width = static_cast<float>(width);
+		viewport.height = static_cast<float>(height);
+		viewport.minDepth = 0.0f;
+		viewport.maxDepth = 1.0f;
+
+		m_vk.cmdSetViewport(*m_cmdBuffer, 0, 1, &viewport);
+
+		vk::VkRect2D scissor;
+		scissor.offset.x = 0;
+		scissor.offset.y = 0;
+		scissor.extent.width = width;
+		scissor.extent.height = height;
+		m_vk.cmdSetScissor(*m_cmdBuffer, 0, 1, &scissor);
+	}
+
+	void setDynamicViewportState (const deUint32 viewportCount, const vk::VkViewport* pViewports, const vk::VkRect2D* pScissors)
+	{
+		m_vk.cmdSetViewport(*m_cmdBuffer, 0, viewportCount, pViewports);
+		m_vk.cmdSetScissor(*m_cmdBuffer, 0, viewportCount, pScissors);
+	}
+
+	void setDynamicRasterizationState (const float lineWidth = 1.0f,
+		const float depthBiasConstantFactor = 0.0f,
+		const float depthBiasClamp = 0.0f,
+		const float depthBiasSlopeFactor = 0.0f)
+	{
+		m_vk.cmdSetLineWidth(*m_cmdBuffer, lineWidth);
+		m_vk.cmdSetDepthBias(*m_cmdBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
+	}
+
+	void setDynamicBlendState (const float const1 = 0.0f, const float const2 = 0.0f,
+		const float const3 = 0.0f, const float const4 = 0.0f)
+	{
+		float blendConstants[4] = { const1, const2, const3, const4 };
+		m_vk.cmdSetBlendConstants(*m_cmdBuffer, blendConstants);
+	}
+
+	void setDynamicDepthStencilState (const float minDepthBounds = -1.0f, const float maxDepthBounds = 1.0f,
+		const deUint32 stencilFrontCompareMask = 0xffffffffu, const deUint32 stencilFrontWriteMask = 0xffffffffu,
+		const deUint32 stencilFrontReference = 0, const deUint32 stencilBackCompareMask = 0xffffffffu,
+		const deUint32 stencilBackWriteMask = 0xffffffffu, const deUint32 stencilBackReference = 0)
+	{
+		m_vk.cmdSetDepthBounds(*m_cmdBuffer, minDepthBounds, maxDepthBounds);
+		m_vk.cmdSetStencilCompareMask(*m_cmdBuffer, vk::VK_STENCIL_FACE_FRONT_BIT, stencilFrontCompareMask);
+		m_vk.cmdSetStencilWriteMask(*m_cmdBuffer, vk::VK_STENCIL_FACE_FRONT_BIT, stencilFrontWriteMask);
+		m_vk.cmdSetStencilReference(*m_cmdBuffer, vk::VK_STENCIL_FACE_FRONT_BIT, stencilFrontReference);
+		m_vk.cmdSetStencilCompareMask(*m_cmdBuffer, vk::VK_STENCIL_FACE_BACK_BIT, stencilBackCompareMask);
+		m_vk.cmdSetStencilWriteMask(*m_cmdBuffer, vk::VK_STENCIL_FACE_BACK_BIT, stencilBackWriteMask);
+		m_vk.cmdSetStencilReference(*m_cmdBuffer, vk::VK_STENCIL_FACE_BACK_BIT, stencilBackReference);
+	}
+};
+
+class DepthBiasParamTestInstance : public DepthBiasBaseCase
+{
+public:
+	DepthBiasParamTestInstance (Context& context, ShaderMap shaders)
+		: DepthBiasBaseCase (context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+	{
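+		// Geometry: a full-screen blue quad at depth 0.5, a smaller green quad at depth 1.0 on top of it,
+		// and a second full-screen quad in red at depth 0.5 that is only drawn with a depth bias applied.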
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, 0.5f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 0.5f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 0.5f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, 0.5f, 1.0f), tcu::RGBA::blue().toVec()));
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-0.5f, 0.5f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(0.5f, 0.5f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-0.5f, -0.5f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(0.5f, -0.5f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, 0.5f, 1.0f), tcu::RGBA::red().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 0.5f, 1.0f), tcu::RGBA::red().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 0.5f, 1.0f), tcu::RGBA::red().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, 0.5f, 1.0f), tcu::RGBA::red().toVec()));
+
+		// enable depth test
+		m_depthStencilState = PipelineCreateInfo::DepthStencilState(
+			vk::VK_TRUE, vk::VK_TRUE, vk::VK_COMPARE_OP_GREATER_OR_EQUAL);
+
+		DepthBiasBaseCase::initialize();
+	}
+
+	virtual tcu::TestStatus iterate (void)
+	{
+		tcu::TestLog &log		= m_context.getTestContext().getLog();
+		const vk::VkQueue queue = m_context.getUniversalQueue();
+
+		beginRenderPass();
+
+		// set states here
+		setDynamicViewportState(WIDTH, HEIGHT);
+		setDynamicBlendState();
+		setDynamicDepthStencilState();
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
+		const vk::VkDeviceSize vertexBufferOffset	= 0;
+		const vk::VkBuffer vertexBuffer				= m_vertexBuffer->object();
+		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+		setDynamicRasterizationState(1.0f, 0.0f);
+		m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 0, 0);
+		m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 4, 0);
+
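+		// With a negative constant depth bias the red quad ends up behind the blue and green fragments
+		// already in the depth buffer, so it must not appear in the result.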
+		setDynamicRasterizationState(1.0f, -1.0f);
+		m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 8, 0);
+
+		m_vk.cmdEndRenderPass(*m_cmdBuffer);
+		m_vk.endCommandBuffer(*m_cmdBuffer);
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,									// deUint32					commandBufferCount;
+			&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// validation
+		{
+			VK_CHECK(m_vk.queueWaitIdle(queue));
+
+			tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+			referenceFrame.allocLevel(0);
+
+			const deInt32 frameWidth = referenceFrame.getWidth();
+			const deInt32 frameHeight = referenceFrame.getHeight();
+
+			tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
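+			// Expected image: green in the centre (|x|, |y| <= 0.5 in NDC), blue elsewhere; the biased red quad stays hidden.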
+			for (int y = 0; y < frameHeight; y++)
+			{
+				const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+				for (int x = 0; x < frameWidth; x++)
+				{
+					const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+					if (xCoord >= -0.5f && xCoord <= 0.5f && yCoord >= -0.5f && yCoord <= 0.5f)
+						referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f), x, y);
+					else
+						referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f), x, y);
+				}
+			}
+
+			const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+			const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+				vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+			if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+				referenceFrame.getLevel(0), renderedFrame, 0.05f,
+				tcu::COMPARE_LOG_RESULT))
+			{
+				return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Image verification failed");
+			}
+
+			return tcu::TestStatus(QP_TEST_RESULT_PASS, "Image verification passed");
+		}
+	}
+};
+
+class DepthBiasClampParamTestInstance : public DepthBiasBaseCase
+{
+public:
+	DepthBiasClampParamTestInstance (Context& context, ShaderMap shaders)
+		: DepthBiasBaseCase (context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+	{
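+		// Geometry: a full-screen blue quad at depth 0.0 and a smaller green quad at depth 0.01.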
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, 0.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, 0.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f), tcu::RGBA::blue().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, 0.0f, 1.0f), tcu::RGBA::blue().toVec()));
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-0.5f, 0.5f, 0.01f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(0.5f, 0.5f, 0.01f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-0.5f, -0.5f, 0.01f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(0.5f, -0.5f, 0.01f, 1.0f), tcu::RGBA::green().toVec()));
+
+		// enable depth test
+		m_depthStencilState = PipelineCreateInfo::DepthStencilState(vk::VK_TRUE, vk::VK_TRUE, vk::VK_COMPARE_OP_GREATER_OR_EQUAL);
+
+		DepthBiasBaseCase::initialize();
+	}
+
+	virtual tcu::TestStatus iterate (void)
+	{
+		tcu::TestLog &log = m_context.getTestContext().getLog();
+		const vk::VkQueue queue = m_context.getUniversalQueue();
+
+		beginRenderPass();
+
+		// set states here
+		setDynamicViewportState(WIDTH, HEIGHT);
+		setDynamicBlendState();
+		setDynamicDepthStencilState();
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
+		const vk::VkDeviceSize vertexBufferOffset = 0;
+		const vk::VkBuffer vertexBuffer = m_vertexBuffer->object();
+		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
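+		// A very large constant bias clamped to 0.005 keeps the blue quad's depth just below the green quad's
+		// 0.01, so the unbiased green quad still passes the GREATER_OR_EQUAL depth test in the centre.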
+		setDynamicRasterizationState(1.0f, 1000.0f, 0.005f);
+		m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 0, 0);
+
+		setDynamicRasterizationState(1.0f, 0.0f);
+		m_vk.cmdDraw(*m_cmdBuffer, 4, 1, 4, 0);
+
+		m_vk.cmdEndRenderPass(*m_cmdBuffer);
+		m_vk.endCommandBuffer(*m_cmdBuffer);
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,									// deUint32					commandBufferCount;
+			&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// validation
+		{
+			VK_CHECK(m_vk.queueWaitIdle(queue));
+
+			tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+			referenceFrame.allocLevel(0);
+
+			const deInt32 frameWidth	= referenceFrame.getWidth();
+			const deInt32 frameHeight	= referenceFrame.getHeight();
+
+			tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
+			for (int y = 0; y < frameHeight; y++)
+			{
+				float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+				for (int x = 0; x < frameWidth; x++)
+				{
+					float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+					if (xCoord >= -0.5f && xCoord <= 0.5f && yCoord >= -0.5f && yCoord <= 0.5f)
+						referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f), x, y);
+					else
+						referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f), x, y);
+				}
+			}
+
+			const vk::VkOffset3D zeroOffset					= { 0, 0, 0 };
+			const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+				vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+			if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+				referenceFrame.getLevel(0), renderedFrame, 0.05f,
+				tcu::COMPARE_LOG_RESULT))
+			{
+				return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Image verification failed");
+			}
+
+			return tcu::TestStatus(QP_TEST_RESULT_PASS, "Image verification passed");
+		}
+	}
+};
+
+class LineWidthParamTestInstance : public DynamicStateBaseClass
+{
+public:
+	LineWidthParamTestInstance (Context& context, ShaderMap shaders)
+		: DynamicStateBaseClass (context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+	{
+		// Check if line width test is supported
+		{
+			const vk::VkPhysicalDeviceFeatures& deviceFeatures = m_context.getDeviceFeatures();
+
+			if (!deviceFeatures.wideLines)
+				throw tcu::NotSupportedError("Line width test is unsupported");
+		}
+
+		m_topology = vk::VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 0.0f, 0.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f), tcu::RGBA::green().toVec()));
+
+		DynamicStateBaseClass::initialize();
+	}
+
+	virtual tcu::TestStatus iterate (void)
+	{
+		tcu::TestLog &log		= m_context.getTestContext().getLog();
+		const vk::VkQueue queue = m_context.getUniversalQueue();
+
+		beginRenderPass();
+
+		// set states here
+		vk::VkPhysicalDeviceProperties deviceProperties;
+		m_context.getInstanceInterface().getPhysicalDeviceProperties(m_context.getPhysicalDevice(), &deviceProperties);
+
+		setDynamicViewportState(WIDTH, HEIGHT);
+		setDynamicBlendState();
+		setDynamicDepthStencilState();
+		setDynamicRasterizationState(deFloatFloor(deviceProperties.limits.lineWidthRange[1]));
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
+		const vk::VkDeviceSize vertexBufferOffset	= 0;
+		const vk::VkBuffer vertexBuffer				= m_vertexBuffer->object();
+		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+		m_vk.cmdDraw(*m_cmdBuffer, static_cast<deUint32>(m_data.size()), 1, 0, 0);
+
+		m_vk.cmdEndRenderPass(*m_cmdBuffer);
+		m_vk.endCommandBuffer(*m_cmdBuffer);
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,									// deUint32					commandBufferCount;
+			&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// validation
+		{
+			VK_CHECK(m_vk.queueWaitIdle(queue));
+
+			tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+			referenceFrame.allocLevel(0);
+
+			const deInt32 frameWidth = referenceFrame.getWidth();
+			const deInt32 frameHeight = referenceFrame.getHeight();
+
+			tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
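+			// Expected image: a horizontal green line through the centre whose thickness corresponds to the
+			// maximum supported line width.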
+			for (int y = 0; y < frameHeight; y++)
+			{
+				float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+				for (int x = 0; x < frameWidth; x++)
+				{
+					float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+					float lineHalfWidth = (float)(deFloor(deviceProperties.limits.lineWidthRange[1]) / frameHeight);
+
+					if (xCoord >= -1.0f && xCoord <= 1.0f && yCoord >= -lineHalfWidth && yCoord <= lineHalfWidth)
+						referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f), x, y);
+				}
+			}
+
+			const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+			const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+																							  vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT,
+																							  vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+			if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+				referenceFrame.getLevel(0), renderedFrame, 0.05f,
+				tcu::COMPARE_LOG_RESULT))
+			{
+				return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Image verification failed");
+			}
+
+			return tcu::TestStatus(QP_TEST_RESULT_PASS, "Image verification passed");
+		}
+	}
+};
+
+} // anonymous
+
+DynamicStateRSTests::DynamicStateRSTests (tcu::TestContext& testCtx)
+	: TestCaseGroup (testCtx, "rs_state", "Tests for rasterizer state")
+{
+	/* Left blank on purpose */
+}
+
+DynamicStateRSTests::~DynamicStateRSTests ()
+{
+}
+
+void DynamicStateRSTests::init (void)
+{
+	ShaderMap shaderPaths;
+	shaderPaths[glu::SHADERTYPE_VERTEX]		= "vulkan/dynamic_state/VertexFetch.vert";
+	shaderPaths[glu::SHADERTYPE_FRAGMENT]	= "vulkan/dynamic_state/VertexFetch.frag";
+
+	addChild(new InstanceFactory<DepthBiasParamTestInstance>(m_testCtx, "depth_bias", "Test depth bias functionality", shaderPaths));
+	addChild(new InstanceFactory<DepthBiasClampParamTestInstance>(m_testCtx, "depth_bias_clamp", "Test depth bias clamp functionality", shaderPaths));
+	addChild(new InstanceFactory<LineWidthParamTestInstance>(m_testCtx, "line_width", "Draw a line with width set to the maximum reported by the physical device", shaderPaths));
+}
+
+} // DynamicState
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateRSTests.hpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateRSTests.hpp
new file mode 100644
index 0000000..aa120b0
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateRSTests.hpp
@@ -0,0 +1,61 @@
+#ifndef _VKTDYNAMICSTATERSTESTS_HPP
+#define _VKTDYNAMICSTATERSTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic Raster State Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+class DynamicStateRSTests : public tcu::TestCaseGroup
+{
+public:
+					DynamicStateRSTests			(tcu::TestContext& testCtx);
+					~DynamicStateRSTests		(void);
+	void			init(void);
+
+private:
+	DynamicStateRSTests							(const DynamicStateRSTests& other);
+	DynamicStateRSTests&		operator=		(const DynamicStateRSTests& other);
+};
+
+} // DynamicState
+} // vkt
+
+#endif // _VKTDYNAMICSTATERSTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateTestCaseUtil.hpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateTestCaseUtil.hpp
new file mode 100644
index 0000000..a53e21f
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateTestCaseUtil.hpp
@@ -0,0 +1,112 @@
+#ifndef _VKTDYNAMICSTATETESTCASEUTIL_HPP
+#define _VKTDYNAMICSTATETESTCASEUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic State Tests Test Case Utilities
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuResource.hpp"
+
+#include "vktTestCase.hpp"
+
+#include "gluShaderUtil.hpp"
+#include "vkPrograms.hpp"
+
+#include "deUniquePtr.hpp"
+
+#include <map>
+#include <vector>
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+struct PositionColorVertex
+{
+	PositionColorVertex(const tcu::Vec4& position_, const tcu::Vec4& color_)
+		: position(position_)
+		, color(color_)
+	{}
+	tcu::Vec4 position;
+	tcu::Vec4 color;
+};
+
+class ShaderSourceProvider
+{
+public:
+	static std::string getSource(tcu::Archive& archive, const char* path)
+	{
+		// Take ownership of the resource so it is released when we are done reading it.
+		const de::UniquePtr<tcu::Resource> resource(archive.getResource(path));
+
+		std::vector<deUint8> readBuffer(resource->getSize() + 1);
+		resource->read(&readBuffer[0], resource->getSize());
+		readBuffer[readBuffer.size() - 1] = 0;
+
+		return reinterpret_cast<const char*>(&readBuffer[0]);
+	}
+};
+
+typedef std::map<glu::ShaderType, const char*> ShaderMap;
+
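+// Test case factory: registers the GLSL sources listed in the shader map and creates the templated
+// TestInstance with that same map when the case is executed.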
+template<typename Instance>
+class InstanceFactory : public TestCase
+{
+public:
+	InstanceFactory (tcu::TestContext& testCtx, const std::string& name, const std::string& desc,
+		const std::map<glu::ShaderType, const char*> shaderPaths)
+		: TestCase		(testCtx, name, desc)
+		, m_shaderPaths (shaderPaths)
+	{
+	}
+
+	TestInstance* createInstance (Context& context) const
+	{
+		return new Instance(context, m_shaderPaths);
+	}
+
+	virtual void initPrograms (vk::SourceCollections& programCollection) const
+	{
+		for (ShaderMap::const_iterator i = m_shaderPaths.begin(); i != m_shaderPaths.end(); ++i)
+		{
+			programCollection.glslSources.add(i->second) <<
+				glu::ShaderSource(i->first, ShaderSourceProvider::getSource(m_testCtx.getArchive(), i->second));
+		}
+	}
+
+private:
+	const ShaderMap m_shaderPaths;
+};
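+
+// Typical usage, as seen in the DynamicState*Tests::init() functions (MyTestInstance is a placeholder
+// for any TestInstance subclass taking (Context&, ShaderMap)):
+//
+//	ShaderMap shaders;
+//	shaders[glu::SHADERTYPE_VERTEX]		= "vulkan/dynamic_state/VertexFetch.vert";
+//	shaders[glu::SHADERTYPE_FRAGMENT]	= "vulkan/dynamic_state/VertexFetch.frag";
+//	group->addChild(new InstanceFactory<MyTestInstance>(testCtx, "name", "description", shaders));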
+
+} // DynamicState
+} // vkt
+
+#endif // _VKTDYNAMICSTATETESTCASEUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateTests.cpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateTests.cpp
new file mode 100644
index 0000000..0b1556f
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateTests.cpp
@@ -0,0 +1,72 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic State Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktDynamicStateTests.hpp"
+
+#include "vktDynamicStateVPTests.hpp"
+#include "vktDynamicStateRSTests.hpp"
+#include "vktDynamicStateCBTests.hpp"
+#include "vktDynamicStateDSTests.hpp"
+#include "vktDynamicStateGeneralTests.hpp"
+#include "vktTestGroupUtil.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+namespace
+{
+
+void createChildren (tcu::TestCaseGroup* group)
+{
+	tcu::TestContext&	testCtx		= group->getTestContext();
+
+	group->addChild(new DynamicStateVPTests(testCtx));
+	group->addChild(new DynamicStateRSTests(testCtx));
+	group->addChild(new DynamicStateCBTests(testCtx));
+	group->addChild(new DynamicStateDSTests(testCtx));
+	group->addChild(new DynamicStateGeneralTests(testCtx));
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx)
+{
+	return createTestGroup(testCtx, "dynamic_state", "Dynamic State Tests", createChildren);
+}
+
+} // DynamicState
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateTests.hpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateTests.hpp
new file mode 100644
index 0000000..87d79a5
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTDYNAMICSTATETESTS_HPP
+#define _VKTDYNAMICSTATETESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic State Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+tcu::TestCaseGroup*		createTests			(tcu::TestContext& testCtx);
+
+} // DynamicState
+} // vkt
+
+#endif // _VKTDYNAMICSTATETESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateVPTests.cpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateVPTests.cpp
new file mode 100644
index 0000000..bd835cf
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateVPTests.cpp
@@ -0,0 +1,418 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic State Viewport Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktDynamicStateVPTests.hpp"
+
+#include "vktDynamicStateBaseClass.hpp"
+#include "vktDynamicStateTestCaseUtil.hpp"
+
+#include "vkImageUtil.hpp"
+
+#include "tcuTextureUtil.hpp"
+#include "tcuImageCompare.hpp"
+#include "tcuRGBA.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+namespace
+{
+
+class ViewportStateBaseCase : public DynamicStateBaseClass
+{
+public:
+	ViewportStateBaseCase (Context& context, const char* vertexShaderName, const char* fragmentShaderName)
+		: DynamicStateBaseClass	(context, vertexShaderName, fragmentShaderName)
+	{}
+
+	void initialize(void)
+	{
+		m_topology = vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-0.5f, 0.5f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(0.5f, 0.5f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(-0.5f, -0.5f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+		m_data.push_back(PositionColorVertex(tcu::Vec4(0.5f, -0.5f, 1.0f, 1.0f), tcu::RGBA::green().toVec()));
+
+		DynamicStateBaseClass::initialize();
+	}
+
+	virtual tcu::Texture2D buildReferenceFrame (void)
+	{
+		DE_ASSERT(false);
+		return tcu::Texture2D(tcu::TextureFormat(), 0, 0);
+	}
+
+	virtual void setDynamicStates (void)
+	{
+		DE_ASSERT(false);
+	}
+
+	virtual tcu::TestStatus iterate (void)
+	{
+		tcu::TestLog &log			= m_context.getTestContext().getLog();
+		const vk::VkQueue queue		= m_context.getUniversalQueue();
+
+		beginRenderPass();
+
+		// set states here
+		setDynamicStates();
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
+		const vk::VkDeviceSize vertexBufferOffset = 0;
+		const vk::VkBuffer vertexBuffer = m_vertexBuffer->object();
+		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+		m_vk.cmdDraw(*m_cmdBuffer, static_cast<deUint32>(m_data.size()), 1, 0, 0);
+
+		m_vk.cmdEndRenderPass(*m_cmdBuffer);
+		m_vk.endCommandBuffer(*m_cmdBuffer);
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,									// deUint32					commandBufferCount;
+			&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// validation
+		{
+			VK_CHECK(m_vk.queueWaitIdle(queue));
+
+			tcu::Texture2D referenceFrame = buildReferenceFrame();
+
+			const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+			const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+				vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+			if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+				referenceFrame.getLevel(0), renderedFrame, 0.05f,
+				tcu::COMPARE_LOG_RESULT))
+			{
+				return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Image verification failed");
+			}
+
+			return tcu::TestStatus(QP_TEST_RESULT_PASS, "Image verification passed");
+		}
+	}
+};
+
+class ViewportParamTestInstance : public ViewportStateBaseCase
+{
+public:
+	ViewportParamTestInstance (Context& context, ShaderMap shaders)
+		: ViewportStateBaseCase (context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+	{
+		ViewportStateBaseCase::initialize();
+	}
+
+	virtual void setDynamicStates(void)
+	{
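+		// Viewport twice the size of the render target: only the part of the scaled-up quad that falls in
+		// the bottom-right quadrant of the framebuffer remains visible.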
+		const vk::VkViewport viewport	= { 0.0f, 0.0f, (float)WIDTH * 2, (float)HEIGHT * 2, 0.0f, 0.0f };
+		const vk::VkRect2D scissor		= { { 0, 0 }, { WIDTH, HEIGHT } };
+
+		setDynamicViewportState(1, &viewport, &scissor);
+		setDynamicRasterizationState();
+		setDynamicBlendState();
+		setDynamicDepthStencilState();
+	}
+
+	virtual tcu::Texture2D buildReferenceFrame (void)
+	{
+		tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+		referenceFrame.allocLevel(0);
+
+		const deInt32 frameWidth	= referenceFrame.getWidth();
+		const deInt32 frameHeight	= referenceFrame.getHeight();
+
+		tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
+		for (int y = 0; y < frameHeight; y++)
+		{
+			const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+			for (int x = 0; x < frameWidth; x++)
+			{
+				const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+				if (xCoord >= 0.0f && xCoord <= 1.0f && yCoord >= 0.0f && yCoord <= 1.0f)
+					referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f), x, y);
+			}
+		}
+
+		return referenceFrame;
+	}
+};
+
+class ScissorParamTestInstance : public ViewportStateBaseCase
+{
+public:
+	ScissorParamTestInstance (Context& context, ShaderMap shaders)
+		: ViewportStateBaseCase (context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+	{
+		ViewportStateBaseCase::initialize();
+	}
+
+	virtual void setDynamicStates (void)
+	{
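+		// Scissor covering the top-left quadrant: the centred quad is clipped to its upper-left quarter.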
+		const vk::VkViewport viewport	= { 0.0f, 0.0f, (float)WIDTH, (float)HEIGHT, 0.0f, 0.0f };
+		const vk::VkRect2D scissor		= { { 0, 0 }, { WIDTH / 2, HEIGHT / 2 } };
+
+		setDynamicViewportState(1, &viewport, &scissor);
+		setDynamicRasterizationState();
+		setDynamicBlendState();
+		setDynamicDepthStencilState();
+	}
+
+	virtual tcu::Texture2D buildReferenceFrame (void)
+	{
+		tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+		referenceFrame.allocLevel(0);
+
+		const deInt32 frameWidth	= referenceFrame.getWidth();
+		const deInt32 frameHeight	= referenceFrame.getHeight();
+
+		tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
+		for (int y = 0; y < frameHeight; y++)
+		{
+			const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+			for (int x = 0; x < frameWidth; x++)
+			{
+				const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+				if (xCoord >= -0.5f && xCoord <= 0.0f && yCoord >= -0.5f && yCoord <= 0.0f)
+					referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f), x, y);
+			}
+		}
+
+		return referenceFrame;
+	}
+};
+
+class ViewportArrayTestInstance : public DynamicStateBaseClass
+{
+protected:
+	std::string m_geometryShaderName;
+
+public:
+
+	ViewportArrayTestInstance (Context& context, ShaderMap shaders)
+		: DynamicStateBaseClass	(context, shaders[glu::SHADERTYPE_VERTEX], shaders[glu::SHADERTYPE_FRAGMENT])
+		, m_geometryShaderName	(shaders[glu::SHADERTYPE_GEOMETRY])
+	{
+		// Check geometry shader support
+		{
+			const vk::VkPhysicalDeviceFeatures& deviceFeatures = m_context.getDeviceFeatures();
+
+			if (!deviceFeatures.multiViewport)
+				throw tcu::NotSupportedError("Multi-viewport is not supported");
+
+			if (!deviceFeatures.geometryShader)
+				throw tcu::NotSupportedError("Geometry shaders are not supported");
+		}
+
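+		// Four full-screen quads at increasing depth, one intended for each of the four viewports.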
+		for (int i = 0; i < 4; i++)
+		{
+			m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, 1.0f, (float)i / 3.0f, 1.0f), tcu::RGBA::green().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, 1.0f, (float)i / 3.0f, 1.0f), tcu::RGBA::green().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(-1.0f, -1.0f, (float)i / 3.0f, 1.0f), tcu::RGBA::green().toVec()));
+			m_data.push_back(PositionColorVertex(tcu::Vec4(1.0f, -1.0f, (float)i / 3.0f, 1.0f), tcu::RGBA::green().toVec()));
+		}
+
+		DynamicStateBaseClass::initialize();
+	}
+
+	virtual void initPipeline (const vk::VkDevice device)
+	{
+		const vk::Unique<vk::VkShaderModule> vs(createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_vertexShaderName), 0));
+		const vk::Unique<vk::VkShaderModule> gs(createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_geometryShaderName), 0));
+		const vk::Unique<vk::VkShaderModule> fs(createShaderModule(m_vk, device, m_context.getBinaryCollection().get(m_fragmentShaderName), 0));
+
+		const PipelineCreateInfo::ColorBlendState::Attachment vkCbAttachmentState;
+
+		PipelineCreateInfo pipelineCreateInfo(*m_pipelineLayout, *m_renderPass, 0, 0);
+		pipelineCreateInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*vs, "main", vk::VK_SHADER_STAGE_VERTEX_BIT));
+		pipelineCreateInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*gs, "main", vk::VK_SHADER_STAGE_GEOMETRY_BIT));
+		pipelineCreateInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*fs, "main", vk::VK_SHADER_STAGE_FRAGMENT_BIT));
+		pipelineCreateInfo.addState(PipelineCreateInfo::VertexInputState(m_vertexInputState));
+		pipelineCreateInfo.addState(PipelineCreateInfo::InputAssemblerState(m_topology));
+		pipelineCreateInfo.addState(PipelineCreateInfo::ColorBlendState(1, &vkCbAttachmentState));
+		pipelineCreateInfo.addState(PipelineCreateInfo::ViewportState(4));
+		pipelineCreateInfo.addState(PipelineCreateInfo::DepthStencilState());
+		pipelineCreateInfo.addState(PipelineCreateInfo::RasterizerState());
+		pipelineCreateInfo.addState(PipelineCreateInfo::MultiSampleState());
+		pipelineCreateInfo.addState(PipelineCreateInfo::DynamicState());
+
+		m_pipeline = vk::createGraphicsPipeline(m_vk, device, DE_NULL, &pipelineCreateInfo);
+	}
+
+	virtual tcu::TestStatus iterate (void)
+	{
+		tcu::TestLog &log = m_context.getTestContext().getLog();
+		const vk::VkQueue queue = m_context.getUniversalQueue();
+
+		beginRenderPass();
+
+		// set states here
+		const float halfWidth		= (float)WIDTH / 2;
+		const float halfHeight		= (float)HEIGHT / 2;
+		const deInt32 quarterWidth	= WIDTH / 4;
+		const deInt32 quarterHeight = HEIGHT / 4;
+
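+		// One viewport per framebuffer quadrant; each scissor keeps only the quarter of its viewport that is
+		// closest to the centre, so the quads (routed to the viewports by the geometry shader, presumably via
+		// gl_ViewportIndex) should together produce a green square covering the centre of the image.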
+		const vk::VkViewport viewports[4] =
+		{
+			{ 0.0f, 0.0f, (float)halfWidth, (float)halfHeight, 0.0f, 0.0f },
+			{ halfWidth, 0.0f, (float)halfWidth, (float)halfHeight, 0.0f, 0.0f },
+			{ halfWidth, halfHeight, (float)halfWidth, (float)halfHeight, 0.0f, 0.0f },
+			{ 0.0f, halfHeight, (float)halfWidth, (float)halfHeight, 0.0f, 0.0f }
+		};
+
+		const vk::VkRect2D scissors[4] =
+		{
+			{ { quarterWidth, quarterHeight }, { quarterWidth, quarterHeight } },
+			{ { (deInt32)halfWidth, quarterHeight }, { quarterWidth, quarterHeight } },
+			{ { (deInt32)halfWidth, (deInt32)halfHeight }, { quarterWidth, quarterHeight } },
+			{ { quarterWidth, (deInt32)halfHeight }, { quarterWidth, quarterHeight } },
+		};
+
+		setDynamicViewportState(4, viewports, scissors);
+		setDynamicRasterizationState();
+		setDynamicBlendState();
+		setDynamicDepthStencilState();
+
+		m_vk.cmdBindPipeline(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
+		const vk::VkDeviceSize vertexBufferOffset = 0;
+		const vk::VkBuffer vertexBuffer = m_vertexBuffer->object();
+		m_vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+		m_vk.cmdDraw(*m_cmdBuffer, static_cast<deUint32>(m_data.size()), 1, 0, 0);
+
+		m_vk.cmdEndRenderPass(*m_cmdBuffer);
+		m_vk.endCommandBuffer(*m_cmdBuffer);
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,									// deUint32					commandBufferCount;
+			&m_cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// validation
+		{
+			VK_CHECK(m_vk.queueWaitIdle(queue));
+
+			tcu::Texture2D referenceFrame(vk::mapVkFormat(m_colorAttachmentFormat), (int)(0.5 + WIDTH), (int)(0.5 + HEIGHT));
+			referenceFrame.allocLevel(0);
+
+			const deInt32 frameWidth = referenceFrame.getWidth();
+			const deInt32 frameHeight = referenceFrame.getHeight();
+
+			tcu::clear(referenceFrame.getLevel(0), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
+			for (int y = 0; y < frameHeight; y++)
+			{
+				const float yCoord = (float)(y / (0.5*frameHeight)) - 1.0f;
+
+				for (int x = 0; x < frameWidth; x++)
+				{
+					const float xCoord = (float)(x / (0.5*frameWidth)) - 1.0f;
+
+					if (xCoord >= -0.5f && xCoord <= 0.5f && yCoord >= -0.5f && yCoord <= 0.5f)
+						referenceFrame.getLevel(0).setPixel(tcu::Vec4(0.0f, 1.0f, 0.0f, 1.0f), x, y);
+				}
+			}
+
+			const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+			const tcu::ConstPixelBufferAccess renderedFrame = m_colorTargetImage->readSurface(queue, m_context.getDefaultAllocator(),
+				vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, WIDTH, HEIGHT, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+			if (!tcu::fuzzyCompare(log, "Result", "Image comparison result",
+				referenceFrame.getLevel(0), renderedFrame, 0.05f,
+				tcu::COMPARE_LOG_RESULT))
+			{
+				return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Image verification failed");
+			}
+
+			return tcu::TestStatus(QP_TEST_RESULT_PASS, "Image verification passed");
+		}
+	}
+};
+
+} // anonymous
+
+DynamicStateVPTests::DynamicStateVPTests (tcu::TestContext& testCtx)
+	: TestCaseGroup (testCtx, "vp_state", "Tests for viewport state")
+{
+	/* Left blank on purpose */
+}
+
+DynamicStateVPTests::~DynamicStateVPTests ()
+{
+}
+
+void DynamicStateVPTests::init (void)
+{
+	ShaderMap shaderPaths;
+	shaderPaths[glu::SHADERTYPE_VERTEX] = "vulkan/dynamic_state/VertexFetch.vert";
+	shaderPaths[glu::SHADERTYPE_FRAGMENT] = "vulkan/dynamic_state/VertexFetch.frag";
+
+	addChild(new InstanceFactory<ViewportParamTestInstane>(m_testCtx, "viewport", "Set a viewport twice the size of the screen", shaderPaths));
+	addChild(new InstanceFactory<ScissorParamTestInstance>(m_testCtx, "scissor", "Perform a scissor test on the bottom-left quarter of the surface", shaderPaths));
+
+	shaderPaths[glu::SHADERTYPE_GEOMETRY] = "vulkan/dynamic_state/ViewportArray.geom";
+	addChild(new InstanceFactory<ViewportArrayTestInstance>(m_testCtx, "viewport_array", "Multiple viewports and scissors", shaderPaths));
+}
+
+} // DynamicState
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateVPTests.hpp b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateVPTests.hpp
new file mode 100644
index 0000000..b22251c
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/dynamic_state/vktDynamicStateVPTests.hpp
@@ -0,0 +1,61 @@
+#ifndef _VKTDYNAMICSTATEVPTESTS_HPP
+#define _VKTDYNAMICSTATEVPTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Dynamic State Viewport Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace DynamicState
+{
+
+class DynamicStateVPTests : public tcu::TestCaseGroup
+{
+public:
+							DynamicStateVPTests			(tcu::TestContext& testCtx);
+							~DynamicStateVPTests		(void);
+	void					init						(void);
+
+private:
+	DynamicStateVPTests									(const DynamicStateVPTests& other);
+	DynamicStateVPTests&	operator=					(const DynamicStateVPTests& other);
+};
+
+} // DynamicState
+} // vkt
+
+#endif // _VKTDYNAMICSTATEVPTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/image/CMakeLists.txt b/external/vulkancts/modules/vulkan/image/CMakeLists.txt
new file mode 100644
index 0000000..475182a
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/image/CMakeLists.txt
@@ -0,0 +1,25 @@
+include_directories(..)
+
+set(DEQP_VK_IMAGE_SRCS
+	vktImageTests.cpp
+	vktImageTests.hpp
+	vktImageTestsUtil.cpp
+	vktImageTestsUtil.hpp
+	vktImageLoadStoreTests.cpp
+	vktImageLoadStoreTests.hpp
+	vktImageQualifiersTests.cpp
+	vktImageQualifiersTests.hpp
+	vktImageSizeTests.cpp
+	vktImageSizeTests.hpp
+	vktImageTexture.cpp
+	vktImageTexture.hpp
+	)
+
+set(DEQP_VK_IMAGE_LIBS
+	deqp-vk-common
+	tcutil
+	vkutil
+	)
+
+add_library(deqp-vk-image STATIC ${DEQP_VK_IMAGE_SRCS})
+target_link_libraries(deqp-vk-image ${DEQP_VK_IMAGE_LIBS})
diff --git a/external/vulkancts/modules/vulkan/image/vktImageLoadStoreTests.cpp b/external/vulkancts/modules/vulkan/image/vktImageLoadStoreTests.cpp
new file mode 100644
index 0000000..d7c4bab
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/image/vktImageLoadStoreTests.cpp
@@ -0,0 +1,1436 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image load/store Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktImageLoadStoreTests.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vktImageTestsUtil.hpp"
+#include "vktImageTexture.hpp"
+
+#include "vkDefs.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkPrograms.hpp"
+#include "vkMemUtil.hpp"
+#include "vkBuilderUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkImageUtil.hpp"
+
+#include "deUniquePtr.hpp"
+#include "deStringUtil.hpp"
+
+#include "tcuImageCompare.hpp"
+#include "tcuTexture.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuFloat.hpp"
+
+#include <string>
+#include <vector>
+
+using namespace vk;
+
+namespace vkt
+{
+namespace image
+{
+namespace
+{
+
+inline VkImageCreateInfo makeImageCreateInfo (const Texture& texture, const VkFormat format, const VkImageUsageFlags usage, const VkImageCreateFlags flags)
+{
+	const VkImageCreateInfo imageParams =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,														// VkStructureType			sType;
+		DE_NULL,																					// const void*				pNext;
+		(isCube(texture) ? (VkImageCreateFlags)VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT : 0u) | flags,	// VkImageCreateFlags		flags;
+		mapImageType(texture.type()),																// VkImageType				imageType;
+		format,																						// VkFormat					format;
+		makeExtent3D(texture.layerSize()),															// VkExtent3D				extent;
+		1u,																							// deUint32					mipLevels;
+		(deUint32)texture.numLayers(),																// deUint32					arrayLayers;
+		VK_SAMPLE_COUNT_1_BIT,																		// VkSampleCountFlagBits	samples;
+		VK_IMAGE_TILING_OPTIMAL,																	// VkImageTiling			tiling;
+		usage,																						// VkImageUsageFlags		usage;
+		VK_SHARING_MODE_EXCLUSIVE,																	// VkSharingMode			sharingMode;
+		0u,																							// deUint32					queueFamilyIndexCount;
+		DE_NULL,																					// const deUint32*			pQueueFamilyIndices;
+		VK_IMAGE_LAYOUT_UNDEFINED,																	// VkImageLayout			initialLayout;
+	};
+	return imageParams;
+}
+
+inline VkBufferImageCopy makeBufferImageCopy (const Texture& texture)
+{
+	return image::makeBufferImageCopy(makeExtent3D(texture.layerSize()), texture.numLayers());
+}
+
+ImageType getImageTypeForSingleLayer (const ImageType imageType)
+{
+	switch (imageType)
+	{
+		case IMAGE_TYPE_1D:
+		case IMAGE_TYPE_1D_ARRAY:
+			return IMAGE_TYPE_1D;
+
+		case IMAGE_TYPE_2D:
+		case IMAGE_TYPE_2D_ARRAY:
+		case IMAGE_TYPE_CUBE:
+		case IMAGE_TYPE_CUBE_ARRAY:
+			// A single layer for cube is a 2d face
+			return IMAGE_TYPE_2D;
+
+		case IMAGE_TYPE_3D:
+			return IMAGE_TYPE_3D;
+
+		case IMAGE_TYPE_BUFFER:
+			return IMAGE_TYPE_BUFFER;
+
+		default:
+			DE_FATAL("Internal test error");
+			return IMAGE_TYPE_LAST;
+	}
+}
+
+float computeStoreColorScale (const VkFormat format, const tcu::IVec3 imageSize)
+{
+	const int maxImageDimension = de::max(imageSize.x(), de::max(imageSize.y(), imageSize.z()));
+	const float div = static_cast<float>(maxImageDimension - 1);
+
+	if (isUnormFormat(format))
+		return 1.0f / div;
+	else if (isSnormFormat(format))
+		return 2.0f / div;
+	else
+		return 1.0f;
+}
+
+inline float computeStoreColorBias (const VkFormat format)
+{
+	return isSnormFormat(format) ? -1.0f : 0.0f;
+}
+
+inline bool isIntegerFormat (const VkFormat format)
+{
+	return isIntFormat(format) || isUintFormat(format);
+}
+
+tcu::ConstPixelBufferAccess getLayerOrSlice (const Texture& texture, const tcu::ConstPixelBufferAccess access, const int layer)
+{
+	switch (texture.type())
+	{
+		case IMAGE_TYPE_1D:
+		case IMAGE_TYPE_2D:
+		case IMAGE_TYPE_BUFFER:
+			// Not layered
+			DE_ASSERT(layer == 0);
+			return access;
+
+		case IMAGE_TYPE_1D_ARRAY:
+			return tcu::getSubregion(access, 0, layer, access.getWidth(), 1);
+
+		case IMAGE_TYPE_2D_ARRAY:
+		case IMAGE_TYPE_CUBE:
+		case IMAGE_TYPE_CUBE_ARRAY:
+		case IMAGE_TYPE_3D:			// A 3D texture is treated as if its depth slices were layers
+			return tcu::getSubregion(access, 0, 0, layer, access.getWidth(), access.getHeight(), 1);
+
+		default:
+			DE_FATAL("Internal test error");
+			return tcu::ConstPixelBufferAccess();
+	}
+}
+
+std::string getFormatCaseName (const VkFormat format)
+{
+	const std::string fullName = getFormatName(format);
+
+	DE_ASSERT(de::beginsWith(fullName, "VK_FORMAT_"));
+
+	return de::toLower(fullName.substr(10));
+}
+
+//! \return true if all layers match in both pixel buffers
+bool comparePixelBuffers (tcu::TestLog&						log,
+						  const Texture&					texture,
+						  const VkFormat					format,
+						  const tcu::ConstPixelBufferAccess	reference,
+						  const tcu::ConstPixelBufferAccess	result)
+{
+	DE_ASSERT(reference.getFormat() == result.getFormat());
+	DE_ASSERT(reference.getSize() == result.getSize());
+
+	const bool intFormat = isIntegerFormat(format);
+	const bool is3d = (texture.type() == IMAGE_TYPE_3D);
+	const int numLayersOrSlices = (is3d ? texture.size().z() : texture.numLayers());
+	const int numCubeFaces = 6;
+
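+	// Compare each layer (or 3D slice) separately; integer formats are compared exactly,
+	// float formats with a small threshold.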
+	int passedLayers = 0;
+	for (int layerNdx = 0; layerNdx < numLayersOrSlices; ++layerNdx)
+	{
+		const std::string comparisonName = "Comparison" + de::toString(layerNdx);
+		const std::string comparisonDesc = "Image Comparison, " +
+			(isCube(texture) ? "face " + de::toString(layerNdx % numCubeFaces) + ", cube " + de::toString(layerNdx / numCubeFaces) :
+			is3d			 ? "slice " + de::toString(layerNdx) : "layer " + de::toString(layerNdx));
+
+		const tcu::ConstPixelBufferAccess refLayer = getLayerOrSlice(texture, reference, layerNdx);
+		const tcu::ConstPixelBufferAccess resultLayer = getLayerOrSlice(texture, result, layerNdx);
+
+		bool ok = false;
+		if (intFormat)
+			ok = tcu::intThresholdCompare(log, comparisonName.c_str(), comparisonDesc.c_str(), refLayer, resultLayer, tcu::UVec4(0), tcu::COMPARE_LOG_RESULT);
+		else
+			ok = tcu::floatThresholdCompare(log, comparisonName.c_str(), comparisonDesc.c_str(), refLayer, resultLayer, tcu::Vec4(0.01f), tcu::COMPARE_LOG_RESULT);
+
+		if (ok)
+			++passedLayers;
+	}
+	return passedLayers == numLayersOrSlices;
+}
+
+//! Zero out invalid pixels in the image (denormalized, infinite, NaN values)
+void replaceBadFloatReinterpretValues (const tcu::PixelBufferAccess access)
+{
+	DE_ASSERT(tcu::getTextureChannelClass(access.getFormat().type) == tcu::TEXTURECHANNELCLASS_FLOATING_POINT);
+
+	for (int z = 0; z < access.getDepth(); ++z)
+	for (int y = 0; y < access.getHeight(); ++y)
+	for (int x = 0; x < access.getWidth(); ++x)
+	{
+		const tcu::Vec4 color(access.getPixel(x, y, z));
+		tcu::Vec4 newColor = color;
+
+		for (int i = 0; i < 4; ++i)
+		{
+			if (access.getFormat().type == tcu::TextureFormat::HALF_FLOAT)
+			{
+				const tcu::Float16 f(color[i]);
+				if (f.isDenorm() || f.isInf() || f.isNaN())
+					newColor[i] = 0.0f;
+			}
+			else
+			{
+				const tcu::Float32 f(color[i]);
+				if (f.isDenorm() || f.isInf() || f.isNaN())
+					newColor[i] = 0.0f;
+			}
+		}
+
+		if (newColor != color)
+			access.setPixel(newColor, x, y, z);
+	}
+}
+
+//! Replace invalid snorm pixels (-128) in the image with -127
+void replaceSnormReinterpretValues (const tcu::PixelBufferAccess access)
+{
+	DE_ASSERT(tcu::getTextureChannelClass(access.getFormat().type) == tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT);
+
+	for (int z = 0; z < access.getDepth(); ++z)
+	for (int y = 0; y < access.getHeight(); ++y)
+	for (int x = 0; x < access.getWidth(); ++x)
+	{
+		const tcu::IVec4 color(access.getPixelInt(x, y, z));
+		tcu::IVec4 newColor = color;
+
+		for (int i = 0; i < 4; ++i)
+		{
+			const deInt32 oldColor(color[i]);
+			if (oldColor == -128) newColor[i] = -127;
+		}
+
+		if (newColor != color)
+			access.setPixel(newColor, x, y, z);
+	}
+}
+
+tcu::TextureLevel generateReferenceImage (const tcu::IVec3& imageSize, const VkFormat imageFormat, const VkFormat readFormat)
+{
+	// Generate reference image data using the storage format
+
+	tcu::TextureLevel reference(mapVkFormat(imageFormat), imageSize.x(), imageSize.y(), imageSize.z());
+	const tcu::PixelBufferAccess access = reference.getAccess();
+
+	const float storeColorScale = computeStoreColorScale(imageFormat, imageSize);
+	const float storeColorBias = computeStoreColorBias(imageFormat);
+
+	const bool intFormat = isIntegerFormat(imageFormat);
+	const int xMax = imageSize.x() - 1;
+	const int yMax = imageSize.y() - 1;
+
+	for (int z = 0; z < imageSize.z(); ++z)
+	for (int y = 0; y < imageSize.y(); ++y)
+	for (int x = 0; x < imageSize.x(); ++x)
+	{
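+		// Deterministic per-texel pattern based on XOR of the (possibly mirrored) coordinates;
+		// the store compute shader builds the same expression (see StoreTest::initPrograms).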
+		const tcu::IVec4 color(x^y^z, (xMax - x)^y^z, x^(yMax - y)^z, (xMax - x)^(yMax - y)^z);
+
+		if (intFormat)
+			access.setPixel(color, x, y, z);
+		else
+			access.setPixel(color.asFloat()*storeColorScale + storeColorBias, x, y, z);
+	}
+
+	// If the image is to be accessed as a float texture, get rid of invalid values
+
+	if (isFloatFormat(readFormat) && imageFormat != readFormat)
+		replaceBadFloatReinterpretValues(tcu::PixelBufferAccess(mapVkFormat(readFormat), imageSize, access.getDataPtr()));
+	if (isSnormFormat(readFormat) && imageFormat != readFormat)
+		replaceSnormReinterpretValues(tcu::PixelBufferAccess(mapVkFormat(readFormat), imageSize, access.getDataPtr()));
+
+	return reference;
+}
+
+inline tcu::TextureLevel generateReferenceImage (const tcu::IVec3& imageSize, const VkFormat imageFormat)
+{
+	return generateReferenceImage(imageSize, imageFormat, imageFormat);
+}
+
+void flipHorizontally (const tcu::PixelBufferAccess access)
+{
+	const int xMax = access.getWidth() - 1;
+	const int halfWidth = access.getWidth() / 2;
+
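+	// Swap pixel columns about the vertical center line. The load/store shader reads texel
+	// (xMax - x) and writes texel x, so the reference image is flipped before comparison.
+	// Integer formats take a separate path to avoid round-tripping through floats.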
+	if (isIntegerFormat(mapTextureFormat(access.getFormat())))
+		for (int z = 0; z < access.getDepth(); z++)
+		for (int y = 0; y < access.getHeight(); y++)
+		for (int x = 0; x < halfWidth; x++)
+		{
+			const tcu::UVec4 temp = access.getPixelUint(xMax - x, y, z);
+			access.setPixel(access.getPixelUint(x, y, z), xMax - x, y, z);
+			access.setPixel(temp, x, y, z);
+		}
+	else
+		for (int z = 0; z < access.getDepth(); z++)
+		for (int y = 0; y < access.getHeight(); y++)
+		for (int x = 0; x < halfWidth; x++)
+		{
+			const tcu::Vec4 temp = access.getPixel(xMax - x, y, z);
+			access.setPixel(access.getPixel(x, y, z), xMax - x, y, z);
+			access.setPixel(temp, x, y, z);
+		}
+}
+
+#if defined(DE_DEBUG)
+inline bool colorScaleAndBiasAreValid (const VkFormat format, const float colorScale, const float colorBias)
+{
+	// Only normalized (fixed-point) formats may have scale/bias
+	const bool integerOrFloatFormat = isIntFormat(format) || isUintFormat(format) || isFloatFormat(format);
+	return !integerOrFloatFormat || (colorScale == 1.0f && colorBias == 0.0f);
+}
+#endif
+
+inline bool formatsAreCompatible (const VkFormat format0, const VkFormat format1)
+{
+	return format0 == format1 || mapVkFormat(format0).getPixelSize() == mapVkFormat(format1).getPixelSize();
+}
+
+void commandImageWriteBarrierBetweenShaderInvocations (Context& context, const VkCommandBuffer cmdBuffer, const VkImage image, const Texture& texture)
+{
+	const DeviceInterface& vk = context.getDeviceInterface();
+
+	const VkImageSubresourceRange fullImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, texture.numLayers());
+	const VkImageMemoryBarrier shaderWriteBarrier = makeImageMemoryBarrier(
+		VK_ACCESS_SHADER_WRITE_BIT, 0u,
+		VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
+		image, fullImageSubresourceRange);
+
+	vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &shaderWriteBarrier);
+}
+
+void commandBufferWriteBarrierBeforeHostRead (Context& context, const VkCommandBuffer cmdBuffer, const VkBuffer buffer, const VkDeviceSize bufferSizeBytes)
+{
+	const DeviceInterface& vk = context.getDeviceInterface();
+
+	const VkBufferMemoryBarrier shaderWriteBarrier = makeBufferMemoryBarrier(
+		VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
+		buffer, 0ull, bufferSizeBytes);
+
+	vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &shaderWriteBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+}
+
+//! Copy all layers of an image to a buffer.
+void commandCopyImageToBuffer (Context&					context,
+							   const VkCommandBuffer	cmdBuffer,
+							   const VkImage			image,
+							   const VkBuffer			buffer,
+							   const VkDeviceSize		bufferSizeBytes,
+							   const Texture&			texture)
+{
+	const DeviceInterface& vk = context.getDeviceInterface();
+
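+	// Transition the image to TRANSFER_SRC_OPTIMAL, copy all layers into the buffer,
+	// then make the transfer write visible to host reads.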
+	const VkImageSubresourceRange fullImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, texture.numLayers());
+	const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
+		VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
+		VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+		image, fullImageSubresourceRange);
+
+	const VkBufferImageCopy copyRegion = makeBufferImageCopy(texture);
+
+	const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
+		VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
+		buffer, 0ull, bufferSizeBytes);
+
+	vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
+	vk.cmdCopyImageToBuffer(cmdBuffer, image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, buffer, 1u, &copyRegion);
+	vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+}
+
+//! Minimum chunk size is determined by the offset alignment requirements.
+VkDeviceSize getOptimalUniformBufferChunkSize (Context& context, VkDeviceSize minimumRequiredChunkSizeBytes)
+{
+	const VkPhysicalDeviceProperties properties = getPhysicalDeviceProperties(context.getInstanceInterface(), context.getPhysicalDevice());
+	const VkDeviceSize alignment = properties.limits.minUniformBufferOffsetAlignment;
+
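+	// Return a multiple of the alignment that is no smaller than the requested size
+	// (the formula may overshoot by one alignment unit, which is harmless).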
+	if (minimumRequiredChunkSizeBytes > alignment)
+		return alignment + (minimumRequiredChunkSizeBytes / alignment) * alignment;
+	else
+		return alignment;
+}
+
+class StoreTest : public TestCase
+{
+public:
+	enum TestFlags
+	{
+		FLAG_SINGLE_LAYER_BIND = 0x1,	//!< Run the shader multiple times, each time binding a different layer.
+	};
+
+							StoreTest			(tcu::TestContext&	testCtx,
+												 const std::string&	name,
+												 const std::string&	description,
+												 const Texture&		texture,
+												 const VkFormat		format,
+												 const TestFlags	flags = static_cast<TestFlags>(0));
+
+	void					initPrograms		(SourceCollections& programCollection) const;
+
+	TestInstance*			createInstance		(Context&			context) const;
+
+private:
+	const Texture			m_texture;
+	const VkFormat			m_format;
+	const bool				m_singleLayerBind;
+};
+
+StoreTest::StoreTest (tcu::TestContext&		testCtx,
+					  const std::string&	name,
+					  const std::string&	description,
+					  const Texture&		texture,
+					  const VkFormat		format,
+					  const TestFlags		flags)
+	: TestCase			(testCtx, name, description)
+	, m_texture			(texture)
+	, m_format			(format)
+	, m_singleLayerBind	((flags & FLAG_SINGLE_LAYER_BIND) != 0)
+{
+	if (m_singleLayerBind)
+		DE_ASSERT(m_texture.numLayers() > 1);
+}
+
+void StoreTest::initPrograms (SourceCollections& programCollection) const
+{
+	const float storeColorScale = computeStoreColorScale(m_format, m_texture.size());
+	const float storeColorBias = computeStoreColorBias(m_format);
+	DE_ASSERT(colorScaleAndBiasAreValid(m_format, storeColorScale, storeColorBias));
+
+	const std::string xMax = de::toString(m_texture.size().x() - 1);
+	const std::string yMax = de::toString(m_texture.size().y() - 1);
+	const std::string signednessPrefix = isUintFormat(m_format) ? "u" : isIntFormat(m_format) ? "i" : "";
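+	// Build the same XOR-based per-texel expression as generateReferenceImage(), with gx/gy/gz
+	// standing for the global invocation coordinates.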
+	const std::string colorBaseExpr = signednessPrefix + "vec4("
+		+ "gx^gy^gz, "
+		+ "(" + xMax + "-gx)^gy^gz, "
+		+ "gx^(" + yMax + "-gy)^gz, "
+		+ "(" + xMax + "-gx)^(" + yMax + "-gy)^gz)";
+
+	const std::string colorExpr = colorBaseExpr + (storeColorScale == 1.0f ? "" : "*" + de::toString(storeColorScale))
+								  + (storeColorBias == 0.0f ? "" : " + float(" + de::toString(storeColorBias) + ")");
+
+	const int dimension = (m_singleLayerBind ? m_texture.layerDimension() : m_texture.dimension());
+	const std::string texelCoordStr = (dimension == 1 ? "gx" : dimension == 2 ? "ivec2(gx, gy)" : dimension == 3 ? "ivec3(gx, gy, gz)" : "");
+
+	const ImageType usedImageType = (m_singleLayerBind ? getImageTypeForSingleLayer(m_texture.type()) : m_texture.type());
+	const std::string formatQualifierStr = getShaderImageFormatQualifier(mapVkFormat(m_format));
+	const std::string imageTypeStr = getShaderImageType(mapVkFormat(m_format), usedImageType);
+
+	std::ostringstream src;
+	src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
+		<< "\n"
+		<< "layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n"
+		<< "layout (binding = 0, " << formatQualifierStr << ") writeonly uniform highp " << imageTypeStr << " u_image;\n";
+
+	if (m_singleLayerBind)
+		src << "layout (binding = 1) readonly uniform Constants {\n"
+			<< "    int u_layerNdx;\n"
+			<< "};\n";
+
+	src << "\n"
+		<< "void main (void)\n"
+		<< "{\n"
+		<< "    int gx = int(gl_GlobalInvocationID.x);\n"
+		<< "    int gy = int(gl_GlobalInvocationID.y);\n"
+		<< "    int gz = " << (m_singleLayerBind ? "u_layerNdx" : "int(gl_GlobalInvocationID.z)") << ";\n"
+		<< "    imageStore(u_image, " << texelCoordStr << ", " << colorExpr << ");\n"
+		<< "}\n";
+
+	programCollection.glslSources.add("comp") << glu::ComputeSource(src.str());
+}
+
+//! Generic test iteration algorithm for image tests
+class BaseTestInstance : public TestInstance
+{
+public:
+									BaseTestInstance						(Context&		context,
+																			 const Texture&	texture,
+																			 const VkFormat	format,
+																			 const bool		singleLayerBind);
+
+	tcu::TestStatus                 iterate									(void);
+
+	virtual							~BaseTestInstance						(void) {}
+
+protected:
+	virtual VkDescriptorSetLayout	prepareDescriptors						(void) = 0;
+	virtual tcu::TestStatus			verifyResult							(void) = 0;
+
+	virtual void					commandBeforeCompute					(const VkCommandBuffer	cmdBuffer) = 0;
+	virtual void					commandBetweenShaderInvocations			(const VkCommandBuffer	cmdBuffer) = 0;
+	virtual void					commandAfterCompute						(const VkCommandBuffer	cmdBuffer) = 0;
+
+	virtual void					commandBindDescriptorsForLayer			(const VkCommandBuffer	cmdBuffer,
+																			 const VkPipelineLayout pipelineLayout,
+																			 const int				layerNdx) = 0;
+
+	const Texture					m_texture;
+	const VkFormat					m_format;
+	const bool						m_singleLayerBind;
+};
+
+BaseTestInstance::BaseTestInstance (Context& context, const Texture& texture, const VkFormat format, const bool singleLayerBind)
+	: TestInstance		(context)
+	, m_texture			(texture)
+	, m_format			(format)
+	, m_singleLayerBind	(singleLayerBind)
+{
+}
+
+tcu::TestStatus BaseTestInstance::iterate (void)
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+
+	const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
+
+	const VkDescriptorSetLayout descriptorSetLayout = prepareDescriptors();
+	const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device, descriptorSetLayout));
+	const Unique<VkPipeline> pipeline(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
+
+	const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+	const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+	beginCommandBuffer(vk, *cmdBuffer);
+
+	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+	commandBeforeCompute(*cmdBuffer);
+
+	const tcu::IVec3 workSize = (m_singleLayerBind ? m_texture.layerSize() : m_texture.size());
+	const int loopNumLayers = (m_singleLayerBind ? m_texture.numLayers() : 1);
+	for (int layerNdx = 0; layerNdx < loopNumLayers; ++layerNdx)
+	{
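+		// In single-layer-bind mode each iteration binds a different layer and issues its own
+		// dispatch, with an image write barrier between consecutive dispatches; otherwise there
+		// is a single dispatch covering all layers.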
+		commandBindDescriptorsForLayer(*cmdBuffer, *pipelineLayout, layerNdx);
+
+		if (layerNdx > 0)
+			commandBetweenShaderInvocations(*cmdBuffer);
+
+		vk.cmdDispatch(*cmdBuffer, workSize.x(), workSize.y(), workSize.z());
+	}
+
+	commandAfterCompute(*cmdBuffer);
+
+	endCommandBuffer(vk, *cmdBuffer);
+
+	submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+
+	return verifyResult();
+}
+
+//! Base store test implementation
+class StoreTestInstance : public BaseTestInstance
+{
+public:
+									StoreTestInstance						(Context&		context,
+																			 const Texture&	texture,
+																			 const VkFormat	format,
+																			 const bool		singleLayerBind);
+
+protected:
+	tcu::TestStatus					verifyResult							(void);
+
+	// Add empty implementations for functions that may not be needed
+	void							commandBeforeCompute					(const VkCommandBuffer) {}
+	void							commandBetweenShaderInvocations			(const VkCommandBuffer) {}
+	void							commandAfterCompute						(const VkCommandBuffer) {}
+
+	de::MovePtr<Buffer>				m_imageBuffer;
+	const VkDeviceSize				m_imageSizeBytes;
+};
+
+StoreTestInstance::StoreTestInstance (Context& context, const Texture& texture, const VkFormat format, const bool singleLayerBind)
+	: BaseTestInstance		(context, texture, format, singleLayerBind)
+	, m_imageSizeBytes		(getImageSizeBytes(texture.size(), format))
+{
+	const DeviceInterface&	vk			= m_context.getDeviceInterface();
+	const VkDevice			device		= m_context.getDevice();
+	Allocator&				allocator	= m_context.getDefaultAllocator();
+
+	// A helper buffer with enough space to hold the whole image. Usage flags accommodate all derived test instances.
+
+	m_imageBuffer = de::MovePtr<Buffer>(new Buffer(
+		vk, device, allocator,
+		makeBufferCreateInfo(m_imageSizeBytes, VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT),
+		MemoryRequirement::HostVisible));
+}
+
+tcu::TestStatus StoreTestInstance::verifyResult	(void)
+{
+	const DeviceInterface&	vk		= m_context.getDeviceInterface();
+	const VkDevice			device	= m_context.getDevice();
+
+	const tcu::IVec3 imageSize = m_texture.size();
+	const tcu::TextureLevel reference = generateReferenceImage(imageSize, m_format);
+
+	const Allocation& alloc = m_imageBuffer->getAllocation();
+	invalidateMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_imageSizeBytes);
+	const tcu::ConstPixelBufferAccess result(mapVkFormat(m_format), imageSize, alloc.getHostPtr());
+
+	if (comparePixelBuffers(m_context.getTestContext().getLog(), m_texture, m_format, reference.getAccess(), result))
+		return tcu::TestStatus::pass("Passed");
+	else
+		return tcu::TestStatus::fail("Image comparison failed");
+}
+
+//! Store test for images
+class ImageStoreTestInstance : public StoreTestInstance
+{
+public:
+										ImageStoreTestInstance					(Context&				context,
+																				 const Texture&			texture,
+																				 const VkFormat			format,
+																				 const bool				singleLayerBind);
+
+protected:
+	VkDescriptorSetLayout				prepareDescriptors						(void);
+	void								commandBeforeCompute					(const VkCommandBuffer	cmdBuffer);
+	void								commandBetweenShaderInvocations			(const VkCommandBuffer	cmdBuffer);
+	void								commandAfterCompute						(const VkCommandBuffer	cmdBuffer);
+
+	void								commandBindDescriptorsForLayer			(const VkCommandBuffer	cmdBuffer,
+																				 const VkPipelineLayout pipelineLayout,
+																				 const int				layerNdx);
+
+	de::MovePtr<Image>					m_image;
+	de::MovePtr<Buffer>					m_constantsBuffer;
+	const VkDeviceSize					m_constantsBufferChunkSizeBytes;
+	Move<VkDescriptorSetLayout>			m_descriptorSetLayout;
+	Move<VkDescriptorPool>				m_descriptorPool;
+	DynArray<Move<VkDescriptorSet> >	m_allDescriptorSets;
+	DynArray<Move<VkImageView> >		m_allImageViews;
+};
+
+ImageStoreTestInstance::ImageStoreTestInstance (Context&		context,
+												const Texture&	texture,
+												const VkFormat	format,
+												const bool		singleLayerBind)
+	: StoreTestInstance					(context, texture, format, singleLayerBind)
+	, m_constantsBufferChunkSizeBytes	(getOptimalUniformBufferChunkSize(context, sizeof(deUint32)))
+	, m_allDescriptorSets				(texture.numLayers())
+	, m_allImageViews					(texture.numLayers())
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	Allocator&				allocator			= m_context.getDefaultAllocator();
+
+	m_image = de::MovePtr<Image>(new Image(
+		vk, device, allocator,
+		makeImageCreateInfo(m_texture, m_format, VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT, 0u),
+		MemoryRequirement::Any));
+
+	// This buffer will be used to pass constants to the shader
+
+	const int numLayers = m_texture.numLayers();
+	const VkDeviceSize constantsBufferSizeBytes = numLayers * m_constantsBufferChunkSizeBytes;
+	m_constantsBuffer = de::MovePtr<Buffer>(new Buffer(
+		vk, device, allocator,
+		makeBufferCreateInfo(constantsBufferSizeBytes, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT),
+		MemoryRequirement::HostVisible));
+
+	{
+		const Allocation& alloc = m_constantsBuffer->getAllocation();
+		deUint8* const basePtr = static_cast<deUint8*>(alloc.getHostPtr());
+
+		deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(constantsBufferSizeBytes));
+
+		for (int layerNdx = 0; layerNdx < numLayers; ++layerNdx)
+		{
+			deUint32* valuePtr = reinterpret_cast<deUint32*>(basePtr + layerNdx * m_constantsBufferChunkSizeBytes);
+			*valuePtr = static_cast<deUint32>(layerNdx);
+		}
+
+		flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), constantsBufferSizeBytes);
+	}
+}
+
+VkDescriptorSetLayout ImageStoreTestInstance::prepareDescriptors (void)
+{
+	const DeviceInterface&	vk		= m_context.getDeviceInterface();
+	const VkDevice			device	= m_context.getDevice();
+
+	const int numLayers = m_texture.numLayers();
+	m_descriptorSetLayout = DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device);
+
+	m_descriptorPool = DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, numLayers)
+		.addType(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, numLayers)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, numLayers);
+
+	if (m_singleLayerBind)
+	{
+		for (int layerNdx = 0; layerNdx < numLayers; ++layerNdx)
+		{
+			m_allDescriptorSets[layerNdx] = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
+			m_allImageViews[layerNdx] = makeImageView(vk, device, m_image->get(), mapImageViewType(getImageTypeForSingleLayer(m_texture.type())), m_format,
+													  makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, layerNdx, 1u));
+		}
+	}
+	else // bind all layers at once
+	{
+		m_allDescriptorSets[0] = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
+		m_allImageViews[0] = makeImageView(vk, device, m_image->get(), mapImageViewType(m_texture.type()), m_format,
+										   makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, numLayers));
+	}
+
+	return *m_descriptorSetLayout;  // not passing the ownership
+}
+
+void ImageStoreTestInstance::commandBindDescriptorsForLayer (const VkCommandBuffer cmdBuffer, const VkPipelineLayout pipelineLayout, const int layerNdx)
+{
+	const DeviceInterface&	vk		= m_context.getDeviceInterface();
+	const VkDevice			device	= m_context.getDevice();
+
+	const VkDescriptorSet descriptorSet = *m_allDescriptorSets[layerNdx];
+	const VkImageView imageView = *m_allImageViews[layerNdx];
+
+	const VkDescriptorImageInfo descriptorImageInfo = makeDescriptorImageInfo(DE_NULL, imageView, VK_IMAGE_LAYOUT_GENERAL);
+
+	// Set the next chunk of the constants buffer. Each chunk begins with the layer index that was written at buffer creation.
+	const VkDescriptorBufferInfo descriptorConstantsBufferInfo = makeDescriptorBufferInfo(
+		m_constantsBuffer->get(), layerNdx*m_constantsBufferChunkSizeBytes, m_constantsBufferChunkSizeBytes);
+
+	DescriptorSetUpdateBuilder()
+		.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfo)
+		.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, &descriptorConstantsBufferInfo)
+		.update(vk, device);
+	vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
+}
+
+void ImageStoreTestInstance::commandBeforeCompute (const VkCommandBuffer cmdBuffer)
+{
+	const DeviceInterface& vk = m_context.getDeviceInterface();
+
+	const VkImageSubresourceRange fullImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, m_texture.numLayers());
+	const VkImageMemoryBarrier setImageLayoutBarrier = makeImageMemoryBarrier(
+		0u, 0u,
+		VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
+		m_image->get(), fullImageSubresourceRange);
+
+	const VkDeviceSize constantsBufferSize = m_texture.numLayers() * m_constantsBufferChunkSizeBytes;
+	const VkBufferMemoryBarrier writeConstantsBarrier = makeBufferMemoryBarrier(
+		VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
+		m_constantsBuffer->get(), 0ull, constantsBufferSize);
+
+	vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &writeConstantsBarrier, 1, &setImageLayoutBarrier);
+}
+
+void ImageStoreTestInstance::commandBetweenShaderInvocations (const VkCommandBuffer cmdBuffer)
+{
+	commandImageWriteBarrierBetweenShaderInvocations(m_context, cmdBuffer, m_image->get(), m_texture);
+}
+
+void ImageStoreTestInstance::commandAfterCompute (const VkCommandBuffer cmdBuffer)
+{
+	commandCopyImageToBuffer(m_context, cmdBuffer, m_image->get(), m_imageBuffer->get(), m_imageSizeBytes, m_texture);
+}
+
+//! Store test for buffers
+class BufferStoreTestInstance : public StoreTestInstance
+{
+public:
+									BufferStoreTestInstance					(Context&				context,
+																			 const Texture&			texture,
+																			 const VkFormat			format);
+
+protected:
+	VkDescriptorSetLayout			prepareDescriptors						(void);
+	void							commandAfterCompute						(const VkCommandBuffer	cmdBuffer);
+
+	void							commandBindDescriptorsForLayer			(const VkCommandBuffer	cmdBuffer,
+																			 const VkPipelineLayout pipelineLayout,
+																			 const int				layerNdx);
+
+	Move<VkDescriptorSetLayout>		m_descriptorSetLayout;
+	Move<VkDescriptorPool>			m_descriptorPool;
+	Move<VkDescriptorSet>			m_descriptorSet;
+	Move<VkBufferView>				m_bufferView;
+};
+
+BufferStoreTestInstance::BufferStoreTestInstance (Context&			context,
+												  const Texture&	texture,
+												  const VkFormat	format)
+	: StoreTestInstance(context, texture, format, false)
+{
+}
+
+VkDescriptorSetLayout BufferStoreTestInstance::prepareDescriptors (void)
+{
+	const DeviceInterface&	vk		= m_context.getDeviceInterface();
+	const VkDevice			device	= m_context.getDevice();
+
+	m_descriptorSetLayout = DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device);
+
+	m_descriptorPool = DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+
+	m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
+	m_bufferView = makeBufferView(vk, device, m_imageBuffer->get(), m_format, 0ull, m_imageSizeBytes);
+
+	return *m_descriptorSetLayout;  // not passing the ownership
+}
+
+void BufferStoreTestInstance::commandBindDescriptorsForLayer (const VkCommandBuffer cmdBuffer, const VkPipelineLayout pipelineLayout, const int layerNdx)
+{
+	DE_ASSERT(layerNdx == 0);
+	DE_UNREF(layerNdx);
+
+	const VkDevice			device	= m_context.getDevice();
+	const DeviceInterface&	vk		= m_context.getDeviceInterface();
+
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, &m_bufferView.get())
+		.update(vk, device);
+	vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipelineLayout, 0u, 1u, &m_descriptorSet.get(), 0u, DE_NULL);
+}
+
+void BufferStoreTestInstance::commandAfterCompute (const VkCommandBuffer cmdBuffer)
+{
+	commandBufferWriteBarrierBeforeHostRead(m_context, cmdBuffer, m_imageBuffer->get(), m_imageSizeBytes);
+}
+
+class LoadStoreTest : public TestCase
+{
+public:
+	enum TestFlags
+	{
+		FLAG_SINGLE_LAYER_BIND	= 1 << 0,	//!< Run the shader multiple times, each time binding a different layer.
+		FLAG_RESTRICT_IMAGES	= 1 << 1,	//!< If given, images in the shader will be qualified with "restrict".
+	};
+
+							LoadStoreTest			(tcu::TestContext&		testCtx,
+													 const std::string&		name,
+													 const std::string&		description,
+													 const Texture&			texture,
+													 const VkFormat			format,
+													 const VkFormat			imageFormat,
+													 const TestFlags		flags = static_cast<TestFlags>(0));
+
+	void					initPrograms			(SourceCollections&		programCollection) const;
+	TestInstance*			createInstance			(Context&				context) const;
+
+private:
+	const Texture			m_texture;
+	const VkFormat			m_format;				//!< Format as accessed in the shader
+	const VkFormat			m_imageFormat;			//!< Storage format
+	const bool				m_singleLayerBind;
+	const bool				m_restrictImages;
+};
+
+LoadStoreTest::LoadStoreTest (tcu::TestContext&		testCtx,
+							  const std::string&	name,
+							  const std::string&	description,
+							  const Texture&		texture,
+							  const VkFormat		format,
+							  const VkFormat		imageFormat,
+							  const TestFlags		flags)
+	: TestCase			(testCtx, name, description)
+	, m_texture			(texture)
+	, m_format			(format)
+	, m_imageFormat		(imageFormat)
+	, m_singleLayerBind ((flags & FLAG_SINGLE_LAYER_BIND) != 0)
+	, m_restrictImages	((flags & FLAG_RESTRICT_IMAGES) != 0)
+{
+	if (m_singleLayerBind)
+		DE_ASSERT(m_texture.numLayers() > 1);
+
+	DE_ASSERT(formatsAreCompatible(m_format, m_imageFormat));
+}
+
+void LoadStoreTest::initPrograms (SourceCollections& programCollection) const
+{
+	const int			dimension			= (m_singleLayerBind ? m_texture.layerDimension() : m_texture.dimension());
+	const ImageType		usedImageType		= (m_singleLayerBind ? getImageTypeForSingleLayer(m_texture.type()) : m_texture.type());
+	const std::string	formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_format));
+	const std::string	imageTypeStr		= getShaderImageType(mapVkFormat(m_format), usedImageType);
+	const std::string	maybeRestrictStr	= (m_restrictImages ? "restrict " : "");
+	const std::string	xMax				= de::toString(m_texture.size().x() - 1);
+
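+	// The generated compute shader copies u_image0 into u_image1 while mirroring the x coordinate;
+	// LoadStoreTestInstance::verifyResult() flips the reference image accordingly.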
+	std::ostringstream src;
+	src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
+		<< "\n"
+		<< "layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n"
+		<< "layout (binding = 0, " << formatQualifierStr << ") " << maybeRestrictStr << "readonly uniform highp " << imageTypeStr << " u_image0;\n"
+		<< "layout (binding = 1, " << formatQualifierStr << ") " << maybeRestrictStr << "writeonly uniform highp " << imageTypeStr << " u_image1;\n"
+		<< "\n"
+		<< "void main (void)\n"
+		<< "{\n"
+		<< (dimension == 1 ?
+			"    int pos = int(gl_GlobalInvocationID.x);\n"
+			"    imageStore(u_image1, pos, imageLoad(u_image0, " + xMax + "-pos));\n"
+			: dimension == 2 ?
+			"    ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n"
+			"    imageStore(u_image1, pos, imageLoad(u_image0, ivec2(" + xMax + "-pos.x, pos.y)));\n"
+			: dimension == 3 ?
+			"    ivec3 pos = ivec3(gl_GlobalInvocationID);\n"
+			"    imageStore(u_image1, pos, imageLoad(u_image0, ivec3(" + xMax + "-pos.x, pos.y, pos.z)));\n"
+			: "")
+		<< "}\n";
+
+	programCollection.glslSources.add("comp") << glu::ComputeSource(src.str());
+}
+
+//! Load/store test base implementation
+class LoadStoreTestInstance : public BaseTestInstance
+{
+public:
+									LoadStoreTestInstance				(Context&			context,
+																		 const Texture&		texture,
+																		 const VkFormat		format,
+																		 const VkFormat		imageFormat,
+																		 const bool			singleLayerBind);
+
+protected:
+	virtual Buffer*					getResultBuffer						(void) const = 0;	//!< Get the buffer that contains the result image
+
+	tcu::TestStatus					verifyResult						(void);
+
+	// Add empty implementations for functions that may not be needed
+	void							commandBeforeCompute				(const VkCommandBuffer) {}
+	void							commandBetweenShaderInvocations		(const VkCommandBuffer) {}
+	void							commandAfterCompute					(const VkCommandBuffer) {}
+
+	de::MovePtr<Buffer>				m_imageBuffer;		//!< Source data and helper buffer
+	const VkDeviceSize				m_imageSizeBytes;
+	const VkFormat					m_imageFormat;		//!< Image format (for storage, may be different than texture format)
+	tcu::TextureLevel				m_referenceImage;	//!< Used as input data and later to verify result image
+};
+
+LoadStoreTestInstance::LoadStoreTestInstance (Context&			context,
+											  const Texture&	texture,
+											  const VkFormat	format,
+											  const VkFormat	imageFormat,
+											  const bool		singleLayerBind)
+	: BaseTestInstance		(context, texture, format, singleLayerBind)
+	, m_imageSizeBytes		(getImageSizeBytes(texture.size(), format))
+	, m_imageFormat			(imageFormat)
+	, m_referenceImage		(generateReferenceImage(texture.size(), imageFormat, format))
+{
+	const DeviceInterface&	vk			= m_context.getDeviceInterface();
+	const VkDevice			device		= m_context.getDevice();
+	Allocator&				allocator	= m_context.getDefaultAllocator();
+
+	// A helper buffer with enough space to hold the whole image.
+
+	m_imageBuffer = de::MovePtr<Buffer>(new Buffer(
+		vk, device, allocator,
+		makeBufferCreateInfo(m_imageSizeBytes, VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
+		MemoryRequirement::HostVisible));
+
+	// Copy reference data to buffer for subsequent upload to image.
+
+	const Allocation& alloc = m_imageBuffer->getAllocation();
+	deMemcpy(alloc.getHostPtr(), m_referenceImage.getAccess().getDataPtr(), static_cast<size_t>(m_imageSizeBytes));
+	flushMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_imageSizeBytes);
+}
+
+tcu::TestStatus LoadStoreTestInstance::verifyResult	(void)
+{
+	const DeviceInterface&	vk		= m_context.getDeviceInterface();
+	const VkDevice			device	= m_context.getDevice();
+
+	// Apply the same transformation as done in the shader
+	const tcu::PixelBufferAccess reference = m_referenceImage.getAccess();
+	flipHorizontally(reference);
+
+	const Allocation& alloc = getResultBuffer()->getAllocation();
+	invalidateMappedMemoryRange(vk, device, alloc.getMemory(), alloc.getOffset(), m_imageSizeBytes);
+	const tcu::ConstPixelBufferAccess result(mapVkFormat(m_imageFormat), m_texture.size(), alloc.getHostPtr());
+
+	if (comparePixelBuffers(m_context.getTestContext().getLog(), m_texture, m_imageFormat, reference, result))
+		return tcu::TestStatus::pass("Passed");
+	else
+		return tcu::TestStatus::fail("Image comparison failed");
+}
+
+//! Load/store test for images
+class ImageLoadStoreTestInstance : public LoadStoreTestInstance
+{
+public:
+	struct PerLayerData
+	{
+										PerLayerData		(Move<VkDescriptorSet>	descriptorSet,
+															 Move<VkImageView>		imageViewSrc,
+															 Move<VkImageView>		imageViewDst);
+
+		const Unique<VkDescriptorSet>	descriptorSet;
+		const Unique<VkImageView>		imageViewSrc;
+		const Unique<VkImageView>		imageViewDst;
+	};
+
+											ImageLoadStoreTestInstance			(Context&				context,
+																				 const Texture&			texture,
+																				 const VkFormat			format,
+																				 const VkFormat			imageFormat,
+																				 const bool				singleLayerBind);
+
+protected:
+	VkDescriptorSetLayout					prepareDescriptors					(void);
+	void									commandBeforeCompute				(const VkCommandBuffer	cmdBuffer);
+	void									commandBetweenShaderInvocations		(const VkCommandBuffer	cmdBuffer);
+	void									commandAfterCompute					(const VkCommandBuffer	cmdBuffer);
+
+	void									commandBindDescriptorsForLayer		(const VkCommandBuffer	cmdBuffer,
+																				 const VkPipelineLayout pipelineLayout,
+																				 const int				layerNdx);
+
+	Buffer*									getResultBuffer						(void) const { return m_imageBuffer.get(); }
+
+	de::MovePtr<Image>						m_imageSrc;
+	de::MovePtr<Image>						m_imageDst;
+	Move<VkDescriptorSetLayout>				m_descriptorSetLayout;
+	Move<VkDescriptorPool>					m_descriptorPool;
+	DynArray<de::MovePtr<PerLayerData> >	m_perLayerData;
+};
+
+ImageLoadStoreTestInstance::PerLayerData::PerLayerData (Move<VkDescriptorSet>	descriptorSet_,
+														Move<VkImageView>		imageViewSrc_,
+														Move<VkImageView>		imageViewDst_)
+	: descriptorSet	(descriptorSet_)
+	, imageViewSrc	(imageViewSrc_)
+	, imageViewDst	(imageViewDst_)
+{
+}
+
+ImageLoadStoreTestInstance::ImageLoadStoreTestInstance (Context&		context,
+														const Texture&	texture,
+														const VkFormat	format,
+														const VkFormat	imageFormat,
+														const bool		singleLayerBind)
+	: LoadStoreTestInstance	(context, texture, format, imageFormat, singleLayerBind)
+	, m_perLayerData		(texture.numLayers())
+{
+	const DeviceInterface&		vk					= m_context.getDeviceInterface();
+	const VkDevice				device				= m_context.getDevice();
+	Allocator&					allocator			= m_context.getDefaultAllocator();
+	const VkImageCreateFlags	imageFlags			= (m_format == m_imageFormat ? 0u : (VkImageCreateFlags)VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT);
+
+	m_imageSrc = de::MovePtr<Image>(new Image(
+		vk, device, allocator,
+		makeImageCreateInfo(m_texture, m_imageFormat, VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, imageFlags),
+		MemoryRequirement::Any));
+
+	m_imageDst = de::MovePtr<Image>(new Image(
+		vk, device, allocator,
+		makeImageCreateInfo(m_texture, m_imageFormat, VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT, imageFlags),
+		MemoryRequirement::Any));
+}
+
+VkDescriptorSetLayout ImageLoadStoreTestInstance::prepareDescriptors (void)
+{
+	const VkDevice			device	= m_context.getDevice();
+	const DeviceInterface&	vk		= m_context.getDeviceInterface();
+
+	const int numLayers = m_texture.numLayers();
+	m_descriptorSetLayout = DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device);
+
+	m_descriptorPool = DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, numLayers)
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, numLayers)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, numLayers);
+
+	if (m_singleLayerBind)
+	{
+		for (int layerNdx = 0; layerNdx < numLayers; ++layerNdx)
+		{
+			const VkImageViewType viewType = mapImageViewType(getImageTypeForSingleLayer(m_texture.type()));
+			const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, layerNdx, 1u);
+
+			de::MovePtr<PerLayerData> data(new PerLayerData(
+				makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout),
+				makeImageView(vk, device, m_imageSrc->get(), viewType, m_format, subresourceRange),
+				makeImageView(vk, device, m_imageDst->get(), viewType, m_format, subresourceRange)));
+
+			m_perLayerData[layerNdx] = data;
+		}
+	}
+	else // bind all layers at once
+	{
+		const VkImageViewType viewType = mapImageViewType(m_texture.type());
+		const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, numLayers);
+
+		de::MovePtr<PerLayerData> data(new PerLayerData(
+			makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout),
+			makeImageView(vk, device, m_imageSrc->get(), viewType, m_format, subresourceRange),
+			makeImageView(vk, device, m_imageDst->get(), viewType, m_format, subresourceRange)));
+
+		m_perLayerData[0] = data;
+	}
+
+	return *m_descriptorSetLayout;  // not passing the ownership
+}
+
+void ImageLoadStoreTestInstance::commandBindDescriptorsForLayer (const VkCommandBuffer cmdBuffer, const VkPipelineLayout pipelineLayout, const int layerNdx)
+{
+	const VkDevice			device	= m_context.getDevice();
+	const DeviceInterface&	vk		= m_context.getDeviceInterface();
+
+	const PerLayerData* data = m_perLayerData[layerNdx].get();
+
+	const VkDescriptorImageInfo descriptorSrcImageInfo = makeDescriptorImageInfo(DE_NULL, *data->imageViewSrc, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
+	const VkDescriptorImageInfo descriptorDstImageInfo = makeDescriptorImageInfo(DE_NULL, *data->imageViewDst, VK_IMAGE_LAYOUT_GENERAL);
+
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*data->descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorSrcImageInfo)
+		.writeSingle(*data->descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImageInfo)
+		.update(vk, device);
+	vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipelineLayout, 0u, 1u, &data->descriptorSet.get(), 0u, DE_NULL);
+}
+
+void ImageLoadStoreTestInstance::commandBeforeCompute (const VkCommandBuffer cmdBuffer)
+{
+	const DeviceInterface& vk = m_context.getDeviceInterface();
+
+	const VkImageSubresourceRange fullImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, m_texture.numLayers());
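+
+	// First barrier block: prepare the source image for the upcoming buffer-to-image copy, the
+	// destination image for shader writes, and flush the host write to the staging buffer.
+	// Second block: after the copy, transition the source image for shader reads.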
+	{
+		const VkImageMemoryBarrier preCopyImageBarriers[] =
+		{
+			makeImageMemoryBarrier(
+				0u, 0u,
+				VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+				m_imageSrc->get(), fullImageSubresourceRange),
+			makeImageMemoryBarrier(
+				0u, 0u,
+				VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
+				m_imageDst->get(), fullImageSubresourceRange)
+		};
+
+		const VkBufferMemoryBarrier barrierFlushHostWriteBeforeCopy = makeBufferMemoryBarrier(
+			VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
+			m_imageBuffer->get(), 0ull, m_imageSizeBytes);
+
+		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT,
+			(VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &barrierFlushHostWriteBeforeCopy, DE_LENGTH_OF_ARRAY(preCopyImageBarriers), preCopyImageBarriers);
+	}
+	{
+		const VkImageMemoryBarrier barrierAfterCopy = makeImageMemoryBarrier(
+			VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
+			VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+			m_imageSrc->get(), fullImageSubresourceRange);
+
+		const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_texture);
+
+		vk.cmdCopyBufferToImage(cmdBuffer, m_imageBuffer->get(), m_imageSrc->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
+		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &barrierAfterCopy);
+	}
+}
+
+void ImageLoadStoreTestInstance::commandBetweenShaderInvocations (const VkCommandBuffer cmdBuffer)
+{
+	commandImageWriteBarrierBetweenShaderInvocations(m_context, cmdBuffer, m_imageDst->get(), m_texture);
+}
+
+void ImageLoadStoreTestInstance::commandAfterCompute (const VkCommandBuffer cmdBuffer)
+{
+	commandCopyImageToBuffer(m_context, cmdBuffer, m_imageDst->get(), m_imageBuffer->get(), m_imageSizeBytes, m_texture);
+}
+
+//! Load/store test for buffers
+class BufferLoadStoreTestInstance : public LoadStoreTestInstance
+{
+public:
+									BufferLoadStoreTestInstance		(Context&				context,
+																	 const Texture&			texture,
+																	 const VkFormat			format,
+																	 const VkFormat			imageFormat);
+
+protected:
+	VkDescriptorSetLayout			prepareDescriptors				(void);
+	void							commandAfterCompute				(const VkCommandBuffer	cmdBuffer);
+
+	void							commandBindDescriptorsForLayer	(const VkCommandBuffer	cmdBuffer,
+																	 const VkPipelineLayout pipelineLayout,
+																	 const int				layerNdx);
+
+	Buffer*							getResultBuffer					(void) const { return m_imageBufferDst.get(); }
+
+	de::MovePtr<Buffer>				m_imageBufferDst;
+	Move<VkDescriptorSetLayout>		m_descriptorSetLayout;
+	Move<VkDescriptorPool>			m_descriptorPool;
+	Move<VkDescriptorSet>			m_descriptorSet;
+	Move<VkBufferView>				m_bufferViewSrc;
+	Move<VkBufferView>				m_bufferViewDst;
+};
+
+BufferLoadStoreTestInstance::BufferLoadStoreTestInstance (Context&			context,
+														  const Texture&	texture,
+														  const VkFormat	format,
+														  const VkFormat	imageFormat)
+	: LoadStoreTestInstance(context, texture, format, imageFormat, false)
+{
+	const DeviceInterface&	vk			= m_context.getDeviceInterface();
+	const VkDevice			device		= m_context.getDevice();
+	Allocator&				allocator	= m_context.getDefaultAllocator();
+
+	// Create a destination buffer.
+
+	m_imageBufferDst = de::MovePtr<Buffer>(new Buffer(
+		vk, device, allocator,
+		makeBufferCreateInfo(m_imageSizeBytes, VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT),
+		MemoryRequirement::HostVisible));
+}
+
+VkDescriptorSetLayout BufferLoadStoreTestInstance::prepareDescriptors (void)
+{
+	const DeviceInterface&	vk		= m_context.getDeviceInterface();
+	const VkDevice			device	= m_context.getDevice();
+
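+	// Two storage texel buffer bindings: binding 0 holds the source view and binding 1 the destination view.
+	// commandBindDescriptorsForLayer() writes both views before binding the descriptor set.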
+	m_descriptorSetLayout = DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device);
+
+	m_descriptorPool = DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+
+	m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
+	m_bufferViewSrc = makeBufferView(vk, device, m_imageBuffer->get(), m_format, 0ull, m_imageSizeBytes);
+	m_bufferViewDst = makeBufferView(vk, device, m_imageBufferDst->get(), m_format, 0ull, m_imageSizeBytes);
+
+	return *m_descriptorSetLayout;  // ownership is not transferred
+}
+
+void BufferLoadStoreTestInstance::commandBindDescriptorsForLayer (const VkCommandBuffer cmdBuffer, const VkPipelineLayout pipelineLayout, const int layerNdx)
+{
+	DE_ASSERT(layerNdx == 0);
+	DE_UNREF(layerNdx);
+
+	const VkDevice			device	= m_context.getDevice();
+	const DeviceInterface&	vk		= m_context.getDeviceInterface();
+
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, &m_bufferViewSrc.get())
+		.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, &m_bufferViewDst.get())
+		.update(vk, device);
+	vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipelineLayout, 0u, 1u, &m_descriptorSet.get(), 0u, DE_NULL);
+}
+
+void BufferLoadStoreTestInstance::commandAfterCompute (const VkCommandBuffer cmdBuffer)
+{
+	commandBufferWriteBarrierBeforeHostRead(m_context, cmdBuffer, m_imageBufferDst->get(), m_imageSizeBytes);
+}
+
+TestInstance* StoreTest::createInstance (Context& context) const
+{
+	if (m_texture.type() == IMAGE_TYPE_BUFFER)
+		return new BufferStoreTestInstance(context, m_texture, m_format);
+	else
+		return new ImageStoreTestInstance(context, m_texture, m_format, m_singleLayerBind);
+}
+
+TestInstance* LoadStoreTest::createInstance (Context& context) const
+{
+	if (m_texture.type() == IMAGE_TYPE_BUFFER)
+		return new BufferLoadStoreTestInstance(context, m_texture, m_format, m_imageFormat);
+	else
+		return new ImageLoadStoreTestInstance(context, m_texture, m_format, m_imageFormat, m_singleLayerBind);
+}
+
+// TODO Which image/format combinations should be supported? The spec says this should be queried with vkGetPhysicalDeviceImageFormatProperties.
+//      What about buffer/format combinations for texel storage buffers? Should vkGetPhysicalDeviceFormatProperties be used there?
+
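+//! One test texture per image type; getTestTexture() below looks up an entry by type.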
+static const Texture s_textures[] =
+{
+	Texture(IMAGE_TYPE_1D,			tcu::IVec3(64,	1,	1),	1),
+	Texture(IMAGE_TYPE_1D_ARRAY,	tcu::IVec3(64,	1,	1),	8),
+	Texture(IMAGE_TYPE_2D,			tcu::IVec3(64,	64,	1),	1),
+	Texture(IMAGE_TYPE_2D_ARRAY,	tcu::IVec3(64,	64,	1),	8),
+	Texture(IMAGE_TYPE_3D,			tcu::IVec3(64,	64,	8),	1),
+	Texture(IMAGE_TYPE_CUBE,		tcu::IVec3(64,	64,	1),	6),
+	Texture(IMAGE_TYPE_CUBE_ARRAY,	tcu::IVec3(64,	64,	1),	2*6),
+	Texture(IMAGE_TYPE_BUFFER,		tcu::IVec3(64,	1,	1),	1),
+};
+
+const Texture& getTestTexture (const ImageType imageType)
+{
+	for (int textureNdx = 0; textureNdx < DE_LENGTH_OF_ARRAY(s_textures); ++textureNdx)
+		if (s_textures[textureNdx].type() == imageType)
+			return s_textures[textureNdx];
+
+	DE_FATAL("Internal error");
+	return s_textures[0];
+}
+
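+//! Formats covered by the store and load/store groups: float, unsigned integer, signed integer and normalized variants.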
+static const VkFormat s_formats[] =
+{
+	VK_FORMAT_R32G32B32A32_SFLOAT,
+	VK_FORMAT_R16G16B16A16_SFLOAT,
+	VK_FORMAT_R32_SFLOAT,
+
+	VK_FORMAT_R32G32B32A32_UINT,
+	VK_FORMAT_R16G16B16A16_UINT,
+	VK_FORMAT_R8G8B8A8_UINT,
+	VK_FORMAT_R32_UINT,
+
+	VK_FORMAT_R32G32B32A32_SINT,
+	VK_FORMAT_R16G16B16A16_SINT,
+	VK_FORMAT_R8G8B8A8_SINT,
+	VK_FORMAT_R32_SINT,
+
+	VK_FORMAT_R8G8B8A8_UNORM,
+
+	VK_FORMAT_R8G8B8A8_SNORM,
+};
+
+} // anonymous ns
+
+tcu::TestCaseGroup* createImageStoreTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> testGroup(new tcu::TestCaseGroup(testCtx, "store", "Plain imageStore() cases"));
+
+	for (int textureNdx = 0; textureNdx < DE_LENGTH_OF_ARRAY(s_textures); ++textureNdx)
+	{
+		const Texture& texture = s_textures[textureNdx];
+		de::MovePtr<tcu::TestCaseGroup> groupByImageViewType (new tcu::TestCaseGroup(testCtx, getImageTypeName(texture.type()).c_str(), ""));
+		const bool isLayered = (texture.numLayers() > 1);
+
+		for (int formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(s_formats); ++formatNdx)
+		{
+			groupByImageViewType->addChild(new StoreTest(testCtx, getFormatCaseName(s_formats[formatNdx]), "", texture, s_formats[formatNdx]));
+
+			if (isLayered)
+				groupByImageViewType->addChild(new StoreTest(testCtx, getFormatCaseName(s_formats[formatNdx]) + "_single_layer", "",
+												texture, s_formats[formatNdx], StoreTest::FLAG_SINGLE_LAYER_BIND));
+		}
+		testGroup->addChild(groupByImageViewType.release());
+	}
+
+	return testGroup.release();
+}
+
+tcu::TestCaseGroup* createImageLoadStoreTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> testGroup(new tcu::TestCaseGroup(testCtx, "load_store", "Cases with imageLoad() followed by imageStore()"));
+
+	for (int textureNdx = 0; textureNdx < DE_LENGTH_OF_ARRAY(s_textures); ++textureNdx)
+	{
+		const Texture& texture = s_textures[textureNdx];
+		de::MovePtr<tcu::TestCaseGroup> groupByImageViewType (new tcu::TestCaseGroup(testCtx, getImageTypeName(texture.type()).c_str(), ""));
+		const bool isLayered = (texture.numLayers() > 1);
+
+		for (int formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(s_formats); ++formatNdx)
+		{
+			groupByImageViewType->addChild(new LoadStoreTest(testCtx, getFormatCaseName(s_formats[formatNdx]), "",
+											texture, s_formats[formatNdx], s_formats[formatNdx]));
+
+			if (isLayered)
+				groupByImageViewType->addChild(new LoadStoreTest(testCtx, getFormatCaseName(s_formats[formatNdx]) + "_single_layer", "",
+												texture, s_formats[formatNdx], s_formats[formatNdx], LoadStoreTest::FLAG_SINGLE_LAYER_BIND));
+		}
+		testGroup->addChild(groupByImageViewType.release());
+	}
+
+	return testGroup.release();
+}
+
+tcu::TestCaseGroup* createImageFormatReinterpretTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> testGroup(new tcu::TestCaseGroup(testCtx, "format_reinterpret",	"Cases with differing texture and image formats"));
+
+	for (int textureNdx = 0; textureNdx < DE_LENGTH_OF_ARRAY(s_textures); ++textureNdx)
+	{
+		const Texture& texture = s_textures[textureNdx];
+		de::MovePtr<tcu::TestCaseGroup> groupByImageViewType (new tcu::TestCaseGroup(testCtx, getImageTypeName(texture.type()).c_str(), ""));
+
+		for (int imageFormatNdx = 0; imageFormatNdx < DE_LENGTH_OF_ARRAY(s_formats); ++imageFormatNdx)
+		for (int formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(s_formats); ++formatNdx)
+		{
+			// TODO Are all conversions valid, or do we have to limit (or expand) them somehow? Is this stated anywhere in the spec?
+
+			const std::string caseName = getFormatCaseName(s_formats[imageFormatNdx]) + "_" + getFormatCaseName(s_formats[formatNdx]);
+			if (imageFormatNdx != formatNdx && formatsAreCompatible(s_formats[imageFormatNdx], s_formats[formatNdx]))
+				groupByImageViewType->addChild(new LoadStoreTest(testCtx, caseName, "", texture, s_formats[formatNdx], s_formats[imageFormatNdx]));
+		}
+		testGroup->addChild(groupByImageViewType.release());
+	}
+
+	return testGroup.release();
+}
+
+de::MovePtr<TestCase> createImageQualifierRestrictCase (tcu::TestContext& testCtx, const ImageType imageType, const std::string& name)
+{
+	const VkFormat format = VK_FORMAT_R32G32B32A32_UINT;
+	const Texture& texture = getTestTexture(imageType);
+	return de::MovePtr<TestCase>(new LoadStoreTest(testCtx, name, "", texture, format, format, LoadStoreTest::FLAG_RESTRICT_IMAGES));
+}
+
+} // image
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/image/vktImageLoadStoreTests.hpp b/external/vulkancts/modules/vulkan/image/vktImageLoadStoreTests.hpp
new file mode 100644
index 0000000..0eb5e8e
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/image/vktImageLoadStoreTests.hpp
@@ -0,0 +1,56 @@
+#ifndef _VKTIMAGELOADSTORETESTS_HPP
+#define _VKTIMAGELOADSTORETESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image load/store Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktTestCase.hpp"
+#include "deUniquePtr.hpp"
+#include "vktImageTestsUtil.hpp"
+
+namespace vkt
+{
+namespace image
+{
+
+tcu::TestCaseGroup*		createImageStoreTests				(tcu::TestContext& testCtx);
+tcu::TestCaseGroup*		createImageLoadStoreTests			(tcu::TestContext& testCtx);
+tcu::TestCaseGroup*		createImageFormatReinterpretTests	(tcu::TestContext& testCtx);
+
+de::MovePtr<TestCase>	createImageQualifierRestrictCase	(tcu::TestContext& testCtx, const ImageType imageType, const std::string& name);
+
+} // image
+} // vkt
+
+#endif // _VKTIMAGELOADSTORETESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/image/vktImageQualifiersTests.cpp b/external/vulkancts/modules/vulkan/image/vktImageQualifiersTests.cpp
new file mode 100644
index 0000000..dadc7d5
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/image/vktImageQualifiersTests.cpp
@@ -0,0 +1,792 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Memory qualifiers tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktImageQualifiersTests.hpp"
+#include "vktImageLoadStoreTests.hpp"
+#include "vktImageTestsUtil.hpp"
+
+#include "vkDefs.hpp"
+#include "vkImageUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vktTestCase.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkPrograms.hpp"
+#include "vkMemUtil.hpp"
+#include "vkBuilderUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkTypeUtil.hpp"
+
+#include "deDefs.hpp"
+#include "deStringUtil.hpp"
+#include "deUniquePtr.hpp"
+
+#include "tcuImageCompare.hpp"
+#include "tcuTexture.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuVectorType.hpp"
+
+using namespace vk;
+
+namespace vkt
+{
+namespace image
+{
+namespace
+{
+
+static const tcu::UVec3		g_localWorkGroupSizeBase	= tcu::UVec3(8, 8, 2);
+static const deInt32		g_ShaderReadOffsetsX[4]		= { 1, 4, 7, 10 };
+static const deInt32		g_ShaderReadOffsetsY[4]		= { 2, 5, 8, 11 };
+static const deInt32		g_ShaderReadOffsetsZ[4]		= { 3, 6, 9, 12 };
+static const char* const	g_ShaderReadOffsetsXStr		= "int[]( 1, 4, 7, 10 )";
+static const char* const	g_ShaderReadOffsetsYStr		= "int[]( 2, 5, 8, 11 )";
+static const char* const	g_ShaderReadOffsetsZStr		= "int[]( 3, 6, 9, 12 )";
+
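+//! Map the image size to a compute dispatch grid; array layers and cube faces extend the Z axis (Y for 1D arrays).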
+const tcu::UVec3 getComputeGridSize (const ImageType imageType, const tcu::UVec4& imageSize)
+{
+	switch (imageType)
+	{
+		case IMAGE_TYPE_1D:
+		case IMAGE_TYPE_2D:
+		case IMAGE_TYPE_2D_ARRAY:
+		case IMAGE_TYPE_3D:
+		case IMAGE_TYPE_CUBE:
+		case IMAGE_TYPE_CUBE_ARRAY:
+		case IMAGE_TYPE_BUFFER:
+			return tcu::UVec3(imageSize.x(), imageSize.y(), imageSize.z() * imageSize.w());
+
+		case IMAGE_TYPE_1D_ARRAY:
+			return tcu::UVec3(imageSize.x(), imageSize.w(), 1);
+
+		default:
+			DE_FATAL("Unknown image type");
+			return tcu::UVec3(1, 1, 1);
+	}
+}
+
+const tcu::UVec3 getLocalWorkGroupSize (const ImageType imageType, const tcu::UVec4& imageSize)
+{
+	const tcu::UVec3 computeGridSize	= getComputeGridSize(imageType, imageSize);
+
+	const tcu::UVec3 localWorkGroupSize = tcu::UVec3(de::min(g_localWorkGroupSizeBase.x(), computeGridSize.x()),
+													 de::min(g_localWorkGroupSizeBase.y(), computeGridSize.y()),
+													 de::min(g_localWorkGroupSizeBase.z(), computeGridSize.z()));
+	return localWorkGroupSize;
+}
+
+const tcu::UVec3 getNumWorkGroups (const ImageType imageType, const tcu::UVec4& imageSize)
+{
+	const tcu::UVec3 computeGridSize	= getComputeGridSize(imageType, imageSize);
+	const tcu::UVec3 localWorkGroupSize = getLocalWorkGroupSize(imageType, imageSize);
+
+	return computeGridSize / localWorkGroupSize;
+}
+
+tcu::ConstPixelBufferAccess getLayerOrSlice (const ImageType					imageType,
+											 const tcu::ConstPixelBufferAccess&	access,
+											 const deUint32						layer)
+{
+	switch (imageType)
+	{
+		case IMAGE_TYPE_1D:
+		case IMAGE_TYPE_2D:
+		case IMAGE_TYPE_BUFFER:
+			DE_ASSERT(layer == 0);
+			return access;
+
+		case IMAGE_TYPE_1D_ARRAY:
+			return tcu::getSubregion(access, 0, layer, access.getWidth(), 1);
+
+		case IMAGE_TYPE_2D_ARRAY:
+		case IMAGE_TYPE_3D:
+		case IMAGE_TYPE_CUBE:
+		case IMAGE_TYPE_CUBE_ARRAY:
+			return tcu::getSubregion(access, 0, 0, layer, access.getWidth(), access.getHeight(), 1);
+
+		default:
+			DE_FATAL("Unknown image type");
+			return tcu::ConstPixelBufferAccess();
+	}
+}
+
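+//! Compare reference and result images layer by layer (slice by slice for 3D); integer formats use an exact comparison, float formats a small threshold.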
+bool comparePixelBuffers (tcu::TestContext&						testCtx,
+						  const ImageType						imageType,
+						  const tcu::UVec4&						imageSize,
+						  const tcu::TextureFormat&				format,
+						  const tcu::ConstPixelBufferAccess&	reference,
+						  const tcu::ConstPixelBufferAccess&	result)
+{
+	DE_ASSERT(reference.getFormat() == result.getFormat());
+	DE_ASSERT(reference.getSize() == result.getSize());
+
+	const bool		 intFormat			= isIntFormat(mapTextureFormat(format)) || isUintFormat(mapTextureFormat(format));
+	deUint32		 passedLayers		= 0;
+
+	for (deUint32 layerNdx = 0; layerNdx < imageSize.z() * imageSize.w(); ++layerNdx)
+	{
+		const std::string comparisonName = "Comparison" + de::toString(layerNdx);
+
+		std::string comparisonDesc = "Image Comparison, ";
+		switch (imageType)
+		{
+			case IMAGE_TYPE_3D:
+				comparisonDesc = comparisonDesc + "slice " + de::toString(layerNdx);
+				break;
+
+			case IMAGE_TYPE_CUBE:
+			case IMAGE_TYPE_CUBE_ARRAY:
+				comparisonDesc = comparisonDesc + "face " + de::toString(layerNdx % 6) + ", cube " + de::toString(layerNdx / 6);
+				break;
+
+			default:
+				comparisonDesc = comparisonDesc + "layer " + de::toString(layerNdx);
+				break;
+		}
+
+		const tcu::ConstPixelBufferAccess refLayer		= getLayerOrSlice(imageType, reference, layerNdx);
+		const tcu::ConstPixelBufferAccess resultLayer	= getLayerOrSlice(imageType, result, layerNdx);
+
+		bool ok = false;
+		if (intFormat)
+			ok = tcu::intThresholdCompare(testCtx.getLog(), comparisonName.c_str(), comparisonDesc.c_str(), refLayer, resultLayer, tcu::UVec4(0), tcu::COMPARE_LOG_RESULT);
+		else
+			ok = tcu::floatThresholdCompare(testCtx.getLog(), comparisonName.c_str(), comparisonDesc.c_str(), refLayer, resultLayer, tcu::Vec4(0.01f), tcu::COMPARE_LOG_RESULT);
+
+		if (ok)
+			++passedLayers;
+	}
+
+	return passedLayers == (imageSize.z() * imageSize.w());
+}
+
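+//! Build a GLSL coordinate expression with the dimensionality required by the image type.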
+const std::string getCoordStr (const ImageType		imageType,
+							   const std::string&	x,
+							   const std::string&	y,
+							   const std::string&	z)
+{
+	switch (imageType)
+	{
+		case IMAGE_TYPE_1D:
+		case IMAGE_TYPE_BUFFER:
+			return x;
+
+		case IMAGE_TYPE_1D_ARRAY:
+		case IMAGE_TYPE_2D:
+			return "ivec2(" + x + "," + y + ")";
+
+		case IMAGE_TYPE_2D_ARRAY:
+		case IMAGE_TYPE_3D:
+		case IMAGE_TYPE_CUBE:
+		case IMAGE_TYPE_CUBE_ARRAY:
+			return "ivec3(" + x + "," + y + "," + z + ")";
+
+		default:
+			DE_ASSERT(false);
+			return "";
+	}
+}
+
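+//! Compute test exercising the coherent and volatile memory qualifiers on a storage image; restrict is covered by a dedicated load/store case (see createImageQualifierRestrictCase()).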
+class MemoryQualifierTestCase : public vkt::TestCase
+{
+public:
+
+	enum Qualifier
+	{
+		QUALIFIER_COHERENT = 0,
+		QUALIFIER_VOLATILE,
+		QUALIFIER_RESTRICT,
+		QUALIFIER_LAST
+	};
+
+								MemoryQualifierTestCase		(tcu::TestContext&			testCtx,
+															 const std::string&			name,
+															 const std::string&			description,
+															 const Qualifier			qualifier,
+															 const ImageType			imageType,
+															 const tcu::UVec4&			imageSize,
+															 const tcu::TextureFormat&	format,
+															 const glu::GLSLVersion		glslVersion);
+
+	virtual						~MemoryQualifierTestCase	(void) {}
+
+	virtual void				initPrograms				(SourceCollections&			programCollection) const;
+	virtual TestInstance*		createInstance				(Context&					context) const;
+
+protected:
+
+	const Qualifier				m_qualifier;
+	const ImageType				m_imageType;
+	const tcu::UVec4			m_imageSize;
+	const tcu::TextureFormat	m_format;
+	const glu::GLSLVersion		m_glslVersion;
+};
+
+MemoryQualifierTestCase::MemoryQualifierTestCase (tcu::TestContext&			testCtx,
+												  const std::string&		name,
+												  const std::string&		description,
+												  const Qualifier			qualifier,
+												  const ImageType			imageType,
+												  const tcu::UVec4&			imageSize,
+												  const tcu::TextureFormat&	format,
+												  const glu::GLSLVersion	glslVersion)
+	: vkt::TestCase(testCtx, name, description)
+	, m_qualifier(qualifier)
+	, m_imageType(imageType)
+	, m_imageSize(imageSize)
+	, m_format(format)
+	, m_glslVersion(glslVersion)
+{
+}
+
+void MemoryQualifierTestCase::initPrograms (SourceCollections& programCollection) const
+{
+	const char* const	versionDecl			= glu::getGLSLVersionDeclaration(m_glslVersion);
+
+	const char* const	qualifierName		= m_qualifier == QUALIFIER_COHERENT ? "coherent"
+											: m_qualifier == QUALIFIER_VOLATILE ? "volatile"
+											: DE_NULL;
+
+	const bool			uintFormat			= isUintFormat(mapTextureFormat(m_format));
+	const bool			intFormat			= isIntFormat(mapTextureFormat(m_format));
+	const std::string	colorVecTypeName	= std::string(uintFormat ? "u"	: intFormat ? "i" : "") + "vec4";
+	const std::string	colorScalarTypeName = std::string(uintFormat ? "uint" : intFormat ? "int" : "float");
+	const std::string	invocationCoord		= getCoordStr(m_imageType, "gx", "gy", "gz");
+	const std::string	shaderImageFormat	= getShaderImageFormatQualifier(m_format);
+	const std::string	shaderImageType		= getShaderImageType(m_format, m_imageType);
+
+	const tcu::UVec3	localWorkGroupSize	= getLocalWorkGroupSize(m_imageType, m_imageSize);
+	const std::string	localSizeX			= de::toString(localWorkGroupSize.x());
+	const std::string	localSizeY			= de::toString(localWorkGroupSize.y());
+	const std::string	localSizeZ			= de::toString(localWorkGroupSize.z());
+
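+	// The generated shader writes gx^gy^gz to every texel, synchronizes with memoryBarrier()/barrier(),
+	// sums values read from other invocations' texels (offsets wrap within the local work group) and stores
+	// the sum; generateReferenceImage() repeats the same computation on the CPU.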
+	std::ostringstream	programBuffer;
+
+	programBuffer
+		<< versionDecl << "\n"
+		<< "\n"
+		<< "precision highp " << shaderImageType << ";\n"
+		<< "\n"
+		<< "layout (local_size_x = " << localSizeX << ", local_size_y = " << localSizeY << ", local_size_z = " << localSizeZ << ") in;\n"
+		<< "layout (" << shaderImageFormat << ", binding=0) " << qualifierName << " uniform " << shaderImageType << " u_image;\n"
+		<< "void main (void)\n"
+		<< "{\n"
+		<< "	int gx = int(gl_GlobalInvocationID.x);\n"
+		<< "	int gy = int(gl_GlobalInvocationID.y);\n"
+		<< "	int gz = int(gl_GlobalInvocationID.z);\n"
+		<< "	imageStore(u_image, " << invocationCoord << ", " << colorVecTypeName << "(gx^gy^gz));\n"
+		<< "\n"
+		<< "	memoryBarrier();\n"
+		<< "	barrier();\n"
+		<< "\n"
+		<< "	" << colorScalarTypeName << " sum = " << colorScalarTypeName << "(0);\n"
+		<< "	int groupBaseX = gx/" << localSizeX << "*" << localSizeX << ";\n"
+		<< "	int groupBaseY = gy/" << localSizeY << "*" << localSizeY << ";\n"
+		<< "	int groupBaseZ = gz/" << localSizeZ << "*" << localSizeZ << ";\n"
+		<< "	int xOffsets[] = " << g_ShaderReadOffsetsXStr << ";\n"
+		<< "	int yOffsets[] = " << g_ShaderReadOffsetsYStr << ";\n"
+		<< "	int zOffsets[] = " << g_ShaderReadOffsetsZStr << ";\n"
+		<< "	for (int i = 0; i < " << de::toString(DE_LENGTH_OF_ARRAY(g_ShaderReadOffsetsX)) << "; i++)\n"
+		<< "	{\n"
+		<< "		int readX = groupBaseX + (gx + xOffsets[i]) % " << localSizeX << ";\n"
+		<< "		int readY = groupBaseY + (gy + yOffsets[i]) % " << localSizeY << ";\n"
+		<< "		int readZ = groupBaseZ + (gz + zOffsets[i]) % " << localSizeZ << ";\n"
+		<< "		sum += imageLoad(u_image, " << getCoordStr(m_imageType, "readX", "readY", "readZ") << ").x;\n"
+		<< "	}\n"
+		<< "\n"
+		<< "	memoryBarrier();\n"
+		<< "	barrier();\n"
+		<< "\n"
+		<< "	imageStore(u_image, " << invocationCoord << ", " << colorVecTypeName << "(sum));\n"
+		<< "}\n";
+
+	programCollection.glslSources.add(m_name) << glu::ComputeSource(programBuffer.str());
+}
+
+class MemoryQualifierInstanceBase : public vkt::TestInstance
+{
+public:
+									MemoryQualifierInstanceBase		(Context&					context,
+																	 const std::string&			name,
+																	 const ImageType			imageType,
+																	 const tcu::UVec4&			imageSize,
+																	 const tcu::TextureFormat&	format);
+
+	virtual							~MemoryQualifierInstanceBase	(void) {};
+
+	virtual tcu::TestStatus			iterate							(void);
+
+	virtual void					prepareResources				(const VkDeviceSize			bufferSizeInBytes) = 0;
+
+	virtual void					prepareDescriptors				(void) = 0;
+
+	virtual void					commandsBeforeCompute			(const VkCommandBuffer		cmdBuffer,
+																	 const VkDeviceSize			bufferSizeInBytes) const = 0;
+
+	virtual void					commandsAfterCompute			(const VkCommandBuffer		cmdBuffer,
+																	 const VkDeviceSize			bufferSizeInBytes) const = 0;
+protected:
+
+	tcu::TextureLevel				generateReferenceImage			(void) const;
+
+	const std::string				m_name;
+	const ImageType					m_imageType;
+	const tcu::UVec4				m_imageSize;
+	const tcu::TextureFormat		m_format;
+
+	de::MovePtr<Buffer>				m_buffer;
+	Move<VkDescriptorPool>			m_descriptorPool;
+	Move<VkDescriptorSetLayout>		m_descriptorSetLayout;
+	Move<VkDescriptorSet>			m_descriptorSet;
+};
+
+MemoryQualifierInstanceBase::MemoryQualifierInstanceBase (Context&					context,
+														  const std::string&		name,
+														  const ImageType			imageType,
+														  const tcu::UVec4&			imageSize,
+														  const tcu::TextureFormat&	format)
+	: vkt::TestInstance(context)
+	, m_name(name)
+	, m_imageType(imageType)
+	, m_imageSize(imageSize)
+	, m_format(format)
+{
+}
+
+tcu::TestStatus	MemoryQualifierInstanceBase::iterate (void)
+{
+	const VkDevice			device				= m_context.getDevice();
+	const DeviceInterface&	deviceInterface		= m_context.getDeviceInterface();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+
+	const VkDeviceSize	bufferSizeInBytes = m_imageSize.x() * m_imageSize.y() * m_imageSize.z() * m_imageSize.w() * tcu::getPixelSize(m_format);
+
+	// Prepare resources for the test
+	prepareResources(bufferSizeInBytes);
+
+	// Prepare descriptor sets
+	prepareDescriptors();
+
+	// Create compute shader
+	const vk::Unique<VkShaderModule> shaderModule(createShaderModule(deviceInterface, device, m_context.getBinaryCollection().get(m_name), 0u));
+
+	// Create compute pipeline
+	const vk::Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(deviceInterface, device, *m_descriptorSetLayout));
+	const vk::Unique<VkPipeline> pipeline(makeComputePipeline(deviceInterface, device, *pipelineLayout, *shaderModule));
+
+	// Create command buffer
+	const Unique<VkCommandPool> cmdPool(makeCommandPool(deviceInterface, device, queueFamilyIndex));
+	const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(deviceInterface, device, *cmdPool));
+
+	// Start recording commands
+	beginCommandBuffer(deviceInterface, *cmdBuffer);
+
+	deviceInterface.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+	deviceInterface.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &m_descriptorSet.get(), 0u, DE_NULL);
+
+	commandsBeforeCompute(*cmdBuffer, bufferSizeInBytes);
+
+	const tcu::UVec3 numGroups = getNumWorkGroups(m_imageType, m_imageSize);
+	deviceInterface.cmdDispatch(*cmdBuffer, numGroups.x(), numGroups.y(), numGroups.z());
+
+	commandsAfterCompute(*cmdBuffer, bufferSizeInBytes);
+
+	endCommandBuffer(deviceInterface, *cmdBuffer);
+
+	// Submit and wait for completion
+	submitCommandsAndWait(deviceInterface, device, queue, *cmdBuffer);
+
+	// Retrieve data from buffer to host memory
+	const Allocation& allocation = m_buffer->getAllocation();
+	invalidateMappedMemoryRange(deviceInterface, device, allocation.getMemory(), allocation.getOffset(), bufferSizeInBytes);
+
+	const tcu::UVec3 computeGridSize = getComputeGridSize(m_imageType, m_imageSize);
+	tcu::ConstPixelBufferAccess resultPixelBuffer(m_format, computeGridSize.x(), computeGridSize.y(), computeGridSize.z(), allocation.getHostPtr());
+
+	// Create a reference image
+	tcu::TextureLevel referenceImage = generateReferenceImage();
+	tcu::ConstPixelBufferAccess referencePixelBuffer = referenceImage.getAccess();
+
+	// Validate the result
+	if (comparePixelBuffers(m_context.getTestContext(), m_imageType, m_imageSize, m_format, referencePixelBuffer, resultPixelBuffer))
+		return tcu::TestStatus::pass("Passed");
+	else
+		return tcu::TestStatus::fail("Image comparison failed");
+}
+
+tcu::TextureLevel MemoryQualifierInstanceBase::generateReferenceImage (void) const
+{
+	// Generate the reference image data using the storage format
+	const tcu::UVec3 computeGridSize = getComputeGridSize(m_imageType, m_imageSize);
+
+	tcu::TextureLevel base(m_format, computeGridSize.x(), computeGridSize.y(), computeGridSize.z());
+	tcu::PixelBufferAccess baseAccess = base.getAccess();
+
+	tcu::TextureLevel reference(m_format, computeGridSize.x(), computeGridSize.y(), computeGridSize.z());
+	tcu::PixelBufferAccess referenceAccess = reference.getAccess();
+
+	for (deInt32 z = 0; z < baseAccess.getDepth(); ++z)
+		for (deInt32 y = 0; y < baseAccess.getHeight(); ++y)
+			for (deInt32 x = 0; x < baseAccess.getWidth(); ++x)
+			{
+				baseAccess.setPixel(tcu::IVec4(x^y^z), x, y, z);
+			}
+
+	const tcu::UVec3 localWorkGroupSize = getLocalWorkGroupSize(m_imageType, m_imageSize);
+
+	for (deInt32 z = 0; z < referenceAccess.getDepth(); ++z)
+		for (deInt32 y = 0; y < referenceAccess.getHeight(); ++y)
+			for (deInt32 x = 0; x < referenceAccess.getWidth(); ++x)
+			{
+				const deInt32	groupBaseX	= x / localWorkGroupSize.x() * localWorkGroupSize.x();
+				const deInt32	groupBaseY	= y / localWorkGroupSize.y() * localWorkGroupSize.y();
+				const deInt32	groupBaseZ	= z / localWorkGroupSize.z() * localWorkGroupSize.z();
+				deInt32			sum			= 0;
+
+				for (deInt32 i = 0; i < DE_LENGTH_OF_ARRAY(g_ShaderReadOffsetsX); i++)
+				{
+					sum += baseAccess.getPixelInt(
+						groupBaseX + (x + g_ShaderReadOffsetsX[i]) % localWorkGroupSize.x(),
+						groupBaseY + (y + g_ShaderReadOffsetsY[i]) % localWorkGroupSize.y(),
+						groupBaseZ + (z + g_ShaderReadOffsetsZ[i]) % localWorkGroupSize.z()).x();
+				}
+
+				referenceAccess.setPixel(tcu::IVec4(sum), x, y, z);
+			}
+
+	return reference;
+}
+
+class MemoryQualifierInstanceImage : public MemoryQualifierInstanceBase
+{
+public:
+						MemoryQualifierInstanceImage	(Context&					context,
+														 const std::string&			name,
+														 const ImageType			imageType,
+														 const tcu::UVec4&			imageSize,
+														 const tcu::TextureFormat&	format)
+							: MemoryQualifierInstanceBase(context, name, imageType, imageSize, format) {}
+
+	virtual				~MemoryQualifierInstanceImage	(void) {};
+
+	virtual void		prepareResources				(const VkDeviceSize			bufferSizeInBytes);
+
+	virtual void		prepareDescriptors				(void);
+
+	virtual void		commandsBeforeCompute			(const VkCommandBuffer		cmdBuffer,
+														 const VkDeviceSize			bufferSizeInBytes) const;
+
+	virtual void		commandsAfterCompute			(const VkCommandBuffer		cmdBuffer,
+														 const VkDeviceSize			bufferSizeInBytes) const;
+protected:
+
+	de::MovePtr<Image>	m_image;
+	Move<VkImageView>	m_imageView;
+};
+
+void MemoryQualifierInstanceImage::prepareResources (const VkDeviceSize bufferSizeInBytes)
+{
+	const VkDevice			device			= m_context.getDevice();
+	const DeviceInterface&	deviceInterface = m_context.getDeviceInterface();
+	Allocator&				allocator		= m_context.getDefaultAllocator();
+
+	// Create image
+	const VkImageCreateInfo imageCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,									// VkStructureType			sType;
+		DE_NULL,																// const void*				pNext;
+		(m_imageType == IMAGE_TYPE_CUBE || m_imageType == IMAGE_TYPE_CUBE_ARRAY)
+			? (VkImageCreateFlags)VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT : 0u,		// VkImageCreateFlags		flags;
+		mapImageType(m_imageType),												// VkImageType				imageType;
+		mapTextureFormat(m_format),												// VkFormat					format;
+		vk::makeExtent3D(m_imageSize.x(), m_imageSize.y(), m_imageSize.z()),	// VkExtent3D				extent;
+		1u,																		// deUint32					mipLevels;
+		m_imageSize.w(),														// deUint32					arrayLayers;
+		VK_SAMPLE_COUNT_1_BIT,													// VkSampleCountFlagBits	samples;
+		VK_IMAGE_TILING_OPTIMAL,												// VkImageTiling			tiling;
+		VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_STORAGE_BIT,			// VkImageUsageFlags		usage;
+		VK_SHARING_MODE_EXCLUSIVE,												// VkSharingMode			sharingMode;
+		0u,																		// deUint32					queueFamilyIndexCount;
+		DE_NULL,																// const deUint32*			pQueueFamilyIndices;
+		VK_IMAGE_LAYOUT_UNDEFINED,												// VkImageLayout			initialLayout;
+	};
+
+	m_image = de::MovePtr<Image>(new Image(deviceInterface, device, allocator, imageCreateInfo, MemoryRequirement::Any));
+
+	// Create imageView
+	const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, m_imageSize.w());
+	m_imageView = makeImageView(deviceInterface, device, m_image->get(), mapImageViewType(m_imageType), mapTextureFormat(m_format), subresourceRange);
+
+	// Create a buffer to store shader output (copied from image data)
+	const VkBufferCreateInfo	bufferCreateInfo = makeBufferCreateInfo(bufferSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
+	m_buffer = de::MovePtr<Buffer>(new Buffer(deviceInterface, device, allocator, bufferCreateInfo, MemoryRequirement::HostVisible));
+}
+
+void MemoryQualifierInstanceImage::prepareDescriptors (void)
+{
+	const VkDevice			device			= m_context.getDevice();
+	const DeviceInterface&	deviceInterface = m_context.getDeviceInterface();
+
+	// Create descriptor pool
+	m_descriptorPool =
+		DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
+		.build(deviceInterface, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+
+	// Create descriptor set layout
+	m_descriptorSetLayout =
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(deviceInterface, device);
+
+	// Allocate descriptor set
+	m_descriptorSet = makeDescriptorSet(deviceInterface, device, *m_descriptorPool, *m_descriptorSetLayout);
+
+	// Set the bindings
+	const VkDescriptorImageInfo descriptorImageInfo = makeDescriptorImageInfo(DE_NULL, *m_imageView, VK_IMAGE_LAYOUT_GENERAL);
+
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfo)
+		.update(deviceInterface, device);
+}
+
+void MemoryQualifierInstanceImage::commandsBeforeCompute (const VkCommandBuffer cmdBuffer, const VkDeviceSize bufferSizeInBytes) const
+{
+	DE_UNREF(bufferSizeInBytes);
+
+	const DeviceInterface&			deviceInterface	 = m_context.getDeviceInterface();
+	const VkImageSubresourceRange	subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, m_imageSize.w());
+
+	const VkImageMemoryBarrier imageLayoutBarrier
+		= makeImageMemoryBarrier(0u,
+								 VK_ACCESS_SHADER_READ_BIT,
+								 VK_IMAGE_LAYOUT_UNDEFINED,
+								 VK_IMAGE_LAYOUT_GENERAL,
+								 m_image->get(),
+								 subresourceRange);
+
+	deviceInterface.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &imageLayoutBarrier);
+}
+
+void MemoryQualifierInstanceImage::commandsAfterCompute (const VkCommandBuffer cmdBuffer, const VkDeviceSize	bufferSizeInBytes) const
+{
+	const DeviceInterface&			deviceInterface	 = m_context.getDeviceInterface();
+	const VkImageSubresourceRange	subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, m_imageSize.w());
+
+	const VkImageMemoryBarrier imagePreCopyBarrier
+		= makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT,
+								 VK_ACCESS_TRANSFER_READ_BIT,
+								 VK_IMAGE_LAYOUT_GENERAL,
+								 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+								 m_image->get(),
+								 subresourceRange);
+
+	deviceInterface.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &imagePreCopyBarrier);
+
+	const VkBufferImageCopy copyParams = makeBufferImageCopy(vk::makeExtent3D(m_imageSize.x(), m_imageSize.y(), m_imageSize.z()), m_imageSize.w());
+	deviceInterface.cmdCopyImageToBuffer(cmdBuffer, m_image->get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_buffer->get(), 1u, &copyParams);
+
+	const VkBufferMemoryBarrier bufferPostCopyBarrier
+		= makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT,
+								  VK_ACCESS_HOST_READ_BIT,
+								  m_buffer->get(),
+								  0ull,
+								  bufferSizeInBytes);
+
+	deviceInterface.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &bufferPostCopyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+}
+
+class MemoryQualifierInstanceBuffer : public MemoryQualifierInstanceBase
+{
+public:
+						MemoryQualifierInstanceBuffer	(Context&					context,
+														 const std::string&			name,
+														 const ImageType			imageType,
+														 const tcu::UVec4&			imageSize,
+														 const tcu::TextureFormat&	format)
+							: MemoryQualifierInstanceBase(context, name, imageType, imageSize, format) {}
+
+	virtual				~MemoryQualifierInstanceBuffer	(void) {};
+
+	virtual void		prepareResources				(const VkDeviceSize			bufferSizeInBytes);
+
+	virtual void		prepareDescriptors				(void);
+
+	virtual void		commandsBeforeCompute			(const VkCommandBuffer,
+														 const VkDeviceSize) const {}
+
+	virtual void		commandsAfterCompute			(const VkCommandBuffer		cmdBuffer,
+														 const VkDeviceSize			bufferSizeInBytes) const;
+protected:
+
+	Move<VkBufferView>	m_bufferView;
+};
+
+void MemoryQualifierInstanceBuffer::prepareResources (const VkDeviceSize bufferSizeInBytes)
+{
+	const VkDevice			device			= m_context.getDevice();
+	const DeviceInterface&	deviceInterface = m_context.getDeviceInterface();
+	Allocator&				allocator		= m_context.getDefaultAllocator();
+
+	// Create a buffer to store shader output
+	const VkBufferCreateInfo bufferCreateInfo = makeBufferCreateInfo(bufferSizeInBytes, VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT);
+	m_buffer = de::MovePtr<Buffer>(new Buffer(deviceInterface, device, allocator, bufferCreateInfo, MemoryRequirement::HostVisible));
+
+	m_bufferView = makeBufferView(deviceInterface, device, m_buffer->get(), mapTextureFormat(m_format), 0ull, bufferSizeInBytes);
+}
+
+void MemoryQualifierInstanceBuffer::prepareDescriptors (void)
+{
+	const VkDevice			device			= m_context.getDevice();
+	const DeviceInterface&	deviceInterface = m_context.getDeviceInterface();
+
+	// Create descriptor pool
+	m_descriptorPool =
+		DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)
+		.build(deviceInterface, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+
+	// Create descriptor set layout
+	m_descriptorSetLayout =
+		DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(deviceInterface, device);
+
+	// Allocate descriptor set
+	m_descriptorSet = makeDescriptorSet(deviceInterface, device, *m_descriptorPool, *m_descriptorSetLayout);
+
+	// Set the bindings
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, &m_bufferView.get())
+		.update(deviceInterface, device);
+}
+
+void MemoryQualifierInstanceBuffer::commandsAfterCompute (const VkCommandBuffer cmdBuffer, const VkDeviceSize bufferSizeInBytes) const
+{
+	const DeviceInterface&	deviceInterface = m_context.getDeviceInterface();
+
+	const VkBufferMemoryBarrier shaderWriteBarrier
+		= makeBufferMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT,
+								  VK_ACCESS_HOST_READ_BIT,
+								  m_buffer->get(),
+								  0ull,
+								  bufferSizeInBytes);
+
+	deviceInterface.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &shaderWriteBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+}
+
+TestInstance* MemoryQualifierTestCase::createInstance (Context& context) const
+{
+	if (m_imageType == IMAGE_TYPE_BUFFER)
+		return new MemoryQualifierInstanceBuffer(context, m_name, m_imageType, m_imageSize, m_format);
+	else
+		return new MemoryQualifierInstanceImage(context, m_name, m_imageType, m_imageSize, m_format);
+}
+
+} // anonymous ns
+
+tcu::TestCaseGroup* createImageQualifiersTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> imageQualifiersTests(new tcu::TestCaseGroup(testCtx, "qualifiers", "Coherent, volatile and restrict"));
+
+	struct ImageParameters
+	{
+		ImageType	imageType;
+		tcu::UVec4	imageSize;
+	};
+
+	static const ImageParameters imageParametersArray[] =
+	{
+		{ IMAGE_TYPE_1D,			tcu::UVec4(64, 1,  1, 1)	},
+		{ IMAGE_TYPE_1D_ARRAY,		tcu::UVec4(64, 1,  1, 8)	},
+		{ IMAGE_TYPE_2D,			tcu::UVec4(64, 64, 1, 1)	},
+		{ IMAGE_TYPE_2D_ARRAY,		tcu::UVec4(64, 64, 1, 8)	},
+		{ IMAGE_TYPE_3D,			tcu::UVec4(64, 64, 8, 1)	},
+		{ IMAGE_TYPE_CUBE,			tcu::UVec4(64, 64, 1, 6)	},
+		{ IMAGE_TYPE_CUBE_ARRAY,	tcu::UVec4(64, 64, 1, 6*8)	},
+		{ IMAGE_TYPE_BUFFER,		tcu::UVec4(64, 1,  1, 1)	}
+	};
+
+	static const tcu::TextureFormat formats[] =
+	{
+		tcu::TextureFormat(tcu::TextureFormat::R, tcu::TextureFormat::FLOAT),
+		tcu::TextureFormat(tcu::TextureFormat::R, tcu::TextureFormat::UNSIGNED_INT32),
+		tcu::TextureFormat(tcu::TextureFormat::R, tcu::TextureFormat::SIGNED_INT32),
+	};
+
+	for (deUint32 qualifierI = 0; qualifierI < MemoryQualifierTestCase::QUALIFIER_LAST; ++qualifierI)
+	{
+		const MemoryQualifierTestCase::Qualifier	memoryQualifier		= (MemoryQualifierTestCase::Qualifier)qualifierI;
+		const char* const							memoryQualifierName =
+			memoryQualifier == MemoryQualifierTestCase::QUALIFIER_COHERENT ? "coherent" :
+			memoryQualifier == MemoryQualifierTestCase::QUALIFIER_VOLATILE ? "volatile" :
+			memoryQualifier == MemoryQualifierTestCase::QUALIFIER_RESTRICT ? "restrict" :
+			DE_NULL;
+
+		de::MovePtr<tcu::TestCaseGroup> qualifierGroup(new tcu::TestCaseGroup(testCtx, memoryQualifierName, ""));
+
+		for (deInt32 imageTypeNdx = 0; imageTypeNdx < DE_LENGTH_OF_ARRAY(imageParametersArray); imageTypeNdx++)
+		{
+			const ImageType		imageType = imageParametersArray[imageTypeNdx].imageType;
+			const tcu::UVec4	imageSize = imageParametersArray[imageTypeNdx].imageSize;
+
+			if (memoryQualifier == MemoryQualifierTestCase::QUALIFIER_RESTRICT)
+			{
+				de::MovePtr<TestCase> restrictCase = createImageQualifierRestrictCase(testCtx, imageType, getImageTypeName(imageType));
+				qualifierGroup->addChild(restrictCase.release());
+			}
+			else
+			{
+				for (deInt32 formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(formats); formatNdx++)
+				{
+					const tcu::TextureFormat&	format		= formats[formatNdx];
+					const std::string			formatName	= getShaderImageFormatQualifier(formats[formatNdx]);
+
+					qualifierGroup->addChild(
+						new MemoryQualifierTestCase(testCtx, getImageTypeName(imageType) + std::string("_") + formatName,
+						"", memoryQualifier, imageType, imageSize, format, glu::GLSL_VERSION_440));
+				}
+			}
+		}
+
+		imageQualifiersTests->addChild(qualifierGroup.release());
+	}
+
+	return imageQualifiersTests.release();
+}
+
+} // image
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/image/vktImageQualifiersTests.hpp b/external/vulkancts/modules/vulkan/image/vktImageQualifiersTests.hpp
new file mode 100644
index 0000000..7955c5a
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/image/vktImageQualifiersTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTIMAGEQUALIFIERSTESTS_HPP
+#define _VKTIMAGEQUALIFIERSTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Memory qualifiers tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace image
+{
+
+tcu::TestCaseGroup* createImageQualifiersTests (tcu::TestContext& testCtx);
+
+} // image
+} // vkt
+
+#endif // _VKTIMAGEQUALIFIERSTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/image/vktImageSizeTests.cpp b/external/vulkancts/modules/vulkan/image/vktImageSizeTests.cpp
new file mode 100644
index 0000000..e704871
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/image/vktImageSizeTests.cpp
@@ -0,0 +1,546 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image size Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktImageSizeTests.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vktImageTestsUtil.hpp"
+#include "vktImageTexture.hpp"
+
+#include "vkDefs.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkPrograms.hpp"
+#include "vkMemUtil.hpp"
+#include "vkBuilderUtil.hpp"
+#include "vkImageUtil.hpp"
+
+#include "deUniquePtr.hpp"
+#include "deStringUtil.hpp"
+
+#include <string>
+
+using namespace vk;
+
+namespace vkt
+{
+namespace image
+{
+namespace
+{
+
+//! Get a texture based on image type and suggested size.
+Texture getTexture (const ImageType imageType, const tcu::IVec3& size)
+{
+	switch (imageType)
+	{
+		case IMAGE_TYPE_1D:
+		case IMAGE_TYPE_BUFFER:
+			return Texture(imageType, tcu::IVec3(size.x(), 1, 1), 1);
+
+		case IMAGE_TYPE_1D_ARRAY:
+			return Texture(imageType, tcu::IVec3(size.x(), 1, 1), size.y());
+
+		case IMAGE_TYPE_2D:
+			return Texture(imageType, tcu::IVec3(size.x(), size.y(), 1), 1);
+
+		case IMAGE_TYPE_2D_ARRAY:
+			return Texture(imageType, tcu::IVec3(size.x(), size.y(), 1), size.z());
+
+		case IMAGE_TYPE_CUBE:
+			return Texture(imageType, tcu::IVec3(size.x(), size.x(), 1), 6);
+
+		case IMAGE_TYPE_CUBE_ARRAY:
+			return Texture(imageType, tcu::IVec3(size.x(), size.x(), 1), 2*6);
+
+		case IMAGE_TYPE_3D:
+			return Texture(imageType, size, 1);
+
+		default:
+			DE_FATAL("Internal error");
+			return Texture(IMAGE_TYPE_LAST, tcu::IVec3(), 0);
+	}
+}
+
+inline VkImageCreateInfo makeImageCreateInfo (const Texture& texture, const VkFormat format)
+{
+	const VkImageCreateInfo imageParams =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,												// VkStructureType			sType;
+		DE_NULL,																			// const void*				pNext;
+		(isCube(texture) ? (VkImageCreateFlags)VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT : 0u),	// VkImageCreateFlags		flags;
+		mapImageType(texture.type()),														// VkImageType				imageType;
+		format,																				// VkFormat					format;
+		makeExtent3D(texture.layerSize()),													// VkExtent3D				extent;
+		1u,																					// deUint32					mipLevels;
+		(deUint32)texture.numLayers(),														// deUint32					arrayLayers;
+		VK_SAMPLE_COUNT_1_BIT,																// VkSampleCountFlagBits	samples;
+		VK_IMAGE_TILING_OPTIMAL,															// VkImageTiling			tiling;
+		VK_IMAGE_USAGE_STORAGE_BIT,															// VkImageUsageFlags		usage;
+		VK_SHARING_MODE_EXCLUSIVE,															// VkSharingMode			sharingMode;
+		0u,																					// deUint32					queueFamilyIndexCount;
+		DE_NULL,																			// const deUint32*			pQueueFamilyIndices;
+		VK_IMAGE_LAYOUT_UNDEFINED,															// VkImageLayout			initialLayout;
+	};
+	return imageParams;
+}
+
+//! Interpret the memory as IVec3
+inline tcu::IVec3 readIVec3 (const void* const data)
+{
+	const int* const p = reinterpret_cast<const int*>(data);
+	return tcu::IVec3(p[0], p[1], p[2]);
+}
+
+tcu::IVec3 getExpectedImageSizeResult (const Texture& texture)
+{
+	// GLSL imageSize() function returns:
+	// z = 0 for cubes
+	// z = N for cube arrays, where N is the number of cubes
+	// y or z = L where L is the number of layers for other array types (e.g. 1D array, 2D array)
+	// z = D where D is the depth of 3d image
+
+	const tcu::IVec3 size = texture.size();
+	const int numCubeFaces = 6;
+
+	switch (texture.type())
+	{
+		case IMAGE_TYPE_1D:
+		case IMAGE_TYPE_BUFFER:
+			return tcu::IVec3(size.x(), 0, 0);
+
+		case IMAGE_TYPE_1D_ARRAY:
+		case IMAGE_TYPE_2D:
+		case IMAGE_TYPE_CUBE:
+			return tcu::IVec3(size.x(), size.y(), 0);
+
+		case IMAGE_TYPE_2D_ARRAY:
+		case IMAGE_TYPE_3D:
+			return size;
+
+		case IMAGE_TYPE_CUBE_ARRAY:
+			return tcu::IVec3(size.x(), size.y(), size.z() / numCubeFaces);
+
+		default:
+			DE_FATAL("Internal error");
+			return tcu::IVec3();
+	}
+}
+
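+//! Test that the GLSL imageSize() result matches the texture dimensions for images declared readonly and/or writeonly.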
+class SizeTest : public TestCase
+{
+public:
+	enum TestFlags
+	{
+		FLAG_READONLY_IMAGE		= 1u << 0,
+		FLAG_WRITEONLY_IMAGE	= 1u << 1,
+	};
+
+						SizeTest			(tcu::TestContext&	testCtx,
+											 const std::string&	name,
+											 const std::string&	description,
+											 const Texture&		texture,
+											 const VkFormat		format,
+											 const deUint32		flags = 0);
+
+	void				initPrograms		(SourceCollections& programCollection) const;
+	TestInstance*		createInstance		(Context&			context) const;
+
+private:
+	const Texture		m_texture;
+	const VkFormat		m_format;
+	const bool			m_useReadonly;
+	const bool			m_useWriteonly;
+};
+
+SizeTest::SizeTest (tcu::TestContext&		testCtx,
+					const std::string&		name,
+					const std::string&		description,
+					const Texture&			texture,
+					const VkFormat			format,
+					const deUint32			flags)
+	: TestCase			(testCtx, name, description)
+	, m_texture			(texture)
+	, m_format			(format)
+	, m_useReadonly		((flags & FLAG_READONLY_IMAGE) != 0)
+	, m_useWriteonly	((flags & FLAG_WRITEONLY_IMAGE) != 0)
+{
+	// We expect at least one flag to be set.
+	DE_ASSERT(m_useReadonly || m_useWriteonly);
+}
+
+void SizeTest::initPrograms (SourceCollections& programCollection) const
+{
+	const std::string formatQualifierStr = getShaderImageFormatQualifier(mapVkFormat(m_format));
+	const std::string imageTypeStr = getShaderImageType(mapVkFormat(m_format), m_texture.type());
+	const int dimension = m_texture.dimension();
+
+	std::ostringstream accessQualifier;
+	if (m_useReadonly)
+		accessQualifier << " readonly";
+	if (m_useWriteonly)
+		accessQualifier << " writeonly";
+
+	std::ostringstream src;
+	src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
+		<< "\n"
+		<< "layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n"
+		<< "layout (binding = 0, " << formatQualifierStr << ")" << accessQualifier.str() << " uniform highp " << imageTypeStr << " u_image;\n"
+		<< "layout (binding = 1) writeonly buffer Output {\n"
+		<< "    ivec3 size;\n"
+		<< "} sb_out;\n"
+		<< "\n"
+		<< "void main (void)\n"
+		<< "{\n"
+		<< (dimension == 1 ?
+			"    sb_out.size = ivec3(imageSize(u_image), 0, 0);\n"
+			: dimension == 2 || m_texture.type() == IMAGE_TYPE_CUBE ?		// cubes return ivec2
+			"    sb_out.size = ivec3(imageSize(u_image), 0);\n"
+			: dimension == 3 ?												// cube arrays return ivec3
+			"    sb_out.size = imageSize(u_image);\n"
+			: "")
+		<< "}\n";
+
+	programCollection.glslSources.add("comp") << glu::ComputeSource(src.str());
+}
+
+//! Build a case name, e.g. "readonly_writeonly_32x32"
+std::string getCaseName (const Texture& texture, const deUint32 flags)
+{
+	std::ostringstream str;
+	str << ((flags & SizeTest::FLAG_READONLY_IMAGE) != 0 ? "readonly_" : "")
+		<< ((flags & SizeTest::FLAG_WRITEONLY_IMAGE) != 0 ? "writeonly_" : "");
+
+	const int numComponents = texture.dimension();
+	for (int i = 0; i < numComponents; ++i)
+		str << (i == 0 ? "" : "x") << texture.size()[i];
+
+	return str.str();
+}
+
+//! Base test instance for image and buffer tests
+class SizeTestInstance : public TestInstance
+{
+public:
+									SizeTestInstance			(Context&				context,
+																 const Texture&			texture,
+																 const VkFormat			format);
+
+	tcu::TestStatus                 iterate						(void);
+
+	virtual							~SizeTestInstance			(void) {}
+
+protected:
+	virtual VkDescriptorSetLayout	prepareDescriptors			(void) = 0;
+	virtual VkDescriptorSet         getDescriptorSet			(void) const = 0;
+	virtual void					commandBeforeCompute		(const VkCommandBuffer	cmdBuffer) = 0;
+
+	const Texture					m_texture;
+	const VkFormat					m_format;
+	const VkDeviceSize				m_resultBufferSizeBytes;
+	de::MovePtr<Buffer>				m_resultBuffer;				//!< Shader writes the output here.
+};
+
+SizeTestInstance::SizeTestInstance (Context& context, const Texture& texture, const VkFormat format)
+	: TestInstance				(context)
+	, m_texture					(texture)
+	, m_format					(format)
+	, m_resultBufferSizeBytes	(3 * sizeof(deUint32))	// ivec3 in shader
+{
+	const DeviceInterface&	vk			= m_context.getDeviceInterface();
+	const VkDevice			device		= m_context.getDevice();
+	Allocator&				allocator	= m_context.getDefaultAllocator();
+
+	// Create an SSBO for shader output.
+
+	m_resultBuffer = de::MovePtr<Buffer>(new Buffer(
+		vk, device, allocator,
+		makeBufferCreateInfo(m_resultBufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT),
+		MemoryRequirement::HostVisible));
+}
+
+tcu::TestStatus SizeTestInstance::iterate (void)
+{
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const VkDevice			device				= m_context.getDevice();
+	const VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+
+	// Create memory barriers.
+
+	const VkBufferMemoryBarrier shaderWriteBarrier = makeBufferMemoryBarrier(
+		VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
+		m_resultBuffer->get(), 0ull, m_resultBufferSizeBytes);
+
+	// Create the pipeline.
+
+	const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
+
+	const VkDescriptorSetLayout descriptorSetLayout = prepareDescriptors();
+	const VkDescriptorSet descriptorSet = getDescriptorSet();
+
+	const Unique<VkPipelineLayout> pipelineLayout(makePipelineLayout(vk, device, descriptorSetLayout));
+	const Unique<VkPipeline> pipeline(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
+
+	const Unique<VkCommandPool> cmdPool(makeCommandPool(vk, device, queueFamilyIndex));
+	const Unique<VkCommandBuffer> cmdBuffer(makeCommandBuffer(vk, device, *cmdPool));
+
+	beginCommandBuffer(vk, *cmdBuffer);
+
+	vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+	vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
+
+	commandBeforeCompute(*cmdBuffer);
+	vk.cmdDispatch(*cmdBuffer, 1, 1, 1);
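+	// Make the shader writes to the result buffer visible to the host read-back below.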
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &shaderWriteBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+
+	endCommandBuffer(vk, *cmdBuffer);
+
+	submitCommandsAndWait(vk, device, queue, *cmdBuffer);
+
+	// Compare the result.
+
+	const Allocation& bufferAlloc = m_resultBuffer->getAllocation();
+	invalidateMappedMemoryRange(vk, device, bufferAlloc.getMemory(), bufferAlloc.getOffset(), m_resultBufferSizeBytes);
+
+	const tcu::IVec3 resultSize = readIVec3(bufferAlloc.getHostPtr());
+	const tcu::IVec3 expectedSize = getExpectedImageSizeResult(m_texture);
+
+	if (resultSize != expectedSize)
+		return tcu::TestStatus::fail("Incorrect imageSize(): expected " + de::toString(expectedSize) + " but got " + de::toString(resultSize));
+	else
+		return tcu::TestStatus::pass("Passed");
+}
+
+class ImageSizeTestInstance : public SizeTestInstance
+{
+public:
+									ImageSizeTestInstance		(Context&				context,
+																 const Texture&			texture,
+																 const VkFormat			format);
+
+protected:
+	VkDescriptorSetLayout			prepareDescriptors			(void);
+	void							commandBeforeCompute		(const VkCommandBuffer	cmdBuffer);
+
+	VkDescriptorSet                 getDescriptorSet			(void) const { return *m_descriptorSet; }
+
+	de::MovePtr<Image>				m_image;
+	Move<VkImageView>				m_imageView;
+	Move<VkDescriptorSetLayout>		m_descriptorSetLayout;
+	Move<VkDescriptorPool>			m_descriptorPool;
+	Move<VkDescriptorSet>			m_descriptorSet;
+};
+
+ImageSizeTestInstance::ImageSizeTestInstance (Context& context, const Texture& texture, const VkFormat format)
+	: SizeTestInstance	(context, texture, format)
+{
+	const DeviceInterface&	vk			= m_context.getDeviceInterface();
+	const VkDevice			device		= m_context.getDevice();
+	Allocator&				allocator	= m_context.getDefaultAllocator();
+
+	// Create an image. Its data will be uninitialized, as we're not reading from it.
+
+	m_image = de::MovePtr<Image>(new Image(vk, device, allocator, makeImageCreateInfo(m_texture, m_format), MemoryRequirement::Any));
+
+	const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, m_texture.numLayers());
+	m_imageView = makeImageView(vk, device, m_image->get(), mapImageViewType(m_texture.type()), m_format, subresourceRange);
+}
+
+VkDescriptorSetLayout ImageSizeTestInstance::prepareDescriptors (void)
+{
+	const DeviceInterface&	vk		= m_context.getDeviceInterface();
+	const VkDevice			device	= m_context.getDevice();
+
+	m_descriptorSetLayout = DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device);
+
+	m_descriptorPool = DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+
+	m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
+
+	const VkDescriptorImageInfo descriptorImageInfo = makeDescriptorImageInfo(DE_NULL, *m_imageView, VK_IMAGE_LAYOUT_GENERAL);
+	const VkDescriptorBufferInfo descriptorBufferInfo = makeDescriptorBufferInfo(m_resultBuffer->get(), 0ull, m_resultBufferSizeBytes);
+
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfo)
+		.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &descriptorBufferInfo)
+		.update(vk, device);
+
+	return *m_descriptorSetLayout;
+}
+
+void ImageSizeTestInstance::commandBeforeCompute (const VkCommandBuffer cmdBuffer)
+{
+	const DeviceInterface& vk = m_context.getDeviceInterface();
+
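+	// Transition the whole image to VK_IMAGE_LAYOUT_GENERAL so it can be bound as a storage image.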
+	const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, m_texture.numLayers());
+	const VkImageMemoryBarrier barrierSetImageLayout = makeImageMemoryBarrier(
+		0u, 0u,
+		VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
+		m_image->get(), subresourceRange);
+
+	vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &barrierSetImageLayout);
+}
+
+class BufferSizeTestInstance : public SizeTestInstance
+{
+public:
+									BufferSizeTestInstance		(Context&				context,
+																 const Texture&			texture,
+																 const VkFormat			format);
+
+protected:
+	VkDescriptorSetLayout			prepareDescriptors			(void);
+
+	void							commandBeforeCompute		(const VkCommandBuffer) {}
+	VkDescriptorSet					getDescriptorSet			(void) const { return *m_descriptorSet; }
+
+	de::MovePtr<Buffer>				m_imageBuffer;
+	Move<VkBufferView>				m_bufferView;
+	Move<VkDescriptorSetLayout>		m_descriptorSetLayout;
+	Move<VkDescriptorPool>			m_descriptorPool;
+	Move<VkDescriptorSet>			m_descriptorSet;
+};
+
+BufferSizeTestInstance::BufferSizeTestInstance (Context& context, const Texture& texture, const VkFormat format)
+	: SizeTestInstance	(context, texture, format)
+{
+	const DeviceInterface&	vk			= m_context.getDeviceInterface();
+	const VkDevice			device		= m_context.getDevice();
+	Allocator&				allocator	= m_context.getDefaultAllocator();
+
+	// Create a texel storage buffer. Its data will be uninitialized, as we're not reading from it.
+
+	const VkDeviceSize imageSizeBytes = getImageSizeBytes(m_texture.size(), m_format);
+	m_imageBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
+		makeBufferCreateInfo(imageSizeBytes, VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT), MemoryRequirement::Any));
+
+	m_bufferView = makeBufferView(vk, device, m_imageBuffer->get(), m_format, 0ull, imageSizeBytes);
+}
+
+VkDescriptorSetLayout BufferSizeTestInstance::prepareDescriptors (void)
+{
+	const DeviceInterface&	vk		= m_context.getDeviceInterface();
+	const VkDevice			device	= m_context.getDevice();
+
+	m_descriptorSetLayout = DescriptorSetLayoutBuilder()
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
+		.build(vk, device);
+
+	m_descriptorPool = DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER)
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
+		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+
+	m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
+
+	const VkDescriptorBufferInfo descriptorBufferInfo = makeDescriptorBufferInfo(m_resultBuffer->get(), 0ull, m_resultBufferSizeBytes);
+
+	DescriptorSetUpdateBuilder()
+		.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, &m_bufferView.get())
+		.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &descriptorBufferInfo)
+		.update(vk, device);
+
+	return *m_descriptorSetLayout;
+}
+
+TestInstance* SizeTest::createInstance (Context& context) const
+{
+	if (m_texture.type() == IMAGE_TYPE_BUFFER)
+		return new BufferSizeTestInstance(context, m_texture, m_format);
+	else
+		return new ImageSizeTestInstance(context, m_texture, m_format);
+}
+
+static const ImageType s_imageTypes[] =
+{
+	IMAGE_TYPE_1D,
+	IMAGE_TYPE_1D_ARRAY,
+	IMAGE_TYPE_2D,
+	IMAGE_TYPE_2D_ARRAY,
+	IMAGE_TYPE_3D,
+	IMAGE_TYPE_CUBE,
+	IMAGE_TYPE_CUBE_ARRAY,
+	IMAGE_TYPE_BUFFER,
+};
+
+//! Base sizes used to generate actual image/buffer sizes in the test.
+static const tcu::IVec3 s_baseImageSizes[] =
+{
+	tcu::IVec3(32, 32, 32),
+	tcu::IVec3(12, 34, 56),
+	tcu::IVec3(1,   1,  1),
+	tcu::IVec3(7,   1,  1),
+};
+
+static const deUint32 s_flags[] =
+{
+	SizeTest::FLAG_READONLY_IMAGE,
+	SizeTest::FLAG_WRITEONLY_IMAGE,
+	SizeTest::FLAG_READONLY_IMAGE | SizeTest::FLAG_WRITEONLY_IMAGE,
+};
+
+} // anonymous
+
+tcu::TestCaseGroup* createImageSizeTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> testGroup(new tcu::TestCaseGroup(testCtx, "image_size", "imageSize() cases"));
+
+	const VkFormat format = VK_FORMAT_R32G32B32A32_SFLOAT;
+
+	for (int imageTypeNdx = 0; imageTypeNdx < DE_LENGTH_OF_ARRAY(s_imageTypes); ++imageTypeNdx)
+	{
+		de::MovePtr<tcu::TestCaseGroup> imageGroup(new tcu::TestCaseGroup(testCtx, getImageTypeName(s_imageTypes[imageTypeNdx]).c_str(), ""));
+
+		for (int flagNdx = 0; flagNdx < DE_LENGTH_OF_ARRAY(s_flags); ++flagNdx)
+		for (int imageSizeNdx = 0; imageSizeNdx < DE_LENGTH_OF_ARRAY(s_baseImageSizes); ++imageSizeNdx)
+		{
+			const Texture texture = getTexture(s_imageTypes[imageTypeNdx], s_baseImageSizes[imageSizeNdx]);
+			imageGroup->addChild(new SizeTest(testCtx, getCaseName(texture, s_flags[flagNdx]), "", texture, format, s_flags[flagNdx]));
+		}
+
+		testGroup->addChild(imageGroup.release());
+	}
+	return testGroup.release();
+}
+
+} // image
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/image/vktImageSizeTests.hpp b/external/vulkancts/modules/vulkan/image/vktImageSizeTests.hpp
new file mode 100644
index 0000000..18d35fd
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/image/vktImageSizeTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTIMAGESIZETESTS_HPP
+#define _VKTIMAGESIZETESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image size tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace image
+{
+
+tcu::TestCaseGroup* createImageSizeTests	(tcu::TestContext& testCtx);
+
+} // image
+} // vkt
+
+#endif // _VKTIMAGESIZETESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/image/vktImageTests.cpp b/external/vulkancts/modules/vulkan/image/vktImageTests.cpp
new file mode 100644
index 0000000..1c364de
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/image/vktImageTests.cpp
@@ -0,0 +1,68 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktImageTests.hpp"
+#include "vktImageLoadStoreTests.hpp"
+#include "vktImageQualifiersTests.hpp"
+#include "vktImageSizeTests.hpp"
+#include "vktTestGroupUtil.hpp"
+
+namespace vkt
+{
+namespace image
+{
+
+namespace
+{
+
+void createChildren (tcu::TestCaseGroup* imageTests)
+{
+	tcu::TestContext&	testCtx		= imageTests->getTestContext();
+
+	imageTests->addChild(createImageStoreTests(testCtx));
+	imageTests->addChild(createImageLoadStoreTests(testCtx));
+	imageTests->addChild(createImageFormatReinterpretTests(testCtx));
+	imageTests->addChild(createImageQualifiersTests(testCtx));
+	imageTests->addChild(createImageSizeTests(testCtx));
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx)
+{
+	return createTestGroup(testCtx, "image", "Image tests", createChildren);
+}
+
+} // image
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/image/vktImageTests.hpp b/external/vulkancts/modules/vulkan/image/vktImageTests.hpp
new file mode 100644
index 0000000..0be04ee
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/image/vktImageTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTIMAGETESTS_HPP
+#define _VKTIMAGETESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace image
+{
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx);
+
+} // image
+} // vkt
+
+#endif // _VKTIMAGETESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/image/vktImageTestsUtil.cpp b/external/vulkancts/modules/vulkan/image/vktImageTestsUtil.cpp
new file mode 100644
index 0000000..add8b64
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/image/vktImageTestsUtil.cpp
@@ -0,0 +1,448 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image Tests Utility Classes
+ *//*--------------------------------------------------------------------*/
+
+#include "vktImageTestsUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkTypeUtil.hpp"
+#include "tcuTextureUtil.hpp"
+
+using namespace vk;
+
+namespace vkt
+{
+namespace image
+{
+
+Buffer::Buffer (const DeviceInterface&		vk,
+				const VkDevice				device,
+				Allocator&					allocator,
+				const VkBufferCreateInfo&	bufferCreateInfo,
+				const MemoryRequirement		memoryRequirement)
+{
+	m_buffer = createBuffer(vk, device, &bufferCreateInfo);
+	m_allocation = allocator.allocate(getBufferMemoryRequirements(vk, device, *m_buffer), memoryRequirement);
+	VK_CHECK(vk.bindBufferMemory(device, *m_buffer, m_allocation->getMemory(), m_allocation->getOffset()));
+}
+
+Image::Image (const DeviceInterface&	vk,
+			  const VkDevice			device,
+			  Allocator&				allocator,
+			  const VkImageCreateInfo&	imageCreateInfo,
+			  const MemoryRequirement	memoryRequirement)
+{
+	m_image = createImage(vk, device, &imageCreateInfo);
+	m_allocation = allocator.allocate(getImageMemoryRequirements(vk, device, *m_image), memoryRequirement);
+	VK_CHECK(vk.bindImageMemory(device, *m_image, m_allocation->getMemory(), m_allocation->getOffset()));
+}
+
+VkBufferCreateInfo makeBufferCreateInfo (const VkDeviceSize			bufferSize,
+										 const VkBufferUsageFlags	usage)
+{
+	const VkBufferCreateInfo bufferCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,	// VkStructureType		sType;
+		DE_NULL,								// const void*			pNext;
+		0u,										// VkBufferCreateFlags	flags;
+		bufferSize,								// VkDeviceSize			size;
+		usage,									// VkBufferUsageFlags	usage;
+		VK_SHARING_MODE_EXCLUSIVE,				// VkSharingMode		sharingMode;
+		0u,										// deUint32				queueFamilyIndexCount;
+		DE_NULL,								// const deUint32*		pQueueFamilyIndices;
+	};
+	return bufferCreateInfo;
+}
+
+VkBufferImageCopy makeBufferImageCopy (const VkExtent3D extent,
+									   const deUint32	arraySize)
+{
+	const VkBufferImageCopy copyParams =
+	{
+		0ull,																		//	VkDeviceSize				bufferOffset;
+		0u,																			//	deUint32					bufferRowLength;
+		0u,																			//	deUint32					bufferImageHeight;
+		makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, arraySize),	//	VkImageSubresourceLayers	imageSubresource;
+		makeOffset3D(0, 0, 0),														//	VkOffset3D					imageOffset;
+		extent,																		//	VkExtent3D					imageExtent;
+	};
+	return copyParams;
+}
+
+Move<VkCommandPool> makeCommandPool (const DeviceInterface& vk, const VkDevice device, const deUint32 queueFamilyIndex)
+{
+	const VkCommandPoolCreateInfo commandPoolParams =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,			// VkStructureType			sType;
+		DE_NULL,											// const void*				pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,	// VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,									// deUint32					queueFamilyIndex;
+	};
+	return createCommandPool(vk, device, &commandPoolParams);
+}
+
+Move<VkCommandBuffer> makeCommandBuffer (const DeviceInterface& vk, const VkDevice device, const VkCommandPool commandPool)
+{
+	const VkCommandBufferAllocateInfo bufferAllocateParams =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,		// VkStructureType			sType;
+		DE_NULL,											// const void*				pNext;
+		commandPool,										// VkCommandPool			commandPool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,					// VkCommandBufferLevel		level;
+		1u,													// deUint32					bufferCount;
+	};
+	return allocateCommandBuffer(vk, device, &bufferAllocateParams);
+}
+
+Move<VkPipelineLayout> makePipelineLayout (const DeviceInterface&		vk,
+										   const VkDevice				device,
+										   const VkDescriptorSetLayout	descriptorSetLayout)
+{
+	const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType					sType;
+		DE_NULL,											// const void*						pNext;
+		0u,													// VkPipelineLayoutCreateFlags		flags;
+		1u,													// deUint32							setLayoutCount;
+		&descriptorSetLayout,								// const VkDescriptorSetLayout*		pSetLayouts;
+		0u,													// deUint32							pushConstantRangeCount;
+		DE_NULL,											// const VkPushConstantRange*		pPushConstantRanges;
+	};
+	return createPipelineLayout(vk, device, &pipelineLayoutParams);
+}
+
+Move<VkPipeline> makeComputePipeline (const DeviceInterface&	vk,
+									  const VkDevice			device,
+									  const VkPipelineLayout	pipelineLayout,
+									  const VkShaderModule		shaderModule)
+{
+	const VkPipelineShaderStageCreateInfo pipelineShaderStageParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// VkStructureType						sType;
+		DE_NULL,												// const void*							pNext;
+		0u,														// VkPipelineShaderStageCreateFlags		flags;
+		VK_SHADER_STAGE_COMPUTE_BIT,							// VkShaderStageFlagBits				stage;
+		shaderModule,											// VkShaderModule						module;
+		"main",													// const char*							pName;
+		DE_NULL,												// const VkSpecializationInfo*			pSpecializationInfo;
+	};
+	const VkComputePipelineCreateInfo pipelineCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,		// VkStructureType					sType;
+		DE_NULL,											// const void*						pNext;
+		0u,													// VkPipelineCreateFlags			flags;
+		pipelineShaderStageParams,							// VkPipelineShaderStageCreateInfo	stage;
+		pipelineLayout,										// VkPipelineLayout					layout;
+		DE_NULL,											// VkPipeline						basePipelineHandle;
+		0,													// deInt32							basePipelineIndex;
+	};
+	return createComputePipeline(vk, device, DE_NULL, &pipelineCreateInfo);
+}
+
+Move<VkBufferView> makeBufferView (const DeviceInterface&	vk,
+								   const VkDevice			vkDevice,
+								   const VkBuffer			buffer,
+								   const VkFormat			format,
+								   const VkDeviceSize		offset,
+								   const VkDeviceSize		size)
+{
+	const VkBufferViewCreateInfo bufferViewParams =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,	// VkStructureType			sType;
+		DE_NULL,									// const void*				pNext;
+		0u,											// VkBufferViewCreateFlags	flags;
+		buffer,										// VkBuffer					buffer;
+		format,										// VkFormat					format;
+		offset,										// VkDeviceSize				offset;
+		size,										// VkDeviceSize				range;
+	};
+	return createBufferView(vk, vkDevice, &bufferViewParams);
+}
+
+Move<VkImageView> makeImageView (const DeviceInterface&			vk,
+								 const VkDevice					vkDevice,
+								 const VkImage					image,
+								 const VkImageViewType			imageViewType,
+								 const VkFormat					format,
+								 const VkImageSubresourceRange	subresourceRange)
+{
+	const VkImageViewCreateInfo imageViewParams =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,		// VkStructureType			sType;
+		DE_NULL,										// const void*				pNext;
+		0u,												// VkImageViewCreateFlags	flags;
+		image,											// VkImage					image;
+		imageViewType,									// VkImageViewType			viewType;
+		format,											// VkFormat					format;
+		makeComponentMappingRGBA(),						// VkComponentMapping		components;
+		subresourceRange,								// VkImageSubresourceRange	subresourceRange;
+	};
+	return createImageView(vk, vkDevice, &imageViewParams);
+}
+
+Move<VkDescriptorSet> makeDescriptorSet (const DeviceInterface&			vk,
+										 const VkDevice					device,
+										 const VkDescriptorPool			descriptorPool,
+										 const VkDescriptorSetLayout	setLayout)
+{
+	const VkDescriptorSetAllocateInfo allocateParams =
+	{
+		VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,		// VkStructureType				sType;
+		DE_NULL,											// const void*					pNext;
+		descriptorPool,										// VkDescriptorPool				descriptorPool;
+		1u,													// deUint32						setLayoutCount;
+		&setLayout,											// const VkDescriptorSetLayout*	pSetLayouts;
+	};
+	return allocateDescriptorSet(vk, device, &allocateParams);
+}
+
+VkBufferMemoryBarrier makeBufferMemoryBarrier (const VkAccessFlags	srcAccessMask,
+											   const VkAccessFlags	dstAccessMask,
+											   const VkBuffer		buffer,
+											   const VkDeviceSize	offset,
+											   const VkDeviceSize	bufferSizeBytes)
+{
+	const VkBufferMemoryBarrier barrier =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,	// VkStructureType	sType;
+		DE_NULL,									// const void*		pNext;
+		srcAccessMask,								// VkAccessFlags	srcAccessMask;
+		dstAccessMask,								// VkAccessFlags	dstAccessMask;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			destQueueFamilyIndex;
+		buffer,										// VkBuffer			buffer;
+		offset,										// VkDeviceSize		offset;
+		bufferSizeBytes,							// VkDeviceSize		size;
+	};
+	return barrier;
+}
+
+VkImageMemoryBarrier makeImageMemoryBarrier	(const VkAccessFlags			srcAccessMask,
+											 const VkAccessFlags			dstAccessMask,
+											 const VkImageLayout			oldLayout,
+											 const VkImageLayout			newLayout,
+											 const VkImage					image,
+											 const VkImageSubresourceRange	subresourceRange)
+{
+	const VkImageMemoryBarrier barrier =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,			// VkStructureType			sType;
+		DE_NULL,										// const void*				pNext;
+		srcAccessMask,									// VkAccessFlags			outputMask;
+		dstAccessMask,									// VkAccessFlags			inputMask;
+		oldLayout,										// VkImageLayout			oldLayout;
+		newLayout,										// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					destQueueFamilyIndex;
+		image,											// VkImage					image;
+		subresourceRange,								// VkImageSubresourceRange	subresourceRange;
+	};
+	return barrier;
+}
+
+void beginCommandBuffer (const DeviceInterface& vk, const VkCommandBuffer commandBuffer)
+{
+	const VkCommandBufferBeginInfo commandBufBeginParams =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+		DE_NULL,										// const void*						pNext;
+		0u,												// VkCommandBufferUsageFlags		flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,	// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+	};
+	VK_CHECK(vk.beginCommandBuffer(commandBuffer, &commandBufBeginParams));
+}
+
+void endCommandBuffer (const DeviceInterface& vk, const VkCommandBuffer commandBuffer)
+{
+	VK_CHECK(vk.endCommandBuffer(commandBuffer));
+}
+
+void submitCommandsAndWait (const DeviceInterface&	vk,
+							const VkDevice			device,
+							const VkQueue			queue,
+							const VkCommandBuffer	commandBuffer)
+{
+	const VkFenceCreateInfo	fenceParams =
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+		DE_NULL,								// const void*			pNext;
+		0u,										// VkFenceCreateFlags	flags;
+	};
+	const Unique<VkFence> fence(createFence(vk, device, &fenceParams));
+
+	const VkSubmitInfo submitInfo =
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,		// VkStructureType			sType;
+		DE_NULL,							// const void*				pNext;
+		0u,									// deUint32					waitSemaphoreCount;
+		DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,									// deUint32					commandBufferCount;
+		&commandBuffer,						// const VkCommandBuffer*	pCommandBuffers;
+		0u,									// deUint32					signalSemaphoreCount;
+		DE_NULL,							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+	VK_CHECK(vk.waitForFences(device, 1u, &fence.get(), DE_TRUE, ~0ull));
+}
+
+VkImageType	mapImageType (const ImageType imageType)
+{
+	switch (imageType)
+	{
+		case IMAGE_TYPE_1D:
+		case IMAGE_TYPE_1D_ARRAY:
+		case IMAGE_TYPE_BUFFER:
+			return VK_IMAGE_TYPE_1D;
+
+		case IMAGE_TYPE_2D:
+		case IMAGE_TYPE_2D_ARRAY:
+		case IMAGE_TYPE_CUBE:
+		case IMAGE_TYPE_CUBE_ARRAY:
+			return VK_IMAGE_TYPE_2D;
+
+		case IMAGE_TYPE_3D:
+			return VK_IMAGE_TYPE_3D;
+
+		default:
+			DE_ASSERT(false);
+			return VK_IMAGE_TYPE_LAST;
+	}
+}
+
+VkImageViewType	mapImageViewType (const ImageType imageType)
+{
+	switch (imageType)
+	{
+		case IMAGE_TYPE_1D:			return VK_IMAGE_VIEW_TYPE_1D;
+		case IMAGE_TYPE_1D_ARRAY:	return VK_IMAGE_VIEW_TYPE_1D_ARRAY;
+		case IMAGE_TYPE_2D:			return VK_IMAGE_VIEW_TYPE_2D;
+		case IMAGE_TYPE_2D_ARRAY:	return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
+		case IMAGE_TYPE_3D:			return VK_IMAGE_VIEW_TYPE_3D;
+		case IMAGE_TYPE_CUBE:		return VK_IMAGE_VIEW_TYPE_CUBE;
+		case IMAGE_TYPE_CUBE_ARRAY:	return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
+
+		default:
+			DE_ASSERT(false);
+			return VK_IMAGE_VIEW_TYPE_LAST;
+	}
+}
+
+std::string getImageTypeName (const ImageType imageType)
+{
+	switch (imageType)
+	{
+		case IMAGE_TYPE_1D:			return "1d";
+		case IMAGE_TYPE_1D_ARRAY:	return "1d_array";
+		case IMAGE_TYPE_2D:			return "2d";
+		case IMAGE_TYPE_2D_ARRAY:	return "2d_array";
+		case IMAGE_TYPE_3D:			return "3d";
+		case IMAGE_TYPE_CUBE:		return "cube";
+		case IMAGE_TYPE_CUBE_ARRAY:	return "cube_array";
+		case IMAGE_TYPE_BUFFER:		return "buffer";
+
+		default:
+			DE_ASSERT(false);
+			return "";
+	}
+}
+
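+//! Map a texture format and image type to a GLSL image type, e.g. an unsigned integer
+//! 2D array texture maps to "uimage2DArray" and a float 2D texture to "image2D".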
+std::string getShaderImageType (const tcu::TextureFormat& format, const ImageType imageType)
+{
+	std::string formatPart = tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER ? "u" :
+							 tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER   ? "i" : "";
+
+	std::string imageTypePart;
+	switch (imageType)
+	{
+		case IMAGE_TYPE_1D:			imageTypePart = "1D";			break;
+		case IMAGE_TYPE_1D_ARRAY:	imageTypePart = "1DArray";		break;
+		case IMAGE_TYPE_2D:			imageTypePart = "2D";			break;
+		case IMAGE_TYPE_2D_ARRAY:	imageTypePart = "2DArray";		break;
+		case IMAGE_TYPE_3D:			imageTypePart = "3D";			break;
+		case IMAGE_TYPE_CUBE:		imageTypePart = "Cube";			break;
+		case IMAGE_TYPE_CUBE_ARRAY:	imageTypePart = "CubeArray";	break;
+		case IMAGE_TYPE_BUFFER:		imageTypePart = "Buffer";		break;
+
+		default:
+			DE_ASSERT(false);
+	}
+
+	return formatPart + "image" + imageTypePart;
+}
+
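+//! Map a texture format to a GLSL format layout qualifier, e.g. RGBA + FLOAT maps to
+//! "rgba32f" and R + UNSIGNED_INT8 maps to "r8ui".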
+std::string getShaderImageFormatQualifier (const tcu::TextureFormat& format)
+{
+	const char* orderPart;
+	const char* typePart;
+
+	switch (format.order)
+	{
+		case tcu::TextureFormat::R:		orderPart = "r";	break;
+		case tcu::TextureFormat::RG:	orderPart = "rg";	break;
+		case tcu::TextureFormat::RGB:	orderPart = "rgb";	break;
+		case tcu::TextureFormat::RGBA:	orderPart = "rgba";	break;
+
+		default:
+			DE_ASSERT(false);
+			orderPart = DE_NULL;
+	}
+
+	switch (format.type)
+	{
+		case tcu::TextureFormat::FLOAT:				typePart = "32f";		break;
+		case tcu::TextureFormat::HALF_FLOAT:		typePart = "16f";		break;
+
+		case tcu::TextureFormat::UNSIGNED_INT32:	typePart = "32ui";		break;
+		case tcu::TextureFormat::UNSIGNED_INT16:	typePart = "16ui";		break;
+		case tcu::TextureFormat::UNSIGNED_INT8:		typePart = "8ui";		break;
+
+		case tcu::TextureFormat::SIGNED_INT32:		typePart = "32i";		break;
+		case tcu::TextureFormat::SIGNED_INT16:		typePart = "16i";		break;
+		case tcu::TextureFormat::SIGNED_INT8:		typePart = "8i";		break;
+
+		case tcu::TextureFormat::UNORM_INT16:		typePart = "16";		break;
+		case tcu::TextureFormat::UNORM_INT8:		typePart = "8";			break;
+
+		case tcu::TextureFormat::SNORM_INT16:		typePart = "16_snorm";	break;
+		case tcu::TextureFormat::SNORM_INT8:		typePart = "8_snorm";	break;
+
+		default:
+			DE_ASSERT(false);
+			typePart = DE_NULL;
+	}
+
+	return std::string() + orderPart + typePart;
+}
+
+} // image
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/image/vktImageTestsUtil.hpp b/external/vulkancts/modules/vulkan/image/vktImageTestsUtil.hpp
new file mode 100644
index 0000000..c1e1ceb
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/image/vktImageTestsUtil.hpp
@@ -0,0 +1,209 @@
+#ifndef _VKTIMAGETESTSUTIL_HPP
+#define _VKTIMAGETESTSUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image Tests Utility Classes
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vkMemUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkTypeUtil.hpp"
+#include "vkImageUtil.hpp"
+
+namespace vkt
+{
+namespace image
+{
+
+enum ImageType
+{
+	IMAGE_TYPE_1D = 0,
+	IMAGE_TYPE_1D_ARRAY,
+	IMAGE_TYPE_2D,
+	IMAGE_TYPE_2D_ARRAY,
+	IMAGE_TYPE_3D,
+	IMAGE_TYPE_CUBE,
+	IMAGE_TYPE_CUBE_ARRAY,
+	IMAGE_TYPE_BUFFER,
+
+	IMAGE_TYPE_LAST
+};
+
+vk::VkImageType			mapImageType					(const ImageType imageType);
+vk::VkImageViewType		mapImageViewType				(const ImageType imageType);
+std::string				getImageTypeName				(const ImageType imageType);
+std::string				getShaderImageType				(const tcu::TextureFormat& format, const ImageType imageType);
+std::string				getShaderImageFormatQualifier	(const tcu::TextureFormat& format);
+
+class Buffer
+{
+public:
+									Buffer			(const vk::DeviceInterface&		vk,
+													 const vk::VkDevice				device,
+													 vk::Allocator&					allocator,
+													 const vk::VkBufferCreateInfo&	bufferCreateInfo,
+													 const vk::MemoryRequirement	memoryRequirement);
+
+	vk::VkBuffer					get				(void) const { return *m_buffer; }
+	vk::VkBuffer					operator*		(void) const { return get(); }
+	vk::Allocation&					getAllocation	(void) const { return *m_allocation; }
+
+private:
+	de::MovePtr<vk::Allocation>		m_allocation;
+	vk::Move<vk::VkBuffer>			m_buffer;
+
+									Buffer			(const Buffer&);  // "deleted"
+	Buffer&							operator=		(const Buffer&);
+};
+
+class Image
+{
+public:
+									Image			(const vk::DeviceInterface&		vk,
+													 const vk::VkDevice				device,
+													 vk::Allocator&					allocator,
+													 const vk::VkImageCreateInfo&	imageCreateInfo,
+													 const vk::MemoryRequirement	memoryRequirement);
+
+	vk::VkImage						get				(void) const { return *m_image; }
+	vk::VkImage						operator*		(void) const { return get(); }
+	vk::Allocation&					getAllocation	(void) const { return *m_allocation; }
+
+private:
+	de::MovePtr<vk::Allocation>		m_allocation;
+	vk::Move<vk::VkImage>			m_image;
+
+									Image			(const Image&);  // "deleted"
+	Image&							operator=		(const Image&);
+};
+
+//! Dynamic size array, used to hold smart pointers like vk::Move which don't work with std::vector.
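+//! Illustrative use (assumed, not taken from these tests): DynArray<vk::Move<vk::VkImageView> > views(numViews);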
+template<typename T>
+class DynArray
+{
+public:
+				DynArray	(std::size_t size)			{ data = new T[size]; }
+				~DynArray	(void)						{ delete [] data; }
+
+	T&			operator[]	(std::size_t idx)			{ return data[idx]; }
+	const T&	operator[]	(std::size_t idx) const		{ return data[idx]; }
+
+private:
+	T* data;
+
+				DynArray	(const DynArray&);  // "deleted"
+	DynArray&	operator=	(const DynArray&);
+};
+
+vk::Move<vk::VkCommandPool>		makeCommandPool					(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const deUint32						queueFamilyIndex);
+
+vk::Move<vk::VkCommandBuffer>	makeCommandBuffer				(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkCommandPool			commandPool);
+
+vk::Move<vk::VkPipelineLayout>	makePipelineLayout				(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkDescriptorSetLayout	descriptorSetLayout);
+
+vk::Move<vk::VkPipeline>		makeComputePipeline				(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkPipelineLayout			pipelineLayout,
+																 const vk::VkShaderModule			shaderModule);
+
+vk::Move<vk::VkBufferView>		makeBufferView					(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkBuffer					buffer,
+																 const vk::VkFormat					format,
+																 const vk::VkDeviceSize				offset,
+																 const vk::VkDeviceSize				size);
+
+vk::Move<vk::VkImageView>		makeImageView					(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkImage					image,
+																 const vk::VkImageViewType			imageViewType,
+																 const vk::VkFormat					format,
+																 const vk::VkImageSubresourceRange	subresourceRange);
+
+vk::Move<vk::VkDescriptorSet>	makeDescriptorSet				(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkDescriptorPool			descriptorPool,
+																 const vk::VkDescriptorSetLayout	setLayout);
+
+vk::VkBufferCreateInfo			makeBufferCreateInfo			(const vk::VkDeviceSize				bufferSize,
+																 const vk::VkBufferUsageFlags		usage);
+
+vk::VkBufferImageCopy			makeBufferImageCopy				(const vk::VkExtent3D				extent,
+																 const deUint32						arraySize);
+
+vk::VkBufferMemoryBarrier		makeBufferMemoryBarrier			(const vk::VkAccessFlags			srcAccessMask,
+																 const vk::VkAccessFlags			dstAccessMask,
+																 const vk::VkBuffer					buffer,
+																 const vk::VkDeviceSize				offset,
+																 const vk::VkDeviceSize				bufferSizeBytes);
+
+vk::VkImageMemoryBarrier		makeImageMemoryBarrier			(const vk::VkAccessFlags			srcAccessMask,
+																 const vk::VkAccessFlags			dstAccessMask,
+																 const vk::VkImageLayout			oldLayout,
+																 const vk::VkImageLayout			newLayout,
+																 const vk::VkImage					image,
+																 const vk::VkImageSubresourceRange	subresourceRange);
+
+void							beginCommandBuffer				(const vk::DeviceInterface&			vk,
+																 const vk::VkCommandBuffer			cmdBuffer);
+
+void							endCommandBuffer				(const vk::DeviceInterface&			vk,
+																 const vk::VkCommandBuffer			cmdBuffer);
+
+void							submitCommandsAndWait			(const vk::DeviceInterface&			vk,
+																 const vk::VkDevice					device,
+																 const vk::VkQueue					queue,
+																 const vk::VkCommandBuffer			cmdBuffer);
+
+inline vk::VkExtent3D makeExtent3D (const tcu::IVec3& vec)
+{
+	return vk::makeExtent3D(vec.x(), vec.y(), vec.z());
+}
+
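+//! Total size of the image data in bytes, e.g. a 32x32x1 VK_FORMAT_R32G32B32A32_SFLOAT
+//! image takes 32*32*1*16 = 16384 bytes.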
+inline vk::VkDeviceSize getImageSizeBytes (const tcu::IVec3& imageSize, const vk::VkFormat format)
+{
+	return tcu::getPixelSize(vk::mapVkFormat(format)) * imageSize.x() * imageSize.y() * imageSize.z();
+}
+
+} // image
+} // vkt
+
+#endif // _VKTIMAGETESTSUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/image/vktImageTexture.cpp b/external/vulkancts/modules/vulkan/image/vktImageTexture.cpp
new file mode 100644
index 0000000..21e35f9
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/image/vktImageTexture.cpp
@@ -0,0 +1,164 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Texture utility class
+ *//*--------------------------------------------------------------------*/
+
+#include "vktImageTexture.hpp"
+
+namespace vkt
+{
+namespace image
+{
+
+Texture::Texture (const ImageType type_, const tcu::IVec3& layerSize_, const int layers)
+	: m_layerSize	(layerSize_)
+	, m_type		(type_)
+	, m_numLayers	(layers)
+{
+	DE_ASSERT(m_numLayers >= 1);
+	DE_ASSERT(m_layerSize.x() >= 1 && m_layerSize.y() >= 1 && m_layerSize.z() >= 1);
+
+	switch (type_)
+	{
+		case IMAGE_TYPE_1D:
+		case IMAGE_TYPE_BUFFER:
+			DE_ASSERT(m_numLayers == 1);
+			DE_ASSERT(m_layerSize.y() == 1 && m_layerSize.z() == 1);
+			break;
+
+		case IMAGE_TYPE_1D_ARRAY:
+			DE_ASSERT(m_layerSize.y() == 1 && m_layerSize.z() == 1);
+			break;
+
+		case IMAGE_TYPE_2D:
+			DE_ASSERT(m_numLayers == 1);
+			DE_ASSERT(m_layerSize.z() == 1);
+			break;
+
+		case IMAGE_TYPE_2D_ARRAY:
+			DE_ASSERT(m_layerSize.z() == 1);
+			break;
+
+		case IMAGE_TYPE_CUBE:
+			DE_ASSERT(m_numLayers == 6);
+			DE_ASSERT(m_layerSize.z() == 1);
+			break;
+
+		case IMAGE_TYPE_CUBE_ARRAY:
+			DE_ASSERT(m_numLayers >= 6 && m_numLayers % 6 == 0);
+			DE_ASSERT(m_layerSize.z() == 1);
+			break;
+
+		case IMAGE_TYPE_3D:
+			DE_ASSERT(m_numLayers == 1);
+			break;
+
+		default:
+			DE_FATAL("Internal error");
+			break;
+	}
+}
+
+tcu::IVec3 Texture::size (void) const
+{
+	switch (m_type)
+	{
+		case IMAGE_TYPE_1D:
+		case IMAGE_TYPE_BUFFER:
+		case IMAGE_TYPE_2D:
+		case IMAGE_TYPE_3D:
+			return m_layerSize;
+
+		case IMAGE_TYPE_1D_ARRAY:
+			return tcu::IVec3(m_layerSize.x(), m_numLayers, 1);
+
+		case IMAGE_TYPE_2D_ARRAY:
+		case IMAGE_TYPE_CUBE:
+		case IMAGE_TYPE_CUBE_ARRAY:
+			return tcu::IVec3(m_layerSize.x(), m_layerSize.y(), m_numLayers);
+
+		default:
+			DE_FATAL("Internal error");
+			return tcu::IVec3();
+	}
+}
+
+int Texture::dimension (void) const
+{
+	switch (m_type)
+	{
+		case IMAGE_TYPE_1D:
+		case IMAGE_TYPE_BUFFER:
+			return 1;
+
+		case IMAGE_TYPE_1D_ARRAY:
+		case IMAGE_TYPE_2D:
+			return 2;
+
+		case IMAGE_TYPE_2D_ARRAY:
+		case IMAGE_TYPE_CUBE:
+		case IMAGE_TYPE_CUBE_ARRAY:
+		case IMAGE_TYPE_3D:
+			return 3;
+
+		default:
+			DE_FATAL("Internal error");
+			return 0;
+	}
+}
+
+int Texture::layerDimension (void) const
+{
+	switch (m_type)
+	{
+		case IMAGE_TYPE_1D:
+		case IMAGE_TYPE_BUFFER:
+		case IMAGE_TYPE_1D_ARRAY:
+			return 1;
+
+		case IMAGE_TYPE_2D:
+		case IMAGE_TYPE_2D_ARRAY:
+		case IMAGE_TYPE_CUBE:
+		case IMAGE_TYPE_CUBE_ARRAY:
+			return 2;
+
+		case IMAGE_TYPE_3D:
+			return 3;
+
+		default:
+			DE_FATAL("Internal error");
+			return 0;
+	}
+}
+
+} // image
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/image/vktImageTexture.hpp b/external/vulkancts/modules/vulkan/image/vktImageTexture.hpp
new file mode 100644
index 0000000..18715d2
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/image/vktImageTexture.hpp
@@ -0,0 +1,73 @@
+#ifndef _VKTIMAGETEXTURE_HPP
+#define _VKTIMAGETEXTURE_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Mobica Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Texture utility class
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktImageTestsUtil.hpp"
+
+namespace vkt
+{
+namespace image
+{
+
+//! Texture buffer/image abstraction. Helps managing size and number of layers.
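+//! For example, Texture(IMAGE_TYPE_2D_ARRAY, tcu::IVec3(64, 64, 1), 8) has
+//! size() == (64, 64, 8), dimension() == 3 and layerDimension() == 2.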
+class Texture
+{
+public:
+						Texture			(const ImageType type, const tcu::IVec3& layerSize, const int layers);
+
+	ImageType			type			(void) const { return m_type; }			//!< Texture type
+	tcu::IVec3			layerSize		(void) const { return m_layerSize; }	//!< Size of a single layer
+	int					numLayers		(void) const { return m_numLayers; }	//!< Number of array layers (for array and cube types)
+
+	tcu::IVec3			size			(void) const;	//!< Size including number of layers in additional dimension (e.g. z in 2d texture)
+	int					dimension		(void) const;	//!< Coordinate dimension used for addressing (e.g. 3 (x,y,z) for 2d array)
+	int					layerDimension	(void) const;	//!< Coordinate dimension used for addressing a single layer (e.g. 2 (x,y) for 2d array)
+
+private:
+	const tcu::IVec3	m_layerSize;
+	const ImageType		m_type;
+	const int			m_numLayers;
+};
+
+inline bool isCube (const Texture& texture)
+{
+	return texture.type() == IMAGE_TYPE_CUBE || texture.type() == IMAGE_TYPE_CUBE_ARRAY;
+}
+
+} // image
+} // vkt
+
+#endif // _VKTIMAGETEXTURE_HPP
diff --git a/external/vulkancts/modules/vulkan/memory/CMakeLists.txt b/external/vulkancts/modules/vulkan/memory/CMakeLists.txt
new file mode 100644
index 0000000..941fbfc
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/memory/CMakeLists.txt
@@ -0,0 +1,23 @@
+# Memory tests
+
+include_directories(..)
+
+set(DEQP_VK_MEMORY_SRCS
+	vktMemoryTests.cpp
+	vktMemoryTests.hpp
+	vktMemoryAllocationTests.cpp
+	vktMemoryAllocationTests.hpp
+	vktMemoryMappingTests.cpp
+	vktMemoryMappingTests.hpp
+	vktMemoryPipelineBarrierTests.hpp
+	vktMemoryPipelineBarrierTests.cpp
+	)
+
+set(DEQP_VK_MEMORY_LIBS
+	deqp-vk-common
+	tcutil
+	vkutil
+	)
+
+add_library(deqp-vk-memory STATIC ${DEQP_VK_MEMORY_SRCS})
+target_link_libraries(deqp-vk-memory ${DEQP_VK_MEMORY_LIBS})
diff --git a/external/vulkancts/modules/vulkan/memory/vktMemoryAllocationTests.cpp b/external/vulkancts/modules/vulkan/memory/vktMemoryAllocationTests.cpp
new file mode 100644
index 0000000..711efed
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/memory/vktMemoryAllocationTests.cpp
@@ -0,0 +1,638 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Simple memory allocation tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktMemoryAllocationTests.hpp"
+
+#include "vktTestCaseUtil.hpp"
+
+#include "tcuMaybe.hpp"
+#include "tcuResultCollector.hpp"
+#include "tcuTestLog.hpp"
+
+#include "vkPlatform.hpp"
+#include "vkStrUtil.hpp"
+#include "vkRef.hpp"
+#include "vkDeviceUtil.hpp"
+#include "vkQueryUtil.hpp"
+
+#include "deUniquePtr.hpp"
+#include "deStringUtil.hpp"
+#include "deRandom.hpp"
+
+using tcu::Maybe;
+using tcu::TestLog;
+
+using std::string;
+using std::vector;
+
+using namespace vk;
+
+namespace vkt
+{
+namespace memory
+{
+namespace
+{
+enum
+{
+	// The spec-mandated minimum for maxMemoryAllocationCount is 4096. Use 4000 to
+	// leave headroom for allocations made by layers etc.
+	MAX_ALLOCATION_COUNT = 4000
+};
+
+struct TestConfig
+{
+	enum Order
+	{
+		ALLOC_FREE,
+		ALLOC_REVERSE_FREE,
+		MIXED_ALLOC_FREE,
+		ORDER_LAST
+	};
+
+	Maybe<VkDeviceSize>	memorySize;
+	Maybe<float>		memoryPercentage;
+	deUint32			memoryAllocationCount;
+	Order				order;
+
+	TestConfig (void)
+		: memoryAllocationCount	((deUint32)-1)
+		, order					(ORDER_LAST)
+	{
+	}
+};
+
+class AllocateFreeTestInstance : public TestInstance
+{
+public:
+						AllocateFreeTestInstance		(Context& context, const TestConfig config)
+		: TestInstance			(context)
+		, m_config				(config)
+		, m_result				(m_context.getTestContext().getLog())
+		, m_memoryTypeIndex		(0)
+		, m_memoryProperties	(getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()))
+	{
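+		// Exactly one of memorySize and memoryPercentage must be defined.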
+		DE_ASSERT(!!m_config.memorySize != !!m_config.memoryPercentage);
+	}
+
+	tcu::TestStatus		iterate							(void);
+
+private:
+	const TestConfig						m_config;
+	tcu::ResultCollector					m_result;
+	deUint32								m_memoryTypeIndex;
+	const VkPhysicalDeviceMemoryProperties	m_memoryProperties;
+};
+
+tcu::TestStatus AllocateFreeTestInstance::iterate (void)
+{
+	TestLog&								log					= m_context.getTestContext().getLog();
+	const VkDevice							device				= m_context.getDevice();
+	const DeviceInterface&					vkd					= m_context.getDeviceInterface();
+
+	DE_ASSERT(m_config.memoryAllocationCount <= MAX_ALLOCATION_COUNT);
+
+	if (m_memoryTypeIndex == 0)
+	{
+		log << TestLog::Message << "Memory allocation count: " << m_config.memoryAllocationCount << TestLog::EndMessage;
+		log << TestLog::Message << "Single allocation size: " << (m_config.memorySize ? de::toString(*m_config.memorySize) : de::toString(100.0f * (*m_config.memoryPercentage)) + " percent of the heap size.") << TestLog::EndMessage;
+
+		if (m_config.order == TestConfig::ALLOC_REVERSE_FREE)
+			log << TestLog::Message << "Memory is freed in reverse order." << TestLog::EndMessage;
+		else if (m_config.order == TestConfig::ALLOC_FREE)
+			log << TestLog::Message << "Memory is freed in the same order as it was allocated." << TestLog::EndMessage;
+		else if (m_config.order == TestConfig::MIXED_ALLOC_FREE)
+			log << TestLog::Message << "Memory is freed right after allocation." << TestLog::EndMessage;
+		else
+			DE_FATAL("Unknown allocation order");
+	}
+
+	try
+	{
+		const VkMemoryType		memoryType		= m_memoryProperties.memoryTypes[m_memoryTypeIndex];
+		const VkMemoryHeap		memoryHeap		= m_memoryProperties.memoryHeaps[memoryType.heapIndex];
+
+		const VkDeviceSize		allocationSize	= (m_config.memorySize ? *m_config.memorySize : (VkDeviceSize)(*m_config.memoryPercentage * (float)memoryHeap.size));
+		vector<VkDeviceMemory>	memoryObjects	(m_config.memoryAllocationCount, (VkDeviceMemory)0);
+
+		log << TestLog::Message << "Memory type index: " << m_memoryTypeIndex << TestLog::EndMessage;
+
+		if (memoryType.heapIndex >= m_memoryProperties.memoryHeapCount)
+			m_result.fail("Invalid heap index defined for memory type.");
+
+		{
+			log << TestLog::Message << "Memory type: " << memoryType << TestLog::EndMessage;
+			log << TestLog::Message << "Memory heap: " << memoryHeap << TestLog::EndMessage;
+
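+			// Report NotSupported unless the heap is at least 8x the total amount of memory we are going to allocate.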
+			if (allocationSize * m_config.memoryAllocationCount * 8 > memoryHeap.size)
+				TCU_THROW(NotSupportedError, "Memory heap doesn't have enough memory.");
+
+			try
+			{
+				if (m_config.order == TestConfig::ALLOC_FREE || m_config.order == TestConfig::ALLOC_REVERSE_FREE)
+				{
+					for (size_t ndx = 0; ndx < m_config.memoryAllocationCount; ndx++)
+					{
+						const VkMemoryAllocateInfo alloc =
+						{
+							VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,	// sType
+							DE_NULL,								// pNext
+							allocationSize,							// allocationSize
+							m_memoryTypeIndex						// memoryTypeIndex;
+						};
+
+						VK_CHECK(vkd.allocateMemory(device, &alloc, (const VkAllocationCallbacks*)DE_NULL, &memoryObjects[ndx]));
+
+						TCU_CHECK(!!memoryObjects[ndx]);
+					}
+
+					if (m_config.order == TestConfig::ALLOC_REVERSE_FREE)
+					{
+						for (size_t ndx = 0; ndx < m_config.memoryAllocationCount; ndx++)
+						{
+							const VkDeviceMemory mem = memoryObjects[memoryObjects.size() - 1 - ndx];
+
+							vkd.freeMemory(device, mem, (const VkAllocationCallbacks*)DE_NULL);
+							memoryObjects[memoryObjects.size() - 1 - ndx] = (VkDeviceMemory)0;
+						}
+					}
+					else
+					{
+						for (size_t ndx = 0; ndx < m_config.memoryAllocationCount; ndx++)
+						{
+							const VkDeviceMemory mem = memoryObjects[ndx];
+
+							vkd.freeMemory(device, mem, (const VkAllocationCallbacks*)DE_NULL);
+							memoryObjects[ndx] = (VkDeviceMemory)0;
+						}
+					}
+				}
+				else
+				{
+					for (size_t ndx = 0; ndx < m_config.memoryAllocationCount; ndx++)
+					{
+						const VkMemoryAllocateInfo alloc =
+						{
+							VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,	// sType
+							DE_NULL,								// pNext
+							allocationSize,							// allocationSize
+							m_memoryTypeIndex						// memoryTypeIndex;
+						};
+
+						VK_CHECK(vkd.allocateMemory(device, &alloc, (const VkAllocationCallbacks*)DE_NULL, &memoryObjects[ndx]));
+						TCU_CHECK(!!memoryObjects[ndx]);
+
+						vkd.freeMemory(device, memoryObjects[ndx], (const VkAllocationCallbacks*)DE_NULL);
+						memoryObjects[ndx] = (VkDeviceMemory)0;
+					}
+				}
+			}
+			catch (...)
+			{
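+				// Free any allocations made before the exception and re-throw.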
+				for (size_t ndx = 0; ndx < m_config.memoryAllocationCount; ndx++)
+				{
+					const VkDeviceMemory mem = memoryObjects[ndx];
+
+					if (!!mem)
+					{
+						vkd.freeMemory(device, mem, (const VkAllocationCallbacks*)DE_NULL);
+						memoryObjects[ndx] = (VkDeviceMemory)0;
+					}
+				}
+
+				throw;
+			}
+		}
+	}
+	catch (const tcu::TestError& error)
+	{
+		m_result.fail(error.getMessage());
+	}
+
+	m_memoryTypeIndex++;
+
+	if (m_memoryTypeIndex < m_memoryProperties.memoryTypeCount)
+		return tcu::TestStatus::incomplete();
+	else
+		return tcu::TestStatus(m_result.getResult(), m_result.getMessage());
+}
+
+struct MemoryType
+{
+	deUint32		index;
+	VkMemoryType	type;
+};
+
+struct MemoryObject
+{
+	VkDeviceMemory	memory;
+	VkDeviceSize	size;
+};
+
+struct Heap
+{
+	VkMemoryHeap			heap;
+	VkDeviceSize			memoryUsage;
+	VkDeviceSize			maxMemoryUsage;
+	vector<MemoryType>		types;
+	vector<MemoryObject>	objects;
+};
+
+class RandomAllocFreeTestInstance : public TestInstance
+{
+public:
+						RandomAllocFreeTestInstance		(Context& context, deUint32 seed);
+						~RandomAllocFreeTestInstance	(void);
+
+	tcu::TestStatus		iterate							(void);
+
+private:
+	const size_t		m_opCount;
+	deUint32			m_memoryObjectCount;
+	size_t				m_opNdx;
+	de::Random			m_rng;
+	vector<Heap>		m_heaps;
+	vector<size_t>		m_nonFullHeaps;
+	vector<size_t>		m_nonEmptyHeaps;
+};
+
+RandomAllocFreeTestInstance::RandomAllocFreeTestInstance	(Context& context, deUint32 seed)
+	: TestInstance			(context)
+	, m_opCount				(128)
+	, m_memoryObjectCount	(0)
+	, m_opNdx				(0)
+	, m_rng					(seed)
+{
+	const VkPhysicalDevice					physicalDevice		= context.getPhysicalDevice();
+	const InstanceInterface&				vki					= context.getInstanceInterface();
+	const VkPhysicalDeviceMemoryProperties	memoryProperties	= getPhysicalDeviceMemoryProperties(vki, physicalDevice);
+
+	TCU_CHECK(memoryProperties.memoryHeapCount <= 32);
+	TCU_CHECK(memoryProperties.memoryTypeCount <= 32);
+
+	m_heaps.resize(memoryProperties.memoryHeapCount);
+
+	m_nonFullHeaps.reserve(m_heaps.size());
+	m_nonEmptyHeaps.reserve(m_heaps.size());
+
+	for (deUint32 heapNdx = 0; heapNdx < memoryProperties.memoryHeapCount; heapNdx++)
+	{
+		m_heaps[heapNdx].heap			= memoryProperties.memoryHeaps[heapNdx];
+		m_heaps[heapNdx].memoryUsage	= 0;
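+		// Use at most 1/8 of each heap in this test.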
+		m_heaps[heapNdx].maxMemoryUsage	= m_heaps[heapNdx].heap.size / 8;
+
+		m_heaps[heapNdx].objects.reserve(100);
+
+		m_nonFullHeaps.push_back((size_t)heapNdx);
+	}
+
+	for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < memoryProperties.memoryTypeCount; memoryTypeNdx++)
+	{
+		const MemoryType type =
+		{
+			memoryTypeNdx,
+			memoryProperties.memoryTypes[memoryTypeNdx]
+		};
+
+		TCU_CHECK(type.type.heapIndex < memoryProperties.memoryHeapCount);
+
+		m_heaps[type.type.heapIndex].types.push_back(type);
+	}
+}
+
+RandomAllocFreeTestInstance::~RandomAllocFreeTestInstance (void)
+{
+	const VkDevice							device				= m_context.getDevice();
+	const DeviceInterface&					vkd					= m_context.getDeviceInterface();
+
+	for (deUint32 heapNdx = 0; heapNdx < (deUint32)m_heaps.size(); heapNdx++)
+	{
+		const Heap&	heap	= m_heaps[heapNdx];
+
+		for (size_t objectNdx = 0; objectNdx < heap.objects.size(); objectNdx++)
+		{
+			if (!!heap.objects[objectNdx].memory)
+				vkd.freeMemory(device, heap.objects[objectNdx].memory, (const VkAllocationCallbacks*)DE_NULL);
+		}
+	}
+}
+
+tcu::TestStatus RandomAllocFreeTestInstance::iterate (void)
+{
+	const VkDevice			device			= m_context.getDevice();
+	const DeviceInterface&	vkd				= m_context.getDeviceInterface();
+	TestLog&				log				= m_context.getTestContext().getLog();
+	bool					allocateMore;
+
+	if (m_opNdx == 0)
+	{
+		log << TestLog::Message << "Performing " << m_opCount << " random vkAllocateMemory() / vkFreeMemory() calls before freeing all memory." << TestLog::EndMessage;
+		log << TestLog::Message << "Using max 1/8 of the memory in each memory heap." << TestLog::EndMessage;
+	}
+
+	if (m_opNdx >= m_opCount)
+	{
+		if (m_nonEmptyHeaps.empty())
+			return tcu::TestStatus::pass("Pass");
+		else
+			allocateMore = false;
+	}
+	else if (!m_nonEmptyHeaps.empty() && !m_nonFullHeaps.empty() && (m_memoryObjectCount < MAX_ALLOCATION_COUNT))
+		allocateMore = m_rng.getBool(); // Randomize if both operations are doable.
+	else if (m_nonEmptyHeaps.empty())
+		allocateMore = true; // Allocate more if there are no objects to free.
+	else if (m_nonFullHeaps.empty())
+		allocateMore = false; // Free objects if there is no free space for new objects.
+	else
+	{
+		allocateMore = false;
+		DE_FATAL("Unreachable: nothing can be allocated or freed");
+	}
+
+	if (allocateMore)
+	{
+		const size_t		nonFullHeapNdx	= (size_t)(m_rng.getUint32() % (deUint32)m_nonFullHeaps.size());
+		const size_t		heapNdx			= m_nonFullHeaps[nonFullHeapNdx];
+		Heap&				heap			= m_heaps[heapNdx];
+		const MemoryType&	memoryType		= m_rng.choose<MemoryType>(heap.types.begin(), heap.types.end());
+		const VkDeviceSize	allocationSize	= 1 + (m_rng.getUint64() % (deUint64)(heap.maxMemoryUsage - heap.memoryUsage));
+
+		if ((allocationSize > (deUint64)(heap.maxMemoryUsage - heap.memoryUsage)) && (allocationSize != 1))
+			TCU_THROW(InternalError, "Test error: trying to allocate more memory than is available in the heap.");
+
+		const MemoryObject object =
+		{
+			(VkDeviceMemory)0,
+			allocationSize
+		};
+
+		heap.objects.push_back(object);
+
+		const VkMemoryAllocateInfo alloc =
+		{
+			VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,	// sType
+			DE_NULL,								// pNext
+			object.size,							// allocationSize
+			memoryType.index						// memoryTypeIndex;
+		};
+
+		VK_CHECK(vkd.allocateMemory(device, &alloc, (const VkAllocationCallbacks*)DE_NULL, &heap.objects.back().memory));
+		TCU_CHECK(!!heap.objects.back().memory);
+		m_memoryObjectCount++;
+
+		// If heap was empty add to the non empty heaps.
+		if (heap.memoryUsage == 0)
+		{
+			DE_ASSERT(heap.objects.size() == 1);
+			m_nonEmptyHeaps.push_back(heapNdx);
+		}
+		else
+			DE_ASSERT(heap.objects.size() > 1);
+
+		heap.memoryUsage += allocationSize;
+
+		// If heap became full, remove from non full heaps.
+		if (heap.memoryUsage >= heap.maxMemoryUsage)
+		{
+			m_nonFullHeaps[nonFullHeapNdx] = m_nonFullHeaps.back();
+			m_nonFullHeaps.pop_back();
+		}
+	}
+	else
+	{
+		const size_t		nonEmptyHeapNdx	= (size_t)(m_rng.getUint32() % (deUint32)m_nonEmptyHeaps.size());
+		const size_t		heapNdx			= m_nonEmptyHeaps[nonEmptyHeapNdx];
+		Heap&				heap			= m_heaps[heapNdx];
+		const size_t		memoryObjectNdx	= m_rng.getUint32() % heap.objects.size();
+		MemoryObject&		memoryObject	= heap.objects[memoryObjectNdx];
+
+		vkd.freeMemory(device, memoryObject.memory, (const VkAllocationCallbacks*)DE_NULL);
+		memoryObject.memory = (VkDeviceMemory)0;
+		m_memoryObjectCount--;
+
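+		// If the heap was full and this free brings it below the limit, mark it as non-full again.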
+		if (heap.memoryUsage >= heap.maxMemoryUsage && heap.memoryUsage - memoryObject.size < heap.maxMemoryUsage)
+			m_nonFullHeaps.push_back(heapNdx);
+
+		heap.memoryUsage -= memoryObject.size;
+
+		heap.objects[memoryObjectNdx] = heap.objects.back();
+		heap.objects.pop_back();
+
+		if (heap.memoryUsage == 0)
+		{
+			DE_ASSERT(heap.objects.empty());
+
+			m_nonEmptyHeaps[nonEmptyHeapNdx] = m_nonEmptyHeaps.back();
+			m_nonEmptyHeaps.pop_back();
+		}
+		else
+			DE_ASSERT(!heap.objects.empty());
+	}
+
+	m_opNdx++;
+	return tcu::TestStatus::incomplete();
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createAllocationTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> group (new tcu::TestCaseGroup(testCtx, "allocation", "Memory allocation tests."));
+
+	const VkDeviceSize	KiB	= 1024;
+	const VkDeviceSize	MiB	= 1024 * KiB;
+
+	const struct
+	{
+		const char* const	str;
+		VkDeviceSize		size;
+	} allocationSizes[] =
+	{
+		{   "64", 64 },
+		{  "128", 128 },
+		{  "256", 256 },
+		{  "512", 512 },
+		{ "1KiB", 1*KiB },
+		{ "4KiB", 4*KiB },
+		{ "8KiB", 8*KiB },
+		{ "1MiB", 1*MiB }
+	};
+
+	const int allocationPercents[] =
+	{
+		1
+	};
+
+	const int allocationCounts[] =
+	{
+		1, 10, 100, 1000, -1
+	};
+
+	const struct
+	{
+		const char* const		str;
+		const TestConfig::Order	order;
+	} orders[] =
+	{
+		{ "forward",	TestConfig::ALLOC_FREE },
+		{ "reverse",	TestConfig::ALLOC_REVERSE_FREE },
+		{ "mixed",		TestConfig::MIXED_ALLOC_FREE }
+	};
+
+	{
+		de::MovePtr<tcu::TestCaseGroup>	basicGroup	(new tcu::TestCaseGroup(testCtx, "basic", "Basic memory allocation and free tests"));
+
+		for (size_t allocationSizeNdx = 0; allocationSizeNdx < DE_LENGTH_OF_ARRAY(allocationSizes); allocationSizeNdx++)
+		{
+			const VkDeviceSize				allocationSize		= allocationSizes[allocationSizeNdx].size;
+			const char* const				allocationSizeName	= allocationSizes[allocationSizeNdx].str;
+			de::MovePtr<tcu::TestCaseGroup>	sizeGroup			(new tcu::TestCaseGroup(testCtx, ("size_" + string(allocationSizeName)).c_str(), ("Test different allocation sizes " + de::toString(allocationSize)).c_str()));
+
+			for (size_t orderNdx = 0; orderNdx < DE_LENGTH_OF_ARRAY(orders); orderNdx++)
+			{
+				const TestConfig::Order			order				= orders[orderNdx].order;
+				const char* const				orderName			= orders[orderNdx].str;
+				const char* const				orderDescription	= orderName;
+				de::MovePtr<tcu::TestCaseGroup>	orderGroup			(new tcu::TestCaseGroup(testCtx, orderName, orderDescription));
+
+				for (size_t allocationCountNdx = 0; allocationCountNdx < DE_LENGTH_OF_ARRAY(allocationCounts); allocationCountNdx++)
+				{
+					const int allocationCount = allocationCounts[allocationCountNdx];
+
+					if (allocationCount != -1 && allocationCount * allocationSize > 50 * MiB)
+						continue;
+
+					TestConfig config;
+
+					config.memorySize				= allocationSize;
+					config.order					= order;
+
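+					// A count of -1 selects as many allocations as fit in a 50 MiB budget, capped at MAX_ALLOCATION_COUNT.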
+					if (allocationCount == -1)
+					{
+						if (allocationSize < 4096)
+							continue;
+
+						config.memoryAllocationCount	= de::min((deUint32)(50 * MiB / allocationSize), (deUint32)MAX_ALLOCATION_COUNT);
+
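+						// Skip the derived count if it is zero or duplicates one of the explicit counts above.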
+						if (config.memoryAllocationCount == 0
+							|| config.memoryAllocationCount == 1
+							|| config.memoryAllocationCount == 10
+							|| config.memoryAllocationCount == 100
+							|| config.memoryAllocationCount == 1000)
+							continue;
+					}
+					else
+						config.memoryAllocationCount	= allocationCount;
+
+					orderGroup->addChild(new InstanceFactory1<AllocateFreeTestInstance, TestConfig>(testCtx, tcu::NODETYPE_SELF_VALIDATE, "count_" + de::toString(config.memoryAllocationCount), "", config));
+				}
+
+				sizeGroup->addChild(orderGroup.release());
+			}
+
+			basicGroup->addChild(sizeGroup.release());
+		}
+
+		for (size_t allocationPercentNdx = 0; allocationPercentNdx < DE_LENGTH_OF_ARRAY(allocationPercents); allocationPercentNdx++)
+		{
+			const int						allocationPercent	= allocationPercents[allocationPercentNdx];
+			de::MovePtr<tcu::TestCaseGroup>	percentGroup		(new tcu::TestCaseGroup(testCtx, ("percent_" + de::toString(allocationPercent)).c_str(), ("Test different allocation percents " + de::toString(allocationPercent)).c_str()));
+
+			for (size_t orderNdx = 0; orderNdx < DE_LENGTH_OF_ARRAY(orders); orderNdx++)
+			{
+				const TestConfig::Order			order				= orders[orderNdx].order;
+				const char* const				orderName			= orders[orderNdx].str;
+				const char* const				orderDescription	= orderName;
+				de::MovePtr<tcu::TestCaseGroup>	orderGroup			(new tcu::TestCaseGroup(testCtx, orderName, orderDescription));
+
+				for (size_t allocationCountNdx = 0; allocationCountNdx < DE_LENGTH_OF_ARRAY(allocationCounts); allocationCountNdx++)
+				{
+					const int allocationCount = allocationCounts[allocationCountNdx];
+
+					if ((allocationCount != -1) && ((float)allocationCount * (float)allocationPercent >= 1.00f / 8.00f))
+						continue;
+
+					TestConfig config;
+
+					config.memoryPercentage			= (float)allocationPercent / 100.0f;
+					config.order					= order;
+
+					if (allocationCount == -1)
+					{
+						config.memoryAllocationCount	= de::min((deUint32)((1.00f / 8.00f) / ((float)allocationPercent / 100.0f)), (deUint32)MAX_ALLOCATION_COUNT);
+
+						if (config.memoryAllocationCount == 0
+							|| config.memoryAllocationCount == 1
+							|| config.memoryAllocationCount == 10
+							|| config.memoryAllocationCount == 100
+							|| config.memoryAllocationCount == 1000)
+							continue;
+					}
+					else
+						config.memoryAllocationCount	= allocationCount;
+
+					orderGroup->addChild(new InstanceFactory1<AllocateFreeTestInstance, TestConfig>(testCtx, tcu::NODETYPE_SELF_VALIDATE, "count_" + de::toString(config.memoryAllocationCount), "", config));
+				}
+
+				percentGroup->addChild(orderGroup.release());
+			}
+
+			basicGroup->addChild(percentGroup.release());
+		}
+
+		group->addChild(basicGroup.release());
+	}
+
+	{
+		const deUint32					caseCount	= 100;
+		de::MovePtr<tcu::TestCaseGroup>	randomGroup	(new tcu::TestCaseGroup(testCtx, "random", "Random memory allocation tests."));
+
+		for (deUint32 caseNdx = 0; caseNdx < caseCount; caseNdx++)
+		{
+			const deUint32 seed = deInt32Hash(caseNdx ^ 32480);
+
+			randomGroup->addChild(new InstanceFactory1<RandomAllocFreeTestInstance, deUint32>(testCtx, tcu::NODETYPE_SELF_VALIDATE, de::toString(caseNdx), "Random case", seed));
+		}
+
+		group->addChild(randomGroup.release());
+	}
+
+	return group.release();
+}
+
+} // memory
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/memory/vktMemoryAllocationTests.hpp b/external/vulkancts/modules/vulkan/memory/vktMemoryAllocationTests.hpp
new file mode 100644
index 0000000..7d6427f
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/memory/vktMemoryAllocationTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTMEMORYALLOCATIONTESTS_HPP
+#define _VKTMEMORYALLOCATIONTESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Simple memory allocation tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace memory
+{
+
+tcu::TestCaseGroup* createAllocationTests (tcu::TestContext& testCtx);
+
+} // memory
+} // vkt
+
+#endif // _VKTMEMORYALLOCATIONTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/memory/vktMemoryMappingTests.cpp b/external/vulkancts/modules/vulkan/memory/vktMemoryMappingTests.cpp
new file mode 100644
index 0000000..5d23556
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/memory/vktMemoryMappingTests.cpp
@@ -0,0 +1,999 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Simple memory mapping tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktMemoryMappingTests.hpp"
+
+#include "vktTestCaseUtil.hpp"
+
+#include "tcuMaybe.hpp"
+#include "tcuResultCollector.hpp"
+#include "tcuTestLog.hpp"
+
+#include "vkDeviceUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkStrUtil.hpp"
+
+#include "deRandom.hpp"
+#include "deSharedPtr.hpp"
+#include "deStringUtil.hpp"
+#include "deUniquePtr.hpp"
+
+#include <string>
+#include <vector>
+
+using tcu::Maybe;
+using tcu::TestLog;
+
+using de::SharedPtr;
+
+using std::string;
+using std::vector;
+
+using namespace vk;
+
+namespace vkt
+{
+namespace memory
+{
+namespace
+{
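+// Allocate device memory and wrap it in a Move<> so that it is freed automatically.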
+Move<VkDeviceMemory> allocMemory (const DeviceInterface& vk, VkDevice device, VkDeviceSize pAllocInfo_allocationSize, deUint32 pAllocInfo_memoryTypeIndex)
+{
+	VkDeviceMemory object = 0;
+	const VkMemoryAllocateInfo pAllocInfo =
+	{
+		VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+		DE_NULL,
+		pAllocInfo_allocationSize,
+		pAllocInfo_memoryTypeIndex,
+	};
+	VK_CHECK(vk.allocateMemory(device, &pAllocInfo, (const VkAllocationCallbacks*)DE_NULL, &object));
+	return Move<VkDeviceMemory>(check<VkDeviceMemory>(object), Deleter<VkDeviceMemory>(vk, device, (const VkAllocationCallbacks*)DE_NULL));
+}
+
+struct MemoryRange
+{
+	MemoryRange (VkDeviceSize offset_ = ~(VkDeviceSize)0, VkDeviceSize size_ = ~(VkDeviceSize)0)
+		: offset	(offset_)
+		, size		(size_)
+	{
+	}
+
+	VkDeviceSize	offset;
+	VkDeviceSize	size;
+};
+
+struct TestConfig
+{
+	TestConfig (void)
+		: allocationSize	(~(VkDeviceSize)0)
+	{
+	}
+
+	VkDeviceSize		allocationSize;
+	deUint32			seed;
+
+	MemoryRange			mapping;
+	vector<MemoryRange>	flushMappings;
+	vector<MemoryRange>	invalidateMappings;
+	bool				remap;
+};
+
+bool compareAndLogBuffer (TestLog& log, size_t size, const deUint8* result, const deUint8* reference)
+{
+	size_t	failedBytes	= 0;
+	size_t	firstFailed	= (size_t)-1;
+
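+	// Count mismatching bytes and remember the offset of the first mismatch.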
+	for (size_t ndx = 0; ndx < size; ndx++)
+	{
+		if (result[ndx] != reference[ndx])
+		{
+			failedBytes++;
+
+			if (firstFailed == (size_t)-1)
+				firstFailed = ndx;
+		}
+	}
+
+	if (failedBytes > 0)
+	{
+		log << TestLog::Message << "Comparison failed. Failed bytes " << failedBytes << ". First failed at offset " << firstFailed << "." << TestLog::EndMessage;
+
+		std::ostringstream	expectedValues;
+		std::ostringstream	resultValues;
+
+		for (size_t ndx = firstFailed; ndx < firstFailed + 10 && ndx < size; ndx++)
+		{
+			if (ndx != firstFailed)
+			{
+				expectedValues << ", ";
+				resultValues << ", ";
+			}
+
+			expectedValues << (deUint32)reference[ndx];
+			resultValues << (deUint32)result[ndx];
+		}
+
+		if (firstFailed + 10 < size)
+		{
+			expectedValues << "...";
+			resultValues << "...";
+		}
+
+		log << TestLog::Message << "Expected values at offset: " << firstFailed << ", " << expectedValues.str() << TestLog::EndMessage;
+		log << TestLog::Message << "Result values at offset: " << firstFailed << ", " << resultValues.str() << TestLog::EndMessage;
+
+		return false;
+	}
+	else
+		return true;
+}
+
+tcu::TestStatus testMemoryMapping (Context& context, const TestConfig config)
+{
+	TestLog&								log					= context.getTestContext().getLog();
+	tcu::ResultCollector					result				(log);
+	const VkPhysicalDevice					physicalDevice		= context.getPhysicalDevice();
+	const VkDevice							device				= context.getDevice();
+	const InstanceInterface&				vki					= context.getInstanceInterface();
+	const DeviceInterface&					vkd					= context.getDeviceInterface();
+	const VkPhysicalDeviceMemoryProperties	memoryProperties	= getPhysicalDeviceMemoryProperties(vki, physicalDevice);
+
+	{
+		const tcu::ScopedLogSection	section	(log, "TestCaseInfo", "TestCaseInfo");
+
+		log << TestLog::Message << "Seed: " << config.seed << TestLog::EndMessage;
+		log << TestLog::Message << "Allocation size: " << config.allocationSize << TestLog::EndMessage;
+		log << TestLog::Message << "Mapping, offset: " << config.mapping.offset << ", size: " << config.mapping.size << TestLog::EndMessage;
+
+		if (!config.flushMappings.empty())
+		{
+			log << TestLog::Message << "Flushing the following ranges:" << TestLog::EndMessage;
+
+			for (size_t ndx = 0; ndx < config.flushMappings.size(); ndx++)
+				log << TestLog::Message << "\tOffset: " << config.flushMappings[ndx].offset << ", Size: " << config.flushMappings[ndx].size << TestLog::EndMessage;
+		}
+
+		if (config.remap)
+			log << TestLog::Message << "Remapping memory between flush and invalidation." << TestLog::EndMessage;
+
+		if (!config.invalidateMappings.empty())
+		{
+			log << TestLog::Message << "Invalidating the following ranges:" << TestLog::EndMessage;
+
+			for (size_t ndx = 0; ndx < config.invalidateMappings.size(); ndx++)
+				log << TestLog::Message << "\tOffset: " << config.invalidateMappings[ndx].offset << ", Size: " << config.invalidateMappings[ndx].size << TestLog::EndMessage;
+		}
+	}
+
+	for (deUint32 memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
+	{
+		try
+		{
+			const tcu::ScopedLogSection		section		(log, "MemoryType" + de::toString(memoryTypeIndex), "MemoryType" + de::toString(memoryTypeIndex));
+			const VkMemoryType&				memoryType	= memoryProperties.memoryTypes[memoryTypeIndex];
+			const VkMemoryHeap&				memoryHeap	= memoryProperties.memoryHeaps[memoryType.heapIndex];
+
+			log << TestLog::Message << "MemoryType: " << memoryType << TestLog::EndMessage;
+			log << TestLog::Message << "MemoryHeap: " << memoryHeap << TestLog::EndMessage;
+
+			if ((memoryType.propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
+			{
+				log << TestLog::Message << "Memory type doesn't support mapping." << TestLog::EndMessage;
+			}
+			else
+			{
+				const Unique<VkDeviceMemory>	memory				(allocMemory(vkd, device, config.allocationSize, memoryTypeIndex));
+				de::Random						rng					(config.seed);
+				vector<deUint8>					reference			((size_t)config.allocationSize);
+				deUint8*						mapping				= DE_NULL;
+
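+				// Map the requested range and fill it with random bytes, mirroring every write into the reference buffer.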
+				{
+					void* ptr;
+					VK_CHECK(vkd.mapMemory(device, *memory, config.mapping.offset, config.mapping.size, 0u, &ptr));
+					TCU_CHECK(ptr);
+
+					mapping = (deUint8*)ptr;
+				}
+
+				for (VkDeviceSize ndx = 0; ndx < config.mapping.size; ndx++)
+				{
+					const deUint8 val = rng.getUint8();
+
+					mapping[ndx]										= val;
+					reference[(size_t)(config.mapping.offset + ndx)]	= val;
+				}
+
+				if (!config.flushMappings.empty())
+				{
+					vector<VkMappedMemoryRange> ranges;
+
+					for (size_t ndx = 0; ndx < config.flushMappings.size(); ndx++)
+					{
+						const VkMappedMemoryRange range =
+						{
+							VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+							DE_NULL,
+
+							*memory,
+							config.flushMappings[ndx].offset,
+							config.flushMappings[ndx].size
+						};
+
+						ranges.push_back(range);
+					}
+
+					VK_CHECK(vkd.flushMappedMemoryRanges(device, (deUint32)ranges.size(), &ranges[0]));
+				}
+
+				if (config.remap)
+				{
+					void* ptr;
+					vkd.unmapMemory(device, *memory);
+					VK_CHECK(vkd.mapMemory(device, *memory, config.mapping.offset, config.mapping.size, 0u, &ptr));
+					TCU_CHECK(ptr);
+
+					mapping = (deUint8*)ptr;
+				}
+
+				if (!config.invalidateMappings.empty())
+				{
+					vector<VkMappedMemoryRange> ranges;
+
+					for (size_t ndx = 0; ndx < config.invalidateMappings.size(); ndx++)
+					{
+						const VkMappedMemoryRange range =
+						{
+							VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+							DE_NULL,
+
+							*memory,
+							config.invalidateMappings[ndx].offset,
+							config.invalidateMappings[ndx].size
+						};
+
+						ranges.push_back(range);
+					}
+
+					VK_CHECK(vkd.invalidateMappedMemoryRanges(device, (deUint32)ranges.size(), &ranges[0]));
+				}
+
+				if (!compareAndLogBuffer(log, (size_t)config.mapping.size, mapping, &reference[(size_t)config.mapping.offset]))
+					result.fail("Unexpected values read from mapped memory.");
+
+				vkd.unmapMemory(device, *memory);
+			}
+		}
+		catch (const tcu::TestError& error)
+		{
+			result.fail(error.getMessage());
+		}
+	}
+
+	return tcu::TestStatus(result.getResult(), result.getMessage());
+}
+
+class MemoryMapping
+{
+public:
+				MemoryMapping	(const MemoryRange&	range,
+								 void*				ptr,
+								 deUint16*			refPtr);
+
+	void		randomRead		(de::Random& rng);
+	void		randomWrite		(de::Random& rng);
+	void		randomModify	(de::Random& rng);
+
+private:
+	MemoryRange	m_range;
+	void*		m_ptr;
+	deUint16*	m_refPtr;
+};
+
+MemoryMapping::MemoryMapping (const MemoryRange&	range,
+							  void*					ptr,
+							  deUint16*				refPtr)
+	: m_range	(range)
+	, m_ptr		(ptr)
+	, m_refPtr	(refPtr)
+{
+	DE_ASSERT(range.size > 0);
+}
+
+void MemoryMapping::randomRead (de::Random& rng)
+{
+	const size_t count = (size_t)rng.getInt(0, 100);
+
+	for (size_t ndx = 0; ndx < count; ndx++)
+	{
+		const size_t	pos	= (size_t)(rng.getUint64() % (deUint64)m_range.size);
+		const deUint8	val	= ((deUint8*) m_ptr)[pos];
+
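+		// A reference value >= 256 means the byte contents are still undefined; record the value that was read.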
+		if (m_refPtr[pos] < 256)
+			TCU_CHECK((deUint16)val == m_refPtr[pos]);
+		else
+			m_refPtr[pos] = (deUint16)val;
+	}
+}
+
+void MemoryMapping::randomWrite (de::Random& rng)
+{
+	const size_t count = (size_t)rng.getInt(0, 100);
+
+	for (size_t ndx = 0; ndx < count; ndx++)
+	{
+		const size_t	pos	= (size_t)(rng.getUint64() % (deUint64)m_range.size);
+		const deUint8	val	= rng.getUint8();
+
+		((deUint8*)m_ptr)[pos]	= val;
+		m_refPtr[pos]			= (deUint16)val;
+	}
+}
+
+void MemoryMapping::randomModify (de::Random& rng)
+{
+	const size_t count = (size_t)rng.getInt(0, 100);
+
+	for (size_t ndx = 0; ndx < count; ndx++)
+	{
+		const size_t	pos		= (size_t)(rng.getUint64() % (deUint64)m_range.size);
+		const deUint8	val		= ((deUint8*)m_ptr)[pos];
+		const deUint8	mask	= rng.getUint8();
+
+		if (m_refPtr[pos] < 256)
+			TCU_CHECK((deUint16)val == m_refPtr[pos]);
+
+		((deUint8*)m_ptr)[pos]	= val ^ mask;
+		m_refPtr[pos]			= (deUint16)(val ^ mask);
+	}
+}
+
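+// Fill 'ranges' with 'count' random mapped memory ranges that stay within [0, maxSize).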
+void randomRanges (de::Random& rng, vector<VkMappedMemoryRange>& ranges, size_t count, VkDeviceMemory memory, VkDeviceSize maxSize)
+{
+	ranges.resize(count);
+
+	for (size_t rangeNdx = 0; rangeNdx < count; rangeNdx++)
+	{
+		const VkDeviceSize	size	= (maxSize > 1 ? (VkDeviceSize)(1 + (rng.getUint64() % (deUint64)(maxSize - 1))) : 1);
+		const VkDeviceSize	offset	= (VkDeviceSize)(rng.getUint64() % (deUint64)(maxSize - size + 1));
+
+		const VkMappedMemoryRange range =
+		{
+			VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+			DE_NULL,
+
+			memory,
+			offset,
+			size
+		};
+		ranges[rangeNdx] = range;
+	}
+}
+
+class MemoryObject
+{
+public:
+							MemoryObject		(const DeviceInterface&		vkd,
+												 VkDevice					device,
+												 VkDeviceSize				size,
+												 deUint32					memoryTypeIndex);
+
+							~MemoryObject		(void);
+
+	MemoryMapping*			mapRandom			(const DeviceInterface& vkd, VkDevice device, de::Random& rng);
+	void					unmap				(void);
+
+	void					randomFlush			(const DeviceInterface& vkd, VkDevice device, de::Random& rng);
+	void					randomInvalidate	(const DeviceInterface& vkd, VkDevice device, de::Random& rng);
+
+	VkDeviceSize			getSize				(void) const { return m_size; }
+	MemoryMapping*			getMapping			(void) { return m_mapping; }
+
+private:
+	const DeviceInterface&	m_vkd;
+	VkDevice				m_device;
+
+	deUint32				m_memoryTypeIndex;
+	VkDeviceSize			m_size;
+
+	Move<VkDeviceMemory>	m_memory;
+
+	MemoryMapping*			m_mapping;
+	vector<deUint16>		m_reference;
+};
+
+MemoryObject::MemoryObject (const DeviceInterface&		vkd,
+							VkDevice					device,
+							VkDeviceSize				size,
+							deUint32					memoryTypeIndex)
+	: m_vkd				(vkd)
+	, m_device			(device)
+	, m_memoryTypeIndex	(memoryTypeIndex)
+	, m_size			(size)
+	, m_mapping			(DE_NULL)
+{
+	m_memory = allocMemory(m_vkd, m_device, m_size, m_memoryTypeIndex);
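+	// 0xFFFFu marks bytes whose contents are still undefined; defined byte values are always < 256.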
+	m_reference.resize((size_t)m_size, 0xFFFFu);
+}
+
+MemoryObject::~MemoryObject (void)
+{
+	delete m_mapping;
+}
+
+MemoryMapping* MemoryObject::mapRandom (const DeviceInterface& vkd, VkDevice device, de::Random& rng)
+{
+	const VkDeviceSize	size	= (m_size > 1 ? (VkDeviceSize)(1 + (rng.getUint64() % (deUint64)(m_size - 1))) : 1);
+	const VkDeviceSize	offset	= (VkDeviceSize)(rng.getUint64() % (deUint64)(m_size - size + 1));
+	void*				ptr;
+
+	DE_ASSERT(!m_mapping);
+
+	VK_CHECK(vkd.mapMemory(device, *m_memory, offset, size, 0u, &ptr));
+	TCU_CHECK(ptr);
+	m_mapping = new MemoryMapping(MemoryRange(offset, size), ptr, &(m_reference[(size_t)offset]));
+
+	return m_mapping;
+}
+
+void MemoryObject::unmap (void)
+{
+	m_vkd.unmapMemory(m_device, *m_memory);
+
+	delete m_mapping;
+	m_mapping = DE_NULL;
+}
+
+void MemoryObject::randomFlush (const DeviceInterface& vkd, VkDevice device, de::Random& rng)
+{
+	const size_t				rangeCount	= (size_t)rng.getInt(1, 10);
+	vector<VkMappedMemoryRange>	ranges		(rangeCount);
+
+	randomRanges(rng, ranges, rangeCount, *m_memory, m_size);
+
+	VK_CHECK(vkd.flushMappedMemoryRanges(device, (deUint32)ranges.size(), ranges.empty() ? DE_NULL : &ranges[0]));
+}
+
+void MemoryObject::randomInvalidate (const DeviceInterface& vkd, VkDevice device, de::Random& rng)
+{
+	const size_t				rangeCount	= (size_t)rng.getInt(1, 10);
+	vector<VkMappedMemoryRange>	ranges		(rangeCount);
+
+	randomRanges(rng, ranges, rangeCount, *m_memory, m_size);
+
+	VK_CHECK(vkd.invalidateMappedMemoryRanges(device, (deUint32)ranges.size(), ranges.empty() ? DE_NULL : &ranges[0]));
+}
+
+enum
+{
+	// Use only 1/16 of each memory heap.
+	MAX_MEMORY_USAGE_DIV = 16
+};
+
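+// Remove the first element equal to 'val' by swapping in the last element (order is not preserved).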
+template<typename T>
+void removeFirstEqual (vector<T>& vec, const T& val)
+{
+	for (size_t ndx = 0; ndx < vec.size(); ndx++)
+	{
+		if (vec[ndx] == val)
+		{
+			vec[ndx] = vec.back();
+			vec.pop_back();
+			return;
+		}
+	}
+}
+
+class MemoryHeap
+{
+public:
+	MemoryHeap (const VkMemoryHeap&			heap,
+				const vector<deUint32>&		memoryTypes)
+		: m_heap		(heap)
+		, m_memoryTypes	(memoryTypes)
+		, m_usage		(0)
+	{
+	}
+
+	~MemoryHeap (void)
+	{
+		for (vector<MemoryObject*>::iterator iter = m_objects.begin(); iter != m_objects.end(); ++iter)
+			delete *iter;
+	}
+
+	bool								full			(void) const { return m_usage * MAX_MEMORY_USAGE_DIV >= m_heap.size; }
+	bool								empty			(void) const { return m_usage == 0; }
+
+	MemoryObject*						allocateRandom	(const DeviceInterface& vkd, VkDevice device, de::Random& rng)
+	{
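+		// Pick a random size within the remaining 1/16-of-heap budget and a random memory type from this heap.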
+		const VkDeviceSize		size	= 1 + (rng.getUint64() % (de::max((deInt64)((m_heap.size / MAX_MEMORY_USAGE_DIV) - m_usage - 1ull), (deInt64)1)));
+		const deUint32			type	= rng.choose<deUint32>(m_memoryTypes.begin(), m_memoryTypes.end());
+
+		if ((size > (VkDeviceSize)((m_heap.size / MAX_MEMORY_USAGE_DIV) - m_usage)) && (size != 1))
+			TCU_THROW(InternalError, "Test error: trying to allocate more memory than is available in the heap.");
+
+		MemoryObject* const		object	= new MemoryObject(vkd, device, size, type);
+
+		m_usage += size;
+		m_objects.push_back(object);
+
+		return object;
+	}
+
+	MemoryObject*						getRandomObject	(de::Random& rng) const
+	{
+		return rng.choose<MemoryObject*>(m_objects.begin(), m_objects.end());
+	}
+
+	void								free			(MemoryObject* object)
+	{
+		removeFirstEqual(m_objects, object);
+		m_usage -= object->getSize();
+		delete object;
+	}
+
+private:
+	VkMemoryHeap			m_heap;
+	vector<deUint32>		m_memoryTypes;
+
+	VkDeviceSize			m_usage;
+	vector<MemoryObject*>	m_objects;
+};
+
+class RandomMemoryMappingInstance : public TestInstance
+{
+public:
+	RandomMemoryMappingInstance (Context& context, deUint32 seed)
+		: TestInstance	(context)
+		, m_rng			(seed)
+		, m_opNdx		(0)
+	{
+		const VkPhysicalDevice					physicalDevice		= context.getPhysicalDevice();
+		const InstanceInterface&				vki					= context.getInstanceInterface();
+		const VkPhysicalDeviceMemoryProperties	memoryProperties	= getPhysicalDeviceMemoryProperties(vki, physicalDevice);
+
+		// Initialize heaps
+		{
+			vector<vector<deUint32> >	memoryTypes	(memoryProperties.memoryHeapCount);
+
+			for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < memoryProperties.memoryTypeCount; memoryTypeNdx++)
+			{
+				if (memoryProperties.memoryTypes[memoryTypeNdx].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
+					memoryTypes[memoryProperties.memoryTypes[memoryTypeNdx].heapIndex].push_back(memoryTypeNdx);
+			}
+
+			for (deUint32 heapIndex = 0; heapIndex < memoryProperties.memoryHeapCount; heapIndex++)
+			{
+				const VkMemoryHeap	heapInfo	= memoryProperties.memoryHeaps[heapIndex];
+
+				if (!memoryTypes[heapIndex].empty())
+				{
+					const de::SharedPtr<MemoryHeap>	heap	(new MemoryHeap(heapInfo, memoryTypes[heapIndex]));
+
+					if (!heap->full())
+						m_nonFullHeaps.push_back(heap);
+				}
+			}
+		}
+	}
+
+	~RandomMemoryMappingInstance (void)
+	{
+	}
+
+	tcu::TestStatus iterate (void)
+	{
+		const size_t			opCount						= 100;
+		const float				memoryOpProbability			= 0.5f;		// 0.50
+		const float				flushInvalidateProbability	= 0.4f;		// 0.20
+		const float				mapProbability				= 0.50f;	// 0.15
+		const float				unmapProbability			= 0.25f;	// 0.075
+
+		const float				allocProbability			= 0.75f; // Versus free
+
+		const VkDevice			device						= m_context.getDevice();
+		const DeviceInterface&	vkd							= m_context.getDeviceInterface();
+
+		if (m_opNdx < opCount)
+		{
+			if (!m_memoryMappings.empty() && m_rng.getFloat() < memoryOpProbability)
+			{
+				// Perform operations on mapped memory
+				MemoryMapping* const	mapping	= m_rng.choose<MemoryMapping*>(m_memoryMappings.begin(), m_memoryMappings.end());
+
+				enum Op
+				{
+					OP_READ = 0,
+					OP_WRITE,
+					OP_MODIFY,
+					OP_LAST
+				};
+
+				const Op op = (Op)(m_rng.getUint32() % OP_LAST);
+
+				switch (op)
+				{
+					case OP_READ:
+						mapping->randomRead(m_rng);
+						break;
+
+					case OP_WRITE:
+						mapping->randomWrite(m_rng);
+						break;
+
+					case OP_MODIFY:
+						mapping->randomModify(m_rng);
+						break;
+
+					default:
+						DE_FATAL("Invalid operation");
+				}
+			}
+			else if (!m_mappedMemoryObjects.empty() && m_rng.getFloat() < flushInvalidateProbability)
+			{
+				MemoryObject* const	object	= m_rng.choose<MemoryObject*>(m_mappedMemoryObjects.begin(), m_mappedMemoryObjects.end());
+
+				if (m_rng.getBool())
+					object->randomFlush(vkd, device, m_rng);
+				else
+					object->randomInvalidate(vkd, device, m_rng);
+			}
+			else if (!m_mappedMemoryObjects.empty() && m_rng.getFloat() < unmapProbability)
+			{
+				// Unmap memory object
+				MemoryObject* const	object	= m_rng.choose<MemoryObject*>(m_mappedMemoryObjects.begin(), m_mappedMemoryObjects.end());
+
+				// Remove mapping
+				removeFirstEqual(m_memoryMappings, object->getMapping());
+
+				object->unmap();
+				removeFirstEqual(m_mappedMemoryObjects, object);
+				m_nonMappedMemoryObjects.push_back(object);
+			}
+			else if (!m_nonMappedMemoryObjects.empty() && m_rng.getFloat() < mapProbability)
+			{
+				// Map memory object
+				MemoryObject* const		object	= m_rng.choose<MemoryObject*>(m_nonMappedMemoryObjects.begin(), m_nonMappedMemoryObjects.end());
+				MemoryMapping*			mapping	= object->mapRandom(vkd, device, m_rng);
+
+				m_memoryMappings.push_back(mapping);
+				m_mappedMemoryObjects.push_back(object);
+				removeFirstEqual(m_nonMappedMemoryObjects, object);
+			}
+			else
+			{
+				if (!m_nonFullHeaps.empty() && (m_nonEmptyHeaps.empty() || m_rng.getFloat() < allocProbability))
+				{
+					// Allocate more memory objects
+					de::SharedPtr<MemoryHeap> const heap = m_rng.choose<de::SharedPtr<MemoryHeap> >(m_nonFullHeaps.begin(), m_nonFullHeaps.end());
+
+					if (heap->empty())
+						m_nonEmptyHeaps.push_back(heap);
+
+					{
+						MemoryObject* const	object = heap->allocateRandom(vkd, device, m_rng);
+
+						if (heap->full())
+							removeFirstEqual(m_nonFullHeaps, heap);
+
+						m_nonMappedMemoryObjects.push_back(object);
+					}
+				}
+				else
+				{
+					// Free memory objects
+					de::SharedPtr<MemoryHeap> const		heap	= m_rng.choose<de::SharedPtr<MemoryHeap> >(m_nonEmptyHeaps.begin(), m_nonEmptyHeaps.end());
+					MemoryObject* const					object	= heap->getRandomObject(m_rng);
+
+					// Remove mapping
+					if (object->getMapping())
+						removeFirstEqual(m_memoryMappings, object->getMapping());
+
+					removeFirstEqual(m_mappedMemoryObjects, object);
+					removeFirstEqual(m_nonMappedMemoryObjects, object);
+
+					if (heap->full())
+						m_nonFullHeaps.push_back(heap);
+
+					heap->free(object);
+
+					if (heap->empty())
+						removeFirstEqual(m_nonEmptyHeaps, heap);
+				}
+			}
+
+			m_opNdx++;
+			return tcu::TestStatus::incomplete();
+		}
+		else
+			return tcu::TestStatus::pass("Pass");
+	}
+
+private:
+	de::Random							m_rng;
+	size_t								m_opNdx;
+
+	vector<de::SharedPtr<MemoryHeap> >	m_nonEmptyHeaps;
+	vector<de::SharedPtr<MemoryHeap> >	m_nonFullHeaps;
+
+	vector<MemoryObject*>				m_mappedMemoryObjects;
+	vector<MemoryObject*>				m_nonMappedMemoryObjects;
+	vector<MemoryMapping*>				m_memoryMappings;
+};
+
+enum Op
+{
+	OP_NONE = 0,
+
+	OP_FLUSH,
+	OP_SUB_FLUSH,
+	OP_SUB_FLUSH_SEPARATE,
+	OP_SUB_FLUSH_OVERLAPPING,
+
+	OP_INVALIDATE,
+	OP_SUB_INVALIDATE,
+	OP_SUB_INVALIDATE_SEPARATE,
+	OP_SUB_INVALIDATE_OVERLAPPING,
+
+	OP_REMAP,
+
+	OP_LAST
+};
+
+TestConfig subMappedConfig (VkDeviceSize				allocationSize,
+							const MemoryRange&			mapping,
+							Op							op,
+							deUint32					seed)
+{
+	TestConfig config;
+
+	config.allocationSize	= allocationSize;
+	config.seed				= seed;
+	config.mapping			= mapping;
+	config.remap			= false;
+
+	switch (op)
+	{
+		case OP_NONE:
+			return config;
+
+		case OP_REMAP:
+			config.remap = true;
+			return config;
+
+		case OP_FLUSH:
+			config.flushMappings = vector<MemoryRange>(1, MemoryRange(0, allocationSize));
+			return config;
+
+		case OP_SUB_FLUSH:
+			DE_ASSERT(allocationSize / 4 > 0);
+
+			config.flushMappings = vector<MemoryRange>(1, MemoryRange(allocationSize / 4, allocationSize / 2));
+			return config;
+
+		case OP_SUB_FLUSH_SEPARATE:
+			DE_ASSERT(allocationSize / 2 > 0);
+
+			config.flushMappings.push_back(MemoryRange(allocationSize /  2, allocationSize - (allocationSize / 2)));
+			config.flushMappings.push_back(MemoryRange(0, allocationSize / 2));
+
+			return config;
+
+		case OP_SUB_FLUSH_OVERLAPPING:
+			DE_ASSERT((allocationSize / 3) > 0);
+
+			config.flushMappings.push_back(MemoryRange(allocationSize /  3, allocationSize - (allocationSize / 2)));
+			config.flushMappings.push_back(MemoryRange(0, (2 * allocationSize) / 3));
+
+			return config;
+
+		case OP_INVALIDATE:
+			config.invalidateMappings = vector<MemoryRange>(1, MemoryRange(0, allocationSize));
+			return config;
+
+		case OP_SUB_INVALIDATE:
+			DE_ASSERT(allocationSize / 4 > 0);
+
+			config.invalidateMappings = vector<MemoryRange>(1, MemoryRange(allocationSize / 4, allocationSize / 2));
+			return config;
+
+		case OP_SUB_INVALIDATE_SEPARATE:
+			DE_ASSERT(allocationSize / 2 > 0);
+
+			config.invalidateMappings.push_back(MemoryRange(allocationSize /  2, allocationSize - (allocationSize / 2)));
+			config.invalidateMappings.push_back(MemoryRange(0, allocationSize / 2));
+
+			return config;
+
+		case OP_SUB_INVALIDATE_OVERLAPPING:
+			DE_ASSERT((allocationSize / 3) > 0);
+
+			config.invalidateMappings.push_back(MemoryRange(allocationSize /  3, allocationSize - (allocationSize / 2)));
+			config.invalidateMappings.push_back(MemoryRange(0, (2 * allocationSize) / 3));
+
+			return config;
+
+		default:
+			DE_FATAL("Unknown Op");
+			return TestConfig();
+	}
+}
+
+TestConfig fullMappedConfig (VkDeviceSize	allocationSize,
+							 Op				op,
+							 deUint32		seed)
+{
+	return subMappedConfig(allocationSize, MemoryRange(0, allocationSize), op, seed);
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createMappingTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> group (new tcu::TestCaseGroup(testCtx, "mapping", "Memory mapping tests."));
+
+	const VkDeviceSize allocationSizes[] =
+	{
+		33, 257, 4087, 8095, 1*1024*1024 + 1
+	};
+
+	const VkDeviceSize offsets[] =
+	{
+		0, 17, 129, 255, 1025, 32*1024+1
+	};
+
+	const VkDeviceSize sizes[] =
+	{
+		31, 255, 1025, 4085, 1*1024*1024 - 1
+	};
+
+	const struct
+	{
+		const Op			op;
+		const char* const	name;
+	} ops[] =
+	{
+		{ OP_NONE,						"simple"					},
+		{ OP_REMAP,						"remap"						},
+		{ OP_FLUSH,						"flush"						},
+		{ OP_SUB_FLUSH,					"subflush"					},
+		{ OP_SUB_FLUSH_SEPARATE,		"subflush_separate"			},
+		{ OP_SUB_FLUSH_OVERLAPPING,		"subflush_overlapping"		},
+
+		{ OP_INVALIDATE,				"invalidate"				},
+		{ OP_SUB_INVALIDATE,			"subinvalidate"				},
+		{ OP_SUB_INVALIDATE_SEPARATE,	"subinvalidate_separate"	},
+		{ OP_SUB_INVALIDATE_OVERLAPPING,	"subinvalidate_overlapping"	}
+	};
+
+	// .full
+	{
+		de::MovePtr<tcu::TestCaseGroup> fullGroup (new tcu::TestCaseGroup(testCtx, "full", "Map memory completely."));
+
+		for (size_t allocationSizeNdx = 0; allocationSizeNdx < DE_LENGTH_OF_ARRAY(allocationSizes); allocationSizeNdx++)
+		{
+			const VkDeviceSize				allocationSize		= allocationSizes[allocationSizeNdx];
+			de::MovePtr<tcu::TestCaseGroup>	allocationSizeGroup	(new tcu::TestCaseGroup(testCtx, de::toString(allocationSize).c_str(), ""));
+
+			for (size_t opNdx = 0; opNdx < DE_LENGTH_OF_ARRAY(ops); opNdx++)
+			{
+				const Op			op		= ops[opNdx].op;
+				const char* const	name	= ops[opNdx].name;
+				const deUint32		seed	= (deUint32)(opNdx * allocationSizeNdx);
+				const TestConfig	config	= fullMappedConfig(allocationSize, op, seed);
+
+				addFunctionCase(allocationSizeGroup.get(), name, name, testMemoryMapping, config);
+			}
+
+			fullGroup->addChild(allocationSizeGroup.release());
+		}
+
+		group->addChild(fullGroup.release());
+	}
+
+	// .sub
+	{
+		de::MovePtr<tcu::TestCaseGroup> subGroup (new tcu::TestCaseGroup(testCtx, "sub", "Map part of the memory."));
+
+		for (size_t allocationSizeNdx = 0; allocationSizeNdx < DE_LENGTH_OF_ARRAY(allocationSizes); allocationSizeNdx++)
+		{
+			const VkDeviceSize				allocationSize		= allocationSizes[allocationSizeNdx];
+			de::MovePtr<tcu::TestCaseGroup>	allocationSizeGroup	(new tcu::TestCaseGroup(testCtx, de::toString(allocationSize).c_str(), ""));
+
+			for (size_t offsetNdx = 0; offsetNdx < DE_LENGTH_OF_ARRAY(offsets); offsetNdx++)
+			{
+				const VkDeviceSize				offset			= offsets[offsetNdx];
+
+				if (offset >= allocationSize)
+					continue;
+
+				de::MovePtr<tcu::TestCaseGroup>	offsetGroup		(new tcu::TestCaseGroup(testCtx, ("offset_" + de::toString(offset)).c_str(), ""));
+
+				for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
+				{
+					const VkDeviceSize				size		= sizes[sizeNdx];
+
+					if (offset + size > allocationSize)
+						continue;
+
+					if (offset == 0 && size == allocationSize)
+						continue;
+
+					de::MovePtr<tcu::TestCaseGroup>	sizeGroup	(new tcu::TestCaseGroup(testCtx, ("size_" + de::toString(size)).c_str(), ""));
+
+					for (size_t opNdx = 0; opNdx < DE_LENGTH_OF_ARRAY(ops); opNdx++)
+					{
+						const deUint32		seed	= (deUint32)(opNdx * allocationSizeNdx);
+						const Op			op		= ops[opNdx].op;
+						const char* const	name	= ops[opNdx].name;
+						const TestConfig	config	= subMappedConfig(allocationSize, MemoryRange(offset, size), op, seed);
+
+						addFunctionCase(sizeGroup.get(), name, name, testMemoryMapping, config);
+					}
+
+					offsetGroup->addChild(sizeGroup.release());
+				}
+
+				allocationSizeGroup->addChild(offsetGroup.release());
+			}
+
+			subGroup->addChild(allocationSizeGroup.release());
+		}
+
+		group->addChild(subGroup.release());
+	}
+
+	// .random
+	{
+		de::MovePtr<tcu::TestCaseGroup>	randomGroup	(new tcu::TestCaseGroup(testCtx, "random", "Random memory mapping tests."));
+		de::Random						rng			(3927960301u);
+
+		for (size_t ndx = 0; ndx < 100; ndx++)
+		{
+			const deUint32		seed	= rng.getUint32();
+			const std::string	name	= de::toString(ndx);
+
+			randomGroup->addChild(new InstanceFactory1<RandomMemoryMappingInstance, deUint32>(testCtx, tcu::NODETYPE_SELF_VALIDATE, name, "Random case", seed));
+		}
+
+		group->addChild(randomGroup.release());
+	}
+
+	return group.release();
+}
+
+} // memory
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/memory/vktMemoryMappingTests.hpp b/external/vulkancts/modules/vulkan/memory/vktMemoryMappingTests.hpp
new file mode 100644
index 0000000..c4279de
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/memory/vktMemoryMappingTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTMEMORYMAPPINGTESTS_HPP
+#define _VKTMEMORYMAPPINGTESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Simple memory mapping tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace memory
+{
+
+tcu::TestCaseGroup* createMappingTests (tcu::TestContext& testCtx);
+
+} // memory
+} // vkt
+
+#endif // _VKTMEMORYMAPPINGTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/memory/vktMemoryPipelineBarrierTests.cpp b/external/vulkancts/modules/vulkan/memory/vktMemoryPipelineBarrierTests.cpp
new file mode 100644
index 0000000..a7a20f7
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/memory/vktMemoryPipelineBarrierTests.cpp
@@ -0,0 +1,6548 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Pipeline barrier tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktMemoryPipelineBarrierTests.hpp"
+
+#include "vktTestCaseUtil.hpp"
+
+#include "vkDefs.hpp"
+#include "vkPlatform.hpp"
+#include "vkRefUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkTypeUtil.hpp"
+#include "vkPrograms.hpp"
+
+#include "tcuMaybe.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuResultCollector.hpp"
+#include "tcuTexture.hpp"
+#include "tcuImageCompare.hpp"
+
+#include "deUniquePtr.hpp"
+#include "deStringUtil.hpp"
+#include "deRandom.hpp"
+
+#include "deMemory.h"
+#include "deMath.h"
+
+#include <map>
+#include <set>
+#include <sstream>
+#include <string>
+#include <vector>
+
+// \todo Check bufferImageGranularity
+
+using tcu::TestLog;
+using tcu::Maybe;
+
+using std::string;
+using std::vector;
+using std::map;
+using std::set;
+using std::pair;
+
+using tcu::IVec2;
+using tcu::UVec4;
+using tcu::Vec4;
+using tcu::ConstPixelBufferAccess;
+using tcu::PixelBufferAccess;
+using tcu::TextureFormat;
+using tcu::TextureLevel;
+
+namespace vkt
+{
+namespace memory
+{
+namespace
+{
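+// Mask covering all core Vulkan pipeline stages.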
+enum
+{
+	ALL_PIPELINE_STAGES = vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT
+						| vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT
+						| vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT
+						| vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT
+						| vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
+						| vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
+						| vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
+						| vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
+						| vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
+						| vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
+						| vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT
+						| vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
+						| vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT
+						| vk::VK_PIPELINE_STAGE_TRANSFER_BIT
+						| vk::VK_PIPELINE_STAGE_HOST_BIT
+};
+
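+// Mask covering all core Vulkan access flags.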
+enum
+{
+	ALL_ACCESSES = vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT
+				 | vk::VK_ACCESS_INDEX_READ_BIT
+				 | vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT
+				 | vk::VK_ACCESS_UNIFORM_READ_BIT
+				 | vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT
+				 | vk::VK_ACCESS_SHADER_READ_BIT
+				 | vk::VK_ACCESS_SHADER_WRITE_BIT
+				 | vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT
+				 | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
+				 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
+				 | vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
+				 | vk::VK_ACCESS_TRANSFER_READ_BIT
+				 | vk::VK_ACCESS_TRANSFER_WRITE_BIT
+				 | vk::VK_ACCESS_HOST_READ_BIT
+				 | vk::VK_ACCESS_HOST_WRITE_BIT
+				 | vk::VK_ACCESS_MEMORY_READ_BIT
+				 | vk::VK_ACCESS_MEMORY_WRITE_BIT
+};
+
+enum Usage
+{
+	// Mapped host read and write
+	USAGE_HOST_READ = (0x1u<<0),
+	USAGE_HOST_WRITE = (0x1u<<1),
+
+	// Copy and other transfer operations
+	USAGE_TRANSFER_SRC = (0x1u<<2),
+	USAGE_TRANSFER_DST = (0x1u<<3),
+
+	// Buffer usage flags
+	USAGE_INDEX_BUFFER = (0x1u<<4),
+	USAGE_VERTEX_BUFFER = (0x1u<<5),
+
+	USAGE_UNIFORM_BUFFER = (0x1u<<6),
+	USAGE_STORAGE_BUFFER = (0x1u<<7),
+
+	USAGE_UNIFORM_TEXEL_BUFFER = (0x1u<<8),
+	USAGE_STORAGE_TEXEL_BUFFER = (0x1u<<9),
+
+	// \todo This is probably almost impossible to do
+	USAGE_INDIRECT_BUFFER = (0x1u<<10),
+
+	// Texture usage flags
+	USAGE_TEXTURE_SAMPLED = (0x1u<<11),
+	USAGE_TEXTURE_STORAGE = (0x1u<<12),
+	USAGE_COLOR_ATTACHMENT = (0x1u<<13),
+	USAGE_INPUT_ATTACHMENT = (0x1u<<14),
+	USAGE_DEPTH_STENCIL_ATTACHMENT = (0x1u<<15),
+};
+
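+// True if the given usage allows the device to write to the buffer.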
+bool supportsDeviceBufferWrites (Usage usage)
+{
+	if (usage & USAGE_TRANSFER_DST)
+		return true;
+
+	if (usage & USAGE_STORAGE_BUFFER)
+		return true;
+
+	if (usage & USAGE_STORAGE_TEXEL_BUFFER)
+		return true;
+
+	return false;
+}
+
+bool supportsDeviceImageWrites (Usage usage)
+{
+	if (usage & USAGE_TRANSFER_DST)
+		return true;
+
+	if (usage & USAGE_TEXTURE_STORAGE)
+		return true;
+
+	if (usage & USAGE_COLOR_ATTACHMENT)
+		return true;
+
+	return false;
+}
+
+// Sequential access enums
+enum Access
+{
+	ACCESS_INDIRECT_COMMAND_READ_BIT = 0,
+	ACCESS_INDEX_READ_BIT,
+	ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
+	ACCESS_UNIFORM_READ_BIT,
+	ACCESS_INPUT_ATTACHMENT_READ_BIT,
+	ACCESS_SHADER_READ_BIT,
+	ACCESS_SHADER_WRITE_BIT,
+	ACCESS_COLOR_ATTACHMENT_READ_BIT,
+	ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+	ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
+	ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+	ACCESS_TRANSFER_READ_BIT,
+	ACCESS_TRANSFER_WRITE_BIT,
+	ACCESS_HOST_READ_BIT,
+	ACCESS_HOST_WRITE_BIT,
+	ACCESS_MEMORY_READ_BIT,
+	ACCESS_MEMORY_WRITE_BIT,
+
+	ACCESS_LAST
+};
+
+// Sequential stage enums
+enum PipelineStage
+{
+	PIPELINESTAGE_TOP_OF_PIPE_BIT = 0,
+	PIPELINESTAGE_BOTTOM_OF_PIPE_BIT,
+	PIPELINESTAGE_DRAW_INDIRECT_BIT,
+	PIPELINESTAGE_VERTEX_INPUT_BIT,
+	PIPELINESTAGE_VERTEX_SHADER_BIT,
+	PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT,
+	PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT,
+	PIPELINESTAGE_GEOMETRY_SHADER_BIT,
+	PIPELINESTAGE_FRAGMENT_SHADER_BIT,
+	PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT,
+	PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT,
+	PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+	PIPELINESTAGE_COMPUTE_SHADER_BIT,
+	PIPELINESTAGE_TRANSFER_BIT,
+	PIPELINESTAGE_HOST_BIT,
+
+	PIPELINESTAGE_LAST
+};
+
+PipelineStage pipelineStageFlagToPipelineStage (vk::VkPipelineStageFlagBits flags)
+{
+	switch (flags)
+	{
+		case vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT:						return PIPELINESTAGE_TOP_OF_PIPE_BIT;
+		case vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT:					return PIPELINESTAGE_BOTTOM_OF_PIPE_BIT;
+		case vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT:					return PIPELINESTAGE_DRAW_INDIRECT_BIT;
+		case vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT:					return PIPELINESTAGE_VERTEX_INPUT_BIT;
+		case vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT:					return PIPELINESTAGE_VERTEX_SHADER_BIT;
+		case vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT:		return PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT;
+		case vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT:	return PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT;
+		case vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT:					return PIPELINESTAGE_GEOMETRY_SHADER_BIT;
+		case vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT:					return PIPELINESTAGE_FRAGMENT_SHADER_BIT;
+		case vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT:			return PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT;
+		case vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT:				return PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT;
+		case vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT:			return PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+		case vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT:					return PIPELINESTAGE_COMPUTE_SHADER_BIT;
+		case vk::VK_PIPELINE_STAGE_TRANSFER_BIT:						return PIPELINESTAGE_TRANSFER_BIT;
+		case vk::VK_PIPELINE_STAGE_HOST_BIT:							return PIPELINESTAGE_HOST_BIT;
+
+		default:
+			DE_FATAL("Unknown pipeline stage flags");
+			return PIPELINESTAGE_LAST;
+	}
+}
+
+Usage operator| (Usage a, Usage b)
+{
+	return (Usage)((deUint32)a | (deUint32)b);
+}
+
+Usage operator& (Usage a, Usage b)
+{
+	return (Usage)((deUint32)a & (deUint32)b);
+}
+
+string usageToName (Usage usage)
+{
+	const struct
+	{
+		Usage				usage;
+		const char* const	name;
+	} usageNames[] =
+	{
+		{ USAGE_HOST_READ,					"host_read" },
+		{ USAGE_HOST_WRITE,					"host_write" },
+
+		{ USAGE_TRANSFER_SRC,				"transfer_src" },
+		{ USAGE_TRANSFER_DST,				"transfer_dst" },
+
+		{ USAGE_INDEX_BUFFER,				"index_buffer" },
+		{ USAGE_VERTEX_BUFFER,				"vertex_buffer" },
+		{ USAGE_UNIFORM_BUFFER,				"uniform_buffer" },
+		{ USAGE_STORAGE_BUFFER,				"storage_buffer" },
+		{ USAGE_UNIFORM_TEXEL_BUFFER,		"uniform_texel_buffer" },
+		{ USAGE_STORAGE_TEXEL_BUFFER,		"storage_texel_buffer" },
+		{ USAGE_INDIRECT_BUFFER,			"indirect_buffer" },
+		{ USAGE_TEXTURE_SAMPLED,			"sampled_texture" },
+		{ USAGE_TEXTURE_STORAGE,			"texture_storage" },
+		{ USAGE_COLOR_ATTACHMENT,			"color_attachment" },
+		{ USAGE_INPUT_ATTACHMENT,			"input_attachment" },
+		{ USAGE_DEPTH_STENCIL_ATTACHMENT,	"depth_stencil_attachment" },
+	};
+
+	std::ostringstream	stream;
+	bool				first = true;
+
+	for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usageNames); usageNdx++)
+	{
+		if (usage & usageNames[usageNdx].usage)
+		{
+			if (!first)
+				stream << "_";
+			else
+				first = false;
+
+			stream << usageNames[usageNdx].name;
+		}
+	}
+
+	return stream.str();
+}
+
+vk::VkBufferUsageFlags usageToBufferUsageFlags (Usage usage)
+{
+	vk::VkBufferUsageFlags flags = 0;
+
+	if (usage & USAGE_TRANSFER_SRC)
+		flags |= vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
+
+	if (usage & USAGE_TRANSFER_DST)
+		flags |= vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT;
+
+	if (usage & USAGE_INDEX_BUFFER)
+		flags |= vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
+
+	if (usage & USAGE_VERTEX_BUFFER)
+		flags |= vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
+
+	if (usage & USAGE_INDIRECT_BUFFER)
+		flags |= vk::VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
+
+	if (usage & USAGE_UNIFORM_BUFFER)
+		flags |= vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
+
+	if (usage & USAGE_STORAGE_BUFFER)
+		flags |= vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+
+	if (usage & USAGE_UNIFORM_TEXEL_BUFFER)
+		flags |= vk::VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
+
+	if (usage & USAGE_STORAGE_TEXEL_BUFFER)
+		flags |= vk::VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
+
+	return flags;
+}
+
+vk::VkImageUsageFlags usageToImageUsageFlags (Usage usage)
+{
+	vk::VkImageUsageFlags flags = 0;
+
+	if (usage & USAGE_TRANSFER_SRC)
+		flags |= vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
+
+	if (usage & USAGE_TRANSFER_DST)
+		flags |= vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+
+	if (usage & USAGE_TEXTURE_SAMPLED)
+		flags |= vk::VK_IMAGE_USAGE_SAMPLED_BIT;
+
+	if (usage & USAGE_TEXTURE_STORAGE)
+		flags |= vk::VK_IMAGE_USAGE_STORAGE_BIT;
+
+	if (usage & USAGE_COLOR_ATTACHMENT)
+		flags |= vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+
+	if (usage & USAGE_INPUT_ATTACHMENT)
+		flags |= vk::VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
+
+	if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
+		flags |= vk::VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+
+	return flags;
+}
+
+vk::VkPipelineStageFlags usageToStageFlags (Usage usage)
+{
+	vk::VkPipelineStageFlags flags = 0;
+
+	if (usage & (USAGE_HOST_READ|USAGE_HOST_WRITE))
+		flags |= vk::VK_PIPELINE_STAGE_HOST_BIT;
+
+	if (usage & (USAGE_TRANSFER_SRC|USAGE_TRANSFER_DST))
+		flags |= vk::VK_PIPELINE_STAGE_TRANSFER_BIT;
+
+	if (usage & (USAGE_VERTEX_BUFFER|USAGE_INDEX_BUFFER))
+		flags |= vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
+
+	if (usage & USAGE_INDIRECT_BUFFER)
+		flags |= vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
+
+	if (usage &
+			(USAGE_UNIFORM_BUFFER
+			| USAGE_STORAGE_BUFFER
+			| USAGE_UNIFORM_TEXEL_BUFFER
+			| USAGE_STORAGE_TEXEL_BUFFER
+			| USAGE_TEXTURE_SAMPLED
+			| USAGE_TEXTURE_STORAGE))
+	{
+		flags |= (vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
+				| vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
+				| vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
+				| vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
+				| vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
+				| vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
+	}
+
+	if (usage & USAGE_INPUT_ATTACHMENT)
+		flags |= vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
+
+	if (usage & USAGE_COLOR_ATTACHMENT)
+		flags |= vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+
+	if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
+	{
+		flags |= vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
+				| vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
+	}
+
+	return flags;
+}
+
+vk::VkAccessFlags usageToAccessFlags (Usage usage)
+{
+	vk::VkAccessFlags flags = 0;
+
+	if (usage & USAGE_HOST_READ)
+		flags |= vk::VK_ACCESS_HOST_READ_BIT;
+
+	if (usage & USAGE_HOST_WRITE)
+		flags |= vk::VK_ACCESS_HOST_WRITE_BIT;
+
+	if (usage & USAGE_TRANSFER_SRC)
+		flags |= vk::VK_ACCESS_TRANSFER_READ_BIT;
+
+	if (usage & USAGE_TRANSFER_DST)
+		flags |= vk::VK_ACCESS_TRANSFER_WRITE_BIT;
+
+	if (usage & USAGE_INDEX_BUFFER)
+		flags |= vk::VK_ACCESS_INDEX_READ_BIT;
+
+	if (usage & USAGE_VERTEX_BUFFER)
+		flags |= vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
+
+	if (usage & (USAGE_UNIFORM_BUFFER | USAGE_UNIFORM_TEXEL_BUFFER))
+		flags |= vk::VK_ACCESS_UNIFORM_READ_BIT;
+
+	if (usage & (USAGE_STORAGE_BUFFER
+				| USAGE_STORAGE_TEXEL_BUFFER
+				| USAGE_TEXTURE_SAMPLED
+				| USAGE_TEXTURE_STORAGE))
+		flags |= vk::VK_ACCESS_SHADER_READ_BIT | vk::VK_ACCESS_SHADER_WRITE_BIT;
+
+	if (usage & USAGE_INDIRECT_BUFFER)
+		flags |= vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
+
+	if (usage & USAGE_COLOR_ATTACHMENT)
+		flags |= vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+
+	if (usage & USAGE_INPUT_ATTACHMENT)
+		flags |= vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
+
+	if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
+		flags |= vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
+			| vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
+
+	return flags;
+}
+
+struct TestConfig
+{
+	Usage					usage;
+	vk::VkDeviceSize		size;
+	vk::VkSharingMode		sharing;
+};
+
+vk::Move<vk::VkCommandBuffer> createCommandBuffer (const vk::DeviceInterface&	vkd,
+												   vk::VkDevice					device,
+												   vk::VkCommandPool			pool,
+												   vk::VkCommandBufferLevel		level)
+{
+	const vk::VkCommandBufferAllocateInfo bufferInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+		DE_NULL,
+
+		pool,
+		level,
+		1u
+	};
+
+	return vk::allocateCommandBuffer(vkd, device, &bufferInfo);
+}
+
+vk::Move<vk::VkCommandBuffer> createBeginCommandBuffer (const vk::DeviceInterface&	vkd,
+														vk::VkDevice				device,
+														vk::VkCommandPool			pool,
+														vk::VkCommandBufferLevel	level)
+{
+	const vk::VkCommandBufferInheritanceInfo	inheritInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+		DE_NULL,
+		0,
+		0,
+		0,
+		vk::VK_FALSE,
+		0u,
+		0u
+	};
+	const vk::VkCommandBufferBeginInfo			beginInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		0u,
+		(level == vk::VK_COMMAND_BUFFER_LEVEL_SECONDARY ? &inheritInfo : (const vk::VkCommandBufferInheritanceInfo*)DE_NULL),
+	};
+
+	vk::Move<vk::VkCommandBuffer> commandBuffer (createCommandBuffer(vkd, device, pool, level));
+
+	vkd.beginCommandBuffer(*commandBuffer, &beginInfo);
+
+	return commandBuffer;
+}
+
+vk::Move<vk::VkCommandPool> createCommandPool (const vk::DeviceInterface&	vkd,
+											   vk::VkDevice					device,
+											   deUint32						queueFamilyIndex)
+{
+	const vk::VkCommandPoolCreateInfo poolInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+		DE_NULL,
+
+		vk::VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
+		queueFamilyIndex,
+	};
+
+	return vk::createCommandPool(vkd, device, &poolInfo);
+}
+
+vk::Move<vk::VkBuffer> createBuffer (const vk::DeviceInterface&	vkd,
+									 vk::VkDevice				device,
+									 vk::VkDeviceSize			size,
+									 vk::VkBufferUsageFlags		usage,
+									 vk::VkSharingMode			sharingMode,
+									 const vector<deUint32>&	queueFamilies)
+{
+	const vk::VkBufferCreateInfo	createInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+		DE_NULL,
+
+		0,	// flags
+		size,
+		usage,
+		sharingMode,
+		(deUint32)queueFamilies.size(),
+		&queueFamilies[0]
+	};
+
+	return vk::createBuffer(vkd, device, &createInfo);
+}
+
+vk::Move<vk::VkDeviceMemory> allocMemory (const vk::DeviceInterface&	vkd,
+										  vk::VkDevice					device,
+										  vk::VkDeviceSize				size,
+										  deUint32						memoryTypeIndex)
+{
+	const vk::VkMemoryAllocateInfo alloc =
+	{
+		vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,	// sType
+		DE_NULL,									// pNext
+
+		size,
+		memoryTypeIndex
+	};
+
+	return vk::allocateMemory(vkd, device, &alloc);
+}
+
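+// Pick a memory type that satisfies both the buffer's memory requirements and the
+// requested property flags, allocate from it and bind it to the buffer. If allocation
+// runs out of memory the next compatible memory type is tried before failing the test.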
+vk::Move<vk::VkDeviceMemory> bindBufferMemory (const vk::InstanceInterface&	vki,
+											   const vk::DeviceInterface&	vkd,
+											   vk::VkPhysicalDevice			physicalDevice,
+											   vk::VkDevice					device,
+											   vk::VkBuffer					buffer,
+											   vk::VkMemoryPropertyFlags	properties)
+{
+	const vk::VkMemoryRequirements				memoryRequirements	= vk::getBufferMemoryRequirements(vkd, device, buffer);
+	const vk::VkPhysicalDeviceMemoryProperties	memoryProperties	= vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
+	deUint32									memoryTypeIndex;
+
+	for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
+	{
+		if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
+			&& (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
+		{
+			try
+			{
+				const vk::VkMemoryAllocateInfo	allocationInfo	=
+				{
+					vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+					DE_NULL,
+					memoryRequirements.size,
+					memoryTypeIndex
+				};
+				vk::Move<vk::VkDeviceMemory>	memory			(vk::allocateMemory(vkd, device, &allocationInfo));
+
+				VK_CHECK(vkd.bindBufferMemory(device, buffer, *memory, 0));
+
+				return memory;
+			}
+			catch (const vk::Error& error)
+			{
+				if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
+					|| error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
+				{
+					// Try next memory type/heap if out of memory
+				}
+				else
+				{
+					// Throw all other errors forward
+					throw;
+				}
+			}
+		}
+	}
+
+	TCU_FAIL("Failed to allocate memory for buffer");
+}
+
+vk::Move<vk::VkDeviceMemory> bindImageMemory (const vk::InstanceInterface&	vki,
+											   const vk::DeviceInterface&	vkd,
+											   vk::VkPhysicalDevice			physicalDevice,
+											   vk::VkDevice					device,
+											   vk::VkImage					image,
+											   vk::VkMemoryPropertyFlags	properties)
+{
+	const vk::VkMemoryRequirements				memoryRequirements	= vk::getImageMemoryRequirements(vkd, device, image);
+	const vk::VkPhysicalDeviceMemoryProperties	memoryProperties	= vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
+	deUint32									memoryTypeIndex;
+
+	for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
+	{
+		if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
+			&& (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
+		{
+			try
+			{
+				const vk::VkMemoryAllocateInfo	allocationInfo	=
+				{
+					vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+					DE_NULL,
+					memoryRequirements.size,
+					memoryTypeIndex
+				};
+				vk::Move<vk::VkDeviceMemory>	memory			(vk::allocateMemory(vkd, device, &allocationInfo));
+
+				VK_CHECK(vkd.bindImageMemory(device, image, *memory, 0));
+
+				return memory;
+			}
+			catch (const vk::Error& error)
+			{
+				if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
+					|| error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
+				{
+					// Try next memory type/heap if out of memory
+				}
+				else
+				{
+					// Throw all other errors forward
+					throw;
+				}
+			}
+		}
+	}
+
+	TCU_FAIL("Failed to allocate memory for image");
+}
+
+void queueRun (const vk::DeviceInterface&	vkd,
+			   vk::VkQueue					queue,
+			   vk::VkCommandBuffer			commandBuffer)
+{
+	const vk::VkSubmitInfo	submitInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
+		DE_NULL,
+
+		0,
+		DE_NULL,
+		(const vk::VkPipelineStageFlags*)DE_NULL,
+
+		1,
+		&commandBuffer,
+
+		0,
+		DE_NULL
+	};
+
+	VK_CHECK(vkd.queueSubmit(queue, 1, &submitInfo, 0));
+	VK_CHECK(vkd.queueWaitIdle(queue));
+}
+
+void* mapMemory (const vk::DeviceInterface&	vkd,
+				 vk::VkDevice				device,
+				 vk::VkDeviceMemory			memory,
+				 vk::VkDeviceSize			size)
+{
+	void* ptr;
+
+	VK_CHECK(vkd.mapMemory(device, memory, 0, size, 0, &ptr));
+
+	return ptr;
+}
+
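+// CPU-side reference copy of the tested memory. Besides the byte values it keeps a
+// bitmask (one bit per byte, packed into 64-bit words) of which bytes hold a known
+// value and can therefore be compared against.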
+class ReferenceMemory
+{
+public:
+			ReferenceMemory	(size_t size);
+
+	void	set				(size_t pos, deUint8 val);
+	deUint8	get				(size_t pos) const;
+	bool	isDefined		(size_t pos) const;
+
+	void	setDefined		(size_t offset, size_t size, const void* data);
+	void	setUndefined	(size_t offset, size_t size);
+	void	setData			(size_t offset, size_t size, const void* data);
+
+	size_t	getSize			(void) const { return m_data.size(); }
+
+private:
+	vector<deUint8>		m_data;
+	vector<deUint64>	m_defined;
+};
+
+ReferenceMemory::ReferenceMemory (size_t size)
+	: m_data	(size, 0)
+	, m_defined	(size / 64 + (size % 64 == 0 ? 0 : 1), 0ull)
+{
+}
+
+void ReferenceMemory::set (size_t pos, deUint8 val)
+{
+	m_data[pos] = val;
+	m_defined[pos / 64] |= 0x1ull << (pos % 64);
+}
+
+void ReferenceMemory::setData (size_t offset, size_t size, const void* data_)
+{
+	const deUint8* data = (const deUint8*)data_;
+
+	// \todo Optimize
+	for (size_t pos = 0; pos < size; pos++)
+	{
+		m_data[offset + pos] = data[pos];
+		m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
+	}
+}
+
+void ReferenceMemory::setUndefined	(size_t offset, size_t size)
+{
+	// Clear the defined bits so these bytes are no longer compared against the reference.
+	// \todo Optimize
+	for (size_t pos = 0; pos < size; pos++)
+		m_defined[(offset + pos) / 64] &= ~(0x1ull << ((offset + pos) % 64));
+}
+
+deUint8 ReferenceMemory::get (size_t pos) const
+{
+	DE_ASSERT(isDefined(pos));
+	return m_data[pos];
+}
+
+bool ReferenceMemory::isDefined (size_t pos) const
+{
+	return (m_defined[pos / 64] & (0x1ull << (pos % 64))) != 0;
+}
+
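+// Wraps a single VkDeviceMemory allocation together with its memory type info and
+// precomputed limits for the largest buffer and RGBA8 image that fit into it.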
+class Memory
+{
+public:
+							Memory				(const vk::InstanceInterface&	vki,
+												 const vk::DeviceInterface&		vkd,
+												 vk::VkPhysicalDevice			physicalDevice,
+												 vk::VkDevice					device,
+												 vk::VkDeviceSize				size,
+												 deUint32						memoryTypeIndex,
+												 vk::VkDeviceSize				maxBufferSize,
+												 deInt32						maxImageWidth,
+												 deInt32						maxImageHeight);
+
+	vk::VkDeviceSize		getSize				(void) const { return m_size; }
+	vk::VkDeviceSize		getMaxBufferSize	(void) const { return m_maxBufferSize; }
+	bool					getSupportBuffers	(void) const { return m_maxBufferSize > 0; }
+
+	deInt32					getMaxImageWidth	(void) const { return m_maxImageWidth; }
+	deInt32					getMaxImageHeight	(void) const { return m_maxImageHeight; }
+	bool					getSupportImages	(void) const { return m_maxImageWidth > 0; }
+
+	const vk::VkMemoryType&	getMemoryType		(void) const { return m_memoryType; }
+	deUint32				getMemoryTypeIndex	(void) const { return m_memoryTypeIndex; }
+	vk::VkDeviceMemory		getMemory			(void) const { return *m_memory; }
+
+private:
+	const vk::VkDeviceSize					m_size;
+	const deUint32							m_memoryTypeIndex;
+	const vk::VkMemoryType					m_memoryType;
+	const vk::Unique<vk::VkDeviceMemory>	m_memory;
+	const vk::VkDeviceSize					m_maxBufferSize;
+	const deInt32							m_maxImageWidth;
+	const deInt32							m_maxImageHeight;
+};
+
+vk::VkMemoryType getMemoryTypeInfo (const vk::InstanceInterface&	vki,
+									vk::VkPhysicalDevice			device,
+									deUint32						memoryTypeIndex)
+{
+	const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, device);
+
+	DE_ASSERT(memoryTypeIndex < memoryProperties.memoryTypeCount);
+
+	return memoryProperties.memoryTypes[memoryTypeIndex];
+}
+
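+// Find the largest buffer size whose memory requirements still fit into "memorySize"
+// and are compatible with "memoryTypeIndex". The full size is tried first, after which
+// the size is refined with a binary-search style probe using halving step sizes.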
+vk::VkDeviceSize findMaxBufferSize (const vk::DeviceInterface&		vkd,
+									vk::VkDevice					device,
+
+									vk::VkBufferUsageFlags			usage,
+									vk::VkSharingMode				sharingMode,
+									const vector<deUint32>&			queueFamilies,
+
+									vk::VkDeviceSize				memorySize,
+									deUint32						memoryTypeIndex)
+{
+	vk::VkDeviceSize lastSuccess = 0;
+	vk::VkDeviceSize currentSize = memorySize / 2;
+
+	{
+		const vk::Unique<vk::VkBuffer>  buffer			(createBuffer(vkd, device, memorySize, usage, sharingMode, queueFamilies));
+		const vk::VkMemoryRequirements  requirements	(vk::getBufferMemoryRequirements(vkd, device, *buffer));
+
+		if (requirements.size == memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
+			return memorySize;
+	}
+
+	for (vk::VkDeviceSize stepSize = memorySize / 4; currentSize > 0; stepSize /= 2)
+	{
+		const vk::Unique<vk::VkBuffer>	buffer			(createBuffer(vkd, device, currentSize, usage, sharingMode, queueFamilies));
+		const vk::VkMemoryRequirements	requirements	(vk::getBufferMemoryRequirements(vkd, device, *buffer));
+
+		if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
+		{
+			lastSuccess = currentSize;
+			currentSize += stepSize;
+		}
+		else
+			currentSize -= stepSize;
+
+		if (stepSize == 0)
+			break;
+	}
+
+	return lastSuccess;
+}
+
+// Round the size down to the largest value of W * H * 4, where W and H < 4096
+vk::VkDeviceSize roundBufferSizeToWxHx4 (vk::VkDeviceSize size)
+{
+	const vk::VkDeviceSize	maxTextureSize	= 4096;
+	vk::VkDeviceSize		maxTexelCount	= size / 4;
+	vk::VkDeviceSize		bestW			= de::max(maxTexelCount, maxTextureSize);
+	vk::VkDeviceSize		bestH			= maxTexelCount / bestW;
+
+	// \todo Could probably be faster?
+	for (vk::VkDeviceSize w = 1; w * w < maxTexelCount && w < maxTextureSize && bestW * bestH * 4 < size; w++)
+	{
+		const vk::VkDeviceSize h = maxTexelCount / w;
+
+		if (bestW * bestH < w * h)
+		{
+			bestW = w;
+			bestH = h;
+		}
+	}
+
+	return bestW * bestH * 4;
+}
+
+// Find an RGBA8 image size that uses exactly "size" bytes.
+// "size" must be expressible as W * H * 4, where W and H < 4096
+IVec2 findImageSizeWxHx4 (vk::VkDeviceSize size)
+{
+	const vk::VkDeviceSize	maxTextureSize	= 4096;
+	vk::VkDeviceSize		texelCount		= size / 4;
+
+	DE_ASSERT((size % 4) == 0);
+
+	// \todo Could probably be faster?
+	for (vk::VkDeviceSize w = 1; w < maxTextureSize && w < texelCount; w++)
+	{
+		const vk::VkDeviceSize	h	= texelCount / w;
+
+		if ((texelCount  % w) == 0 && h < maxTextureSize)
+			return IVec2((int)w, (int)h);
+	}
+
+	DE_FATAL("Invalid size");
+	return IVec2(-1, -1);
+}
+
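+// Find the largest RGBA8 2D image whose memory requirements fit into "memorySize" and
+// are compatible with "memoryTypeIndex". The search starts from a roughly square guess
+// covering the whole memory and adjusts both dimensions with halving step sizes.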
+IVec2 findMaxRGBA8ImageSize (const vk::DeviceInterface&	vkd,
+							 vk::VkDevice				device,
+
+							 vk::VkImageUsageFlags		usage,
+							 vk::VkSharingMode			sharingMode,
+							 const vector<deUint32>&	queueFamilies,
+
+							 vk::VkDeviceSize			memorySize,
+							 deUint32					memoryTypeIndex)
+{
+	IVec2		lastSuccess		(0);
+	IVec2		currentSize;
+
+	{
+		const deUint32	texelCount	= (deUint32)(memorySize / 4);
+		const deUint32	width		= (deUint32)deFloatSqrt((float)texelCount);
+		const deUint32	height		= texelCount / width;
+
+		currentSize[0] = deMaxu32(width, height);
+		currentSize[1] = deMinu32(width, height);
+	}
+
+	for (deInt32 stepSize = currentSize[0] / 2; currentSize[0] > 0; stepSize /= 2)
+	{
+		const vk::VkImageCreateInfo	createInfo		=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+			DE_NULL,
+
+			0u,
+			vk::VK_IMAGE_TYPE_2D,
+			vk::VK_FORMAT_R8G8B8A8_UNORM,
+			{
+				(deUint32)currentSize[0],
+				(deUint32)currentSize[1],
+				1u,
+			},
+			1u, 1u,
+			vk::VK_SAMPLE_COUNT_1_BIT,
+			vk::VK_IMAGE_TILING_OPTIMAL,
+			usage,
+			sharingMode,
+			(deUint32)queueFamilies.size(),
+			&queueFamilies[0],
+			vk::VK_IMAGE_LAYOUT_UNDEFINED
+		};
+		const vk::Unique<vk::VkImage>	image			(vk::createImage(vkd, device, &createInfo));
+		const vk::VkMemoryRequirements	requirements	(vk::getImageMemoryRequirements(vkd, device, *image));
+
+		if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
+		{
+			lastSuccess = currentSize;
+			currentSize[0] += stepSize;
+			currentSize[1] += stepSize;
+		}
+		else
+		{
+			currentSize[0] -= stepSize;
+			currentSize[1] -= stepSize;
+		}
+
+		if (stepSize == 0)
+			break;
+	}
+
+	return lastSuccess;
+}
+
+Memory::Memory (const vk::InstanceInterface&	vki,
+				const vk::DeviceInterface&		vkd,
+				vk::VkPhysicalDevice			physicalDevice,
+				vk::VkDevice					device,
+				vk::VkDeviceSize				size,
+				deUint32						memoryTypeIndex,
+				vk::VkDeviceSize				maxBufferSize,
+				deInt32							maxImageWidth,
+				deInt32							maxImageHeight)
+	: m_size			(size)
+	, m_memoryTypeIndex	(memoryTypeIndex)
+	, m_memoryType		(getMemoryTypeInfo(vki, physicalDevice, memoryTypeIndex))
+	, m_memory			(allocMemory(vkd, device, size, memoryTypeIndex))
+	, m_maxBufferSize	(maxBufferSize)
+	, m_maxImageWidth	(maxImageWidth)
+	, m_maxImageHeight	(maxImageHeight)
+{
+}
+
+class Context
+{
+public:
+												Context					(const vk::InstanceInterface&						vki,
+																		 const vk::DeviceInterface&							vkd,
+																		 vk::VkPhysicalDevice								physicalDevice,
+																		 vk::VkDevice										device,
+																		 vk::VkQueue										queue,
+																		 deUint32											queueFamilyIndex,
+																		 const vector<pair<deUint32, vk::VkQueue> >&		queues,
+																		 const vk::ProgramCollection<vk::ProgramBinary>&	binaryCollection)
+		: m_vki					(vki)
+		, m_vkd					(vkd)
+		, m_physicalDevice		(physicalDevice)
+		, m_device				(device)
+		, m_queue				(queue)
+		, m_queueFamilyIndex	(queueFamilyIndex)
+		, m_queues				(queues)
+		, m_commandPool			(createCommandPool(vkd, device, queueFamilyIndex))
+		, m_binaryCollection	(binaryCollection)
+	{
+		for (size_t queueNdx = 0; queueNdx < m_queues.size(); queueNdx++)
+			m_queueFamilies.push_back(m_queues[queueNdx].first);
+	}
+
+	const vk::InstanceInterface&					getInstanceInterface	(void) const { return m_vki; }
+	vk::VkPhysicalDevice							getPhysicalDevice		(void) const { return m_physicalDevice; }
+	vk::VkDevice									getDevice				(void) const { return m_device; }
+	const vk::DeviceInterface&						getDeviceInterface		(void) const { return m_vkd; }
+	vk::VkQueue										getQueue				(void) const { return m_queue; }
+	deUint32										getQueueFamily			(void) const { return m_queueFamilyIndex; }
+	const vector<pair<deUint32, vk::VkQueue> >&		getQueues				(void) const { return m_queues; }
+	const vector<deUint32>&							getQueueFamilies		(void) const { return m_queueFamilies; }
+	vk::VkCommandPool								getCommandPool			(void) const { return *m_commandPool; }
+	const vk::ProgramCollection<vk::ProgramBinary>&	getBinaryCollection		(void) const { return m_binaryCollection; }
+
+private:
+	const vk::InstanceInterface&					m_vki;
+	const vk::DeviceInterface&						m_vkd;
+	const vk::VkPhysicalDevice						m_physicalDevice;
+	const vk::VkDevice								m_device;
+	const vk::VkQueue								m_queue;
+	const deUint32									m_queueFamilyIndex;
+	const vector<pair<deUint32, vk::VkQueue> >&		m_queues;
+	const vk::Unique<vk::VkCommandPool>				m_commandPool;
+	const vk::ProgramCollection<vk::ProgramBinary>&	m_binaryCollection;
+	vector<deUint32>								m_queueFamilies;
+};
+
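+// State shared between commands during the prepare phase. Tracks the buffer or image
+// currently created on top of the tested memory object; at most one of them may be
+// live at a time.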
+class PrepareContext
+{
+public:
+							PrepareContext	(const Context&	context,
+											 const Memory&	memory)
+		: m_context	(context)
+		, m_memory	(memory)
+	{
+	}
+
+	const Memory&									getMemory				(void) const { return m_memory; }
+	const Context&									getContext				(void) const { return m_context; }
+	const vk::ProgramCollection<vk::ProgramBinary>&	getBinaryCollection		(void) const { return m_context.getBinaryCollection(); }
+
+	void					setBuffer		(vk::Move<vk::VkBuffer>	buffer,
+											 vk::VkDeviceSize		size)
+	{
+		DE_ASSERT(!m_currentImage);
+		DE_ASSERT(!m_currentBuffer);
+
+		m_currentBuffer		= buffer;
+		m_currentBufferSize	= size;
+	}
+
+	vk::VkBuffer			getBuffer		(void) const { return *m_currentBuffer; }
+	vk::VkDeviceSize		getBufferSize	(void) const
+	{
+		DE_ASSERT(m_currentBuffer);
+		return m_currentBufferSize;
+	}
+
+	void					releaseBuffer	(void) { m_currentBuffer.disown(); }
+
+	void					setImage		(vk::Move<vk::VkImage>	image,
+											 vk::VkImageLayout		layout,
+											 vk::VkDeviceSize		memorySize,
+											 deInt32				width,
+											 deInt32				height)
+	{
+		DE_ASSERT(!m_currentImage);
+		DE_ASSERT(!m_currentBuffer);
+
+		m_currentImage				= image;
+		m_currentImageMemorySize	= memorySize;
+		m_currentImageLayout		= layout;
+		m_currentImageWidth			= width;
+		m_currentImageHeight		= height;
+	}
+
+	void				setImageLayout	(vk::VkImageLayout layout)
+	{
+		DE_ASSERT(m_currentImage);
+		m_currentImageLayout = layout;
+	}
+
+	vk::VkImage			getImage		(void) const { return *m_currentImage; }
+	deInt32				getImageWidth	(void) const
+	{
+		DE_ASSERT(m_currentImage);
+		return m_currentImageWidth;
+	}
+	deInt32				getImageHeight	(void) const
+	{
+		DE_ASSERT(m_currentImage);
+		return m_currentImageHeight;
+	}
+	vk::VkDeviceSize	getImageMemorySize	(void) const
+	{
+		DE_ASSERT(m_currentImage);
+		return m_currentImageMemorySize;
+	}
+
+	void					releaseImage	(void) { m_currentImage.disown(); }
+
+	vk::VkImageLayout		getImageLayout	(void) const
+	{
+		DE_ASSERT(m_currentImage);
+		return m_currentImageLayout;
+	}
+
+private:
+	const Context&			m_context;
+	const Memory&			m_memory;
+
+	vk::Move<vk::VkBuffer>	m_currentBuffer;
+	vk::VkDeviceSize		m_currentBufferSize;
+
+	vk::Move<vk::VkImage>	m_currentImage;
+	vk::VkDeviceSize		m_currentImageMemorySize;
+	vk::VkImageLayout		m_currentImageLayout;
+	deInt32					m_currentImageWidth;
+	deInt32					m_currentImageHeight;
+};
+
+class ExecuteContext
+{
+public:
+					ExecuteContext	(const Context&	context)
+		: m_context	(context)
+		, m_mapping	(DE_NULL)
+	{
+	}
+
+	const Context&	getContext		(void) const { return m_context; }
+	void			setMapping		(void* ptr) { m_mapping = ptr; }
+	void*			getMapping		(void) const { return m_mapping; }
+
+private:
+	const Context&	m_context;
+	void*			m_mapping;
+};
+
+class VerifyContext
+{
+public:
+							VerifyContext		(TestLog&				log,
+												 tcu::ResultCollector&	resultCollector,
+												 const Context&			context,
+												 vk::VkDeviceSize		size)
+		: m_log				(log)
+		, m_resultCollector	(resultCollector)
+		, m_context			(context)
+		, m_reference		((size_t)size)
+	{
+	}
+
+	const Context&			getContext			(void) const { return m_context; }
+	TestLog&				getLog				(void) const { return m_log; }
+	tcu::ResultCollector&	getResultCollector	(void) const { return m_resultCollector; }
+
+	ReferenceMemory&		getReference		(void) { return m_reference; }
+	TextureLevel&			getReferenceImage	(void) { return m_referenceImage;}
+
+private:
+	TestLog&				m_log;
+	tcu::ResultCollector&	m_resultCollector;
+	const Context&			m_context;
+	ReferenceMemory			m_reference;
+	TextureLevel			m_referenceImage;
+};
+
+class Command
+{
+public:
+	// Constructor should allocate all non-vulkan resources.
+	virtual				~Command	(void) {}
+
+	// Get name of the command
+	virtual const char*	getName		(void) const = 0;
+
+	// Log prepare operations
+	virtual void		logPrepare	(TestLog&, size_t) const {}
+	// Log executed operations
+	virtual void		logExecute	(TestLog&, size_t) const {}
+
+	// Prepare should allocate all vulkan resources and resources that require
+	// that buffer or memory has been already allocated. This should build all
+	// command buffers etc.
+	virtual void		prepare		(PrepareContext&) {}
+
+	// Execute command. Write or read mapped memory, submit commands to queue
+	// etc.
+	virtual void		execute		(ExecuteContext&) {}
+
+	// Verify that results are correct.
+	virtual void		verify		(VerifyContext&, size_t) {}
+
+protected:
+	// Allow only inheritance
+						Command		(void) {}
+
+private:
+	// Disallow copying
+						Command		(const Command&);
+	Command&			operator=	(const Command&);
+};
+
+class Map : public Command
+{
+public:
+						Map			(void) {}
+						~Map		(void) {}
+	const char*			getName		(void) const { return "Map"; }
+
+	void				logExecute	(TestLog& log, size_t commandIndex) const
+	{
+		log << TestLog::Message << commandIndex << ":" << getName() << " Map memory" << TestLog::EndMessage;
+	}
+
+	void				prepare		(PrepareContext& context)
+	{
+		m_memory	= context.getMemory().getMemory();
+		m_size		= context.getMemory().getSize();
+	}
+
+	void				execute		(ExecuteContext& context)
+	{
+		const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
+		const vk::VkDevice			device	= context.getContext().getDevice();
+
+		context.setMapping(mapMemory(vkd, device, m_memory, m_size));
+	}
+
+private:
+	vk::VkDeviceMemory	m_memory;
+	vk::VkDeviceSize	m_size;
+};
+
+class UnMap : public Command
+{
+public:
+						UnMap		(void) {}
+						~UnMap		(void) {}
+	const char*			getName		(void) const { return "UnMap"; }
+
+	void				logExecute	(TestLog& log, size_t commandIndex) const
+	{
+		log << TestLog::Message << commandIndex << ": Unmap memory" << TestLog::EndMessage;
+	}
+
+	void				prepare		(PrepareContext& context)
+	{
+		m_memory	= context.getMemory().getMemory();
+	}
+
+	void				execute		(ExecuteContext& context)
+	{
+		const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
+		const vk::VkDevice			device	= context.getContext().getDevice();
+
+		vkd.unmapMemory(device, m_memory);
+		context.setMapping(DE_NULL);
+	}
+
+private:
+	vk::VkDeviceMemory	m_memory;
+};
+
+class Invalidate : public Command
+{
+public:
+						Invalidate	(void) {}
+						~Invalidate	(void) {}
+	const char*			getName		(void) const { return "Invalidate"; }
+
+	void				logExecute	(TestLog& log, size_t commandIndex) const
+	{
+		log << TestLog::Message << commandIndex << ": Invalidate mapped memory" << TestLog::EndMessage;
+	}
+
+	void				prepare		(PrepareContext& context)
+	{
+		m_memory	= context.getMemory().getMemory();
+		m_size		= context.getMemory().getSize();
+	}
+
+	void				execute		(ExecuteContext& context)
+	{
+		const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
+		const vk::VkDevice			device	= context.getContext().getDevice();
+
+		vk::invalidateMappedMemoryRange(vkd, device, m_memory, 0, m_size);
+	}
+
+private:
+	vk::VkDeviceMemory	m_memory;
+	vk::VkDeviceSize	m_size;
+};
+
+class Flush : public Command
+{
+public:
+						Flush		(void) {}
+						~Flush		(void) {}
+	const char*			getName		(void) const { return "Flush"; }
+
+	void				logExecute	(TestLog& log, size_t commandIndex) const
+	{
+		log << TestLog::Message << commandIndex << ": Flush mapped memory" << TestLog::EndMessage;
+	}
+
+	void				prepare		(PrepareContext& context)
+	{
+		m_memory	= context.getMemory().getMemory();
+		m_size		= context.getMemory().getSize();
+	}
+
+	void				execute		(ExecuteContext& context)
+	{
+		const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
+		const vk::VkDevice			device	= context.getContext().getDevice();
+
+		vk::flushMappedMemoryRange(vkd, device, m_memory, 0, m_size);
+	}
+
+private:
+	vk::VkDeviceMemory	m_memory;
+	vk::VkDeviceSize	m_size;
+};
+
+// Host memory reads and writes
+class HostMemoryAccess : public Command
+{
+public:
+					HostMemoryAccess	(bool read, bool write, deUint32 seed);
+					~HostMemoryAccess	(void) {}
+	const char*		getName				(void) const { return "HostMemoryAccess"; }
+
+	void			logExecute			(TestLog& log, size_t commandIndex) const;
+	void			prepare				(PrepareContext& context);
+	void			execute				(ExecuteContext& context);
+
+	void			verify				(VerifyContext& context, size_t commandIndex);
+
+private:
+	const bool		m_read;
+	const bool		m_write;
+	const deUint32	m_seed;
+
+	size_t			m_size;
+	vector<deUint8>	m_readData;
+};
+
+HostMemoryAccess::HostMemoryAccess (bool read, bool write, deUint32 seed)
+	: m_read	(read)
+	, m_write	(write)
+	, m_seed	(seed)
+{
+}
+
+void HostMemoryAccess::logExecute (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ": Host memory access:" << (m_read ? " read" : "") << (m_write ? " write" : "")  << ", seed: " << m_seed << TestLog::EndMessage;
+}
+
+void HostMemoryAccess::prepare (PrepareContext& context)
+{
+	m_size = (size_t)context.getMemory().getSize();
+
+	if (m_read)
+		m_readData.resize(m_size, 0);
+}
+
+void HostMemoryAccess::execute (ExecuteContext& context)
+{
+	de::Random		rng	(m_seed);
+	deUint8* const	ptr	= (deUint8*)context.getMapping();
+
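+	// Reads record the current memory contents for later verification, writes store
+	// pseudo-random bytes derived from m_seed. A combined read+write saves the old value
+	// and then XORs it with a random mask so that verify() can replay the same sequence.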
+	if (m_read && m_write)
+	{
+		for (size_t pos = 0; pos < m_size; pos++)
+		{
+			const deUint8	mask	= rng.getUint8();
+			const deUint8	value	= ptr[pos];
+
+			m_readData[pos] = value;
+			ptr[pos] = value ^ mask;
+		}
+	}
+	else if (m_read)
+	{
+		for (size_t pos = 0; pos < m_size; pos++)
+		{
+			const deUint8	value	= ptr[pos];
+
+			m_readData[pos] = value;
+		}
+	}
+	else if (m_write)
+	{
+		for (size_t pos = 0; pos < m_size; pos++)
+		{
+			const deUint8	value	= rng.getUint8();
+
+			ptr[pos] = value;
+		}
+	}
+	else
+		DE_FATAL("Host memory access without read or write.");
+}
+
+void HostMemoryAccess::verify (VerifyContext& context, size_t commandIndex)
+{
+	tcu::ResultCollector&	resultCollector	= context.getResultCollector();
+	ReferenceMemory&		reference		= context.getReference();
+	de::Random				rng				(m_seed);
+
+	if (m_read && m_write)
+	{
+		for (size_t pos = 0; pos < m_size; pos++)
+		{
+			const deUint8	mask	= rng.getUint8();
+			const deUint8	value	= m_readData[pos];
+
+			if (reference.isDefined(pos))
+			{
+				if (value != reference.get(pos))
+				{
+					resultCollector.fail(
+							de::toString(commandIndex) + ":" + getName()
+							+ " Result differs from reference, Expected: "
+							+ de::toString(tcu::toHex<8>(reference.get(pos)))
+							+ ", Got: "
+							+ de::toString(tcu::toHex<8>(value))
+							+ ", At offset: "
+							+ de::toString(pos));
+					break;
+				}
+
+				reference.set(pos, reference.get(pos) ^ mask);
+			}
+		}
+	}
+	else if (m_read)
+	{
+		for (size_t pos = 0; pos < m_size; pos++)
+		{
+			const deUint8	value	= m_readData[pos];
+
+			if (reference.isDefined(pos))
+			{
+				if (value != reference.get(pos))
+				{
+					resultCollector.fail(
+							de::toString(commandIndex) + ":" + getName()
+							+ " Result differs from reference, Expected: "
+							+ de::toString(tcu::toHex<8>(reference.get(pos)))
+							+ ", Got: "
+							+ de::toString(tcu::toHex<8>(value))
+							+ ", At offset: "
+							+ de::toString(pos));
+					break;
+				}
+			}
+		}
+	}
+	else if (m_write)
+	{
+		for (size_t pos = 0; pos < m_size; pos++)
+		{
+			const deUint8	value	= rng.getUint8();
+
+			reference.set(pos, value);
+		}
+	}
+	else
+		DE_FATAL("Host memory access without read or write.");
+}
+
+class CreateBuffer : public Command
+{
+public:
+									CreateBuffer	(vk::VkBufferUsageFlags	usage,
+													 vk::VkSharingMode		sharing);
+									~CreateBuffer	(void) {}
+	const char*						getName			(void) const { return "CreateBuffer"; }
+
+	void							logPrepare		(TestLog& log, size_t commandIndex) const;
+	void							prepare			(PrepareContext& context);
+
+private:
+	const vk::VkBufferUsageFlags	m_usage;
+	const vk::VkSharingMode			m_sharing;
+};
+
+CreateBuffer::CreateBuffer (vk::VkBufferUsageFlags	usage,
+							vk::VkSharingMode		sharing)
+	: m_usage	(usage)
+	, m_sharing	(sharing)
+{
+}
+
+void CreateBuffer::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Create buffer, Sharing mode: " << m_sharing << ", Usage: " << vk::getBufferUsageFlagsStr(m_usage) << TestLog::EndMessage;
+}
+
+void CreateBuffer::prepare (PrepareContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkDevice			device			= context.getContext().getDevice();
+	const vk::VkDeviceSize		bufferSize		= context.getMemory().getMaxBufferSize();
+	const vector<deUint32>&		queueFamilies	= context.getContext().getQueueFamilies();
+
+	context.setBuffer(createBuffer(vkd, device, bufferSize, m_usage, m_sharing, queueFamilies), bufferSize);
+}
+
+class DestroyBuffer : public Command
+{
+public:
+							DestroyBuffer	(void);
+							~DestroyBuffer	(void) {}
+	const char*				getName			(void) const { return "DestroyBuffer"; }
+
+	void					logExecute		(TestLog& log, size_t commandIndex) const;
+	void					prepare			(PrepareContext& context);
+	void					execute			(ExecuteContext& context);
+
+private:
+	vk::Move<vk::VkBuffer>	m_buffer;
+};
+
+DestroyBuffer::DestroyBuffer (void)
+{
+}
+
+void DestroyBuffer::prepare (PrepareContext& context)
+{
+	m_buffer = vk::Move<vk::VkBuffer>(vk::check(context.getBuffer()), vk::Deleter<vk::VkBuffer>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
+	context.releaseBuffer();
+}
+
+void DestroyBuffer::logExecute (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Destroy buffer" << TestLog::EndMessage;
+}
+
+void DestroyBuffer::execute (ExecuteContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkDevice			device			= context.getContext().getDevice();
+
+	vkd.destroyBuffer(device, m_buffer.disown(), DE_NULL);
+}
+
+class BindBufferMemory : public Command
+{
+public:
+				BindBufferMemory	(void) {}
+				~BindBufferMemory	(void) {}
+	const char*	getName				(void) const { return "BindBufferMemory"; }
+
+	void		logPrepare			(TestLog& log, size_t commandIndex) const;
+	void		prepare				(PrepareContext& context);
+};
+
+void BindBufferMemory::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to buffer" << TestLog::EndMessage;
+}
+
+void BindBufferMemory::prepare (PrepareContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkDevice			device			= context.getContext().getDevice();
+
+	VK_CHECK(vkd.bindBufferMemory(device, context.getBuffer(), context.getMemory().getMemory(), 0));
+}
+
+class CreateImage : public Command
+{
+public:
+									CreateImage		(vk::VkImageUsageFlags	usage,
+													 vk::VkSharingMode		sharing);
+									~CreateImage	(void) {}
+	const char*						getName			(void) const { return "CreateImage"; }
+
+	void							logPrepare		(TestLog& log, size_t commandIndex) const;
+	void							prepare			(PrepareContext& context);
+	void							verify			(VerifyContext& context, size_t commandIndex);
+
+private:
+	const vk::VkImageUsageFlags	m_usage;
+	const vk::VkSharingMode		m_sharing;
+	deInt32						m_imageWidth;
+	deInt32						m_imageHeight;
+};
+
+CreateImage::CreateImage (vk::VkImageUsageFlags	usage,
+						  vk::VkSharingMode		sharing)
+	: m_usage	(usage)
+	, m_sharing	(sharing)
+{
+}
+
+void CreateImage::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Create image, sharing: " << m_sharing << ", usage: " << vk::getImageUsageFlagsStr(m_usage)  << TestLog::EndMessage;
+}
+
+void CreateImage::prepare (PrepareContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkDevice			device			= context.getContext().getDevice();
+	const vector<deUint32>&		queueFamilies	= context.getContext().getQueueFamilies();
+
+	m_imageWidth	= context.getMemory().getMaxImageWidth();
+	m_imageHeight	= context.getMemory().getMaxImageHeight();
+
+	{
+		const vk::VkImageCreateInfo	createInfo		=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+			DE_NULL,
+
+			0u,
+			vk::VK_IMAGE_TYPE_2D,
+			vk::VK_FORMAT_R8G8B8A8_UNORM,
+			{
+				(deUint32)m_imageWidth,
+				(deUint32)m_imageHeight,
+				1u,
+			},
+			1u, 1u,
+			vk::VK_SAMPLE_COUNT_1_BIT,
+			vk::VK_IMAGE_TILING_OPTIMAL,
+			m_usage,
+			m_sharing,
+			(deUint32)queueFamilies.size(),
+			&queueFamilies[0],
+			vk::VK_IMAGE_LAYOUT_UNDEFINED
+		};
+		vk::Move<vk::VkImage>			image			(createImage(vkd, device, &createInfo));
+		const vk::VkMemoryRequirements	requirements	= vk::getImageMemoryRequirements(vkd, device, *image);
+
+		context.setImage(image, vk::VK_IMAGE_LAYOUT_UNDEFINED, requirements.size, m_imageWidth, m_imageHeight);
+	}
+}
+
+void CreateImage::verify (VerifyContext& context, size_t)
+{
+	context.getReferenceImage() = TextureLevel(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight);
+}
+
+class DestroyImage : public Command
+{
+public:
+							DestroyImage	(void);
+							~DestroyImage	(void) {}
+	const char*				getName			(void) const { return "DestroyImage"; }
+
+	void					logExecute		(TestLog& log, size_t commandIndex) const;
+	void					prepare			(PrepareContext& context);
+	void					execute			(ExecuteContext& context);
+
+private:
+	vk::Move<vk::VkImage>	m_image;
+};
+
+DestroyImage::DestroyImage (void)
+{
+}
+
+void DestroyImage::prepare (PrepareContext& context)
+{
+	m_image = vk::Move<vk::VkImage>(vk::check(context.getImage()), vk::Deleter<vk::VkImage>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
+	context.releaseImage();
+}
+
+void DestroyImage::logExecute (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Destroy image" << TestLog::EndMessage;
+}
+
+void DestroyImage::execute (ExecuteContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkDevice			device			= context.getContext().getDevice();
+
+	vkd.destroyImage(device, m_image.disown(), DE_NULL);
+}
+
+class BindImageMemory : public Command
+{
+public:
+				BindImageMemory		(void) {}
+				~BindImageMemory	(void) {}
+	const char*	getName				(void) const { return "BindImageMemory"; }
+
+	void		logPrepare			(TestLog& log, size_t commandIndex) const;
+	void		prepare				(PrepareContext& context);
+};
+
+void BindImageMemory::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to image" << TestLog::EndMessage;
+}
+
+void BindImageMemory::prepare (PrepareContext& context)
+{
+	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
+	const vk::VkDevice				device			= context.getContext().getDevice();
+
+	VK_CHECK(vkd.bindImageMemory(device, context.getImage(), context.getMemory().getMemory(), 0));
+}
+
+class QueueWaitIdle : public Command
+{
+public:
+				QueueWaitIdle	(void) {}
+				~QueueWaitIdle	(void) {}
+	const char*	getName			(void) const { return "QueueWaitIdle"; }
+
+	void		logExecute		(TestLog& log, size_t commandIndex) const;
+	void		execute			(ExecuteContext& context);
+};
+
+void QueueWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Queue wait idle" << TestLog::EndMessage;
+}
+
+void QueueWaitIdle::execute (ExecuteContext& context)
+{
+	const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
+	const vk::VkQueue			queue	= context.getContext().getQueue();
+
+	VK_CHECK(vkd.queueWaitIdle(queue));
+}
+
+class DeviceWaitIdle : public Command
+{
+public:
+				DeviceWaitIdle	(void) {}
+				~DeviceWaitIdle	(void) {}
+	const char*	getName			(void) const { return "DeviceWaitIdle"; }
+
+	void		logExecute		(TestLog& log, size_t commandIndex) const;
+	void		execute			(ExecuteContext& context);
+};
+
+void DeviceWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Device wait idle" << TestLog::EndMessage;
+}
+
+void DeviceWaitIdle::execute (ExecuteContext& context)
+{
+	const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
+	const vk::VkDevice			device	= context.getContext().getDevice();
+
+	VK_CHECK(vkd.deviceWaitIdle(device));
+}
+
+class SubmitContext
+{
+public:
+								SubmitContext		(const PrepareContext&		context,
+													 const vk::VkCommandBuffer	commandBuffer)
+		: m_context			(context)
+		, m_commandBuffer	(commandBuffer)
+	{
+	}
+
+	const Memory&				getMemory			(void) const { return m_context.getMemory(); }
+	const Context&				getContext			(void) const { return m_context.getContext(); }
+	vk::VkCommandBuffer			getCommandBuffer	(void) const { return m_commandBuffer; }
+
+	vk::VkBuffer				getBuffer			(void) const { return m_context.getBuffer(); }
+	vk::VkDeviceSize			getBufferSize		(void) const { return m_context.getBufferSize(); }
+
+	vk::VkImage					getImage			(void) const { return m_context.getImage(); }
+	deInt32						getImageWidth		(void) const { return m_context.getImageWidth(); }
+	deInt32						getImageHeight		(void) const { return m_context.getImageHeight(); }
+	vk::VkImageLayout			getImageLayout		(void) const { return m_context.getImageLayout(); }
+
+private:
+	const PrepareContext&		m_context;
+	const vk::VkCommandBuffer	m_commandBuffer;
+};
+
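+// A single operation recorded into a command buffer. SubmitCommandBuffer prepares,
+// records and submits a sequence of these as one batch.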
+class CmdCommand
+{
+public:
+	virtual				~CmdCommand	(void) {}
+	virtual const char*	getName		(void) const = 0;
+
+	// Log things that are done during prepare
+	virtual void		logPrepare	(TestLog&, size_t) const {}
+	// Log submitted calls etc.
+	virtual void		logSubmit	(TestLog&, size_t) const {}
+
+	// Allocate vulkan resources and prepare for submit.
+	virtual void		prepare		(PrepareContext&) {}
+
+	// Submit commands to command buffer.
+	virtual void		submit		(SubmitContext&) {}
+
+	// Verify results
+	virtual void		verify		(VerifyContext&, size_t) {}
+};
+
+class SubmitCommandBuffer : public Command
+{
+public:
+					SubmitCommandBuffer		(const vector<CmdCommand*>& commands);
+					~SubmitCommandBuffer	(void);
+
+	const char*		getName					(void) const { return "SubmitCommandBuffer"; }
+	void			logExecute				(TestLog& log, size_t commandIndex) const;
+	void			logPrepare				(TestLog& log, size_t commandIndex) const;
+
+	// Allocate command buffer and submit commands to command buffer
+	void			prepare					(PrepareContext& context);
+	void			execute					(ExecuteContext& context);
+
+	// Verify that results are correct.
+	void			verify					(VerifyContext& context, size_t commandIndex);
+
+private:
+	vector<CmdCommand*>				m_commands;
+	vk::Move<vk::VkCommandBuffer>	m_commandBuffer;
+};
+
+SubmitCommandBuffer::SubmitCommandBuffer (const vector<CmdCommand*>& commands)
+	: m_commands	(commands)
+{
+}
+
+SubmitCommandBuffer::~SubmitCommandBuffer (void)
+{
+	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
+		delete m_commands[cmdNdx];
+}
+
+void SubmitCommandBuffer::prepare (PrepareContext& context)
+{
+	const vk::DeviceInterface&	vkd			= context.getContext().getDeviceInterface();
+	const vk::VkDevice			device		= context.getContext().getDevice();
+	const vk::VkCommandPool		commandPool	= context.getContext().getCommandPool();
+
+	m_commandBuffer = createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY);
+
+	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
+	{
+		CmdCommand& command = *m_commands[cmdNdx];
+
+		command.prepare(context);
+	}
+
+	{
+		SubmitContext submitContext (context, *m_commandBuffer);
+
+		for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
+		{
+			CmdCommand& command = *m_commands[cmdNdx];
+
+			command.submit(submitContext);
+		}
+
+		VK_CHECK(vkd.endCommandBuffer(*m_commandBuffer));
+	}
+}
+
+void SubmitCommandBuffer::execute (ExecuteContext& context)
+{
+	const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	cmd		= *m_commandBuffer;
+	const vk::VkQueue			queue	= context.getContext().getQueue();
+	const vk::VkSubmitInfo		submit	=
+	{
+		vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
+		DE_NULL,
+
+		0,
+		DE_NULL,
+		(const vk::VkPipelineStageFlags*)DE_NULL,
+
+		1,
+		&cmd,
+
+		0,
+		DE_NULL
+	};
+
+	VK_CHECK(vkd.queueSubmit(queue, 1, &submit, 0));
+}
+
+void SubmitCommandBuffer::verify (VerifyContext& context, size_t commandIndex)
+{
+	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
+	const tcu::ScopedLogSection	section		(context.getLog(), sectionName, sectionName);
+
+	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
+		m_commands[cmdNdx]->verify(context, cmdNdx);
+}
+
+void SubmitCommandBuffer::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
+	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);
+
+	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
+		m_commands[cmdNdx]->logPrepare(log, cmdNdx);
+}
+
+void SubmitCommandBuffer::logExecute (TestLog& log, size_t commandIndex) const
+{
+	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
+	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);
+
+	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
+		m_commands[cmdNdx]->logSubmit(log, cmdNdx);
+}
+
+class PipelineBarrier : public CmdCommand
+{
+public:
+	enum Type
+	{
+		TYPE_GLOBAL = 0,
+		TYPE_BUFFER,
+		TYPE_IMAGE,
+		TYPE_LAST
+	};
+									PipelineBarrier		(const vk::VkPipelineStageFlags	srcStages,
+														 const vk::VkAccessFlags		srcAccesses,
+														 const vk::VkPipelineStageFlags	dstStages,
+														 const vk::VkAccessFlags		dstAccesses,
+														 Type							type);
+									~PipelineBarrier	(void) {}
+	const char*						getName				(void) const { return "PipelineBarrier"; }
+
+	void							logSubmit			(TestLog& log, size_t commandIndex) const;
+	void							submit				(SubmitContext& context);
+
+private:
+	const vk::VkPipelineStageFlags	m_srcStages;
+	const vk::VkAccessFlags			m_srcAccesses;
+	const vk::VkPipelineStageFlags	m_dstStages;
+	const vk::VkAccessFlags			m_dstAccesses;
+	const Type						m_type;
+};
+
+PipelineBarrier::PipelineBarrier (const vk::VkPipelineStageFlags	srcStages,
+								  const vk::VkAccessFlags			srcAccesses,
+								  const vk::VkPipelineStageFlags	dstStages,
+								  const vk::VkAccessFlags			dstAccesses,
+								  Type								type)
+	: m_srcStages	(srcStages)
+	, m_srcAccesses	(srcAccesses)
+	, m_dstStages	(dstStages)
+	, m_dstAccesses	(dstAccesses)
+	, m_type		(type)
+{
+}
+
+void PipelineBarrier::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName()
+		<< " " << (m_type == TYPE_GLOBAL ? "Global pipeline barrier"
+					: m_type == TYPE_BUFFER ? "Buffer pipeline barrier"
+					: "Image pipeline barrier")
+		<< ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
+		<< ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses) << TestLog::EndMessage;
+}
+
+void PipelineBarrier::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	cmd	= context.getCommandBuffer();
+
+	// \todo [2016-01-08 pyry] This could be cleaned up thanks to latest API changes
+
+	switch (m_type)
+	{
+		case TYPE_GLOBAL:
+		{
+			const vk::VkMemoryBarrier	barrier		=
+			{
+				vk::VK_STRUCTURE_TYPE_MEMORY_BARRIER,
+				DE_NULL,
+
+				m_srcAccesses,
+				m_dstAccesses
+			};
+
+			vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 1, &barrier, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+			break;
+		}
+
+		case TYPE_BUFFER:
+		{
+			const vk::VkBufferMemoryBarrier	barrier		=
+			{
+				vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+				DE_NULL,
+
+				m_srcAccesses,
+				m_dstAccesses,
+
+				vk::VK_QUEUE_FAMILY_IGNORED,
+				vk::VK_QUEUE_FAMILY_IGNORED,
+
+				context.getBuffer(),
+				0,
+				vk::VK_WHOLE_SIZE
+			};
+
+			vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+			break;
+		}
+
+		case TYPE_IMAGE:
+		{
+			const vk::VkImageMemoryBarrier	barrier		=
+			{
+				vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+				DE_NULL,
+
+				m_srcAccesses,
+				m_dstAccesses,
+
+				context.getImageLayout(),
+				context.getImageLayout(),
+
+				vk::VK_QUEUE_FAMILY_IGNORED,
+				vk::VK_QUEUE_FAMILY_IGNORED,
+
+				context.getImage(),
+				{
+					vk::VK_IMAGE_ASPECT_COLOR_BIT,
+					0, 1,
+					0, 1
+				}
+			};
+
+			vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
+			break;
+		}
+
+		default:
+			DE_FATAL("Unknown pipeline barrier type");
+	}
+}
+
+class ImageTransition : public CmdCommand
+{
+public:
+						ImageTransition		(void) {}
+						~ImageTransition	(void) {}
+	const char*			getName				(void) const { return "ImageTransition"; }
+
+	void				prepare				(PrepareContext& context);
+	void				logSubmit			(TestLog& log, size_t commandIndex) const;
+	void				submit				(SubmitContext& context);
+	void				verify				(VerifyContext& context, size_t);
+
+private:
+	vk::VkDeviceSize	m_imageMemorySize;
+};
+
+void ImageTransition::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Use pipeline barrier to transition to VK_IMAGE_LAYOUT_GENERAL." << TestLog::EndMessage;
+}
+
+void ImageTransition::prepare (PrepareContext& context)
+{
+	context.setImageLayout(vk::VK_IMAGE_LAYOUT_GENERAL);
+	m_imageMemorySize = context.getImageMemorySize();
+}
+
+void ImageTransition::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&		vkd			= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer		cmd			= context.getCommandBuffer();
+	const vk::VkImageMemoryBarrier	barrier		=
+	{
+		vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+		DE_NULL,
+
+		ALL_ACCESSES,
+		ALL_ACCESSES,
+
+		context.getImageLayout(),
+		vk::VK_IMAGE_LAYOUT_GENERAL,
+
+		vk::VK_QUEUE_FAMILY_IGNORED,
+		vk::VK_QUEUE_FAMILY_IGNORED,
+
+		context.getImage(),
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,
+			0u, 1u,
+			0u, 1u
+		}
+	};
+
+	vkd.cmdPipelineBarrier(cmd, ALL_PIPELINE_STAGES, ALL_PIPELINE_STAGES, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
+}
+
+void ImageTransition::verify (VerifyContext& context, size_t)
+{
+	context.getReference().setUndefined(0, (size_t)m_imageMemorySize);
+}
+
+class FillBuffer : public CmdCommand
+{
+public:
+						FillBuffer	(deUint32 value) : m_value(value) {}
+						~FillBuffer	(void) {}
+	const char*			getName		(void) const { return "FillBuffer"; }
+
+	void				logSubmit	(TestLog& log, size_t commandIndex) const;
+	void				submit		(SubmitContext& context);
+	void				verify		(VerifyContext& context, size_t commandIndex);
+
+private:
+	const deUint32		m_value;
+	vk::VkDeviceSize	m_bufferSize;
+};
+
+void FillBuffer::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Fill value: " << m_value << TestLog::EndMessage;
+}
+
+void FillBuffer::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd			= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	cmd			= context.getCommandBuffer();
+	const vk::VkBuffer			buffer		= context.getBuffer();
+	const vk::VkDeviceSize		sizeMask	= ~(0x3ull); // \note Round down to multiple of 4
+
+	m_bufferSize = sizeMask & context.getBufferSize();
+	vkd.cmdFillBuffer(cmd, buffer, 0, m_bufferSize, m_value);
+}
+
+void FillBuffer::verify (VerifyContext& context, size_t)
+{
+	ReferenceMemory&	reference	= context.getReference();
+
+	for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
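+		// \note vkCmdFillBuffer repeats the 32-bit value, so expand it to bytes here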
+	{
+#if (DE_ENDIANNESS == DE_LITTLE_ENDIAN)
+		reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(ndx % 4)))));
+#else
+		reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(3 - (ndx % 4))))));
+#endif
+	}
+}
+
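+// Writes seeded pseudorandom data into the buffer with vkCmdUpdateBuffer in 64KiB blocks
+// (the per-command maximum). verify() replays the same RNG into the reference memory.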
+class UpdateBuffer : public CmdCommand
+{
+public:
+						UpdateBuffer	(deUint32 seed) : m_seed(seed) {}
+						~UpdateBuffer	(void) {}
+	const char*			getName			(void) const { return "UpdateBuffer"; }
+
+	void				logSubmit		(TestLog& log, size_t commandIndex) const;
+	void				submit			(SubmitContext& context);
+	void				verify			(VerifyContext& context, size_t commandIndex);
+
+private:
+	const deUint32		m_seed;
+	vk::VkDeviceSize	m_bufferSize;
+};
+
+void UpdateBuffer::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Update buffer, seed: " << m_seed << TestLog::EndMessage;
+}
+
+void UpdateBuffer::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd			= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	cmd			= context.getCommandBuffer();
+	const vk::VkBuffer			buffer		= context.getBuffer();
+	const size_t				blockSize	= 65536;
+	std::vector<deUint8>		data		(blockSize, 0);
+	de::Random					rng			(m_seed);
+
+	m_bufferSize = context.getBufferSize();
+
+	for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
+	{
+		for (size_t ndx = 0; ndx < data.size(); ndx++)
+			data[ndx] = rng.getUint8();
+
+		if (m_bufferSize - updated > blockSize)
+			vkd.cmdUpdateBuffer(cmd, buffer, updated, blockSize, (const deUint32*)(&data[0]));
+		else
+			vkd.cmdUpdateBuffer(cmd, buffer, updated, m_bufferSize - updated, (const deUint32*)(&data[0]));
+	}
+}
+
+void UpdateBuffer::verify (VerifyContext& context, size_t)
+{
+	ReferenceMemory&	reference	= context.getReference();
+	const size_t		blockSize	= 65536;
+	vector<deUint8>		data		(blockSize, 0);
+	de::Random			rng			(m_seed);
+
+	for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
+	{
+		for (size_t ndx = 0; ndx < data.size(); ndx++)
+			data[ndx] = rng.getUint8();
+
+		if (m_bufferSize - updated > blockSize)
+			reference.setData(updated, blockSize, &data[0]);
+		else
+			reference.setData(updated, (size_t)(m_bufferSize - updated), &data[0]);
+	}
+}
+
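+// Copies the test buffer into a freshly allocated host-visible buffer; verify() maps the
+// destination and compares each defined byte against the reference memory.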
+class BufferCopyToBuffer : public CmdCommand
+{
+public:
+									BufferCopyToBuffer	(void) {}
+									~BufferCopyToBuffer	(void) {}
+	const char*						getName				(void) const { return "BufferCopyToBuffer"; }
+
+	void							logPrepare			(TestLog& log, size_t commandIndex) const;
+	void							prepare				(PrepareContext& context);
+	void							logSubmit			(TestLog& log, size_t commandIndex) const;
+	void							submit				(SubmitContext& context);
+	void							verify				(VerifyContext& context, size_t commandIndex);
+
+private:
+	vk::VkDeviceSize				m_bufferSize;
+	vk::Move<vk::VkBuffer>			m_dstBuffer;
+	vk::Move<vk::VkDeviceMemory>	m_memory;
+};
+
+void BufferCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for buffer to buffer copy." << TestLog::EndMessage;
+}
+
+void BufferCopyToBuffer::prepare (PrepareContext& context)
+{
+	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice				device			= context.getContext().getDevice();
+	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
+
+	m_bufferSize = context.getBufferSize();
+
+	m_dstBuffer	= createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
+	m_memory	= bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+}
+
+void BufferCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to another buffer" << TestLog::EndMessage;
+}
+
+void BufferCopyToBuffer::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
+	const vk::VkBufferCopy		range			=
+	{
+		0, 0, // Offsets
+		m_bufferSize
+	};
+
+	vkd.cmdCopyBuffer(commandBuffer, context.getBuffer(), *m_dstBuffer, 1, &range);
+}
+
+void BufferCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
+{
+	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
+	ReferenceMemory&						reference		(context.getReference());
+	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
+	const vk::VkDevice						device			= context.getContext().getDevice();
+	const vk::VkQueue						queue			= context.getContext().getQueue();
+	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
+	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+	const vk::VkBufferMemoryBarrier			barrier			=
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+		DE_NULL,
+
+		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+		vk::VK_ACCESS_HOST_READ_BIT,
+
+		vk::VK_QUEUE_FAMILY_IGNORED,
+		vk::VK_QUEUE_FAMILY_IGNORED,
+		*m_dstBuffer,
+		0,
+		vk::VK_WHOLE_SIZE
+	};
+
+	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+
+	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
+	queueRun(vkd, queue, *commandBuffer);
+
+	{
+		void* const	ptr		= mapMemory(vkd, device, *m_memory, m_bufferSize);
+		bool		isOk	= true;
+
+		vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
+
+		{
+			const deUint8* const data = (const deUint8*)ptr;
+
+			for (size_t pos = 0; pos < (size_t)m_bufferSize; pos++)
+			{
+				if (reference.isDefined(pos))
+				{
+					if (data[pos] != reference.get(pos))
+					{
+						resultCollector.fail(
+								de::toString(commandIndex) + ":" + getName()
+								+ " Result differs from reference, Expected: "
+								+ de::toString(tcu::toHex<8>(reference.get(pos)))
+								+ ", Got: "
+								+ de::toString(tcu::toHex<8>(data[pos]))
+								+ ", At offset: "
+								+ de::toString(pos));
+						isOk = false;
+						break;
+					}
+				}
+			}
+		}
+
+		vkd.unmapMemory(device, *m_memory);
+
+		if (!isOk)
+			context.getLog() << TestLog::Message << commandIndex << ": Buffer copy to buffer verification failed" << TestLog::EndMessage;
+	}
+}
+
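+// Fills a host-visible source buffer with seeded pseudorandom data in prepare() and copies
+// it over the test buffer; verify() replays the same RNG into the reference memory.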
+class BufferCopyFromBuffer : public CmdCommand
+{
+public:
+									BufferCopyFromBuffer	(deUint32 seed) : m_seed(seed) {}
+									~BufferCopyFromBuffer	(void) {}
+	const char*						getName					(void) const { return "BufferCopyFromBuffer"; }
+
+	void							logPrepare				(TestLog& log, size_t commandIndex) const;
+	void							prepare					(PrepareContext& context);
+	void							logSubmit				(TestLog& log, size_t commandIndex) const;
+	void							submit					(SubmitContext& context);
+	void							verify					(VerifyContext& context, size_t commandIndex);
+
+private:
+	const deUint32					m_seed;
+	vk::VkDeviceSize				m_bufferSize;
+	vk::Move<vk::VkBuffer>			m_srcBuffer;
+	vk::Move<vk::VkDeviceMemory>	m_memory;
+};
+
+void BufferCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to buffer copy. Seed: " << m_seed << TestLog::EndMessage;
+}
+
+void BufferCopyFromBuffer::prepare (PrepareContext& context)
+{
+	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice				device			= context.getContext().getDevice();
+	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
+
+	m_bufferSize	= context.getBufferSize();
+	m_srcBuffer		= createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
+	m_memory		= bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+
+	{
+		void* const	ptr	= mapMemory(vkd, device, *m_memory, m_bufferSize);
+		de::Random	rng	(m_seed);
+
+		{
+			deUint8* const	data = (deUint8*)ptr;
+
+			for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
+				data[ndx] = rng.getUint8();
+		}
+
+		vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
+		vkd.unmapMemory(device, *m_memory);
+	}
+}
+
+void BufferCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from another buffer" << TestLog::EndMessage;
+}
+
+void BufferCopyFromBuffer::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
+	const vk::VkBufferCopy		range			=
+	{
+		0, 0, // Offsets
+		m_bufferSize
+	};
+
+	vkd.cmdCopyBuffer(commandBuffer, *m_srcBuffer, context.getBuffer(), 1, &range);
+}
+
+void BufferCopyFromBuffer::verify (VerifyContext& context, size_t)
+{
+	ReferenceMemory&	reference	(context.getReference());
+	de::Random			rng			(m_seed);
+
+	for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
+		reference.set(ndx, rng.getUint8());
+}
+
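+// Copies the test buffer into a newly created RGBA8 image. prepare() transitions the
+// destination image to TRANSFER_DST_OPTIMAL, and verify() copies the image back into a
+// host-visible buffer for a byte-wise comparison against the reference memory.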
+class BufferCopyToImage : public CmdCommand
+{
+public:
+									BufferCopyToImage	(void) {}
+									~BufferCopyToImage	(void) {}
+	const char*						getName				(void) const { return "BufferCopyToImage"; }
+
+	void							logPrepare			(TestLog& log, size_t commandIndex) const;
+	void							prepare				(PrepareContext& context);
+	void							logSubmit			(TestLog& log, size_t commandIndex) const;
+	void							submit				(SubmitContext& context);
+	void							verify				(VerifyContext& context, size_t commandIndex);
+
+private:
+	deInt32							m_imageWidth;
+	deInt32							m_imageHeight;
+	vk::Move<vk::VkImage>			m_dstImage;
+	vk::Move<vk::VkDeviceMemory>	m_memory;
+};
+
+void BufferCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for buffer to image copy." << TestLog::EndMessage;
+}
+
+void BufferCopyToImage::prepare (PrepareContext& context)
+{
+	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice				device			= context.getContext().getDevice();
+	const vk::VkQueue				queue			= context.getContext().getQueue();
+	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
+	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
+	const IVec2						imageSize		= findImageSizeWxHx4(context.getBufferSize());
+
+	m_imageWidth	= imageSize[0];
+	m_imageHeight	= imageSize[1];
+
+	{
+		const vk::VkImageCreateInfo	createInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+			DE_NULL,
+
+			0,
+			vk::VK_IMAGE_TYPE_2D,
+			vk::VK_FORMAT_R8G8B8A8_UNORM,
+			{
+				(deUint32)m_imageWidth,
+				(deUint32)m_imageHeight,
+				1u,
+			},
+			1, 1, // mipLevels, arrayLayers
+			vk::VK_SAMPLE_COUNT_1_BIT,
+
+			vk::VK_IMAGE_TILING_OPTIMAL,
+			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+			vk::VK_SHARING_MODE_EXCLUSIVE,
+
+			(deUint32)queueFamilies.size(),
+			&queueFamilies[0],
+			vk::VK_IMAGE_LAYOUT_UNDEFINED
+		};
+
+		m_dstImage = vk::createImage(vkd, device, &createInfo);
+	}
+
+	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
+
+	{
+		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+		const vk::VkImageMemoryBarrier			barrier			=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+			DE_NULL,
+
+			0,
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+
+			vk::VK_IMAGE_LAYOUT_UNDEFINED,
+			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+
+			*m_dstImage,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// Mip level
+				1,	// Mip level count
+				0,	// Layer
+				1	// Layer count
+			}
+		};
+
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
+
+		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
+		queueRun(vkd, queue, *commandBuffer);
+	}
+}
+
+void BufferCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to image" << TestLog::EndMessage;
+}
+
+void BufferCopyToImage::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
+	const vk::VkBufferImageCopy	region			=
+	{
+		0,
+		0, 0,
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,
+			0,	// mipLevel
+			0,	// arrayLayer
+			1	// layerCount
+		},
+		{ 0, 0, 0 },
+		{
+			(deUint32)m_imageWidth,
+			(deUint32)m_imageHeight,
+			1u
+		}
+	};
+
+	vkd.cmdCopyBufferToImage(commandBuffer, context.getBuffer(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
+}
+
+void BufferCopyToImage::verify (VerifyContext& context, size_t commandIndex)
+{
+	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
+	ReferenceMemory&						reference		(context.getReference());
+	const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice						device			= context.getContext().getDevice();
+	const vk::VkQueue						queue			= context.getContext().getQueue();
+	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
+	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+	const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
+	const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
+	const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
+	{
+		const vk::VkImageMemoryBarrier		imageBarrier	=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+			DE_NULL,
+
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+			vk::VK_ACCESS_TRANSFER_READ_BIT,
+
+			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+
+			*m_dstImage,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// Mip level
+				1,	// Mip level count
+				0,	// Layer
+				1	// Layer count
+			}
+		};
+		const vk::VkBufferMemoryBarrier bufferBarrier =
+		{
+			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+			DE_NULL,
+
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+			vk::VK_ACCESS_HOST_READ_BIT,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			*dstBuffer,
+			0,
+			vk::VK_WHOLE_SIZE
+		};
+
+		const vk::VkBufferImageCopy	region =
+		{
+			0,
+			0, 0,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// mipLevel
+				0,	// arrayLayer
+				1	// layerCount
+			},
+			{ 0, 0, 0 },
+			{
+				(deUint32)m_imageWidth,
+				(deUint32)m_imageHeight,
+				1u
+			}
+		};
+
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
+		vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+	}
+
+	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
+	queueRun(vkd, queue, *commandBuffer);
+
+	{
+		void* const	ptr		= mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
+
+		vk::invalidateMappedMemoryRange(vkd, device, *memory, 0,  4 * m_imageWidth * m_imageHeight);
+
+		{
+			const deUint8* const	data = (const deUint8*)ptr;
+
+			for (size_t pos = 0; pos < (size_t)( 4 * m_imageWidth * m_imageHeight); pos++)
+			{
+				if (reference.isDefined(pos))
+				{
+					if (data[pos] != reference.get(pos))
+					{
+						resultCollector.fail(
+								de::toString(commandIndex) + ":" + getName()
+								+ " Result differs from reference, Expected: "
+								+ de::toString(tcu::toHex<8>(reference.get(pos)))
+								+ ", Got: "
+								+ de::toString(tcu::toHex<8>(data[pos]))
+								+ ", At offset: "
+								+ de::toString(pos));
+						break;
+					}
+				}
+			}
+		}
+
+		vkd.unmapMemory(device, *memory);
+	}
+}
+
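+// Creates an RGBA8 source image, uploads seeded pseudorandom texels to it through a
+// staging buffer, and copies the image into the test buffer; verify() replays the RNG
+// into the reference memory.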
+class BufferCopyFromImage : public CmdCommand
+{
+public:
+									BufferCopyFromImage		(deUint32 seed) : m_seed(seed) {}
+									~BufferCopyFromImage	(void) {}
+	const char*						getName					(void) const { return "BufferCopyFromImage"; }
+
+	void							logPrepare				(TestLog& log, size_t commandIndex) const;
+	void							prepare					(PrepareContext& context);
+	void							logSubmit				(TestLog& log, size_t commandIndex) const;
+	void							submit					(SubmitContext& context);
+	void							verify					(VerifyContext& context, size_t commandIndex);
+
+private:
+	const deUint32					m_seed;
+	deInt32							m_imageWidth;
+	deInt32							m_imageHeight;
+	vk::Move<vk::VkImage>			m_srcImage;
+	vk::Move<vk::VkDeviceMemory>	m_memory;
+};
+
+void BufferCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to buffer copy." << TestLog::EndMessage;
+}
+
+void BufferCopyFromImage::prepare (PrepareContext& context)
+{
+	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice				device			= context.getContext().getDevice();
+	const vk::VkQueue				queue			= context.getContext().getQueue();
+	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
+	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
+	const IVec2						imageSize		= findImageSizeWxHx4(context.getBufferSize());
+
+	m_imageWidth	= imageSize[0];
+	m_imageHeight	= imageSize[1];
+
+	{
+		const vk::VkImageCreateInfo	createInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+			DE_NULL,
+
+			0,
+			vk::VK_IMAGE_TYPE_2D,
+			vk::VK_FORMAT_R8G8B8A8_UNORM,
+			{
+				(deUint32)m_imageWidth,
+				(deUint32)m_imageHeight,
+				1u,
+			},
+			1, 1, // mipLevels, arrayLayers
+			vk::VK_SAMPLE_COUNT_1_BIT,
+
+			vk::VK_IMAGE_TILING_OPTIMAL,
+			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+			vk::VK_SHARING_MODE_EXCLUSIVE,
+
+			(deUint32)queueFamilies.size(),
+			&queueFamilies[0],
+			vk::VK_IMAGE_LAYOUT_UNDEFINED
+		};
+
+		m_srcImage = vk::createImage(vkd, device, &createInfo);
+	}
+
+	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
+
+	{
+		const vk::Unique<vk::VkBuffer>			srcBuffer		(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
+		const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
+		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+		const vk::VkImageMemoryBarrier			preImageBarrier	=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+			DE_NULL,
+
+			0,
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+
+			vk::VK_IMAGE_LAYOUT_UNDEFINED,
+			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+
+			*m_srcImage,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// Mip level
+				1,	// Mip level count
+				0,	// Layer
+				1	// Layer count
+			}
+		};
+		const vk::VkImageMemoryBarrier			postImageBarrier =
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+			DE_NULL,
+
+			0,
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+
+			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+
+			*m_srcImage,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// Mip level
+				1,	// Mip level count
+				0,	// Layer
+				1	// Layer count
+			}
+		};
+		const vk::VkBufferImageCopy				region				=
+		{
+			0,
+			0, 0,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// mipLevel
+				0,	// arrayLayer
+				1	// layerCount
+			},
+			{ 0, 0, 0 },
+			{
+				(deUint32)m_imageWidth,
+				(deUint32)m_imageHeight,
+				1u
+			}
+		};
+
+		{
+			void* const	ptr	= mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
+			de::Random	rng	(m_seed);
+
+			{
+				deUint8* const	data = (deUint8*)ptr;
+
+				for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
+					data[ndx] = rng.getUint8();
+			}
+
+			vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
+			vkd.unmapMemory(device, *memory);
+		}
+
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
+		vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
+
+		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
+		queueRun(vkd, queue, *commandBuffer);
+	}
+}
+
+void BufferCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from image" << TestLog::EndMessage;
+}
+
+void BufferCopyFromImage::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
+	const vk::VkBufferImageCopy	region			=
+	{
+		0,
+		0, 0,
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,
+			0,	// mipLevel
+			0,	// arrayLayer
+			1	// layerCount
+		},
+		{ 0, 0, 0 },
+		{
+			(deUint32)m_imageWidth,
+			(deUint32)m_imageHeight,
+			1u
+		}
+	};
+
+	vkd.cmdCopyImageToBuffer(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getBuffer(), 1, &region);
+}
+
+void BufferCopyFromImage::verify (VerifyContext& context, size_t)
+{
+	ReferenceMemory&	reference		(context.getReference());
+	de::Random			rng	(m_seed);
+
+	for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
+		reference.set(ndx, rng.getUint8());
+}
+
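+// Copies the test image into a host-visible buffer. verify() marks the raw image memory
+// as undefined in the byte-level reference and compares the readback against the
+// reference image instead.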
+class ImageCopyToBuffer : public CmdCommand
+{
+public:
+									ImageCopyToBuffer	(void) {}
+									~ImageCopyToBuffer	(void) {}
+	const char*						getName				(void) const { return "ImageCopyToBuffer"; }
+
+	void							logPrepare			(TestLog& log, size_t commandIndex) const;
+	void							prepare				(PrepareContext& context);
+	void							logSubmit			(TestLog& log, size_t commandIndex) const;
+	void							submit				(SubmitContext& context);
+	void							verify				(VerifyContext& context, size_t commandIndex);
+
+private:
+	vk::VkDeviceSize				m_bufferSize;
+	vk::Move<vk::VkBuffer>			m_dstBuffer;
+	vk::Move<vk::VkDeviceMemory>	m_memory;
+	vk::VkDeviceSize				m_imageMemorySize;
+	deInt32							m_imageWidth;
+	deInt32							m_imageHeight;
+};
+
+void ImageCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for image to buffer copy." << TestLog::EndMessage;
+}
+
+void ImageCopyToBuffer::prepare (PrepareContext& context)
+{
+	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice				device			= context.getContext().getDevice();
+	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
+
+	m_imageWidth		= context.getImageWidth();
+	m_imageHeight		= context.getImageHeight();
+	m_bufferSize		= 4 * m_imageWidth * m_imageHeight;
+	m_imageMemorySize	= context.getImageMemorySize();
+	m_dstBuffer			= createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
+	m_memory			= bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+}
+
+void ImageCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to buffer" << TestLog::EndMessage;
+}
+
+void ImageCopyToBuffer::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
+	const vk::VkBufferImageCopy	region			=
+	{
+		0,
+		0, 0,
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,
+			0,	// mipLevel
+			0,	// arrayLayer
+			1	// layerCount
+		},
+		{ 0, 0, 0 },
+		{
+			(deUint32)m_imageWidth,
+			(deUint32)m_imageHeight,
+			1u
+		}
+	};
+
+	vkd.cmdCopyImageToBuffer(commandBuffer, context.getImage(), context.getImageLayout(), *m_dstBuffer, 1, &region);
+}
+
+void ImageCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
+{
+	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
+	ReferenceMemory&						reference		(context.getReference());
+	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
+	const vk::VkDevice						device			= context.getContext().getDevice();
+	const vk::VkQueue						queue			= context.getContext().getQueue();
+	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
+	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+	const vk::VkBufferMemoryBarrier			barrier			=
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+		DE_NULL,
+
+		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+		vk::VK_ACCESS_HOST_READ_BIT,
+
+		vk::VK_QUEUE_FAMILY_IGNORED,
+		vk::VK_QUEUE_FAMILY_IGNORED,
+		*m_dstBuffer,
+		0,
+		vk::VK_WHOLE_SIZE
+	};
+
+	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+
+	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
+	queueRun(vkd, queue, *commandBuffer);
+
+	reference.setUndefined(0, (size_t)m_imageMemorySize);
+	{
+		void* const						ptr				= mapMemory(vkd, device, *m_memory, m_bufferSize);
+		const ConstPixelBufferAccess	referenceImage	(context.getReferenceImage().getAccess());
+		const ConstPixelBufferAccess	resultImage		(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, ptr);
+
+		vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
+
+		if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), referenceImage, resultImage, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
+			resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
+
+		vkd.unmapMemory(device, *m_memory);
+	}
+}
+
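+// Uploads seeded pseudorandom data from a host-visible buffer into the test image.
+// verify() marks the raw image memory as undefined and regenerates the expected texels
+// into the reference image with the same RNG.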
+class ImageCopyFromBuffer : public CmdCommand
+{
+public:
+									ImageCopyFromBuffer		(deUint32 seed) : m_seed(seed) {}
+									~ImageCopyFromBuffer	(void) {}
+	const char*						getName					(void) const { return "ImageCopyFromBuffer"; }
+
+	void							logPrepare				(TestLog& log, size_t commandIndex) const;
+	void							prepare					(PrepareContext& context);
+	void							logSubmit				(TestLog& log, size_t commandIndex) const;
+	void							submit					(SubmitContext& context);
+	void							verify					(VerifyContext& context, size_t commandIndex);
+
+private:
+	const deUint32					m_seed;
+	deInt32							m_imageWidth;
+	deInt32							m_imageHeight;
+	vk::VkDeviceSize				m_imageMemorySize;
+	vk::VkDeviceSize				m_bufferSize;
+	vk::Move<vk::VkBuffer>			m_srcBuffer;
+	vk::Move<vk::VkDeviceMemory>	m_memory;
+};
+
+void ImageCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to image copy. Seed: " << m_seed << TestLog::EndMessage;
+}
+
+void ImageCopyFromBuffer::prepare (PrepareContext& context)
+{
+	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice				device			= context.getContext().getDevice();
+	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
+
+	m_imageWidth		= context.getImageWidth();
+	m_imageHeight		= context.getImageHeight();
+	m_imageMemorySize	= context.getImageMemorySize();
+	m_bufferSize		= m_imageWidth * m_imageHeight * 4;
+	m_srcBuffer			= createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
+	m_memory			= bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
+
+	{
+		void* const	ptr	= mapMemory(vkd, device, *m_memory, m_bufferSize);
+		de::Random	rng	(m_seed);
+
+		{
+			deUint8* const	data = (deUint8*)ptr;
+
+			for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
+				data[ndx] = rng.getUint8();
+		}
+
+		vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
+		vkd.unmapMemory(device, *m_memory);
+	}
+}
+
+void ImageCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from buffer" << TestLog::EndMessage;
+}
+
+void ImageCopyFromBuffer::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
+	const vk::VkBufferImageCopy	region			=
+	{
+		0,
+		0, 0,
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,
+			0,	// mipLevel
+			0,	// arrayLayer
+			1	// layerCount
+		},
+		{ 0, 0, 0 },
+		{
+			(deUint32)m_imageWidth,
+			(deUint32)m_imageHeight,
+			1u
+		}
+	};
+
+	vkd.cmdCopyBufferToImage(commandBuffer, *m_srcBuffer, context.getImage(), context.getImageLayout(), 1, &region);
+}
+
+void ImageCopyFromBuffer::verify (VerifyContext& context, size_t)
+{
+	ReferenceMemory&	reference	(context.getReference());
+	de::Random			rng			(m_seed);
+
+	reference.setUndefined(0, (size_t)m_imageMemorySize);
+
+	{
+		const PixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
+
+		for (deInt32 y = 0; y < m_imageHeight; y++)
+		for (deInt32 x = 0; x < m_imageWidth; x++)
+		{
+			const deUint8 r8 = rng.getUint8();
+			const deUint8 g8 = rng.getUint8();
+			const deUint8 b8 = rng.getUint8();
+			const deUint8 a8 = rng.getUint8();
+
+			refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
+		}
+	}
+}
+
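+// Creates a second RGBA8 image with seeded pseudorandom contents and copies it over the
+// test image; verify() regenerates the expected texels into the reference image.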
+class ImageCopyFromImage : public CmdCommand
+{
+public:
+									ImageCopyFromImage	(deUint32 seed) : m_seed(seed) {}
+									~ImageCopyFromImage	(void) {}
+	const char*						getName				(void) const { return "ImageCopyFromImage"; }
+
+	void							logPrepare			(TestLog& log, size_t commandIndex) const;
+	void							prepare				(PrepareContext& context);
+	void							logSubmit			(TestLog& log, size_t commandIndex) const;
+	void							submit				(SubmitContext& context);
+	void							verify				(VerifyContext& context, size_t commandIndex);
+
+private:
+	const deUint32					m_seed;
+	deInt32							m_imageWidth;
+	deInt32							m_imageHeight;
+	vk::VkDeviceSize				m_imageMemorySize;
+	vk::Move<vk::VkImage>			m_srcImage;
+	vk::Move<vk::VkDeviceMemory>	m_memory;
+};
+
+void ImageCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image copy." << TestLog::EndMessage;
+}
+
+void ImageCopyFromImage::prepare (PrepareContext& context)
+{
+	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice				device			= context.getContext().getDevice();
+	const vk::VkQueue				queue			= context.getContext().getQueue();
+	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
+	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
+
+	m_imageWidth		= context.getImageWidth();
+	m_imageHeight		= context.getImageHeight();
+	m_imageMemorySize	= context.getImageMemorySize();
+
+	{
+		const vk::VkImageCreateInfo	createInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+			DE_NULL,
+
+			0,
+			vk::VK_IMAGE_TYPE_2D,
+			vk::VK_FORMAT_R8G8B8A8_UNORM,
+			{
+				(deUint32)m_imageWidth,
+				(deUint32)m_imageHeight,
+				1u,
+			},
+			1, 1, // mipLevels, arrayLayers
+			vk::VK_SAMPLE_COUNT_1_BIT,
+
+			vk::VK_IMAGE_TILING_OPTIMAL,
+			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+			vk::VK_SHARING_MODE_EXCLUSIVE,
+
+			(deUint32)queueFamilies.size(),
+			&queueFamilies[0],
+			vk::VK_IMAGE_LAYOUT_UNDEFINED
+		};
+
+		m_srcImage = vk::createImage(vkd, device, &createInfo);
+	}
+
+	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
+
+	{
+		const vk::Unique<vk::VkBuffer>			srcBuffer		(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
+		const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
+		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+		const vk::VkImageMemoryBarrier			preImageBarrier	=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+			DE_NULL,
+
+			0,
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+
+			vk::VK_IMAGE_LAYOUT_UNDEFINED,
+			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+
+			*m_srcImage,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// Mip level
+				1,	// Mip level count
+				0,	// Layer
+				1	// Layer count
+			}
+		};
+		const vk::VkImageMemoryBarrier			postImageBarrier =
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+			DE_NULL,
+
+			0,
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+
+			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+
+			*m_srcImage,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// Mip level
+				1,	// Mip level count
+				0,	// Layer
+				1	// Layer count
+			}
+		};
+		const vk::VkBufferImageCopy				region				=
+		{
+			0,
+			0, 0,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// mipLevel
+				0,	// arrayLayer
+				1	// layerCount
+			},
+			{ 0, 0, 0 },
+			{
+				(deUint32)m_imageWidth,
+				(deUint32)m_imageHeight,
+				1u
+			}
+		};
+
+		{
+			void* const	ptr	= mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
+			de::Random	rng	(m_seed);
+
+			{
+				deUint8* const	data = (deUint8*)ptr;
+
+				for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
+					data[ndx] = rng.getUint8();
+			}
+
+			vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
+			vkd.unmapMemory(device, *memory);
+		}
+
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
+		vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
+
+		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
+		queueRun(vkd, queue, *commandBuffer);
+	}
+}
+
+void ImageCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from another image" << TestLog::EndMessage;
+}
+
+void ImageCopyFromImage::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
+	const vk::VkImageCopy		region			=
+	{
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,
+			0,	// mipLevel
+			0,	// arrayLayer
+			1	// layerCount
+		},
+		{ 0, 0, 0 },
+
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,
+			0,	// mipLevel
+			0,	// arrayLayer
+			1	// layerCount
+		},
+		{ 0, 0, 0 },
+		{
+			(deUint32)m_imageWidth,
+			(deUint32)m_imageHeight,
+			1u
+		}
+	};
+
+	vkd.cmdCopyImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), context.getImageLayout(), 1, &region);
+}
+
+void ImageCopyFromImage::verify (VerifyContext& context, size_t)
+{
+	ReferenceMemory&	reference	(context.getReference());
+	de::Random			rng			(m_seed);
+
+	reference.setUndefined(0, (size_t)m_imageMemorySize);
+
+	{
+		const PixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
+
+		for (deInt32 y = 0; y < m_imageHeight; y++)
+		for (deInt32 x = 0; x < m_imageWidth; x++)
+		{
+			const deUint8 r8 = rng.getUint8();
+			const deUint8 g8 = rng.getUint8();
+			const deUint8 b8 = rng.getUint8();
+			const deUint8 a8 = rng.getUint8();
+
+			refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
+		}
+	}
+}
+
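+// Copies the test image into a newly created RGBA8 image and, in verify(), reads the copy
+// back through a host-visible buffer for comparison against the reference image.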
+class ImageCopyToImage : public CmdCommand
+{
+public:
+									ImageCopyToImage	(void) {}
+									~ImageCopyToImage	(void) {}
+	const char*						getName				(void) const { return "ImageCopyToImage"; }
+
+	void							logPrepare			(TestLog& log, size_t commandIndex) const;
+	void							prepare				(PrepareContext& context);
+	void							logSubmit			(TestLog& log, size_t commandIndex) const;
+	void							submit				(SubmitContext& context);
+	void							verify				(VerifyContext& context, size_t commandIndex);
+
+private:
+	deInt32							m_imageWidth;
+	deInt32							m_imageHeight;
+	vk::VkDeviceSize				m_imageMemorySize;
+	vk::Move<vk::VkImage>			m_dstImage;
+	vk::Move<vk::VkDeviceMemory>	m_memory;
+};
+
+void ImageCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image copy." << TestLog::EndMessage;
+}
+
+void ImageCopyToImage::prepare (PrepareContext& context)
+{
+	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice				device			= context.getContext().getDevice();
+	const vk::VkQueue				queue			= context.getContext().getQueue();
+	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
+	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
+
+	m_imageWidth		= context.getImageWidth();
+	m_imageHeight		= context.getImageHeight();
+	m_imageMemorySize	= context.getImageMemorySize();
+
+	{
+		const vk::VkImageCreateInfo	createInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+			DE_NULL,
+
+			0,
+			vk::VK_IMAGE_TYPE_2D,
+			vk::VK_FORMAT_R8G8B8A8_UNORM,
+			{
+				(deUint32)m_imageWidth,
+				(deUint32)m_imageHeight,
+				1u,
+			},
+			1, 1, // mipLevels, arrayLayers
+			vk::VK_SAMPLE_COUNT_1_BIT,
+
+			vk::VK_IMAGE_TILING_OPTIMAL,
+			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+			vk::VK_SHARING_MODE_EXCLUSIVE,
+
+			(deUint32)queueFamilies.size(),
+			&queueFamilies[0],
+			vk::VK_IMAGE_LAYOUT_UNDEFINED
+		};
+
+		m_dstImage = vk::createImage(vkd, device, &createInfo);
+	}
+
+	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
+
+	{
+		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+		const vk::VkImageMemoryBarrier			barrier			=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+			DE_NULL,
+
+			0,
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+
+			vk::VK_IMAGE_LAYOUT_UNDEFINED,
+			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+
+			*m_dstImage,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// Mip level
+				1,	// Mip level count
+				0,	// Layer
+				1	// Layer count
+			}
+		};
+
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
+
+		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
+		queueRun(vkd, queue, *commandBuffer);
+	}
+}
+
+void ImageCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to another image" << TestLog::EndMessage;
+}
+
+void ImageCopyToImage::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
+	const vk::VkImageCopy		region			=
+	{
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,
+			0,	// mipLevel
+			0,	// arrayLayer
+			1	// layerCount
+		},
+		{ 0, 0, 0 },
+
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,
+			0,	// mipLevel
+			0,	// arrayLayer
+			1	// layerCount
+		},
+		{ 0, 0, 0 },
+		{
+			(deUint32)m_imageWidth,
+			(deUint32)m_imageHeight,
+			1u
+		}
+	};
+
+	vkd.cmdCopyImage(commandBuffer, context.getImage(), context.getImageLayout(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
+}
+
+void ImageCopyToImage::verify (VerifyContext& context, size_t commandIndex)
+{
+	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
+	const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice						device			= context.getContext().getDevice();
+	const vk::VkQueue						queue			= context.getContext().getQueue();
+	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
+	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+	const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
+	const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
+	const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
+	{
+		const vk::VkImageMemoryBarrier		imageBarrier	=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+			DE_NULL,
+
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+			vk::VK_ACCESS_TRANSFER_READ_BIT,
+
+			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+
+			*m_dstImage,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// Mip level
+				1,	// Mip level count
+				0,	// Layer
+				1	// Layer count
+			}
+		};
+		const vk::VkBufferMemoryBarrier bufferBarrier =
+		{
+			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+			DE_NULL,
+
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+			vk::VK_ACCESS_HOST_READ_BIT,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			*dstBuffer,
+			0,
+			vk::VK_WHOLE_SIZE
+		};
+		const vk::VkBufferImageCopy	region =
+		{
+			0,
+			0, 0,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// mipLevel
+				0,	// arrayLayer
+				1	// layerCount
+			},
+			{ 0, 0, 0 },
+			{
+				(deUint32)m_imageWidth,
+				(deUint32)m_imageHeight,
+				1u
+			}
+		};
+
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
+		vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+	}
+
+	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
+	queueRun(vkd, queue, *commandBuffer);
+
+	{
+		void* const	ptr		= mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
+
+		vk::invalidateMappedMemoryRange(vkd, device, *memory, 0,  4 * m_imageWidth * m_imageHeight);
+
+		{
+			const deUint8* const			data		= (const deUint8*)ptr;
+			const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, data);
+			const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
+
+			if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
+				resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
+		}
+
+		vkd.unmapMemory(device, *memory);
+	}
+}
+
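+// Blit commands are tested in two configurations: a 1:1 blit (BLIT_SCALE_10) and a 2x
+// scaling blit (BLIT_SCALE_20) where source and destination differ by a factor of two
+// in both dimensions.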
+enum BlitScale
+{
+	BLIT_SCALE_20,
+	BLIT_SCALE_10,
+};
+
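+// Blits from a separately created source image into the test image using nearest
+// filtering. With BLIT_SCALE_20 the source is half the destination size, so verify()
+// models the upscale by sampling the source texel at (x / 2, y / 2).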
+class ImageBlitFromImage : public CmdCommand
+{
+public:
+									ImageBlitFromImage	(deUint32 seed, BlitScale scale) : m_seed(seed), m_scale(scale) {}
+									~ImageBlitFromImage	(void) {}
+	const char*						getName				(void) const { return "ImageBlitFromImage"; }
+
+	void							logPrepare			(TestLog& log, size_t commandIndex) const;
+	void							prepare				(PrepareContext& context);
+	void							logSubmit			(TestLog& log, size_t commandIndex) const;
+	void							submit				(SubmitContext& context);
+	void							verify				(VerifyContext& context, size_t commandIndex);
+
+private:
+	const deUint32					m_seed;
+	const BlitScale					m_scale;
+	deInt32							m_imageWidth;
+	deInt32							m_imageHeight;
+	vk::VkDeviceSize				m_imageMemorySize;
+	deInt32							m_srcImageWidth;
+	deInt32							m_srcImageHeight;
+	vk::Move<vk::VkImage>			m_srcImage;
+	vk::Move<vk::VkDeviceMemory>	m_memory;
+};
+
+void ImageBlitFromImage::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image blit." << TestLog::EndMessage;
+}
+
+void ImageBlitFromImage::prepare (PrepareContext& context)
+{
+	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice				device			= context.getContext().getDevice();
+	const vk::VkQueue				queue			= context.getContext().getQueue();
+	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
+	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
+
+	m_imageWidth		= context.getImageWidth();
+	m_imageHeight		= context.getImageHeight();
+	m_imageMemorySize	= context.getImageMemorySize();
+
+	if (m_scale == BLIT_SCALE_10)
+	{
+		m_srcImageWidth			= m_imageWidth;
+		m_srcImageHeight		= m_imageHeight;
+	}
+	else if (m_scale == BLIT_SCALE_20)
+	{
+		m_srcImageWidth			= m_imageWidth / 2;
+		m_srcImageHeight		= m_imageHeight / 2;
+	}
+	else
+		DE_FATAL("Unsupported scale");
+
+	{
+		const vk::VkImageCreateInfo	createInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+			DE_NULL,
+
+			0,
+			vk::VK_IMAGE_TYPE_2D,
+			vk::VK_FORMAT_R8G8B8A8_UNORM,
+			{
+				(deUint32)m_srcImageWidth,
+				(deUint32)m_srcImageHeight,
+				1u,
+			},
+			1, 1, // mipLevels, arrayLayers
+			vk::VK_SAMPLE_COUNT_1_BIT,
+
+			vk::VK_IMAGE_TILING_OPTIMAL,
+			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+			vk::VK_SHARING_MODE_EXCLUSIVE,
+
+			(deUint32)queueFamilies.size(),
+			&queueFamilies[0],
+			vk::VK_IMAGE_LAYOUT_UNDEFINED
+		};
+
+		m_srcImage = vk::createImage(vkd, device, &createInfo);
+	}
+
+	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
+
+	{
+		const vk::Unique<vk::VkBuffer>			srcBuffer		(createBuffer(vkd, device, 4 * m_srcImageWidth * m_srcImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
+		const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
+		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+		const vk::VkImageMemoryBarrier			preImageBarrier	=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+			DE_NULL,
+
+			0,
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+
+			vk::VK_IMAGE_LAYOUT_UNDEFINED,
+			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+
+			*m_srcImage,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// Mip level
+				1,	// Mip level count
+				0,	// Layer
+				1	// Layer count
+			}
+		};
+		const vk::VkImageMemoryBarrier			postImageBarrier =
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+			DE_NULL,
+
+			0,
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+
+			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+
+			*m_srcImage,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// Mip level
+				1,	// Mip level count
+				0,	// Layer
+				1	// Layer count
+			}
+		};
+		const vk::VkBufferImageCopy				region				=
+		{
+			0,
+			0, 0,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// mipLevel
+				0,	// arrayLayer
+				1	// layerCount
+			},
+			{ 0, 0, 0 },
+			{
+				(deUint32)m_srcImageWidth,
+				(deUint32)m_srcImageHeight,
+				1u
+			}
+		};
+
+		{
+			void* const	ptr	= mapMemory(vkd, device, *memory, 4 * m_srcImageWidth * m_srcImageHeight);
+			de::Random	rng	(m_seed);
+
+			{
+				deUint8* const	data = (deUint8*)ptr;
+
+				for (size_t ndx = 0; ndx < (size_t)(4 * m_srcImageWidth * m_srcImageHeight); ndx++)
+					data[ndx] = rng.getUint8();
+			}
+
+			vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_srcImageWidth * m_srcImageHeight);
+			vkd.unmapMemory(device, *memory);
+		}
+
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
+		vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
+
+		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
+		queueRun(vkd, queue, *commandBuffer);
+	}
+}
+
+void ImageBlitFromImage::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Blit from another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "")  << TestLog::EndMessage;
+}
+
+void ImageBlitFromImage::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
+	const vk::VkImageBlit		region			=
+	{
+		// Src
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,
+			0,	// mipLevel
+			0,	// arrayLayer
+			1	// layerCount
+		},
+		{
+			{ 0, 0, 0 },
+			{
+				m_srcImageWidth,
+				m_srcImageHeight,
+				1
+			},
+		},
+
+		// Dst
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,
+			0,	// mipLevel
+			0,	// arrayLayer
+			1	// layerCount
+		},
+		{
+			{ 0, 0, 0 },
+			{
+				m_imageWidth,
+				m_imageHeight,
+				1u
+			}
+		}
+	};
+	vkd.cmdBlitImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), context.getImageLayout(), 1, &region, vk::VK_FILTER_NEAREST);
+}
+
+void ImageBlitFromImage::verify (VerifyContext& context, size_t)
+{
+	ReferenceMemory&	reference	(context.getReference());
+	de::Random			rng			(m_seed);
+
+	reference.setUndefined(0, (size_t)m_imageMemorySize);
+
+	{
+		const PixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
+
+		if (m_scale == BLIT_SCALE_10)
+		{
+			for (deInt32 y = 0; y < m_imageHeight; y++)
+			for (deInt32 x = 0; x < m_imageWidth; x++)
+			{
+				const deUint8 r8 = rng.getUint8();
+				const deUint8 g8 = rng.getUint8();
+				const deUint8 b8 = rng.getUint8();
+				const deUint8 a8 = rng.getUint8();
+
+				refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
+			}
+		}
+		else if (m_scale == BLIT_SCALE_20)
+		{
+			tcu::TextureLevel source (TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_srcImageWidth, m_srcImageHeight);
+
+			for (deInt32 y = 0; y < m_srcImageHeight; y++)
+			for (deInt32 x = 0; x < m_srcImageWidth; x++)
+			{
+				const deUint8 r8 = rng.getUint8();
+				const deUint8 g8 = rng.getUint8();
+				const deUint8 b8 = rng.getUint8();
+				const deUint8 a8 = rng.getUint8();
+
+				source.getAccess().setPixel(UVec4(r8, g8, b8, a8), x, y);
+			}
+
+			for (deInt32 y = 0; y < m_imageHeight; y++)
+			for (deInt32 x = 0; x < m_imageWidth; x++)
+				refAccess.setPixel(source.getAccess().getPixelUint(x / 2, y / 2), x, y);
+		}
+		else
+			DE_FATAL("Unsupported scale");
+	}
+}
+
+class ImageBlitToImage : public CmdCommand
+{
+public:
+									ImageBlitToImage	(BlitScale scale) : m_scale(scale) {}
+									~ImageBlitToImage	(void) {}
+	const char*						getName				(void) const { return "ImageBlitToImage"; }
+
+	void							logPrepare			(TestLog& log, size_t commandIndex) const;
+	void							prepare				(PrepareContext& context);
+	void							logSubmit			(TestLog& log, size_t commandIndex) const;
+	void							submit				(SubmitContext& context);
+	void							verify				(VerifyContext& context, size_t commandIndex);
+
+private:
+	const BlitScale					m_scale;
+	deInt32							m_imageWidth;
+	deInt32							m_imageHeight;
+	vk::VkDeviceSize				m_imageMemorySize;
+	deInt32							m_dstImageWidth;
+	deInt32							m_dstImageHeight;
+	vk::Move<vk::VkImage>			m_dstImage;
+	vk::Move<vk::VkDeviceMemory>	m_memory;
+};
+
+void ImageBlitToImage::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image blit." << TestLog::EndMessage;
+}
+
+void ImageBlitToImage::prepare (PrepareContext& context)
+{
+	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice				device			= context.getContext().getDevice();
+	const vk::VkQueue				queue			= context.getContext().getQueue();
+	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
+	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
+
+	m_imageWidth		= context.getImageWidth();
+	m_imageHeight		= context.getImageHeight();
+	m_imageMemorySize	= context.getImageMemorySize();
+
+	if (m_scale == BLIT_SCALE_10)
+	{
+		m_dstImageWidth		= context.getImageWidth();
+		m_dstImageHeight	= context.getImageHeight();
+	}
+	else if (m_scale == BLIT_SCALE_20)
+	{
+		m_dstImageWidth		= context.getImageWidth() * 2;
+		m_dstImageHeight	= context.getImageHeight() * 2;
+	}
+	else
+		DE_FATAL("Unsupportd blit scale");
+
+	{
+		const vk::VkImageCreateInfo	createInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+			DE_NULL,
+
+			0,
+			vk::VK_IMAGE_TYPE_2D,
+			vk::VK_FORMAT_R8G8B8A8_UNORM,
+			{
+				(deUint32)m_dstImageWidth,
+				(deUint32)m_dstImageHeight,
+				1u,
+			},
+			1, 1, // mipLevels, arrayLayers
+			vk::VK_SAMPLE_COUNT_1_BIT,
+
+			vk::VK_IMAGE_TILING_OPTIMAL,
+			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+			vk::VK_SHARING_MODE_EXCLUSIVE,
+
+			(deUint32)queueFamilies.size(),
+			&queueFamilies[0],
+			vk::VK_IMAGE_LAYOUT_UNDEFINED
+		};
+
+		m_dstImage = vk::createImage(vkd, device, &createInfo);
+	}
+
+	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
+
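+	// Transition the freshly created destination image from UNDEFINED to TRANSFER_DST_OPTIMAL so it can receive the blit.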
+	{
+		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+		const vk::VkImageMemoryBarrier			barrier			=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+			DE_NULL,
+
+			0,
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+
+			vk::VK_IMAGE_LAYOUT_UNDEFINED,
+			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+
+			*m_dstImage,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// Mip level
+				1,	// Mip level count
+				0,	// Layer
+				1	// Layer count
+			}
+		};
+
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
+
+		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
+		queueRun(vkd, queue, *commandBuffer);
+	}
+}
+
+void ImageBlitToImage::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Blit image to another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "")  << TestLog::EndMessage;
+}
+
+void ImageBlitToImage::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
+	const vk::VkImageBlit		region			=
+	{
+		// Src
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,
+			0,	// mipLevel
+			0,	// arrayLayer
+			1	// layerCount
+		},
+		{
+			{ 0, 0, 0 },
+			{
+				m_imageWidth,
+				m_imageHeight,
+				1
+			},
+		},
+
+		// Dst
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,
+			0,	// mipLevel
+			0,	// arrayLayer
+			1	// layerCount
+		},
+		{
+			{ 0, 0, 0 },
+			{
+				m_dstImageWidth,
+				m_dstImageHeight,
+				1u
+			}
+		}
+	};
+	vkd.cmdBlitImage(commandBuffer, context.getImage(), context.getImageLayout(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region, vk::VK_FILTER_NEAREST);
+}
+
+void ImageBlitToImage::verify (VerifyContext& context, size_t commandIndex)
+{
+	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
+	const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice						device			= context.getContext().getDevice();
+	const vk::VkQueue						queue			= context.getContext().getQueue();
+	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
+	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+	const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
+	const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_dstImageWidth * m_dstImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
+	const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
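+	// Read the blit result back: transition the destination image for transfer reads, copy it into a host-visible buffer and make the writes visible to the host.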
+	{
+		const vk::VkImageMemoryBarrier		imageBarrier	=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+			DE_NULL,
+
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+			vk::VK_ACCESS_TRANSFER_READ_BIT,
+
+			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+
+			*m_dstImage,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// Mip level
+				1,	// Mip level count
+				0,	// Layer
+				1	// Layer count
+			}
+		};
+		const vk::VkBufferMemoryBarrier bufferBarrier =
+		{
+			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+			DE_NULL,
+
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+			vk::VK_ACCESS_HOST_READ_BIT,
+
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			vk::VK_QUEUE_FAMILY_IGNORED,
+			*dstBuffer,
+			0,
+			vk::VK_WHOLE_SIZE
+		};
+		const vk::VkBufferImageCopy	region =
+		{
+			0,
+			0, 0,
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0,	// mipLevel
+				0,	// arrayLayer
+				1	// layerCount
+			},
+			{ 0, 0, 0 },
+			{
+				(deUint32)m_dstImageWidth,
+				(deUint32)m_dstImageHeight,
+				1
+			}
+		};
+
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
+		vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
+		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+	}
+
+	VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
+	queueRun(vkd, queue, *commandBuffer);
+
+	{
+		void* const	ptr		= mapMemory(vkd, device, *memory, 4 * m_dstImageWidth * m_dstImageHeight);
+
+		vk::invalidateMappedMemoryRange(vkd, device, *memory, 0,  4 * m_dstImageWidth * m_dstImageHeight);
+
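+		// For a 1x blit the result must match the reference image exactly; for a 2x blit each destination pixel must equal the reference pixel at half its coordinates.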
+		if (m_scale == BLIT_SCALE_10)
+		{
+			const deUint8* const			data		= (const deUint8*)ptr;
+			const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
+			const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
+
+			if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
+				resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
+		}
+		else if (m_scale == BLIT_SCALE_20)
+		{
+			const deUint8* const			data		= (const deUint8*)ptr;
+			const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
+			tcu::TextureLevel				reference	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1);
+
+			{
+				const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
+
+				for (deInt32 y = 0; y < m_dstImageHeight; y++)
+				for (deInt32 x = 0; x < m_dstImageWidth; x++)
+				{
+					reference.getAccess().setPixel(refAccess.getPixel(x/2, y/2), x, y);
+				}
+			}
+
+			if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), reference.getAccess(), resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
+				resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
+		}
+		else
+			DE_FATAL("Unknown scale");
+
+		vkd.unmapMemory(device, *memory);
+	}
+}
+
+class PrepareRenderPassContext
+{
+public:
+								PrepareRenderPassContext	(PrepareContext&	context,
+															 vk::VkRenderPass	renderPass,
+															 vk::VkFramebuffer	framebuffer,
+															 deInt32			targetWidth,
+															 deInt32			targetHeight)
+		: m_context			(context)
+		, m_renderPass		(renderPass)
+		, m_framebuffer		(framebuffer)
+		, m_targetWidth		(targetWidth)
+		, m_targetHeight	(targetHeight)
+	{
+	}
+
+	const Memory&									getMemory					(void) const { return m_context.getMemory(); }
+	const Context&									getContext					(void) const { return m_context.getContext(); }
+	const vk::ProgramCollection<vk::ProgramBinary>&	getBinaryCollection			(void) const { return m_context.getBinaryCollection(); }
+
+	vk::VkBuffer				getBuffer					(void) const { return m_context.getBuffer(); }
+	vk::VkDeviceSize			getBufferSize				(void) const { return m_context.getBufferSize(); }
+
+	vk::VkImage					getImage					(void) const { return m_context.getImage(); }
+	deInt32						getImageWidth				(void) const { return m_context.getImageWidth(); }
+	deInt32						getImageHeight				(void) const { return m_context.getImageHeight(); }
+	vk::VkImageLayout			getImageLayout				(void) const { return m_context.getImageLayout(); }
+
+	deInt32						getTargetWidth				(void) const { return m_targetWidth; }
+	deInt32						getTargetHeight				(void) const { return m_targetHeight; }
+
+	vk::VkRenderPass			getRenderPass				(void) const { return m_renderPass; }
+
+private:
+	PrepareContext&				m_context;
+	const vk::VkRenderPass		m_renderPass;
+	const vk::VkFramebuffer		m_framebuffer;
+	const deInt32				m_targetWidth;
+	const deInt32				m_targetHeight;
+};
+
+class VerifyRenderPassContext
+{
+public:
+							VerifyRenderPassContext		(VerifyContext&			context,
+														 deInt32				targetWidth,
+														 deInt32				targetHeight)
+		: m_context			(context)
+		, m_referenceTarget	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), targetWidth, targetHeight)
+	{
+	}
+
+	const Context&			getContext			(void) const { return m_context.getContext(); }
+	TestLog&				getLog				(void) const { return m_context.getLog(); }
+	tcu::ResultCollector&	getResultCollector	(void) const { return m_context.getResultCollector(); }
+
+	TextureLevel&			getReferenceTarget	(void) { return m_referenceTarget; }
+
+	ReferenceMemory&		getReference		(void) { return m_context.getReference(); }
+	TextureLevel&			getReferenceImage	(void) { return m_context.getReferenceImage();}
+
+private:
+	VerifyContext&	m_context;
+	TextureLevel	m_referenceTarget;
+};
+
+
+class RenderPassCommand
+{
+public:
+	virtual				~RenderPassCommand	(void) {}
+	virtual const char*	getName				(void) const = 0;
+
+	// Log things that are done during prepare
+	virtual void		logPrepare			(TestLog&, size_t) const {}
+	// Log submitted calls etc.
+	virtual void		logSubmit			(TestLog&, size_t) const {}
+
+	// Allocate Vulkan resources and prepare for submit.
+	virtual void		prepare				(PrepareRenderPassContext&) {}
+
+	// Submit commands to command buffer.
+	virtual void		submit				(SubmitContext&) {}
+
+	// Verify results
+	virtual void		verify				(VerifyRenderPassContext&, size_t) {}
+};
+
+class SubmitRenderPass : public CmdCommand
+{
+public:
+				SubmitRenderPass	(const vector<RenderPassCommand*>& commands);
+				~SubmitRenderPass	(void) {}
+	const char*	getName				(void) const { return "SubmitRenderPass"; }
+
+	void		logPrepare			(TestLog&, size_t) const;
+	void		logSubmit			(TestLog&, size_t) const;
+
+	void		prepare				(PrepareContext&);
+	void		submit				(SubmitContext&);
+
+	void		verify				(VerifyContext&, size_t);
+
+private:
+	const deInt32					m_targetWidth;
+	const deInt32					m_targetHeight;
+	vk::Move<vk::VkRenderPass>		m_renderPass;
+	vk::Move<vk::VkDeviceMemory>	m_colorTargetMemory;
+	de::MovePtr<vk::Allocation>		m_colorTargetMemory2;
+	vk::Move<vk::VkImage>			m_colorTarget;
+	vk::Move<vk::VkImageView>		m_colorTargetView;
+	vk::Move<vk::VkFramebuffer>		m_framebuffer;
+	vector<RenderPassCommand*>		m_commands;
+};
+
+SubmitRenderPass::SubmitRenderPass (const vector<RenderPassCommand*>& commands)
+	: m_targetWidth		(256)
+	, m_targetHeight	(256)
+	, m_commands		(commands)
+{
+}
+
+void SubmitRenderPass::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
+	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);
+
+	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
+	{
+		RenderPassCommand& command = *m_commands[cmdNdx];
+		command.logPrepare(log, cmdNdx);
+	}
+}
+
+void SubmitRenderPass::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
+	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);
+
+	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
+	{
+		RenderPassCommand& command = *m_commands[cmdNdx];
+		command.logSubmit(log, cmdNdx);
+	}
+}
+
+void SubmitRenderPass::prepare (PrepareContext& context)
+{
+	const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
+	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
+	const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
+	const vk::VkDevice						device			= context.getContext().getDevice();
+	const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
+
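+	// Single-subpass render pass with one R8G8B8A8_UNORM color attachment that is cleared on load and left in TRANSFER_SRC_OPTIMAL for readback.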
+	const vk::VkAttachmentReference	colorAttachments[]	=
+	{
+		{ 0, vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL }
+	};
+	const vk::VkSubpassDescription	subpass				=
+	{
+		0u,
+		vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
+
+		0u,
+		DE_NULL,
+
+		DE_LENGTH_OF_ARRAY(colorAttachments),
+		colorAttachments,
+		DE_NULL,
+		DE_NULL,
+		0u,
+		DE_NULL
+	};
+	const vk::VkAttachmentDescription attachment =
+	{
+		0u,
+		vk::VK_FORMAT_R8G8B8A8_UNORM,
+		vk::VK_SAMPLE_COUNT_1_BIT,
+
+		vk::VK_ATTACHMENT_LOAD_OP_CLEAR,
+		vk::VK_ATTACHMENT_STORE_OP_STORE,
+
+		vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+		vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,
+
+		vk::VK_IMAGE_LAYOUT_UNDEFINED,
+		vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
+	};
+	{
+		const vk::VkImageCreateInfo createInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+			DE_NULL,
+			0u,
+
+			vk::VK_IMAGE_TYPE_2D,
+			vk::VK_FORMAT_R8G8B8A8_UNORM,
+			{ (deUint32)m_targetWidth, (deUint32)m_targetHeight, 1u },
+			1u,
+			1u,
+			vk::VK_SAMPLE_COUNT_1_BIT,
+			vk::VK_IMAGE_TILING_OPTIMAL,
+			vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+			vk::VK_SHARING_MODE_EXCLUSIVE,
+			(deUint32)queueFamilies.size(),
+			&queueFamilies[0],
+			vk::VK_IMAGE_LAYOUT_UNDEFINED
+		};
+
+		m_colorTarget = vk::createImage(vkd, device, &createInfo);
+	}
+
+	m_colorTargetMemory = bindImageMemory(vki, vkd, physicalDevice, device, *m_colorTarget, 0);
+
+	{
+		const vk::VkImageViewCreateInfo createInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+			DE_NULL,
+
+			0u,
+			*m_colorTarget,
+			vk::VK_IMAGE_VIEW_TYPE_2D,
+			vk::VK_FORMAT_R8G8B8A8_UNORM,
+			{
+				vk::VK_COMPONENT_SWIZZLE_R,
+				vk::VK_COMPONENT_SWIZZLE_G,
+				vk::VK_COMPONENT_SWIZZLE_B,
+				vk::VK_COMPONENT_SWIZZLE_A
+			},
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,
+				0u,
+				1u,
+				0u,
+				1u
+			}
+		};
+
+		m_colorTargetView = vk::createImageView(vkd, device, &createInfo);
+	}
+	{
+		const vk::VkRenderPassCreateInfo createInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+			DE_NULL,
+			0u,
+
+			1u,
+			&attachment,
+
+			1u,
+			&subpass,
+
+			0,
+			DE_NULL
+		};
+
+		m_renderPass = vk::createRenderPass(vkd, device, &createInfo);
+	}
+
+	{
+		const vk::VkImageView				imageViews[]	=
+		{
+			*m_colorTargetView
+		};
+		const vk::VkFramebufferCreateInfo	createInfo		=
+		{
+			vk::VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
+			DE_NULL,
+			0u,
+
+			*m_renderPass,
+			DE_LENGTH_OF_ARRAY(imageViews),
+			imageViews,
+			(deUint32)m_targetWidth,
+			(deUint32)m_targetHeight,
+			1u
+		};
+
+		m_framebuffer = vk::createFramebuffer(vkd, device, &createInfo);
+	}
+
+	{
+		PrepareRenderPassContext renderpassContext (context, *m_renderPass, *m_framebuffer, m_targetWidth, m_targetHeight);
+
+		for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
+		{
+			RenderPassCommand& command = *m_commands[cmdNdx];
+			command.prepare(renderpassContext);
+		}
+	}
+}
+
+void SubmitRenderPass::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer		commandBuffer	= context.getCommandBuffer();
+	const vk::VkClearValue			clearValue		= vk::makeClearValueColorF32(0.0f, 0.0f, 0.0f, 1.0f);
+
+	const vk::VkRenderPassBeginInfo	beginInfo		=
+	{
+		vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+		DE_NULL,
+
+		*m_renderPass,
+		*m_framebuffer,
+
+		{ { 0, 0 },  { (deUint32)m_targetWidth, (deUint32)m_targetHeight } },
+		1u,
+		&clearValue
+	};
+
+	vkd.cmdBeginRenderPass(commandBuffer, &beginInfo, vk::VK_SUBPASS_CONTENTS_INLINE);
+
+	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
+	{
+		RenderPassCommand& command = *m_commands[cmdNdx];
+
+		command.submit(context);
+	}
+
+	vkd.cmdEndRenderPass(commandBuffer);
+}
+
+void SubmitRenderPass::verify (VerifyContext& context, size_t commandIndex)
+{
+	TestLog&					log				(context.getLog());
+	tcu::ResultCollector&		resultCollector	(context.getResultCollector());
+	const string				sectionName		(de::toString(commandIndex) + ":" + getName());
+	const tcu::ScopedLogSection	section			(log, sectionName, sectionName);
+	VerifyRenderPassContext		verifyContext	(context, m_targetWidth, m_targetHeight);
+
+	tcu::clear(verifyContext.getReferenceTarget().getAccess(), Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+
+	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
+	{
+		RenderPassCommand& command = *m_commands[cmdNdx];
+		command.verify(verifyContext, cmdNdx);
+	}
+
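+	// Read the color target back into a host-visible buffer and compare it against the accumulated reference target.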
+	{
+		const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
+		const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
+		const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
+		const vk::VkDevice						device			= context.getContext().getDevice();
+		const vk::VkQueue						queue			= context.getContext().getQueue();
+		const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
+		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+		const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
+		const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_targetWidth * m_targetHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
+		const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
+		{
+			const vk::VkImageMemoryBarrier		imageBarrier	=
+			{
+				vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+				DE_NULL,
+
+				vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+				vk::VK_ACCESS_TRANSFER_READ_BIT,
+
+				vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+				vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+
+				vk::VK_QUEUE_FAMILY_IGNORED,
+				vk::VK_QUEUE_FAMILY_IGNORED,
+
+				*m_colorTarget,
+				{
+					vk::VK_IMAGE_ASPECT_COLOR_BIT,
+					0,	// Mip level
+					1,	// Mip level count
+					0,	// Layer
+					1	// Layer count
+				}
+			};
+			const vk::VkBufferMemoryBarrier bufferBarrier =
+			{
+				vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+				DE_NULL,
+
+				vk::VK_ACCESS_TRANSFER_WRITE_BIT,
+				vk::VK_ACCESS_HOST_READ_BIT,
+
+				vk::VK_QUEUE_FAMILY_IGNORED,
+				vk::VK_QUEUE_FAMILY_IGNORED,
+				*dstBuffer,
+				0,
+				vk::VK_WHOLE_SIZE
+			};
+			const vk::VkBufferImageCopy	region =
+			{
+				0,
+				0, 0,
+				{
+					vk::VK_IMAGE_ASPECT_COLOR_BIT,
+					0,	// mipLevel
+					0,	// arrayLayer
+					1	// layerCount
+				},
+				{ 0, 0, 0 },
+				{
+					(deUint32)m_targetWidth,
+					(deUint32)m_targetHeight,
+					1u
+				}
+			};
+
+			vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
+			vkd.cmdCopyImageToBuffer(*commandBuffer, *m_colorTarget, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
+			vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+		}
+
+		VK_CHECK(vkd.endCommandBuffer(*commandBuffer));
+		queueRun(vkd, queue, *commandBuffer);
+
+		{
+			void* const	ptr		= mapMemory(vkd, device, *memory, 4 * m_targetWidth * m_targetHeight);
+
+			vk::invalidateMappedMemoryRange(vkd, device, *memory, 0,  4 * m_targetWidth * m_targetHeight);
+
+			{
+				const deUint8* const			data		= (const deUint8*)ptr;
+				const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_targetWidth, m_targetHeight, 1, data);
+				const ConstPixelBufferAccess&	refAccess	(verifyContext.getReferenceTarget().getAccess());
+
+				if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
+					resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
+			}
+
+			vkd.unmapMemory(device, *memory);
+		}
+	}
+}
+
+class RenderBuffer : public RenderPassCommand
+{
+public:
+	enum RenderAs
+	{
+		RENDERAS_VERTEX_BUFFER,
+		RENDERAS_INDEX_BUFFER,
+	};
+				RenderBuffer		(RenderAs renderAs) : m_renderAs(renderAs) {}
+				~RenderBuffer		(void) {}
+
+	const char*	getName				(void) const { return "RenderBuffer"; }
+	void		logPrepare			(TestLog&, size_t) const;
+	void		logSubmit			(TestLog&, size_t) const;
+	void		prepare				(PrepareRenderPassContext&);
+	void		submit				(SubmitContext& context);
+	void		verify				(VerifyRenderPassContext&, size_t);
+
+private:
+	const RenderAs						m_renderAs;
+	vk::Move<vk::VkPipeline>			m_pipeline;
+	vk::Move<vk::VkPipelineLayout>		m_pipelineLayout;
+	vk::VkDeviceSize					m_bufferSize;
+
+	static const vk::ProgramBinary&		getVertexShader		(const vk::ProgramCollection<vk::ProgramBinary>& collections, RenderAs renderAs)
+	{
+		switch (renderAs)
+		{
+			case RENDERAS_VERTEX_BUFFER:
+				return collections.get("vertex-buffer.vert");
+
+			case RENDERAS_INDEX_BUFFER:
+				return collections.get("index-buffer.vert");
+
+			default:
+				DE_FATAL("Unknown renderAs");
+				return collections.get("");
+		}
+	}
+};
+
+void RenderBuffer::logPrepare (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as " << (m_renderAs == RENDERAS_VERTEX_BUFFER ? "vertex" : "index") << " buffer." << TestLog::EndMessage;
+}
+
+void RenderBuffer::logSubmit (TestLog& log, size_t commandIndex) const
+{
+	log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as " << (m_renderAs == RENDERAS_VERTEX_BUFFER ? "vertex" : "index") << " buffer." << TestLog::EndMessage;
+}
+
+void RenderBuffer::prepare (PrepareRenderPassContext& context)
+{
+	const vk::DeviceInterface&				vkd						= context.getContext().getDeviceInterface();
+	const vk::VkDevice						device					= context.getContext().getDevice();
+	const vk::VkRenderPass					renderPass				= context.getRenderPass();
+	const deUint32							subpass					= 0;
+	const vk::Unique<vk::VkShaderModule>	vertexShaderModule		(vk::createShaderModule(vkd, device, getVertexShader(context.getBinaryCollection(), m_renderAs), 0));
+	const vk::Unique<vk::VkShaderModule>	fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
+
+	m_bufferSize = context.getBufferSize();
+
+	{
+		const vk::VkPipelineLayoutCreateInfo	createInfo	=
+		{
+			vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+			DE_NULL,
+			0,
+			0,
+			DE_NULL,
+			0,
+			DE_NULL
+		};
+
+		m_pipelineLayout = vk::createPipelineLayout(vkd, device, &createInfo);
+	}
+
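+	// Create a minimal point-list graphics pipeline; when rendering as a vertex buffer the data is read as R8G8_UNORM coordinate pairs, and as an index buffer it is read as 16-bit indices in submit().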
+	{
+		const vk::VkPipelineShaderStageCreateInfo			shaderStages[]					=
+		{
+			{
+				vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+				DE_NULL,
+				0,
+				vk::VK_SHADER_STAGE_VERTEX_BIT,
+				*vertexShaderModule,
+				"main",
+				DE_NULL
+			},
+			{
+				vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+				DE_NULL,
+				0,
+				vk::VK_SHADER_STAGE_FRAGMENT_BIT,
+				*fragmentShaderModule,
+				"main",
+				DE_NULL
+			}
+		};
+		const vk::VkPipelineDepthStencilStateCreateInfo		depthStencilState				=
+		{
+			vk::VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
+			DE_NULL,
+			0u,
+			DE_FALSE,
+			DE_FALSE,
+			vk::VK_COMPARE_OP_ALWAYS,
+			DE_FALSE,
+			DE_FALSE,
+			{
+				vk::VK_STENCIL_OP_KEEP,
+				vk::VK_STENCIL_OP_KEEP,
+				vk::VK_STENCIL_OP_KEEP,
+				vk::VK_COMPARE_OP_ALWAYS,
+				0u,
+				0u,
+				0u,
+			},
+			{
+				vk::VK_STENCIL_OP_KEEP,
+				vk::VK_STENCIL_OP_KEEP,
+				vk::VK_STENCIL_OP_KEEP,
+				vk::VK_COMPARE_OP_ALWAYS,
+				0u,
+				0u,
+				0u,
+			},
+			-1.0f,
+			+1.0f
+		};
+		const vk::VkVertexInputBindingDescription			vertexBindingDescriptions[]		=
+		{
+			{
+				0,
+				2,
+				vk::VK_VERTEX_INPUT_RATE_VERTEX
+			}
+		};
+		const vk::VkVertexInputAttributeDescription			vertexAttributeDescriptions[]	=
+		{
+			{
+				0,
+				0,
+				vk::VK_FORMAT_R8G8_UNORM,
+				0
+			}
+		};
+		const vk::VkPipelineVertexInputStateCreateInfo		vertexInputState				=
+		{
+			vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
+			DE_NULL,
+			0u,
+
+			m_renderAs == RENDERAS_VERTEX_BUFFER ? DE_LENGTH_OF_ARRAY(vertexBindingDescriptions) : 0u,
+			m_renderAs == RENDERAS_VERTEX_BUFFER ? vertexBindingDescriptions : DE_NULL,
+
+			m_renderAs == RENDERAS_VERTEX_BUFFER ? DE_LENGTH_OF_ARRAY(vertexAttributeDescriptions) : 0u,
+			m_renderAs == RENDERAS_VERTEX_BUFFER ? vertexAttributeDescriptions : DE_NULL,
+		};
+		const vk::VkPipelineInputAssemblyStateCreateInfo	inputAssemblyState				=
+		{
+			vk::VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
+			DE_NULL,
+			0,
+			vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
+			vk::VK_FALSE
+		};
+		const vk::VkViewport								viewports[]						=
+		{
+			{ 0.0f, 0.0f, (float)context.getTargetWidth(), (float)context.getTargetHeight(), 0.0f, 1.0f }
+		};
+		const vk::VkRect2D									scissors[]						=
+		{
+			{ { 0, 0 }, { (deUint32)context.getTargetWidth(), (deUint32)context.getTargetHeight() } }
+		};
+		const vk::VkPipelineViewportStateCreateInfo			viewportState					=
+		{
+			vk::VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+			DE_NULL,
+			0,
+			DE_LENGTH_OF_ARRAY(viewports),
+			viewports,
+			DE_LENGTH_OF_ARRAY(scissors),
+			scissors
+		};
+		const vk::VkPipelineRasterizationStateCreateInfo	rasterState						=
+		{
+			vk::VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
+			DE_NULL,
+			0,
+
+			vk::VK_TRUE,
+			vk::VK_FALSE,
+			vk::VK_POLYGON_MODE_FILL,
+			vk::VK_CULL_MODE_NONE,
+			vk::VK_FRONT_FACE_COUNTER_CLOCKWISE,
+			vk::VK_FALSE,
+			0.0f,
+			0.0f,
+			0.0f,
+			1.0f
+		};
+		const vk::VkSampleMask								sampleMask						= ~0u;
+		const vk::VkPipelineMultisampleStateCreateInfo		multisampleState				=
+		{
+			vk::VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
+			DE_NULL,
+			0,
+
+			vk::VK_SAMPLE_COUNT_1_BIT,
+			vk::VK_FALSE,
+			0.0f,
+			&sampleMask,
+			vk::VK_FALSE,
+			vk::VK_FALSE
+		};
+		const vk::VkPipelineColorBlendAttachmentState		attachments[]					=
+		{
+			{
+				vk::VK_FALSE,
+				vk::VK_BLEND_FACTOR_ONE,
+				vk::VK_BLEND_FACTOR_ZERO,
+				vk::VK_BLEND_OP_ADD,
+				vk::VK_BLEND_FACTOR_ONE,
+				vk::VK_BLEND_FACTOR_ZERO,
+				vk::VK_BLEND_OP_ADD,
+				(vk::VK_COLOR_COMPONENT_R_BIT|
+				 vk::VK_COLOR_COMPONENT_G_BIT|
+				 vk::VK_COLOR_COMPONENT_B_BIT|
+				 vk::VK_COLOR_COMPONENT_A_BIT)
+			}
+		};
+		const vk::VkPipelineColorBlendStateCreateInfo		colorBlendState					=
+		{
+			vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
+			DE_NULL,
+			0,
+
+			vk::VK_FALSE,
+			vk::VK_LOGIC_OP_COPY,
+			DE_LENGTH_OF_ARRAY(attachments),
+			attachments,
+			{ 0.0f, 0.0f, 0.0f, 0.0f }
+		};
+		const vk::VkGraphicsPipelineCreateInfo				createInfo						=
+		{
+			vk::VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+			DE_NULL,
+			0u,
+
+			DE_LENGTH_OF_ARRAY(shaderStages),
+			shaderStages,
+
+			&vertexInputState,
+			&inputAssemblyState,
+			DE_NULL,
+			&viewportState,
+			&rasterState,
+			&multisampleState,
+			&depthStencilState,
+			&colorBlendState,
+			DE_NULL,
+			*m_pipelineLayout,
+			renderPass,
+			subpass,
+			0,
+			0
+		};
+
+		m_pipeline = vk::createGraphicsPipeline(vkd, device, 0, &createInfo);
+	}
+}
+
+void RenderBuffer::submit (SubmitContext& context)
+{
+	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
+	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
+	const vk::VkDeviceSize		offset			= 0;
+	const vk::VkBuffer			buffer			= context.getBuffer();
+
+	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+
+	if (m_renderAs == RENDERAS_VERTEX_BUFFER)
+	{
+		vkd.cmdBindVertexBuffers(commandBuffer, 0, 1, &buffer, &offset);
+		vkd.cmdDraw(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0);
+	}
+	else if (m_renderAs == RENDERAS_INDEX_BUFFER)
+	{
+		vkd.cmdBindIndexBuffer(commandBuffer, context.getBuffer(), 0, vk::VK_INDEX_TYPE_UINT16);
+		vkd.cmdDrawIndexed(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0, 0);
+	}
+	else
+		DE_FATAL("Unknown renderAs");
+}
+
+void RenderBuffer::verify (VerifyRenderPassContext& context, size_t)
+{
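+	// Each byte pair in the buffer was rendered as a single white point; its two bytes map directly to pixel coordinates in the 256x256 reference target.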
+	for (size_t pos = 0; pos < (size_t)m_bufferSize / 2; pos++)
+	{
+		const deUint8 x  = context.getReference().get(pos * 2);
+		const deUint8 y  = context.getReference().get((pos * 2) + 1);
+
+		context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
+	}
+}
+
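+// Operations the tests can perform; availability at any point is decided by getAvailableOps() below.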
+enum Op
+{
+	OP_MAP,
+	OP_UNMAP,
+
+	OP_MAP_FLUSH,
+	OP_MAP_INVALIDATE,
+
+	OP_MAP_READ,
+	OP_MAP_WRITE,
+	OP_MAP_MODIFY,
+
+	OP_BUFFER_CREATE,
+	OP_BUFFER_DESTROY,
+	OP_BUFFER_BINDMEMORY,
+
+	OP_QUEUE_WAIT_FOR_IDLE,
+	OP_DEVICE_WAIT_FOR_IDLE,
+
+	OP_COMMAND_BUFFER_BEGIN,
+	OP_COMMAND_BUFFER_END,
+
+	// Buffer transfer operations
+	OP_BUFFER_FILL,
+	OP_BUFFER_UPDATE,
+
+	OP_BUFFER_COPY_TO_BUFFER,
+	OP_BUFFER_COPY_FROM_BUFFER,
+
+	OP_BUFFER_COPY_TO_IMAGE,
+	OP_BUFFER_COPY_FROM_IMAGE,
+
+	OP_IMAGE_CREATE,
+	OP_IMAGE_DESTROY,
+	OP_IMAGE_BINDMEMORY,
+
+	OP_IMAGE_TRANSITION_TO_GENERAL,
+
+	OP_IMAGE_COPY_TO_BUFFER,
+	OP_IMAGE_COPY_FROM_BUFFER,
+
+	OP_IMAGE_COPY_TO_IMAGE,
+	OP_IMAGE_COPY_FROM_IMAGE,
+
+	OP_IMAGE_BLIT_TO_IMAGE,
+	OP_IMAGE_BLIT_FROM_IMAGE,
+
+	OP_IMAGE_RESOLVE,
+
+	OP_PIPELINE_BARRIER_GLOBAL,
+	OP_PIPELINE_BARRIER_BUFFER,
+	OP_PIPELINE_BARRIER_IMAGE,
+
+	// Renderpass operations
+	OP_RENDERPASS_BEGIN,
+	OP_RENDERPASS_END,
+
+	// Commands inside render pass
+	OP_RENDER_VERTEX_BUFFER,
+	OP_RENDER_INDEX_BUFFER
+};
+
+enum Stage
+{
+	STAGE_HOST,
+	STAGE_COMMAND_BUFFER,
+
+	STAGE_RENDER_PASS
+};
+
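+// Classifies an access flag as a write (true) or a read (false) for the cache state tracking below.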
+bool isWriteAccess (vk::VkAccessFlagBits access)
+{
+	switch (access)
+	{
+
+		case vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT:			return false;
+		case vk::VK_ACCESS_INDEX_READ_BIT:						return false;
+		case vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT:			return false;
+		case vk::VK_ACCESS_UNIFORM_READ_BIT:					return false;
+		case vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT:			return false;
+		case vk::VK_ACCESS_SHADER_READ_BIT:						return false;
+		case vk::VK_ACCESS_SHADER_WRITE_BIT:					return true;
+		case vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT:			return false;
+		case vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT:			return true;
+		case vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT:	return false;
+		case vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT:	return true;
+		case vk::VK_ACCESS_TRANSFER_READ_BIT:					return false;
+		case vk::VK_ACCESS_TRANSFER_WRITE_BIT:					return true;
+		case vk::VK_ACCESS_HOST_READ_BIT:						return false;
+		case vk::VK_ACCESS_HOST_WRITE_BIT:						return true;
+		case vk::VK_ACCESS_MEMORY_READ_BIT:						return false;
+		case vk::VK_ACCESS_MEMORY_WRITE_BIT:					return true;
+
+		default:
+			DE_FATAL("Unknown access");
+			return true;
+	}
+}
+
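+// Tracks which memory writes are available and visible to each pipeline stage, so the test model knows when pipeline barriers are still required.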
+class CacheState
+{
+public:
+			CacheState			(vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses);
+
+	bool	isValid				(vk::VkPipelineStageFlagBits	stage,
+								 vk::VkAccessFlagBits			access) const;
+
+	void	perform				(vk::VkPipelineStageFlagBits	stage,
+								 vk::VkAccessFlagBits			access);
+
+	void	submitCommandBuffer	(void);
+
+	void	getFullBarrier		(vk::VkPipelineStageFlags&	srcStages,
+								 vk::VkAccessFlags&			srcAccesses,
+								 vk::VkPipelineStageFlags&	dstStages,
+								 vk::VkAccessFlags&			dstAccesses) const;
+
+	void	barrier				(vk::VkPipelineStageFlags	srcStages,
+								 vk::VkAccessFlags			srcAccesses,
+								 vk::VkPipelineStageFlags	dstStages,
+								 vk::VkAccessFlags			dstAccesses);
+
+	void	fullBarrier			(void);
+
+	// Everything is clean and there is no need for barriers
+	bool	isClean				(void) const;
+
+private:
+	// Limit which stages and accesses are used by the CacheState tracker
+	const vk::VkPipelineStageFlags	m_allowedStages;
+	const vk::VkAccessFlags			m_allowedAccesses;
+
+	// [dstStage][srcStage] = srcAccesses
+	// In stage dstStage, writes with srcAccesses from srcStage are not yet available
+	vk::VkAccessFlags				m_unavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
+	// [dstStage] = dstAccesses
+	// In stage dstStage, operations using dstAccesses are not yet visible
+	vk::VkAccessFlags				m_invisibleOperations[PIPELINESTAGE_LAST];
+
+	// [dstStage] = srcStage
+	// Memory operations in srcStage have not completed before dstStage
+	vk::VkPipelineStageFlags		m_incompleteOperations[PIPELINESTAGE_LAST];
+};
+
+CacheState::CacheState (vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses)
+	: m_allowedStages	(allowedStages)
+	, m_allowedAccesses	(allowedAccesses)
+{
+	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
+	{
+		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
+
+		if ((dstStage_ & m_allowedStages) == 0)
+			continue;
+
+		// All operations are initially visible
+		m_invisibleOperations[dstStage] = 0;
+
+		// There are no incomplete operations initially
+		m_incompleteOperations[dstStage] = 0;
+
+		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
+		{
+			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
+
+			if ((srcStage_ & m_allowedStages) == 0)
+				continue;
+
+			// Initially there are no unavailable write operations.
+			m_unavailableWriteOperations[dstStage][srcStage] = 0;
+		}
+	}
+}
+
+bool CacheState::isValid (vk::VkPipelineStageFlagBits	stage,
+						  vk::VkAccessFlagBits			access) const
+{
+	DE_ASSERT((access & (~m_allowedAccesses)) == 0);
+	DE_ASSERT((stage & (~m_allowedStages)) == 0);
+
+	const PipelineStage	dstStage	= pipelineStageFlagToPipelineStage(stage);
+
+	// Previous operations are not yet visible to this access in the given stage
+	if ((m_invisibleOperations[dstStage] & access) != 0)
+		return false;
+
+	if (isWriteAccess(access))
+	{
+		// Memory operations from other stages have not completed before
+		// dstStage
+		if (m_incompleteOperations[dstStage] != 0)
+			return false;
+	}
+
+	return true;
+}
+
+void CacheState::perform (vk::VkPipelineStageFlagBits	stage,
+						  vk::VkAccessFlagBits			access)
+{
+	DE_ASSERT((access & (~m_allowedAccesses)) == 0);
+	DE_ASSERT((stage & (~m_allowedStages)) == 0);
+
+	const PipelineStage srcStage = pipelineStageFlagToPipelineStage(stage);
+
+	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
+	{
+		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
+
+		if ((dstStage_ & m_allowedStages) == 0)
+			continue;
+
+		// Mark operations in this stage as incomplete for all destination stages
+		m_incompleteOperations[dstStage] |= stage;
+
+		if (isWriteAccess(access))
+		{
+			// Mark all accesses from all stages invisible
+			m_invisibleOperations[dstStage] |= m_allowedAccesses;
+
+			// Mark write access from srcStage unavailable to all stages
+			m_unavailableWriteOperations[dstStage][srcStage] |= access;
+		}
+	}
+}
+
+void CacheState::submitCommandBuffer (void)
+{
+	// Flush all host writes and reads
+	barrier(m_allowedStages & vk::VK_PIPELINE_STAGE_HOST_BIT,
+			m_allowedAccesses & (vk::VK_ACCESS_HOST_READ_BIT | vk::VK_ACCESS_HOST_WRITE_BIT),
+			m_allowedStages,
+			m_allowedAccesses);
+}
+
+void CacheState::getFullBarrier (vk::VkPipelineStageFlags&	srcStages,
+								 vk::VkAccessFlags&			srcAccesses,
+								 vk::VkPipelineStageFlags&	dstStages,
+								 vk::VkAccessFlags&			dstAccesses) const
+{
+	srcStages	= 0;
+	srcAccesses	= 0;
+	dstStages	= 0;
+	dstAccesses	= 0;
+
+	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
+	{
+		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
+
+		if ((dstStage_ & m_allowedStages) == 0)
+			continue;
+
+		// Make sure all previous operations are complete in all stages
+		if (m_incompleteOperations[dstStage])
+		{
+			dstStages |= dstStage_;
+			srcStages |= m_incompleteOperations[dstStage];
+		}
+
+		// Make sure all accesses are visible in dstStage
+		if (m_invisibleOperations[dstStage])
+		{
+			dstStages |= dstStage_;
+			dstAccesses |= m_invisibleOperations[dstStage];
+		}
+
+		// Make sure all write operations from all stages are available
+		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
+		{
+			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
+
+			if ((srcStage_ & m_allowedStages) == 0)
+				continue;
+
+			if (m_unavailableWriteOperations[dstStage][srcStage])
+			{
+				dstStages |= dstStage_;
+				srcStages |= dstStage_;
+				srcAccesses |= m_unavailableWriteOperations[dstStage][srcStage];
+			}
+		}
+	}
+
+	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
+	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
+	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
+	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
+}
+
+void CacheState::barrier (vk::VkPipelineStageFlags	srcStages,
+						  vk::VkAccessFlags			srcAccesses,
+						  vk::VkPipelineStageFlags	dstStages,
+						  vk::VkAccessFlags			dstAccesses)
+{
+	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
+	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
+	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
+	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
+
+	// Transitivity
+	{
+		vk::VkPipelineStageFlags		oldIncompleteOperations[PIPELINESTAGE_LAST];
+		vk::VkAccessFlags				oldUnavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST];
+
+		deMemcpy(oldIncompleteOperations, m_incompleteOperations, sizeof(oldIncompleteOperations));
+		deMemcpy(oldUnavailableWriteOperations, m_unavailableWriteOperations, sizeof(oldUnavailableWriteOperations));
+
+		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
+		{
+			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
+
+			if ((srcStage_ & m_allowedStages) == 0)
+				continue;
+
+			for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
+			{
+				const PipelineStage	dstStage			= pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
+
+				if ((dstStage_ & m_allowedStages) == 0)
+					continue;
+
+				// Stages that have completed before srcStage have also completed before dstStage
+				m_incompleteOperations[dstStage] &= ~oldIncompleteOperations[srcStage];
+
+				for (vk::VkPipelineStageFlags sharedStage_ = 1; sharedStage_ <= m_allowedStages; sharedStage_ <<= 1)
+				{
+					const PipelineStage	sharedStage			= pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)sharedStage_);
+
+					if ((sharedStage_ & m_allowedStages) == 0)
+						continue;
+
+					// Writes that are available in srcStage are also available in dstStage
+					m_unavailableWriteOperations[dstStage][sharedStage] &= ~oldUnavailableWriteOperations[srcStage][sharedStage];
+				}
+			}
+		}
+	}
+
+	// Barrier
+	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
+	{
+		const PipelineStage	dstStage			= pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
+		bool				allWritesAvailable	= true;
+
+		if ((dstStage_ & m_allowedStages) == 0)
+			continue;
+
+		// Operations in srcStages have completed before any stage in dstStages
+		m_incompleteOperations[dstStage] &= ~srcStages;
+
+		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
+		{
+			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
+
+			if ((srcStage_ & m_allowedStages) == 0)
+				continue;
+
+			// Make srcAccesses from srcStage available in dstStage
+			m_unavailableWriteOperations[dstStage][srcStage] &= ~srcAccesses;
+
+			if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
+				allWritesAvailable = false;
+		}
+
+		// If all writes are available in dstStage make dstAccesses also visible
+		if (allWritesAvailable)
+			m_invisibleOperations[dstStage] &= ~dstAccesses;
+	}
+}
+
+bool CacheState::isClean (void) const
+{
+	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
+	{
+		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
+
+		if ((dstStage_ & m_allowedStages) == 0)
+			continue;
+
+		// Some operations are not visible to some stages
+		if (m_invisibleOperations[dstStage] != 0)
+			return false;
+
+		// There are operations that have not completed yet
+		if (m_incompleteOperations[dstStage] != 0)
+			return false;
+
+		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
+		{
+			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
+
+			if ((srcStage_ & m_allowedStages) == 0)
+				continue;
+
+			// Some write operations are not available yet
+			if (m_unavailableWriteOperations[dstStage][srcStage] != 0)
+				return false;
+		}
+	}
+
+	return true;
+}
+
+void CacheState::fullBarrier (void)
+{
+	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
+	{
+		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
+
+		if ((dstStage_ & m_allowedStages) == 0)
+			continue;
+
+		// All stages have completed
+		m_incompleteOperations[dstStage] = 0;
+
+		// All operations are visible
+		m_invisibleOperations[dstStage] = 0;
+
+		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
+		{
+			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
+
+			if ((srcStage_ & m_allowedStages) == 0)
+				continue;
+
+			// All writes are available
+			m_unavailableWriteOperations[dstStage][srcStage] = 0;
+		}
+	}
+}
+
+struct State
+{
+	State (Usage usage, deUint32 seed)
+		: stage					(STAGE_HOST)
+		, cache					(usageToStageFlags(usage), usageToAccessFlags(usage))
+		, rng					(seed)
+		, mapped				(false)
+		, hostInvalidated		(true)
+		, hostFlushed			(true)
+		, memoryDefined			(false)
+		, hasBuffer				(false)
+		, hasBoundBufferMemory	(false)
+		, hasImage				(false)
+		, hasBoundImageMemory	(false)
+		, imageHasGeneralLayout	(false)
+		, imageDefined			(false)
+		, queueIdle				(true)
+		, deviceIdle			(true)
+		, commandBufferIsEmpty	(true)
+	{
+	}
+
+	Stage		stage;
+	CacheState	cache;
+	de::Random	rng;
+
+	bool		mapped;
+	bool		hostInvalidated;
+	bool		hostFlushed;
+	bool		memoryDefined;
+
+	bool		hasBuffer;
+	bool		hasBoundBufferMemory;
+
+	bool		hasImage;
+	bool		hasBoundImageMemory;
+	bool		imageHasGeneralLayout;
+	bool		imageDefined;
+
+	bool		queueIdle;
+	bool		deviceIdle;
+
+	bool		commandBufferIsEmpty;
+};
+
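+// Enumerate the operations that are legal in the current state; the caller can then apply the chosen operation to the model with applyOp().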
+void getAvailableOps (const State& state, bool supportsBuffers, bool supportsImages, Usage usage, vector<Op>& ops)
+{
+	if (state.stage == STAGE_HOST)
+	{
+		if (usage & (USAGE_HOST_READ | USAGE_HOST_WRITE))
+		{
+			// Host memory operations
+			if (state.mapped)
+			{
+				ops.push_back(OP_UNMAP);
+
+				// Avoid flush and invalidate if they are not needed
+				if (!state.hostFlushed)
+					ops.push_back(OP_MAP_FLUSH);
+
+				if (!state.hostInvalidated
+					&& state.queueIdle
+					&& ((usage & USAGE_HOST_READ) == 0
+						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
+					&& ((usage & USAGE_HOST_WRITE) == 0
+						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)))
+				{
+					ops.push_back(OP_MAP_INVALIDATE);
+				}
+
+				if (usage & USAGE_HOST_READ
+					&& usage & USAGE_HOST_WRITE
+					&& state.memoryDefined
+					&& state.hostInvalidated
+					&& state.queueIdle
+					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)
+					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
+				{
+					ops.push_back(OP_MAP_MODIFY);
+				}
+
+				if (usage & USAGE_HOST_READ
+					&& state.memoryDefined
+					&& state.hostInvalidated
+					&& state.queueIdle
+					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
+				{
+					ops.push_back(OP_MAP_READ);
+				}
+
+				if (usage & USAGE_HOST_WRITE
+					&& state.hostInvalidated
+					&& state.queueIdle
+					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT))
+				{
+					ops.push_back(OP_MAP_WRITE);
+				}
+			}
+			else
+				ops.push_back(OP_MAP);
+		}
+
+		if (state.hasBoundBufferMemory && state.queueIdle)
+		{
+			// \note Destroy buffers only after they have been bound
+			ops.push_back(OP_BUFFER_DESTROY);
+		}
+		else
+		{
+			if (state.hasBuffer)
+			{
+				if (!state.hasBoundBufferMemory)
+					ops.push_back(OP_BUFFER_BINDMEMORY);
+			}
+		else if (!state.hasImage && supportsBuffers)	// Avoid creating a buffer if there is already an image
+				ops.push_back(OP_BUFFER_CREATE);
+		}
+
+		if (state.hasBoundImageMemory && state.queueIdle)
+		{
+			// \note Destroy images only after they have been bound
+			ops.push_back(OP_IMAGE_DESTROY);
+		}
+		else
+		{
+			if (state.hasImage)
+			{
+				if (!state.hasBoundImageMemory)
+					ops.push_back(OP_IMAGE_BINDMEMORY);
+			}
+		else if (!state.hasBuffer && supportsImages)	// Avoid creating an image if there is already a buffer
+				ops.push_back(OP_IMAGE_CREATE);
+		}
+
+		// Host writes must be flushed before GPU commands and there must be
+		// a buffer or an image for the GPU commands to use
+		if (state.hostFlushed
+			&& (state.memoryDefined || supportsDeviceBufferWrites(usage) || state.imageDefined || supportsDeviceImageWrites(usage))
+			&& (state.hasBoundBufferMemory || state.hasBoundImageMemory) // Avoid command buffers if there is no object to use
+			&& (usageToStageFlags(usage) & (~vk::VK_PIPELINE_STAGE_HOST_BIT)) != 0) // Don't start command buffer if there are no ways to use memory from gpu
+		{
+			ops.push_back(OP_COMMAND_BUFFER_BEGIN);
+		}
+
+		if (!state.deviceIdle)
+			ops.push_back(OP_DEVICE_WAIT_FOR_IDLE);
+
+		if (!state.queueIdle)
+			ops.push_back(OP_QUEUE_WAIT_FOR_IDLE);
+	}
+	else if (state.stage == STAGE_COMMAND_BUFFER)
+	{
+		if (!state.cache.isClean())
+		{
+			ops.push_back(OP_PIPELINE_BARRIER_GLOBAL);
+
+			if (state.hasImage)
+				ops.push_back(OP_PIPELINE_BARRIER_IMAGE);
+
+			if (state.hasBuffer)
+				ops.push_back(OP_PIPELINE_BARRIER_BUFFER);
+		}
+
+		if (state.hasBoundBufferMemory)
+		{
+			if (usage & USAGE_TRANSFER_DST
+				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
+			{
+				ops.push_back(OP_BUFFER_FILL);
+				ops.push_back(OP_BUFFER_UPDATE);
+				ops.push_back(OP_BUFFER_COPY_FROM_BUFFER);
+				ops.push_back(OP_BUFFER_COPY_FROM_IMAGE);
+			}
+
+			if (usage & USAGE_TRANSFER_SRC
+				&& state.memoryDefined
+				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
+			{
+				ops.push_back(OP_BUFFER_COPY_TO_BUFFER);
+				ops.push_back(OP_BUFFER_COPY_TO_IMAGE);
+			}
+		}
+
+		if (state.hasBoundImageMemory)
+		{
+			if (!state.imageHasGeneralLayout)
+			{
+				ops.push_back(OP_IMAGE_TRANSITION_TO_GENERAL);
+			}
+			else
+			{
+				if (usage & USAGE_TRANSFER_DST
+					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
+				{
+					ops.push_back(OP_IMAGE_COPY_FROM_BUFFER);
+					ops.push_back(OP_IMAGE_COPY_FROM_IMAGE);
+					ops.push_back(OP_IMAGE_BLIT_FROM_IMAGE);
+				}
+
+				if (usage & USAGE_TRANSFER_SRC
+					&& state.imageDefined
+					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
+				{
+					ops.push_back(OP_IMAGE_COPY_TO_BUFFER);
+					ops.push_back(OP_IMAGE_COPY_TO_IMAGE);
+					ops.push_back(OP_IMAGE_BLIT_TO_IMAGE);
+				}
+			}
+		}
+
+		// \todo Add other usages?
+		if (((usage & USAGE_VERTEX_BUFFER) && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
+			|| ((usage & USAGE_INDEX_BUFFER) && state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT)))
+			ops.push_back(OP_RENDERPASS_BEGIN);
+
+		// \note This depends on the previous operations and always has to be
+		// the last command buffer operation check
+		if (ops.empty() || !state.commandBufferIsEmpty)
+			ops.push_back(OP_COMMAND_BUFFER_END);
+	}
+	else if (state.stage == STAGE_RENDER_PASS)
+	{
+		if (usage & USAGE_VERTEX_BUFFER
+			&& state.memoryDefined
+			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
+		{
+			ops.push_back(OP_RENDER_VERTEX_BUFFER);
+		}
+
+		if (usage & USAGE_INDEX_BUFFER
+			&& state.memoryDefined
+			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT))
+		{
+			ops.push_back(OP_RENDER_INDEX_BUFFER);
+		}
+
+		ops.push_back(OP_RENDERPASS_END);
+	}
+	else
+		DE_FATAL("Unknown stage");
+}
+
+void applyOp (State& state, const Memory& memory, Op op)
+{
+	switch (op)
+	{
+		case OP_MAP:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(!state.mapped);
+			state.mapped = true;
+			break;
+
+		case OP_UNMAP:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(state.mapped);
+			state.mapped = false;
+			break;
+
+		case OP_MAP_FLUSH:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(!state.hostFlushed);
+			state.hostFlushed = true;
+			break;
+
+		case OP_MAP_INVALIDATE:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(!state.hostInvalidated);
+			state.hostInvalidated = true;
+			break;
+
+		case OP_MAP_READ:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(state.hostInvalidated);
+			state.rng.getUint32();
+			break;
+
+		case OP_MAP_WRITE:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
+				state.hostFlushed = false;
+
+			state.memoryDefined = true;
+			state.imageDefined = false;
+			state.rng.getUint32();
+			break;
+
+		case OP_MAP_MODIFY:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(state.hostInvalidated);
+
+			if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
+				state.hostFlushed = false;
+
+			state.rng.getUint32();
+			break;
+
+		case OP_BUFFER_CREATE:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(!state.hasBuffer);
+
+			state.hasBuffer = true;
+			break;
+
+		case OP_BUFFER_DESTROY:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(state.hasBuffer);
+			DE_ASSERT(state.hasBoundBufferMemory);
+
+			state.hasBuffer = false;
+			state.hasBoundBufferMemory = false;
+			break;
+
+		case OP_BUFFER_BINDMEMORY:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(state.hasBuffer);
+			DE_ASSERT(!state.hasBoundBufferMemory);
+
+			state.hasBoundBufferMemory = true;
+			break;
+
+		case OP_IMAGE_CREATE:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(!state.hasImage);
+			DE_ASSERT(!state.hasBuffer);
+
+			state.hasImage = true;
+			break;
+
+		case OP_IMAGE_DESTROY:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(state.hasImage);
+			DE_ASSERT(state.hasBoundImageMemory);
+
+			state.hasImage = false;
+			state.hasBoundImageMemory = false;
+			state.imageHasGeneralLayout = false;
+			state.imageDefined = false;
+			break;
+
+		case OP_IMAGE_BINDMEMORY:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(state.hasImage);
+			DE_ASSERT(!state.hasBoundImageMemory);
+
+			state.hasBoundImageMemory = true;
+			break;
+
+		case OP_IMAGE_TRANSITION_TO_GENERAL:
+			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
+			DE_ASSERT(state.hasImage);
+			DE_ASSERT(state.hasBoundImageMemory);
+
+			state.imageHasGeneralLayout = true;
+			state.memoryDefined = false;
+			break;
+
+		case OP_QUEUE_WAIT_FOR_IDLE:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(!state.queueIdle);
+
+			state.queueIdle = true;
+			break;
+
+		case OP_DEVICE_WAIT_FOR_IDLE:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			DE_ASSERT(!state.deviceIdle);
+
+			state.queueIdle = true;
+			state.deviceIdle = true;
+			break;
+
+		case OP_COMMAND_BUFFER_BEGIN:
+			DE_ASSERT(state.stage == STAGE_HOST);
+			state.stage = STAGE_COMMAND_BUFFER;
+			state.commandBufferIsEmpty = true;
+			// Makes host writes visible to command buffer
+			state.cache.submitCommandBuffer();
+			break;
+
+		case OP_COMMAND_BUFFER_END:
+			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
+			state.stage = STAGE_HOST;
+			state.queueIdle = false;
+			state.deviceIdle = false;
+			// \todo Should this set all device reads ready?
+			break;
+
+		case OP_BUFFER_COPY_FROM_BUFFER:
+		case OP_BUFFER_COPY_FROM_IMAGE:
+		case OP_BUFFER_UPDATE:
+		case OP_BUFFER_FILL:
+			state.rng.getUint32();
+			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
+
+			if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
+				state.hostInvalidated = false;
+
+			state.commandBufferIsEmpty = false;
+			state.memoryDefined = true;
+			state.imageDefined = false;
+			state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
+			break;
+
+		case OP_BUFFER_COPY_TO_BUFFER:
+		case OP_BUFFER_COPY_TO_IMAGE:
+			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
+
+			state.commandBufferIsEmpty = false;
+			state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
+			break;
+
+		case OP_IMAGE_BLIT_FROM_IMAGE:
+			state.rng.getBool();
+			// Fall through
+		case OP_IMAGE_COPY_FROM_BUFFER:
+		case OP_IMAGE_COPY_FROM_IMAGE:
+			state.rng.getUint32();
+			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
+
+			state.commandBufferIsEmpty = false;
+			state.memoryDefined = false;
+			state.imageDefined = true;
+			state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
+			break;
+
+		case OP_IMAGE_BLIT_TO_IMAGE:
+			state.rng.getBool();
+			// Fall through
+		case OP_IMAGE_COPY_TO_BUFFER:
+		case OP_IMAGE_COPY_TO_IMAGE:
+			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
+
+			state.commandBufferIsEmpty = false;
+			state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
+			break;
+
+		case OP_PIPELINE_BARRIER_GLOBAL:
+		case OP_PIPELINE_BARRIER_BUFFER:
+		case OP_PIPELINE_BARRIER_IMAGE:
+		{
+			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
+
+			vk::VkPipelineStageFlags	dirtySrcStages;
+			vk::VkAccessFlags			dirtySrcAccesses;
+			vk::VkPipelineStageFlags	dirtyDstStages;
+			vk::VkAccessFlags			dirtyDstAccesses;
+
+			vk::VkPipelineStageFlags	srcStages;
+			vk::VkAccessFlags			srcAccesses;
+			vk::VkPipelineStageFlags	dstStages;
+			vk::VkAccessFlags			dstAccesses;
+
+			state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
+
+			// Try masking some random bits
+			srcStages	= dirtySrcStages & state.rng.getUint32();
+			srcAccesses	= dirtySrcAccesses & state.rng.getUint32();
+
+			dstStages	= dirtyDstStages & state.rng.getUint32();
+			dstAccesses	= dirtyDstAccesses & state.rng.getUint32();
+
+			// If no bits were left in the stage masks, fall back to the original dirty stages
+			srcStages	= srcStages ? srcStages : dirtySrcStages;
+			dstStages	= dstStages ? dstStages : dirtyDstStages;
+
+			if (!srcStages)
+				srcStages = dstStages;
+
+			state.commandBufferIsEmpty = false;
+			state.cache.barrier(srcStages, srcAccesses, dstStages, dstAccesses);
+			break;
+		}
+
+		case OP_RENDERPASS_BEGIN:
+		{
+			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
+
+			state.stage = STAGE_RENDER_PASS;
+			break;
+		}
+
+		case OP_RENDERPASS_END:
+		{
+			DE_ASSERT(state.stage == STAGE_RENDER_PASS);
+
+			state.stage = STAGE_COMMAND_BUFFER;
+			break;
+		}
+
+		case OP_RENDER_VERTEX_BUFFER:
+		{
+			DE_ASSERT(state.stage == STAGE_RENDER_PASS);
+
+			state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT);
+			break;
+		}
+
+		case OP_RENDER_INDEX_BUFFER:
+		{
+			DE_ASSERT(state.stage == STAGE_RENDER_PASS);
+
+			state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT);
+			break;
+		}
+
+		default:
+			DE_FATAL("Unknown op");
+	}
+}
+
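+// Create a command that is executed directly on the host: map/unmap, flush and
+// invalidate, host memory reads/writes, buffer/image lifetime and idle waits.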
+de::MovePtr<Command> createHostCommand (Op					op,
+										de::Random&			rng,
+										Usage				usage,
+										vk::VkSharingMode	sharing)
+{
+	switch (op)
+	{
+		case OP_MAP:					return de::MovePtr<Command>(new Map());
+		case OP_UNMAP:					return de::MovePtr<Command>(new UnMap());
+
+		case OP_MAP_FLUSH:				return de::MovePtr<Command>(new Flush());
+		case OP_MAP_INVALIDATE:			return de::MovePtr<Command>(new Invalidate());
+
+		case OP_MAP_READ:				return de::MovePtr<Command>(new HostMemoryAccess(true, false, rng.getUint32()));
+		case OP_MAP_WRITE:				return de::MovePtr<Command>(new HostMemoryAccess(false, true, rng.getUint32()));
+		case OP_MAP_MODIFY:				return de::MovePtr<Command>(new HostMemoryAccess(true, true, rng.getUint32()));
+
+		case OP_BUFFER_CREATE:			return de::MovePtr<Command>(new CreateBuffer(usageToBufferUsageFlags(usage), sharing));
+		case OP_BUFFER_DESTROY:			return de::MovePtr<Command>(new DestroyBuffer());
+		case OP_BUFFER_BINDMEMORY:		return de::MovePtr<Command>(new BindBufferMemory());
+
+		case OP_IMAGE_CREATE:			return de::MovePtr<Command>(new CreateImage(usageToImageUsageFlags(usage), sharing));
+		case OP_IMAGE_DESTROY:			return de::MovePtr<Command>(new DestroyImage());
+		case OP_IMAGE_BINDMEMORY:		return de::MovePtr<Command>(new BindImageMemory());
+
+		case OP_QUEUE_WAIT_FOR_IDLE:	return de::MovePtr<Command>(new QueueWaitIdle());
+		case OP_DEVICE_WAIT_FOR_IDLE:	return de::MovePtr<Command>(new DeviceWaitIdle());
+
+		default:
+			DE_FATAL("Unknown op");
+			return de::MovePtr<Command>(DE_NULL);
+	}
+}
+
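+// Create a command that is recorded into a command buffer. For pipeline barrier
+// ops the stage/access masks are derived from the full "dirty" barrier with
+// random bits masked out, mirroring the bookkeeping in applyOp().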
+de::MovePtr<CmdCommand> createCmdCommand (de::Random&	rng,
+										  const State&	state,
+										  Op			op)
+{
+	switch (op)
+	{
+		case OP_BUFFER_FILL:					return de::MovePtr<CmdCommand>(new FillBuffer(rng.getUint32()));
+		case OP_BUFFER_UPDATE:					return de::MovePtr<CmdCommand>(new UpdateBuffer(rng.getUint32()));
+		case OP_BUFFER_COPY_TO_BUFFER:			return de::MovePtr<CmdCommand>(new BufferCopyToBuffer());
+		case OP_BUFFER_COPY_FROM_BUFFER:		return de::MovePtr<CmdCommand>(new BufferCopyFromBuffer(rng.getUint32()));
+
+		case OP_BUFFER_COPY_TO_IMAGE:			return de::MovePtr<CmdCommand>(new BufferCopyToImage());
+		case OP_BUFFER_COPY_FROM_IMAGE:			return de::MovePtr<CmdCommand>(new BufferCopyFromImage(rng.getUint32()));
+
+		case OP_IMAGE_TRANSITION_TO_GENERAL:	return de::MovePtr<CmdCommand>(new ImageTransition());
+
+		case OP_IMAGE_COPY_TO_BUFFER:			return de::MovePtr<CmdCommand>(new ImageCopyToBuffer());
+		case OP_IMAGE_COPY_FROM_BUFFER:			return de::MovePtr<CmdCommand>(new ImageCopyFromBuffer(rng.getUint32()));
+		case OP_IMAGE_COPY_TO_IMAGE:			return de::MovePtr<CmdCommand>(new ImageCopyToImage());
+		case OP_IMAGE_COPY_FROM_IMAGE:			return de::MovePtr<CmdCommand>(new ImageCopyFromImage(rng.getUint32()));
+		case OP_IMAGE_BLIT_TO_IMAGE:
+		{
+			const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
+			return de::MovePtr<CmdCommand>(new ImageBlitToImage(scale));
+		}
+
+		case OP_IMAGE_BLIT_FROM_IMAGE:
+		{
+			const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
+			return de::MovePtr<CmdCommand>(new ImageBlitFromImage(rng.getUint32(), scale));
+		}
+
+		case OP_PIPELINE_BARRIER_GLOBAL:
+		case OP_PIPELINE_BARRIER_BUFFER:
+		case OP_PIPELINE_BARRIER_IMAGE:
+		{
+			vk::VkPipelineStageFlags	dirtySrcStages;
+			vk::VkAccessFlags			dirtySrcAccesses;
+			vk::VkPipelineStageFlags	dirtyDstStages;
+			vk::VkAccessFlags			dirtyDstAccesses;
+
+			vk::VkPipelineStageFlags	srcStages;
+			vk::VkAccessFlags			srcAccesses;
+			vk::VkPipelineStageFlags	dstStages;
+			vk::VkAccessFlags			dstAccesses;
+
+			state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
+
+			// Try masking some random bits
+			srcStages	= dirtySrcStages & rng.getUint32();
+			srcAccesses	= dirtySrcAccesses & rng.getUint32();
+
+			dstStages	= dirtyDstStages & rng.getUint32();
+			dstAccesses	= dirtyDstAccesses & rng.getUint32();
+
+			// If no bits were left in the stage masks, fall back to the original dirty stages
+			srcStages	= srcStages ? srcStages : dirtySrcStages;
+			dstStages	= dstStages ? dstStages : dirtyDstStages;
+
+			if (!srcStages)
+				srcStages = dstStages;
+
+			PipelineBarrier::Type type;
+
+			if (op == OP_PIPELINE_BARRIER_IMAGE)
+				type = PipelineBarrier::TYPE_IMAGE;
+			else if (op == OP_PIPELINE_BARRIER_BUFFER)
+				type = PipelineBarrier::TYPE_BUFFER;
+			else if (op == OP_PIPELINE_BARRIER_GLOBAL)
+				type = PipelineBarrier::TYPE_GLOBAL;
+			else
+			{
+				type = PipelineBarrier::TYPE_LAST;
+				DE_FATAL("Unknown op");
+			}
+
+			return de::MovePtr<CmdCommand>(new PipelineBarrier(srcStages, srcAccesses, dstStages, dstAccesses, type));
+		}
+
+		default:
+			DE_FATAL("Unknown op");
+			return de::MovePtr<CmdCommand>(DE_NULL);
+	}
+}
+
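+// Create a command that is recorded inside a render pass.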
+de::MovePtr<RenderPassCommand> createRenderPassCommand (de::Random&,
+														const State&,
+														Op				op)
+{
+	switch (op)
+	{
+		case OP_RENDER_VERTEX_BUFFER:	return de::MovePtr<RenderPassCommand>(new RenderBuffer(RenderBuffer::RENDERAS_VERTEX_BUFFER));
+		case OP_RENDER_INDEX_BUFFER:	return de::MovePtr<RenderPassCommand>(new RenderBuffer(RenderBuffer::RENDERAS_INDEX_BUFFER));
+
+		default:
+			DE_FATAL("Unknown op");
+			return de::MovePtr<RenderPassCommand>(DE_NULL);
+	}
+}
+
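+// Generate the commands of a single render pass. Ops are consumed from the
+// remaining op budget until OP_RENDERPASS_END is selected; the commands are
+// wrapped into one SubmitRenderPass.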
+de::MovePtr<CmdCommand> createRenderPassCommands (const Memory&	memory,
+												  de::Random&	nextOpRng,
+												  State&		state,
+												  Usage			usage,
+												  size_t&		opNdx,
+												  size_t		opCount)
+{
+	// \todo Exception safety
+	vector<RenderPassCommand*>	commands;
+
+	for (; opNdx < opCount; opNdx++)
+	{
+		vector<Op>	ops;
+
+		getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
+
+		DE_ASSERT(!ops.empty());
+
+		{
+			const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
+
+			if (op == OP_RENDERPASS_END)
+			{
+				break;
+			}
+			else
+			{
+				de::Random	rng	(state.rng);
+
+				commands.push_back(createRenderPassCommand(rng, state, op).release());
+				applyOp(state, memory, op);
+
+				DE_ASSERT(state.rng == rng);
+			}
+		}
+	}
+
+	applyOp(state, memory, OP_RENDERPASS_END);
+	return de::MovePtr<CmdCommand>(new SubmitRenderPass(commands));
+}
+
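+// Generate the commands of a single command buffer. Ops are consumed until
+// OP_COMMAND_BUFFER_END is selected; a nested render pass is generated by
+// createRenderPassCommands(). The commands are wrapped into one SubmitCommandBuffer.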
+de::MovePtr<Command> createCmdCommands (const Memory&	memory,
+										de::Random&		nextOpRng,
+										State&			state,
+										Usage			usage,
+										size_t&			opNdx,
+										size_t			opCount)
+{
+	// \todo Exception safety
+	vector<CmdCommand*>	commands;
+
+	for (; opNdx < opCount; opNdx++)
+	{
+		vector<Op>	ops;
+
+		getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
+
+		DE_ASSERT(!ops.empty());
+
+		{
+			const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
+
+			if (op == OP_COMMAND_BUFFER_END)
+			{
+				break;
+			}
+			else
+			{
+				// \note Command needs to know the state before the operation
+				if (op == OP_RENDERPASS_BEGIN)
+				{
+					applyOp(state, memory, op);
+					commands.push_back(createRenderPassCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
+				}
+				else
+				{
+					de::Random	rng	(state.rng);
+
+					commands.push_back(createCmdCommand(rng, state, op).release());
+					applyOp(state, memory, op);
+
+					DE_ASSERT(state.rng == rng);
+				}
+			}
+		}
+	}
+
+	applyOp(state, memory, OP_COMMAND_BUFFER_END);
+	return de::MovePtr<Command>(new SubmitCommandBuffer(commands));
+}
+
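+// Generate the full command sequence for a test case. A separate random stream
+// (nextOpRng) only selects the next op from the currently available set, while
+// state.rng provides command parameters; applyOp() advances the same stream so
+// the reference state stays in sync with the generated commands.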
+void createCommands (vector<Command*>&			commands,
+					 deUint32					seed,
+					 const Memory&				memory,
+					 Usage						usage,
+					 vk::VkSharingMode			sharingMode)
+{
+	const size_t		opCount		= 100;
+	State				state		(usage, seed);
+	// Used to select next operation only
+	de::Random			nextOpRng	(seed ^ 12930809);
+
+	commands.reserve(opCount);
+
+	for (size_t opNdx = 0; opNdx < opCount; opNdx++)
+	{
+		vector<Op>	ops;
+
+		getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
+
+		DE_ASSERT(!ops.empty());
+
+		{
+			const Op	op	= nextOpRng.choose<Op>(ops.begin(), ops.end());
+
+			if (op == OP_COMMAND_BUFFER_BEGIN)
+			{
+				applyOp(state, memory, op);
+				commands.push_back(createCmdCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
+			}
+			else
+			{
+				de::Random	rng	(state.rng);
+
+				commands.push_back(createHostCommand(op, rng, usage, sharingMode).release());
+				applyOp(state, memory, op);
+
+				// Make sure that random generator is in sync
+				DE_ASSERT(state.rng == rng);
+			}
+		}
+	}
+
+	// Clean up resources
+	if (state.hasBuffer || state.hasImage)
+	{
+		if (!state.queueIdle)
+			commands.push_back(new QueueWaitIdle());
+
+		if (state.hasBuffer)
+			commands.push_back(new DestroyBuffer());
+
+		if (state.hasImage)
+			commands.push_back(new DestroyImage());
+	}
+}
+
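+// Run one generated command sequence: build the commands, log the planned
+// prepare and execute steps, then prepare, execute and verify every command,
+// reporting failures through 'resultCollector'.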
+void testCommand (TestLog&											log,
+				  tcu::ResultCollector&								resultCollector,
+				  const vk::ProgramCollection<vk::ProgramBinary>&	binaryCollection,
+				  const vk::InstanceInterface&						vki,
+				  const vk::DeviceInterface&						vkd,
+				  vk::VkPhysicalDevice								physicalDevice,
+				  vk::VkDevice										device,
+				  vk::VkDeviceSize									size,
+				  deUint32											memoryTypeIndex,
+				  Usage												usage,
+				  vk::VkSharingMode									sharingMode,
+				  vk::VkQueue										executionQueue,
+				  deUint32											executionQueueFamily,
+				  const vector<deUint32>&							queueFamilies,
+				  const vk::VkDeviceSize							maxBufferSize,
+				  const IVec2										maxImageSize)
+{
+	const deUint32							seed			= 2830980989u;
+	Memory									memory			(vki, vkd, physicalDevice, device, size, memoryTypeIndex, maxBufferSize, maxImageSize[0], maxImageSize[1]);
+	vector<Command*>						commands;
+	vector<pair<deUint32, vk::VkQueue> >	queues;
+
+	try
+	{
+		log << TestLog::Message << "Create commands" << TestLog::EndMessage;
+		createCommands(commands, seed, memory, usage, sharingMode);
+
+		for (size_t queueNdx = 0; queueNdx < queueFamilies.size(); queueNdx++)
+		{
+			vk::VkQueue queue;
+
+			vkd.getDeviceQueue(device, queueFamilies[queueNdx], 0, &queue);
+			queues.push_back(std::make_pair(queueFamilies[queueNdx], queue));
+		}
+
+		{
+			const tcu::ScopedLogSection section (log, "LogPrepare", "LogPrepare");
+
+			for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
+				commands[cmdNdx]->logPrepare(log, cmdNdx);
+		}
+
+		{
+			const tcu::ScopedLogSection section (log, "LogExecute", "LogExecute");
+
+			for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
+				commands[cmdNdx]->logExecute(log, cmdNdx);
+		}
+
+		{
+			const Context context (vki, vkd, physicalDevice, device, executionQueue, executionQueueFamily, queues, binaryCollection);
+
+			try
+			{
+				{
+					PrepareContext	prepareContext	(context, memory);
+
+					log << TestLog::Message << "Begin prepare" << TestLog::EndMessage;
+
+					for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
+					{
+						Command& command = *commands[cmdNdx];
+
+						try
+						{
+							command.prepare(prepareContext);
+						}
+						catch (...)
+						{
+							resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to prepare for execution");
+							throw;
+						}
+					}
+
+					ExecuteContext	executeContext	(context);
+
+					log << TestLog::Message << "Begin execution" << TestLog::EndMessage;
+
+					for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
+					{
+						Command& command = *commands[cmdNdx];
+
+						try
+						{
+							command.execute(executeContext);
+						}
+						catch (...)
+						{
+							resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to execute");
+							throw;
+						}
+					}
+
+					VK_CHECK(vkd.deviceWaitIdle(device));
+				}
+
+				{
+					const tcu::ScopedLogSection	section			(log, "Verify", "Verify");
+					VerifyContext				verifyContext	(log, resultCollector, context, size);
+
+					log << TestLog::Message << "Begin verify" << TestLog::EndMessage;
+
+					for (size_t cmdNdx = 0; cmdNdx < commands.size(); cmdNdx++)
+					{
+						Command& command = *commands[cmdNdx];
+
+						try
+						{
+							command.verify(verifyContext, cmdNdx);
+						}
+						catch (...)
+						{
+							resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed verification");
+							throw;
+						}
+					}
+				}
+
+				for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
+				{
+					delete commands[commandNdx];
+					commands[commandNdx] = DE_NULL;
+				}
+			}
+			catch (...)
+			{
+				for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
+				{
+					delete commands[commandNdx];
+					commands[commandNdx] = DE_NULL;
+				}
+
+				throw;
+			}
+		}
+	}
+	catch (...)
+	{
+		for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
+		{
+			delete commands[commandNdx];
+			commands[commandNdx] = DE_NULL;
+		}
+
+		throw;
+	}
+}
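+
+// Runs the randomized pipeline barrier scenario against every memory type of
+// the device, handling one memory type per iterate() call.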
+class MemoryTestInstance : public TestInstance
+{
+public:
+
+						MemoryTestInstance	(::vkt::Context& context, const TestConfig& config);
+
+	tcu::TestStatus		iterate				(void);
+
+private:
+	const TestConfig							m_config;
+	const vk::VkPhysicalDeviceMemoryProperties	m_memoryProperties;
+	deUint32									m_memoryTypeNdx;
+	tcu::ResultCollector						m_resultCollector;
+};
+
+MemoryTestInstance::MemoryTestInstance (::vkt::Context& context, const TestConfig& config)
+	: TestInstance			(context)
+	, m_config				(config)
+	, m_memoryProperties	(vk::getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()))
+	, m_memoryTypeNdx		(0)
+	, m_resultCollector		(context.getTestContext().getLog())
+{
+	TestLog&	log		= context.getTestContext().getLog();
+	{
+		const tcu::ScopedLogSection section (log, "TestCaseInfo", "Test Case Info");
+
+		log << TestLog::Message << "Buffer size: " << config.size << TestLog::EndMessage;
+		log << TestLog::Message << "Sharing: " << config.sharing << TestLog::EndMessage;
+		log << TestLog::Message << "Access: " << config.usage << TestLog::EndMessage;
+	}
+
+	{
+		const tcu::ScopedLogSection section (log, "MemoryProperties", "Memory Properties");
+
+		for (deUint32 heapNdx = 0; heapNdx < m_memoryProperties.memoryHeapCount; heapNdx++)
+		{
+			const tcu::ScopedLogSection heapSection (log, "Heap" + de::toString(heapNdx), "Heap " + de::toString(heapNdx));
+
+			log << TestLog::Message << "Size: " << m_memoryProperties.memoryHeaps[heapNdx].size << TestLog::EndMessage;
+			log << TestLog::Message << "Flags: " << m_memoryProperties.memoryHeaps[heapNdx].flags << TestLog::EndMessage;
+		}
+
+		for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < m_memoryProperties.memoryTypeCount; memoryTypeNdx++)
+		{
+			const tcu::ScopedLogSection memoryTypeSection (log, "MemoryType" + de::toString(memoryTypeNdx), "Memory type " + de::toString(memoryTypeNdx));
+
+			log << TestLog::Message << "Properties: " << m_memoryProperties.memoryTypes[memoryTypeNdx].propertyFlags << TestLog::EndMessage;
+			log << TestLog::Message << "Heap: " << m_memoryProperties.memoryTypes[memoryTypeNdx].heapIndex << TestLog::EndMessage;
+		}
+	}
+}
+
+tcu::TestStatus MemoryTestInstance::iterate (void)
+{
+	// \todo Split different stages over multiple iterations
+	if (m_memoryTypeNdx < m_memoryProperties.memoryTypeCount)
+	{
+		TestLog&									log					= m_context.getTestContext().getLog();
+		const tcu::ScopedLogSection					section				(log, "MemoryType" + de::toString(m_memoryTypeNdx), "Memory type " + de::toString(m_memoryTypeNdx));
+		const vk::InstanceInterface&				vki					= m_context.getInstanceInterface();
+		const vk::VkPhysicalDevice					physicalDevice		= m_context.getPhysicalDevice();
+		const vk::DeviceInterface&					vkd					= m_context.getDeviceInterface();
+		const vk::VkDevice							device				= m_context.getDevice();
+		const vk::VkQueue							queue				= m_context.getUniversalQueue();
+		const deUint32								queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+		const vk::VkPhysicalDeviceMemoryProperties	memoryProperties	= vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
+		vector<deUint32>							queues;
+
+		queues.push_back(queueFamilyIndex);
+
+		if (m_config.usage & (USAGE_HOST_READ|USAGE_HOST_WRITE)
+			&& !(memoryProperties.memoryTypes[m_memoryTypeNdx].propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
+		{
+			log << TestLog::Message << "Memory type not supported" << TestLog::EndMessage;
+
+			m_memoryTypeNdx++;
+			return tcu::TestStatus::incomplete();
+		}
+		else
+		{
+			try
+			{
+				const vk::VkBufferUsageFlags	bufferUsage		= usageToBufferUsageFlags(m_config.usage);
+				const vk::VkImageUsageFlags		imageUsage		= usageToImageUsageFlags(m_config.usage);
+				const vk::VkDeviceSize			maxBufferSize	= bufferUsage != 0
+																? roundBufferSizeToWxHx4(findMaxBufferSize(vkd, device, bufferUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx))
+																: 0;
+				const IVec2						maxImageSize	= imageUsage != 0
+																? findMaxRGBA8ImageSize(vkd, device, imageUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx)
+																: IVec2(0, 0);
+
+				log << TestLog::Message << "Max buffer size: " << maxBufferSize << TestLog::EndMessage;
+				log << TestLog::Message << "Max RGBA8 image size: " << maxImageSize << TestLog::EndMessage;
+
+				// Skip tests if there are no supported operations
+				if (maxBufferSize == 0
+					&& maxImageSize[0] == 0
+					&& (m_config.usage & (USAGE_HOST_READ|USAGE_HOST_WRITE)) == 0)
+				{
+					log << TestLog::Message << "Skipping memory type. None of the usages are supported." << TestLog::EndMessage;
+				}
+				else
+				{
+					testCommand(log, m_resultCollector, m_context.getBinaryCollection(), vki, vkd, physicalDevice, device, m_config.size, m_memoryTypeNdx, m_config.usage, m_config.sharing, queue, queueFamilyIndex, queues, maxBufferSize, maxImageSize);
+				}
+			}
+			catch (const tcu::TestError& e)
+			{
+				m_resultCollector.fail("Failed, got exception: " + string(e.getMessage()));
+			}
+
+			m_memoryTypeNdx++;
+			return tcu::TestStatus::incomplete();
+		}
+	}
+	else
+		return tcu::TestStatus(m_resultCollector.getResult(), m_resultCollector.getMessage());
+}
+
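+// Builds the programs used by the render pass commands: plain vertex shaders
+// for vertex and index buffer rendering and a fragment shader that writes white.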
+struct AddPrograms
+{
+	void init (vk::SourceCollections& sources, TestConfig config) const
+	{
+		// Vertex buffer rendering
+		if (config.usage & USAGE_VERTEX_BUFFER)
+		{
+			const char* const vertexShader =
+				"#version 310 es\n"
+				"layout(location = 0) in highp vec2 a_position;\n"
+				"void main (void) {\n"
+				"\tgl_PointSize = 1.0;\n"
+				"\tgl_Position = vec4(1.998 * a_position - vec2(0.999), 0.0, 1.0);\n"
+				"}\n";
+
+			sources.glslSources.add("vertex-buffer.vert")
+				<< glu::VertexSource(vertexShader);
+		}
+
+		// Index buffer rendering
+		if (config.usage & USAGE_INDEX_BUFFER)
+		{
+			const char* const vertexShader =
+				"#version 310 es\n"
+				"precision highp float;\n"
+				"void main (void) {\n"
+				"\tgl_PointSize = 1.0;\n"
+				"\thighp vec2 pos = vec2(gl_VertexIndex % 256, gl_VertexIndex / 256) / vec2(255.0);\n"
+				"\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
+				"}\n";
+
+			sources.glslSources.add("index-buffer.vert")
+				<< glu::VertexSource(vertexShader);
+		}
+
+		{
+			const char* const fragmentShader =
+				"#version 310 es\n"
+				"layout(location = 0) out highp vec4 o_color;\n"
+				"void main (void) {\n"
+				"\to_color = vec4(1.0);\n"
+				"}\n";
+
+			sources.glslSources.add("render-white.frag")
+				<< glu::FragmentSource(fragmentShader);
+		}
+	}
+};
+
+} // anonymous
+
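+// Group layout: one sub-group per write/read usage combination, plus "all" and
+// "all_device" combinations, each tested over several memory object sizes.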
+tcu::TestCaseGroup* createPipelineBarrierTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "pipeline_barrier", "Pipeline barrier tests."));
+	const vk::VkDeviceSize			sizes[]			=
+	{
+		1024,			// 1K
+		8*1024,			// 8K
+		64*1024,		// 64K
+		1024*1024,		// 1M
+	};
+	const Usage						usages[]		=
+	{
+		USAGE_HOST_READ,
+		USAGE_HOST_WRITE,
+		USAGE_TRANSFER_SRC,
+		USAGE_TRANSFER_DST,
+		USAGE_VERTEX_BUFFER,
+		USAGE_INDEX_BUFFER
+	};
+	const Usage						readUsages[]		=
+	{
+		USAGE_HOST_READ,
+		USAGE_TRANSFER_SRC,
+		USAGE_VERTEX_BUFFER,
+		USAGE_INDEX_BUFFER
+	};
+
+	const Usage						writeUsages[]	=
+	{
+		USAGE_HOST_WRITE,
+		USAGE_TRANSFER_DST
+	};
+
+	for (size_t writeUsageNdx = 0; writeUsageNdx < DE_LENGTH_OF_ARRAY(writeUsages); writeUsageNdx++)
+	{
+		const Usage	writeUsage	= writeUsages[writeUsageNdx];
+
+		for (size_t readUsageNdx = 0; readUsageNdx < DE_LENGTH_OF_ARRAY(readUsages); readUsageNdx++)
+		{
+			const Usage						readUsage		= readUsages[readUsageNdx];
+			const Usage						usage			= writeUsage | readUsage;
+			const string					usageGroupName	(usageToName(usage));
+			de::MovePtr<tcu::TestCaseGroup>	usageGroup		(new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
+
+			for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
+			{
+				const vk::VkDeviceSize	size		= sizes[sizeNdx];
+				const string			testName	(de::toString((deUint64)(size)));
+				const TestConfig		config		=
+				{
+					usage,
+					size,
+					vk::VK_SHARING_MODE_EXCLUSIVE
+				};
+
+				usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx, tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
+			}
+
+			group->addChild(usageGroup.get());
+			usageGroup.release();
+		}
+	}
+
+	{
+		Usage all = (Usage)0;
+
+		for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usages); usageNdx++)
+			all = all | usages[usageNdx];
+
+		{
+			const string					usageGroupName	("all");
+			de::MovePtr<tcu::TestCaseGroup>	usageGroup		(new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
+
+			for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
+			{
+				const vk::VkDeviceSize	size		= sizes[sizeNdx];
+				const string			testName	(de::toString((deUint64)(size)));
+				const TestConfig		config		=
+				{
+					all,
+					size,
+					vk::VK_SHARING_MODE_EXCLUSIVE
+				};
+
+				usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx, tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
+			}
+
+			group->addChild(usageGroup.get());
+			usageGroup.release();
+		}
+
+		{
+			const string					usageGroupName	("all_device");
+			de::MovePtr<tcu::TestCaseGroup>	usageGroup		(new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
+
+			for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
+			{
+				const vk::VkDeviceSize	size		= sizes[sizeNdx];
+				const string			testName	(de::toString((deUint64)(size)));
+				const TestConfig		config		=
+				{
+					(Usage)(all & (~(USAGE_HOST_READ|USAGE_HOST_WRITE))),
+					size,
+					vk::VK_SHARING_MODE_EXCLUSIVE
+				};
+
+				usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx, tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
+			}
+
+			group->addChild(usageGroup.get());
+			usageGroup.release();
+		}
+	}
+
+	return group.release();
+}
+
+} // memory
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/memory/vktMemoryPipelineBarrierTests.hpp b/external/vulkancts/modules/vulkan/memory/vktMemoryPipelineBarrierTests.hpp
new file mode 100644
index 0000000..7cf591f
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/memory/vktMemoryPipelineBarrierTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTMEMORYPIPELINEBARRIERTESTS_HPP
+#define _VKTMEMORYPIPELINEBARRIERTESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Pipeline barrier tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace memory
+{
+
+tcu::TestCaseGroup*		createPipelineBarrierTests		(tcu::TestContext& testCtx);
+
+} // memory
+} // vkt
+
+#endif // _VKTMEMORYPIPELINEBARRIERTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/memory/vktMemoryTests.cpp b/external/vulkancts/modules/vulkan/memory/vktMemoryTests.cpp
new file mode 100644
index 0000000..af5d2bc
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/memory/vktMemoryTests.cpp
@@ -0,0 +1,67 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Memory Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktMemoryTests.hpp"
+
+#include "vktMemoryAllocationTests.hpp"
+#include "vktMemoryMappingTests.hpp"
+#include "vktMemoryPipelineBarrierTests.hpp"
+#include "vktTestGroupUtil.hpp"
+
+namespace vkt
+{
+namespace memory
+{
+
+namespace
+{
+
+void createChildren (tcu::TestCaseGroup* memoryTests)
+{
+	tcu::TestContext&	testCtx		= memoryTests->getTestContext();
+
+	memoryTests->addChild(createAllocationTests			(testCtx));
+	memoryTests->addChild(createMappingTests			(testCtx));
+	memoryTests->addChild(createPipelineBarrierTests	(testCtx));
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx)
+{
+	return createTestGroup(testCtx, "memory", "Memory Tests", createChildren);
+}
+
+} // memory
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/memory/vktMemoryTests.hpp b/external/vulkancts/modules/vulkan/memory/vktMemoryTests.hpp
new file mode 100644
index 0000000..7234c3a
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/memory/vktMemoryTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTMEMORYTESTS_HPP
+#define _VKTMEMORYTESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Memory tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace memory
+{
+
+tcu::TestCaseGroup*		createTests		(tcu::TestContext& testCtx);
+
+} // memory
+} // vkt
+
+#endif // _VKTMEMORYTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/CMakeLists.txt b/external/vulkancts/modules/vulkan/pipeline/CMakeLists.txt
new file mode 100644
index 0000000..fc3c099
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/CMakeLists.txt
@@ -0,0 +1,54 @@
+
+include_directories(
+	..
+	)
+
+set(DEQP_VK_PIPELINE_SRCS
+	vktPipelineBlendTests.cpp
+	vktPipelineBlendTests.hpp
+	vktPipelineClearUtil.cpp
+	vktPipelineClearUtil.hpp
+	vktPipelineCombinationsIterator.hpp
+	vktPipelineDepthTests.cpp
+	vktPipelineDepthTests.hpp
+	vktPipelineImageSamplingInstance.cpp
+	vktPipelineImageSamplingInstance.hpp
+	vktPipelineImageTests.cpp
+	vktPipelineImageTests.hpp
+	vktPipelinePushConstantTests.cpp
+	vktPipelinePushConstantTests.hpp
+	vktPipelineImageUtil.cpp
+	vktPipelineImageUtil.hpp
+	vktPipelineImageViewTests.cpp
+	vktPipelineImageViewTests.hpp
+	vktPipelineMultisampleTests.cpp
+	vktPipelineMultisampleTests.hpp
+	vktPipelineInputAssemblyTests.cpp
+	vktPipelineInputAssemblyTests.hpp
+	vktPipelineReferenceRenderer.cpp
+	vktPipelineReferenceRenderer.hpp
+	vktPipelineSamplerTests.cpp
+	vktPipelineSamplerTests.hpp
+	vktPipelineStencilTests.cpp
+	vktPipelineStencilTests.hpp
+	vktPipelineTests.cpp
+	vktPipelineTests.hpp
+	vktPipelineUniqueRandomIterator.hpp
+	vktPipelineVertexInputTests.cpp
+	vktPipelineVertexInputTests.hpp
+	vktPipelineTimestampTests.cpp
+	vktPipelineTimestampTests.hpp
+	vktPipelineVertexUtil.cpp
+	vktPipelineVertexUtil.hpp
+	)
+
+set(DEQP_VK_PIPELINE_LIBS
+	deqp-vk-common
+	tcutil
+	vkutil
+	referencerenderer
+	)
+
+add_library(deqp-vk-pipeline STATIC ${DEQP_VK_PIPELINE_SRCS})
+target_link_libraries(deqp-vk-pipeline ${DEQP_VK_PIPELINE_LIBS})
+
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineBlendTests.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineBlendTests.cpp
new file mode 100644
index 0000000..80e0152
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineBlendTests.cpp
@@ -0,0 +1,1137 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Blend Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineBlendTests.hpp"
+#include "vktPipelineClearUtil.hpp"
+#include "vktPipelineImageUtil.hpp"
+#include "vktPipelineVertexUtil.hpp"
+#include "vktPipelineUniqueRandomIterator.hpp"
+#include "vktPipelineReferenceRenderer.hpp"
+#include "vktTestCase.hpp"
+#include "vkImageUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkPrograms.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "tcuImageCompare.hpp"
+#include "tcuPlatform.hpp"
+#include "tcuTextureUtil.hpp"
+#include "deRandom.hpp"
+#include "deStringUtil.hpp"
+#include "deUniquePtr.hpp"
+#include <cstring>
+#include <set>
+#include <sstream>
+#include <vector>
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+
+namespace
+{
+
+bool isSupportedBlendFormat (const InstanceInterface& instanceInterface, VkPhysicalDevice device, VkFormat format)
+{
+	VkFormatProperties formatProps;
+
+	instanceInterface.getPhysicalDeviceFormatProperties(device, format, &formatProps);
+
+	return (formatProps.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT) &&
+		   (formatProps.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT);
+}
+
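+// Iterates over a pseudo-random, non-repeating subset of all combinations of
+// source/destination blend factors and blend ops for color and alpha.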
+class BlendStateUniqueRandomIterator : public UniqueRandomIterator<VkPipelineColorBlendAttachmentState>
+{
+public:
+											BlendStateUniqueRandomIterator		(deUint32 numberOfCombinations, int seed);
+	virtual									~BlendStateUniqueRandomIterator		(void) {}
+	VkPipelineColorBlendAttachmentState		getIndexedValue	(deUint32 index);
+
+private:
+	const static VkBlendFactor				m_blendFactors[];
+	const static VkBlendOp					m_blendOps[];
+
+	// Pre-calculated constants
+	const static deUint32					m_blendFactorsLength;
+	const static deUint32					m_blendFactorsLength2;
+	const static deUint32					m_blendFactorsLength3;
+	const static deUint32					m_blendFactorsLength4;
+	const static deUint32					m_blendOpsLength;
+
+	// Total number of cross-combinations of (srcBlendColor x destBlendColor x blendOpColor x srcBlendAlpha x destBlendAlpha x blendOpAlpha)
+	const static deUint32					m_totalBlendStates;
+};
+
+class BlendTest : public vkt::TestCase
+{
+public:
+	enum
+	{
+		QUAD_COUNT = 4
+	};
+
+	const static VkColorComponentFlags	s_colorWriteMasks[QUAD_COUNT];
+	const static tcu::Vec4				s_blendConst;
+
+										BlendTest				(tcu::TestContext&							testContext,
+																 const std::string&							name,
+																 const std::string&							description,
+																 const VkFormat								colorFormat,
+																 const VkPipelineColorBlendAttachmentState	blendStates[QUAD_COUNT]);
+	virtual								~BlendTest				(void);
+	virtual void						initPrograms			(SourceCollections& sourceCollections) const;
+	virtual TestInstance*				createInstance			(Context& context) const;
+
+private:
+	const VkFormat						m_colorFormat;
+	VkPipelineColorBlendAttachmentState	m_blendStates[QUAD_COUNT];
+};
+
+class BlendTestInstance : public vkt::TestInstance
+{
+public:
+										BlendTestInstance		(Context& context, const VkFormat colorFormat, const VkPipelineColorBlendAttachmentState blendStates[BlendTest::QUAD_COUNT]);
+	virtual								~BlendTestInstance		(void);
+	virtual tcu::TestStatus				iterate					(void);
+
+private:
+	static float						getNormChannelThreshold	(const tcu::TextureFormat& format, int numBits);
+	static tcu::Vec4					getFormatThreshold		(const tcu::TextureFormat& format);
+	tcu::TestStatus						verifyImage				(void);
+
+	VkPipelineColorBlendAttachmentState	m_blendStates[BlendTest::QUAD_COUNT];
+
+	const tcu::UVec2					m_renderSize;
+	const VkFormat						m_colorFormat;
+
+	VkImageCreateInfo					m_colorImageCreateInfo;
+	Move<VkImage>						m_colorImage;
+	de::MovePtr<Allocation>				m_colorImageAlloc;
+	Move<VkImageView>					m_colorAttachmentView;
+	Move<VkRenderPass>					m_renderPass;
+	Move<VkFramebuffer>					m_framebuffer;
+
+	Move<VkShaderModule>				m_vertexShaderModule;
+	Move<VkShaderModule>				m_fragmentShaderModule;
+
+	Move<VkBuffer>						m_vertexBuffer;
+	std::vector<Vertex4RGBA>			m_vertices;
+	de::MovePtr<Allocation>				m_vertexBufferAlloc;
+
+	Move<VkPipelineLayout>				m_pipelineLayout;
+	Move<VkPipeline>					m_graphicsPipelines[BlendTest::QUAD_COUNT];
+
+	Move<VkCommandPool>					m_cmdPool;
+	Move<VkCommandBuffer>				m_cmdBuffer;
+
+	Move<VkFence>						m_fence;
+};
+
+
+// BlendStateUniqueRandomIterator
+
+const VkBlendFactor BlendStateUniqueRandomIterator::m_blendFactors[] =
+{
+	VK_BLEND_FACTOR_ZERO,
+	VK_BLEND_FACTOR_ONE,
+	VK_BLEND_FACTOR_SRC_COLOR,
+	VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
+	VK_BLEND_FACTOR_DST_COLOR,
+	VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
+	VK_BLEND_FACTOR_SRC_ALPHA,
+	VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
+	VK_BLEND_FACTOR_DST_ALPHA,
+	VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
+	VK_BLEND_FACTOR_CONSTANT_COLOR,
+	VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
+	VK_BLEND_FACTOR_CONSTANT_ALPHA,
+	VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
+	VK_BLEND_FACTOR_SRC_ALPHA_SATURATE
+};
+
+const VkBlendOp BlendStateUniqueRandomIterator::m_blendOps[] =
+{
+	VK_BLEND_OP_ADD,
+	VK_BLEND_OP_SUBTRACT,
+	VK_BLEND_OP_REVERSE_SUBTRACT,
+	VK_BLEND_OP_MIN,
+	VK_BLEND_OP_MAX
+};
+
+const deUint32 BlendStateUniqueRandomIterator::m_blendFactorsLength		= DE_LENGTH_OF_ARRAY(m_blendFactors);
+const deUint32 BlendStateUniqueRandomIterator::m_blendFactorsLength2	= m_blendFactorsLength * m_blendFactorsLength;
+const deUint32 BlendStateUniqueRandomIterator::m_blendFactorsLength3	= m_blendFactorsLength2 * m_blendFactorsLength;
+const deUint32 BlendStateUniqueRandomIterator::m_blendFactorsLength4	= m_blendFactorsLength3 * m_blendFactorsLength;
+const deUint32 BlendStateUniqueRandomIterator::m_blendOpsLength			= DE_LENGTH_OF_ARRAY(m_blendOps);
+const deUint32 BlendStateUniqueRandomIterator::m_totalBlendStates		= m_blendFactorsLength4 * m_blendOpsLength * m_blendOpsLength;
+
+
+BlendStateUniqueRandomIterator::BlendStateUniqueRandomIterator (deUint32 numberOfCombinations, int seed)
+	: UniqueRandomIterator<VkPipelineColorBlendAttachmentState>(numberOfCombinations, m_totalBlendStates, seed)
+{
+}
+
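+// Decode a linear index into a blend attachment state. With F blend factors and
+// O blend ops the index is a mixed-radix number:
+//
+//   index = opAlpha*F^4*O + dstAlpha*F^3*O + srcAlpha*F^2*O
+//         + opColor*F^2   + dstColor*F     + srcColor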
+VkPipelineColorBlendAttachmentState BlendStateUniqueRandomIterator::getIndexedValue (deUint32 index)
+{
+	const deUint32		blendOpAlphaIndex			= index / (m_blendFactorsLength4 * m_blendOpsLength);
+	const deUint32		blendOpAlphaSeqIndex		= blendOpAlphaIndex * (m_blendFactorsLength4 * m_blendOpsLength);
+
+	const deUint32		destBlendAlphaIndex			= (index - blendOpAlphaSeqIndex) / (m_blendFactorsLength3 * m_blendOpsLength);
+	const deUint32		destBlendAlphaSeqIndex		= destBlendAlphaIndex * (m_blendFactorsLength3 * m_blendOpsLength);
+
+	const deUint32		srcBlendAlphaIndex			= (index - blendOpAlphaSeqIndex - destBlendAlphaSeqIndex) / (m_blendFactorsLength2 * m_blendOpsLength);
+	const deUint32		srcBlendAlphaSeqIndex		= srcBlendAlphaIndex * (m_blendFactorsLength2 * m_blendOpsLength);
+
+	const deUint32		blendOpColorIndex			= (index - blendOpAlphaSeqIndex - destBlendAlphaSeqIndex - srcBlendAlphaSeqIndex) / m_blendFactorsLength2;
+	const deUint32		blendOpColorSeqIndex		= blendOpColorIndex * m_blendFactorsLength2;
+
+	const deUint32		destBlendColorIndex			= (index - blendOpAlphaSeqIndex - destBlendAlphaSeqIndex - srcBlendAlphaSeqIndex - blendOpColorSeqIndex) / m_blendFactorsLength;
+	const deUint32		destBlendColorSeqIndex		= destBlendColorIndex * m_blendFactorsLength;
+
+	const deUint32		srcBlendColorIndex			= index - blendOpAlphaSeqIndex - destBlendAlphaSeqIndex - srcBlendAlphaSeqIndex - blendOpColorSeqIndex - destBlendColorSeqIndex;
+
+	const VkPipelineColorBlendAttachmentState blendAttachmentState =
+	{
+		true,														// VkBool32					blendEnable;
+		m_blendFactors[srcBlendColorIndex],							// VkBlendFactor			srcColorBlendFactor;
+		m_blendFactors[destBlendColorIndex],						// VkBlendFactor			dstColorBlendFactor;
+		m_blendOps[blendOpColorIndex],								// VkBlendOp				colorBlendOp;
+		m_blendFactors[srcBlendAlphaIndex],							// VkBlendFactor			srcAlphaBlendFactor;
+		m_blendFactors[destBlendAlphaIndex],						// VkBlendFactor			dstAlphaBlendFactor;
+		m_blendOps[blendOpAlphaIndex],								// VkBlendOp				alphaBlendOp;
+		VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |		// VkColorComponentFlags	colorWriteMask;
+			VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT
+	};
+
+	return blendAttachmentState;
+}
+
+
+// BlendTest
+
+const VkColorComponentFlags BlendTest::s_colorWriteMasks[BlendTest::QUAD_COUNT] = { VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT,	// Pair of channels: R & G
+																					VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT,	// Pair of channels: G & B
+																					VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT,	// Pair of channels: B & A
+																					VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT };	// All channels
+
+const tcu::Vec4 BlendTest::s_blendConst = tcu::Vec4(0.1f, 0.2f, 0.3f, 0.4f);
+
+BlendTest::BlendTest (tcu::TestContext&								testContext,
+					  const std::string&							name,
+					  const std::string&							description,
+					  const VkFormat								colorFormat,
+					  const VkPipelineColorBlendAttachmentState		blendStates[QUAD_COUNT])
+	: vkt::TestCase	(testContext, name, description)
+	, m_colorFormat(colorFormat)
+{
+	deMemcpy(m_blendStates, blendStates, sizeof(VkPipelineColorBlendAttachmentState) * QUAD_COUNT);
+}
+
+BlendTest::~BlendTest (void)
+{
+}
+
+TestInstance* BlendTest::createInstance(Context& context) const
+{
+	return new BlendTestInstance(context, m_colorFormat, m_blendStates);
+}
+
+void BlendTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	std::ostringstream fragmentSource;
+
+	sourceCollections.glslSources.add("color_vert") << glu::VertexSource(
+		"#version 310 es\n"
+		"layout(location = 0) in highp vec4 position;\n"
+		"layout(location = 1) in highp vec4 color;\n"
+		"layout(location = 0) out highp vec4 vtxColor;\n"
+		"void main (void)\n"
+		"{\n"
+		"	gl_Position = position;\n"
+		"	vtxColor = color;\n"
+		"}\n");
+
+	fragmentSource << "#version 310 es\n"
+		"layout(location = 0) in highp vec4 vtxColor;\n"
+		"layout(location = 0) out highp vec4 fragColor;\n"
+		"void main (void)\n"
+		"{\n"
+		"	fragColor = vtxColor;\n"
+		"}\n";
+
+	sourceCollections.glslSources.add("color_frag") << glu::FragmentSource(fragmentSource.str());
+}
+
+
+// BlendTestInstance
+
+BlendTestInstance::BlendTestInstance (Context&									context,
+									  const VkFormat							colorFormat,
+									  const VkPipelineColorBlendAttachmentState	blendStates[BlendTest::QUAD_COUNT])
+	: vkt::TestInstance	(context)
+	, m_renderSize		(32, 32)
+	, m_colorFormat		(colorFormat)
+{
+	const DeviceInterface&		vk					= m_context.getDeviceInterface();
+	const VkDevice				vkDevice			= m_context.getDevice();
+	const deUint32				queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc			(vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
+
+	// Copy blend states
+	deMemcpy(m_blendStates, blendStates, sizeof(VkPipelineColorBlendAttachmentState) * BlendTest::QUAD_COUNT);
+
+	// Create color image
+	{
+		if (!isSupportedBlendFormat(context.getInstanceInterface(), context.getPhysicalDevice(), m_colorFormat))
+			throw tcu::NotSupportedError(std::string("Unsupported color blending format: ") + getFormatName(m_colorFormat));
+
+		const VkImageCreateInfo	colorImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,										// VkStructureType			sType;
+			DE_NULL,																	// const void*				pNext;
+			0u,																			// VkImageCreateFlags		flags;
+			VK_IMAGE_TYPE_2D,															// VkImageType				imageType;
+			m_colorFormat,																// VkFormat					format;
+			{ m_renderSize.x(), m_renderSize.y(), 1u },									// VkExtent3D				extent;
+			1u,																			// deUint32					mipLevels;
+			1u,																			// deUint32					arrayLayers;
+			VK_SAMPLE_COUNT_1_BIT,														// VkSampleCountFlagBits	samples;
+			VK_IMAGE_TILING_OPTIMAL,													// VkImageTiling			tiling;
+			VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,		// VkImageUsageFlags		usage;
+			VK_SHARING_MODE_EXCLUSIVE,													// VkSharingMode			sharingMode;
+			1u,																			// deUint32					queueFamilyIndexCount;
+			&queueFamilyIndex,															// const deUint32*			pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED													// VkImageLayout			initialLayout;
+		};
+
+		m_colorImageCreateInfo	= colorImageParams;
+		m_colorImage			= createImage(vk, vkDevice, &m_colorImageCreateInfo);
+
+		// Allocate and bind color image memory
+		m_colorImageAlloc		= memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_colorImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_colorImage, m_colorImageAlloc->getMemory(), m_colorImageAlloc->getOffset()));
+	}
+
+	// Create color attachment view
+	{
+		const VkImageViewCreateInfo colorAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			0u,													// VkImageViewCreateFlags	flags;
+			*m_colorImage,										// VkImage					image;
+			VK_IMAGE_VIEW_TYPE_2D,								// VkImageViewType			viewType;
+			m_colorFormat,										// VkFormat					format;
+			{VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY},
+			{ VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u }		// VkImageSubresourceRange	subresourceRange;
+		};
+
+		m_colorAttachmentView = createImageView(vk, vkDevice, &colorAttachmentViewParams);
+	}
+
+	// Create render pass
+	{
+		const VkAttachmentDescription colorAttachmentDescription =
+		{
+			0u,													// VkAttachmentDescriptionFlags		flags;
+			m_colorFormat,										// VkFormat							format;
+			VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits			samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp				loadOp;
+			VK_ATTACHMENT_STORE_OP_STORE,						// VkAttachmentStoreOp				storeOp;
+			VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// VkAttachmentLoadOp				stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp				stencilStoreOp;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout					initialLayout;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout					finalLayout;
+		};
+
+		const VkAttachmentReference colorAttachmentReference =
+		{
+			0u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout	layout;
+		};
+
+		const VkSubpassDescription subpassDescription =
+		{
+			0u,													// VkSubpassDescriptionFlags	flags;
+			VK_PIPELINE_BIND_POINT_GRAPHICS,					// VkPipelineBindPoint			pipelineBindPoint;
+			0u,													// deUint32						inputAttachmentCount;
+			DE_NULL,											// const VkAttachmentReference*	pInputAttachments;
+			1u,													// deUint32						colorAttachmentCount;
+			&colorAttachmentReference,							// const VkAttachmentReference*	pColorAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pResolveAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pDepthStencilAttachment;
+			0u,													// deUint32						preserveAttachmentCount;
+			DE_NULL												// const VkAttachmentReference*	pPreserveAttachments;
+		};
+
+		const VkRenderPassCreateInfo renderPassParams =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,			// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkRenderPassCreateFlags			flags;
+			1u,													// deUint32							attachmentCount;
+			&colorAttachmentDescription,						// const VkAttachmentDescription*	pAttachments;
+			1u,													// deUint32							subpassCount;
+			&subpassDescription,								// const VkSubpassDescription*		pSubpasses;
+			0u,													// deUint32							dependencyCount;
+			DE_NULL												// const VkSubpassDependency*		pDependencies;
+		};
+
+		m_renderPass = createRenderPass(vk, vkDevice, &renderPassParams);
+	}
+
+	// Create framebuffer
+	{
+		const VkFramebufferCreateInfo framebufferParams =
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			0u,													// VkFramebufferCreateFlags	flags;
+			*m_renderPass,										// VkRenderPass				renderPass;
+			1u,													// deUint32					attachmentCount;
+			&m_colorAttachmentView.get(),						// const VkImageView*		pAttachments;
+			(deUint32)m_renderSize.x(),							// deUint32					width;
+			(deUint32)m_renderSize.y(),							// deUint32					height;
+			1u													// deUint32					layers;
+		};
+
+		m_framebuffer = createFramebuffer(vk, vkDevice, &framebufferParams);
+	}
+
+	// Create pipeline layout
+	{
+		const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkPipelineLayoutCreateFlags		flags;
+			0u,													// deUint32							setLayoutCount;
+			DE_NULL,											// const VkDescriptorSetLayout*		pSetLayouts;
+			0u,													// deUint32							pushConstantRangeCount;
+			DE_NULL												// const VkPushConstantRange*		pPushConstantRanges;
+		};
+
+		m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	m_vertexShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("color_vert"), 0);
+	m_fragmentShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("color_frag"), 0);
+
+	// Create pipeline
+	{
+		const VkPipelineShaderStageCreateInfo shaderStages[2] =
+		{
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// VkStructureType					sType;
+				DE_NULL,												// const void*						pNext;
+				0u,														// VkPipelineShaderStageCreateFlags	flags;
+				VK_SHADER_STAGE_VERTEX_BIT,								// VkShaderStageFlagBits			stage;
+				*m_vertexShaderModule,									// VkShaderModule					module;
+				"main",													// const char*						pName;
+				DE_NULL													// const VkSpecializationInfo*		pSpecializationInfo;
+			},
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// VkStructureType					sType;
+				DE_NULL,												// const void*						pNext;
+				0u,														// VkPipelineShaderStageCreateFlags	flags;
+				VK_SHADER_STAGE_FRAGMENT_BIT,							// VkShaderStageFlagBits			stage;
+				*m_fragmentShaderModule,								// VkShaderModule					module;
+				"main",													// const char*						pName;
+				DE_NULL													// const VkSpecializationInfo*		pSpecializationInfo;
+			}
+		};
+
+		const VkVertexInputBindingDescription vertexInputBindingDescription =
+		{
+			0u,									// deUint32					binding;
+			sizeof(Vertex4RGBA),				// deUint32				stride;
+			VK_VERTEX_INPUT_RATE_VERTEX			// VkVertexInputRate		inputRate;
+		};
+
+		const VkVertexInputAttributeDescription vertexInputAttributeDescriptions[2] =
+		{
+			{
+				0u,								// deUint32	location;
+				0u,								// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,	// VkFormat	format;
+				0u								// deUint32	offset;
+			},
+			{
+				1u,								// deUint32	location;
+				0u,								// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,	// VkFormat	format;
+				(deUint32)(sizeof(float) * 4),	// deUint32	offset;
+			}
+		};
+
+		const VkPipelineVertexInputStateCreateInfo vertexInputStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineVertexInputStateCreateFlags	flags;
+			1u,																// deUint32									vertexBindingDescriptionCount;
+			&vertexInputBindingDescription,									// const VkVertexInputBindingDescription*	pVertexBindingDescriptions;
+			2u,																// deUint32									vertexAttributeDescriptionCount;
+			vertexInputAttributeDescriptions								// const VkVertexInputAttributeDescription*	pVertexAttributeDescriptions;
+		};
+
+		const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineInputAssemblyStateCreateFlags	flags;
+			VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,							// VkPrimitiveTopology						topology;
+			false															// VkBool32									primitiveRestartEnable;
+		};
+
+		const VkViewport viewport =
+		{
+			0.0f,						// float	x;
+			0.0f,						// float	y;
+			(float)m_renderSize.x(),	// float	width;
+			(float)m_renderSize.y(),	// float	height;
+			0.0f,						// float	minDepth;
+			1.0f						// float	maxDepth;
+		};
+
+		const VkRect2D scissor = { { 0, 0 }, { m_renderSize.x(), m_renderSize.y() } };
+
+		const VkPipelineViewportStateCreateInfo viewportStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,			// VkStructureType						sType;
+			DE_NULL,														// const void*							pNext;
+			0u,																// VkPipelineViewportStateCreateFlags	flags;
+			1u,																// deUint32								viewportCount;
+			&viewport,														// const VkViewport*					pViewports;
+			1u,																// deUint32								scissorCount;
+			&scissor														// const VkRect2D*						pScissors;
+		};
+
+		const VkPipelineRasterizationStateCreateInfo rasterStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineRasterizationStateCreateFlags	flags;
+			false,															// VkBool32									depthClampEnable;
+			false,															// VkBool32									rasterizerDiscardEnable;
+			VK_POLYGON_MODE_FILL,											// VkPolygonMode							polygonMode;
+			VK_CULL_MODE_NONE,												// VkCullModeFlags							cullMode;
+			VK_FRONT_FACE_COUNTER_CLOCKWISE,								// VkFrontFace								frontFace;
+			false,															// VkBool32									depthBiasEnable;
+			0.0f,															// float									depthBiasConstantFactor;
+			0.0f,															// float									depthBiasClamp;
+			0.0f,															// float									depthBiasSlopeFactor;
+			1.0f															// float									lineWidth;
+		};
+
+		const VkPipelineMultisampleStateCreateInfo multisampleStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+			VK_SAMPLE_COUNT_1_BIT,										// VkSampleCountFlagBits					rasterizationSamples;
+			false,														// VkBool32									sampleShadingEnable;
+			0.0f,														// float									minSampleShading;
+			DE_NULL,													// const VkSampleMask*						pSampleMask;
+			false,														// VkBool32									alphaToCoverageEnable;
+			false														// VkBool32									alphaToOneEnable;
+		};
+
+		const VkPipelineDepthStencilStateCreateInfo depthStencilStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineDepthStencilStateCreateFlags	flags;
+			false,														// VkBool32									depthTestEnable;
+			false,														// VkBool32									depthWriteEnable;
+			VK_COMPARE_OP_LESS,											// VkCompareOp								depthCompareOp;
+			false,														// VkBool32									depthBoundsTestEnable;
+			false,														// VkBool32									stencilTestEnable;
+			// VkStencilOpState	front;
+			{
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	failOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	passOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	depthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	compareOp;
+				0u,						// deUint32		compareMask;
+				0u,						// deUint32		writeMask;
+				0u						// deUint32		reference;
+			},
+			// VkStencilOpState	back;
+			{
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	failOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	passOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	depthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	compareOp;
+				0u,						// deUint32		compareMask;
+				0u,						// deUint32		writeMask;
+				0u						// deUint32		reference;
+			},
+			-1.0f,														// float			minDepthBounds;
+			+1.0f														// float			maxDepthBounds;
+		};
+
+		// The color blend attachment will be set up before creating the graphics pipeline.
+		VkPipelineColorBlendStateCreateInfo colorBlendStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	// VkStructureType								sType;
+			DE_NULL,													// const void*									pNext;
+			0u,															// VkPipelineColorBlendStateCreateFlags			flags;
+			false,														// VkBool32										logicOpEnable;
+			VK_LOGIC_OP_COPY,											// VkLogicOp									logicOp;
+			0u,															// deUint32										attachmentCount;
+			DE_NULL,													// const VkPipelineColorBlendAttachmentState*	pAttachments;
+			{															// float										blendConstants[4];
+				BlendTest::s_blendConst.x(),
+				BlendTest::s_blendConst.y(),
+				BlendTest::s_blendConst.z(),
+				BlendTest::s_blendConst.w()
+			}
+		};
+
+		const VkPipelineDynamicStateCreateInfo dynamicStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,		// VkStructureType						sType;
+			DE_NULL,													// const void*							pNext;
+			0u,															// VkPipelineDynamicStateCreateFlags	flags;
+			0u,															// deUint32								dynamicStateCount;
+			DE_NULL														// const VkDynamicState*				pDynamicStates;
+		};
+
+		const VkGraphicsPipelineCreateInfo graphicsPipelineParams =
+		{
+			VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,	// VkStructureType									sType;
+			DE_NULL,											// const void*										pNext;
+			0u,													// VkPipelineCreateFlags							flags;
+			2u,													// deUint32											stageCount;
+			shaderStages,										// const VkPipelineShaderStageCreateInfo*			pStages;
+			&vertexInputStateParams,							// const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+			&inputAssemblyStateParams,							// const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+			DE_NULL,											// const VkPipelineTessellationStateCreateInfo*		pTessellationState;
+			&viewportStateParams,								// const VkPipelineViewportStateCreateInfo*			pViewportState;
+			&rasterStateParams,									// const VkPipelineRasterizationStateCreateInfo*	pRasterizationState;
+			&multisampleStateParams,							// const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+			&depthStencilStateParams,							// const VkPipelineDepthStencilStateCreateInfo*		pDepthStencilState;
+			&colorBlendStateParams,								// const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+			&dynamicStateParams,								// const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+			*m_pipelineLayout,									// VkPipelineLayout									layout;
+			*m_renderPass,										// VkRenderPass										renderPass;
+			0u,													// deUint32											subpass;
+			0u,													// VkPipeline										basePipelineHandle;
+			0u													// deInt32											basePipelineIndex;
+		};
+
+		for (int quadNdx = 0; quadNdx < BlendTest::QUAD_COUNT; quadNdx++)
+		{
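+			// graphicsPipelineParams references colorBlendStateParams, so patching the attachment
+			// state here is picked up by each createGraphicsPipeline() call below.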
+			colorBlendStateParams.attachmentCount	= 1u;
+			colorBlendStateParams.pAttachments		= &m_blendStates[quadNdx];
+			m_graphicsPipelines[quadNdx]			= createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+		}
+	}
+
+	// Create vertex buffer
+	{
+		const VkBufferCreateInfo vertexBufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			1024u,										// VkDeviceSize			size;
+			VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_vertices			= createOverlappingQuads();
+		m_vertexBuffer		= createBuffer(vk, vkDevice, &vertexBufferParams);
+		m_vertexBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_vertexBuffer), MemoryRequirement::HostVisible);
+
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_vertexBuffer, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset()));
+
+		// Adjust vertex colors
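+		// (non-float formats, e.g. SNORM, store a limited value range; map the reference colors
+		// through the inverse of the format's lookup scale/bias so they remain representable)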
+		if (!isFloatFormat(m_colorFormat))
+		{
+			const tcu::TextureFormatInfo formatInfo = tcu::getTextureFormatInfo(mapVkFormat(m_colorFormat));
+			for (size_t vertexNdx = 0; vertexNdx < m_vertices.size(); vertexNdx++)
+				m_vertices[vertexNdx].color = (m_vertices[vertexNdx].color - formatInfo.lookupBias) / formatInfo.lookupScale;
+		}
+
+		// Upload vertex data
+		deMemcpy(m_vertexBufferAlloc->getHostPtr(), m_vertices.data(), m_vertices.size() * sizeof(Vertex4RGBA));
+
+		const VkMappedMemoryRange flushRange =
+		{
+			VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,	// VkStructureType	sType;
+			DE_NULL,								// const void*		pNext;
+			m_vertexBufferAlloc->getMemory(),		// VkDeviceMemory	memory;
+			m_vertexBufferAlloc->getOffset(),		// VkDeviceSize		offset;
+			vertexBufferParams.size					// VkDeviceSize		size;
+		};
+
+		VK_CHECK(vk.flushMappedMemoryRanges(vkDevice, 1, &flushRange));
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCommandPoolCreateFlags	flags;
+			queueFamilyIndex								// deUint32					queueFamilyIndex;
+		};
+
+		m_cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u												// deUint32					commandBufferCount;
+		};
+
+		const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+			DE_NULL,										// const void*						pNext;
+			0u,												// VkCommandBufferUsageFlags		flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL	// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+		};
+
+		const VkClearValue attachmentClearValue = defaultClearValue(m_colorFormat);
+
+		const VkRenderPassBeginInfo renderPassBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,				// VkStructureType		sType;
+			DE_NULL,												// const void*			pNext;
+			*m_renderPass,											// VkRenderPass			renderPass;
+			*m_framebuffer,											// VkFramebuffer		framebuffer;
+			{ { 0, 0 }, { m_renderSize.x(), m_renderSize.y() } },	// VkRect2D				renderArea;
+			1u,														// deUint32				clearValueCount;
+			&attachmentClearValue									// const VkClearValue*	pClearValues;
+		};
+
+		m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
+
+		VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+		vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+		const VkDeviceSize quadOffset = (m_vertices.size() / BlendTest::QUAD_COUNT) * sizeof(Vertex4RGBA);
+
+		for (int quadNdx = 0; quadNdx < BlendTest::QUAD_COUNT; quadNdx++)
+		{
+			VkDeviceSize vertexBufferOffset = quadOffset * quadNdx;
+
+			vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipelines[quadNdx]);
+			vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), &vertexBufferOffset);
+			vk.cmdDraw(*m_cmdBuffer, (deUint32)(m_vertices.size() / BlendTest::QUAD_COUNT), 1, 0, 0);
+		}
+
+		vk.cmdEndRenderPass(*m_cmdBuffer);
+		VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+
+		m_fence = createFence(vk, vkDevice, &fenceParams);
+	}
+}
+
+BlendTestInstance::~BlendTestInstance (void)
+{
+}
+
+tcu::TestStatus BlendTestInstance::iterate (void)
+{
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const VkQueue				queue		= m_context.getUniversalQueue();
+	const VkSubmitInfo			submitInfo	=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity */));
+
+	return verifyImage();
+}
+
+float BlendTestInstance::getNormChannelThreshold (const tcu::TextureFormat& format, int numBits)
+{
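+	// Threshold allows roughly one quantization step of error per blended quad: the quads overlap,
+	// so up to QUAD_COUNT blend operations can accumulate rounding error in a single pixel.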
+	switch (tcu::getTextureChannelClass(format.type))
+	{
+		// Use a 64-bit shift so that 32-bit normalized formats do not trigger undefined behaviour.
+		case tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT:	return BlendTest::QUAD_COUNT / static_cast<float>(((deUint64)1 << numBits) - 1);
+		case tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT:	return BlendTest::QUAD_COUNT / static_cast<float>(((deUint64)1 << (numBits - 1)) - 1);
+		default:
+			break;
+	}
+
+	DE_ASSERT(false);
+	return 0.0f;
+}
+
+tcu::Vec4 BlendTestInstance::getFormatThreshold (const tcu::TextureFormat& format)
+{
+	using tcu::Vec4;
+	using tcu::TextureFormat;
+
+	Vec4 threshold(0.01f);
+
+	switch (format.type)
+	{
+		case TextureFormat::UNORM_BYTE_44:
+			threshold = Vec4(getNormChannelThreshold(format, 4), getNormChannelThreshold(format, 4), 1.0f, 1.0f);
+			break;
+
+		case TextureFormat::UNORM_SHORT_565:
+			threshold = Vec4(getNormChannelThreshold(format, 5), getNormChannelThreshold(format, 6), getNormChannelThreshold(format, 5), 1.0f);
+			break;
+
+		case TextureFormat::UNORM_SHORT_555:
+			threshold = Vec4(getNormChannelThreshold(format, 5), getNormChannelThreshold(format, 5), getNormChannelThreshold(format, 5), 1.0f);
+			break;
+
+		case TextureFormat::UNORM_SHORT_4444:
+			threshold = Vec4(getNormChannelThreshold(format, 4));
+			break;
+
+		case TextureFormat::UNORM_SHORT_5551:
+			threshold = Vec4(getNormChannelThreshold(format, 5), getNormChannelThreshold(format, 5), getNormChannelThreshold(format, 5), 0.1f);
+			break;
+
+		case TextureFormat::UNORM_INT_1010102_REV:
+		case TextureFormat::SNORM_INT_1010102_REV:
+			threshold = Vec4(getNormChannelThreshold(format, 10), getNormChannelThreshold(format, 10), getNormChannelThreshold(format, 10), 0.34f);
+			break;
+
+		case TextureFormat::UNORM_INT8:
+		case TextureFormat::SNORM_INT8:
+			threshold = Vec4(getNormChannelThreshold(format, 8));
+			break;
+
+		case TextureFormat::UNORM_INT16:
+		case TextureFormat::SNORM_INT16:
+			threshold = Vec4(getNormChannelThreshold(format, 16));
+			break;
+
+		case TextureFormat::UNORM_INT32:
+		case TextureFormat::SNORM_INT32:
+			threshold = Vec4(getNormChannelThreshold(format, 32));
+			break;
+
+		case TextureFormat::HALF_FLOAT:
+			threshold = Vec4(0.005f);
+			break;
+
+		case TextureFormat::FLOAT:
+			threshold = Vec4(0.00001f);
+			break;
+
+		case TextureFormat::UNSIGNED_INT_11F_11F_10F_REV:
+			threshold = Vec4(0.02f, 0.02f, 0.0625f, 1.0f);
+			break;
+
+		case TextureFormat::UNSIGNED_INT_999_E5_REV:
+			threshold = Vec4(0.05f, 0.05f, 0.05f, 1.0f);
+			break;
+
+		default:
+			DE_ASSERT(false);
+	}
+
+	// Return value matching the channel order specified by the format
+	if (format.order == tcu::TextureFormat::BGR || format.order == tcu::TextureFormat::BGRA)
+		return threshold.swizzle(2, 1, 0, 3);
+	else
+		return threshold;
+}
+
+tcu::TestStatus BlendTestInstance::verifyImage (void)
+{
+	const tcu::TextureFormat	tcuColorFormat	= mapVkFormat(m_colorFormat);
+	const tcu::TextureFormat	tcuDepthFormat	= tcu::TextureFormat(); // Undefined depth/stencil format
+	const ColorVertexShader		vertexShader;
+	const ColorFragmentShader	fragmentShader	(tcuColorFormat, tcuDepthFormat);
+	const rr::Program			program			(&vertexShader, &fragmentShader);
+	ReferenceRenderer			refRenderer		(m_renderSize.x(), m_renderSize.y(), 1, tcuColorFormat, tcuDepthFormat, &program);
+	bool						compareOk		= false;
+
+	// Render reference image
+	{
+		for (int quadNdx = 0; quadNdx < BlendTest::QUAD_COUNT; quadNdx++)
+		{
+			const VkPipelineColorBlendAttachmentState& blendState = m_blendStates[quadNdx];
+
+			// Set blend state
+			rr::RenderState renderState					(refRenderer.getViewportState());
+			renderState.fragOps.blendMode				= rr::BLENDMODE_STANDARD;
+			renderState.fragOps.blendRGBState.srcFunc	= mapVkBlendFactor(blendState.srcColorBlendFactor);
+			renderState.fragOps.blendRGBState.dstFunc	= mapVkBlendFactor(blendState.dstColorBlendFactor);
+			renderState.fragOps.blendRGBState.equation	= mapVkBlendOp(blendState.colorBlendOp);
+			renderState.fragOps.blendAState.srcFunc		= mapVkBlendFactor(blendState.srcAlphaBlendFactor);
+			renderState.fragOps.blendAState.dstFunc		= mapVkBlendFactor(blendState.dstAlphaBlendFactor);
+			renderState.fragOps.blendAState.equation	= mapVkBlendOp(blendState.alphaBlendOp);
+			renderState.fragOps.blendColor				= BlendTest::s_blendConst;
+			renderState.fragOps.colorMask				= mapVkColorComponentFlags(BlendTest::s_colorWriteMasks[quadNdx]);
+
+			refRenderer.draw(renderState,
+							 rr::PRIMITIVETYPE_TRIANGLES,
+							 std::vector<Vertex4RGBA>(m_vertices.begin() + quadNdx * 6,
+													  m_vertices.begin() + (quadNdx + 1) * 6));
+		}
+	}
+
+
+	// Compare result with reference image
+	{
+		const DeviceInterface&				vk							= m_context.getDeviceInterface();
+		const VkDevice						vkDevice					= m_context.getDevice();
+		const VkQueue						queue						= m_context.getUniversalQueue();
+		const deUint32						queueFamilyIndex			= m_context.getUniversalQueueFamilyIndex();
+		SimpleAllocator						allocator					(vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
+		de::UniquePtr<tcu::TextureLevel>	result						(readColorAttachment(vk, vkDevice, queue, queueFamilyIndex, allocator, *m_colorImage, m_colorFormat, m_renderSize).release());
+		const tcu::Vec4						threshold					(getFormatThreshold(tcuColorFormat));
+
+		compareOk = tcu::floatThresholdCompare(m_context.getTestContext().getLog(),
+											   "FloatImageCompare",
+											   "Image comparison",
+											   refRenderer.getAccess(),
+											   result->getAccess(),
+											   threshold,
+											   tcu::COMPARE_LOG_RESULT);
+	}
+
+	if (compareOk)
+		return tcu::TestStatus::pass("Result image matches reference");
+	else
+		return tcu::TestStatus::fail("Image mismatch");
+}
+
+} // anonymous
+
+std::string getBlendStateName (const VkPipelineColorBlendAttachmentState& blendState)
+{
+	const char* shortBlendFactorNames[] =
+	{
+		"z",		// VK_BLEND_FACTOR_ZERO
+		"o",		// VK_BLEND_FACTOR_ONE
+		"sc",		// VK_BLEND_FACTOR_SRC_COLOR
+		"1msc",		// VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR
+		"dc",		// VK_BLEND_FACTOR_DST_COLOR
+		"1mdc",		// VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR
+		"sa",		// VK_BLEND_FACTOR_SRC_ALPHA
+		"1msa",		// VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA
+		"da",		// VK_BLEND_FACTOR_DST_ALPHA
+		"1mda",		// VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA
+		"cc",		// VK_BLEND_FACTOR_CONSTANT_COLOR
+		"1mcc",		// VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR
+		"ca",		// VK_BLEND_FACTOR_CONSTANT_ALPHA
+		"1mca",		// VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA
+		"sas"		// VK_BLEND_FACTOR_SRC_ALPHA_SATURATE
+	};
+
+	const char* blendOpNames[] =
+	{
+		"add",		// VK_BLEND_OP_ADD
+		"sub",		// VK_BLEND_OP_SUBTRACT
+		"rsub",		// VK_BLEND_OP_REVERSE_SUBTRACT
+		"min",		// VK_BLEND_OP_MIN
+		"max",		// VK_BLEND_OP_MAX
+	};
+
+	std::ostringstream shortName;
+
+	shortName << "color_" << shortBlendFactorNames[blendState.srcColorBlendFactor] << "_" << shortBlendFactorNames[blendState.dstColorBlendFactor] << "_" << blendOpNames[blendState.colorBlendOp];
+	shortName << "_alpha_" << shortBlendFactorNames[blendState.srcAlphaBlendFactor] << "_" << shortBlendFactorNames[blendState.dstAlphaBlendFactor] << "_" << blendOpNames[blendState.alphaBlendOp];
+
+	return shortName.str();
+}
+
+std::string getBlendStateSetName (const VkPipelineColorBlendAttachmentState blendStates[BlendTest::QUAD_COUNT])
+{
+	std::ostringstream name;
+
+	for (int quadNdx = 0; quadNdx < BlendTest::QUAD_COUNT; quadNdx++)
+	{
+		name << getBlendStateName(blendStates[quadNdx]);
+
+		if (quadNdx < BlendTest::QUAD_COUNT - 1)
+			name << "-";
+	}
+
+	return name.str();
+}
+
+std::string getBlendStateSetDescription (const VkPipelineColorBlendAttachmentState blendStates[BlendTest::QUAD_COUNT])
+{
+	std::ostringstream description;
+
+	description << "Draws " << BlendTest::QUAD_COUNT << " quads with the following blend states:\n";
+
+	for (int quadNdx = 0; quadNdx < BlendTest::QUAD_COUNT; quadNdx++)
+		description << blendStates[quadNdx] << "\n";
+
+	return description.str();
+}
+
+std::string getFormatCaseName (VkFormat format)
+{
+	const std::string fullName = getFormatName(format);
+
+	DE_ASSERT(de::beginsWith(fullName, "VK_FORMAT_"));
+
+	return de::toLower(fullName.substr(10));
+}
+
+tcu::TestCaseGroup* createBlendTests (tcu::TestContext& testCtx)
+{
+	const deUint32 blendStatesPerFormat = 100 * BlendTest::QUAD_COUNT;
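+	// 100 * QUAD_COUNT unique blend states per format, consumed QUAD_COUNT at a time (one test case per set of quads).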
+
+	// Formats that are dEQP-compatible, non-integer and uncompressed
+	const VkFormat blendFormats[] =
+	{
+		VK_FORMAT_R4G4_UNORM_PACK8,
+		VK_FORMAT_R4G4B4A4_UNORM_PACK16,
+		VK_FORMAT_R5G6B5_UNORM_PACK16,
+		VK_FORMAT_R5G5B5A1_UNORM_PACK16,
+		VK_FORMAT_R8_UNORM,
+		VK_FORMAT_R8_SNORM,
+		VK_FORMAT_R8_SRGB,
+		VK_FORMAT_R8G8_UNORM,
+		VK_FORMAT_R8G8_SNORM,
+		VK_FORMAT_R8G8_SRGB,
+		VK_FORMAT_R8G8B8_UNORM,
+		VK_FORMAT_R8G8B8_SNORM,
+		VK_FORMAT_R8G8B8_SRGB,
+		VK_FORMAT_R8G8B8A8_UNORM,
+		VK_FORMAT_R8G8B8A8_SNORM,
+		VK_FORMAT_R8G8B8A8_SRGB,
+		VK_FORMAT_A2R10G10B10_UNORM_PACK32,
+		VK_FORMAT_R16_UNORM,
+		VK_FORMAT_R16_SNORM,
+		VK_FORMAT_R16_SFLOAT,
+		VK_FORMAT_R16G16_UNORM,
+		VK_FORMAT_R16G16_SNORM,
+		VK_FORMAT_R16G16_SFLOAT,
+		VK_FORMAT_R16G16B16_UNORM,
+		VK_FORMAT_R16G16B16_SNORM,
+		VK_FORMAT_R16G16B16_SFLOAT,
+		VK_FORMAT_R16G16B16A16_UNORM,
+		VK_FORMAT_R16G16B16A16_SNORM,
+		VK_FORMAT_R16G16B16A16_SFLOAT,
+		VK_FORMAT_R32_SFLOAT,
+		VK_FORMAT_R32G32_SFLOAT,
+		VK_FORMAT_R32G32B32_SFLOAT,
+		VK_FORMAT_R32G32B32A32_SFLOAT,
+		VK_FORMAT_B10G11R11_UFLOAT_PACK32,
+		VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
+		VK_FORMAT_B4G4R4A4_UNORM_PACK16,
+		VK_FORMAT_B5G5R5A1_UNORM_PACK16,
+	};
+
+	de::MovePtr<tcu::TestCaseGroup>		blendTests		(new tcu::TestCaseGroup(testCtx, "blend", "Blend tests"));
+	de::MovePtr<tcu::TestCaseGroup>		formatTests		(new tcu::TestCaseGroup(testCtx, "format", "Uses different blend formats"));
+	BlendStateUniqueRandomIterator		blendStateItr	(blendStatesPerFormat, 123);
+
+	for (size_t formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(blendFormats); formatNdx++)
+	{
+		const VkFormat					format			= blendFormats[formatNdx];
+		de::MovePtr<tcu::TestCaseGroup>	formatTest		(new tcu::TestCaseGroup(testCtx,
+																				getFormatCaseName(format).c_str(),
+																				(std::string("Uses format ") + getFormatName(format)).c_str()));
+		de::MovePtr<tcu::TestCaseGroup>	blendStateTests;
+		{
+			std::ostringstream blendStateDescription;
+			blendStateDescription << "Combines blend factors, operators and channel write masks. The constant color used in all tests is " << BlendTest::s_blendConst;
+			blendStateTests = de::MovePtr<tcu::TestCaseGroup>(new tcu::TestCaseGroup(testCtx, "states", blendStateDescription.str().c_str()));
+		}
+
+		blendStateItr.reset();
+
+		while (blendStateItr.hasNext())
+		{
+			VkPipelineColorBlendAttachmentState quadBlendConfigs[BlendTest::QUAD_COUNT];
+
+			for (int quadNdx = 0; quadNdx < BlendTest::QUAD_COUNT; quadNdx++)
+			{
+				quadBlendConfigs[quadNdx]					= blendStateItr.next();
+				quadBlendConfigs[quadNdx].colorWriteMask	= BlendTest::s_colorWriteMasks[quadNdx];
+			}
+
+			blendStateTests->addChild(new BlendTest(testCtx,
+													getBlendStateSetName(quadBlendConfigs),
+													getBlendStateSetDescription(quadBlendConfigs),
+													format,
+													quadBlendConfigs));
+		}
+		formatTest->addChild(blendStateTests.release());
+		formatTests->addChild(formatTest.release());
+	}
+	blendTests->addChild(formatTests.release());
+
+	return blendTests.release();
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineBlendTests.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineBlendTests.hpp
new file mode 100644
index 0000000..7585e95
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineBlendTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTPIPELINEBLENDTESTS_HPP
+#define _VKTPIPELINEBLENDTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Blend Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+tcu::TestCaseGroup* createBlendTests (tcu::TestContext& testCtx);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINEBLENDTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineClearUtil.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineClearUtil.cpp
new file mode 100644
index 0000000..32bc5c4
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineClearUtil.cpp
@@ -0,0 +1,147 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Utilities for clear values.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineClearUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "tcuTextureUtil.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+
+tcu::Vec4 defaultClearColor (const tcu::TextureFormat& format)
+{
+	if (tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_FLOATING_POINT)
+		return defaultClearColorUnorm();
+	else
+	{
+		const tcu::TextureFormatInfo formatInfo = tcu::getTextureFormatInfo(format);
+		return (defaultClearColorUnorm() - formatInfo.lookupBias) / formatInfo.lookupScale;
+	}
+}
+
+tcu::IVec4 defaultClearColorInt (const tcu::TextureFormat& format)
+{
+	const tcu::TextureFormatInfo	formatInfo	= tcu::getTextureFormatInfo(format);
+	const tcu::Vec4					color		= (defaultClearColorUnorm() - formatInfo.lookupBias) / formatInfo.lookupScale;
+
+	const tcu::IVec4				result		((deInt32)deFloatRound(color.x()), (deInt32)deFloatRound(color.y()),
+												 (deInt32)deFloatRound(color.z()), (deInt32)deFloatRound(color.w()));
+
+	return result;
+}
+
+tcu::UVec4 defaultClearColorUint (const tcu::TextureFormat& format)
+{
+	const tcu::TextureFormatInfo	formatInfo	= tcu::getTextureFormatInfo(format);
+	const tcu::Vec4					color		= (defaultClearColorUnorm() - formatInfo.lookupBias) / formatInfo.lookupScale;
+
+	const tcu::UVec4				result		((deUint32)deFloatRound(color.x()), (deUint32)deFloatRound(color.y()),
+												 (deUint32)deFloatRound(color.z()), (deUint32)deFloatRound(color.w()));
+
+	return result;
+}
+
+tcu::Vec4 defaultClearColorUnorm (void)
+{
+	return tcu::Vec4(0.39f, 0.58f, 0.93f, 1.0f);
+}
+
+float defaultClearDepth (void)
+{
+	return 1.0f;
+}
+
+deUint32 defaultClearStencil (void)
+{
+	return 0;
+}
+
+VkClearDepthStencilValue defaultClearDepthStencilValue (void)
+{
+	VkClearDepthStencilValue clearDepthStencilValue;
+	clearDepthStencilValue.depth	= defaultClearDepth();
+	clearDepthStencilValue.stencil	= defaultClearStencil();
+
+	return clearDepthStencilValue;
+}
+
+VkClearValue defaultClearValue (VkFormat clearFormat)
+{
+	VkClearValue clearValue;
+
+	if (isDepthStencilFormat(clearFormat))
+	{
+		const VkClearDepthStencilValue dsValue = defaultClearDepthStencilValue();
+		clearValue.depthStencil.stencil	= dsValue.stencil;
+		clearValue.depthStencil.depth	= dsValue.depth;
+	}
+	else
+	{
+		const tcu::TextureFormat tcuClearFormat = mapVkFormat(clearFormat);
+		if (isUintFormat(clearFormat))
+		{
+			const tcu::UVec4 defaultColor	= defaultClearColorUint(tcuClearFormat);
+			clearValue.color.uint32[0]			= defaultColor.x();
+			clearValue.color.uint32[1]			= defaultColor.y();
+			clearValue.color.uint32[2]			= defaultColor.z();
+			clearValue.color.uint32[3]			= defaultColor.w();
+		}
+		else if (isIntFormat(clearFormat))
+		{
+			const tcu::IVec4 defaultColor	= defaultClearColorInt(tcuClearFormat);
+			clearValue.color.int32[0]			= defaultColor.x();
+			clearValue.color.int32[1]			= defaultColor.y();
+			clearValue.color.int32[2]			= defaultColor.z();
+			clearValue.color.int32[3]			= defaultColor.w();
+		}
+		else
+		{
+			const tcu::Vec4 defaultColor	= defaultClearColor(tcuClearFormat);
+			clearValue.color.float32[0]			= defaultColor.x();
+			clearValue.color.float32[1]			= defaultColor.y();
+			clearValue.color.float32[2]			= defaultColor.z();
+			clearValue.color.float32[3]			= defaultColor.w();
+		}
+	}
+
+	return clearValue;
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineClearUtil.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineClearUtil.hpp
new file mode 100644
index 0000000..6383aca
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineClearUtil.hpp
@@ -0,0 +1,60 @@
+#ifndef _VKTPIPELINECLEARUTIL_HPP
+#define _VKTPIPELINECLEARUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Utilities for clear values.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuTexture.hpp"
+#include "tcuVectorUtil.hpp"
+#include "vkDefs.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+tcu::Vec4						defaultClearColor				(const tcu::TextureFormat& format);
+tcu::IVec4						defaultClearColorInt			(const tcu::TextureFormat& format);
+tcu::UVec4						defaultClearColorUint			(const tcu::TextureFormat& format);
+tcu::Vec4						defaultClearColorUnorm			(void);
+float							defaultClearDepth				(void);
+deUint32						defaultClearStencil				(void);
+
+vk::VkClearDepthStencilValue	defaultClearDepthStencilValue	(void);
+vk::VkClearValue				defaultClearValue				(vk::VkFormat format);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINECLEARUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineCombinationsIterator.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineCombinationsIterator.hpp
new file mode 100644
index 0000000..2ba2b25
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineCombinationsIterator.hpp
@@ -0,0 +1,152 @@
+#ifndef _VKTPIPELINECOMBINATIONSITERATOR_HPP
+#define _VKTPIPELINECOMBINATIONSITERATOR_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Iterator over combinations of items without repetition
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "deRandom.hpp"
+#include <set>
+#include <vector>
+
+namespace vkt
+{
+namespace pipeline
+{
+
+template <typename T>
+class CombinationsIterator
+{
+public:
+							CombinationsIterator	(deUint32 numItems, deUint32 combinationSize);
+	virtual					~CombinationsIterator	(void) {}
+	bool					hasNext					(void) const;
+	T						next					(void);
+	void					reset					(void);
+
+protected:
+	virtual T				getCombinationValue		(const std::vector<deUint32>& combination) = 0;
+
+private:
+	static deUint32			factorial				(deUint32 x);
+	deUint32				m_numItems;
+
+	deUint32				m_combinationIndex;
+	deUint32				m_combinationSize;
+	deUint32				m_combinationCount;
+
+	std::vector<deUint32>	m_combination;
+};
+
+static deUint32 seriesProduct (deUint32 first, deUint32 last)
+{
+	deUint32 result = 1;
+
+	for (deUint32 i = first; i <= last; i++)
+		result *= i;
+
+	return result;
+}
+
+template <typename T>
+CombinationsIterator<T>::CombinationsIterator (deUint32 numItems, deUint32 combinationSize)
+	: m_numItems		(numItems)
+	, m_combinationSize	(combinationSize)
+{
+	DE_ASSERT(m_combinationSize > 0);
+	DE_ASSERT(m_combinationSize <= m_numItems);
+
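+	// Number of combinations without repetition: C(n, k) = n * (n-1) * ... * (n-k+1) / k!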
+	m_combinationCount	= seriesProduct(numItems - combinationSize + 1, numItems) / seriesProduct(1, combinationSize);
+
+	m_combination.resize(m_combinationSize);
+	reset();
+}
+
+template <typename T>
+bool CombinationsIterator<T>::hasNext (void) const
+{
+	return m_combinationIndex < m_combinationCount;
+}
+
+template <typename T>
+T CombinationsIterator<T>::next (void)
+{
+	DE_ASSERT(m_combinationIndex < m_combinationCount);
+
+	if (m_combinationIndex > 0)
+	{
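+		// Advance to the next combination in lexicographic order: find the rightmost element
+		// that can still be incremented, then reset the elements after it to consecutive values.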
+		for (int combinationItemNdx = (int)m_combinationSize - 1; combinationItemNdx >= 0; combinationItemNdx--)
+		{
+			if ((m_combination[combinationItemNdx] + 1 < m_numItems) && ((combinationItemNdx == (int)m_combinationSize - 1) || (m_combination[combinationItemNdx + 1] > m_combination[combinationItemNdx] + 1)))
+			{
+				m_combination[combinationItemNdx]++;
+
+				for (deUint32 resetNdx = combinationItemNdx + 1; resetNdx < m_combinationSize; resetNdx++)
+					m_combination[resetNdx] = m_combination[resetNdx - 1] + 1;
+
+				break;
+			}
+		}
+	}
+
+	m_combinationIndex++;
+
+	return getCombinationValue(m_combination);
+}
+
+template <typename T>
+void CombinationsIterator<T>::reset (void)
+{
+	// Set up first combination
+	for (deUint32 itemNdx = 0; itemNdx < m_combinationSize; itemNdx++)
+		m_combination[itemNdx] = itemNdx;
+
+	m_combinationIndex = 0;
+}
+
+template <typename T>
+deUint32 CombinationsIterator<T>::factorial (deUint32 x)
+{
+	deUint32 result = 1;
+
+	for (deUint32 value = x; value > 1; value--)
+		result *= value;
+
+	return result;
+}
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINECOMBINATIONSITERATOR_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineDepthTests.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineDepthTests.cpp
new file mode 100644
index 0000000..a242a7e
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineDepthTests.cpp
@@ -0,0 +1,1074 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Depth Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineDepthTests.hpp"
+#include "vktPipelineClearUtil.hpp"
+#include "vktPipelineImageUtil.hpp"
+#include "vktPipelineVertexUtil.hpp"
+#include "vktPipelineReferenceRenderer.hpp"
+#include "vktTestCase.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "tcuImageCompare.hpp"
+#include "deUniquePtr.hpp"
+#include "deStringUtil.hpp"
+#include "deMemory.h"
+
+#include <sstream>
+#include <vector>
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+
+namespace
+{
+
+bool isSupportedDepthStencilFormat (const InstanceInterface& instanceInterface, VkPhysicalDevice device, VkFormat format)
+{
+	VkFormatProperties formatProps;
+
+	instanceInterface.getPhysicalDeviceFormatProperties(device, format, &formatProps);
+
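+	// Only optimal tiling support is required; the depth images in these tests use VK_IMAGE_TILING_OPTIMAL.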
+	return (formatProps.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) != 0u;
+}
+
+tcu::TestStatus testSupportsDepthStencilFormat (Context& context, VkFormat format)
+{
+	DE_ASSERT(vk::isDepthStencilFormat(format));
+
+	if (isSupportedDepthStencilFormat(context.getInstanceInterface(), context.getPhysicalDevice(), format))
+		return tcu::TestStatus::pass("Format can be used in depth/stencil attachment");
+	else
+		return tcu::TestStatus::fail("Unsupported depth/stencil attachment format");
+}
+
+tcu::TestStatus testSupportsAtLeastOneDepthStencilFormat (Context& context, const std::vector<VkFormat> formats)
+{
+	std::ostringstream	supportedFormatsMsg;
+	bool				pass					= false;
+
+	DE_ASSERT(!formats.empty());
+
+	for (size_t formatNdx = 0; formatNdx < formats.size(); formatNdx++)
+	{
+		const VkFormat format = formats[formatNdx];
+
+		DE_ASSERT(vk::isDepthStencilFormat(format));
+
+		if (isSupportedDepthStencilFormat(context.getInstanceInterface(), context.getPhysicalDevice(), format))
+		{
+			pass = true;
+			supportedFormatsMsg << vk::getFormatName(format);
+
+			if (formatNdx < formats.size() - 1)
+				supportedFormatsMsg << ", ";
+		}
+	}
+
+	if (pass)
+		return tcu::TestStatus::pass(std::string("Supported depth/stencil formats: ") + supportedFormatsMsg.str());
+	else
+		return tcu::TestStatus::fail("All depth/stencil formats are unsupported");
+}
+
+class DepthTest : public vkt::TestCase
+{
+public:
+	enum
+	{
+		QUAD_COUNT = 4
+	};
+
+	static const float					quadDepths[QUAD_COUNT];
+
+										DepthTest				(tcu::TestContext&		testContext,
+																 const std::string&		name,
+																 const std::string&		description,
+																 const VkFormat			depthFormat,
+																 const VkCompareOp		depthCompareOps[QUAD_COUNT]);
+	virtual								~DepthTest				(void);
+	virtual void						initPrograms			(SourceCollections& programCollection) const;
+	virtual TestInstance*				createInstance			(Context& context) const;
+
+private:
+	const VkFormat						m_depthFormat;
+	VkCompareOp							m_depthCompareOps[QUAD_COUNT];
+};
+
+class DepthTestInstance : public vkt::TestInstance
+{
+public:
+										DepthTestInstance		(Context& context, const VkFormat depthFormat, const VkCompareOp depthCompareOps[DepthTest::QUAD_COUNT]);
+	virtual								~DepthTestInstance		(void);
+	virtual tcu::TestStatus				iterate					(void);
+
+private:
+	tcu::TestStatus						verifyImage				(void);
+
+private:
+	VkCompareOp							m_depthCompareOps[DepthTest::QUAD_COUNT];
+	const tcu::UVec2					m_renderSize;
+	const VkFormat						m_colorFormat;
+	const VkFormat						m_depthFormat;
+
+	Move<VkImage>						m_colorImage;
+	de::MovePtr<Allocation>				m_colorImageAlloc;
+	Move<VkImage>						m_depthImage;
+	de::MovePtr<Allocation>				m_depthImageAlloc;
+	Move<VkImageView>					m_colorAttachmentView;
+	Move<VkImageView>					m_depthAttachmentView;
+	Move<VkRenderPass>					m_renderPass;
+	Move<VkFramebuffer>					m_framebuffer;
+
+	Move<VkShaderModule>				m_vertexShaderModule;
+	Move<VkShaderModule>				m_fragmentShaderModule;
+
+	Move<VkBuffer>						m_vertexBuffer;
+	std::vector<Vertex4RGBA>			m_vertices;
+	de::MovePtr<Allocation>				m_vertexBufferAlloc;
+
+	Move<VkPipelineLayout>				m_pipelineLayout;
+	Move<VkPipeline>					m_graphicsPipelines[DepthTest::QUAD_COUNT];
+
+	Move<VkCommandPool>					m_cmdPool;
+	Move<VkCommandBuffer>				m_cmdBuffer;
+
+	Move<VkFence>						m_fence;
+};
+
+const float DepthTest::quadDepths[QUAD_COUNT] =
+{
+	0.1f,
+	0.0f,
+	0.3f,
+	0.2f
+};
+
+DepthTest::DepthTest (tcu::TestContext&		testContext,
+					  const std::string&	name,
+					  const std::string&	description,
+					  const VkFormat		depthFormat,
+					  const VkCompareOp		depthCompareOps[QUAD_COUNT])
+	: vkt::TestCase	(testContext, name, description)
+	, m_depthFormat	(depthFormat)
+{
+	deMemcpy(m_depthCompareOps, depthCompareOps, sizeof(VkCompareOp) * QUAD_COUNT);
+}
+
+DepthTest::~DepthTest (void)
+{
+}
+
+TestInstance* DepthTest::createInstance (Context& context) const
+{
+	return new DepthTestInstance(context, m_depthFormat, m_depthCompareOps);
+}
+
+void DepthTest::initPrograms (SourceCollections& programCollection) const
+{
+	programCollection.glslSources.add("color_vert") << glu::VertexSource(
+		"#version 310 es\n"
+		"layout(location = 0) in vec4 position;\n"
+		"layout(location = 1) in vec4 color;\n"
+		"layout(location = 0) out highp vec4 vtxColor;\n"
+		"void main (void)\n"
+		"{\n"
+		"	gl_Position = position;\n"
+		"	vtxColor = color;\n"
+		"}\n");
+
+	programCollection.glslSources.add("color_frag") << glu::FragmentSource(
+		"#version 310 es\n"
+		"layout(location = 0) in highp vec4 vtxColor;\n"
+		"layout(location = 0) out highp vec4 fragColor;\n"
+		"void main (void)\n"
+		"{\n"
+		"	fragColor = vtxColor;\n"
+		"}\n");
+}
+
+DepthTestInstance::DepthTestInstance (Context&				context,
+									  const VkFormat		depthFormat,
+									  const VkCompareOp		depthCompareOps[DepthTest::QUAD_COUNT])
+	: vkt::TestInstance	(context)
+	, m_renderSize		(32, 32)
+	, m_colorFormat		(VK_FORMAT_R8G8B8A8_UNORM)
+	, m_depthFormat		(depthFormat)
+{
+	const DeviceInterface&		vk						= context.getDeviceInterface();
+	const VkDevice				vkDevice				= context.getDevice();
+	const deUint32				queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc				(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+	const VkComponentMapping	componentMappingRGBA	= { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A };
+
+	// Copy depth operators
+	deMemcpy(m_depthCompareOps, depthCompareOps, sizeof(VkCompareOp) * DepthTest::QUAD_COUNT);
+
+	// Create color image
+	{
+		const VkImageCreateInfo colorImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,										// VkStructureType			sType;
+			DE_NULL,																	// const void*				pNext;
+			0u,																			// VkImageCreateFlags		flags;
+			VK_IMAGE_TYPE_2D,															// VkImageType				imageType;
+			m_colorFormat,																// VkFormat					format;
+			{ m_renderSize.x(), m_renderSize.y(), 1u },									// VkExtent3D				extent;
+			1u,																			// deUint32					mipLevels;
+			1u,																			// deUint32					arrayLayers;
+			VK_SAMPLE_COUNT_1_BIT,														// VkSampleCountFlagBits	samples;
+			VK_IMAGE_TILING_OPTIMAL,													// VkImageTiling			tiling;
+			VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,		// VkImageUsageFlags		usage;
+			VK_SHARING_MODE_EXCLUSIVE,													// VkSharingMode			sharingMode;
+			1u,																			// deUint32					queueFamilyIndexCount;
+			&queueFamilyIndex,															// const deUint32*			pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED,													// VkImageLayout			initialLayout;
+		};
+
+		m_colorImage			= createImage(vk, vkDevice, &colorImageParams);
+
+		// Allocate and bind color image memory
+		m_colorImageAlloc		= memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_colorImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_colorImage, m_colorImageAlloc->getMemory(), m_colorImageAlloc->getOffset()));
+	}
+
+	// Create depth image
+	{
+		// Check format support
+		if (!isSupportedDepthStencilFormat(context.getInstanceInterface(), context.getPhysicalDevice(), m_depthFormat))
+			throw tcu::NotSupportedError(std::string("Unsupported depth/stencil format: ") + getFormatName(m_depthFormat));
+
+		const VkImageCreateInfo depthImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			0u,												// VkImageCreateFlags		flags;
+			VK_IMAGE_TYPE_2D,								// VkImageType				imageType;
+			m_depthFormat,									// VkFormat					format;
+			{ m_renderSize.x(), m_renderSize.y(), 1u },		// VkExtent3D				extent;
+			1u,												// deUint32					mipLevels;
+			1u,												// deUint32					arrayLayers;
+			VK_SAMPLE_COUNT_1_BIT,							// VkSampleCountFlagBits	samples;
+			VK_IMAGE_TILING_OPTIMAL,						// VkImageTiling			tiling;
+			VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,	// VkImageUsageFlags		usage;
+			VK_SHARING_MODE_EXCLUSIVE,						// VkSharingMode			sharingMode;
+			1u,												// deUint32					queueFamilyIndexCount;
+			&queueFamilyIndex,								// const deUint32*			pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED,						// VkImageLayout			initialLayout;
+		};
+
+		m_depthImage = createImage(vk, vkDevice, &depthImageParams);
+
+		// Allocate and bind depth image memory
+		m_depthImageAlloc = memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_depthImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_depthImage, m_depthImageAlloc->getMemory(), m_depthImageAlloc->getOffset()));
+	}
+
+	// Create color attachment view
+	{
+		const VkImageViewCreateInfo colorAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,		// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			0u,												// VkImageViewCreateFlags	flags;
+			*m_colorImage,									// VkImage					image;
+			VK_IMAGE_VIEW_TYPE_2D,							// VkImageViewType			viewType;
+			m_colorFormat,									// VkFormat					format;
+			componentMappingRGBA,							// VkComponentMapping		components;
+			{ VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u }	// VkImageSubresourceRange	subresourceRange;
+		};
+
+		m_colorAttachmentView = createImageView(vk, vkDevice, &colorAttachmentViewParams);
+	}
+
+	// Create depth attachment view
+	{
+		const VkImageViewCreateInfo depthAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,		// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			0u,												// VkImageViewCreateFlags	flags;
+			*m_depthImage,									// VkImage					image;
+			VK_IMAGE_VIEW_TYPE_2D,							// VkImageViewType			viewType;
+			m_depthFormat,									// VkFormat					format;
+			componentMappingRGBA,							// VkComponentMapping		components;
+			{ VK_IMAGE_ASPECT_DEPTH_BIT, 0u, 1u, 0u, 1u }	// VkImageSubresourceRange	subresourceRange;
+		};
+
+		m_depthAttachmentView = createImageView(vk, vkDevice, &depthAttachmentViewParams);
+	}
+
+	// Create render pass
+	{
+		const VkAttachmentDescription colorAttachmentDescription =
+		{
+			0u,													// VkAttachmentDescriptionFlags		flags;
+			m_colorFormat,										// VkFormat							format;
+			VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits			samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp				loadOp;
+			VK_ATTACHMENT_STORE_OP_STORE,						// VkAttachmentStoreOp				storeOp;
+			VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// VkAttachmentLoadOp				stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp				stencilStoreOp;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout					initialLayout;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout					finalLayout;
+		};
+
+		const VkAttachmentDescription depthAttachmentDescription =
+		{
+			0u,													// VkAttachmentDescriptionFlags		flags;
+			m_depthFormat,										// VkFormat							format;
+			VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits			samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp				loadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp				storeOp;
+			VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// VkAttachmentLoadOp				stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp				stencilStoreOp;
+			VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,	// VkImageLayout					initialLayout;
+			VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,	// VkImageLayout					finalLayout;
+		};
+
+		const VkAttachmentDescription attachments[2] =
+		{
+			colorAttachmentDescription,
+			depthAttachmentDescription
+		};
+
+		const VkAttachmentReference colorAttachmentReference =
+		{
+			0u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout	layout;
+		};
+
+		const VkAttachmentReference depthAttachmentReference =
+		{
+			1u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL	// VkImageLayout	layout;
+		};
+
+		const VkSubpassDescription subpassDescription =
+		{
+			0u,													// VkSubpassDescriptionFlags		flags;
+			VK_PIPELINE_BIND_POINT_GRAPHICS,					// VkPipelineBindPoint				pipelineBindPoint;
+			0u,													// deUint32							inputAttachmentCount;
+			DE_NULL,											// const VkAttachmentReference*		pInputAttachments;
+			1u,													// deUint32							colorAttachmentCount;
+			&colorAttachmentReference,							// const VkAttachmentReference*		pColorAttachments;
+			DE_NULL,											// const VkAttachmentReference*		pResolveAttachments;
+			&depthAttachmentReference,							// const VkAttachmentReference*		pDepthStencilAttachment;
+			0u,													// deUint32							preserveAttachmentCount;
+			DE_NULL												// const VkAttachmentReference*		pPreserveAttachments;
+		};
+
+		const VkRenderPassCreateInfo renderPassParams =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,			// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkRenderPassCreateFlags			flags;
+			2u,													// deUint32							attachmentCount;
+			attachments,										// const VkAttachmentDescription*	pAttachments;
+			1u,													// deUint32							subpassCount;
+			&subpassDescription,								// const VkSubpassDescription*		pSubpasses;
+			0u,													// deUint32							dependencyCount;
+			DE_NULL												// const VkSubpassDependency*		pDependencies;
+		};
+
+		m_renderPass = createRenderPass(vk, vkDevice, &renderPassParams);
+	}
+
+	// Create framebuffer
+	{
+		const VkImageView attachmentBindInfos[2] =
+		{
+			*m_colorAttachmentView,
+			*m_depthAttachmentView,
+		};
+
+		const VkFramebufferCreateInfo framebufferParams =
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,			// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			0u,													// VkFramebufferCreateFlags		flags;
+			*m_renderPass,										// VkRenderPass					renderPass;
+			2u,													// deUint32						attachmentCount;
+			attachmentBindInfos,								// const VkImageView*			pAttachments;
+			(deUint32)m_renderSize.x(),							// deUint32						width;
+			(deUint32)m_renderSize.y(),							// deUint32						height;
+			1u													// deUint32						layers;
+		};
+
+		m_framebuffer = createFramebuffer(vk, vkDevice, &framebufferParams);
+	}
+
+	// Create pipeline layout
+	{
+		const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkPipelineLayoutCreateFlags		flags;
+			0u,													// deUint32							setLayoutCount;
+			DE_NULL,											// const VkDescriptorSetLayout*		pSetLayouts;
+			0u,													// deUint32							pushConstantRangeCount;
+			DE_NULL												// const VkPushConstantRange*		pPushConstantRanges;
+		};
+
+		m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	// Shader modules
+	m_vertexShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("color_vert"), 0);
+	m_fragmentShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("color_frag"), 0);
+
+	// Create pipeline
+	{
+		const VkPipelineShaderStageCreateInfo shaderStages[2] =
+		{
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// VkStructureType						sType;
+				DE_NULL,												// const void*							pNext;
+				0u,														// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_VERTEX_BIT,								// VkShaderStageFlagBits				stage;
+				*m_vertexShaderModule,									// VkShaderModule						module;
+				"main",													// const char*							pName;
+				DE_NULL													// const VkSpecializationInfo*			pSpecializationInfo;
+			},
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// VkStructureType						sType;
+				DE_NULL,												// const void*							pNext;
+				0u,														// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_FRAGMENT_BIT,							// VkShaderStageFlagBits				stage;
+				*m_fragmentShaderModule,								// VkShaderModule						module;
+				"main",													// const char*							pName;
+				DE_NULL													// const VkSpecializationInfo*			pSpecializationInfo;
+			}
+		};
+
+		const VkVertexInputBindingDescription vertexInputBindingDescription =
+		{
+			0u,									// deUint32					binding;
+			sizeof(Vertex4RGBA),				// deUint32				stride;
+			VK_VERTEX_INPUT_RATE_VERTEX			// VkVertexInputRate		inputRate;
+		};
+
+		const VkVertexInputAttributeDescription vertexInputAttributeDescriptions[2] =
+		{
+			{
+				0u,									// deUint32	location;
+				0u,									// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat	format;
+				0u									// deUint32	offset;
+			},
+			{
+				1u,									// deUint32	location;
+				0u,									// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat	format;
+				DE_OFFSET_OF(Vertex4RGBA, color),	// deUint32	offset;
+			}
+		};
+
+		const VkPipelineVertexInputStateCreateInfo vertexInputStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineVertexInputStateCreateFlags	flags;
+			1u,																// deUint32									vertexBindingDescriptionCount;
+			&vertexInputBindingDescription,									// const VkVertexInputBindingDescription*	pVertexBindingDescriptions;
+			2u,																// deUint32									vertexAttributeDescriptionCount;
+			vertexInputAttributeDescriptions								// const VkVertexInputAttributeDescription*	pVertexAttributeDescriptions;
+		};
+
+		const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineInputAssemblyStateCreateFlags	flags;
+			VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,							// VkPrimitiveTopology						topology;
+			false															// VkBool32									primitiveRestartEnable;
+		};
+
+		const VkViewport viewport =
+		{
+			0.0f,						// float	x;
+			0.0f,						// float	y;
+			(float)m_renderSize.x(),	// float	width;
+			(float)m_renderSize.y(),	// float	height;
+			0.0f,						// float	minDepth;
+			1.0f						// float	maxDepth;
+		};
+		const VkRect2D scissor =
+		{
+			{ 0, 0 },												// VkOffset2D  offset;
+			{ m_renderSize.x(), m_renderSize.y() }					// VkExtent2D  extent;
+		};
+		const VkPipelineViewportStateCreateInfo viewportStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,			// VkStructureType						sType;
+			DE_NULL,														// const void*							pNext;
+			0u,																// VkPipelineViewportStateCreateFlags	flags;
+			1u,																// deUint32								viewportCount;
+			&viewport,														// const VkViewport*					pViewports;
+			1u,																// deUint32								scissorCount;
+			&scissor														// const VkRect2D*						pScissors;
+		};
+
+		const VkPipelineRasterizationStateCreateInfo rasterStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineRasterizationStateCreateFlags	flags;
+			false,															// VkBool32									depthClampEnable;
+			false,															// VkBool32									rasterizerDiscardEnable;
+			VK_POLYGON_MODE_FILL,											// VkPolygonMode							polygonMode;
+			VK_CULL_MODE_NONE,												// VkCullModeFlags							cullMode;
+			VK_FRONT_FACE_COUNTER_CLOCKWISE,								// VkFrontFace								frontFace;
+			VK_FALSE,														// VkBool32									depthBiasEnable;
+			0.0f,															// float									depthBiasConstantFactor;
+			0.0f,															// float									depthBiasClamp;
+			0.0f,															// float									depthBiasSlopeFactor;
+			1.0f,															// float									lineWidth;
+		};
+
+		const VkPipelineColorBlendAttachmentState colorBlendAttachmentState =
+		{
+			false,																		// VkBool32					blendEnable;
+			VK_BLEND_FACTOR_ONE,														// VkBlendFactor			srcColorBlendFactor;
+			VK_BLEND_FACTOR_ZERO,														// VkBlendFactor			dstColorBlendFactor;
+			VK_BLEND_OP_ADD,															// VkBlendOp				colorBlendOp;
+			VK_BLEND_FACTOR_ONE,														// VkBlendFactor			srcAlphaBlendFactor;
+			VK_BLEND_FACTOR_ZERO,														// VkBlendFactor			dstAlphaBlendFactor;
+			VK_BLEND_OP_ADD,															// VkBlendOp				alphaBlendOp;
+			VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |						// VkColorComponentFlags	colorWriteMask;
+				VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT
+		};
+
+		const VkPipelineColorBlendStateCreateInfo colorBlendStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	// VkStructureType								sType;
+			DE_NULL,													// const void*									pNext;
+			0,															// VkPipelineColorBlendStateCreateFlags			flags;
+			false,														// VkBool32										logicOpEnable;
+			VK_LOGIC_OP_COPY,											// VkLogicOp									logicOp;
+			1u,															// deUint32										attachmentCount;
+			&colorBlendAttachmentState,									// const VkPipelineColorBlendAttachmentState*	pAttachments;
+			{ 0.0f, 0.0f, 0.0f, 0.0f },									// float										blendConstants[4];
+		};
+
+		const VkPipelineMultisampleStateCreateInfo	multisampleStateParams	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+			VK_SAMPLE_COUNT_1_BIT,										// VkSampleCountFlagBits					rasterizationSamples;
+			false,														// VkBool32									sampleShadingEnable;
+			0.0f,														// float									minSampleShading;
+			DE_NULL,													// const VkSampleMask*						pSampleMask;
+			false,														// VkBool32									alphaToCoverageEnable;
+			false														// VkBool32									alphaToOneEnable;
+		};
+
+		const VkPipelineDynamicStateCreateInfo dynamicStateParams		=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,		// VkStructureType						sType;
+			DE_NULL,													// const void*							pNext;
+			0u,															// VkPipelineDynamicStateCreateFlags	flags;
+			0u,															// deUint32								dynamicStateCount;
+			DE_NULL														// const VkDynamicState*				pDynamicStates;
+		};
+
+		VkPipelineDepthStencilStateCreateInfo depthStencilStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineDepthStencilStateCreateFlags	flags;
+			true,														// VkBool32									depthTestEnable;
+			true,														// VkBool32									depthWriteEnable;
+			VK_COMPARE_OP_LESS,											// VkCompareOp								depthCompareOp;
+			false,														// VkBool32									depthBoundsTestEnable;
+			false,														// VkBool32									stencilTestEnable;
+			// VkStencilOpState	front;
+			{
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	failOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	passOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	depthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	compareOp;
+				0u,						// deUint32		compareMask;
+				0u,						// deUint32		writeMask;
+				0u,						// deUint32		reference;
+			},
+			// VkStencilOpState	back;
+			{
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	failOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	passOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	depthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	compareOp;
+				0u,						// deUint32		compareMask;
+				0u,						// deUint32		writeMask;
+				0u,						// deUint32		reference;
+			},
+			-1.0f,														// float			minDepthBounds;
+			+1.0f,														// float			maxDepthBounds;
+		};
+
+		const VkGraphicsPipelineCreateInfo graphicsPipelineParams =
+		{
+			VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,	// VkStructureType									sType;
+			DE_NULL,											// const void*										pNext;
+			0u,													// VkPipelineCreateFlags							flags;
+			2u,													// deUint32											stageCount;
+			shaderStages,										// const VkPipelineShaderStageCreateInfo*			pStages;
+			&vertexInputStateParams,							// const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+			&inputAssemblyStateParams,							// const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+			DE_NULL,											// const VkPipelineTessellationStateCreateInfo*		pTessellationState;
+			&viewportStateParams,								// const VkPipelineViewportStateCreateInfo*			pViewportState;
+			&rasterStateParams,									// const VkPipelineRasterizationStateCreateInfo*	pRasterizationState;
+			&multisampleStateParams,							// const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+			&depthStencilStateParams,							// const VkPipelineDepthStencilStateCreateInfo*		pDepthStencilState;
+			&colorBlendStateParams,								// const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+			&dynamicStateParams,								// const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+			*m_pipelineLayout,									// VkPipelineLayout									layout;
+			*m_renderPass,										// VkRenderPass										renderPass;
+			0u,													// deUint32											subpass;
+			0u,													// VkPipeline										basePipelineHandle;
+			0u,													// deInt32											basePipelineIndex;
+		};
+
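+		// One graphics pipeline is created per quad: the same create info is reused and only the depth compare operator changes.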
+		for (int quadNdx = 0; quadNdx < DepthTest::QUAD_COUNT; quadNdx++)
+		{
+			depthStencilStateParams.depthCompareOp	= depthCompareOps[quadNdx];
+			m_graphicsPipelines[quadNdx]			= createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+		}
+	}
+
+	// Create vertex buffer
+	{
+		const VkBufferCreateInfo vertexBufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			1024u,										// VkDeviceSize			size;
+			VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_vertices			= createOverlappingQuads();
+		m_vertexBuffer		= createBuffer(vk, vkDevice, &vertexBufferParams);
+		m_vertexBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_vertexBuffer), MemoryRequirement::HostVisible);
+
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_vertexBuffer, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset()));
+
+		// Adjust depths
+		for (int quadNdx = 0; quadNdx < DepthTest::QUAD_COUNT; quadNdx++)
+			for (int vertexNdx = 0; vertexNdx < 6; vertexNdx++)
+				m_vertices[quadNdx * 6 + vertexNdx].position.z() = DepthTest::quadDepths[quadNdx];
+
+		// Load vertices into vertex buffer
+		deMemcpy(m_vertexBufferAlloc->getHostPtr(), m_vertices.data(), m_vertices.size() * sizeof(Vertex4RGBA));
+		flushMappedMemoryRange(vk, vkDevice, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset(), vertexBufferParams.size);
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,		// VkCommandPoolCreateFlags	flags;
+			queueFamilyIndex							// deUint32				queueFamilyIndex;
+		};
+
+		m_cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u												// deUint32					commandBufferCount;
+		};
+
+		const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+			DE_NULL,										// const void*						pNext;
+			0u,												// VkCommandBufferUsageFlags		flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,	// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+		};
+
+		const VkClearValue attachmentClearValues[2] =
+		{
+			defaultClearValue(m_colorFormat),
+			defaultClearValue(m_depthFormat),
+		};
+
+		const VkRenderPassBeginInfo renderPassBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,				// VkStructureType		sType;
+			DE_NULL,												// const void*			pNext;
+			*m_renderPass,											// VkRenderPass			renderPass;
+			*m_framebuffer,											// VkFramebuffer		framebuffer;
+			{ { 0, 0 }, { m_renderSize.x(), m_renderSize.y() } },	// VkRect2D				renderArea;
+			2,														// deUint32				clearValueCount;
+			attachmentClearValues									// const VkClearValue*	pClearValues;
+		};
+
+		m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
+
+		VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+		vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
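+		// Each quad occupies an equal slice of the vertex buffer: bind at the per-quad offset and draw its six vertices with the matching pipeline.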
+		const VkDeviceSize		quadOffset		= (m_vertices.size() / DepthTest::QUAD_COUNT) * sizeof(Vertex4RGBA);
+
+		for (int quadNdx = 0; quadNdx < DepthTest::QUAD_COUNT; quadNdx++)
+		{
+			VkDeviceSize vertexBufferOffset = quadOffset * quadNdx;
+
+			vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipelines[quadNdx]);
+			vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), &vertexBufferOffset);
+			vk.cmdDraw(*m_cmdBuffer, (deUint32)(m_vertices.size() / DepthTest::QUAD_COUNT), 1, 0, 0);
+		}
+
+		vk.cmdEndRenderPass(*m_cmdBuffer);
+		VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+
+		m_fence = createFence(vk, vkDevice, &fenceParams);
+	}
+}
+
+DepthTestInstance::~DepthTestInstance (void)
+{
+}
+
+tcu::TestStatus DepthTestInstance::iterate (void)
+{
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const VkQueue				queue		= m_context.getUniversalQueue();
+	const VkSubmitInfo			submitInfo	=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
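+	// Submit the pre-recorded command buffer and block on the fence so rendering has finished before verification.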
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity*/));
+
+	return verifyImage();
+}
+
+tcu::TestStatus DepthTestInstance::verifyImage (void)
+{
+	const tcu::TextureFormat	tcuColorFormat	= mapVkFormat(m_colorFormat);
+	const tcu::TextureFormat	tcuDepthFormat	= mapVkFormat(m_depthFormat);
+	const ColorVertexShader		vertexShader;
+	const ColorFragmentShader	fragmentShader	(tcuColorFormat, tcuDepthFormat);
+	const rr::Program			program			(&vertexShader, &fragmentShader);
+	ReferenceRenderer			refRenderer		(m_renderSize.x(), m_renderSize.y(), 1, tcuColorFormat, tcuDepthFormat, &program);
+	bool						compareOk		= false;
+
+	// Render reference image
+	{
+		for (int quadNdx = 0; quadNdx < DepthTest::QUAD_COUNT; quadNdx++)
+		{
+			// Set depth state
+			rr::RenderState renderState(refRenderer.getViewportState());
+			renderState.fragOps.depthTestEnabled = true;
+			renderState.fragOps.depthFunc = mapVkCompareOp(m_depthCompareOps[quadNdx]);
+
+			refRenderer.draw(renderState,
+							 rr::PRIMITIVETYPE_TRIANGLES,
+							 std::vector<Vertex4RGBA>(m_vertices.begin() + quadNdx * 6,
+													  m_vertices.begin() + (quadNdx + 1) * 6));
+		}
+	}
+
+	// Compare result with reference image
+	{
+		const DeviceInterface&			vk					= m_context.getDeviceInterface();
+		const VkDevice					vkDevice			= m_context.getDevice();
+		const VkQueue					queue				= m_context.getUniversalQueue();
+		const deUint32					queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+		SimpleAllocator					allocator			(vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
+		de::MovePtr<tcu::TextureLevel>	result				= readColorAttachment(vk, vkDevice, queue, queueFamilyIndex, allocator, *m_colorImage, m_colorFormat, m_renderSize);
+
+		compareOk = tcu::intThresholdPositionDeviationCompare(m_context.getTestContext().getLog(),
+															  "IntImageCompare",
+															  "Image comparison",
+															  refRenderer.getAccess(),
+															  result->getAccess(),
+															  tcu::UVec4(2, 2, 2, 2),
+															  tcu::IVec3(1, 1, 0),
+															  true,
+															  tcu::COMPARE_LOG_RESULT);
+	}
+
+	if (compareOk)
+		return tcu::TestStatus::pass("Result image matches reference");
+	else
+		return tcu::TestStatus::fail("Image mismatch");
+}
+
+std::string getFormatCaseName (const VkFormat format)
+{
+	const std::string	fullName	= getFormatName(format);
+
+	DE_ASSERT(de::beginsWith(fullName, "VK_FORMAT_"));
+
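+	// Drop the "VK_FORMAT_" prefix (10 characters) and lowercase the remainder to build the case name.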
+	return de::toLower(fullName.substr(10));
+}
+
+std::string	getCompareOpsName (const VkCompareOp quadDepthOps[DepthTest::QUAD_COUNT])
+{
+	std::ostringstream name;
+
+	for (int quadNdx = 0; quadNdx < DepthTest::QUAD_COUNT; quadNdx++)
+	{
+		const std::string	fullOpName	= getCompareOpName(quadDepthOps[quadNdx]);
+
+		DE_ASSERT(de::beginsWith(fullOpName, "VK_COMPARE_OP_"));
+
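+		// Drop the "VK_COMPARE_OP_" prefix (14 characters); quads are separated by underscores.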
+		name << de::toLower(fullOpName.substr(14));
+
+		if (quadNdx < DepthTest::QUAD_COUNT - 1)
+			name << "_";
+	}
+
+	return name.str();
+}
+
+std::string	getCompareOpsDescription (const VkCompareOp quadDepthOps[DepthTest::QUAD_COUNT])
+{
+	std::ostringstream desc;
+	desc << "Draws " << DepthTest::QUAD_COUNT << " quads with depth compare ops: ";
+
+	for (int quadNdx = 0; quadNdx < DepthTest::QUAD_COUNT; quadNdx++)
+	{
+		desc << getCompareOpName(quadDepthOps[quadNdx]) << " at depth " << DepthTest::quadDepths[quadNdx];
+
+		if (quadNdx < DepthTest::QUAD_COUNT - 1)
+			desc << ", ";
+	}
+	return desc.str();
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createDepthTests (tcu::TestContext& testCtx)
+{
+	const VkFormat depthFormats[] =
+	{
+		VK_FORMAT_D16_UNORM,
+		VK_FORMAT_X8_D24_UNORM_PACK32,
+		VK_FORMAT_D32_SFLOAT,
+		VK_FORMAT_D16_UNORM_S8_UINT,
+		VK_FORMAT_D24_UNORM_S8_UINT,
+		VK_FORMAT_D32_SFLOAT_S8_UINT
+	};
+
+	// Each entry configures the depth compare operators of QUAD_COUNT quads.
+	// All entries cover pair-wise combinations of compare operators.
+	const VkCompareOp depthOps[][DepthTest::QUAD_COUNT] =
+	{
+		{ VK_COMPARE_OP_NOT_EQUAL,			VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_NOT_EQUAL },
+		{ VK_COMPARE_OP_NOT_EQUAL,			VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_GREATER },
+		{ VK_COMPARE_OP_NOT_EQUAL,			VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_LESS_OR_EQUAL },
+		{ VK_COMPARE_OP_NOT_EQUAL,			VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_GREATER_OR_EQUAL },
+		{ VK_COMPARE_OP_NOT_EQUAL,			VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_ALWAYS },
+		{ VK_COMPARE_OP_NOT_EQUAL,			VK_COMPARE_OP_LESS,				VK_COMPARE_OP_LESS,				VK_COMPARE_OP_LESS },
+		{ VK_COMPARE_OP_NOT_EQUAL,			VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_NEVER },
+		{ VK_COMPARE_OP_EQUAL,				VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_EQUAL },
+		{ VK_COMPARE_OP_EQUAL,				VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_LESS },
+		{ VK_COMPARE_OP_EQUAL,				VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_NOT_EQUAL },
+		{ VK_COMPARE_OP_EQUAL,				VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_GREATER },
+		{ VK_COMPARE_OP_EQUAL,				VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_LESS,				VK_COMPARE_OP_LESS_OR_EQUAL },
+		{ VK_COMPARE_OP_NOT_EQUAL,			VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_EQUAL },
+		{ VK_COMPARE_OP_EQUAL,				VK_COMPARE_OP_LESS,				VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_ALWAYS },
+		{ VK_COMPARE_OP_EQUAL,				VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_GREATER_OR_EQUAL },
+		{ VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_LESS },
+		{ VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_ALWAYS },
+		{ VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_GREATER },
+		{ VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_NOT_EQUAL },
+		{ VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_GREATER_OR_EQUAL },
+		{ VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_LESS,				VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_NEVER },
+		{ VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_GREATER },
+		{ VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_LESS_OR_EQUAL },
+		{ VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_NOT_EQUAL },
+		{ VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_GREATER_OR_EQUAL },
+		{ VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_LESS_OR_EQUAL },
+		{ VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_LESS },
+		{ VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_LESS,				VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_EQUAL },
+		{ VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_LESS,				VK_COMPARE_OP_NEVER },
+		{ VK_COMPARE_OP_LESS_OR_EQUAL,		VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_LESS_OR_EQUAL },
+		{ VK_COMPARE_OP_LESS_OR_EQUAL,		VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_LESS,				VK_COMPARE_OP_EQUAL },
+		{ VK_COMPARE_OP_LESS_OR_EQUAL,		VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_LESS },
+		{ VK_COMPARE_OP_LESS_OR_EQUAL,		VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_ALWAYS },
+		{ VK_COMPARE_OP_LESS_OR_EQUAL,		VK_COMPARE_OP_LESS,				VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_GREATER_OR_EQUAL },
+		{ VK_COMPARE_OP_LESS_OR_EQUAL,		VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_NEVER },
+		{ VK_COMPARE_OP_LESS,				VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_LESS,				VK_COMPARE_OP_GREATER_OR_EQUAL },
+		{ VK_COMPARE_OP_LESS,				VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_LESS_OR_EQUAL },
+		{ VK_COMPARE_OP_LESS,				VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_NEVER },
+		{ VK_COMPARE_OP_LESS,				VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_EQUAL },
+		{ VK_COMPARE_OP_LESS,				VK_COMPARE_OP_LESS,				VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_NOT_EQUAL },
+		{ VK_COMPARE_OP_LESS,				VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_ALWAYS },
+		{ VK_COMPARE_OP_NEVER,				VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_ALWAYS },
+		{ VK_COMPARE_OP_LESS,				VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_LESS },
+		{ VK_COMPARE_OP_NEVER,				VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_EQUAL },
+		{ VK_COMPARE_OP_NEVER,				VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_LESS,				VK_COMPARE_OP_GREATER },
+		{ VK_COMPARE_OP_NEVER,				VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_NOT_EQUAL },
+		{ VK_COMPARE_OP_NEVER,				VK_COMPARE_OP_LESS,				VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_LESS_OR_EQUAL },
+		{ VK_COMPARE_OP_NEVER,				VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_GREATER_OR_EQUAL },
+		{ VK_COMPARE_OP_ALWAYS,				VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_NEVER },
+		{ VK_COMPARE_OP_ALWAYS,				VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_LESS },
+		{ VK_COMPARE_OP_ALWAYS,				VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_LESS,				VK_COMPARE_OP_ALWAYS },
+		{ VK_COMPARE_OP_ALWAYS,				VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_GREATER },
+		{ VK_COMPARE_OP_ALWAYS,				VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_EQUAL },
+		{ VK_COMPARE_OP_LESS_OR_EQUAL,		VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_NOT_EQUAL },
+		{ VK_COMPARE_OP_NEVER,				VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_LESS },
+		{ VK_COMPARE_OP_EQUAL,				VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_NEVER },
+		{ VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_LESS,				VK_COMPARE_OP_NOT_EQUAL },
+		{ VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_EQUAL },
+		{ VK_COMPARE_OP_EQUAL,				VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_LESS_OR_EQUAL },
+		{ VK_COMPARE_OP_LESS_OR_EQUAL,		VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_GREATER },
+		{ VK_COMPARE_OP_NEVER,				VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_NEVER },
+		{ VK_COMPARE_OP_ALWAYS,				VK_COMPARE_OP_LESS,				VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_GREATER },
+		{ VK_COMPARE_OP_ALWAYS,				VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_NOT_EQUAL },
+		{ VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_ALWAYS },
+		{ VK_COMPARE_OP_LESS_OR_EQUAL,		VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_GREATER },
+		{ VK_COMPARE_OP_LESS,				VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_LESS,				VK_COMPARE_OP_GREATER },
+		{ VK_COMPARE_OP_ALWAYS,				VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_GREATER_OR_EQUAL },
+		{ VK_COMPARE_OP_ALWAYS,				VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_LESS_OR_EQUAL },
+		{ VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_LESS },
+		{ VK_COMPARE_OP_GREATER_OR_EQUAL,	VK_COMPARE_OP_NEVER,			VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_NEVER },
+		{ VK_COMPARE_OP_LESS,				VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_EQUAL,			VK_COMPARE_OP_EQUAL },
+		{ VK_COMPARE_OP_NEVER,				VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_ALWAYS,			VK_COMPARE_OP_GREATER_OR_EQUAL },
+		{ VK_COMPARE_OP_NOT_EQUAL,			VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_GREATER,			VK_COMPARE_OP_ALWAYS },
+		{ VK_COMPARE_OP_NOT_EQUAL,			VK_COMPARE_OP_LESS_OR_EQUAL,	VK_COMPARE_OP_NOT_EQUAL,		VK_COMPARE_OP_GREATER }
+	};
+
+	de::MovePtr<tcu::TestCaseGroup> depthTests (new tcu::TestCaseGroup(testCtx, "depth", "Depth tests"));
+
+	// Tests for format features
+	{
+		de::MovePtr<tcu::TestCaseGroup> formatFeaturesTests (new tcu::TestCaseGroup(testCtx, "format_features", "Checks depth format features"));
+
+		// Formats that must be supported in all implementations
+		addFunctionCase(formatFeaturesTests.get(),
+						"support_d16_unorm",
+						"Tests if VK_FORMAT_D16_UNORM is supported as a depth/stencil attachment format",
+						testSupportsDepthStencilFormat,
+						VK_FORMAT_D16_UNORM);
+
+		// Sets where at least one of the formats must be supported
+		const VkFormat	depthOnlyFormats[]		= { VK_FORMAT_X8_D24_UNORM_PACK32, VK_FORMAT_D32_SFLOAT };
+		const VkFormat	depthStencilFormats[]	= { VK_FORMAT_D24_UNORM_S8_UINT, VK_FORMAT_D32_SFLOAT_S8_UINT };
+
+		addFunctionCase(formatFeaturesTests.get(),
+						"support_d24_unorm_or_d32_sfloat",
+						"Tests if at least one of VK_FORMAT_X8_D24_UNORM_PACK32 or VK_FORMAT_D32_SFLOAT is supported as a depth/stencil attachment format",
+						testSupportsAtLeastOneDepthStencilFormat,
+						std::vector<VkFormat>(depthOnlyFormats, depthOnlyFormats + DE_LENGTH_OF_ARRAY(depthOnlyFormats)));
+
+		addFunctionCase(formatFeaturesTests.get(),
+						"support_d24_unorm_s8_uint_or_d32_sfloat_s8_uint",
+						"Tests if at least one of VK_FORMAT_D24_UNORM_S8_UINT or VK_FORMAT_D32_SFLOAT_S8_UINT is supported as a depth/stencil attachment format",
+						testSupportsAtLeastOneDepthStencilFormat,
+						std::vector<VkFormat>(depthStencilFormats, depthStencilFormats + DE_LENGTH_OF_ARRAY(depthStencilFormats)));
+
+		depthTests->addChild(formatFeaturesTests.release());
+	}
+
+	// Tests for format and compare operators
+	{
+		de::MovePtr<tcu::TestCaseGroup> formatTests (new tcu::TestCaseGroup(testCtx, "format", "Uses different depth formats"));
+
+		for (size_t formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(depthFormats); formatNdx++)
+		{
+			de::MovePtr<tcu::TestCaseGroup>	formatTest		(new tcu::TestCaseGroup(testCtx,
+																					getFormatCaseName(depthFormats[formatNdx]).c_str(),
+																					(std::string("Uses format ") + getFormatName(depthFormats[formatNdx])).c_str()));
+			de::MovePtr<tcu::TestCaseGroup>	compareOpsTests	(new tcu::TestCaseGroup(testCtx, "compare_ops", "Combines depth compare operators"));
+
+			for (size_t opsNdx = 0; opsNdx < DE_LENGTH_OF_ARRAY(depthOps); opsNdx++)
+			{
+				compareOpsTests->addChild(new DepthTest(testCtx,
+														getCompareOpsName(depthOps[opsNdx]),
+														getCompareOpsDescription(depthOps[opsNdx]),
+														depthFormats[formatNdx],
+														depthOps[opsNdx]));
+			}
+			formatTest->addChild(compareOpsTests.release());
+			formatTests->addChild(formatTest.release());
+		}
+		depthTests->addChild(formatTests.release());
+	}
+
+	return depthTests.release();
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineDepthTests.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineDepthTests.hpp
new file mode 100644
index 0000000..006ce21
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineDepthTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTPIPELINEDEPTHTESTS_HPP
+#define _VKTPIPELINEDEPTHTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Depth Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+tcu::TestCaseGroup* createDepthTests (tcu::TestContext& testCtx);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINEDEPTHTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageSamplingInstance.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageSamplingInstance.cpp
new file mode 100644
index 0000000..3783907
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageSamplingInstance.cpp
@@ -0,0 +1,1021 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image sampling case
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineImageSamplingInstance.hpp"
+#include "vktPipelineClearUtil.hpp"
+#include "vktPipelineReferenceRenderer.hpp"
+#include "vkBuilderUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRefUtil.hpp"
+#include "tcuImageCompare.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+using de::MovePtr;
+
+namespace
+{
+
+static VkImageType getCompatibleImageType (VkImageViewType viewType)
+{
+	switch (viewType)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D:				return VK_IMAGE_TYPE_1D;
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:		return VK_IMAGE_TYPE_1D;
+		case VK_IMAGE_VIEW_TYPE_2D:				return VK_IMAGE_TYPE_2D;
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:		return VK_IMAGE_TYPE_2D;
+		case VK_IMAGE_VIEW_TYPE_3D:				return VK_IMAGE_TYPE_3D;
+		case VK_IMAGE_VIEW_TYPE_CUBE:			return VK_IMAGE_TYPE_2D;
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:		return VK_IMAGE_TYPE_2D;
+		default:
+			break;
+	}
+
+	DE_ASSERT(false);
+	return VK_IMAGE_TYPE_1D;
+}
+
+template<typename TcuFormatType>
+static MovePtr<TestTexture> createTestTexture (const TcuFormatType format, VkImageViewType viewType, const tcu::IVec3& size, int layerCount)
+{
+	MovePtr<TestTexture>	texture;
+	const VkImageType		imageType = getCompatibleImageType(viewType);
+
+	switch (imageType)
+	{
+		case VK_IMAGE_TYPE_1D:
+			if (layerCount == 1)
+				texture = MovePtr<TestTexture>(new TestTexture1D(format, size.x()));
+			else
+				texture = MovePtr<TestTexture>(new TestTexture1DArray(format, size.x(), layerCount));
+
+			break;
+
+		case VK_IMAGE_TYPE_2D:
+			if (layerCount == 1)
+			{
+				texture = MovePtr<TestTexture>(new TestTexture2D(format, size.x(), size.y()));
+			}
+			else
+			{
+				if (viewType == VK_IMAGE_VIEW_TYPE_CUBE || viewType == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)
+				{
+					if (layerCount == tcu::CUBEFACE_LAST)
+					{
+						texture = MovePtr<TestTexture>(new TestTextureCube(format, size.x()));
+					}
+					else
+					{
+						DE_ASSERT(layerCount % tcu::CUBEFACE_LAST == 0);
+
+						texture = MovePtr<TestTexture>(new TestTextureCubeArray(format, size.x(), layerCount));
+					}
+				}
+				else
+				{
+					texture = MovePtr<TestTexture>(new TestTexture2DArray(format, size.x(), size.y(), layerCount));
+				}
+			}
+
+			break;
+
+		case VK_IMAGE_TYPE_3D:
+			texture = MovePtr<TestTexture>(new TestTexture3D(format, size.x(), size.y(), size.z()));
+			break;
+
+		default:
+			DE_ASSERT(false);
+	}
+
+	return texture;
+}
+
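+// Generic helper: copies the mip levels and array layers selected by subresourceRange from src to dest, treating layers as depth slices of each level access (used for 2D array and cube array textures).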
+template<typename TcuTextureType>
+static void copySubresourceRange (TcuTextureType& dest, const TcuTextureType& src, const VkImageSubresourceRange& subresourceRange)
+{
+	DE_ASSERT(subresourceRange.levelCount <= (deUint32)dest.getNumLevels());
+	DE_ASSERT(subresourceRange.baseMipLevel + subresourceRange.levelCount <= (deUint32)src.getNumLevels());
+
+	for (int levelNdx = 0; levelNdx < dest.getNumLevels(); levelNdx++)
+	{
+		const tcu::ConstPixelBufferAccess	srcLevel		(src.getLevel(subresourceRange.baseMipLevel + levelNdx));
+		const deUint32						srcLayerOffset	= subresourceRange.baseArrayLayer * srcLevel.getWidth() * srcLevel.getHeight() * srcLevel.getFormat().getPixelSize();
+		const tcu::ConstPixelBufferAccess	srcLevelLayers	(srcLevel.getFormat(), srcLevel.getWidth(), srcLevel.getHeight(), subresourceRange.layerCount, (deUint8*)srcLevel.getDataPtr() + srcLayerOffset);
+
+		if (dest.isLevelEmpty(levelNdx))
+			dest.allocLevel(levelNdx);
+
+		tcu::copy(dest.getLevel(levelNdx), srcLevelLayers);
+	}
+}
+
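+// Specialization for 1D array textures: array layers live in the Y dimension of each level access, so the layer range is sliced from the height.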
+template<>
+void copySubresourceRange<tcu::Texture1DArray> (tcu::Texture1DArray& dest, const tcu::Texture1DArray& src, const VkImageSubresourceRange& subresourceRange)
+{
+	DE_ASSERT(subresourceRange.levelCount <= (deUint32)dest.getNumLevels());
+	DE_ASSERT(subresourceRange.baseMipLevel + subresourceRange.levelCount <= (deUint32)src.getNumLevels());
+
+	DE_ASSERT(subresourceRange.layerCount == (deUint32)dest.getNumLayers());
+	DE_ASSERT(subresourceRange.baseArrayLayer + subresourceRange.layerCount <= (deUint32)src.getNumLayers());
+
+	for (int levelNdx = 0; levelNdx < dest.getNumLevels(); levelNdx++)
+	{
+		const tcu::ConstPixelBufferAccess	srcLevel		(src.getLevel(subresourceRange.baseMipLevel + levelNdx));
+		const deUint32						srcLayerOffset	= subresourceRange.baseArrayLayer * srcLevel.getWidth() * srcLevel.getFormat().getPixelSize();
+		const tcu::ConstPixelBufferAccess	srcLevelLayers	(srcLevel.getFormat(), srcLevel.getWidth(), subresourceRange.layerCount, 1, (deUint8*)srcLevel.getDataPtr() + srcLayerOffset);
+
+		if (dest.isLevelEmpty(levelNdx))
+			dest.allocLevel(levelNdx);
+
+		tcu::copy(dest.getLevel(levelNdx), srcLevelLayers);
+	}
+}
+
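+// Specialization for 3D textures: there are no array layers, so each selected mip level is copied in full.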
+template<>
+void copySubresourceRange<tcu::Texture3D>(tcu::Texture3D& dest, const tcu::Texture3D& src, const VkImageSubresourceRange& subresourceRange)
+{
+	DE_ASSERT(subresourceRange.levelCount <= (deUint32)dest.getNumLevels());
+	DE_ASSERT(subresourceRange.baseMipLevel + subresourceRange.levelCount <= (deUint32)src.getNumLevels());
+
+	for (int levelNdx = 0; levelNdx < dest.getNumLevels(); levelNdx++)
+	{
+		const tcu::ConstPixelBufferAccess	srcLevel(src.getLevel(subresourceRange.baseMipLevel + levelNdx));
+		const tcu::ConstPixelBufferAccess	srcLevelLayers(srcLevel.getFormat(), srcLevel.getWidth(), srcLevel.getHeight(), srcLevel.getDepth(), (deUint8*)srcLevel.getDataPtr());
+
+		if (dest.isLevelEmpty(levelNdx))
+			dest.allocLevel(levelNdx);
+
+		tcu::copy(dest.getLevel(levelNdx), srcLevelLayers);
+	}
+}
+
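+// Builds the reference sampler program matching the image view type, layer count and selected subresource range.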
+static MovePtr<Program> createRefProgram(const tcu::TextureFormat&			renderTargetFormat,
+										  const tcu::Sampler&				sampler,
+										  float								samplerLod,
+										  const tcu::UVec4&					componentMapping,
+										  const TestTexture&				testTexture,
+										  VkImageViewType					viewType,
+										  int								layerCount,
+										  const VkImageSubresourceRange&	subresource)
+{
+	MovePtr<Program>	program;
+	const VkImageType	imageType		= getCompatibleImageType(viewType);
+	tcu::Vec4			lookupScale		(1.0f);
+	tcu::Vec4			lookupBias		(0.0f);
+
+	if (!testTexture.isCompressed())
+	{
+		const tcu::TextureFormatInfo	fmtInfo	= tcu::getTextureFormatInfo(testTexture.getLevel(0, 0).getFormat());
+
+		// Needed to normalize various formats to 0..1 range for writing into RT
+		lookupScale	= fmtInfo.lookupScale;
+		lookupBias	= fmtInfo.lookupBias;
+	}
+	// else: All supported compressed formats are fine with no normalization.
+	//		 ASTC LDR blocks decompress to f16 so querying normalization parameters
+	//		 based on uncompressed formats would actually lead to massive precision loss
+	//		 and complete lack of coverage in case of R8G8B8A8_UNORM RT.
+
+	switch (imageType)
+	{
+		case VK_IMAGE_TYPE_1D:
+			if (layerCount == 1)
+			{
+				const tcu::Texture1D& texture = dynamic_cast<const TestTexture1D&>(testTexture).getTexture();
+				program = MovePtr<Program>(new SamplerProgram<tcu::Texture1D>(renderTargetFormat, texture, sampler, samplerLod, lookupScale, lookupBias, componentMapping));
+			}
+			else
+			{
+				const tcu::Texture1DArray& texture = dynamic_cast<const TestTexture1DArray&>(testTexture).getTexture();
+
+				if (subresource.baseMipLevel > 0 || subresource.layerCount < (deUint32)texture.getNumLayers())
+				{
+					// Not all texture levels and layers are needed. Create new sub-texture.
+					const tcu::ConstPixelBufferAccess	baseLevel	= texture.getLevel(subresource.baseMipLevel);
+					tcu::Texture1DArray					textureView	(texture.getFormat(), baseLevel.getWidth(), subresource.layerCount);
+
+					copySubresourceRange(textureView, texture, subresource);
+
+					program = MovePtr<Program>(new SamplerProgram<tcu::Texture1DArray>(renderTargetFormat, textureView, sampler, samplerLod, lookupScale, lookupBias, componentMapping));
+				}
+				else
+				{
+					program = MovePtr<Program>(new SamplerProgram<tcu::Texture1DArray>(renderTargetFormat, texture, sampler, samplerLod, lookupScale, lookupBias, componentMapping));
+				}
+			}
+			break;
+
+		case VK_IMAGE_TYPE_2D:
+			if (layerCount == 1)
+			{
+				const tcu::Texture2D& texture = dynamic_cast<const TestTexture2D&>(testTexture).getTexture();
+				program = MovePtr<Program>(new SamplerProgram<tcu::Texture2D>(renderTargetFormat, texture, sampler, samplerLod, lookupScale, lookupBias, componentMapping));
+			}
+			else
+			{
+				if (viewType == VK_IMAGE_VIEW_TYPE_CUBE || viewType == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)
+				{
+					if (layerCount == tcu::CUBEFACE_LAST)
+					{
+						const tcu::TextureCube& texture = dynamic_cast<const TestTextureCube&>(testTexture).getTexture();
+						program = MovePtr<Program>(new SamplerProgram<tcu::TextureCube>(renderTargetFormat, texture, sampler, samplerLod, lookupScale, lookupBias, componentMapping));
+					}
+					else
+					{
+						DE_ASSERT(layerCount % tcu::CUBEFACE_LAST == 0);
+
+						const tcu::TextureCubeArray& texture = dynamic_cast<const TestTextureCubeArray&>(testTexture).getTexture();
+
+						if (subresource.baseMipLevel > 0 || subresource.layerCount < (deUint32)texture.getDepth())
+						{
+							DE_ASSERT(subresource.baseArrayLayer + subresource.layerCount <= (deUint32)texture.getDepth());
+
+							// Not all texture levels and layers are needed. Create new sub-texture.
+							const tcu::ConstPixelBufferAccess	baseLevel		= texture.getLevel(subresource.baseMipLevel);
+							tcu::TextureCubeArray				textureView		(texture.getFormat(), baseLevel.getWidth(), subresource.layerCount);
+
+							copySubresourceRange(textureView, texture, subresource);
+
+							program = MovePtr<Program>(new SamplerProgram<tcu::TextureCubeArray>(renderTargetFormat, textureView, sampler, samplerLod, lookupScale, lookupBias, componentMapping));
+						}
+						else
+						{
+							// Use all array layers
+							program = MovePtr<Program>(new SamplerProgram<tcu::TextureCubeArray>(renderTargetFormat, texture, sampler, samplerLod, lookupScale, lookupBias, componentMapping));
+						}
+					}
+				}
+				else
+				{
+					const tcu::Texture2DArray& texture = dynamic_cast<const TestTexture2DArray&>(testTexture).getTexture();
+
+					if (subresource.baseMipLevel > 0 || subresource.layerCount < (deUint32)texture.getNumLayers())
+					{
+						DE_ASSERT(subresource.baseArrayLayer + subresource.layerCount <= (deUint32)texture.getNumLayers());
+
+						// Not all texture levels and layers are needed. Create new sub-texture.
+						const tcu::ConstPixelBufferAccess	baseLevel	= texture.getLevel(subresource.baseMipLevel);
+						tcu::Texture2DArray					textureView	(texture.getFormat(), baseLevel.getWidth(), baseLevel.getHeight(), subresource.layerCount);
+
+						copySubresourceRange(textureView, texture, subresource);
+
+						program = MovePtr<Program>(new SamplerProgram<tcu::Texture2DArray>(renderTargetFormat, textureView, sampler, samplerLod, lookupScale, lookupBias, componentMapping));
+					}
+					else
+					{
+						// Use all array layers
+						program = MovePtr<Program>(new SamplerProgram<tcu::Texture2DArray>(renderTargetFormat, texture, sampler, samplerLod, lookupScale, lookupBias, componentMapping));
+					}
+				}
+			}
+			break;
+
+		case VK_IMAGE_TYPE_3D:
+			{
+				const tcu::Texture3D& texture = dynamic_cast<const TestTexture3D&>(testTexture).getTexture();
+
+				if (subresource.baseMipLevel > 0)
+				{
+					// Not all texture levels are needed. Create new sub-texture.
+					const tcu::ConstPixelBufferAccess	baseLevel = texture.getLevel(subresource.baseMipLevel);
+					tcu::Texture3D						textureView(texture.getFormat(), baseLevel.getWidth(), baseLevel.getHeight(), baseLevel.getDepth());
+
+					copySubresourceRange(textureView, texture, subresource);
+
+					program = MovePtr<Program>(new SamplerProgram<tcu::Texture3D>(renderTargetFormat, textureView, sampler, samplerLod, lookupScale, lookupBias, componentMapping));
+				}
+				else
+				{
+					program = MovePtr<Program>(new SamplerProgram<tcu::Texture3D>(renderTargetFormat, texture, sampler, samplerLod, lookupScale, lookupBias, componentMapping));
+				}
+			}
+			break;
+
+		default:
+			DE_ASSERT(false);
+	}
+
+	return program;
+}
+
+} // anonymous
+
+ImageSamplingInstance::ImageSamplingInstance (Context&							context,
+											  const tcu::UVec2&					renderSize,
+											  VkImageViewType					imageViewType,
+											  VkFormat							imageFormat,
+											  const tcu::IVec3&					imageSize,
+											  int								layerCount,
+											  const VkComponentMapping&			componentMapping,
+											  const VkImageSubresourceRange&	subresourceRange,
+											  const VkSamplerCreateInfo&		samplerParams,
+											  float								samplerLod,
+											  const std::vector<Vertex4Tex4>&	vertices)
+	: vkt::TestInstance		(context)
+	, m_imageViewType		(imageViewType)
+	, m_imageSize			(imageSize)
+	, m_layerCount			(layerCount)
+	, m_componentMapping	(componentMapping)
+	, m_subresourceRange	(subresourceRange)
+	, m_samplerParams		(samplerParams)
+	, m_samplerLod			(samplerLod)
+	, m_renderSize			(renderSize)
+	, m_colorFormat			(VK_FORMAT_R8G8B8A8_UNORM)
+	, m_vertices			(vertices)
+{
+	const DeviceInterface&		vk						= context.getDeviceInterface();
+	const VkDevice				vkDevice				= context.getDevice();
+	const VkQueue				queue					= context.getUniversalQueue();
+	const deUint32				queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc				(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+	const VkComponentMapping	componentMappingRGBA	= { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A };
+
+	if (!isSupportedSamplableFormat(context.getInstanceInterface(), context.getPhysicalDevice(), imageFormat))
+		throw tcu::NotSupportedError(std::string("Unsupported format for sampling: ") + getFormatName(imageFormat));
+
+	if ((samplerParams.minFilter == VK_FILTER_LINEAR ||
+		 samplerParams.magFilter == VK_FILTER_LINEAR ||
+		 samplerParams.mipmapMode == VK_SAMPLER_MIPMAP_MODE_LINEAR) &&
+		!isLinearFilteringSupported(context.getInstanceInterface(), context.getPhysicalDevice(), imageFormat, VK_IMAGE_TILING_OPTIMAL))
+		throw tcu::NotSupportedError(std::string("Unsupported format for linear filtering: ") + getFormatName(imageFormat));
+
+	if (isCompressedFormat(imageFormat) && imageViewType == VK_IMAGE_VIEW_TYPE_3D)
+	{
+		// \todo [2016-01-22 pyry] Mandate VK_ERROR_FORMAT_NOT_SUPPORTED
+		try
+		{
+			const VkImageFormatProperties	formatProperties	= getPhysicalDeviceImageFormatProperties(context.getInstanceInterface(),
+																										 context.getPhysicalDevice(),
+																										 imageFormat,
+																										 VK_IMAGE_TYPE_3D,
+																										 VK_IMAGE_TILING_OPTIMAL,
+																										 VK_IMAGE_USAGE_SAMPLED_BIT,
+																										 (VkImageCreateFlags)0);
+
+			if (formatProperties.maxExtent.width == 0 &&
+				formatProperties.maxExtent.height == 0 &&
+				formatProperties.maxExtent.depth == 0)
+				TCU_THROW(NotSupportedError, "3D compressed format not supported");
+		}
+		catch (const Error&)
+		{
+			TCU_THROW(NotSupportedError, "3D compressed format not supported");
+		}
+	}
+
+	// Create texture image, view and sampler
+	{
+		VkImageCreateFlags			imageFlags			= 0u;
+
+		if (m_imageViewType == VK_IMAGE_VIEW_TYPE_CUBE || m_imageViewType == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)
+			imageFlags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
+
+		// Initialize texture data
+		if (isCompressedFormat(imageFormat))
+			m_texture = createTestTexture(mapVkCompressedFormat(imageFormat), imageViewType, imageSize, layerCount);
+		else
+			m_texture = createTestTexture(mapVkFormat(imageFormat), imageViewType, imageSize, layerCount);
+
+		const VkImageCreateInfo	imageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,							// VkStructureType			sType;
+			DE_NULL,														// const void*				pNext;
+			imageFlags,														// VkImageCreateFlags		flags;
+			getCompatibleImageType(m_imageViewType),						// VkImageType				imageType;
+			imageFormat,													// VkFormat					format;
+			{																// VkExtent3D				extent;
+				(deUint32)m_imageSize.x(),
+				(deUint32)m_imageSize.y(),
+				(deUint32)m_imageSize.z()
+			},
+			(deUint32)m_texture->getNumLevels(),							// deUint32					mipLevels;
+			(deUint32)m_layerCount,											// deUint32					arrayLayers;
+			VK_SAMPLE_COUNT_1_BIT,											// VkSampleCountFlagBits	samples;
+			VK_IMAGE_TILING_OPTIMAL,										// VkImageTiling			tiling;
+			VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,	// VkImageUsageFlags		usage;
+			VK_SHARING_MODE_EXCLUSIVE,										// VkSharingMode			sharingMode;
+			1u,																// deUint32					queueFamilyIndexCount;
+			&queueFamilyIndex,												// const deUint32*			pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED										// VkImageLayout			initialLayout;
+		};
+
+		m_image			= createImage(vk, vkDevice, &imageParams);
+		m_imageAlloc	= memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_image), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_image, m_imageAlloc->getMemory(), m_imageAlloc->getOffset()));
+
+		// Upload texture data
+		uploadTestTexture(vk, vkDevice, queue, queueFamilyIndex, memAlloc, *m_texture, *m_image);
+
+		// Create image view and sampler
+		const VkImageViewCreateInfo imageViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,	// VkStructureType			sType;
+			DE_NULL,									// const void*				pNext;
+			0u,											// VkImageViewCreateFlags	flags;
+			*m_image,									// VkImage					image;
+			m_imageViewType,							// VkImageViewType			viewType;
+			imageFormat,								// VkFormat					format;
+			m_componentMapping,							// VkComponentMapping		components;
+			m_subresourceRange,							// VkImageSubresourceRange	subresourceRange;
+		};
+
+		m_imageView	= createImageView(vk, vkDevice, &imageViewParams);
+		m_sampler	= createSampler(vk, vkDevice, &m_samplerParams);
+	}
+
+	// Create descriptor set for combined image and sampler
+	{
+		DescriptorPoolBuilder descriptorPoolBuilder;
+		descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 1u);
+		m_descriptorPool = descriptorPoolBuilder.build(vk, vkDevice, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+
+		DescriptorSetLayoutBuilder setLayoutBuilder;
+		setLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT);
+		m_descriptorSetLayout = setLayoutBuilder.build(vk, vkDevice);
+
+		const VkDescriptorSetAllocateInfo descriptorSetAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,		// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			*m_descriptorPool,									// VkDescriptorPool				descriptorPool;
+			1u,													// deUint32						setLayoutCount;
+			&m_descriptorSetLayout.get()						// const VkDescriptorSetLayout*	pSetLayouts;
+		};
+
+		m_descriptorSet = allocateDescriptorSet(vk, vkDevice, &descriptorSetAllocateInfo);
+
+		const VkDescriptorImageInfo descriptorImageInfo =
+		{
+			*m_sampler,									// VkSampler		sampler;
+			*m_imageView,								// VkImageView		imageView;
+			VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL	// VkImageLayout	imageLayout;
+		};
+
+		DescriptorSetUpdateBuilder setUpdateBuilder;
+		setUpdateBuilder.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfo);
+		setUpdateBuilder.update(vk, vkDevice);
+	}
+
+	// Create color image and view
+	{
+		const VkImageCreateInfo colorImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,										// VkStructureType			sType;
+			DE_NULL,																	// const void*				pNext;
+			0u,																			// VkImageCreateFlags		flags;
+			VK_IMAGE_TYPE_2D,															// VkImageType				imageType;
+			m_colorFormat,																// VkFormat					format;
+			{ (deUint32)m_renderSize.x(), (deUint32)m_renderSize.y(), 1u },				// VkExtent3D				extent;
+			1u,																			// deUint32					mipLevels;
+			1u,																			// deUint32					arrayLayers;
+			VK_SAMPLE_COUNT_1_BIT,														// VkSampleCountFlagBits	samples;
+			VK_IMAGE_TILING_OPTIMAL,													// VkImageTiling			tiling;
+			VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,		// VkImageUsageFlags		usage;
+			VK_SHARING_MODE_EXCLUSIVE,													// VkSharingMode			sharingMode;
+			1u,																			// deUint32					queueFamilyIndexCount;
+			&queueFamilyIndex,															// const deUint32*			pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED													// VkImageLayout			initialLayout;
+		};
+
+		m_colorImage			= createImage(vk, vkDevice, &colorImageParams);
+		m_colorImageAlloc		= memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_colorImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_colorImage, m_colorImageAlloc->getMemory(), m_colorImageAlloc->getOffset()));
+
+		const VkImageViewCreateInfo colorAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			0u,													// VkImageViewCreateFlags	flags;
+			*m_colorImage,										// VkImage					image;
+			VK_IMAGE_VIEW_TYPE_2D,								// VkImageViewType			viewType;
+			m_colorFormat,										// VkFormat					format;
+			componentMappingRGBA,								// VkComponentMapping		components;
+			{ VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u }		// VkImageSubresourceRange	subresourceRange;
+		};
+
+		m_colorAttachmentView = createImageView(vk, vkDevice, &colorAttachmentViewParams);
+	}
+
+	// Create render pass
+	{
+		const VkAttachmentDescription colorAttachmentDescription =
+		{
+			0u,													// VkAttachmentDescriptionFlags		flags;
+			m_colorFormat,										// VkFormat							format;
+			VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits			samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp				loadOp;
+			VK_ATTACHMENT_STORE_OP_STORE,						// VkAttachmentStoreOp				storeOp;
+			VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// VkAttachmentLoadOp				stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp				stencilStoreOp;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout					initialLayout;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout					finalLayout;
+		};
+
+		const VkAttachmentReference colorAttachmentReference =
+		{
+			0u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout	layout;
+		};
+
+		const VkSubpassDescription subpassDescription =
+		{
+			0u,													// VkSubpassDescriptionFlags	flags;
+			VK_PIPELINE_BIND_POINT_GRAPHICS,					// VkPipelineBindPoint			pipelineBindPoint;
+			0u,													// deUint32						inputAttachmentCount;
+			DE_NULL,											// const VkAttachmentReference*	pInputAttachments;
+			1u,													// deUint32						colorAttachmentCount;
+			&colorAttachmentReference,							// const VkAttachmentReference*	pColorAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pResolveAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pDepthStencilAttachment;
+			0u,													// deUint32						preserveAttachmentCount;
+			DE_NULL												// const VkAttachmentReference*	pPreserveAttachments;
+		};
+
+		const VkRenderPassCreateInfo renderPassParams =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,			// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkRenderPassCreateFlags			flags;
+			1u,													// deUint32							attachmentCount;
+			&colorAttachmentDescription,						// const VkAttachmentDescription*	pAttachments;
+			1u,													// deUint32							subpassCount;
+			&subpassDescription,								// const VkSubpassDescription*		pSubpasses;
+			0u,													// deUint32							dependencyCount;
+			DE_NULL												// const VkSubpassDependency*		pDependencies;
+		};
+
+		m_renderPass = createRenderPass(vk, vkDevice, &renderPassParams);
+	}
+
+	// Create framebuffer
+	{
+		const VkFramebufferCreateInfo framebufferParams =
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			0u,													// VkFramebufferCreateFlags	flags;
+			*m_renderPass,										// VkRenderPass				renderPass;
+			1u,													// deUint32					attachmentCount;
+			&m_colorAttachmentView.get(),						// const VkImageView*		pAttachments;
+			(deUint32)m_renderSize.x(),							// deUint32					width;
+			(deUint32)m_renderSize.y(),							// deUint32					height;
+			1u													// deUint32					layers;
+		};
+
+		m_framebuffer = createFramebuffer(vk, vkDevice, &framebufferParams);
+	}
+
+	// Create pipeline layout
+	{
+		const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			0u,													// VkPipelineLayoutCreateFlags	flags;
+			1u,													// deUint32						setLayoutCount;
+			&m_descriptorSetLayout.get(),						// const VkDescriptorSetLayout*	pSetLayouts;
+			0u,													// deUint32						pushConstantRangeCount;
+			DE_NULL												// const VkPushConstantRange*	pPushConstantRanges;
+		};
+
+		m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	m_vertexShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("tex_vert"), 0);
+	m_fragmentShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("tex_frag"), 0);
+
+	// Create pipeline
+	{
+		const VkPipelineShaderStageCreateInfo shaderStages[2] =
+		{
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				0u,															// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_VERTEX_BIT,									// VkShaderStageFlagBits				stage;
+				*m_vertexShaderModule,										// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			},
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				0u,															// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_FRAGMENT_BIT,								// VkShaderStageFlagBits				stage;
+				*m_fragmentShaderModule,									// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			}
+		};
+
+		const VkVertexInputBindingDescription vertexInputBindingDescription =
+		{
+			0u,									// deUint32					binding;
+			sizeof(Vertex4Tex4),				// deUint32					stride;
+			VK_VERTEX_INPUT_RATE_VERTEX			// VkVertexInputRate		inputRate;
+		};
+
+		const VkVertexInputAttributeDescription vertexInputAttributeDescriptions[2] =
+		{
+			{
+				0u,										// deUint32	location;
+				0u,										// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,			// VkFormat	format;
+				0u										// deUint32	offset;
+			},
+			{
+				1u,										// deUint32	location;
+				0u,										// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,			// VkFormat	format;
+				DE_OFFSET_OF(Vertex4Tex4, texCoord),	// deUint32	offset;
+			}
+		};
+
+		const VkPipelineVertexInputStateCreateInfo vertexInputStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineVertexInputStateCreateFlags	flags;
+			1u,																// deUint32									vertexBindingDescriptionCount;
+			&vertexInputBindingDescription,									// const VkVertexInputBindingDescription*	pVertexBindingDescriptions;
+			2u,																// deUint32									vertexAttributeDescriptionCount;
+			vertexInputAttributeDescriptions								// const VkVertexInputAttributeDescription*	pVertexAttributeDescriptions;
+		};
+
+		const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineInputAssemblyStateCreateFlags	flags;
+			VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,							// VkPrimitiveTopology						topology;
+			false															// VkBool32									primitiveRestartEnable;
+		};
+
+		const VkViewport viewport =
+		{
+			0.0f,						// float	x;
+			0.0f,						// float	y;
+			(float)m_renderSize.x(),	// float	width;
+			(float)m_renderSize.y(),	// float	height;
+			0.0f,						// float	minDepth;
+			1.0f						// float	maxDepth;
+		};
+
+		const VkRect2D scissor = { { 0, 0 }, { (deUint32)m_renderSize.x(), (deUint32)m_renderSize.y() } };
+
+		const VkPipelineViewportStateCreateInfo viewportStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,			// VkStructureType						sType;
+			DE_NULL,														// const void*							pNext;
+			0u,																// VkPipelineViewportStateCreateFlags	flags;
+			1u,																// deUint32								viewportCount;
+			&viewport,														// const VkViewport*					pViewports;
+			1u,																// deUint32								scissorCount;
+			&scissor														// const VkRect2D*						pScissors;
+		};
+
+		const VkPipelineRasterizationStateCreateInfo rasterStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineRasterizationStateCreateFlags	flags;
+			false,															// VkBool32									depthClampEnable;
+			false,															// VkBool32									rasterizerDiscardEnable;
+			VK_POLYGON_MODE_FILL,											// VkPolygonMode							polygonMode;
+			VK_CULL_MODE_NONE,												// VkCullModeFlags							cullMode;
+			VK_FRONT_FACE_COUNTER_CLOCKWISE,								// VkFrontFace								frontFace;
+			false,															// VkBool32									depthBiasEnable;
+			0.0f,															// float									depthBiasConstantFactor;
+			0.0f,															// float									depthBiasClamp;
+			0.0f,															// float									depthBiasSlopeFactor;
+			1.0f															// float									lineWidth;
+		};
+
+		const VkPipelineColorBlendAttachmentState colorBlendAttachmentState =
+		{
+			false,														// VkBool32					blendEnable;
+			VK_BLEND_FACTOR_ONE,										// VkBlendFactor			srcColorBlendFactor;
+			VK_BLEND_FACTOR_ZERO,										// VkBlendFactor			dstColorBlendFactor;
+			VK_BLEND_OP_ADD,											// VkBlendOp				colorBlendOp;
+			VK_BLEND_FACTOR_ONE,										// VkBlendFactor			srcAlphaBlendFactor;
+			VK_BLEND_FACTOR_ZERO,										// VkBlendFactor			dstAlphaBlendFactor;
+			VK_BLEND_OP_ADD,											// VkBlendOp				alphaBlendOp;
+			VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |		// VkColorComponentFlags	colorWriteMask;
+				VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT
+		};
+
+		const VkPipelineColorBlendStateCreateInfo colorBlendStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	// VkStructureType								sType;
+			DE_NULL,													// const void*									pNext;
+			0u,															// VkPipelineColorBlendStateCreateFlags			flags;
+			false,														// VkBool32										logicOpEnable;
+			VK_LOGIC_OP_COPY,											// VkLogicOp									logicOp;
+			1u,															// deUint32										attachmentCount;
+			&colorBlendAttachmentState,									// const VkPipelineColorBlendAttachmentState*	pAttachments;
+			{ 0.0f, 0.0f, 0.0f, 0.0f }									// float										blendConstants[4];
+		};
+
+		const VkPipelineMultisampleStateCreateInfo multisampleStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+			VK_SAMPLE_COUNT_1_BIT,										// VkSampleCountFlagBits					rasterizationSamples;
+			false,														// VkBool32									sampleShadingEnable;
+			0.0f,														// float									minSampleShading;
+			DE_NULL,													// const VkSampleMask*						pSampleMask;
+			false,														// VkBool32									alphaToCoverageEnable;
+			false														// VkBool32									alphaToOneEnable;
+		};
+
+		VkPipelineDepthStencilStateCreateInfo depthStencilStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineDepthStencilStateCreateFlags	flags;
+			false,														// VkBool32									depthTestEnable;
+			false,														// VkBool32									depthWriteEnable;
+			VK_COMPARE_OP_LESS,											// VkCompareOp								depthCompareOp;
+			false,														// VkBool32									depthBoundsTestEnable;
+			false,														// VkBool32									stencilTestEnable;
+			{															// VkStencilOpState							front;
+				VK_STENCIL_OP_ZERO,		// VkStencilOp	failOp;
+				VK_STENCIL_OP_ZERO,		// VkStencilOp	passOp;
+				VK_STENCIL_OP_ZERO,		// VkStencilOp	depthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	compareOp;
+				0u,						// deUint32		compareMask;
+				0u,						// deUint32		writeMask;
+				0u						// deUint32		reference;
+			},
+			{															// VkStencilOpState	back;
+				VK_STENCIL_OP_ZERO,		// VkStencilOp	failOp;
+				VK_STENCIL_OP_ZERO,		// VkStencilOp	passOp;
+				VK_STENCIL_OP_ZERO,		// VkStencilOp	depthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	compareOp;
+				0u,						// deUint32		compareMask;
+				0u,						// deUint32		writeMask;
+				0u						// deUint32		reference;
+			},
+			-1.0f,														// float			minDepthBounds;
+			+1.0f														// float			maxDepthBounds;
+		};
+
+		const VkPipelineDynamicStateCreateInfo dynamicStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,		// VkStructureType						sType;
+			DE_NULL,													// const void*							pNext;
+			0u,															// VkPipelineDynamicStateCreateFlags	flags;
+			0u,															// deUint32								dynamicStateCount;
+			DE_NULL														// const VkDynamicState*				pDynamicStates;
+		};
+
+		const VkGraphicsPipelineCreateInfo graphicsPipelineParams =
+		{
+			VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,	// VkStructureType									sType;
+			DE_NULL,											// const void*										pNext;
+			0u,													// VkPipelineCreateFlags							flags;
+			2u,													// deUint32											stageCount;
+			shaderStages,										// const VkPipelineShaderStageCreateInfo*			pStages;
+			&vertexInputStateParams,							// const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+			&inputAssemblyStateParams,							// const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+			DE_NULL,											// const VkPipelineTessellationStateCreateInfo*		pTessellationState;
+			&viewportStateParams,								// const VkPipelineViewportStateCreateInfo*			pViewportState;
+			&rasterStateParams,									// const VkPipelineRasterizationStateCreateInfo*	pRasterizationState;
+			&multisampleStateParams,							// const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+			&depthStencilStateParams,							// const VkPipelineDepthStencilStateCreateInfo*		pDepthStencilState;
+			&colorBlendStateParams,								// const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+			&dynamicStateParams,								// const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+			*m_pipelineLayout,									// VkPipelineLayout									layout;
+			*m_renderPass,										// VkRenderPass										renderPass;
+			0u,													// deUint32											subpass;
+			0u,													// VkPipeline										basePipelineHandle;
+			0u													// deInt32											basePipelineIndex;
+		};
+
+		m_graphicsPipeline	= createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+	}
+
+	// Create vertex buffer
+	{
+		const VkDeviceSize			vertexBufferSize	= (VkDeviceSize)(m_vertices.size() * sizeof(Vertex4Tex4));
+		const VkBufferCreateInfo	vertexBufferParams	=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			vertexBufferSize,							// VkDeviceSize			size;
+			VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		DE_ASSERT(vertexBufferSize > 0);
+
+		m_vertexBuffer		= createBuffer(vk, vkDevice, &vertexBufferParams);
+		m_vertexBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_vertexBuffer), MemoryRequirement::HostVisible);
+
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_vertexBuffer, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset()));
+
+		// Load vertices into vertex buffer
+		deMemcpy(m_vertexBufferAlloc->getHostPtr(), &m_vertices[0], (size_t)vertexBufferSize);
+		flushMappedMemoryRange(vk, vkDevice, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset(), vertexBufferParams.size);
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,										// const void*					pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCommandPoolCreateFlags	flags;
+			queueFamilyIndex								// deUint32					queueFamilyIndex;
+		};
+
+		m_cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,												// deUint32					commandBufferCount;
+		};
+
+		const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+			DE_NULL,										// const void*						pNext;
+			0u,												// VkCommandBufferUsageFlags		flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,	// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+		};
+
+		const VkClearValue attachmentClearValue = defaultClearValue(m_colorFormat);
+
+		const VkRenderPassBeginInfo renderPassBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,				// VkStructureType		sType;
+			DE_NULL,												// const void*			pNext;
+			*m_renderPass,											// VkRenderPass			renderPass;
+			*m_framebuffer,											// VkFramebuffer		framebuffer;
+			{
+				{ 0, 0 },
+				{ (deUint32)m_renderSize.x(), (deUint32)m_renderSize.y() }
+			},														// VkRect2D				renderArea;
+			1,														// deUint32				clearValueCount;
+			&attachmentClearValue									// const VkClearValue*	pClearValues;
+		};
+
+		m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
+
+		VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+		vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+		vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipeline);
+
+		vk.cmdBindDescriptorSets(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0, 1, &m_descriptorSet.get(), 0, DE_NULL);
+
+		const VkDeviceSize vertexBufferOffset = 0;
+		vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), &vertexBufferOffset);
+		vk.cmdDraw(*m_cmdBuffer, (deUint32)m_vertices.size(), 1, 0, 0);
+
+		vk.cmdEndRenderPass(*m_cmdBuffer);
+		VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+
+		m_fence = createFence(vk, vkDevice, &fenceParams);
+	}
+}
+
+ImageSamplingInstance::~ImageSamplingInstance (void)
+{
+}
+
+tcu::TestStatus ImageSamplingInstance::iterate (void)
+{
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const VkQueue				queue		= m_context.getUniversalQueue();
+	const VkSubmitInfo			submitInfo	=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		DE_NULL,						// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity */));
+
+	return verifyImage();
+}
+
+tcu::TestStatus ImageSamplingInstance::verifyImage (void)
+{
+	const tcu::TextureFormat		colorFormat				= mapVkFormat(m_colorFormat);
+	const tcu::TextureFormat		depthStencilFormat		= tcu::TextureFormat(); // Undefined depth/stencil format.
+	const tcu::Sampler				sampler					= mapVkSampler(m_samplerParams);
+	const tcu::UVec4				componentMapping		= mapVkComponentMapping(m_componentMapping);
+	float							samplerLod;
+	bool							compareOk;
+	MovePtr<Program>				program;
+	MovePtr<ReferenceRenderer>		refRenderer;
+
+	// Set up LOD of reference sampler
+	samplerLod = de::max(m_samplerParams.minLod, de::min(m_samplerParams.maxLod, m_samplerParams.mipLodBias + m_samplerLod));
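+	// For example, with minLod = 0.0f, maxLod = 3.0f and mipLodBias = 0.0f, a requested LOD of 5.0f is clamped to 3.0f for the reference sampler.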
+
+	// Create reference program that uses image subresource range
+	program = createRefProgram(colorFormat, sampler, samplerLod, componentMapping, *m_texture, m_imageViewType, m_layerCount, m_subresourceRange);
+	const rr::Program referenceProgram = program->getReferenceProgram();
+
+	// Render reference image
+	refRenderer = MovePtr<ReferenceRenderer>(new ReferenceRenderer(m_renderSize.x(), m_renderSize.y(), 1, colorFormat, depthStencilFormat, &referenceProgram));
+	const rr::RenderState renderState(refRenderer->getViewportState());
+	refRenderer->draw(renderState, rr::PRIMITIVETYPE_TRIANGLES, m_vertices);
+
+	// Compare result with reference image
+	{
+		const DeviceInterface&		vk							= m_context.getDeviceInterface();
+		const VkDevice				vkDevice					= m_context.getDevice();
+		const VkQueue				queue						= m_context.getUniversalQueue();
+		const deUint32				queueFamilyIndex			= m_context.getUniversalQueueFamilyIndex();
+		SimpleAllocator				memAlloc					(vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
+		MovePtr<tcu::TextureLevel>	result						= readColorAttachment(vk, vkDevice, queue, queueFamilyIndex, memAlloc, *m_colorImage, m_colorFormat, m_renderSize);
+
+		compareOk = tcu::intThresholdPositionDeviationCompare(m_context.getTestContext().getLog(),
+															  "IntImageCompare",
+															  "Image comparison",
+															  refRenderer->getAccess(),
+															  result->getAccess(),
+															  tcu::UVec4(4, 4, 4, 4),
+															  tcu::IVec3(1, 1, 0),
+															  true,
+															  tcu::COMPARE_LOG_RESULT);
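+		// The comparison accepts a per-channel integer difference of up to 4 and a one-pixel position deviation in x and y between result and reference.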
+	}
+
+	if (compareOk)
+		return tcu::TestStatus::pass("Result image matches reference");
+	else
+		return tcu::TestStatus::fail("Image mismatch");
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageSamplingInstance.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageSamplingInstance.hpp
new file mode 100644
index 0000000..8b5eae6
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageSamplingInstance.hpp
@@ -0,0 +1,122 @@
+#ifndef _VKTPIPELINEIMAGESAMPLINGINSTANCE_HPP
+#define _VKTPIPELINEIMAGESAMPLINGINSTANCE_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image sampling case
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+
+#include "vktTestCase.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vktPipelineImageUtil.hpp"
+#include "vktPipelineReferenceRenderer.hpp"
+#include "vktPipelineVertexUtil.hpp"
+#include "tcuVectorUtil.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+class ImageSamplingInstance : public vkt::TestInstance
+{
+public:
+												ImageSamplingInstance	(Context&							context,
+																		 const tcu::UVec2&					renderSize,
+																		 vk::VkImageViewType				imageViewType,
+																		 vk::VkFormat						imageFormat,
+																		 const tcu::IVec3&					imageSize,
+																		 int								layerCount,
+																		 const vk::VkComponentMapping&		componentMapping,
+																		 const vk::VkImageSubresourceRange&	subresourceRange,
+																		 const vk::VkSamplerCreateInfo&		samplerParams,
+																		 float								samplerLod,
+																		 const std::vector<Vertex4Tex4>&	vertices);
+
+	virtual										~ImageSamplingInstance	(void);
+
+	virtual tcu::TestStatus						iterate					(void);
+
+protected:
+	tcu::TestStatus								verifyImage				(void);
+
+private:
+	const vk::VkImageViewType					m_imageViewType;
+	const tcu::IVec3							m_imageSize;
+	const int									m_layerCount;
+
+	const vk::VkComponentMapping				m_componentMapping;
+	const vk::VkImageSubresourceRange			m_subresourceRange;
+	const vk::VkSamplerCreateInfo				m_samplerParams;
+	const float									m_samplerLod;
+
+	vk::Move<vk::VkImage>						m_image;
+	de::MovePtr<vk::Allocation>					m_imageAlloc;
+	vk::Move<vk::VkImageView>					m_imageView;
+	vk::Move<vk::VkSampler>						m_sampler;
+	de::MovePtr<TestTexture>					m_texture;
+
+	const tcu::UVec2							m_renderSize;
+	const vk::VkFormat							m_colorFormat;
+
+	vk::Move<vk::VkDescriptorPool>				m_descriptorPool;
+	vk::Move<vk::VkDescriptorSetLayout>			m_descriptorSetLayout;
+	vk::Move<vk::VkDescriptorSet>				m_descriptorSet;
+
+	vk::Move<vk::VkImage>						m_colorImage;
+	de::MovePtr<vk::Allocation>					m_colorImageAlloc;
+	vk::Move<vk::VkImageView>					m_colorAttachmentView;
+	vk::Move<vk::VkRenderPass>					m_renderPass;
+	vk::Move<vk::VkFramebuffer>					m_framebuffer;
+
+	vk::Move<vk::VkShaderModule>				m_vertexShaderModule;
+	vk::Move<vk::VkShaderModule>				m_fragmentShaderModule;
+
+	vk::Move<vk::VkBuffer>						m_vertexBuffer;
+	std::vector<Vertex4Tex4>					m_vertices;
+	de::MovePtr<vk::Allocation>					m_vertexBufferAlloc;
+
+	vk::Move<vk::VkPipelineLayout>				m_pipelineLayout;
+	vk::Move<vk::VkPipeline>					m_graphicsPipeline;
+
+	vk::Move<vk::VkCommandPool>					m_cmdPool;
+	vk::Move<vk::VkCommandBuffer>				m_cmdBuffer;
+
+	vk::Move<vk::VkFence>						m_fence;
+};
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINEIMAGESAMPLINGINSTANCE_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageTests.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageTests.cpp
new file mode 100644
index 0000000..6fd422b
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageTests.cpp
@@ -0,0 +1,593 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineImageTests.hpp"
+#include "vktPipelineImageSamplingInstance.hpp"
+#include "vktPipelineImageUtil.hpp"
+#include "vktPipelineVertexUtil.hpp"
+#include "vktTestCase.hpp"
+#include "vkImageUtil.hpp"
+#include "vkPrograms.hpp"
+#include "tcuTextureUtil.hpp"
+#include "deStringUtil.hpp"
+
+#include <sstream>
+#include <vector>
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+using de::MovePtr;
+
+namespace
+{
+
+class ImageTest : public vkt::TestCase
+{
+public:
+							ImageTest				(tcu::TestContext&	testContext,
+													 const char*		name,
+													 const char*		description,
+													 VkImageViewType	imageViewType,
+													 VkFormat			imageFormat,
+													 const tcu::IVec3&	imageSize,
+													 int				arraySize);
+
+	virtual void			initPrograms			(SourceCollections& sourceCollections) const;
+	virtual TestInstance*	createInstance			(Context& context) const;
+	static std::string		getGlslSamplerType		(const tcu::TextureFormat& format, VkImageViewType type);
+
+private:
+	VkImageViewType			m_imageViewType;
+	VkFormat				m_imageFormat;
+	tcu::IVec3				m_imageSize;
+	int						m_arraySize;
+};
+
+ImageTest::ImageTest (tcu::TestContext&	testContext,
+					  const char*		name,
+					  const char*		description,
+					  VkImageViewType	imageViewType,
+					  VkFormat			imageFormat,
+					  const tcu::IVec3&	imageSize,
+					  int				arraySize)
+
+	: vkt::TestCase		(testContext, name, description)
+	, m_imageViewType	(imageViewType)
+	, m_imageFormat		(imageFormat)
+	, m_imageSize		(imageSize)
+	, m_arraySize		(arraySize)
+{
+}
+
+void ImageTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	std::ostringstream				vertexSrc;
+	std::ostringstream				fragmentSrc;
+	const char*						texCoordSwizzle	= DE_NULL;
+	const tcu::TextureFormat		format			= (isCompressedFormat(m_imageFormat)) ? tcu::getUncompressedFormat(mapVkCompressedFormat(m_imageFormat))
+																						  : mapVkFormat(m_imageFormat);
+
+	// \note We don't want to perform normalization on any compressed formats.
+	//		 In the case of non-sRGB LDR ASTC it would lead to a lack of coverage,
+	//		 as the uncompressed format for it is f16 but the values are already
+	//		 in the range 0..1.
+	const tcu::TextureFormatInfo	formatInfo		= (!isCompressedFormat(m_imageFormat) ? tcu::getTextureFormatInfo(format)
+																						  : tcu::getTextureFormatInfo(tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8)));
+
+	switch (m_imageViewType)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D:
+			texCoordSwizzle = "x";
+			break;
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+		case VK_IMAGE_VIEW_TYPE_2D:
+			texCoordSwizzle = "xy";
+			break;
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+		case VK_IMAGE_VIEW_TYPE_3D:
+		case VK_IMAGE_VIEW_TYPE_CUBE:
+			texCoordSwizzle = "xyz";
+			break;
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			texCoordSwizzle = "xyzw";
+			break;
+		default:
+			DE_ASSERT(false);
+			break;
+	}
+
+	vertexSrc << "#version 440\n"
+			  << "layout(location = 0) in vec4 position;\n"
+			  << "layout(location = 1) in vec4 texCoords;\n"
+			  << "layout(location = 0) out highp vec4 vtxTexCoords;\n"
+			  << "out gl_PerVertex {\n"
+			  << "	vec4 gl_Position;\n"
+			  << "};\n"
+			  << "void main (void)\n"
+			  << "{\n"
+			  << "	gl_Position = position;\n"
+			  << "	vtxTexCoords = texCoords;\n"
+			  << "}\n";
+
+	fragmentSrc << "#version 440\n"
+				<< "layout(set = 0, binding = 0) uniform highp " << getGlslSamplerType(format, m_imageViewType) << " texSampler;\n"
+				<< "layout(location = 0) in highp vec4 vtxTexCoords;\n"
+				<< "layout(location = 0) out highp vec4 fragColor;\n"
+				<< "void main (void)\n"
+				<< "{\n"
+				<< "	fragColor = (texture(texSampler, vtxTexCoords." << texCoordSwizzle << std::scientific << ") * vec4" << formatInfo.lookupScale << ") + vec4" << formatInfo.lookupBias << ";\n"
+				<< "}\n";
+
+	sourceCollections.glslSources.add("tex_vert") << glu::VertexSource(vertexSrc.str());
+	sourceCollections.glslSources.add("tex_frag") << glu::FragmentSource(fragmentSrc.str());
+}
+
+TestInstance* ImageTest::createInstance (Context& context) const
+{
+	tcu::UVec2 renderSize;
+
+	if (m_imageViewType == VK_IMAGE_VIEW_TYPE_1D || m_imageViewType == VK_IMAGE_VIEW_TYPE_2D)
+	{
+		renderSize = tcu::UVec2((deUint32)m_imageSize.x(), (deUint32)m_imageSize.y());
+	}
+	else
+	{
+		// Draw a 3x2 grid of texture layers
+		renderSize = tcu::UVec2((deUint32)m_imageSize.x() * 3, (deUint32)m_imageSize.y() * 2);
+	}
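+	// For example, a 16x16 cube map image is rendered to a 48x32 target so that all six faces fit the 3x2 grid.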
+
+	const std::vector<Vertex4Tex4>	vertices			= createTestQuadMosaic(m_imageViewType);
+	const VkComponentMapping		componentMapping	= getFormatComponentMapping(m_imageFormat);
+	const VkImageSubresourceRange	subresourceRange	=
+	{
+		VK_IMAGE_ASPECT_COLOR_BIT,
+		0u,
+		(deUint32)deLog2Floor32(deMax32(m_imageSize.x(), deMax32(m_imageSize.y(), m_imageSize.z()))) + 1,
+		0u,
+		(deUint32)m_arraySize,
+	};
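+	// The level count covers the full mip chain of the largest image dimension; for example a 32x16x1 image gets deLog2Floor32(32) + 1 = 6 mip levels.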
+
+	const VkSamplerCreateInfo samplerParams =
+	{
+		VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,									// VkStructureType			sType;
+		DE_NULL,																// const void*				pNext;
+		0u,																		// VkSamplerCreateFlags		flags;
+		VK_FILTER_NEAREST,														// VkFilter					magFilter;
+		VK_FILTER_NEAREST,														// VkFilter					minFilter;
+		VK_SAMPLER_MIPMAP_MODE_NEAREST,											// VkSamplerMipmapMode		mipmapMode;
+		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,									// VkSamplerAddressMode		addressModeU;
+		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,									// VkSamplerAddressMode		addressModeV;
+		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,									// VkSamplerAddressMode		addressModeW;
+		0.0f,																	// float					mipLodBias;
+		VK_FALSE,																// VkBool32					anisotropyEnable;
+		1.0f,																	// float					maxAnisotropy;
+		VK_FALSE,																// VkBool32					compareEnable;
+		VK_COMPARE_OP_NEVER,													// VkCompareOp				compareOp;
+		0.0f,																	// float					minLod;
+		(float)(subresourceRange.levelCount - 1),								// float					maxLod;
+		getFormatBorderColor(BORDER_COLOR_TRANSPARENT_BLACK, m_imageFormat),	// VkBorderColor			borderColor;
+		VK_FALSE																// VkBool32					unnormalizedCoordinates;
+	};
+
+	return new ImageSamplingInstance(context, renderSize, m_imageViewType, m_imageFormat, m_imageSize, m_arraySize, componentMapping, subresourceRange, samplerParams, 0.0f, vertices);
+}
+
+std::string ImageTest::getGlslSamplerType (const tcu::TextureFormat& format, VkImageViewType type)
+{
+	std::ostringstream samplerType;
+
+	if (tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)
+		samplerType << "u";
+	else if (tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER)
+		samplerType << "i";
+
+	switch (type)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D:
+			samplerType << "sampler1D";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+			samplerType << "sampler1DArray";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_2D:
+			samplerType << "sampler2D";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+			samplerType << "sampler2DArray";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_3D:
+			samplerType << "sampler3D";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_CUBE:
+			samplerType << "samplerCube";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			samplerType << "samplerCubeArray";
+			break;
+
+		default:
+			DE_FATAL("Unknown image view type");
+			break;
+	}
+
+	return samplerType.str();
+}
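+// For example, the mapped format of VK_FORMAT_R32_UINT with VK_IMAGE_VIEW_TYPE_2D_ARRAY yields "usampler2DArray", while VK_FORMAT_R8G8B8A8_UNORM with VK_IMAGE_VIEW_TYPE_CUBE yields "samplerCube".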
+
+std::string getFormatCaseName (const VkFormat format)
+{
+	const std::string	fullName	= getFormatName(format);
+
+	DE_ASSERT(de::beginsWith(fullName, "VK_FORMAT_"));
+
+	return de::toLower(fullName.substr(10));
+}
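+// For example, VK_FORMAT_R8G8B8A8_UNORM becomes the case name "r8g8b8a8_unorm".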
+
+std::string getSizeName (VkImageViewType viewType, const tcu::IVec3& size, int arraySize)
+{
+	std::ostringstream	caseName;
+
+	switch (viewType)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D:
+		case VK_IMAGE_VIEW_TYPE_2D:
+		case VK_IMAGE_VIEW_TYPE_CUBE:
+			caseName << size.x() << "x" << size.y();
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_3D:
+			caseName << size.x() << "x" << size.y() << "x" << size.z();
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			caseName << size.x() << "x" << size.y() << "_array_of_" << arraySize;
+			break;
+
+		default:
+			DE_ASSERT(false);
+			break;
+	}
+
+	return caseName.str();
+}
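+// For example, a 32x32 2D array image with 6 layers is named "32x32_array_of_6" and a 16x16x16 3D image is named "16x16x16".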
+
+de::MovePtr<tcu::TestCaseGroup> createImageSizeTests (tcu::TestContext& testCtx, VkImageViewType imageViewType, VkFormat imageFormat)
+{
+	using tcu::IVec3;
+
+	std::vector<IVec3>					imageSizes;
+	std::vector<int>					arraySizes;
+	de::MovePtr<tcu::TestCaseGroup>		imageSizeTests	(new tcu::TestCaseGroup(testCtx, "size", ""));
+
+	// Select image sizes
+	switch (imageViewType)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D:
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+			// POT
+			imageSizes.push_back(IVec3(1, 1, 1));
+			imageSizes.push_back(IVec3(2, 1, 1));
+			imageSizes.push_back(IVec3(32, 1, 1));
+			imageSizes.push_back(IVec3(128, 1, 1));
+			imageSizes.push_back(IVec3(512, 1, 1));
+
+			// NPOT
+			imageSizes.push_back(IVec3(3, 1, 1));
+			imageSizes.push_back(IVec3(13, 1, 1));
+			imageSizes.push_back(IVec3(127, 1, 1));
+			imageSizes.push_back(IVec3(443, 1, 1));
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_2D:
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+			// POT
+			imageSizes.push_back(IVec3(1, 1, 1));
+			imageSizes.push_back(IVec3(2, 2, 1));
+			imageSizes.push_back(IVec3(32, 32, 1));
+
+			// NPOT
+			imageSizes.push_back(IVec3(3, 3, 1));
+			imageSizes.push_back(IVec3(13, 13, 1));
+
+			// POT rectangular
+			imageSizes.push_back(IVec3(8, 16, 1));
+			imageSizes.push_back(IVec3(32, 16, 1));
+
+			// NPOT rectangular
+			imageSizes.push_back(IVec3(13, 23, 1));
+			imageSizes.push_back(IVec3(23, 8, 1));
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_3D:
+			// POT cube
+			imageSizes.push_back(IVec3(1, 1, 1));
+			imageSizes.push_back(IVec3(2, 2, 2));
+			imageSizes.push_back(IVec3(16, 16, 16));
+
+			// POT non-cube
+			imageSizes.push_back(IVec3(32, 16, 8));
+			imageSizes.push_back(IVec3(8, 16, 32));
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_CUBE:
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			// POT
+			imageSizes.push_back(IVec3(32, 32, 1));
+
+			// NPOT
+			imageSizes.push_back(IVec3(13, 13, 1));
+			break;
+
+		default:
+			DE_ASSERT(false);
+			break;
+	}
+
+	// Select array sizes
+	switch (imageViewType)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+			arraySizes.push_back(3);
+			arraySizes.push_back(6);
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_CUBE:
+			arraySizes.push_back(6);
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			arraySizes.push_back(6);
+			arraySizes.push_back(6 * 6);
+			break;
+
+		default:
+			arraySizes.push_back(1);
+			break;
+	}
+
+	for (size_t sizeNdx = 0; sizeNdx < imageSizes.size(); sizeNdx++)
+	{
+		for (size_t arraySizeNdx = 0; arraySizeNdx < arraySizes.size(); arraySizeNdx++)
+		{
+			imageSizeTests->addChild(new ImageTest(testCtx,
+												   getSizeName(imageViewType, imageSizes[sizeNdx], arraySizes[arraySizeNdx]).c_str(),
+												   "",
+												   imageViewType,
+												   imageFormat,
+												   imageSizes[sizeNdx],
+												   arraySizes[arraySizeNdx]));
+		}
+	}
+
+	return imageSizeTests;
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createImageTests (tcu::TestContext& testCtx)
+{
+	const struct
+	{
+		VkImageViewType		type;
+		const char*			name;
+	}
+	imageViewTypes[] =
+	{
+		{ VK_IMAGE_VIEW_TYPE_1D,			"1d" },
+		{ VK_IMAGE_VIEW_TYPE_1D_ARRAY,		"1d_array" },
+		{ VK_IMAGE_VIEW_TYPE_2D,			"2d" },
+		{ VK_IMAGE_VIEW_TYPE_2D_ARRAY,		"2d_array" },
+		{ VK_IMAGE_VIEW_TYPE_3D,			"3d" },
+		{ VK_IMAGE_VIEW_TYPE_CUBE,			"cube" },
+		{ VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,	"cube_array" }
+	};
+
+	// All supported dEQP formats that are not intended for depth or stencil.
+	const VkFormat formats[] =
+	{
+		VK_FORMAT_R4G4_UNORM_PACK8,
+		VK_FORMAT_R4G4B4A4_UNORM_PACK16,
+		VK_FORMAT_R5G6B5_UNORM_PACK16,
+		VK_FORMAT_R5G5B5A1_UNORM_PACK16,
+		VK_FORMAT_R8_UNORM,
+		VK_FORMAT_R8_SNORM,
+		VK_FORMAT_R8_USCALED,
+		VK_FORMAT_R8_SSCALED,
+		VK_FORMAT_R8_UINT,
+		VK_FORMAT_R8_SINT,
+		VK_FORMAT_R8_SRGB,
+		VK_FORMAT_R8G8_UNORM,
+		VK_FORMAT_R8G8_SNORM,
+		VK_FORMAT_R8G8_USCALED,
+		VK_FORMAT_R8G8_SSCALED,
+		VK_FORMAT_R8G8_UINT,
+		VK_FORMAT_R8G8_SINT,
+		VK_FORMAT_R8G8_SRGB,
+		VK_FORMAT_R8G8B8_UNORM,
+		VK_FORMAT_R8G8B8_SNORM,
+		VK_FORMAT_R8G8B8_USCALED,
+		VK_FORMAT_R8G8B8_SSCALED,
+		VK_FORMAT_R8G8B8_UINT,
+		VK_FORMAT_R8G8B8_SINT,
+		VK_FORMAT_R8G8B8_SRGB,
+		VK_FORMAT_R8G8B8A8_UNORM,
+		VK_FORMAT_R8G8B8A8_SNORM,
+		VK_FORMAT_R8G8B8A8_USCALED,
+		VK_FORMAT_R8G8B8A8_SSCALED,
+		VK_FORMAT_R8G8B8A8_UINT,
+		VK_FORMAT_R8G8B8A8_SINT,
+		VK_FORMAT_R8G8B8A8_SRGB,
+		VK_FORMAT_A2R10G10B10_UNORM_PACK32,
+		VK_FORMAT_A2R10G10B10_UINT_PACK32,
+		VK_FORMAT_A2R10G10B10_USCALED_PACK32,
+		VK_FORMAT_R16_UNORM,
+		VK_FORMAT_R16_SNORM,
+		VK_FORMAT_R16_USCALED,
+		VK_FORMAT_R16_SSCALED,
+		VK_FORMAT_R16_UINT,
+		VK_FORMAT_R16_SINT,
+		VK_FORMAT_R16_SFLOAT,
+		VK_FORMAT_R16G16_UNORM,
+		VK_FORMAT_R16G16_SNORM,
+		VK_FORMAT_R16G16_USCALED,
+		VK_FORMAT_R16G16_SSCALED,
+		VK_FORMAT_R16G16_UINT,
+		VK_FORMAT_R16G16_SINT,
+		VK_FORMAT_R16G16_SFLOAT,
+		VK_FORMAT_R16G16B16_UNORM,
+		VK_FORMAT_R16G16B16_SNORM,
+		VK_FORMAT_R16G16B16_USCALED,
+		VK_FORMAT_R16G16B16_SSCALED,
+		VK_FORMAT_R16G16B16_UINT,
+		VK_FORMAT_R16G16B16_SINT,
+		VK_FORMAT_R16G16B16_SFLOAT,
+		VK_FORMAT_R16G16B16A16_UNORM,
+		VK_FORMAT_R16G16B16A16_SNORM,
+		VK_FORMAT_R16G16B16A16_USCALED,
+		VK_FORMAT_R16G16B16A16_SSCALED,
+		VK_FORMAT_R16G16B16A16_UINT,
+		VK_FORMAT_R16G16B16A16_SINT,
+		VK_FORMAT_R16G16B16A16_SFLOAT,
+		VK_FORMAT_R32_UINT,
+		VK_FORMAT_R32_SINT,
+		VK_FORMAT_R32_SFLOAT,
+		VK_FORMAT_R32G32_UINT,
+		VK_FORMAT_R32G32_SINT,
+		VK_FORMAT_R32G32_SFLOAT,
+		VK_FORMAT_R32G32B32_UINT,
+		VK_FORMAT_R32G32B32_SINT,
+		VK_FORMAT_R32G32B32_SFLOAT,
+		VK_FORMAT_R32G32B32A32_UINT,
+		VK_FORMAT_R32G32B32A32_SINT,
+		VK_FORMAT_R32G32B32A32_SFLOAT,
+		VK_FORMAT_B10G11R11_UFLOAT_PACK32,
+		VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
+		VK_FORMAT_B4G4R4A4_UNORM_PACK16,
+		VK_FORMAT_B5G5R5A1_UNORM_PACK16,
+
+		// Compressed formats
+		VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
+		VK_FORMAT_EAC_R11_UNORM_BLOCK,
+		VK_FORMAT_EAC_R11_SNORM_BLOCK,
+		VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
+		VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
+		VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
+		VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
+		VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
+		VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
+		VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
+		VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
+		VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
+		VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
+		VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
+		VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
+		VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
+		VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
+		VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
+		VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
+		VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
+		VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
+		VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
+		VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
+		VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
+		VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
+		VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
+		VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
+		VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
+		VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
+		VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
+		VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
+		VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
+		VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
+	};
+
+	de::MovePtr<tcu::TestCaseGroup> imageTests			(new tcu::TestCaseGroup(testCtx, "image", "Image tests"));
+	de::MovePtr<tcu::TestCaseGroup> viewTypeTests		(new tcu::TestCaseGroup(testCtx, "view_type", ""));
+
+	for (int viewTypeNdx = 0; viewTypeNdx < DE_LENGTH_OF_ARRAY(imageViewTypes); viewTypeNdx++)
+	{
+		const VkImageViewType			viewType		= imageViewTypes[viewTypeNdx].type;
+		de::MovePtr<tcu::TestCaseGroup>	viewTypeGroup	(new tcu::TestCaseGroup(testCtx, imageViewTypes[viewTypeNdx].name, (std::string("Uses a ") + imageViewTypes[viewTypeNdx].name + " view").c_str()));
+		de::MovePtr<tcu::TestCaseGroup>	formatTests		(new tcu::TestCaseGroup(testCtx, "format", "Tests samplable formats"));
+
+		for (size_t formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(formats); formatNdx++)
+		{
+			const VkFormat	format	= formats[formatNdx];
+
+			if (isCompressedFormat(format))
+			{
+				// Do not use compressed formats with 1D and 1D array textures.
+				// Compressed formats are listed last in the format table, so the remaining formats can be skipped entirely.
+				if (viewType == VK_IMAGE_VIEW_TYPE_1D || viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY)
+					break;
+			}
+
+			de::MovePtr<tcu::TestCaseGroup>	formatGroup	(new tcu::TestCaseGroup(testCtx,
+																				getFormatCaseName(format).c_str(),
+																				(std::string("Samples a texture of format ") + getFormatName(format)).c_str()));
+
+			de::MovePtr<tcu::TestCaseGroup> sizeTests = createImageSizeTests(testCtx, viewType, format);
+
+			formatGroup->addChild(sizeTests.release());
+			formatTests->addChild(formatGroup.release());
+		}
+
+		viewTypeGroup->addChild(formatTests.release());
+		viewTypeTests->addChild(viewTypeGroup.release());
+	}
+
+	imageTests->addChild(viewTypeTests.release());
+
+	return imageTests.release();
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageTests.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageTests.hpp
new file mode 100644
index 0000000..31bf516
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTPIPELINEIMAGETESTS_HPP
+#define _VKTPIPELINEIMAGETESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+tcu::TestCaseGroup* createImageTests (tcu::TestContext& testCtx);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINEIMAGETESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageUtil.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageUtil.cpp
new file mode 100644
index 0000000..b18b317
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageUtil.cpp
@@ -0,0 +1,1190 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Utilities for images.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineImageUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRefUtil.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuAstcUtil.hpp"
+#include "deRandom.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+
+/*! Gets the smallest multiple of divisor that is greater than or equal to value */
+static deUint32 getNextMultiple (deUint32 divisor, deUint32 value)
+{
+	if (value % divisor == 0)
+	{
+		return value;
+	}
+	return value + divisor - (value % divisor);
+}
+
+/*! Gets the next value that is multiple of all given divisors */
+static deUint32 getNextMultiple (const std::vector<deUint32>& divisors, deUint32 value)
+{
+	deUint32	nextMultiple		= value;
+	bool		nextMultipleFound	= false;
+
+	while (true)
+	{
+		nextMultipleFound = true;
+
+		for (size_t divNdx = 0; divNdx < divisors.size(); divNdx++)
+			nextMultipleFound = nextMultipleFound && (nextMultiple % divisors[divNdx] == 0);
+
+		if (nextMultipleFound)
+			break;
+
+		DE_ASSERT(nextMultiple < ~((deUint32)0u));
+		nextMultiple = getNextMultiple(divisors[0], nextMultiple + 1);
+	}
+
+	return nextMultiple;
+}
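+// For example, getNextMultiple(4u, 13u) returns 16, and with divisors {3, 4} a starting value of 13 yields 24, the next value divisible by both.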
+
+bool isSupportedSamplableFormat (const InstanceInterface& instanceInterface, VkPhysicalDevice device, VkFormat format)
+{
+	if (isCompressedFormat(format))
+	{
+		VkPhysicalDeviceFeatures		physicalFeatures;
+		const tcu::CompressedTexFormat	compressedFormat	= mapVkCompressedFormat(format);
+
+		instanceInterface.getPhysicalDeviceFeatures(device, &physicalFeatures);
+
+		if (tcu::isAstcFormat(compressedFormat))
+		{
+			if (!physicalFeatures.textureCompressionASTC_LDR)
+				return false;
+		}
+		else if (tcu::isEtcFormat(compressedFormat))
+		{
+			if (!physicalFeatures.textureCompressionETC2)
+				return false;
+		}
+		else
+		{
+			DE_FATAL("Unsupported compressed format");
+		}
+	}
+
+	VkFormatProperties	formatProps;
+	instanceInterface.getPhysicalDeviceFormatProperties(device, format, &formatProps);
+
+	return (formatProps.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) != 0u;
+}
+
+// \todo [2016-01-21 pyry] Update this to just rely on vkDefs.hpp once
+//						   CTS has been updated to 1.0.2.
+enum
+{
+	VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000,
+};
+
+bool isLinearFilteringSupported (const InstanceInterface& vki, VkPhysicalDevice physicalDevice, VkFormat format, VkImageTiling tiling)
+{
+	const VkFormatProperties	formatProperties	= getPhysicalDeviceFormatProperties(vki, physicalDevice, format);
+	const VkFormatFeatureFlags	formatFeatures		= tiling == VK_IMAGE_TILING_LINEAR
+													? formatProperties.linearTilingFeatures
+													: formatProperties.optimalTilingFeatures;
+
+	switch (format)
+	{
+		case VK_FORMAT_R32_SFLOAT:
+		case VK_FORMAT_R32G32_SFLOAT:
+		case VK_FORMAT_R32G32B32_SFLOAT:
+		case VK_FORMAT_R32G32B32A32_SFLOAT:
+		case VK_FORMAT_R64_SFLOAT:
+		case VK_FORMAT_R64G64_SFLOAT:
+		case VK_FORMAT_R64G64B64_SFLOAT:
+		case VK_FORMAT_R64G64B64A64_SFLOAT:
+			return (formatFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT) != 0;
+
+		default:
+			// \todo [2016-01-21 pyry] Check for all formats once drivers have been updated to 1.0.2
+			//						   and we have tests to verify format properties.
+			return true;
+	}
+}
+
+VkBorderColor getFormatBorderColor (BorderColor color, VkFormat format)
+{
+	if (!isCompressedFormat(format) && (isIntFormat(format) || isUintFormat(format)))
+	{
+		switch (color)
+		{
+			case BORDER_COLOR_OPAQUE_BLACK:			return VK_BORDER_COLOR_INT_OPAQUE_BLACK;
+			case BORDER_COLOR_OPAQUE_WHITE:			return VK_BORDER_COLOR_INT_OPAQUE_WHITE;
+			case BORDER_COLOR_TRANSPARENT_BLACK:	return VK_BORDER_COLOR_INT_TRANSPARENT_BLACK;
+			default:
+				break;
+		}
+	}
+	else
+	{
+		switch (color)
+		{
+			case BORDER_COLOR_OPAQUE_BLACK:			return VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK;
+			case BORDER_COLOR_OPAQUE_WHITE:			return VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
+			case BORDER_COLOR_TRANSPARENT_BLACK:	return VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK;
+			default:
+				break;
+		}
+	}
+
+	DE_ASSERT(false);
+	return VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK;
+}
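+
+// For example (illustrative): with an integer format such as VK_FORMAT_R8G8B8A8_UINT,
+// BORDER_COLOR_OPAQUE_WHITE maps to VK_BORDER_COLOR_INT_OPAQUE_WHITE, while for
+// VK_FORMAT_R8G8B8A8_UNORM the same enum value maps to VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE.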
+
+de::MovePtr<tcu::TextureLevel> readColorAttachment (const vk::DeviceInterface&	vk,
+													vk::VkDevice				device,
+													vk::VkQueue					queue,
+													deUint32					queueFamilyIndex,
+													vk::Allocator&				allocator,
+													vk::VkImage					image,
+													vk::VkFormat				format,
+													const tcu::UVec2&			renderSize)
+{
+	Move<VkBuffer>					buffer;
+	de::MovePtr<Allocation>			bufferAlloc;
+	Move<VkCommandPool>				cmdPool;
+	Move<VkCommandBuffer>			cmdBuffer;
+	Move<VkFence>					fence;
+	const tcu::TextureFormat		tcuFormat		= mapVkFormat(format);
+	const VkDeviceSize				pixelDataSize	= renderSize.x() * renderSize.y() * tcuFormat.getPixelSize();
+	de::MovePtr<tcu::TextureLevel>	resultLevel		(new tcu::TextureLevel(tcuFormat, renderSize.x(), renderSize.y()));
+
+	// Create destination buffer
+	{
+		const VkBufferCreateInfo bufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			pixelDataSize,								// VkDeviceSize			size;
+			VK_BUFFER_USAGE_TRANSFER_DST_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			0u,											// deUint32				queueFamilyIndexCount;
+			DE_NULL										// const deUint32*		pQueueFamilyIndices;
+		};
+
+		buffer		= createBuffer(vk, device, &bufferParams);
+		bufferAlloc = allocator.allocate(getBufferMemoryRequirements(vk, device, *buffer), MemoryRequirement::HostVisible);
+		VK_CHECK(vk.bindBufferMemory(device, *buffer, bufferAlloc->getMemory(), bufferAlloc->getOffset()));
+	}
+
+	// Create command pool and buffer
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,										// const void*			pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCommandPoolCreateFlags	flags;
+			queueFamilyIndex,								// deUint32				queueFamilyIndex;
+		};
+
+		cmdPool = createCommandPool(vk, device, &cmdPoolParams);
+
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u												// deUint32					commandBufferCount;
+		};
+
+		cmdBuffer = allocateCommandBuffer(vk, device, &cmdBufferAllocateInfo);
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u											// VkFenceCreateFlags	flags;
+		};
+
+		fence = createFence(vk, device, &fenceParams);
+	}
+
+	// Barriers for copying image to buffer
+
+	const VkImageMemoryBarrier imageBarrier =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// VkStructureType			sType;
+		DE_NULL,									// const void*				pNext;
+		VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,		// VkAccessFlags			srcAccessMask;
+		VK_ACCESS_TRANSFER_READ_BIT,				// VkAccessFlags			dstAccessMask;
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	// VkImageLayout			oldLayout;
+		VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,		// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32					dstQueueFamilyIndex;
+		image,										// VkImage					image;
+		{											// VkImageSubresourceRange	subresourceRange;
+			VK_IMAGE_ASPECT_COLOR_BIT,	// VkImageAspectFlags	aspectMask;
+			0u,							// deUint32				baseMipLevel;
+			1u,							// deUint32				levelCount;
+			0u,							// deUint32				baseArrayLayer;
+			1u							// deUint32				layerCount;
+		}
+	};
+
+	const VkBufferMemoryBarrier bufferBarrier =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,	// VkStructureType	sType;
+		DE_NULL,									// const void*		pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,				// VkAccessFlags	srcAccessMask;
+		VK_ACCESS_HOST_READ_BIT,					// VkAccessFlags	dstAccessMask;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			dstQueueFamilyIndex;
+		*buffer,									// VkBuffer			buffer;
+		0u,											// VkDeviceSize		offset;
+		pixelDataSize								// VkDeviceSize		size;
+	};
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,			// VkStructureType					sType;
+		DE_NULL,												// const void*						pNext;
+		VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,			// VkCommandBufferUsageFlags		flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,			// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+	};
+
+	// Copy image to buffer
+
+	const VkBufferImageCopy copyRegion =
+	{
+		0u,												// VkDeviceSize				bufferOffset;
+		(deUint32)renderSize.x(),						// deUint32					bufferRowLength;
+		(deUint32)renderSize.y(),						// deUint32					bufferImageHeight;
+		{ VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u },		// VkImageSubresourceLayers	imageSubresource;
+		{ 0, 0, 0 },									// VkOffset3D				imageOffset;
+		{ renderSize.x(), renderSize.y(), 1u }			// VkExtent3D				imageExtent;
+	};
+
+	VK_CHECK(vk.beginCommandBuffer(*cmdBuffer, &cmdBufferBeginInfo));
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
+	vk.cmdCopyImageToBuffer(*cmdBuffer, image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *buffer, 1, &copyRegion);
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+	VK_CHECK(vk.endCommandBuffer(*cmdBuffer));
+
+	const VkSubmitInfo submitInfo =
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		DE_NULL,						// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *fence));
+	VK_CHECK(vk.waitForFences(device, 1, &fence.get(), 0, ~(0ull) /* infinity */));
+
+	// Read buffer data
+	invalidateMappedMemoryRange(vk, device, bufferAlloc->getMemory(), bufferAlloc->getOffset(), pixelDataSize);
+	tcu::copy(*resultLevel, tcu::ConstPixelBufferAccess(resultLevel->getFormat(), resultLevel->getSize(), bufferAlloc->getHostPtr()));
+
+	return resultLevel;
+}
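+
+// Usage sketch (illustrative; 'colorImage', 'colorFormat' and 'renderSize' are assumed to
+// come from the calling test):
+//   de::MovePtr<tcu::TextureLevel> result = readColorAttachment(vk, device, queue, queueFamilyIndex,
+//                                                               allocator, *colorImage, colorFormat, renderSize);
+//   const tcu::ConstPixelBufferAccess access = result->getAccess(); // compare against a reference image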
+
+void uploadTestTexture (const DeviceInterface&			vk,
+						VkDevice						device,
+						VkQueue							queue,
+						deUint32						queueFamilyIndex,
+						Allocator&						allocator,
+						const TestTexture&				srcTexture,
+						VkImage							destImage)
+{
+	deUint32						bufferSize;
+	Move<VkBuffer>					buffer;
+	de::MovePtr<Allocation>			bufferAlloc;
+	Move<VkCommandPool>				cmdPool;
+	Move<VkCommandBuffer>			cmdBuffer;
+	Move<VkFence>					fence;
+	std::vector<deUint32>			levelDataSizes;
+
+	// Calculate buffer size
+	bufferSize = srcTexture.isCompressed() ? srcTexture.getCompressedSize() : srcTexture.getSize();
+
+	// Create source buffer
+	{
+		const VkBufferCreateInfo bufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			bufferSize,									// VkDeviceSize			size;
+			VK_BUFFER_USAGE_TRANSFER_SRC_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			0u,											// deUint32				queueFamilyIndexCount;
+			DE_NULL,									// const deUint32*		pQueueFamilyIndices;
+		};
+
+		buffer		= createBuffer(vk, device, &bufferParams);
+		bufferAlloc = allocator.allocate(getBufferMemoryRequirements(vk, device, *buffer), MemoryRequirement::HostVisible);
+		VK_CHECK(vk.bindBufferMemory(device, *buffer, bufferAlloc->getMemory(), bufferAlloc->getOffset()));
+	}
+
+	// Create command pool and buffer
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCommandPoolCreateFlags	flags;
+			queueFamilyIndex,								// deUint32					queueFamilyIndex;
+		};
+
+		cmdPool = createCommandPool(vk, device, &cmdPoolParams);
+
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,												// deUint32					commandBufferCount;
+		};
+
+		cmdBuffer = allocateCommandBuffer(vk, device, &cmdBufferAllocateInfo);
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u											// VkFenceCreateFlags	flags;
+		};
+
+		fence = createFence(vk, device, &fenceParams);
+	}
+
+	// Barriers for copying buffer to image
+	const VkBufferMemoryBarrier preBufferBarrier =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,	// VkStructureType	sType;
+		DE_NULL,									// const void*		pNext;
+		VK_ACCESS_HOST_WRITE_BIT,					// VkAccessFlags	srcAccessMask;
+		VK_ACCESS_TRANSFER_READ_BIT,				// VkAccessFlags	dstAccessMask;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			dstQueueFamilyIndex;
+		*buffer,									// VkBuffer			buffer;
+		0u,											// VkDeviceSize		offset;
+		bufferSize									// VkDeviceSize		size;
+	};
+
+	const VkImageMemoryBarrier preImageBarrier =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,			// VkStructureType			sType;
+		DE_NULL,										// const void*				pNext;
+		0u,												// VkAccessFlags			srcAccessMask;
+		0u,												// VkAccessFlags			dstAccessMask;
+		VK_IMAGE_LAYOUT_UNDEFINED,						// VkImageLayout			oldLayout;
+		VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,			// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					dstQueueFamilyIndex;
+		destImage,										// VkImage					image;
+		{												// VkImageSubresourceRange	subresourceRange;
+			VK_IMAGE_ASPECT_COLOR_BIT,				// VkImageAspectFlags	aspectMask;
+			0u,										// deUint32				baseMipLevel;
+			(deUint32)srcTexture.getNumLevels(),	// deUint32				levelCount;
+			0u,										// deUint32				baseArrayLayer;
+			(deUint32)srcTexture.getArraySize(),	// deUint32				layerCount;
+		}
+	};
+
+	const VkImageMemoryBarrier postImageBarrier =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,			// VkStructureType			sType;
+		DE_NULL,										// const void*				pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,					// VkAccessFlags			srcAccessMask;
+		VK_ACCESS_SHADER_READ_BIT,						// VkAccessFlags			dstAccessMask;
+		VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,			// VkImageLayout			oldLayout;
+		VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,		// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					dstQueueFamilyIndex;
+		destImage,										// VkImage					image;
+		{												// VkImageSubresourceRange	subresourceRange;
+			VK_IMAGE_ASPECT_COLOR_BIT,				// VkImageAspectFlags	aspectMask;
+			0u,										// deUint32				baseMipLevel;
+			(deUint32)srcTexture.getNumLevels(),	// deUint32				levelCount;
+			0u,										// deUint32				baseArrayLayer;
+			(deUint32)srcTexture.getArraySize(),	// deUint32				layerCount;
+		}
+	};
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+		DE_NULL,										// const void*						pNext;
+		VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,	// VkCommandBufferUsageFlags		flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,	// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+	};
+
+	const std::vector<VkBufferImageCopy>	copyRegions		= srcTexture.getBufferCopyRegions();
+
+	// Write buffer data
+	srcTexture.write(reinterpret_cast<deUint8*>(bufferAlloc->getHostPtr()));
+	flushMappedMemoryRange(vk, device, bufferAlloc->getMemory(), bufferAlloc->getOffset(), bufferSize);
+
+	// Copy buffer to image
+	VK_CHECK(vk.beginCommandBuffer(*cmdBuffer, &cmdBufferBeginInfo));
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &preBufferBarrier, 1, &preImageBarrier);
+	vk.cmdCopyBufferToImage(*cmdBuffer, *buffer, destImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (deUint32)copyRegions.size(), copyRegions.data());
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
+
+	VK_CHECK(vk.endCommandBuffer(*cmdBuffer));
+
+	const VkSubmitInfo submitInfo =
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		DE_NULL,						// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *fence));
+	VK_CHECK(vk.waitForFences(device, 1, &fence.get(), true, ~(0ull) /* infinity */));
+}
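+
+// Usage sketch (illustrative; the destination image is assumed to have been created with
+// VK_IMAGE_USAGE_TRANSFER_DST_BIT and a matching format and extent):
+//   TestTexture2D texture (mapVkFormat(VK_FORMAT_R8G8B8A8_UNORM), 32, 32);
+//   uploadTestTexture(vk, device, queue, queueFamilyIndex, allocator, texture, *image);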
+
+
+// Utilities for test textures
+
+template<typename TcuTextureType>
+void allocateLevels (TcuTextureType& texture)
+{
+	for (int levelNdx = 0; levelNdx < texture.getNumLevels(); levelNdx++)
+		texture.allocLevel(levelNdx);
+}
+
+template<typename TcuTextureType>
+std::vector<tcu::PixelBufferAccess> getLevelsVector (const TcuTextureType& texture)
+{
+	std::vector<tcu::PixelBufferAccess> levels(texture.getNumLevels());
+
+	for (int levelNdx = 0; levelNdx < texture.getNumLevels(); levelNdx++)
+		levels[levelNdx] = *reinterpret_cast<const tcu::PixelBufferAccess*>(&texture.getLevel(levelNdx));
+
+	return levels;
+}
+
+
+// TestTexture
+
+TestTexture::TestTexture (const tcu::TextureFormat& format, int width, int height, int depth)
+{
+	DE_ASSERT(width >= 1);
+	DE_ASSERT(height >= 1);
+	DE_ASSERT(depth >= 1);
+
+	DE_UNREF(format);
+	DE_UNREF(width);
+	DE_UNREF(height);
+	DE_UNREF(depth);
+}
+
+TestTexture::TestTexture (const tcu::CompressedTexFormat& format, int width, int height, int depth)
+{
+	DE_ASSERT(width >= 1);
+	DE_ASSERT(height >= 1);
+	DE_ASSERT(depth >= 1);
+
+	DE_UNREF(format);
+	DE_UNREF(width);
+	DE_UNREF(height);
+	DE_UNREF(depth);
+}
+
+TestTexture::~TestTexture (void)
+{
+	for (size_t levelNdx = 0; levelNdx < m_compressedLevels.size(); levelNdx++)
+		delete m_compressedLevels[levelNdx];
+}
+
+deUint32 TestTexture::getSize (void) const
+{
+	std::vector<deUint32>	offsetMultiples;
+	deUint32				textureSize = 0;
+
+	offsetMultiples.push_back(4);
+	offsetMultiples.push_back(getLevel(0, 0).getFormat().getPixelSize());
+
+	for (int levelNdx = 0; levelNdx < getNumLevels(); levelNdx++)
+	{
+		for (int layerNdx = 0; layerNdx < getArraySize(); layerNdx++)
+		{
+			const tcu::ConstPixelBufferAccess level = getLevel(levelNdx, layerNdx);
+			textureSize = getNextMultiple(offsetMultiples, textureSize);
+			textureSize += level.getWidth() * level.getHeight() * level.getDepth() * level.getFormat().getPixelSize();
+		}
+	}
+
+	return textureSize;
+}
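+
+// Worked example (illustrative): for a 4x4 RGBA8 single-layer 2D texture with a full mip
+// chain (4x4, 2x2, 1x1), the per-level sizes are 64, 16 and 4 bytes, each already aligned
+// to the 4-byte offset multiples above, so getSize() returns 84.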
+
+deUint32 TestTexture::getCompressedSize (void) const
+{
+	if (!isCompressed())
+		throw tcu::InternalError("Texture is not compressed");
+
+	std::vector<deUint32>	offsetMultiples;
+	deUint32				textureSize			= 0;
+
+	offsetMultiples.push_back(4);
+	offsetMultiples.push_back(tcu::getBlockSize(getCompressedLevel(0, 0).getFormat()));
+
+	for (int levelNdx = 0; levelNdx < getNumLevels(); levelNdx++)
+	{
+		for (int layerNdx = 0; layerNdx < getArraySize(); layerNdx++)
+		{
+			textureSize = getNextMultiple(offsetMultiples, textureSize);
+			textureSize += getCompressedLevel(levelNdx, layerNdx).getDataSize();
+		}
+	}
+
+	return textureSize;
+}
+
+tcu::CompressedTexture& TestTexture::getCompressedLevel (int level, int layer)
+{
+	DE_ASSERT(level >= 0 && level < getNumLevels());
+	DE_ASSERT(layer >= 0 && layer < getArraySize());
+
+	return *m_compressedLevels[level * getArraySize() + layer];
+}
+
+const tcu::CompressedTexture& TestTexture::getCompressedLevel (int level, int layer) const
+{
+	DE_ASSERT(level >= 0 && level < getNumLevels());
+	DE_ASSERT(layer >= 0 && layer < getArraySize());
+
+	return *m_compressedLevels[level * getArraySize() + layer];
+}
+
+std::vector<VkBufferImageCopy> TestTexture::getBufferCopyRegions (void) const
+{
+	std::vector<deUint32>			offsetMultiples;
+	std::vector<VkBufferImageCopy>	regions;
+	deUint32						layerDataOffset	= 0;
+
+	offsetMultiples.push_back(4);
+
+	if (isCompressed())
+	{
+		offsetMultiples.push_back(tcu::getBlockSize(getCompressedLevel(0, 0).getFormat()));
+
+		for (int levelNdx = 0; levelNdx < getNumLevels(); levelNdx++)
+		{
+			for (int layerNdx = 0; layerNdx < getArraySize(); layerNdx++)
+			{
+				const tcu::CompressedTexture& level = getCompressedLevel(levelNdx, layerNdx);
+				tcu::IVec3 blockPixelSize			= getBlockPixelSize(level.getFormat());
+				layerDataOffset						= getNextMultiple(offsetMultiples, layerDataOffset);
+
+				const VkBufferImageCopy layerRegion =
+				{
+					layerDataOffset,													// VkDeviceSize				bufferOffset;
+					(deUint32)getNextMultiple(blockPixelSize.x(), level.getWidth()),	// deUint32					bufferRowLength;
+					(deUint32)getNextMultiple(blockPixelSize.y(), level.getHeight()),	// deUint32					bufferImageHeight;
+					{																	// VkImageSubresourceLayers	imageSubresource;
+						VK_IMAGE_ASPECT_COLOR_BIT,
+						(deUint32)levelNdx,
+						(deUint32)layerNdx,
+						1u
+					},
+					{ 0u, 0u, 0u },							// VkOffset3D				imageOffset;
+					{										// VkExtent3D				imageExtent;
+						(deUint32)level.getWidth(),
+						(deUint32)level.getHeight(),
+						(deUint32)level.getDepth()
+					}
+				};
+
+				regions.push_back(layerRegion);
+				layerDataOffset += level.getDataSize();
+			}
+		}
+	}
+	else
+	{
+		offsetMultiples.push_back(getLevel(0, 0).getFormat().getPixelSize());
+
+		for (int levelNdx = 0; levelNdx < getNumLevels(); levelNdx++)
+		{
+			for (int layerNdx = 0; layerNdx < getArraySize(); layerNdx++)
+			{
+				const tcu::ConstPixelBufferAccess level = getLevel(levelNdx, layerNdx);
+
+				layerDataOffset = getNextMultiple(offsetMultiples, layerDataOffset);
+
+				const VkBufferImageCopy layerRegion =
+				{
+					layerDataOffset,						// VkDeviceSize				bufferOffset;
+					(deUint32)level.getWidth(),				// deUint32					bufferRowLength;
+					(deUint32)level.getHeight(),			// deUint32					bufferImageHeight;
+					{										// VkImageSubresourceLayers	imageSubresource;
+						VK_IMAGE_ASPECT_COLOR_BIT,
+						(deUint32)levelNdx,
+						(deUint32)layerNdx,
+						1u
+					},
+					{ 0u, 0u, 0u },							// VkOffset3D			imageOffset;
+					{										// VkExtent3D			imageExtent;
+						(deUint32)level.getWidth(),
+						(deUint32)level.getHeight(),
+						(deUint32)level.getDepth()
+					}
+				};
+
+				regions.push_back(layerRegion);
+				layerDataOffset += level.getWidth() * level.getHeight() * level.getDepth() * level.getFormat().getPixelSize();
+			}
+		}
+	}
+
+	return regions;
+}
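+
+// Worked example (illustrative): for an uncompressed 4x4 RGBA8 single-layer texture with a
+// full mip chain, the three returned regions use bufferOffsets 0, 64 and 80 and imageExtents
+// 4x4x1, 2x2x1 and 1x1x1.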
+
+void TestTexture::write (deUint8* destPtr) const
+{
+	std::vector<deUint32>	offsetMultiples;
+	deUint32				levelOffset		= 0;
+
+	offsetMultiples.push_back(4);
+
+	if (isCompressed())
+	{
+		offsetMultiples.push_back(tcu::getBlockSize(getCompressedLevel(0, 0).getFormat()));
+
+		for (int levelNdx = 0; levelNdx < getNumLevels(); levelNdx++)
+		{
+			for (int layerNdx = 0; layerNdx < getArraySize(); layerNdx++)
+			{
+				levelOffset = getNextMultiple(offsetMultiples, levelOffset);
+
+				const tcu::CompressedTexture&		compressedTex	= getCompressedLevel(levelNdx, layerNdx);
+
+				deMemcpy(destPtr + levelOffset, compressedTex.getData(), compressedTex.getDataSize());
+				levelOffset += compressedTex.getDataSize();
+			}
+		}
+	}
+	else
+	{
+		offsetMultiples.push_back(getLevel(0, 0).getFormat().getPixelSize());
+
+		for (int levelNdx = 0; levelNdx < getNumLevels(); levelNdx++)
+		{
+			for (int layerNdx = 0; layerNdx < getArraySize(); layerNdx++)
+			{
+				levelOffset = getNextMultiple(offsetMultiples, levelOffset);
+
+				const tcu::ConstPixelBufferAccess	srcAccess		= getLevel(levelNdx, layerNdx);
+				const tcu::PixelBufferAccess		destAccess		(srcAccess.getFormat(), srcAccess.getSize(), srcAccess.getPitch(), destPtr + levelOffset);
+
+				tcu::copy(destAccess, srcAccess);
+				levelOffset += srcAccess.getWidth() * srcAccess.getHeight() * srcAccess.getDepth() * srcAccess.getFormat().getPixelSize();
+			}
+		}
+	}
+}
+
+void TestTexture::populateLevels (const std::vector<tcu::PixelBufferAccess>& levels)
+{
+	for (size_t levelNdx = 0; levelNdx < levels.size(); levelNdx++)
+		TestTexture::fillWithGradient(levels[levelNdx]);
+}
+
+void TestTexture::populateCompressedLevels (tcu::CompressedTexFormat format, const std::vector<tcu::PixelBufferAccess>& decompressedLevels)
+{
+	// Generate random compressed data and update decompressed data
+
+	de::Random random(123);
+
+	for (size_t levelNdx = 0; levelNdx < decompressedLevels.size(); levelNdx++)
+	{
+		const tcu::PixelBufferAccess	level				= decompressedLevels[levelNdx];
+		tcu::CompressedTexture*			compressedLevel		= new tcu::CompressedTexture(format, level.getWidth(), level.getHeight(), level.getDepth());
+		deUint8* const					compressedData		= (deUint8*)compressedLevel->getData();
+
+		if (tcu::isAstcFormat(format))
+		{
+			// \todo [2016-01-20 pyry] Comparison doesn't currently handle invalid blocks correctly so we use only valid blocks
+			tcu::astc::generateRandomValidBlocks(compressedData, compressedLevel->getDataSize()/tcu::astc::BLOCK_SIZE_BYTES,
+												 format, tcu::TexDecompressionParams::ASTCMODE_LDR, random.getUint32());
+		}
+		else
+		{
+			// Generate random compressed data
+			for (int byteNdx = 0; byteNdx < compressedLevel->getDataSize(); byteNdx++)
+				compressedData[byteNdx] = 0xFF & random.getUint32();
+		}
+
+		m_compressedLevels.push_back(compressedLevel);
+
+		// Store decompressed data
+		compressedLevel->decompress(level, tcu::TexDecompressionParams(tcu::TexDecompressionParams::ASTCMODE_LDR));
+	}
+}
+
+void TestTexture::fillWithGradient (const tcu::PixelBufferAccess& levelAccess)
+{
+	const tcu::TextureFormatInfo formatInfo = tcu::getTextureFormatInfo(levelAccess.getFormat());
+	tcu::fillWithComponentGradients(levelAccess, formatInfo.valueMin, formatInfo.valueMax);
+}
+
+// TestTexture1D
+
+TestTexture1D::TestTexture1D (const tcu::TextureFormat& format, int width)
+	: TestTexture	(format, width, 1, 1)
+	, m_texture		(format, width)
+{
+	allocateLevels(m_texture);
+	TestTexture::populateLevels(getLevelsVector(m_texture));
+}
+
+TestTexture1D::TestTexture1D (const tcu::CompressedTexFormat& format, int width)
+	: TestTexture	(format, width, 1, 1)
+	, m_texture		(tcu::getUncompressedFormat(format), width)
+{
+	allocateLevels(m_texture);
+	TestTexture::populateCompressedLevels(format, getLevelsVector(m_texture));
+}
+
+TestTexture1D::~TestTexture1D (void)
+{
+}
+
+int TestTexture1D::getNumLevels (void) const
+{
+	return m_texture.getNumLevels();
+}
+
+tcu::PixelBufferAccess TestTexture1D::getLevel (int level, int layer)
+{
+	DE_ASSERT(layer == 0);
+	DE_UNREF(layer);
+	return m_texture.getLevel(level);
+}
+
+const tcu::ConstPixelBufferAccess TestTexture1D::getLevel (int level, int layer) const
+{
+	DE_ASSERT(layer == 0);
+	DE_UNREF(layer);
+	return m_texture.getLevel(level);
+}
+
+const tcu::Texture1D& TestTexture1D::getTexture (void) const
+{
+	return m_texture;
+}
+
+
+// TestTexture1DArray
+
+TestTexture1DArray::TestTexture1DArray (const tcu::TextureFormat& format, int width, int arraySize)
+	: TestTexture	(format, width, 1, arraySize)
+	, m_texture		(format, width, arraySize)
+{
+	allocateLevels(m_texture);
+	TestTexture::populateLevels(getLevelsVector(m_texture));
+}
+
+TestTexture1DArray::TestTexture1DArray (const tcu::CompressedTexFormat& format, int width, int arraySize)
+	: TestTexture	(format, width, 1, arraySize)
+	, m_texture		(tcu::getUncompressedFormat(format), width, arraySize)
+{
+	allocateLevels(m_texture);
+
+	std::vector<tcu::PixelBufferAccess> layers;
+	for (int levelNdx = 0; levelNdx < m_texture.getNumLevels(); levelNdx++)
+		for (int layerNdx = 0; layerNdx < m_texture.getNumLayers(); layerNdx++)
+			layers.push_back(getLevel(levelNdx, layerNdx));
+
+	TestTexture::populateCompressedLevels(format, layers);
+}
+
+TestTexture1DArray::~TestTexture1DArray (void)
+{
+}
+
+int TestTexture1DArray::getNumLevels (void) const
+{
+	return m_texture.getNumLevels();
+}
+
+tcu::PixelBufferAccess TestTexture1DArray::getLevel (int level, int layer)
+{
+	const tcu::PixelBufferAccess	levelLayers	= m_texture.getLevel(level);
+	const deUint32					layerSize	= levelLayers.getWidth() * levelLayers.getFormat().getPixelSize();
+	const deUint32					layerOffset	= layerSize * layer;
+
+	return tcu::PixelBufferAccess(levelLayers.getFormat(), levelLayers.getWidth(), 1, 1, (deUint8*)levelLayers.getDataPtr() + layerOffset);
+}
+
+const tcu::ConstPixelBufferAccess TestTexture1DArray::getLevel (int level, int layer) const
+{
+	const tcu::ConstPixelBufferAccess	levelLayers	= m_texture.getLevel(level);
+	const deUint32						layerSize	= levelLayers.getWidth() * levelLayers.getFormat().getPixelSize();
+	const deUint32						layerOffset	= layerSize * layer;
+
+	return tcu::ConstPixelBufferAccess(levelLayers.getFormat(), levelLayers.getWidth(), 1, 1, (deUint8*)levelLayers.getDataPtr() + layerOffset);
+}
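+
+// Layer addressing example (illustrative): for a 16-texel RGBA8 base level, layerSize is
+// 16 * 4 = 64 bytes, so layer 2 starts at byte offset 128 within that level's storage.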
+
+const tcu::Texture1DArray& TestTexture1DArray::getTexture (void) const
+{
+	return m_texture;
+}
+
+int TestTexture1DArray::getArraySize (void) const
+{
+	return m_texture.getNumLayers();
+}
+
+
+// TestTexture2D
+
+TestTexture2D::TestTexture2D (const tcu::TextureFormat& format, int width, int height)
+	: TestTexture	(format, width, height, 1)
+	, m_texture		(format, width, height)
+{
+	allocateLevels(m_texture);
+	TestTexture::populateLevels(getLevelsVector(m_texture));
+}
+
+TestTexture2D::TestTexture2D (const tcu::CompressedTexFormat& format, int width, int height)
+	: TestTexture	(format, width, height, 1)
+	, m_texture		(tcu::getUncompressedFormat(format), width, height)
+{
+	allocateLevels(m_texture);
+	TestTexture::populateCompressedLevels(format, getLevelsVector(m_texture));
+}
+
+TestTexture2D::~TestTexture2D (void)
+{
+}
+
+int TestTexture2D::getNumLevels (void) const
+{
+	return m_texture.getNumLevels();
+}
+
+tcu::PixelBufferAccess TestTexture2D::getLevel (int level, int layer)
+{
+	DE_ASSERT(layer == 0);
+	DE_UNREF(layer);
+	return m_texture.getLevel(level);
+}
+
+const tcu::ConstPixelBufferAccess TestTexture2D::getLevel (int level, int layer) const
+{
+	DE_ASSERT(layer == 0);
+	DE_UNREF(layer);
+	return m_texture.getLevel(level);
+}
+
+const tcu::Texture2D& TestTexture2D::getTexture (void) const
+{
+	return m_texture;
+}
+
+
+// TestTexture2DArray
+
+TestTexture2DArray::TestTexture2DArray (const tcu::TextureFormat& format, int width, int height, int arraySize)
+	: TestTexture	(format, width, height, arraySize)
+	, m_texture		(format, width, height, arraySize)
+{
+	allocateLevels(m_texture);
+	TestTexture::populateLevels(getLevelsVector(m_texture));
+}
+
+TestTexture2DArray::TestTexture2DArray (const tcu::CompressedTexFormat& format, int width, int height, int arraySize)
+	: TestTexture	(format, width, height, arraySize)
+	, m_texture		(tcu::getUncompressedFormat(format), width, height, arraySize)
+{
+	allocateLevels(m_texture);
+
+	std::vector<tcu::PixelBufferAccess> layers;
+	for (int levelNdx = 0; levelNdx < m_texture.getNumLevels(); levelNdx++)
+		for (int layerNdx = 0; layerNdx < m_texture.getNumLayers(); layerNdx++)
+			layers.push_back(getLevel(levelNdx, layerNdx));
+
+	TestTexture::populateCompressedLevels(format, layers);
+}
+
+TestTexture2DArray::~TestTexture2DArray (void)
+{
+}
+
+int TestTexture2DArray::getNumLevels (void) const
+{
+	return m_texture.getNumLevels();
+}
+
+tcu::PixelBufferAccess TestTexture2DArray::getLevel (int level, int layer)
+{
+	const tcu::PixelBufferAccess	levelLayers	= m_texture.getLevel(level);
+	const deUint32					layerSize	= levelLayers.getWidth() * levelLayers.getHeight() * levelLayers.getFormat().getPixelSize();
+	const deUint32					layerOffset	= layerSize * layer;
+
+	return tcu::PixelBufferAccess(levelLayers.getFormat(), levelLayers.getWidth(), levelLayers.getHeight(), 1, (deUint8*)levelLayers.getDataPtr() + layerOffset);
+}
+
+const tcu::ConstPixelBufferAccess TestTexture2DArray::getLevel (int level, int layer) const
+{
+	const tcu::ConstPixelBufferAccess	levelLayers	= m_texture.getLevel(level);
+	const deUint32						layerSize	= levelLayers.getWidth() * levelLayers.getHeight() * levelLayers.getFormat().getPixelSize();
+	const deUint32						layerOffset	= layerSize * layer;
+
+	return tcu::ConstPixelBufferAccess(levelLayers.getFormat(), levelLayers.getWidth(), levelLayers.getHeight(), 1, (deUint8*)levelLayers.getDataPtr() + layerOffset);
+}
+
+const tcu::Texture2DArray& TestTexture2DArray::getTexture (void) const
+{
+	return m_texture;
+}
+
+int TestTexture2DArray::getArraySize (void) const
+{
+	return m_texture.getNumLayers();
+}
+
+
+// TestTexture3D
+
+TestTexture3D::TestTexture3D (const tcu::TextureFormat& format, int width, int height, int depth)
+	: TestTexture	(format, width, height, depth)
+	, m_texture		(format, width, height, depth)
+{
+	allocateLevels(m_texture);
+	TestTexture::populateLevels(getLevelsVector(m_texture));
+}
+
+TestTexture3D::TestTexture3D (const tcu::CompressedTexFormat& format, int width, int height, int depth)
+	: TestTexture	(format, width, height, depth)
+	, m_texture		(tcu::getUncompressedFormat(format), width, height, depth)
+{
+	allocateLevels(m_texture);
+	TestTexture::populateCompressedLevels(format, getLevelsVector(m_texture));
+}
+
+TestTexture3D::~TestTexture3D (void)
+{
+}
+
+int TestTexture3D::getNumLevels (void) const
+{
+	return m_texture.getNumLevels();
+}
+
+tcu::PixelBufferAccess TestTexture3D::getLevel (int level, int layer)
+{
+	DE_ASSERT(layer == 0);
+	DE_UNREF(layer);
+	return m_texture.getLevel(level);
+}
+
+const tcu::ConstPixelBufferAccess TestTexture3D::getLevel (int level, int layer) const
+{
+	DE_ASSERT(layer == 0);
+	DE_UNREF(layer);
+	return m_texture.getLevel(level);
+}
+
+const tcu::Texture3D& TestTexture3D::getTexture (void) const
+{
+	return m_texture;
+}
+
+
+// TestTextureCube
+
+const static tcu::CubeFace tcuFaceMapping[tcu::CUBEFACE_LAST] =
+{
+	tcu::CUBEFACE_POSITIVE_X,
+	tcu::CUBEFACE_NEGATIVE_X,
+	tcu::CUBEFACE_POSITIVE_Y,
+	tcu::CUBEFACE_NEGATIVE_Y,
+	tcu::CUBEFACE_POSITIVE_Z,
+	tcu::CUBEFACE_NEGATIVE_Z
+};
+
+TestTextureCube::TestTextureCube (const tcu::TextureFormat& format, int size)
+	: TestTexture	(format, size, size, 1)
+	, m_texture		(format, size)
+{
+	for (int levelNdx = 0; levelNdx < getNumLevels(); levelNdx++)
+	{
+		for (int faceNdx = 0; faceNdx < tcu::CUBEFACE_LAST; faceNdx++)
+		{
+			m_texture.allocLevel(tcuFaceMapping[faceNdx], levelNdx);
+			TestTexture::fillWithGradient(m_texture.getLevelFace(levelNdx, tcuFaceMapping[faceNdx]));
+		}
+	}
+}
+
+TestTextureCube::TestTextureCube (const tcu::CompressedTexFormat& format, int size)
+	: TestTexture	(format, size, size, 1)
+	, m_texture		(tcu::getUncompressedFormat(format), size)
+{
+	std::vector<tcu::PixelBufferAccess> levels(m_texture.getNumLevels() * tcu::CUBEFACE_LAST);
+
+	for (int levelNdx = 0; levelNdx < getNumLevels(); levelNdx++)
+	{
+		for (int faceNdx = 0; faceNdx < tcu::CUBEFACE_LAST; faceNdx++)
+		{
+			m_texture.allocLevel(tcuFaceMapping[faceNdx], levelNdx);
+			levels[levelNdx * tcu::CUBEFACE_LAST + faceNdx] = m_texture.getLevelFace(levelNdx, tcuFaceMapping[faceNdx]);
+		}
+	}
+
+	TestTexture::populateCompressedLevels(format, levels);
+}
+
+TestTextureCube::~TestTextureCube (void)
+{
+}
+
+int TestTextureCube::getNumLevels (void) const
+{
+	return m_texture.getNumLevels();
+}
+
+tcu::PixelBufferAccess TestTextureCube::getLevel (int level, int face)
+{
+	return m_texture.getLevelFace(level, (tcu::CubeFace)face);
+}
+
+const tcu::ConstPixelBufferAccess TestTextureCube::getLevel (int level, int face) const
+{
+	return m_texture.getLevelFace(level, (tcu::CubeFace)face);
+}
+
+int TestTextureCube::getArraySize (void) const
+{
+	return (int)tcu::CUBEFACE_LAST;
+}
+
+const tcu::TextureCube& TestTextureCube::getTexture (void) const
+{
+	return m_texture;
+}
+
+// TestTextureCubeArray
+
+TestTextureCubeArray::TestTextureCubeArray (const tcu::TextureFormat& format, int size, int arraySize)
+	: TestTexture	(format, size, size, arraySize)
+	, m_texture		(format, size, arraySize)
+{
+	allocateLevels(m_texture);
+	TestTexture::populateLevels(getLevelsVector(m_texture));
+}
+
+TestTextureCubeArray::TestTextureCubeArray (const tcu::CompressedTexFormat& format, int size, int arraySize)
+	: TestTexture	(format, size, size, arraySize)
+	, m_texture		(tcu::getUncompressedFormat(format), size, arraySize)
+{
+	DE_ASSERT(arraySize % 6 == 0);
+
+	allocateLevels(m_texture);
+
+	std::vector<tcu::PixelBufferAccess> layers;
+	for (int levelNdx = 0; levelNdx < m_texture.getNumLevels(); levelNdx++)
+		for (int layerNdx = 0; layerNdx < m_texture.getDepth(); layerNdx++)
+			layers.push_back(getLevel(levelNdx, layerNdx));
+
+	TestTexture::populateCompressedLevels(format, layers);
+}
+
+TestTextureCubeArray::~TestTextureCubeArray (void)
+{
+}
+
+int TestTextureCubeArray::getNumLevels (void) const
+{
+	return m_texture.getNumLevels();
+}
+
+tcu::PixelBufferAccess TestTextureCubeArray::getLevel (int level, int layer)
+{
+	const tcu::PixelBufferAccess	levelLayers	= m_texture.getLevel(level);
+	const deUint32					layerSize	= levelLayers.getWidth() * levelLayers.getHeight() * levelLayers.getFormat().getPixelSize();
+	const deUint32					layerOffset	= layerSize * layer;
+
+	return tcu::PixelBufferAccess(levelLayers.getFormat(), levelLayers.getWidth(), levelLayers.getHeight(), 1, (deUint8*)levelLayers.getDataPtr() + layerOffset);
+}
+
+const tcu::ConstPixelBufferAccess TestTextureCubeArray::getLevel (int level, int layer) const
+{
+	const tcu::ConstPixelBufferAccess	levelLayers	= m_texture.getLevel(level);
+	const deUint32						layerSize	= levelLayers.getWidth() * levelLayers.getHeight() * levelLayers.getFormat().getPixelSize();
+	const deUint32						layerOffset	= layerSize * layer;
+
+	return tcu::ConstPixelBufferAccess(levelLayers.getFormat(), levelLayers.getWidth(), levelLayers.getHeight(), 1, (deUint8*)levelLayers.getDataPtr() + layerOffset);
+}
+
+int TestTextureCubeArray::getArraySize (void) const
+{
+	return m_texture.getDepth();
+}
+
+const tcu::TextureCubeArray& TestTextureCubeArray::getTexture (void) const
+{
+	return m_texture;
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageUtil.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageUtil.hpp
new file mode 100644
index 0000000..17af27a
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageUtil.hpp
@@ -0,0 +1,255 @@
+#ifndef _VKTPIPELINEIMAGEUTIL_HPP
+#define _VKTPIPELINEIMAGEUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Utilities for images.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vkDefs.hpp"
+#include "vkDefs.hpp"
+#include "vkPlatform.hpp"
+#include "vkMemUtil.hpp"
+#include "vkRef.hpp"
+#include "tcuTexture.hpp"
+#include "tcuCompressedTexture.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+class TestTexture;
+
+enum BorderColor
+{
+	BORDER_COLOR_OPAQUE_BLACK,
+	BORDER_COLOR_OPAQUE_WHITE,
+	BORDER_COLOR_TRANSPARENT_BLACK,
+
+	BORDER_COLOR_COUNT
+};
+
+bool							isSupportedSamplableFormat	(const vk::InstanceInterface&	instanceInterface,
+															 vk::VkPhysicalDevice			device,
+															 vk::VkFormat					format);
+bool							isLinearFilteringSupported	(const vk::InstanceInterface&	instanceInterface,
+															 vk::VkPhysicalDevice			device,
+															 vk::VkFormat					format,
+															 vk::VkImageTiling				tiling);
+
+vk::VkBorderColor				getFormatBorderColor		(BorderColor color, vk::VkFormat format);
+
+/*--------------------------------------------------------------------*//*!
+ * Gets a tcu::TextureLevel initialized with data from a VK color
+ * attachment.
+ *
+ * The VkImage must be non-multisampled and able to be used as a source
+ * operand for transfer operations.
+ *//*--------------------------------------------------------------------*/
+de::MovePtr<tcu::TextureLevel>	readColorAttachment			 (const vk::DeviceInterface&	vk,
+															  vk::VkDevice					device,
+															  vk::VkQueue					queue,
+															  deUint32						queueFamilyIndex,
+															  vk::Allocator&				allocator,
+															  vk::VkImage					image,
+															  vk::VkFormat					format,
+															  const tcu::UVec2&				renderSize);
+
+/*--------------------------------------------------------------------*//*!
+ * Uploads data from a test texture to a destination VK image.
+ *
+ * The VkImage must be non-multisampled and able to be used as a
+ * destination operand for transfer operations.
+ *//*--------------------------------------------------------------------*/
+void							uploadTestTexture			(const vk::DeviceInterface&		vk,
+															 vk::VkDevice					device,
+															 vk::VkQueue					queue,
+															 deUint32						queueFamilyIndex,
+															 vk::Allocator&					allocator,
+															 const TestTexture&				testTexture,
+															 vk::VkImage					destImage);
+
+class TestTexture
+{
+public:
+												TestTexture					(const tcu::TextureFormat& format, int width, int height, int depth);
+												TestTexture					(const tcu::CompressedTexFormat& format, int width, int height, int depth);
+	virtual										~TestTexture				(void);
+
+	virtual int									getNumLevels				(void) const = 0;
+	virtual deUint32							getSize						(void) const;
+	virtual int									getArraySize				(void) const { return 1; }
+
+	virtual bool								isCompressed				(void) const { return !m_compressedLevels.empty(); }
+	virtual deUint32							getCompressedSize			(void) const;
+
+	virtual tcu::PixelBufferAccess				getLevel					(int level, int layer) = 0;
+	virtual const tcu::ConstPixelBufferAccess	getLevel					(int level, int layer) const = 0;
+
+	virtual tcu::CompressedTexture&				getCompressedLevel			(int level, int layer);
+	virtual const tcu::CompressedTexture&		getCompressedLevel			(int level, int layer) const;
+
+	virtual std::vector<vk::VkBufferImageCopy>	getBufferCopyRegions		(void) const;
+	virtual void								write						(deUint8* destPtr) const;
+
+protected:
+	void										populateLevels				(const std::vector<tcu::PixelBufferAccess>& levels);
+	void										populateCompressedLevels	(tcu::CompressedTexFormat format, const std::vector<tcu::PixelBufferAccess>& decompressedLevels);
+
+	static void									fillWithGradient			(const tcu::PixelBufferAccess& levelAccess);
+
+protected:
+	std::vector<tcu::CompressedTexture*>		m_compressedLevels;
+};
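+
+// The concrete subclasses below own the backing tcu texture storage and implement
+// getNumLevels()/getLevel(); constructing one allocates a full mip chain and fills it with
+// component gradients (or random compressed blocks for compressed formats), e.g.
+// (illustrative) TestTexture2D tex (tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8), 32, 32);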
+
+class TestTexture1D : public TestTexture
+{
+private:
+	tcu::Texture1D								m_texture;
+
+public:
+												TestTexture1D	(const tcu::TextureFormat& format, int width);
+												TestTexture1D	(const tcu::CompressedTexFormat& format, int width);
+	virtual										~TestTexture1D	(void);
+
+	virtual int									getNumLevels	(void) const;
+	virtual tcu::PixelBufferAccess				getLevel		(int level, int layer);
+	virtual const tcu::ConstPixelBufferAccess	getLevel		(int level, int layer) const;
+	virtual const tcu::Texture1D&				getTexture		(void) const;
+};
+
+class TestTexture1DArray : public TestTexture
+{
+private:
+	tcu::Texture1DArray							m_texture;
+
+public:
+												TestTexture1DArray	(const tcu::TextureFormat& format, int width, int arraySize);
+												TestTexture1DArray	(const tcu::CompressedTexFormat& format, int width, int arraySize);
+	virtual										~TestTexture1DArray	(void);
+
+	virtual int									getNumLevels		(void) const;
+	virtual tcu::PixelBufferAccess				getLevel			(int level, int layer);
+	virtual const tcu::ConstPixelBufferAccess	getLevel			(int level, int layer) const;
+	virtual const tcu::Texture1DArray&			getTexture			(void) const;
+	virtual int									getArraySize		(void) const;
+};
+
+class TestTexture2D : public TestTexture
+{
+private:
+	tcu::Texture2D								m_texture;
+
+public:
+												TestTexture2D		(const tcu::TextureFormat& format, int width, int height);
+												TestTexture2D		(const tcu::CompressedTexFormat& format, int width, int height);
+	virtual										~TestTexture2D		(void);
+
+	virtual int									getNumLevels		(void) const;
+	virtual tcu::PixelBufferAccess				getLevel			(int level, int layer);
+	virtual const tcu::ConstPixelBufferAccess	getLevel			(int level, int layer) const;
+	virtual const tcu::Texture2D&				getTexture			(void) const;
+};
+
+class TestTexture2DArray : public TestTexture
+{
+private:
+	tcu::Texture2DArray	m_texture;
+
+public:
+												TestTexture2DArray	(const tcu::TextureFormat& format, int width, int height, int arraySize);
+												TestTexture2DArray	(const tcu::CompressedTexFormat& format, int width, int height, int arraySize);
+	virtual										~TestTexture2DArray	(void);
+
+	virtual int									getNumLevels		(void) const;
+	virtual tcu::PixelBufferAccess				getLevel			(int level, int layer);
+	virtual const tcu::ConstPixelBufferAccess	getLevel			(int level, int layer) const;
+	virtual const tcu::Texture2DArray&			getTexture			(void) const;
+	virtual int									getArraySize		(void) const;
+};
+
+class TestTexture3D : public TestTexture
+{
+private:
+	tcu::Texture3D	m_texture;
+
+public:
+												TestTexture3D		(const tcu::TextureFormat& format, int width, int height, int depth);
+												TestTexture3D		(const tcu::CompressedTexFormat& format, int width, int height, int depth);
+	virtual										~TestTexture3D		(void);
+
+	virtual int									getNumLevels		(void) const;
+	virtual tcu::PixelBufferAccess				getLevel			(int level, int layer);
+	virtual const tcu::ConstPixelBufferAccess	getLevel			(int level, int layer) const;
+	virtual const tcu::Texture3D&				getTexture			(void) const;
+};
+
+class TestTextureCube : public TestTexture
+{
+private:
+	tcu::TextureCube							m_texture;
+
+public:
+												TestTextureCube			(const tcu::TextureFormat& format, int size);
+												TestTextureCube			(const tcu::CompressedTexFormat& format, int size);
+	virtual										~TestTextureCube		(void);
+
+	virtual int									getNumLevels			(void) const;
+	virtual tcu::PixelBufferAccess				getLevel				(int level, int layer);
+	virtual const tcu::ConstPixelBufferAccess	getLevel				(int level, int layer) const;
+	virtual int									getArraySize			(void) const;
+	virtual const tcu::TextureCube&				getTexture				(void) const;
+};
+
+class TestTextureCubeArray : public TestTexture
+{
+private:
+	tcu::TextureCubeArray						m_texture;
+
+public:
+												TestTextureCubeArray	(const tcu::TextureFormat& format, int size, int arraySize);
+												TestTextureCubeArray	(const tcu::CompressedTexFormat& format, int size, int arraySize);
+	virtual										~TestTextureCubeArray	(void);
+
+	virtual int									getNumLevels			(void) const;
+	virtual tcu::PixelBufferAccess				getLevel				(int level, int layer);
+	virtual const tcu::ConstPixelBufferAccess	getLevel				(int level, int layer) const;
+	virtual int									getArraySize			(void) const;
+	virtual const tcu::TextureCubeArray&		getTexture				(void) const;
+};
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINEIMAGEUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageViewTests.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageViewTests.cpp
new file mode 100644
index 0000000..600adba
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageViewTests.cpp
@@ -0,0 +1,805 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image View Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineImageViewTests.hpp"
+#include "vktPipelineImageSamplingInstance.hpp"
+#include "vktPipelineImageUtil.hpp"
+#include "vktPipelineVertexUtil.hpp"
+#include "vktTestCase.hpp"
+#include "vkImageUtil.hpp"
+#include "vkPrograms.hpp"
+#include "tcuPlatform.hpp"
+#include "tcuTextureUtil.hpp"
+#include "deStringUtil.hpp"
+#include "deMemory.h"
+
+#include <sstream>
+#include <vector>
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+using de::MovePtr;
+
+namespace
+{
+
+
+class ImageViewTest : public vkt::TestCase
+{
+public:
+							ImageViewTest			(tcu::TestContext&				testContext,
+													 const char*					name,
+													 const char*					description,
+													 VkImageViewType				imageViewType,
+													 VkFormat						imageFormat,
+													 float							samplerLod,
+													 const VkComponentMapping&		componentMapping,
+													 const VkImageSubresourceRange&	subresourceRange);
+	virtual					~ImageViewTest			(void) {}
+
+	virtual void			initPrograms			(SourceCollections&				sourceCollections) const;
+	virtual TestInstance*	createInstance			(Context&						context) const;
+	static std::string		getGlslSamplerType		(const tcu::TextureFormat&		format,
+													 VkImageViewType				type);
+	static tcu::UVec2		getRenderSize			(VkImageViewType				viewType);
+	static tcu::IVec3		getImageSize			(VkImageViewType				viewType);
+	static int				getArraySize			(VkImageViewType				viewType);
+	static int				getNumLevels			(VkImageViewType				viewType);
+	static tcu::Vec4		swizzle					(tcu::Vec4						inputData,
+													 VkComponentMapping				componentMapping);
+private:
+	VkImageViewType			m_imageViewType;
+	VkFormat				m_imageFormat;
+	float					m_samplerLod;
+	VkComponentMapping		m_componentMapping;
+	VkImageSubresourceRange	m_subresourceRange;
+};
+
+ImageViewTest::ImageViewTest (tcu::TestContext&					testContext,
+							  const char*						name,
+							  const char*						description,
+							  VkImageViewType					imageViewType,
+							  VkFormat							imageFormat,
+							  float								samplerLod,
+							  const VkComponentMapping&			componentMapping,
+							  const VkImageSubresourceRange&	subresourceRange)
+
+	: vkt::TestCase			(testContext, name, description)
+	, m_imageViewType		(imageViewType)
+	, m_imageFormat			(imageFormat)
+	, m_samplerLod			(samplerLod)
+	, m_componentMapping	(componentMapping)
+	, m_subresourceRange	(subresourceRange)
+{
+}
+
+tcu::Vec4 ImageViewTest::swizzle (tcu::Vec4 inputData, VkComponentMapping componentMapping)
+{
+	// array map with enum VkComponentSwizzle
+	const float channelValues[] =
+	{
+		-1.0f,
+		0.0f,
+		1.0f,
+		inputData.x(),
+		inputData.y(),
+		inputData.z(),
+		inputData.w(),
+		-1.0f
+	};
+
+	return tcu::Vec4(channelValues[componentMapping.r],
+					 channelValues[componentMapping.g],
+					 channelValues[componentMapping.b],
+					 channelValues[componentMapping.a]);
+}
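+
+// Worked example (illustrative): with componentMapping = { VK_COMPONENT_SWIZZLE_B,
+// VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_ONE }, an input of
+// (x, y, z, w) is returned as (z, y, x, 1.0).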
+
+void ImageViewTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	std::ostringstream				vertexSrc;
+	std::ostringstream				fragmentSrc;
+	const char*						texCoordSwizzle	= DE_NULL;
+	const tcu::TextureFormat		format			= (isCompressedFormat(m_imageFormat)) ? tcu::getUncompressedFormat(mapVkCompressedFormat(m_imageFormat))
+																						  : mapVkFormat(m_imageFormat);
+
+	// \note We don't want to perform normalization on any compressed formats.
+	//		 For non-sRGB LDR ASTC it would lead to a lack of coverage, since the
+	//		 uncompressed format in that case is F16 but the values are already in
+	//		 the 0..1 range.
+	const tcu::TextureFormatInfo	formatInfo		= (!isCompressedFormat(m_imageFormat) ? tcu::getTextureFormatInfo(format)
+																						  : tcu::getTextureFormatInfo(tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8)));
+
+	tcu::Vec4						swizzledScale	= swizzle(formatInfo.lookupScale, m_componentMapping);
+	tcu::Vec4						swizzledBias	= swizzle(formatInfo.lookupBias, m_componentMapping);
+
+	switch (m_imageViewType)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D:
+			texCoordSwizzle = "x";
+			break;
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+		case VK_IMAGE_VIEW_TYPE_2D:
+			texCoordSwizzle = "xy";
+			break;
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+		case VK_IMAGE_VIEW_TYPE_3D:
+		case VK_IMAGE_VIEW_TYPE_CUBE:
+			texCoordSwizzle = "xyz";
+			break;
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			texCoordSwizzle = "xyzw";
+			break;
+		default:
+			DE_ASSERT(false);
+			break;
+	}
+
+	vertexSrc << "#version 440\n"
+			  << "layout(location = 0) in vec4 position;\n"
+			  << "layout(location = 1) in vec4 texCoords;\n"
+			  << "layout(location = 0) out highp vec4 vtxTexCoords;\n"
+			  << "out gl_PerVertex {\n"
+			  << "	vec4 gl_Position;\n"
+			  << "};\n"
+			  << "void main (void)\n"
+			  << "{\n"
+			  << "	gl_Position = position;\n"
+			  << "	vtxTexCoords = texCoords;\n"
+			  << "}\n";
+
+	fragmentSrc << "#version 440\n"
+				<< "layout(set = 0, binding = 0) uniform highp " << getGlslSamplerType(format, m_imageViewType) << " texSampler;\n"
+				<< "layout(location = 0) in highp vec4 vtxTexCoords;\n"
+				<< "layout(location = 0) out highp vec4 fragColor;\n"
+				<< "void main (void)\n"
+				<< "{\n"
+				<< "	fragColor = ";
+
+	if (m_samplerLod > 0.0f)
+		fragmentSrc << "textureLod(texSampler, vtxTexCoords." << texCoordSwizzle << ", " << std::fixed <<  m_samplerLod << ")";
+	else
+		fragmentSrc << "texture(texSampler, vtxTexCoords." << texCoordSwizzle << ")" << std::fixed;
+
+	fragmentSrc << " * vec4" << std::scientific << swizzledScale << " + vec4" << swizzledBias << ";\n"
+				<< "}\n";
+
+	sourceCollections.glslSources.add("tex_vert") << glu::VertexSource(vertexSrc.str());
+	sourceCollections.glslSources.add("tex_frag") << glu::FragmentSource(fragmentSrc.str());
+}
+
+TestInstance* ImageViewTest::createInstance (Context& context) const
+{
+	const tcu::UVec2				renderSize		= getRenderSize(m_imageViewType);
+	const tcu::IVec3				imageSize		= getImageSize(m_imageViewType);
+	const int						arraySize		= getArraySize(m_imageViewType);
+	const std::vector<Vertex4Tex4>	vertices		= createTestQuadMosaic(m_imageViewType);
+
+	const VkSamplerCreateInfo		samplerParams	=
+	{
+		VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,									// VkStructureType			sType;
+		DE_NULL,																// const void*				pNext;
+		0u,																		// VkSamplerCreateFlags		flags;
+		VK_FILTER_NEAREST,														// VkFilter					magFilter;
+		VK_FILTER_NEAREST,														// VkFilter					minFilter;
+		VK_SAMPLER_MIPMAP_MODE_NEAREST,											// VkSamplerMipmapMode		mipmapMode;
+		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,									// VkSamplerAddressMode		addressModeU;
+		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,									// VkSamplerAddressMode		addressModeV;
+		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,									// VkSamplerAddressMode		addressModeW;
+		0.0f,																	// float					mipLodBias;
+		VK_FALSE,																// VkBool32					anisotropyEnable;
+		1.0f,																	// float					maxAnisotropy;
+		VK_FALSE,																// VkBool32					compareEnable;
+		VK_COMPARE_OP_NEVER,													// VkCompareOp				compareOp;
+		0.0f,																	// float					minLod;
+		(float)(m_subresourceRange.levelCount - 1),								// float					maxLod;
+		getFormatBorderColor(BORDER_COLOR_TRANSPARENT_BLACK, m_imageFormat),	// VkBorderColor			borderColor;
+		VK_FALSE																// VkBool32					unnormalizedCoordinates;
+	};
+
+	return new ImageSamplingInstance(context, renderSize, m_imageViewType, m_imageFormat, imageSize, arraySize, m_componentMapping, m_subresourceRange, samplerParams, m_samplerLod, vertices);
+}
+
+std::string ImageViewTest::getGlslSamplerType (const tcu::TextureFormat& format, VkImageViewType type)
+{
+	std::ostringstream samplerType;
+
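+	// Unsigned and signed integer channel classes use usampler*/isampler* types; all other channel classes use the plain float sampler types.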
+	if (tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)
+		samplerType << "u";
+	else if (tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER)
+		samplerType << "i";
+
+	switch (type)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D:
+			samplerType << "sampler1D";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+			samplerType << "sampler1DArray";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_2D:
+			samplerType << "sampler2D";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+			samplerType << "sampler2DArray";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_3D:
+			samplerType << "sampler3D";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_CUBE:
+			samplerType << "samplerCube";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			samplerType << "samplerCubeArray";
+			break;
+
+		default:
+			DE_FATAL("Unknown image view type");
+			break;
+	}
+
+	return samplerType.str();
+}
+
+tcu::UVec2 ImageViewTest::getRenderSize (VkImageViewType viewType)
+{
+	if (viewType == VK_IMAGE_VIEW_TYPE_1D || viewType == VK_IMAGE_VIEW_TYPE_2D)
+		return tcu::UVec2(16u, 16u);
+	else
+		return tcu::UVec2(16u * 3u, 16u * 2u);
+}
+
+tcu::IVec3 ImageViewTest::getImageSize (VkImageViewType viewType)
+{
+	switch (viewType)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D:
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+			return tcu::IVec3(16, 1, 1);
+
+		case VK_IMAGE_VIEW_TYPE_3D:
+			return tcu::IVec3(16);
+
+		default:
+			break;
+	}
+
+	return tcu::IVec3(16, 16, 1);
+}
+
+int ImageViewTest::getArraySize (VkImageViewType viewType)
+{
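+	// 3D views use a single layer; cube and cube array cases allocate 18 layers (three complete cubes); all other view types use 6 layers.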
+	switch (viewType)
+	{
+		case VK_IMAGE_VIEW_TYPE_3D:
+			return 1;
+
+		case VK_IMAGE_VIEW_TYPE_CUBE:
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			return 18;
+
+		default:
+			break;
+	}
+
+	return 6;
+}
+
+int ImageViewTest::getNumLevels (VkImageViewType viewType)
+{
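+	// Size of a complete mip chain: floor(log2(largest dimension)) + 1 levels.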
+	const tcu::IVec3 imageSize = getImageSize(viewType);
+
+	return deLog2Floor32(deMax32(imageSize.x(), deMax32(imageSize.y(), imageSize.z()))) + 1;
+}
+
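+// Derives a test case name from the format name, e.g. VK_FORMAT_R8G8B8A8_UNORM becomes "r8g8b8a8_unorm".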
+static std::string getFormatCaseName (const VkFormat format)
+{
+	const std::string fullName = getFormatName(format);
+
+	DE_ASSERT(de::beginsWith(fullName, "VK_FORMAT_"));
+
+	return de::toLower(fullName.substr(10));
+}
+
+static de::MovePtr<tcu::TestCaseGroup> createSubresourceRangeTests(tcu::TestContext& testCtx, VkImageViewType viewType, VkFormat imageFormat)
+{
+	struct TestCaseConfig
+	{
+		const char*				name;
+		float					samplerLod;
+		VkImageSubresourceRange	subresourceRange;
+	};
+
+	const deUint32				numLevels				= ImageViewTest::getNumLevels(viewType);
+	const deUint32				arraySize				= ImageViewTest::getArraySize(viewType);
+	const VkImageAspectFlags	imageAspectFlags		= VK_IMAGE_ASPECT_COLOR_BIT;
+	const VkComponentMapping	componentMapping		= getFormatComponentMapping(imageFormat);
+
+	de::MovePtr<tcu::TestCaseGroup> rangeTests (new tcu::TestCaseGroup(testCtx, "subresource_range", ""));
+
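+// Helper: adds one ImageViewTest child to rangeTests for each entry in the given TestCaseConfig array.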
+#define ADD_SUBRESOURCE_RANGE_TESTS(TEST_CASES)															\
+	do {																								\
+		for (int configNdx = 0; configNdx < DE_LENGTH_OF_ARRAY(TEST_CASES); configNdx++)				\
+		{																								\
+			std::ostringstream		desc;																\
+			const TestCaseConfig	config	= TEST_CASES[configNdx];									\
+			desc << "Samples level " << config.samplerLod << " with:\n" << config.subresourceRange;	\
+			rangeTests->addChild(new ImageViewTest(testCtx, config.name, desc.str().c_str(), viewType,	\
+												   imageFormat, config.samplerLod, componentMapping,		\
+												   config.subresourceRange));							\
+		}																								\
+	} while (0)
+
+	if (viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY || viewType == VK_IMAGE_VIEW_TYPE_2D_ARRAY)
+	{
+		const TestCaseConfig mipLevelRangeCases[] =
+		{
+			//	name					samplerLod	subresourceRange (aspectMask, baseMipLevel, mipLevels, baseArrayLayer, arraySize)
+			{ "lod_base_mip_level",		0.0f,		{ imageAspectFlags, 2u, numLevels - 2u, 0u, arraySize } },
+			{ "lod_mip_levels",			4.0f,		{ imageAspectFlags, 0u, 3u, 0u, arraySize } },
+		};
+
+		const TestCaseConfig arrayRangeCases[] =
+		{
+			//	name					samplerLod		subresourceRange (aspectMask, baseMipLevel, mipLevels, baseArrayLayer, arraySize)
+			{ "base_array_layer",		0.0f,			{ imageAspectFlags, 0u, numLevels, 1u, arraySize - 1u } },
+			{ "array_size",				0.0f,			{ imageAspectFlags, 0u, numLevels, 0u, 4u } },
+			{ "array_base_and_size",	0.0f,			{ imageAspectFlags, 0u, numLevels, 2u, 3u } },
+		};
+
+		const TestCaseConfig mipLevelAndArrayRangeCases[] =
+		{
+			//	name										samplerLod		subresourceRange (aspectMask, baseMipLevel, mipLevels, baseArrayLayer, arraySize)
+			{ "lod_base_mip_level_base_array_layer",		0.0f,			{ imageAspectFlags, 2u, numLevels - 2u, 1u, 5u } },
+			{ "lod_mip_levels_base_array_layer",			4.0f,			{ imageAspectFlags, 0u, 3u, 1u, 5u } },
+
+			{ "lod_base_mip_level_array_size",				0.0f,			{ imageAspectFlags, 2u, numLevels - 2u, 0u, 4u } },
+			{ "lod_mip_levels_array_size",					4.0f,			{ imageAspectFlags, 0u, 3u, 0u, 4u } },
+
+			{ "lod_base_mip_level_array_base_and_size",		0.0f,			{ imageAspectFlags, 2u, numLevels - 2u, 2u, 3u } },
+			{ "lod_mip_levels_array_base_and_size",			4.0f,			{ imageAspectFlags, 0u, 3u, 2u, 3u } },
+		};
+
+		ADD_SUBRESOURCE_RANGE_TESTS(mipLevelRangeCases);
+		ADD_SUBRESOURCE_RANGE_TESTS(arrayRangeCases);
+		ADD_SUBRESOURCE_RANGE_TESTS(mipLevelAndArrayRangeCases);
+	}
+	else if (viewType == VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)
+	{
+		const TestCaseConfig mipLevelRangeCases[] =
+		{
+			//	name					samplerLod	subresourceRange (aspectMask, baseMipLevel, mipLevels, baseArrayLayer, arraySize)
+			{ "lod_base_mip_level",		0.0f,		{ imageAspectFlags, 2u, numLevels - 2u, 0u, arraySize } },
+			{ "lod_mip_levels",			4.0f,		{ imageAspectFlags, 0u, 3u, 0u, arraySize } },
+		};
+
+		const TestCaseConfig arrayRangeCases[] =
+		{
+			//	name					samplerLod		subresourceRange (aspectMask, baseMipLevel, mipLevels, baseArrayLayer, arraySize)
+			{ "base_array_layer",		0.0f,			{ imageAspectFlags, 0u, numLevels, 6u, arraySize - 6u } },
+			{ "array_size",				0.0f,			{ imageAspectFlags, 0u, numLevels, 0u, 6u } },
+			{ "array_base_and_size",	0.0f,			{ imageAspectFlags, 0u, numLevels, 12u, 6u } },
+		};
+
+		const TestCaseConfig mipLevelAndArrayRangeCases[] =
+		{
+			//	name										samplerLod		subresourceRange (aspectMask, baseMipLevel, mipLevels, baseArrayLayer, arraySize)
+			{ "lod_base_mip_level_base_array_layer",		0.0f,			{ imageAspectFlags, 2u, numLevels - 2u, 6u, arraySize - 6u } },
+			{ "lod_mip_levels_base_array_layer",			4.0f,			{ imageAspectFlags, 0u, 3u, 6u, arraySize - 6u } },
+
+			{ "lod_base_mip_level_array_size",				0.0f,			{ imageAspectFlags, 2u, numLevels - 2u, 0u, 6u } },
+			{ "lod_mip_levels_array_size",					4.0f,			{ imageAspectFlags, 0u, 3u, 0u, 6u } },
+
+			{ "lod_base_mip_level_array_base_and_size",		0.0f,			{ imageAspectFlags, 2u, numLevels - 2u, 12u, 6u } },
+			{ "lod_mip_levels_array_base_and_size",			4.0f,			{ imageAspectFlags, 0u, 3u, 12u, 6u } },
+		};
+
+		ADD_SUBRESOURCE_RANGE_TESTS(mipLevelRangeCases);
+		ADD_SUBRESOURCE_RANGE_TESTS(arrayRangeCases);
+		ADD_SUBRESOURCE_RANGE_TESTS(mipLevelAndArrayRangeCases);
+	}
+	else if (viewType == VK_IMAGE_VIEW_TYPE_1D || viewType == VK_IMAGE_VIEW_TYPE_2D)
+	{
+		const TestCaseConfig mipLevelRangeCases[] =
+		{
+			//	name					samplerLod	subresourceRange (aspectMask, baseMipLevel, mipLevels, baseArrayLayer, arraySize)
+			{ "lod_base_mip_level",		0.0f,		{ imageAspectFlags, 2u, numLevels - 2u, 0u, 1u } },
+			{ "lod_mip_levels",			4.0f,		{ imageAspectFlags, 0u, 3u, 0u, 1u } },
+		};
+
+		const TestCaseConfig arrayRangeCases[] =
+		{
+			//	name					samplerLod		subresourceRange (aspectMask, baseMipLevel, mipLevels, baseArrayLayer, arraySize)
+			{ "array_layer_second",		0.0f,			{ imageAspectFlags, 0u, numLevels, 1u, 1u } },
+			{ "array_layer_last",		0.0f,			{ imageAspectFlags, 0u, numLevels, arraySize - 1u, 1u } },
+		};
+
+		const TestCaseConfig mipLevelAndArrayRangeCases[] =
+		{
+			//	name									samplerLod	subresourceRange (aspectMask, baseMipLevel, mipLevels, baseArrayLayer, arraySize)
+			{ "lod_base_mip_level_array_layer_second",	0.0f,		{ imageAspectFlags, 2u, numLevels - 2u, 1u, 1u } },
+			{ "lod_mip_levels_array_layer_second",		4.0f,		{ imageAspectFlags, 0u, 3u, 1u, 1u } },
+
+			{ "lod_base_mip_level_array_layer_last",	0.0f,		{ imageAspectFlags, 2u, numLevels - 2u, 5u, 1u } },
+			{ "lod_mip_levels_array_layer_last",		4.0f,		{ imageAspectFlags, 0u, 3u, arraySize - 1u, 1u } },
+		};
+
+		ADD_SUBRESOURCE_RANGE_TESTS(mipLevelRangeCases);
+		ADD_SUBRESOURCE_RANGE_TESTS(arrayRangeCases);
+		ADD_SUBRESOURCE_RANGE_TESTS(mipLevelAndArrayRangeCases);
+	}
+	else if (viewType == VK_IMAGE_VIEW_TYPE_CUBE)
+	{
+		const TestCaseConfig mipLevelRangeCases[] =
+		{
+			//	name					samplerLod	subresourceRange (aspectMask, baseMipLevel, mipLevels, baseArrayLayer, arraySize)
+			{ "lod_base_mip_level",		0.0f,		{ imageAspectFlags, 2u, numLevels - 2u, 0u, 6u } },
+			{ "lod_mip_levels",			4.0f,		{ imageAspectFlags, 0u, 3u, 0u, 6u } },
+		};
+
+		const TestCaseConfig arrayRangeCases[] =
+		{
+			//	name					samplerLod		subresourceRange (aspectMask, baseMipLevel, mipLevels, baseArrayLayer, arraySize)
+			{ "array_layer_second",		0.0f,			{ imageAspectFlags, 0u, numLevels, 6u, 6u } },
+			{ "array_layer_last",		0.0f,			{ imageAspectFlags, 0u, numLevels, arraySize - 6u, 6u } },
+		};
+
+		const TestCaseConfig mipLevelAndArrayRangeCases[] =
+		{
+			//	name									samplerLod	subresourceRange (aspectMask, baseMipLevel, mipLevels, baseArrayLayer, arraySize)
+			{ "lod_base_mip_level_array_layer_second",	0.0f,		{ imageAspectFlags, 2u, numLevels - 2u, 6u, 6u } },
+			{ "lod_mip_levels_array_layer_second",		4.0f,		{ imageAspectFlags, 0u, 3u, 6u, 6u } },
+
+			{ "lod_base_mip_level_array_layer_last",	0.0f,		{ imageAspectFlags, 2u, numLevels - 2u, arraySize - 6u, 6u } },
+			{ "lod_mip_levels_array_layer_last",		4.0f,		{ imageAspectFlags, 0u, 3u, arraySize - 6u, 6u } },
+		};
+
+		ADD_SUBRESOURCE_RANGE_TESTS(mipLevelRangeCases);
+		ADD_SUBRESOURCE_RANGE_TESTS(arrayRangeCases);
+		ADD_SUBRESOURCE_RANGE_TESTS(mipLevelAndArrayRangeCases);
+	}
+	else if (viewType == VK_IMAGE_VIEW_TYPE_3D)
+	{
+		const TestCaseConfig mipLevelRangeCases[] =
+		{
+			//	name					samplerLod	subresourceRange (aspectMask, baseMipLevel, mipLevels, baseArrayLayer, arraySize)
+			{ "lod_base_mip_level",		0.0f,		{ imageAspectFlags, 2u, numLevels - 2u, 0u, arraySize } },
+			{ "lod_mip_levels",			4.0f,		{ imageAspectFlags, 0u, 3u, 0u, arraySize } },
+		};
+		ADD_SUBRESOURCE_RANGE_TESTS(mipLevelRangeCases);
+	}
+
+#undef ADD_SUBRESOURCE_RANGE_TESTS
+
+	return rangeTests;
+}
+
+static std::vector<VkComponentMapping> getComponentMappingPermutations (const VkComponentMapping& componentMapping)
+{
+	std::vector<VkComponentMapping> mappings;
+
+	const VkComponentSwizzle channelSwizzles[4] = { componentMapping.r, componentMapping.g, componentMapping.b, componentMapping.a };
+
+	// Rearranges the channels by cyclically shifting their positions, producing the four rotations of the original mapping.
+	for (int firstChannelNdx = 0; firstChannelNdx < 4; firstChannelNdx++)
+	{
+		VkComponentSwizzle currentChannel[4];
+
+		for (int channelNdx = 0; channelNdx < 4; channelNdx++)
+			currentChannel[channelNdx] = channelSwizzles[(firstChannelNdx + channelNdx) % 4];
+
+		const VkComponentMapping mappingPermutation  =
+		{
+			currentChannel[0],
+			currentChannel[1],
+			currentChannel[2],
+			currentChannel[3]
+		};
+
+		mappings.push_back(mappingPermutation);
+	}
+
+	return mappings;
+}
+
+static std::string getComponentSwizzleCaseName (VkComponentSwizzle componentSwizzle)
+{
+	const std::string fullName = getComponentSwizzleName(componentSwizzle);
+
+	DE_ASSERT(de::beginsWith(fullName, "VK_COMPONENT_SWIZZLE_"));
+
+	return de::toLower(fullName.substr(21));
+}
+
+static std::string getComponentMappingCaseName (const VkComponentMapping& componentMapping)
+{
+	std::ostringstream name;
+
+	name << getComponentSwizzleCaseName(componentMapping.r) << "_"
+		 << getComponentSwizzleCaseName(componentMapping.g) << "_"
+		 << getComponentSwizzleCaseName(componentMapping.b) << "_"
+		 << getComponentSwizzleCaseName(componentMapping.a);
+
+	return name.str();
+}
+
+static de::MovePtr<tcu::TestCaseGroup> createComponentSwizzleTests (tcu::TestContext& testCtx, VkImageViewType viewType, VkFormat imageFormat)
+{
+	deUint32 arraySize = 0;
+
+	switch (viewType)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D:
+		case VK_IMAGE_VIEW_TYPE_2D:
+		case VK_IMAGE_VIEW_TYPE_3D:
+			arraySize = 1;
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_CUBE:
+			arraySize = 6;
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			arraySize = ImageViewTest::getArraySize(viewType);
+			break;
+
+		default:
+			break;
+	}
+
+	const VkImageSubresourceRange subresourceRange =
+	{
+		VK_IMAGE_ASPECT_COLOR_BIT,							// VkImageAspectFlags	aspectMask;
+		0u,													// deUint32				baseMipLevel;
+		(deUint32)ImageViewTest::getNumLevels(viewType),	// deUint32				mipLevels;
+		0u,													// deUint32				baseArrayLayer;
+		arraySize,											// deUint32				arraySize;
+	};
+
+	const std::vector<VkComponentMapping>	componentMappings	= getComponentMappingPermutations(getFormatComponentMapping(imageFormat));
+	de::MovePtr<tcu::TestCaseGroup>			swizzleTests		(new tcu::TestCaseGroup(testCtx, "component_swizzle", ""));
+
+	for (size_t mappingNdx = 0; mappingNdx < componentMappings.size(); mappingNdx++)
+	{
+		swizzleTests->addChild(new ImageViewTest(testCtx,
+												 getComponentMappingCaseName(componentMappings[mappingNdx]).c_str(),
+												 "",
+												 viewType,
+												 imageFormat,
+												 0.0f,
+												 componentMappings[mappingNdx],
+												 subresourceRange));
+	}
+
+	return swizzleTests;
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createImageViewTests (tcu::TestContext& testCtx)
+{
+	const struct
+	{
+		VkImageViewType		type;
+		const char*			name;
+	}
+	imageViewTypes[] =
+	{
+		{ VK_IMAGE_VIEW_TYPE_1D,			"1d" },
+		{ VK_IMAGE_VIEW_TYPE_1D_ARRAY,		"1d_array" },
+		{ VK_IMAGE_VIEW_TYPE_2D,			"2d" },
+		{ VK_IMAGE_VIEW_TYPE_2D_ARRAY,		"2d_array" },
+		{ VK_IMAGE_VIEW_TYPE_3D,			"3d" },
+		{ VK_IMAGE_VIEW_TYPE_CUBE,			"cube" },
+		{ VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,	"cube_array" }
+	};
+
+	const VkFormat formats[] =
+	{
+		VK_FORMAT_R4G4_UNORM_PACK8,
+		VK_FORMAT_R4G4B4A4_UNORM_PACK16,
+		VK_FORMAT_R5G6B5_UNORM_PACK16,
+		VK_FORMAT_R5G5B5A1_UNORM_PACK16,
+		VK_FORMAT_R8_UNORM,
+		VK_FORMAT_R8_SNORM,
+		VK_FORMAT_R8_USCALED,
+		VK_FORMAT_R8_SSCALED,
+		VK_FORMAT_R8_UINT,
+		VK_FORMAT_R8_SINT,
+		VK_FORMAT_R8_SRGB,
+		VK_FORMAT_R8G8_UNORM,
+		VK_FORMAT_R8G8_SNORM,
+		VK_FORMAT_R8G8_USCALED,
+		VK_FORMAT_R8G8_SSCALED,
+		VK_FORMAT_R8G8_UINT,
+		VK_FORMAT_R8G8_SINT,
+		VK_FORMAT_R8G8_SRGB,
+		VK_FORMAT_R8G8B8_UNORM,
+		VK_FORMAT_R8G8B8_SNORM,
+		VK_FORMAT_R8G8B8_USCALED,
+		VK_FORMAT_R8G8B8_SSCALED,
+		VK_FORMAT_R8G8B8_UINT,
+		VK_FORMAT_R8G8B8_SINT,
+		VK_FORMAT_R8G8B8_SRGB,
+		VK_FORMAT_R8G8B8A8_UNORM,
+		VK_FORMAT_R8G8B8A8_SNORM,
+		VK_FORMAT_R8G8B8A8_USCALED,
+		VK_FORMAT_R8G8B8A8_SSCALED,
+		VK_FORMAT_R8G8B8A8_UINT,
+		VK_FORMAT_R8G8B8A8_SINT,
+		VK_FORMAT_R8G8B8A8_SRGB,
+		VK_FORMAT_A2R10G10B10_UNORM_PACK32,
+		VK_FORMAT_A2R10G10B10_UINT_PACK32,
+		VK_FORMAT_A2B10G10R10_USCALED_PACK32,
+		VK_FORMAT_R16_UNORM,
+		VK_FORMAT_R16_SNORM,
+		VK_FORMAT_R16_USCALED,
+		VK_FORMAT_R16_SSCALED,
+		VK_FORMAT_R16_UINT,
+		VK_FORMAT_R16_SINT,
+		VK_FORMAT_R16_SFLOAT,
+		VK_FORMAT_R16G16_UNORM,
+		VK_FORMAT_R16G16_SNORM,
+		VK_FORMAT_R16G16_USCALED,
+		VK_FORMAT_R16G16_SSCALED,
+		VK_FORMAT_R16G16_UINT,
+		VK_FORMAT_R16G16_SINT,
+		VK_FORMAT_R16G16_SFLOAT,
+		VK_FORMAT_R16G16B16_UNORM,
+		VK_FORMAT_R16G16B16_SNORM,
+		VK_FORMAT_R16G16B16_USCALED,
+		VK_FORMAT_R16G16B16_SSCALED,
+		VK_FORMAT_R16G16B16_UINT,
+		VK_FORMAT_R16G16B16_SINT,
+		VK_FORMAT_R16G16B16_SFLOAT,
+		VK_FORMAT_R16G16B16A16_UNORM,
+		VK_FORMAT_R16G16B16A16_SNORM,
+		VK_FORMAT_R16G16B16A16_USCALED,
+		VK_FORMAT_R16G16B16A16_SSCALED,
+		VK_FORMAT_R16G16B16A16_UINT,
+		VK_FORMAT_R16G16B16A16_SINT,
+		VK_FORMAT_R16G16B16A16_SFLOAT,
+		VK_FORMAT_R32_UINT,
+		VK_FORMAT_R32_SINT,
+		VK_FORMAT_R32_SFLOAT,
+		VK_FORMAT_R32G32_UINT,
+		VK_FORMAT_R32G32_SINT,
+		VK_FORMAT_R32G32_SFLOAT,
+		VK_FORMAT_R32G32B32_UINT,
+		VK_FORMAT_R32G32B32_SINT,
+		VK_FORMAT_R32G32B32_SFLOAT,
+		VK_FORMAT_R32G32B32A32_UINT,
+		VK_FORMAT_R32G32B32A32_SINT,
+		VK_FORMAT_R32G32B32A32_SFLOAT,
+		VK_FORMAT_B10G11R11_UFLOAT_PACK32,
+		VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
+		VK_FORMAT_B4G4R4A4_UNORM_PACK16,
+		VK_FORMAT_B5G5R5A1_UNORM_PACK16,
+
+		// Compressed formats
+		VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
+		VK_FORMAT_EAC_R11_UNORM_BLOCK,
+		VK_FORMAT_EAC_R11_SNORM_BLOCK,
+		VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
+		VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
+		VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
+		VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
+		VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
+		VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
+		VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
+		VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
+		VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
+		VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
+		VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
+		VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
+		VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
+		VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
+		VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
+		VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
+		VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
+		VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
+		VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
+		VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
+		VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
+		VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
+		VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
+		VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
+		VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
+		VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
+		VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
+		VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
+		VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
+		VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
+	};
+
+	de::MovePtr<tcu::TestCaseGroup> imageTests			(new tcu::TestCaseGroup(testCtx, "image_view", "Image tests"));
+	de::MovePtr<tcu::TestCaseGroup> viewTypeTests		(new tcu::TestCaseGroup(testCtx, "view_type", ""));
+
+	for (int viewTypeNdx = 0; viewTypeNdx < DE_LENGTH_OF_ARRAY(imageViewTypes); viewTypeNdx++)
+	{
+		const VkImageViewType			viewType		= imageViewTypes[viewTypeNdx].type;
+		de::MovePtr<tcu::TestCaseGroup>	viewTypeGroup	(new tcu::TestCaseGroup(testCtx, imageViewTypes[viewTypeNdx].name, (std::string("Uses a ") + imageViewTypes[viewTypeNdx].name + " view").c_str()));
+		de::MovePtr<tcu::TestCaseGroup>	formatTests		(new tcu::TestCaseGroup(testCtx, "format", "Uses samplable formats"));
+
+		for (size_t formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(formats); formatNdx++)
+		{
+			const VkFormat		format		= formats[formatNdx];
+
+			if (isCompressedFormat(format))
+			{
+				// Do not use compressed formats with 1D and 1D array textures. Compressed formats are grouped at the end of the format list, so the remaining formats can be skipped entirely.
+				if (viewType == VK_IMAGE_VIEW_TYPE_1D || viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY)
+					break;
+			}
+
+			de::MovePtr<tcu::TestCaseGroup>	formatGroup	(new tcu::TestCaseGroup(testCtx,
+																				getFormatCaseName(format).c_str(),
+																				(std::string("Samples a texture of format ") + getFormatName(format)).c_str()));
+
+			de::MovePtr<tcu::TestCaseGroup>	subresourceRangeTests	= createSubresourceRangeTests(testCtx, viewType, format);
+			de::MovePtr<tcu::TestCaseGroup>	componentSwizzleTests	= createComponentSwizzleTests(testCtx, viewType, format);
+
+			formatGroup->addChild(componentSwizzleTests.release());
+			formatGroup->addChild(subresourceRangeTests.release());
+			formatTests->addChild(formatGroup.release());
+		}
+
+		viewTypeGroup->addChild(formatTests.release());
+		viewTypeTests->addChild(viewTypeGroup.release());
+	}
+
+	imageTests->addChild(viewTypeTests.release());
+
+	return imageTests.release();
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageViewTests.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageViewTests.hpp
new file mode 100644
index 0000000..e3fbe06
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineImageViewTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTPIPELINEIMAGEVIEWTESTS_HPP
+#define _VKTPIPELINEIMAGEVIEWTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image View Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+tcu::TestCaseGroup* createImageViewTests (tcu::TestContext& testCtx);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINEIMAGEVIEWTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineInputAssemblyTests.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineInputAssemblyTests.cpp
new file mode 100644
index 0000000..45b6f88
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineInputAssemblyTests.cpp
@@ -0,0 +1,1648 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Input Assembly Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineInputAssemblyTests.hpp"
+#include "vktPipelineClearUtil.hpp"
+#include "vktPipelineImageUtil.hpp"
+#include "vktPipelineVertexUtil.hpp"
+#include "vktPipelineReferenceRenderer.hpp"
+#include "vktTestCase.hpp"
+#include "vkImageUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "tcuImageCompare.hpp"
+#include "deMath.h"
+#include "deMemory.h"
+#include "deRandom.hpp"
+#include "deStringUtil.hpp"
+#include "deUniquePtr.hpp"
+
+#include <algorithm>
+#include <sstream>
+#include <vector>
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+
+namespace
+{
+
+class InputAssemblyTest : public vkt::TestCase
+{
+public:
+	const static VkPrimitiveTopology	s_primitiveTopologies[];
+	const static deUint32				s_restartIndex32;
+	const static deUint16				s_restartIndex16;
+
+										InputAssemblyTest		(tcu::TestContext&		testContext,
+																 const std::string&		name,
+																 const std::string&		description,
+																 VkPrimitiveTopology	primitiveTopology,
+																 int					primitiveCount,
+																 bool					testPrimitiveRestart,
+																 VkIndexType			indexType);
+	virtual								~InputAssemblyTest		(void) {}
+	virtual void						initPrograms			(SourceCollections& sourceCollections) const;
+	virtual TestInstance*				createInstance			(Context& context) const;
+	static bool							isRestartIndex			(VkIndexType indexType, deUint32 indexValue);
+	static deUint32						getRestartIndex			(VkIndexType indexType);
+
+protected:
+	virtual void						createBufferData		(VkPrimitiveTopology		topology,
+																 int						primitiveCount,
+																 VkIndexType				indexType,
+																 std::vector<deUint32>&		indexData,
+																 std::vector<Vertex4RGBA>&	vertexData) const = 0;
+
+private:
+	VkPrimitiveTopology					m_primitiveTopology;
+	const int							m_primitiveCount;
+	bool								m_testPrimitiveRestart;
+	VkIndexType							m_indexType;
+};
+
+class PrimitiveTopologyTest : public InputAssemblyTest
+{
+public:
+										PrimitiveTopologyTest	(tcu::TestContext&		testContext,
+																 const std::string&		name,
+																 const std::string&		description,
+																 VkPrimitiveTopology	primitiveTopology);
+	virtual								~PrimitiveTopologyTest	(void) {}
+
+protected:
+	virtual void						createBufferData		(VkPrimitiveTopology		topology,
+																 int						primitiveCount,
+																 VkIndexType				indexType,
+																 std::vector<deUint32>&		indexData,
+																 std::vector<Vertex4RGBA>&	vertexData) const;
+
+private:
+};
+
+class PrimitiveRestartTest : public InputAssemblyTest
+{
+public:
+										PrimitiveRestartTest	(tcu::TestContext&		testContext,
+																 const std::string&		name,
+																 const std::string&		description,
+																 VkPrimitiveTopology	primitiveTopology,
+																 VkIndexType			indexType);
+	virtual								~PrimitiveRestartTest	(void) {}
+
+protected:
+	virtual void						createBufferData		(VkPrimitiveTopology		topology,
+																 int						primitiveCount,
+																 VkIndexType				indexType,
+																 std::vector<deUint32>&		indexData,
+																 std::vector<Vertex4RGBA>&	vertexData) const;
+
+private:
+	bool								isRestartPrimitive		(int primitiveIndex) const;
+
+	std::vector<deUint32>				m_restartPrimitives;
+};
+
+class InputAssemblyInstance : public vkt::TestInstance
+{
+public:
+										InputAssemblyInstance	(Context&							context,
+																 VkPrimitiveTopology				primitiveTopology,
+																 bool								testPrimitiveRestart,
+																 VkIndexType						indexType,
+																 const std::vector<deUint32>&		indexBufferData,
+																 const std::vector<Vertex4RGBA>&	vertexBufferData);
+	virtual								~InputAssemblyInstance	(void);
+	virtual tcu::TestStatus				iterate					(void);
+
+private:
+	tcu::TestStatus						verifyImage				(void);
+	void								uploadIndexBufferData16	(deUint16* destPtr, const std::vector<deUint32>& indexBufferData);
+
+	VkPrimitiveTopology					m_primitiveTopology;
+	bool								m_primitiveRestartEnable;
+	VkIndexType							m_indexType;
+
+	Move<VkBuffer>						m_vertexBuffer;
+	std::vector<Vertex4RGBA>			m_vertices;
+	de::MovePtr<Allocation>				m_vertexBufferAlloc;
+
+	Move<VkBuffer>						m_indexBuffer;
+	std::vector<deUint32>				m_indices;
+	de::MovePtr<Allocation>				m_indexBufferAlloc;
+
+	const tcu::UVec2					m_renderSize;
+
+	const VkFormat						m_colorFormat;
+	VkImageCreateInfo					m_colorImageCreateInfo;
+	Move<VkImage>						m_colorImage;
+	de::MovePtr<Allocation>				m_colorImageAlloc;
+	Move<VkImageView>					m_colorAttachmentView;
+	Move<VkRenderPass>					m_renderPass;
+	Move<VkFramebuffer>					m_framebuffer;
+
+	Move<VkShaderModule>				m_vertexShaderModule;
+	Move<VkShaderModule>				m_fragmentShaderModule;
+
+	Move<VkPipelineLayout>				m_pipelineLayout;
+	Move<VkPipeline>					m_graphicsPipeline;
+
+	Move<VkCommandPool>					m_cmdPool;
+	Move<VkCommandBuffer>				m_cmdBuffer;
+
+	Move<VkFence>						m_fence;
+};
+
+
+// InputAssemblyTest
+
+const VkPrimitiveTopology InputAssemblyTest::s_primitiveTopologies[] =
+{
+	VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
+	VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
+	VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
+	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
+	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
+	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
+	VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
+	VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
+	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
+	VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY
+};
+
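+// Primitive restart is signalled with the maximum value representable by the active index type.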
+const deUint32 InputAssemblyTest::s_restartIndex32	= ~((deUint32)0u);
+const deUint16 InputAssemblyTest::s_restartIndex16	= ~((deUint16)0u);
+
+InputAssemblyTest::InputAssemblyTest (tcu::TestContext&		testContext,
+									  const std::string&	name,
+									  const std::string&	description,
+									  VkPrimitiveTopology	primitiveTopology,
+									  int					primitiveCount,
+									  bool					testPrimitiveRestart,
+									  VkIndexType			indexType)
+
+	: vkt::TestCase				(testContext, name, description)
+	, m_primitiveTopology		(primitiveTopology)
+	, m_primitiveCount			(primitiveCount)
+	, m_testPrimitiveRestart	(testPrimitiveRestart)
+	, m_indexType				(indexType)
+{
+}
+
+TestInstance* InputAssemblyTest::createInstance (Context& context) const
+{
+	std::vector<deUint32>		indexBufferData;
+	std::vector<Vertex4RGBA>	vertexBufferData;
+
+	createBufferData(m_primitiveTopology, m_primitiveCount, m_indexType, indexBufferData, vertexBufferData);
+
+	return new InputAssemblyInstance(context, m_primitiveTopology, m_testPrimitiveRestart, m_indexType, indexBufferData, vertexBufferData);
+}
+
+void InputAssemblyTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	std::ostringstream vertexSource;
+
+	vertexSource <<
+		"#version 310 es\n"
+		"layout(location = 0) in vec4 position;\n"
+		"layout(location = 1) in vec4 color;\n"
+		"layout(location = 0) out highp vec4 vtxColor;\n"
+		"void main (void)\n"
+		"{\n"
+		"	gl_Position = position;\n"
+		<< (m_primitiveTopology == VK_PRIMITIVE_TOPOLOGY_POINT_LIST ? "	gl_PointSize = 3.0;\n"
+																	: "" )
+		<< "	vtxColor = color;\n"
+		"}\n";
+
+	sourceCollections.glslSources.add("color_vert") << glu::VertexSource(vertexSource.str());
+
+	sourceCollections.glslSources.add("color_frag") << glu::FragmentSource(
+		"#version 310 es\n"
+		"layout(location = 0) in highp vec4 vtxColor;\n"
+		"layout(location = 0) out highp vec4 fragColor;\n"
+		"void main (void)\n"
+		"{\n"
+		"	fragColor = vtxColor;\n"
+		"}\n");
+}
+
+bool InputAssemblyTest::isRestartIndex (VkIndexType indexType, deUint32 indexValue)
+{
+	if (indexType == VK_INDEX_TYPE_UINT32)
+		return indexValue == s_restartIndex32;
+	else
+		return indexValue == s_restartIndex16;
+}
+
+deUint32 InputAssemblyTest::getRestartIndex (VkIndexType indexType)
+{
+	if (indexType == VK_INDEX_TYPE_UINT16)
+		return InputAssemblyTest::s_restartIndex16;
+	else
+		return InputAssemblyTest::s_restartIndex32;
+}
+
+
+// PrimitiveTopologyTest
+
+PrimitiveTopologyTest::PrimitiveTopologyTest (tcu::TestContext&		testContext,
+											  const std::string&	name,
+											  const std::string&	description,
+											  VkPrimitiveTopology	primitiveTopology)
+	: InputAssemblyTest	(testContext, name, description, primitiveTopology, 10, false, VK_INDEX_TYPE_UINT32)
+{
+}
+
+void PrimitiveTopologyTest::createBufferData (VkPrimitiveTopology topology, int primitiveCount, VkIndexType indexType, std::vector<deUint32>& indexData, std::vector<Vertex4RGBA>& vertexData) const
+{
+	DE_ASSERT(primitiveCount > 0);
+	DE_UNREF(indexType);
+
+	const tcu::Vec4				red						(1.0f, 0.0f, 0.0f, 1.0f);
+	const tcu::Vec4				green					(0.0f, 1.0f, 0.0f, 1.0f);
+	const float					border					= 0.2f;
+	const float					originX					= -1.0f + border;
+	const float					originY					= -1.0f + border;
+	const Vertex4RGBA			defaultVertex			= { tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f), green };
+	float						primitiveSizeY			= (2.0f - 2.0f * border);
+	float						primitiveSizeX;
+	std::vector<deUint32>		indices;
+	std::vector<Vertex4RGBA>	vertices;
+
+
+	// Calculate primitive size
+	switch (topology)
+	{
+		case VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
+			primitiveSizeX = (2.0f - 2.0f * border) / float(primitiveCount / 2 + primitiveCount % 2 - 1);
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
+		case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
+			primitiveSizeX = (2.0f - 2.0f * border) / float(primitiveCount - 1);
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+		case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+			primitiveSizeX = (2.0f - 2.0f * border) / float(primitiveCount / 2);
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
+			primitiveSizeX = (2.0f - 2.0f * border) / float(primitiveCount + primitiveCount / 2 + primitiveCount % 2 - 1);
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+			primitiveSizeX = (2.0f - 2.0f * border) / float(primitiveCount / 2 + primitiveCount % 2);
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
+			primitiveSizeX = 1.0f - border;
+			primitiveSizeY = 1.0f - border;
+			break;
+
+		default:
+			primitiveSizeX = 0.0f; // Garbage
+			DE_ASSERT(false);
+	}
+
+	switch (topology)
+	{
+		case VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				const Vertex4RGBA vertex =
+				{
+					tcu::Vec4(originX + float(primitiveNdx / 2) * primitiveSizeX, originY + float(primitiveNdx % 2) * primitiveSizeY, 0.0f, 1.0f),
+					red
+				};
+
+				vertices.push_back(vertex);
+				indices.push_back(primitiveNdx);
+			}
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				for (int vertexNdx = 0; vertexNdx < 2; vertexNdx++)
+				{
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX + float((primitiveNdx * 2 + vertexNdx) / 2) * primitiveSizeX, originY + float(vertexNdx % 2) * primitiveSizeY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back((primitiveNdx * 2 + vertexNdx));
+				}
+			}
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				if (primitiveNdx == 0)
+				{
+					Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX, originY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back(0);
+
+					vertex.position = tcu::Vec4(originX, originY + primitiveSizeY, 0.0f, 1.0f);
+					vertices.push_back(vertex);
+					indices.push_back(1);
+				}
+				else
+				{
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX + float((primitiveNdx + 1) / 2) * primitiveSizeX, originY + float((primitiveNdx + 1) % 2) * primitiveSizeY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back(primitiveNdx + 1);
+				}
+			}
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				for (int vertexNdx = 0; vertexNdx < 3; vertexNdx++)
+				{
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX + float((primitiveNdx * 3 + vertexNdx) / 2) * primitiveSizeX, originY + float((primitiveNdx * 3 + vertexNdx)% 2) * primitiveSizeY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back(primitiveNdx * 3 + vertexNdx);
+				}
+			}
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				if (primitiveNdx == 0)
+				{
+					for (int vertexNdx = 0; vertexNdx < 3; vertexNdx++)
+					{
+						const Vertex4RGBA vertex =
+						{
+							tcu::Vec4(originX + float(vertexNdx / 2) * primitiveSizeX, originY + float(vertexNdx % 2) * primitiveSizeY, 0.0f, 1.0f),
+							red
+						};
+
+						vertices.push_back(vertex);
+						indices.push_back(vertexNdx);
+					}
+				}
+				else
+				{
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX + float((primitiveNdx + 2) / 2) * primitiveSizeX, originY + float((primitiveNdx + 2) % 2) * primitiveSizeY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back(primitiveNdx + 2);
+				}
+			}
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
+		{
+			const float stepAngle = de::min(DE_PI * 0.5f, (2 * DE_PI) / float(primitiveCount));
+
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				if (primitiveNdx == 0)
+				{
+					Vertex4RGBA vertex =
+					{
+						tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back(0);
+
+					vertex.position = tcu::Vec4(primitiveSizeX, 0.0f, 0.0f, 1.0f);
+					vertices.push_back(vertex);
+					indices.push_back(1);
+
+					vertex.position = tcu::Vec4(primitiveSizeX * deFloatCos(stepAngle), primitiveSizeY * deFloatSin(stepAngle), 0.0f, 1.0f);
+					vertices.push_back(vertex);
+					indices.push_back(2);
+				}
+				else
+				{
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(primitiveSizeX * deFloatCos(stepAngle * float(primitiveNdx + 1)), primitiveSizeY * deFloatSin(stepAngle * float(primitiveNdx + 1)), 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back(primitiveNdx + 2);
+				}
+			}
+			break;
+		}
+
+		case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
+			vertices.push_back(defaultVertex);
+
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				indices.push_back(0);
+
+				for (int vertexNdx = 0; vertexNdx < 2; vertexNdx++)
+				{
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX + float((primitiveNdx * 2 + vertexNdx) / 2) * primitiveSizeX, originY + float(vertexNdx % 2) * primitiveSizeY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back(primitiveNdx * 2 + vertexNdx + 1);
+				}
+
+				indices.push_back(0);
+			}
+			break;
+
+
+		case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+			vertices.push_back(defaultVertex);
+			indices.push_back(0);
+
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				if (primitiveNdx == 0)
+				{
+					Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX, originY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back(1);
+
+					vertex.position = tcu::Vec4(originX, originY + primitiveSizeY, 0.0f, 1.0f);
+					vertices.push_back(vertex);
+					indices.push_back(2);
+				}
+				else
+				{
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX + float((primitiveNdx + 1) / 2) * primitiveSizeX, originY + float((primitiveNdx + 1) % 2) * primitiveSizeY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back(primitiveNdx + 2);
+				}
+			}
+
+			indices.push_back(0);
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
+			vertices.push_back(defaultVertex);
+
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				for (int vertexNdx = 0; vertexNdx < 3; vertexNdx++)
+				{
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX + float((primitiveNdx * 3 + vertexNdx) / 2) * primitiveSizeX, originY + float((primitiveNdx * 3 + vertexNdx)% 2) * primitiveSizeY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back(primitiveNdx * 3 + vertexNdx + 1);
+					indices.push_back(0);
+				}
+			}
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+			vertices.push_back(defaultVertex);
+
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				if (primitiveNdx == 0)
+				{
+					for (int vertexNdx = 0; vertexNdx < 3; vertexNdx++)
+					{
+						const Vertex4RGBA vertex =
+						{
+							tcu::Vec4(originX + float(vertexNdx / 2) * primitiveSizeX, originY + float(vertexNdx % 2) * primitiveSizeY, 0.0f, 1.0f),
+							red
+						};
+
+						vertices.push_back(vertex);
+						indices.push_back(vertexNdx + 1);
+						indices.push_back(0);
+					}
+				}
+				else
+				{
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX + float((primitiveNdx + 2) / 2) * primitiveSizeX, originY + float((primitiveNdx + 2) % 2) * primitiveSizeY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back(primitiveNdx + 2 + 1);
+					indices.push_back(0);
+				}
+			}
+			break;
+
+		default:
+			DE_ASSERT(false);
+			break;
+	}
+
+	vertexData	= vertices;
+	indexData	= indices;
+}
+
+
+// PrimitiveRestartTest
+
+PrimitiveRestartTest::PrimitiveRestartTest (tcu::TestContext&		testContext,
+											const std::string&		name,
+											const std::string&		description,
+											VkPrimitiveTopology		primitiveTopology,
+											VkIndexType				indexType)
+
+	: InputAssemblyTest	(testContext, name, description, primitiveTopology, 10, true, indexType)
+{
+	DE_ASSERT(primitiveTopology == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP ||
+			  primitiveTopology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP ||
+			  primitiveTopology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN ||
+			  primitiveTopology == VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY ||
+			  primitiveTopology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY);
+
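+	// Primitives 1 and 5 are replaced by a restart index when the index buffer is generated.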
+	deUint32 restartPrimitives[] = { 1, 5 };
+
+	m_restartPrimitives = std::vector<deUint32>(restartPrimitives, restartPrimitives + sizeof(restartPrimitives) / sizeof(deUint32));
+}
+
+void PrimitiveRestartTest::createBufferData (VkPrimitiveTopology topology, int primitiveCount, VkIndexType indexType, std::vector<deUint32>& indexData, std::vector<Vertex4RGBA>& vertexData) const
+{
+	DE_ASSERT(primitiveCount > 0);
+	DE_UNREF(indexType);
+
+	const tcu::Vec4				red						(1.0f, 0.0f, 0.0f, 1.0f);
+	const tcu::Vec4				green					(0.0f, 1.0f, 0.0f, 1.0f);
+	const float					border					= 0.2f;
+	const float					originX					= -1.0f + border;
+	const float					originY					= -1.0f + border;
+	const Vertex4RGBA			defaultVertex			= { tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f), green };
+	float						primitiveSizeY			= (2.0f - 2.0f * border);
+	float						primitiveSizeX;
+	bool						primitiveStart			= true;
+	std::vector<deUint32>		indices;
+	std::vector<Vertex4RGBA>	vertices;
+
+
+	// Calculate primitive size
+	switch (topology)
+	{
+		case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+		case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+			primitiveSizeX = (2.0f - 2.0f * border) / float(primitiveCount / 2);
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+			primitiveSizeX = (2.0f - 2.0f * border) / float(primitiveCount / 2 + primitiveCount % 2);
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
+			primitiveSizeX = 1.0f - border;
+			primitiveSizeY = 1.0f - border;
+			break;
+
+		default:
+			primitiveSizeX = 0.0f; // Garbage
+			DE_ASSERT(false);
+	}
+
+	switch (topology)
+	{
+		case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				if (isRestartPrimitive(primitiveNdx))
+				{
+					indices.push_back(InputAssemblyTest::getRestartIndex(indexType));
+					primitiveStart = true;
+				}
+				else
+				{
+					if (primitiveStart)
+					{
+						const Vertex4RGBA vertex =
+						{
+							tcu::Vec4(originX + float(primitiveNdx / 2) * primitiveSizeX, originY + float(primitiveNdx % 2) * primitiveSizeY, 0.0f, 1.0f),
+							red
+						};
+
+						vertices.push_back(vertex);
+						indices.push_back((deUint32)vertices.size() - 1);
+
+						primitiveStart = false;
+					}
+
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX + float((primitiveNdx + 1) / 2) * primitiveSizeX, originY + float((primitiveNdx + 1) % 2) * primitiveSizeY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back((deUint32)vertices.size() - 1);
+				}
+			}
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
+		{
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				if (isRestartPrimitive(primitiveNdx))
+				{
+					indices.push_back(InputAssemblyTest::getRestartIndex(indexType));
+					primitiveStart = true;
+				}
+				else
+				{
+					if (primitiveStart)
+					{
+						for (int vertexNdx = 0; vertexNdx < 2; vertexNdx++)
+						{
+							const Vertex4RGBA vertex =
+							{
+								tcu::Vec4(originX + float((primitiveNdx + vertexNdx) / 2) * primitiveSizeX, originY + float((primitiveNdx + vertexNdx) % 2) * primitiveSizeY, 0.0f, 1.0f),
+								red
+							};
+
+							vertices.push_back(vertex);
+							indices.push_back((deUint32)vertices.size() - 1);
+						}
+
+						primitiveStart = false;
+					}
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX + float((primitiveNdx + 2) / 2) * primitiveSizeX, originY + float((primitiveNdx + 2) % 2) * primitiveSizeY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back((deUint32)vertices.size() - 1);
+				}
+			}
+			break;
+		}
+
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
+		{
+			const float stepAngle = de::min(DE_PI * 0.5f, (2 * DE_PI) / float(primitiveCount));
+
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				if (isRestartPrimitive(primitiveNdx))
+				{
+					indices.push_back(InputAssemblyTest::getRestartIndex(indexType));
+					primitiveStart = true;
+				}
+				else
+				{
+					if (primitiveStart)
+					{
+						Vertex4RGBA vertex =
+						{
+							tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f),
+							red
+						};
+
+						vertices.push_back(vertex);
+						indices.push_back((deUint32)vertices.size() - 1);
+
+						vertex.position = tcu::Vec4(primitiveSizeX * deFloatCos(stepAngle * float(primitiveNdx)), primitiveSizeY * deFloatSin(stepAngle * float(primitiveNdx)), 0.0f, 1.0f);
+						vertices.push_back(vertex);
+						indices.push_back((deUint32)vertices.size() - 1);
+
+						primitiveStart = false;
+					}
+
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(primitiveSizeX * deFloatCos(stepAngle * float(primitiveNdx + 1)), primitiveSizeY * deFloatSin(stepAngle * float(primitiveNdx + 1)), 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back((deUint32)vertices.size() - 1);
+				}
+			}
+			break;
+		}
+
+		case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+			vertices.push_back(defaultVertex);
+
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				if (isRestartPrimitive(primitiveNdx))
+				{
+					indices.push_back(0);
+					indices.push_back(InputAssemblyTest::getRestartIndex(indexType));
+					primitiveStart = true;
+				}
+				else
+				{
+					if (primitiveStart)
+					{
+						indices.push_back(0);
+
+						const Vertex4RGBA vertex =
+						{
+							tcu::Vec4(originX + float(primitiveNdx / 2) * primitiveSizeX, originY + float(primitiveNdx % 2) * primitiveSizeY, 0.0f, 1.0f),
+							red
+						};
+
+						vertices.push_back(vertex);
+						indices.push_back((deUint32)vertices.size() - 1);
+
+						primitiveStart = false;
+					}
+
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX + float((primitiveNdx + 1) / 2) * primitiveSizeX, originY + float((primitiveNdx + 1) % 2) * primitiveSizeY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back((deUint32)vertices.size() - 1);
+				}
+			}
+
+			indices.push_back(0);
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+			vertices.push_back(defaultVertex);
+
+			for (int primitiveNdx = 0; primitiveNdx < primitiveCount; primitiveNdx++)
+			{
+				if (isRestartPrimitive(primitiveNdx))
+				{
+					indices.push_back(InputAssemblyTest::getRestartIndex(indexType));
+					primitiveStart = true;
+				}
+				else
+				{
+					if (primitiveStart)
+					{
+						for (int vertexNdx = 0; vertexNdx < 2; vertexNdx++)
+						{
+							const Vertex4RGBA vertex =
+							{
+								tcu::Vec4(originX + float((primitiveNdx + vertexNdx) / 2) * primitiveSizeX, originY + float((primitiveNdx + vertexNdx) % 2) * primitiveSizeY, 0.0f, 1.0f),
+								red
+							};
+
+							vertices.push_back(vertex);
+							indices.push_back((deUint32)vertices.size() - 1);
+							indices.push_back(0);
+						}
+
+						primitiveStart = false;
+					}
+
+					const Vertex4RGBA vertex =
+					{
+						tcu::Vec4(originX + float((primitiveNdx + 2) / 2) * primitiveSizeX, originY + float((primitiveNdx + 2) % 2) * primitiveSizeY, 0.0f, 1.0f),
+						red
+					};
+
+					vertices.push_back(vertex);
+					indices.push_back((deUint32)vertices.size() - 1);
+					indices.push_back(0);
+				}
+			}
+			break;
+
+		default:
+			DE_ASSERT(false);
+			break;
+	}
+
+	vertexData	= vertices;
+	indexData	= indices;
+}
+
+bool PrimitiveRestartTest::isRestartPrimitive (int primitiveIndex) const
+{
+	return std::find(m_restartPrimitives.begin(), m_restartPrimitives.end(), primitiveIndex) != m_restartPrimitives.end();
+}
+
+
+// InputAssemblyInstance
+
+InputAssemblyInstance::InputAssemblyInstance (Context&							context,
+											  VkPrimitiveTopology				primitiveTopology,
+											  bool								testPrimitiveRestart,
+											  VkIndexType						indexType,
+											  const std::vector<deUint32>&		indexBufferData,
+											  const std::vector<Vertex4RGBA>&	vertexBufferData)
+
+	: vkt::TestInstance			(context)
+	, m_primitiveTopology		(primitiveTopology)
+	, m_primitiveRestartEnable	(testPrimitiveRestart)
+	, m_indexType				(indexType)
+	, m_vertices				(vertexBufferData)
+	, m_indices					(indexBufferData)
+	, m_renderSize				((primitiveTopology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN) ? tcu::UVec2(32, 32) : tcu::UVec2(64, 16))
+	, m_colorFormat				(VK_FORMAT_R8G8B8A8_UNORM)
+{
+	const DeviceInterface&			vk						= context.getDeviceInterface();
+	const VkDevice					vkDevice				= context.getDevice();
+	const deUint32					queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator					memAlloc				(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+	const VkComponentMapping		componentMappingRGBA	= { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A };
+
+	switch (m_primitiveTopology)
+	{
+		case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:
+		case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:
+			if (!context.getDeviceFeatures().geometryShader)
+				throw tcu::NotSupportedError("Geometry shaders are not supported");
+			break;
+
+		case VK_PRIMITIVE_TOPOLOGY_PATCH_LIST:
+			if (!context.getDeviceFeatures().tessellationShader)
+				throw tcu::NotSupportedError("Tessellation shaders are not supported");
+			break;
+
+		default:
+			break;
+	}
+
+	// Create color image
+	{
+		const VkImageCreateInfo colorImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,										// VkStructureType			sType;
+			DE_NULL,																	// const void*				pNext;
+			0u,																			// VkImageCreateFlags		flags;
+			VK_IMAGE_TYPE_2D,															// VkImageType				imageType;
+			m_colorFormat,																// VkFormat					format;
+			{ m_renderSize.x(), m_renderSize.y(), 1u },									// VkExtent3D				extent;
+			1u,																			// deUint32					mipLevels;
+			1u,																			// deUint32					arrayLayers;
+			VK_SAMPLE_COUNT_1_BIT,														// VkSampleCountFlagBits	samples;
+			VK_IMAGE_TILING_OPTIMAL,													// VkImageTiling			tiling;
+			VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,		// VkImageUsageFlags		usage;
+			VK_SHARING_MODE_EXCLUSIVE,													// VkSharingMode			sharingMode;
+			1u,																			// deUint32					queueFamilyIndexCount;
+			&queueFamilyIndex,															// const deUint32*			pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED													// VkImageLayout			initialLayout;
+		};
+
+		m_colorImageCreateInfo	= colorImageParams;
+		m_colorImage			= createImage(vk, vkDevice, &m_colorImageCreateInfo);
+
+		// Allocate and bind color image memory
+		m_colorImageAlloc		= memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_colorImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_colorImage, m_colorImageAlloc->getMemory(), m_colorImageAlloc->getOffset()));
+	}
+
+	// Create color attachment view
+	{
+		const VkImageViewCreateInfo colorAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			0u,													// VkImageViewCreateFlags	flags;
+			*m_colorImage,										// VkImage					image;
+			VK_IMAGE_VIEW_TYPE_2D,								// VkImageViewType			viewType;
+			m_colorFormat,										// VkFormat					format;
+			componentMappingRGBA,								// VkComponentMapping		components;
+			{ VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u },		// VkImageSubresourceRange	subresourceRange;
+		};
+
+		m_colorAttachmentView = createImageView(vk, vkDevice, &colorAttachmentViewParams);
+	}
+
+	// Create render pass
+	{
+		const VkAttachmentDescription colorAttachmentDescription =
+		{
+			0u,													// VkAttachmentDescriptionFlags		flags;
+			m_colorFormat,										// VkFormat							format;
+			VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits			samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp				loadOp;
+			VK_ATTACHMENT_STORE_OP_STORE,						// VkAttachmentStoreOp				storeOp;
+			VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// VkAttachmentLoadOp				stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp				stencilStoreOp;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout					initialLayout;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout					finalLayout;
+		};
+
+		const VkAttachmentReference colorAttachmentReference =
+		{
+			0u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout	layout;
+		};
+
+		const VkSubpassDescription subpassDescription =
+		{
+			0u,													// VkSubpassDescriptionFlags	flags;
+			VK_PIPELINE_BIND_POINT_GRAPHICS,					// VkPipelineBindPoint			pipelineBindPoint;
+			0u,													// deUint32						inputAttachmentCount;
+			DE_NULL,											// const VkAttachmentReference*	pInputAttachments;
+			1u,													// deUint32						colorAttachmentCount;
+			&colorAttachmentReference,							// const VkAttachmentReference*	pColorAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pResolveAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pDepthStencilAttachment;
+			0u,													// deUint32						preserveAttachmentCount;
+			DE_NULL												// const VkAttachmentReference*	pPreserveAttachments;
+		};
+
+		const VkRenderPassCreateInfo renderPassParams =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,			// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkRenderPassCreateFlags			flags;
+			1u,													// deUint32							attachmentCount;
+			&colorAttachmentDescription,						// const VkAttachmentDescription*	pAttachments;
+			1u,													// deUint32							subpassCount;
+			&subpassDescription,								// const VkSubpassDescription*		pSubpasses;
+			0u,													// deUint32							dependencyCount;
+			DE_NULL												// const VkSubpassDependency*		pDependencies;
+		};
+
+		m_renderPass = createRenderPass(vk, vkDevice, &renderPassParams);
+	}
+
+	// Create framebuffer
+	{
+		const VkFramebufferCreateInfo framebufferParams =
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			0u,													// VkFramebufferCreateFlags	flags;
+			*m_renderPass,										// VkRenderPass				renderPass;
+			1u,													// deUint32					attachmentCount;
+			&m_colorAttachmentView.get(),						// const VkImageView*		pAttachments;
+			(deUint32)m_renderSize.x(),							// deUint32					width;
+			(deUint32)m_renderSize.y(),							// deUint32					height;
+			1u													// deUint32					layers;
+		};
+
+		m_framebuffer = createFramebuffer(vk, vkDevice, &framebufferParams);
+	}
+
+	// Create pipeline layout
+	{
+		const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkPipelineLayoutCreateFlags		flags;
+			0u,													// deUint32							setLayoutCount;
+			DE_NULL,											// const VkDescriptorSetLayout*		pSetLayouts;
+			0u,													// deUint32							pushConstantRangeCount;
+			DE_NULL												// const VkPushConstantRange*		pPushConstantRanges;
+		};
+
+		m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	m_vertexShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("color_vert"), 0);
+	m_fragmentShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("color_frag"), 0);
+
+	// Create pipeline
+	{
+		const VkPipelineShaderStageCreateInfo shaderStageParams[2] =
+		{
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				0u,															// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_VERTEX_BIT,									// VkShaderStageFlagBits				stage;
+				*m_vertexShaderModule,										// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			},
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				0u,															// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_FRAGMENT_BIT,								// VkShaderStageFlagBits				stage;
+				*m_fragmentShaderModule,									// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			}
+		};
+
+		const VkVertexInputBindingDescription vertexInputBindingDescription =
+		{
+			0u,								// deUint32					binding;
+			sizeof(Vertex4RGBA),			// deUint32					stride;
+			VK_VERTEX_INPUT_RATE_VERTEX		// VkVertexInputRate		inputRate;
+		};
+
+		const VkVertexInputAttributeDescription vertexInputAttributeDescriptions[2] =
+		{
+			{
+				0u,									// deUint32	location;
+				0u,									// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat	format;
+				0u									// deUint32	offset;
+			},
+			{
+				1u,									// deUint32	location;
+				0u,									// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat	format;
+				DE_OFFSET_OF(Vertex4RGBA, color),	// deUint32	offset;
+			}
+		};
+
+		const VkPipelineVertexInputStateCreateInfo vertexInputStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,		// VkStructureType								sType;
+			DE_NULL,														// const void*									pNext;
+			0u,																// VkPipelineVertexInputStateCreateFlags		flags;
+			1u,																// deUint32										vertexBindingDescriptionCount;
+			&vertexInputBindingDescription,									// const VkVertexInputBindingDescription*		pVertexBindingDescriptions;
+			2u,																// deUint32										vertexAttributeDescriptionCount;
+			vertexInputAttributeDescriptions								// const VkVertexInputAttributeDescription*		pVertexAttributeDescriptions;
+		};
+
+		const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineInputAssemblyStateCreateFlags	flags;
+			m_primitiveTopology,											// VkPrimitiveTopology						topology;
+			m_primitiveRestartEnable										// VkBool32									primitiveRestartEnable;
+		};
+
+		const VkViewport viewport =
+		{
+			0.0f,						// float	x;
+			0.0f,						// float	y;
+			(float)m_renderSize.x(),	// float	width;
+			(float)m_renderSize.y(),	// float	height;
+			0.0f,						// float	minDepth;
+			1.0f						// float	maxDepth;
+		};
+
+		const VkRect2D scissor = { { 0, 0 }, { m_renderSize.x(), m_renderSize.y() } };
+
+		const VkPipelineViewportStateCreateInfo viewportStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,			// VkStructureType						sType;
+			DE_NULL,														// const void*							pNext;
+			0u,																// VkPipelineViewportStateCreateFlags	flags;
+			1u,																// deUint32								viewportCount;
+			&viewport,														// const VkViewport*					pViewports;
+			1u,																// deUint32								scissorCount;
+			&scissor,														// const VkRect2D*						pScissors;
+		};
+
+		const VkPipelineRasterizationStateCreateInfo rasterStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineRasterizationStateCreateFlags	flags;
+			false,															// VkBool32									depthClampEnable;
+			false,															// VkBool32									rasterizerDiscardEnable;
+			VK_POLYGON_MODE_FILL,											// VkPolygonMode							polygonMode;
+			VK_CULL_MODE_NONE,												// VkCullModeFlags							cullMode;
+			VK_FRONT_FACE_COUNTER_CLOCKWISE,								// VkFrontFace								frontFace;
+			VK_FALSE,														// VkBool32									depthBiasEnable;
+			0.0f,															// float									depthBiasConstantFactor;
+			0.0f,															// float									depthBiasClamp;
+			0.0f,															// float									depthBiasSlopeFactor;
+			1.0f															// float									lineWidth;
+		};
+
+		const VkPipelineColorBlendAttachmentState colorBlendAttachmentState =
+		{
+			false,															// VkBool32					blendEnable;
+			VK_BLEND_FACTOR_ONE,											// VkBlendFactor			srcColorBlendFactor;
+			VK_BLEND_FACTOR_ZERO,											// VkBlendFactor			dstColorBlendFactor;
+			VK_BLEND_OP_ADD,												// VkBlendOp				colorBlendOp;
+			VK_BLEND_FACTOR_ONE,											// VkBlendFactor			srcAlphaBlendFactor;
+			VK_BLEND_FACTOR_ZERO,											// VkBlendFactor			dstAlphaBlendFactor;
+			VK_BLEND_OP_ADD,												// VkBlendOp				alphaBlendOp;
+			VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |			// VkColorComponentFlags	colorWriteMask;
+				VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT
+		};
+
+		const VkPipelineColorBlendStateCreateInfo colorBlendStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	// VkStructureType								sType;
+			DE_NULL,													// const void*									pNext;
+			0u,															// VkPipelineColorBlendStateCreateFlags			flags;
+			false,														// VkBool32										logicOpEnable;
+			VK_LOGIC_OP_COPY,											// VkLogicOp									logicOp;
+			1u,															// deUint32										attachmentCount;
+			&colorBlendAttachmentState,									// const VkPipelineColorBlendAttachmentState*	pAttachments;
+			{ 0.0f, 0.0f, 0.0f, 0.0f }									// float										blendConstants[4];
+		};
+
+		const VkPipelineMultisampleStateCreateInfo multisampleStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+			VK_SAMPLE_COUNT_1_BIT,										// VkSampleCountFlagBits					rasterizationSamples;
+			false,														// VkBool32									sampleShadingEnable;
+			0.0f,														// float									minSampleShading;
+			DE_NULL,													// const VkSampleMask*						pSampleMask;
+			false,														// VkBool32									alphaToCoverageEnable;
+			false														// VkBool32									alphaToOneEnable;
+		};
+
+		VkPipelineDepthStencilStateCreateInfo depthStencilStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineDepthStencilStateCreateFlags	flags;
+			false,														// VkBool32									depthTestEnable;
+			false,														// VkBool32									depthWriteEnable;
+			VK_COMPARE_OP_LESS,											// VkCompareOp								depthCompareOp;
+			false,														// VkBool32									depthBoundsTestEnable;
+			false,														// VkBool32									stencilTestEnable;
+			// VkStencilOpState	front;
+			{
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	failOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	passOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	depthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	compareOp;
+				0u,						// deUint32		compareMask;
+				0u,						// deUint32		writeMask;
+				0u,						// deUint32		reference;
+			},
+			// VkStencilOpState	back;
+			{
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	failOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	passOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	depthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	compareOp;
+				0u,						// deUint32		compareMask;
+				0u,						// deUint32		writeMask;
+				0u,						// deUint32		reference;
+			},
+			-1.0f,														// float			minDepthBounds;
+			+1.0f														// float			maxDepthBounds;
+		};
+
+		const VkPipelineDynamicStateCreateInfo dynamicStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,		// VkStructureType						sType;
+			DE_NULL,													// const void*							pNext;
+			0u,															// VkPipelineDynamicStateCreateFlags	flags;
+			0u,															// deUint32								dynamicStateCount;
+			DE_NULL														// const VkDynamicState*				pDynamicStates;
+		};
+
+		const VkGraphicsPipelineCreateInfo graphicsPipelineParams =
+		{
+			VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,	// VkStructureType									sType;
+			DE_NULL,											// const void*										pNext;
+			0u,													// VkPipelineCreateFlags							flags;
+			2u,													// deUint32											stageCount;
+			shaderStageParams,									// const VkPipelineShaderStageCreateInfo*			pStages;
+			&vertexInputStateParams,							// const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+			&inputAssemblyStateParams,							// const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+			DE_NULL,											// const VkPipelineTessellationStateCreateInfo*		pTessellationState;
+			&viewportStateParams,								// const VkPipelineViewportStateCreateInfo*			pViewportState;
+			&rasterStateParams,									// const VkPipelineRasterizationStateCreateInfo*	pRasterizationState;
+			&multisampleStateParams,							// const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+			&depthStencilStateParams,							// const VkPipelineDepthStencilStateCreateInfo*		pDepthStencilState;
+			&colorBlendStateParams,								// const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+			&dynamicStateParams,								// const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+			*m_pipelineLayout,									// VkPipelineLayout									layout;
+			*m_renderPass,										// VkRenderPass										renderPass;
+			0u,													// deUint32											subpass;
+			0u,													// VkPipeline										basePipelineHandle;
+			0u													// deInt32											basePipelineIndex;
+		};
+
+		m_graphicsPipeline = createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+	}
+
+	// Create vertex and index buffer
+	{
+		const VkBufferCreateInfo indexBufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			m_indices.size() * sizeof(deUint32),		// VkDeviceSize			size;
+			VK_BUFFER_USAGE_INDEX_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		const VkBufferCreateInfo vertexBufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			m_vertices.size() * sizeof(Vertex4RGBA),	// VkDeviceSize			size;
+			VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_indexBuffer		= createBuffer(vk, vkDevice, &indexBufferParams);
+		m_indexBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_indexBuffer), MemoryRequirement::HostVisible);
+
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_indexBuffer, m_indexBufferAlloc->getMemory(), m_indexBufferAlloc->getOffset()));
+
+		m_vertexBuffer		= createBuffer(vk, vkDevice, &vertexBufferParams);
+		m_vertexBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_vertexBuffer), MemoryRequirement::HostVisible);
+
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_vertexBuffer, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset()));
+
+		// Load indices into index buffer
+		if (m_indexType == VK_INDEX_TYPE_UINT32)
+		{
+			deMemcpy(m_indexBufferAlloc->getHostPtr(), m_indices.data(), m_indices.size() * sizeof(deUint32));
+		}
+		else // m_indexType == VK_INDEX_TYPE_UINT16
+		{
+			uploadIndexBufferData16((deUint16*)m_indexBufferAlloc->getHostPtr(), m_indices);
+		}
+
+		// Load vertices into vertex buffer
+		deMemcpy(m_vertexBufferAlloc->getHostPtr(), m_vertices.data(), m_vertices.size() * sizeof(Vertex4RGBA));
+
+		// Flush host writes so the device sees the index and vertex data (the allocations are host-visible but not necessarily coherent)
+		const VkMappedMemoryRange flushMemoryRanges[] =
+		{
+			{
+				VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,	// VkStructureType	sType;
+				DE_NULL,								// const void*		pNext;
+				m_indexBufferAlloc->getMemory(),		// VkDeviceMemory	memory;
+				m_indexBufferAlloc->getOffset(),		// VkDeviceSize		offset;
+				indexBufferParams.size					// VkDeviceSize		size;
+			},
+			{
+				VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,	// VkStructureType	sType;
+				DE_NULL,								// const void*		pNext;
+				m_vertexBufferAlloc->getMemory(),		// VkDeviceMemory	memory;
+				m_vertexBufferAlloc->getOffset(),		// VkDeviceSize		offset;
+				vertexBufferParams.size					// VkDeviceSize		size;
+			},
+		};
+
+		VK_CHECK(vk.flushMappedMemoryRanges(vkDevice, 2u, flushMemoryRanges));
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,										// const void*					pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCommandPoolCreateFlags		flags;
+			queueFamilyIndex								// deUint32						queueFamilyIndex;
+		};
+
+		m_cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u												// deUint32					bufferCount;
+		};
+
+		const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+			DE_NULL,										// const void*						pNext;
+			0u,												// VkCommandBufferUsageFlags		flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,	// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+		};
+
+		const VkClearValue attachmentClearValue = defaultClearValue(m_colorFormat);
+
+		const VkRenderPassBeginInfo renderPassBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,				// VkStructureType		sType;
+			DE_NULL,												// const void*			pNext;
+			*m_renderPass,											// VkRenderPass			renderPass;
+			*m_framebuffer,											// VkFramebuffer		framebuffer;
+			{ { 0, 0 } , { m_renderSize.x(), m_renderSize.y() } },	// VkRect2D				renderArea;
+			1u,														// deUint32				clearValueCount;
+			&attachmentClearValue									// const VkClearValue*	pClearValues;
+		};
+
+		m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
+
+		VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+		vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+		const VkDeviceSize vertexBufferOffset = 0;
+
+		vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipeline);
+		vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), &vertexBufferOffset);
+		vk.cmdBindIndexBuffer(*m_cmdBuffer, *m_indexBuffer, 0, m_indexType);
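+		// Draw every test index in one call: indexCount, instanceCount, firstIndex, vertexOffset, firstInstance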
+		vk.cmdDrawIndexed(*m_cmdBuffer, (deUint32)m_indices.size(), 1, 0, 0, 0);
+
+		vk.cmdEndRenderPass(*m_cmdBuffer);
+		VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+
+		m_fence = createFence(vk, vkDevice, &fenceParams);
+	}
+}
+
+InputAssemblyInstance::~InputAssemblyInstance (void)
+{
+}
+
+tcu::TestStatus InputAssemblyInstance::iterate (void)
+{
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const VkQueue				queue		= m_context.getUniversalQueue();
+	const VkSubmitInfo			submitInfo	=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity */));
+
+	return verifyImage();
+}
+
+tcu::TestStatus InputAssemblyInstance::verifyImage (void)
+{
+	const tcu::TextureFormat	tcuColorFormat		= mapVkFormat(m_colorFormat);
+	const tcu::TextureFormat	tcuStencilFormat	= tcu::TextureFormat();
+	const ColorVertexShader		vertexShader;
+	const ColorFragmentShader	fragmentShader		(tcuColorFormat, tcuStencilFormat);
+	const rr::Program			program				(&vertexShader, &fragmentShader);
+	ReferenceRenderer			refRenderer			(m_renderSize.x(), m_renderSize.y(), 1, tcuColorFormat, tcuStencilFormat, &program);
+	bool						compareOk			= false;
+
+	// Render reference image
+	{
+		const rr::PrimitiveType		topology	= mapVkPrimitiveTopology(m_primitiveTopology);
+		rr::RenderState				renderState	(refRenderer.getViewportState());
+
+		if (m_primitiveTopology == VK_PRIMITIVE_TOPOLOGY_POINT_LIST)
+			renderState.point.pointSize = 3.0f;
+
+		if (m_primitiveRestartEnable)
+		{
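+			// Emulate primitive restart for the reference renderer: split the index list at restart indices and draw each run separately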
+			std::vector<deUint32> indicesRange;
+
+			for (size_t indexNdx = 0; indexNdx < m_indices.size(); indexNdx++)
+			{
+				const bool isRestart = InputAssemblyTest::isRestartIndex(m_indexType, m_indices[indexNdx]);
+
+				if (!isRestart)
+					indicesRange.push_back(m_indices[indexNdx]);
+
+				if (isRestart || indexNdx == (m_indices.size() - 1))
+				{
+					// Draw the range of indices found so far
+
+					std::vector<Vertex4RGBA> nonIndexedVertices;
+					for (size_t i = 0; i < indicesRange.size(); i++)
+						nonIndexedVertices.push_back(m_vertices[indicesRange[i]]);
+
+					refRenderer.draw(renderState, topology, nonIndexedVertices);
+					indicesRange.clear();
+				}
+			}
+		}
+		else
+		{
+			std::vector<Vertex4RGBA> nonIndexedVertices;
+			for (size_t i = 0; i < m_indices.size(); i++)
+				nonIndexedVertices.push_back(m_vertices[m_indices[i]]);
+
+			refRenderer.draw(renderState, topology, nonIndexedVertices);
+		}
+	}
+
+	// Compare result with reference image
+	{
+		const DeviceInterface&				vk					= m_context.getDeviceInterface();
+		const VkDevice						vkDevice			= m_context.getDevice();
+		const VkQueue						queue				= m_context.getUniversalQueue();
+		const deUint32						queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+		SimpleAllocator						allocator			(vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
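+		// Read back the rendered color attachment into a host-accessible texture level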
+		de::UniquePtr<tcu::TextureLevel>	result				(readColorAttachment(vk, vkDevice, queue, queueFamilyIndex, allocator, *m_colorImage, m_colorFormat, m_renderSize).release());
+
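+		// Allow a small per-channel threshold and a one-pixel position deviation when comparing against the reference image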
+		compareOk = tcu::intThresholdPositionDeviationCompare(m_context.getTestContext().getLog(),
+															  "IntImageCompare",
+															  "Image comparison",
+															  refRenderer.getAccess(),
+															  result->getAccess(),
+															  tcu::UVec4(2, 2, 2, 2),
+															  tcu::IVec3(1, 1, 0),
+															  true,
+															  tcu::COMPARE_LOG_RESULT);
+	}
+
+	if (compareOk)
+		return tcu::TestStatus::pass("Result image matches reference");
+	else
+		return tcu::TestStatus::fail("Image mismatch");
+}
+
+void InputAssemblyInstance::uploadIndexBufferData16	(deUint16* destPtr, const std::vector<deUint32>& indexBufferData)
+{
+	for (size_t i = 0; i < indexBufferData.size(); i++)
+	{
+		DE_ASSERT(indexBufferData[i] <= 0xFFFF);
+		destPtr[i] = (deUint16)indexBufferData[i];
+	}
+}
+
+
+// Utilities for test names
+
+std::string getPrimitiveTopologyCaseName (VkPrimitiveTopology topology)
+{
+	const std::string  fullName = getPrimitiveTopologyName(topology);
+
+	DE_ASSERT(de::beginsWith(fullName, "VK_PRIMITIVE_TOPOLOGY_"));
+
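+	// Strip the "VK_PRIMITIVE_TOPOLOGY_" prefix (22 characters) and use the lowercased remainder as the case name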
+	return de::toLower(fullName.substr(22));
+}
+
+de::MovePtr<tcu::TestCaseGroup> createPrimitiveTopologyTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> primitiveTopologyTests (new tcu::TestCaseGroup(testCtx, "primitive_topology", ""));
+
+	for (int topologyNdx = 0; topologyNdx < DE_LENGTH_OF_ARRAY(InputAssemblyTest::s_primitiveTopologies); topologyNdx++)
+	{
+		const VkPrimitiveTopology topology = InputAssemblyTest::s_primitiveTopologies[topologyNdx];
+
+		primitiveTopologyTests->addChild(new PrimitiveTopologyTest(testCtx,
+																   getPrimitiveTopologyCaseName(topology),
+																   "",
+																   topology));
+	}
+
+	return primitiveTopologyTests;
+}
+
+de::MovePtr<tcu::TestCaseGroup> createPrimitiveRestartTests (tcu::TestContext& testCtx)
+{
+	const VkPrimitiveTopology primitiveRestartTopologies[] =
+	{
+		VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
+		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
+		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
+		VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
+		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY
+	};
+
+	de::MovePtr<tcu::TestCaseGroup> primitiveRestartTests (new tcu::TestCaseGroup(testCtx, "primitive_restart", "Restarts indices of strip and fan primitive topologies"));
+
+	de::MovePtr<tcu::TestCaseGroup> indexUint16Tests (new tcu::TestCaseGroup(testCtx, "index_type_uint16", ""));
+	de::MovePtr<tcu::TestCaseGroup> indexUint32Tests (new tcu::TestCaseGroup(testCtx, "index_type_uint32", ""));
+
+	for (int topologyNdx = 0; topologyNdx < DE_LENGTH_OF_ARRAY(primitiveRestartTopologies); topologyNdx++)
+	{
+		const VkPrimitiveTopology topology = primitiveRestartTopologies[topologyNdx];
+
+		indexUint16Tests->addChild(new PrimitiveRestartTest(testCtx,
+															getPrimitiveTopologyCaseName(topology),
+															"",
+															topology,
+															VK_INDEX_TYPE_UINT16));
+
+		indexUint32Tests->addChild(new PrimitiveRestartTest(testCtx,
+															getPrimitiveTopologyCaseName(topology),
+															"",
+															topology,
+															VK_INDEX_TYPE_UINT32));
+	}
+
+	primitiveRestartTests->addChild(indexUint16Tests.release());
+	primitiveRestartTests->addChild(indexUint32Tests.release());
+
+	return primitiveRestartTests;
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createInputAssemblyTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>		inputAssemblyTests (new tcu::TestCaseGroup(testCtx, "input_assembly", "Input assembly tests"));
+
+	inputAssemblyTests->addChild(createPrimitiveTopologyTests(testCtx).release());
+	inputAssemblyTests->addChild(createPrimitiveRestartTests(testCtx).release());
+
+	return inputAssemblyTests.release();
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineInputAssemblyTests.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineInputAssemblyTests.hpp
new file mode 100644
index 0000000..894a31b
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineInputAssemblyTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTPIPELINEINPUTASSEMBLYTESTS_HPP
+#define _VKTPIPELINEINPUTASSEMBLYTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Input Assembly Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+tcu::TestCaseGroup* createInputAssemblyTests (tcu::TestContext& testCtx);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINEINPUTASSEMBLYTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineMultisampleTests.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineMultisampleTests.cpp
new file mode 100644
index 0000000..9acc023
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineMultisampleTests.cpp
@@ -0,0 +1,2023 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Multisample Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineMultisampleTests.hpp"
+#include "vktPipelineClearUtil.hpp"
+#include "vktPipelineImageUtil.hpp"
+#include "vktPipelineVertexUtil.hpp"
+#include "vktPipelineReferenceRenderer.hpp"
+#include "vktTestCase.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "tcuImageCompare.hpp"
+#include "deUniquePtr.hpp"
+#include "deStringUtil.hpp"
+#include "deMemory.h"
+
+#include <sstream>
+#include <vector>
+#include <map>
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+
+namespace
+{
+enum GeometryType
+{
+	GEOMETRY_TYPE_OPAQUE_TRIANGLE,
+	GEOMETRY_TYPE_OPAQUE_LINE,
+	GEOMETRY_TYPE_OPAQUE_POINT,
+	GEOMETRY_TYPE_OPAQUE_QUAD,
+	GEOMETRY_TYPE_TRANSLUCENT_QUAD,
+	GEOMETRY_TYPE_INVISIBLE_QUAD,
+	GEOMETRY_TYPE_GRADIENT_QUAD
+};
+
+
+bool									isSupportedSampleCount				(const InstanceInterface& instanceInterface, VkPhysicalDevice physicalDevice, VkSampleCountFlagBits rasterizationSamples);
+VkPipelineColorBlendAttachmentState		getDefaultColorBlendAttachmentState	(void);
+deUint32								getUniqueColorsCount				(const tcu::ConstPixelBufferAccess& image);
+void									initMultisamplePrograms				(SourceCollections& sources, GeometryType geometryType);
+
+class MultisampleTest : public vkt::TestCase
+{
+public:
+
+												MultisampleTest						(tcu::TestContext&								testContext,
+																					 const std::string&								name,
+																					 const std::string&								description,
+																					 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																					 const VkPipelineColorBlendAttachmentState&		blendState,
+																					 GeometryType									geometryType);
+	virtual										~MultisampleTest					(void);
+
+	virtual void								initPrograms						(SourceCollections& programCollection) const;
+	virtual TestInstance*						createInstance						(Context& context) const;
+
+protected:
+	virtual TestInstance*						createMultisampleTestInstance		(Context&										context,
+																					 VkPrimitiveTopology							topology,
+																					 const std::vector<Vertex4RGBA>&				vertices,
+																					 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																					 const VkPipelineColorBlendAttachmentState&		colorBlendState) const = 0;
+	VkPipelineMultisampleStateCreateInfo		m_multisampleStateParams;
+	const VkPipelineColorBlendAttachmentState	m_colorBlendState;
+	const GeometryType							m_geometryType;
+	std::vector<VkSampleMask>					m_sampleMask;
+};
+
+class RasterizationSamplesTest : public MultisampleTest
+{
+public:
+												RasterizationSamplesTest			(tcu::TestContext&		testContext,
+																					 const std::string&		name,
+																					 const std::string&		description,
+																					 VkSampleCountFlagBits	rasterizationSamples,
+																					 GeometryType			geometryType);
+	virtual										~RasterizationSamplesTest			(void) {}
+
+protected:
+	virtual TestInstance*						createMultisampleTestInstance		(Context&										context,
+																					 VkPrimitiveTopology							topology,
+																					 const std::vector<Vertex4RGBA>&				vertices,
+																					 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																					 const VkPipelineColorBlendAttachmentState&		colorBlendState) const;
+
+	static VkPipelineMultisampleStateCreateInfo	getRasterizationSamplesStateParams	(VkSampleCountFlagBits rasterizationSamples);
+};
+
+class MinSampleShadingTest : public MultisampleTest
+{
+public:
+												MinSampleShadingTest				(tcu::TestContext&		testContext,
+																					 const std::string&		name,
+																					 const std::string&		description,
+																					 VkSampleCountFlagBits	rasterizationSamples,
+																					 float					minSampleShading,
+																					 GeometryType			geometryType);
+	virtual										~MinSampleShadingTest				(void) {}
+
+protected:
+	virtual TestInstance*						createMultisampleTestInstance		(Context&										context,
+																					 VkPrimitiveTopology							topology,
+																					 const std::vector<Vertex4RGBA>&				vertices,
+																					 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																					 const VkPipelineColorBlendAttachmentState&		colorBlendState) const;
+
+	static VkPipelineMultisampleStateCreateInfo	getMinSampleShadingStateParams		(VkSampleCountFlagBits rasterizationSamples, float minSampleShading);
+};
+
+class SampleMaskTest : public MultisampleTest
+{
+public:
+												SampleMaskTest						(tcu::TestContext&					testContext,
+																					 const std::string&					name,
+																					 const std::string&					description,
+																					 VkSampleCountFlagBits				rasterizationSamples,
+																					 const std::vector<VkSampleMask>&	sampleMask,
+																					 GeometryType						geometryType);
+
+	virtual										~SampleMaskTest						(void) {}
+
+protected:
+	virtual TestInstance*						createMultisampleTestInstance		(Context&										context,
+																					 VkPrimitiveTopology							topology,
+																					 const std::vector<Vertex4RGBA>&				vertices,
+																					 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																					 const VkPipelineColorBlendAttachmentState&		colorBlendState) const;
+
+	static VkPipelineMultisampleStateCreateInfo	getSampleMaskStateParams			(VkSampleCountFlagBits rasterizationSamples, const std::vector<VkSampleMask>& sampleMask);
+};
+
+class AlphaToOneTest : public MultisampleTest
+{
+public:
+												AlphaToOneTest					(tcu::TestContext&					testContext,
+																				 const std::string&					name,
+																				 const std::string&					description,
+																				 VkSampleCountFlagBits				rasterizationSamples);
+
+	virtual										~AlphaToOneTest					(void) {}
+
+protected:
+	virtual TestInstance*						createMultisampleTestInstance	(Context&										context,
+																				 VkPrimitiveTopology							topology,
+																				 const std::vector<Vertex4RGBA>&				vertices,
+																				 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																				 const VkPipelineColorBlendAttachmentState&		colorBlendState) const;
+
+	static VkPipelineMultisampleStateCreateInfo	getAlphaToOneStateParams		(VkSampleCountFlagBits rasterizationSamples);
+	static VkPipelineColorBlendAttachmentState	getAlphaToOneBlendState			(void);
+};
+
+class AlphaToCoverageTest : public MultisampleTest
+{
+public:
+												AlphaToCoverageTest				(tcu::TestContext&		testContext,
+																				 const std::string&		name,
+																				 const std::string&		description,
+																				 VkSampleCountFlagBits	rasterizationSamples,
+																				 GeometryType			geometryType);
+
+	virtual										~AlphaToCoverageTest			(void) {}
+
+protected:
+	virtual TestInstance*						createMultisampleTestInstance	(Context&										context,
+																				 VkPrimitiveTopology							topology,
+																				 const std::vector<Vertex4RGBA>&				vertices,
+																				 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																				 const VkPipelineColorBlendAttachmentState&		colorBlendState) const;
+
+	static VkPipelineMultisampleStateCreateInfo	getAlphaToCoverageStateParams	(VkSampleCountFlagBits rasterizationSamples);
+
+	GeometryType								m_geometryType;
+};
+
+class MultisampleRenderer
+{
+public:
+												MultisampleRenderer			(Context&										context,
+																			 VkFormat										colorFormat,
+																			 const tcu::IVec2&								renderSize,
+																			 VkPrimitiveTopology							topology,
+																			 const std::vector<Vertex4RGBA>&				vertices,
+																			 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																			 const VkPipelineColorBlendAttachmentState&		blendState);
+
+	virtual										~MultisampleRenderer		(void);
+
+	de::MovePtr<tcu::TextureLevel>				render						(void);
+
+protected:
+	Context&									m_context;
+
+	const VkFormat								m_colorFormat;
+	tcu::IVec2									m_renderSize;
+
+	const VkPipelineMultisampleStateCreateInfo	m_multisampleStateParams;
+	const VkPipelineColorBlendAttachmentState	m_colorBlendState;
+
+	Move<VkImage>								m_colorImage;
+	de::MovePtr<Allocation>						m_colorImageAlloc;
+	Move<VkImageView>							m_colorAttachmentView;
+
+	Move<VkImage>								m_resolveImage;
+	de::MovePtr<Allocation>						m_resolveImageAlloc;
+	Move<VkImageView>							m_resolveAttachmentView;
+
+	Move<VkRenderPass>							m_renderPass;
+	Move<VkFramebuffer>							m_framebuffer;
+
+	Move<VkShaderModule>						m_vertexShaderModule;
+	Move<VkShaderModule>						m_fragmentShaderModule;
+
+	Move<VkBuffer>								m_vertexBuffer;
+	de::MovePtr<Allocation>						m_vertexBufferAlloc;
+
+	Move<VkPipelineLayout>						m_pipelineLayout;
+	Move<VkPipeline>							m_graphicsPipeline;
+
+	Move<VkCommandPool>							m_cmdPool;
+	Move<VkCommandBuffer>						m_cmdBuffer;
+
+	Move<VkFence>								m_fence;
+};
+
+class RasterizationSamplesInstance : public vkt::TestInstance
+{
+public:
+									RasterizationSamplesInstance	(Context&										context,
+																	 VkPrimitiveTopology							topology,
+																	 const std::vector<Vertex4RGBA>&				vertices,
+																	 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																	 const VkPipelineColorBlendAttachmentState&		blendState);
+	virtual							~RasterizationSamplesInstance	(void) {}
+
+	virtual tcu::TestStatus			iterate							(void);
+
+protected:
+	virtual tcu::TestStatus			verifyImage						(const tcu::ConstPixelBufferAccess& result);
+
+	const VkFormat					m_colorFormat;
+	const tcu::IVec2				m_renderSize;
+	const VkPrimitiveTopology		m_primitiveTopology;
+	const std::vector<Vertex4RGBA>	m_vertices;
+	MultisampleRenderer				m_multisampleRenderer;
+};
+
+class MinSampleShadingInstance : public vkt::TestInstance
+{
+public:
+												MinSampleShadingInstance	(Context&										context,
+																			 VkPrimitiveTopology							topology,
+																			 const std::vector<Vertex4RGBA>&				vertices,
+																			 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																			 const VkPipelineColorBlendAttachmentState&		blendState);
+	virtual										~MinSampleShadingInstance	(void) {}
+
+	virtual tcu::TestStatus						iterate						(void);
+
+protected:
+	virtual tcu::TestStatus						verifyImage					(const tcu::ConstPixelBufferAccess& testShadingImage,
+																			 const tcu::ConstPixelBufferAccess& minShadingImage,
+																			 const tcu::ConstPixelBufferAccess& maxShadingImage);
+	const VkFormat								m_colorFormat;
+	const tcu::IVec2							m_renderSize;
+	const VkPrimitiveTopology					m_primitiveTopology;
+	const std::vector<Vertex4RGBA>				m_vertices;
+	const VkPipelineMultisampleStateCreateInfo	m_multisampleStateParams;
+	const VkPipelineColorBlendAttachmentState	m_colorBlendState;
+};
+
+class SampleMaskInstance : public vkt::TestInstance
+{
+public:
+												SampleMaskInstance			(Context&										context,
+																			 VkPrimitiveTopology							topology,
+																			 const std::vector<Vertex4RGBA>&				vertices,
+																			 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																			 const VkPipelineColorBlendAttachmentState&		blendState);
+	virtual										~SampleMaskInstance			(void) {}
+
+	virtual tcu::TestStatus						iterate						(void);
+
+protected:
+	virtual tcu::TestStatus						verifyImage					(const tcu::ConstPixelBufferAccess& testShadingImage,
+																			 const tcu::ConstPixelBufferAccess& minShadingImage,
+																			 const tcu::ConstPixelBufferAccess& maxShadingImage);
+	const VkFormat								m_colorFormat;
+	const tcu::IVec2							m_renderSize;
+	const VkPrimitiveTopology					m_primitiveTopology;
+	const std::vector<Vertex4RGBA>				m_vertices;
+	const VkPipelineMultisampleStateCreateInfo	m_multisampleStateParams;
+	const VkPipelineColorBlendAttachmentState	m_colorBlendState;
+};
+
+class AlphaToOneInstance : public vkt::TestInstance
+{
+public:
+												AlphaToOneInstance			(Context&										context,
+																			 VkPrimitiveTopology							topology,
+																			 const std::vector<Vertex4RGBA>&				vertices,
+																			 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																			 const VkPipelineColorBlendAttachmentState&		blendState);
+	virtual										~AlphaToOneInstance			(void) {}
+
+	virtual tcu::TestStatus						iterate						(void);
+
+protected:
+	virtual tcu::TestStatus						verifyImage					(const tcu::ConstPixelBufferAccess& alphaOneImage,
+																			 const tcu::ConstPixelBufferAccess& noAlphaOneImage);
+	const VkFormat								m_colorFormat;
+	const tcu::IVec2							m_renderSize;
+	const VkPrimitiveTopology					m_primitiveTopology;
+	const std::vector<Vertex4RGBA>				m_vertices;
+	const VkPipelineMultisampleStateCreateInfo	m_multisampleStateParams;
+	const VkPipelineColorBlendAttachmentState	m_colorBlendState;
+};
+
+class AlphaToCoverageInstance : public vkt::TestInstance
+{
+public:
+												AlphaToCoverageInstance		(Context&										context,
+																			 VkPrimitiveTopology							topology,
+																			 const std::vector<Vertex4RGBA>&				vertices,
+																			 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																			 const VkPipelineColorBlendAttachmentState&		blendState,
+																			 GeometryType									geometryType);
+	virtual										~AlphaToCoverageInstance	(void) {}
+
+	virtual tcu::TestStatus						iterate						(void);
+
+protected:
+	virtual tcu::TestStatus						verifyImage					(const tcu::ConstPixelBufferAccess& result);
+	const VkFormat								m_colorFormat;
+	const tcu::IVec2							m_renderSize;
+	const VkPrimitiveTopology					m_primitiveTopology;
+	const std::vector<Vertex4RGBA>				m_vertices;
+	const VkPipelineMultisampleStateCreateInfo	m_multisampleStateParams;
+	const VkPipelineColorBlendAttachmentState	m_colorBlendState;
+	const GeometryType							m_geometryType;
+};
+
+
+// Helper functions
+
+void initMultisamplePrograms (SourceCollections& sources, GeometryType geometryType)
+{
+	std::ostringstream vertexSource;
+
+	vertexSource <<
+		"#version 310 es\n"
+		"layout(location = 0) in vec4 position;\n"
+		"layout(location = 1) in vec4 color;\n"
+		"layout(location = 0) out highp vec4 vtxColor;\n"
+		"void main (void)\n"
+		"{\n"
+		"	gl_Position = position;\n"
+		"	vtxColor = color;\n"
+		<< (geometryType == GEOMETRY_TYPE_OPAQUE_POINT ? "	gl_PointSize = 3.0f;\n"
+														 : "" )
+		<< "}\n";
+
+	static const char* fragmentSource =
+		"#version 310 es\n"
+		"layout(location = 0) in highp vec4 vtxColor;\n"
+		"layout(location = 0) out highp vec4 fragColor;\n"
+		"void main (void)\n"
+		"{\n"
+		"	fragColor = vtxColor;\n"
+		"}\n";
+
+	sources.glslSources.add("color_vert") << glu::VertexSource(vertexSource.str());
+	sources.glslSources.add("color_frag") << glu::FragmentSource(fragmentSource);
+}
+
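+// Returns true if the device's framebufferColorSampleCounts limit includes the requested sample count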
+bool isSupportedSampleCount (const InstanceInterface& instanceInterface, VkPhysicalDevice physicalDevice, VkSampleCountFlagBits rasterizationSamples)
+{
+	VkPhysicalDeviceProperties deviceProperties;
+
+	instanceInterface.getPhysicalDeviceProperties(physicalDevice, &deviceProperties);
+
+	return !!(deviceProperties.limits.framebufferColorSampleCounts & rasterizationSamples);
+}
+
+VkPipelineColorBlendAttachmentState getDefaultColorBlendAttachmentState (void)
+{
+	const VkPipelineColorBlendAttachmentState colorBlendState =
+	{
+		false,														// VkBool32					blendEnable;
+		VK_BLEND_FACTOR_ONE,										// VkBlendFactor			srcColorBlendFactor;
+		VK_BLEND_FACTOR_ZERO,										// VkBlendFactor			dstColorBlendFactor;
+		VK_BLEND_OP_ADD,											// VkBlendOp				colorBlendOp;
+		VK_BLEND_FACTOR_ONE,										// VkBlendFactor			srcAlphaBlendFactor;
+		VK_BLEND_FACTOR_ZERO,										// VkBlendFactor			dstAlphaBlendFactor;
+		VK_BLEND_OP_ADD,											// VkBlendOp				alphaBlendOp;
+		VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |		// VkColorComponentFlags	colorWriteMask;
+			VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT
+	};
+
+	return colorBlendState;
+}
+
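+// Builds a histogram of raw 32-bit pixel values and returns the number of distinct values in the image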
+deUint32 getUniqueColorsCount (const tcu::ConstPixelBufferAccess& image)
+{
+	DE_ASSERT(image.getFormat().getPixelSize() == 4);
+
+	std::map<deUint32, deUint32>	histogram; // map<pixel value, number of occurrences>
+	const deUint32					pixelCount	= image.getWidth() * image.getHeight() * image.getDepth();
+
+	for (deUint32 pixelNdx = 0; pixelNdx < pixelCount; pixelNdx++)
+	{
+		const deUint32 pixelValue = *((const deUint32*)image.getDataPtr() + pixelNdx);
+
+		if (histogram.find(pixelValue) != histogram.end())
+			histogram[pixelValue]++;
+		else
+			histogram[pixelValue] = 1;
+	}
+
+	return (deUint32)histogram.size();
+}
+
+
+// MultisampleTest
+
+MultisampleTest::MultisampleTest (tcu::TestContext&								testContext,
+								  const std::string&							name,
+								  const std::string&							description,
+								  const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+								  const VkPipelineColorBlendAttachmentState&	blendState,
+								  GeometryType									geometryType)
+	: vkt::TestCase				(testContext, name, description)
+	, m_multisampleStateParams	(multisampleStateParams)
+	, m_colorBlendState			(blendState)
+	, m_geometryType			(geometryType)
+{
+	if (m_multisampleStateParams.pSampleMask)
+	{
+		// Copy pSampleMask to avoid dependencies with other classes
+
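+		// pSampleMask holds one 32-bit word per 32 rasterization samples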
+		const deUint32 maskCount = deCeilFloatToInt32(float(m_multisampleStateParams.rasterizationSamples) / 32);
+
+		for (deUint32 maskNdx = 0; maskNdx < maskCount; maskNdx++)
+			m_sampleMask.push_back(m_multisampleStateParams.pSampleMask[maskNdx]);
+
+		m_multisampleStateParams.pSampleMask = m_sampleMask.data();
+	}
+}
+
+MultisampleTest::~MultisampleTest (void)
+{
+}
+
+void MultisampleTest::initPrograms (SourceCollections& programCollection) const
+{
+	initMultisamplePrograms(programCollection, m_geometryType);
+}
+
+TestInstance* MultisampleTest::createInstance (Context& context) const
+{
+	VkPrimitiveTopology			topology;
+	std::vector<Vertex4RGBA>	vertices;
+
+	switch (m_geometryType)
+	{
+		case GEOMETRY_TYPE_OPAQUE_TRIANGLE:
+		{
+			topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
+			const Vertex4RGBA vertexData[3] =
+			{
+				{
+					tcu::Vec4(-0.75f, 0.0f, 0.0f, 1.0f),
+					tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f)
+				},
+				{
+					tcu::Vec4(0.75f, 0.125f, 0.0f, 1.0f),
+					tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f)
+				},
+				{
+					tcu::Vec4(0.75f, -0.125f, 0.0f, 1.0f),
+					tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f)
+				}
+			};
+
+			vertices = std::vector<Vertex4RGBA>(vertexData, vertexData + 3);
+			break;
+		}
+
+		case GEOMETRY_TYPE_OPAQUE_LINE:
+		{
+			topology = VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
+
+			const Vertex4RGBA vertexData[2] =
+			{
+				{
+					tcu::Vec4(-0.75f, 0.25f, 0.0f, 1.0f),
+					tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f)
+				},
+				{
+					tcu::Vec4(0.75f, -0.25f, 0.0f, 1.0f),
+					tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f)
+				}
+			};
+
+			vertices = std::vector<Vertex4RGBA>(vertexData, vertexData + 2);
+			break;
+		}
+
+		case GEOMETRY_TYPE_OPAQUE_POINT:
+		{
+			topology = VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+
+			const Vertex4RGBA vertex =
+			{
+				tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f),
+				tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f)
+			};
+
+			vertices = std::vector<Vertex4RGBA>(1, vertex);
+			break;
+		}
+
+		case GEOMETRY_TYPE_OPAQUE_QUAD:
+		case GEOMETRY_TYPE_TRANSLUCENT_QUAD:
+		case GEOMETRY_TYPE_INVISIBLE_QUAD:
+		case GEOMETRY_TYPE_GRADIENT_QUAD:
+		{
+			topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
+
+			Vertex4RGBA vertexData[4] =
+			{
+				{
+					tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f),
+					tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f)
+				},
+				{
+					tcu::Vec4(1.0f, -1.0f, 0.0f, 1.0f),
+					tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f)
+				},
+				{
+					tcu::Vec4(-1.0f, 1.0f, 0.0f, 1.0f),
+					tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f)
+				},
+				{
+					tcu::Vec4(1.0f, 1.0f, 0.0f, 1.0f),
+					tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f)
+				}
+			};
+
+			if (m_geometryType == GEOMETRY_TYPE_TRANSLUCENT_QUAD)
+			{
+				for (int i = 0; i < 4; i++)
+					vertexData[i].color.w() = 0.25f;
+			}
+			else if (m_geometryType == GEOMETRY_TYPE_INVISIBLE_QUAD)
+			{
+				for (int i = 0; i < 4; i++)
+					vertexData[i].color.w() = 0.0f;
+			}
+			else if (m_geometryType == GEOMETRY_TYPE_GRADIENT_QUAD)
+			{
+				vertexData[0].color.w() = 0.0f;
+				vertexData[2].color.w() = 0.0f;
+			}
+
+			vertices = std::vector<Vertex4RGBA>(vertexData, vertexData + 4);
+			break;
+		}
+
+		default:
+			topology = VK_PRIMITIVE_TOPOLOGY_LAST;
+			DE_ASSERT(false);
+	}
+
+	return createMultisampleTestInstance(context, topology, vertices, m_multisampleStateParams, m_colorBlendState);
+}
+
+
+// RasterizationSamplesTest
+
+RasterizationSamplesTest::RasterizationSamplesTest (tcu::TestContext&		testContext,
+													const std::string&		name,
+													const std::string&		description,
+													VkSampleCountFlagBits	rasterizationSamples,
+													GeometryType			geometryType)
+	: MultisampleTest	(testContext, name, description, getRasterizationSamplesStateParams(rasterizationSamples), getDefaultColorBlendAttachmentState(), geometryType)
+{
+}
+
+VkPipelineMultisampleStateCreateInfo RasterizationSamplesTest::getRasterizationSamplesStateParams (VkSampleCountFlagBits rasterizationSamples)
+{
+	const VkPipelineMultisampleStateCreateInfo multisampleStateParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+		DE_NULL,													// const void*								pNext;
+		0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+		rasterizationSamples,										// VkSampleCountFlagBits					rasterizationSamples;
+		false,														// VkBool32									sampleShadingEnable;
+		0.0f,														// float									minSampleShading;
+		DE_NULL,													// const VkSampleMask*						pSampleMask;
+		false,														// VkBool32									alphaToCoverageEnable;
+		false														// VkBool32									alphaToOneEnable;
+	};
+
+	return multisampleStateParams;
+}
+
+TestInstance* RasterizationSamplesTest::createMultisampleTestInstance (Context&										context,
+																	   VkPrimitiveTopology							topology,
+																	   const std::vector<Vertex4RGBA>&				vertices,
+																	   const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																	   const VkPipelineColorBlendAttachmentState&	colorBlendState) const
+{
+	return new RasterizationSamplesInstance(context, topology, vertices, multisampleStateParams, colorBlendState);
+}
+
+
+// MinSampleShadingTest
+
+MinSampleShadingTest::MinSampleShadingTest (tcu::TestContext&		testContext,
+											const std::string&		name,
+											const std::string&		description,
+											VkSampleCountFlagBits	rasterizationSamples,
+											float					minSampleShading,
+											GeometryType			geometryType)
+	: MultisampleTest	(testContext, name, description, getMinSampleShadingStateParams(rasterizationSamples, minSampleShading), getDefaultColorBlendAttachmentState(), geometryType)
+{
+}
+
+TestInstance* MinSampleShadingTest::createMultisampleTestInstance (Context&										context,
+																   VkPrimitiveTopology							topology,
+																   const std::vector<Vertex4RGBA>&				vertices,
+																   const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																   const VkPipelineColorBlendAttachmentState&	colorBlendState) const
+{
+	return new MinSampleShadingInstance(context, topology, vertices, multisampleStateParams, colorBlendState);
+}
+
+VkPipelineMultisampleStateCreateInfo MinSampleShadingTest::getMinSampleShadingStateParams (VkSampleCountFlagBits rasterizationSamples, float minSampleShading)
+{
+	const VkPipelineMultisampleStateCreateInfo multisampleStateParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+		DE_NULL,													// const void*								pNext;
+		0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+		rasterizationSamples,										// VkSampleCountFlagBits					rasterizationSamples;
+		true,														// VkBool32									sampleShadingEnable;
+		minSampleShading,											// float									minSampleShading;
+		DE_NULL,													// const VkSampleMask*						pSampleMask;
+		false,														// VkBool32									alphaToCoverageEnable;
+		false														// VkBool32									alphaToOneEnable;
+	};
+
+	return multisampleStateParams;
+}
+
+
+// SampleMaskTest
+
+SampleMaskTest::SampleMaskTest (tcu::TestContext&					testContext,
+								const std::string&					name,
+								const std::string&					description,
+								VkSampleCountFlagBits				rasterizationSamples,
+								const std::vector<VkSampleMask>&	sampleMask,
+								GeometryType						geometryType)
+	: MultisampleTest	(testContext, name, description, getSampleMaskStateParams(rasterizationSamples, sampleMask), getDefaultColorBlendAttachmentState(), geometryType)
+{
+}
+
+TestInstance* SampleMaskTest::createMultisampleTestInstance (Context&										context,
+															 VkPrimitiveTopology							topology,
+															 const std::vector<Vertex4RGBA>&				vertices,
+															 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+															 const VkPipelineColorBlendAttachmentState&		colorBlendState) const
+{
+	return new SampleMaskInstance(context, topology, vertices, multisampleStateParams, colorBlendState);
+}
+
+VkPipelineMultisampleStateCreateInfo SampleMaskTest::getSampleMaskStateParams (VkSampleCountFlagBits rasterizationSamples, const std::vector<VkSampleMask>& sampleMask)
+{
+	const VkPipelineMultisampleStateCreateInfo multisampleStateParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+		DE_NULL,													// const void*								pNext;
+		0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+		rasterizationSamples,										// VkSampleCountFlagBits					rasterizationSamples;
+		false,														// VkBool32									sampleShadingEnable;
+		0.0f,														// float									minSampleShading;
+		sampleMask.data(),											// const VkSampleMask*						pSampleMask;
+		false,														// VkBool32									alphaToCoverageEnable;
+		false														// VkBool32									alphaToOneEnable;
+	};
+
+	return multisampleStateParams;
+}
+
+
+// AlphaToOneTest
+
+AlphaToOneTest::AlphaToOneTest (tcu::TestContext&		testContext,
+								const std::string&		name,
+								const std::string&		description,
+								VkSampleCountFlagBits	rasterizationSamples)
+	: MultisampleTest	(testContext, name, description, getAlphaToOneStateParams(rasterizationSamples), getAlphaToOneBlendState(), GEOMETRY_TYPE_GRADIENT_QUAD)
+{
+}
+
+TestInstance* AlphaToOneTest::createMultisampleTestInstance (Context&										context,
+															 VkPrimitiveTopology							topology,
+															 const std::vector<Vertex4RGBA>&				vertices,
+															 const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+															 const VkPipelineColorBlendAttachmentState&		colorBlendState) const
+{
+	return new AlphaToOneInstance(context, topology, vertices, multisampleStateParams, colorBlendState);
+}
+
+VkPipelineMultisampleStateCreateInfo AlphaToOneTest::getAlphaToOneStateParams (VkSampleCountFlagBits rasterizationSamples)
+{
+	const VkPipelineMultisampleStateCreateInfo multisampleStateParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+		DE_NULL,													// const void*								pNext;
+		0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+		rasterizationSamples,										// VkSampleCountFlagBits					rasterizationSamples;
+		false,														// VkBool32									sampleShadingEnable;
+		0.0f,														// float									minSampleShading;
+		DE_NULL,													// const VkSampleMask*						pSampleMask;
+		false,														// VkBool32									alphaToCoverageEnable;
+		true														// VkBool32									alphaToOneEnable;
+	};
+
+	return multisampleStateParams;
+}
+
+VkPipelineColorBlendAttachmentState AlphaToOneTest::getAlphaToOneBlendState (void)
+{
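+	// Blend with source alpha so that forcing the fragment alpha to 1.0 produces a visibly
+	// brighter result than rendering with the original, smaller alpha values.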
+	const VkPipelineColorBlendAttachmentState colorBlendState =
+	{
+		true,														// VkBool32					blendEnable;
+		VK_BLEND_FACTOR_SRC_ALPHA,									// VkBlendFactor			srcColorBlendFactor;
+		VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,						// VkBlendFactor			dstColorBlendFactor;
+		VK_BLEND_OP_ADD,											// VkBlendOp				colorBlendOp;
+		VK_BLEND_FACTOR_SRC_ALPHA,									// VkBlendFactor			srcAlphaBlendFactor;
+		VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,						// VkBlendFactor			dstAlphaBlendFactor;
+		VK_BLEND_OP_ADD,											// VkBlendOp				alphaBlendOp;
+		VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |		// VkColorComponentFlags	colorWriteMask;
+			VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT
+	};
+
+	return colorBlendState;
+}
+
+
+// AlphaToCoverageTest
+
+AlphaToCoverageTest::AlphaToCoverageTest (tcu::TestContext&			testContext,
+										  const std::string&		name,
+										  const std::string&		description,
+										  VkSampleCountFlagBits		rasterizationSamples,
+										  GeometryType				geometryType)
+	: MultisampleTest	(testContext, name, description, getAlphaToCoverageStateParams(rasterizationSamples), getDefaultColorBlendAttachmentState(), geometryType)
+	, m_geometryType	(geometryType)
+{
+}
+
+TestInstance* AlphaToCoverageTest::createMultisampleTestInstance (Context&										context,
+																  VkPrimitiveTopology							topology,
+																  const std::vector<Vertex4RGBA>&				vertices,
+																  const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+																  const VkPipelineColorBlendAttachmentState&	colorBlendState) const
+{
+	return new AlphaToCoverageInstance(context, topology, vertices, multisampleStateParams, colorBlendState, m_geometryType);
+}
+
+VkPipelineMultisampleStateCreateInfo AlphaToCoverageTest::getAlphaToCoverageStateParams (VkSampleCountFlagBits rasterizationSamples)
+{
+	const VkPipelineMultisampleStateCreateInfo multisampleStateParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+		DE_NULL,													// const void*								pNext;
+		0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+		rasterizationSamples,										// VkSampleCountFlagBits					rasterizationSamples;
+		false,														// VkBool32									sampleShadingEnable;
+		0.0f,														// float									minSampleShading;
+		DE_NULL,													// const VkSampleMask*						pSampleMask;
+		true,														// VkBool32									alphaToCoverageEnable;
+		false														// VkBool32									alphaToOneEnable;
+	};
+
+	return multisampleStateParams;
+}
+
+// RasterizationSamplesInstance
+
+RasterizationSamplesInstance::RasterizationSamplesInstance (Context&										context,
+															VkPrimitiveTopology								topology,
+															const std::vector<Vertex4RGBA>&					vertices,
+															const VkPipelineMultisampleStateCreateInfo&		multisampleStateParams,
+															const VkPipelineColorBlendAttachmentState&		blendState)
+	: vkt::TestInstance		(context)
+	, m_colorFormat			(VK_FORMAT_R8G8B8A8_UNORM)
+	, m_renderSize			(32, 32)
+	, m_primitiveTopology	(topology)
+	, m_vertices			(vertices)
+	, m_multisampleRenderer	(context, m_colorFormat, m_renderSize, topology, vertices, multisampleStateParams, blendState)
+{
+}
+
+tcu::TestStatus RasterizationSamplesInstance::iterate (void)
+{
+	de::MovePtr<tcu::TextureLevel> level(m_multisampleRenderer.render());
+	return verifyImage(level->getAccess());
+}
+
+tcu::TestStatus RasterizationSamplesInstance::verifyImage (const tcu::ConstPixelBufferAccess& result)
+{
+	// Verify range of unique pixels
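+	// A multisampled render of an opaque primitive on a cleared background is expected to
+	// produce at least the clear color, the primitive color and one blended edge color.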
+	{
+		const deUint32	numUniqueColors = getUniqueColorsCount(result);
+		const deUint32	minUniqueColors	= 3;
+
+		tcu::TestLog& log = m_context.getTestContext().getLog();
+
+		log << tcu::TestLog::Message
+			<< "\nMin. unique colors expected: " << minUniqueColors << "\n"
+			<< "Unique colors found: " << numUniqueColors << "\n"
+			<< tcu::TestLog::EndMessage;
+
+		if (numUniqueColors < minUniqueColors)
+			return tcu::TestStatus::fail("Unique colors out of expected bounds");
+	}
+
+	// Verify shape of the rendered primitive (fuzzy-compare)
+	{
+		const tcu::TextureFormat	tcuColorFormat	= mapVkFormat(m_colorFormat);
+		const tcu::TextureFormat	tcuDepthFormat	= tcu::TextureFormat();
+		const ColorVertexShader		vertexShader;
+		const ColorFragmentShader	fragmentShader	(tcuColorFormat, tcuDepthFormat);
+		const rr::Program			program			(&vertexShader, &fragmentShader);
+		ReferenceRenderer			refRenderer		(m_renderSize.x(), m_renderSize.y(), 1, tcuColorFormat, tcuDepthFormat, &program);
+		rr::RenderState				renderState		(refRenderer.getViewportState());
+
+		if (m_primitiveTopology == VK_PRIMITIVE_TOPOLOGY_POINT_LIST)
+		{
+			VkPhysicalDeviceProperties deviceProperties;
+
+			m_context.getInstanceInterface().getPhysicalDeviceProperties(m_context.getPhysicalDevice(), &deviceProperties);
+
+			// gl_PointSize is clamped to pointSizeRange
+			renderState.point.pointSize = deFloatMin(3.0f, deviceProperties.limits.pointSizeRange[1]);
+		}
+
+		refRenderer.colorClear(tcu::Vec4(0.0f));
+		refRenderer.draw(renderState, mapVkPrimitiveTopology(m_primitiveTopology), m_vertices);
+
+		if (!tcu::fuzzyCompare(m_context.getTestContext().getLog(), "FuzzyImageCompare", "Image comparison", refRenderer.getAccess(), result, 0.05f, tcu::COMPARE_LOG_RESULT))
+			return tcu::TestStatus::fail("Primitive has unexpected shape");
+	}
+
+	return tcu::TestStatus::pass("Primitive rendered, unique colors within expected bounds");
+}
+
+
+// MinSampleShadingInstance
+
+MinSampleShadingInstance::MinSampleShadingInstance (Context&									context,
+													VkPrimitiveTopology							topology,
+													const std::vector<Vertex4RGBA>&				vertices,
+													const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+													const VkPipelineColorBlendAttachmentState&	colorBlendState)
+	: vkt::TestInstance			(context)
+	, m_colorFormat				(VK_FORMAT_R8G8B8A8_UNORM)
+	, m_renderSize				(32, 32)
+	, m_primitiveTopology		(topology)
+	, m_vertices				(vertices)
+	, m_multisampleStateParams	(multisampleStateParams)
+	, m_colorBlendState			(colorBlendState)
+{
+	VkPhysicalDeviceFeatures deviceFeatures;
+
+	m_context.getInstanceInterface().getPhysicalDeviceFeatures(m_context.getPhysicalDevice(), &deviceFeatures);
+
+	if (!deviceFeatures.sampleRateShading)
+		throw tcu::NotSupportedError("Sample shading is not supported");
+}
+
+tcu::TestStatus MinSampleShadingInstance::iterate (void)
+{
+	de::MovePtr<tcu::TextureLevel>				testShadingImage;
+	de::MovePtr<tcu::TextureLevel>				minShadingImage;
+	de::MovePtr<tcu::TextureLevel>				maxShadingImage;
+
+	// Render with test minSampleShading
+	{
+		MultisampleRenderer renderer (m_context, m_colorFormat, m_renderSize, m_primitiveTopology, m_vertices, m_multisampleStateParams, m_colorBlendState);
+		testShadingImage = renderer.render();
+	}
+
+	// Render with minSampleShading = 0.0f
+	{
+		VkPipelineMultisampleStateCreateInfo	multisampleParams	= m_multisampleStateParams;
+		multisampleParams.minSampleShading = 0.0f;
+
+		MultisampleRenderer renderer (m_context, m_colorFormat, m_renderSize, m_primitiveTopology, m_vertices, multisampleParams, m_colorBlendState);
+		minShadingImage = renderer.render();
+	}
+
+	// Render with minSampleShading = 1.0f
+	{
+		VkPipelineMultisampleStateCreateInfo	multisampleParams	= m_multisampleStateParams;
+		multisampleParams.minSampleShading = 1.0f;
+
+		MultisampleRenderer renderer (m_context, m_colorFormat, m_renderSize, m_primitiveTopology, m_vertices, multisampleParams, m_colorBlendState);
+		maxShadingImage = renderer.render();
+	}
+
+	return verifyImage(testShadingImage->getAccess(), minShadingImage->getAccess(), maxShadingImage->getAccess());
+}
+
+tcu::TestStatus MinSampleShadingInstance::verifyImage (const tcu::ConstPixelBufferAccess& testShadingImage, const tcu::ConstPixelBufferAccess& minShadingImage, const tcu::ConstPixelBufferAccess& maxShadingImage)
+{
+	const deUint32	testColorCount	= getUniqueColorsCount(testShadingImage);
+	const deUint32	minColorCount	= getUniqueColorsCount(minShadingImage);
+	const deUint32	maxColorCount	= getUniqueColorsCount(maxShadingImage);
+
+	tcu::TestLog& log = m_context.getTestContext().getLog();
+
+	log << tcu::TestLog::Message
+		<< "\nColors found: " << testColorCount << "\n"
+		<< "Min. colors expected: " << minColorCount << "\n"
+		<< "Max. colors expected: " << maxColorCount << "\n"
+		<< tcu::TestLog::EndMessage;
+
+	if (minColorCount > testColorCount || testColorCount > maxColorCount)
+		return tcu::TestStatus::fail("Unique colors out of expected bounds");
+	else
+		return tcu::TestStatus::pass("Unique colors within expected bounds");
+}
+
+SampleMaskInstance::SampleMaskInstance (Context&										context,
+										VkPrimitiveTopology								topology,
+										const std::vector<Vertex4RGBA>&					vertices,
+										const VkPipelineMultisampleStateCreateInfo&		multisampleStateParams,
+										const VkPipelineColorBlendAttachmentState&		blendState)
+	: vkt::TestInstance			(context)
+	, m_colorFormat				(VK_FORMAT_R8G8B8A8_UNORM)
+	, m_renderSize				(32, 32)
+	, m_primitiveTopology		(topology)
+	, m_vertices				(vertices)
+	, m_multisampleStateParams	(multisampleStateParams)
+	, m_colorBlendState			(blendState)
+{
+}
+
+tcu::TestStatus SampleMaskInstance::iterate (void)
+{
+	de::MovePtr<tcu::TextureLevel>				testSampleMaskImage;
+	de::MovePtr<tcu::TextureLevel>				minSampleMaskImage;
+	de::MovePtr<tcu::TextureLevel>				maxSampleMaskImage;
+
+	// Render with test flags
+	{
+		MultisampleRenderer renderer (m_context, m_colorFormat, m_renderSize, m_primitiveTopology, m_vertices, m_multisampleStateParams, m_colorBlendState);
+		testSampleMaskImage = renderer.render();
+	}
+
+	// Render with all flags off
+	{
+		VkPipelineMultisampleStateCreateInfo	multisampleParams	= m_multisampleStateParams;
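+		// pSampleMask must point to ceil(rasterizationSamples / 32) mask words when it is not NULL.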
+		const std::vector<VkSampleMask>			sampleMask			((multisampleParams.rasterizationSamples + 31) / 32, (VkSampleMask)0);
+
+		multisampleParams.pSampleMask = sampleMask.data();
+
+		MultisampleRenderer renderer (m_context, m_colorFormat, m_renderSize, m_primitiveTopology, m_vertices, multisampleParams, m_colorBlendState);
+		minSampleMaskImage = renderer.render();
+	}
+
+	// Render with all flags on
+	{
+		VkPipelineMultisampleStateCreateInfo	multisampleParams	= m_multisampleStateParams;
+		const std::vector<VkSampleMask>			sampleMask			((multisampleParams.rasterizationSamples + 31) / 32, ~((VkSampleMask)0));
+
+		multisampleParams.pSampleMask = sampleMask.data();
+
+		MultisampleRenderer renderer (m_context, m_colorFormat, m_renderSize, m_primitiveTopology, m_vertices, multisampleParams, m_colorBlendState);
+		maxSampleMaskImage = renderer.render();
+	}
+
+	return verifyImage(testSampleMaskImage->getAccess(), minSampleMaskImage->getAccess(), maxSampleMaskImage->getAccess());
+}
+
+tcu::TestStatus SampleMaskInstance::verifyImage (const tcu::ConstPixelBufferAccess& testSampleMaskImage,
+												 const tcu::ConstPixelBufferAccess& minSampleMaskImage,
+												 const tcu::ConstPixelBufferAccess& maxSampleMaskImage)
+{
+	const deUint32	testColorCount	= getUniqueColorsCount(testSampleMaskImage);
+	const deUint32	minColorCount	= getUniqueColorsCount(minSampleMaskImage);
+	const deUint32	maxColorCount	= getUniqueColorsCount(maxSampleMaskImage);
+
+	tcu::TestLog& log = m_context.getTestContext().getLog();
+
+	log << tcu::TestLog::Message
+		<< "\nColors found: " << testColorCount << "\n"
+		<< "Min. colors expected: " << minColorCount << "\n"
+		<< "Max. colors expected: " << maxColorCount << "\n"
+		<< tcu::TestLog::EndMessage;
+
+	if (minColorCount > testColorCount || testColorCount > maxColorCount)
+		return tcu::TestStatus::fail("Unique colors out of expected bounds");
+	else
+		return tcu::TestStatus::pass("Unique colors within expected bounds");
+}
+
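+// Renders the same thin triangle with every supported sample count and checks that the
+// number of unique colors in the resolved image does not decrease as the sample count grows.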
+tcu::TestStatus testRasterSamplesConsistency (Context& context, GeometryType geometryType)
+{
+	// Use triangle only.
+	DE_UNREF(geometryType);
+
+	const VkSampleCountFlagBits samples[] =
+	{
+		VK_SAMPLE_COUNT_1_BIT,
+		VK_SAMPLE_COUNT_2_BIT,
+		VK_SAMPLE_COUNT_4_BIT,
+		VK_SAMPLE_COUNT_8_BIT,
+		VK_SAMPLE_COUNT_16_BIT,
+		VK_SAMPLE_COUNT_32_BIT,
+		VK_SAMPLE_COUNT_64_BIT
+	};
+
+	const Vertex4RGBA vertexData[3] =
+	{
+		{
+			tcu::Vec4(-0.75f, 0.0f, 0.0f, 1.0f),
+			tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f)
+		},
+		{
+			tcu::Vec4(0.75f, 0.125f, 0.0f, 1.0f),
+			tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f)
+		},
+		{
+			tcu::Vec4(0.75f, -0.125f, 0.0f, 1.0f),
+			tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f)
+		}
+	};
+
+	const std::vector<Vertex4RGBA>	vertices			(vertexData, vertexData + 3);
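+	// A single-sample render of this triangle would contain two colors (clear + primitive),
+	// which serves as the initial lower bound for the comparison below.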
+	deUint32						prevUniqueColors	= 2;
+	int								renderCount			= 0;
+
+	// Do not render with 1 sample (start with samplesNdx = 1).
+	for (int samplesNdx = 1; samplesNdx < DE_LENGTH_OF_ARRAY(samples); samplesNdx++)
+	{
+		if (!isSupportedSampleCount(context.getInstanceInterface(), context.getPhysicalDevice(), samples[samplesNdx]))
+			continue;
+
+		const VkPipelineMultisampleStateCreateInfo multisampleStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+			samples[samplesNdx],										// VkSampleCountFlagBits					rasterizationSamples;
+			false,														// VkBool32									sampleShadingEnable;
+			0.0f,														// float									minSampleShading;
+			DE_NULL,													// const VkSampleMask*						pSampleMask;
+			false,														// VkBool32									alphaToCoverageEnable;
+			false														// VkBool32									alphaToOneEnable;
+		};
+
+		MultisampleRenderer				renderer		(context, VK_FORMAT_R8G8B8A8_UNORM, tcu::IVec2(32, 32), VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, vertices, multisampleStateParams, getDefaultColorBlendAttachmentState());
+		de::MovePtr<tcu::TextureLevel>	result			= renderer.render();
+		const deUint32					uniqueColors	= getUniqueColorsCount(result->getAccess());
+
+		renderCount++;
+
+		if (prevUniqueColors > uniqueColors)
+		{
+			std::ostringstream message;
+
+			message << "More unique colors generated with " << samples[samplesNdx - 1] << " than with " << samples[samplesNdx];
+			return tcu::TestStatus::fail(message.str());
+		}
+
+		prevUniqueColors = uniqueColors;
+	}
+
+	if (renderCount == 0)
+		throw tcu::NotSupportedError("Multisampling is unsupported");
+
+	return tcu::TestStatus::pass("Number of unique colors does not decrease as the sample count increases");
+}
+
+
+// AlphaToOneInstance
+
+AlphaToOneInstance::AlphaToOneInstance (Context&									context,
+										VkPrimitiveTopology							topology,
+										const std::vector<Vertex4RGBA>&				vertices,
+										const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+										const VkPipelineColorBlendAttachmentState&	blendState)
+	: vkt::TestInstance			(context)
+	, m_colorFormat				(VK_FORMAT_R8G8B8A8_UNORM)
+	, m_renderSize				(32, 32)
+	, m_primitiveTopology		(topology)
+	, m_vertices				(vertices)
+	, m_multisampleStateParams	(multisampleStateParams)
+	, m_colorBlendState			(blendState)
+{
+	VkPhysicalDeviceFeatures deviceFeatures;
+
+	context.getInstanceInterface().getPhysicalDeviceFeatures(context.getPhysicalDevice(), &deviceFeatures);
+
+	if (!deviceFeatures.alphaToOne)
+		throw tcu::NotSupportedError("Alpha-to-one is not supported");
+}
+
+tcu::TestStatus AlphaToOneInstance::iterate	(void)
+{
+	DE_ASSERT(m_multisampleStateParams.alphaToOneEnable);
+	DE_ASSERT(m_colorBlendState.blendEnable);
+
+	de::MovePtr<tcu::TextureLevel>	alphaOneImage;
+	de::MovePtr<tcu::TextureLevel>	noAlphaOneImage;
+
+	// Render with blend enabled and alpha to one on
+	{
+		MultisampleRenderer renderer (m_context, m_colorFormat, m_renderSize, m_primitiveTopology, m_vertices, m_multisampleStateParams, m_colorBlendState);
+		alphaOneImage = renderer.render();
+	}
+
+	// Render with blend enabled and alpha to one off
+	{
+		VkPipelineMultisampleStateCreateInfo	multisampleParams	= m_multisampleStateParams;
+		multisampleParams.alphaToOneEnable = false;
+
+		MultisampleRenderer renderer (m_context, m_colorFormat, m_renderSize, m_primitiveTopology, m_vertices, multisampleParams, m_colorBlendState);
+		noAlphaOneImage = renderer.render();
+	}
+
+	return verifyImage(alphaOneImage->getAccess(), noAlphaOneImage->getAccess());
+}
+
+tcu::TestStatus AlphaToOneInstance::verifyImage (const tcu::ConstPixelBufferAccess&	alphaOneImage,
+												 const tcu::ConstPixelBufferAccess&	noAlphaOneImage)
+{
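+	// With alpha forced to 1.0 the blend keeps the full fragment color, whereas the original
+	// alpha (<= 1.0) scales it towards the black clear color, so every pixel rendered with
+	// alpha-to-one must be component-wise greater than or equal to its counterpart.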
+	for (int y = 0; y < m_renderSize.y(); y++)
+	{
+		for (int x = 0; x < m_renderSize.x(); x++)
+		{
+			if (!tcu::boolAll(tcu::greaterThanEqual(alphaOneImage.getPixel(x, y), noAlphaOneImage.getPixel(x, y))))
+			{
+				std::ostringstream message;
+				message << "Unsatisfied condition: " << alphaOneImage.getPixel(x, y) << " >= " << noAlphaOneImage.getPixel(x, y);
+				return tcu::TestStatus::fail(message.str());
+			}
+		}
+	}
+
+	return tcu::TestStatus::pass("Each pixel rendered with alpha-to-one is greater than or equal to the corresponding pixel rendered without alpha-to-one");
+}
+
+
+// AlphaToCoverageInstance
+
+AlphaToCoverageInstance::AlphaToCoverageInstance (Context&										context,
+												  VkPrimitiveTopology							topology,
+												  const std::vector<Vertex4RGBA>&				vertices,
+												  const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+												  const VkPipelineColorBlendAttachmentState&	blendState,
+												  GeometryType									geometryType)
+	: vkt::TestInstance			(context)
+	, m_colorFormat				(VK_FORMAT_R8G8B8A8_UNORM)
+	, m_renderSize				(32, 32)
+	, m_primitiveTopology		(topology)
+	, m_vertices				(vertices)
+	, m_multisampleStateParams	(multisampleStateParams)
+	, m_colorBlendState			(blendState)
+	, m_geometryType			(geometryType)
+{
+}
+
+tcu::TestStatus AlphaToCoverageInstance::iterate (void)
+{
+	DE_ASSERT(m_multisampleStateParams.alphaToCoverageEnable);
+
+	de::MovePtr<tcu::TextureLevel>	result;
+	MultisampleRenderer				renderer	(m_context, m_colorFormat, m_renderSize, m_primitiveTopology, m_vertices, m_multisampleStateParams, m_colorBlendState);
+
+	result = renderer.render();
+
+	return verifyImage(result->getAccess());
+}
+
+tcu::TestStatus AlphaToCoverageInstance::verifyImage (const tcu::ConstPixelBufferAccess&	result)
+{
+	float maxColorValue;
+
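+	// Maximum allowed red value per geometry type: the opaque quad may keep full coverage,
+	// the translucent quad is expected to lose roughly half of its coverage or more, and the
+	// invisible quad should be discarded entirely by alpha-to-coverage.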
+	switch (m_geometryType)
+	{
+		case GEOMETRY_TYPE_OPAQUE_QUAD:
+			maxColorValue = 1.01f;
+			break;
+
+		case GEOMETRY_TYPE_TRANSLUCENT_QUAD:
+			maxColorValue = 0.52f;
+			break;
+
+		case GEOMETRY_TYPE_INVISIBLE_QUAD:
+			maxColorValue = 0.01f;
+			break;
+
+		default:
+			maxColorValue = 0.0f;
+			DE_ASSERT(false);
+	}
+
+	for (int y = 0; y < m_renderSize.y(); y++)
+	{
+		for (int x = 0; x < m_renderSize.x(); x++)
+		{
+			if (result.getPixel(x, y).x() > maxColorValue)
+			{
+				std::ostringstream message;
+				message << "Pixel is not below the threshold value (" << result.getPixel(x, y).x() << " > " << maxColorValue << ")";
+				return tcu::TestStatus::fail(message.str());
+			}
+		}
+	}
+
+	return tcu::TestStatus::pass("No pixel exceeds the maximum color value expected for the geometry type");
+}
+
+
+// MultisampleRenderer
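+// Renders the given vertices into a multisampled color attachment and resolves the result
+// into a single-sample image that render() reads back for verification.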
+
+MultisampleRenderer::MultisampleRenderer (Context&										context,
+										  VkFormat										colorFormat,
+										  const tcu::IVec2&								renderSize,
+										  VkPrimitiveTopology							topology,
+										  const std::vector<Vertex4RGBA>&				vertices,
+										  const VkPipelineMultisampleStateCreateInfo&	multisampleStateParams,
+										  const VkPipelineColorBlendAttachmentState&	blendState)
+
+	: m_context					(context)
+	, m_colorFormat				(colorFormat)
+	, m_renderSize				(renderSize)
+	, m_multisampleStateParams	(multisampleStateParams)
+	, m_colorBlendState			(blendState)
+{
+	if (!isSupportedSampleCount(context.getInstanceInterface(), context.getPhysicalDevice(), multisampleStateParams.rasterizationSamples))
+		throw tcu::NotSupportedError("Unsupported number of rasterization samples");
+
+	const DeviceInterface&		vk						= context.getDeviceInterface();
+	const VkDevice				vkDevice				= context.getDevice();
+	const deUint32				queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc				(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+	const VkComponentMapping	componentMappingRGBA	= { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A };
+
+	// Create color image
+	{
+		const VkImageCreateInfo colorImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,										// VkStructureType			sType;
+			DE_NULL,																	// const void*				pNext;
+			0u,																			// VkImageCreateFlags		flags;
+			VK_IMAGE_TYPE_2D,															// VkImageType				imageType;
+			m_colorFormat,																// VkFormat					format;
+			{ (deUint32)m_renderSize.x(), (deUint32)m_renderSize.y(), 1u },				// VkExtent3D				extent;
+			1u,																			// deUint32					mipLevels;
+			1u,																			// deUint32					arrayLayers;
+			m_multisampleStateParams.rasterizationSamples,								// VkSampleCountFlagBits	samples;
+			VK_IMAGE_TILING_OPTIMAL,													// VkImageTiling			tiling;
+			VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,		// VkImageUsageFlags		usage;
+			VK_SHARING_MODE_EXCLUSIVE,													// VkSharingMode			sharingMode;
+			1u,																			// deUint32					queueFamilyIndexCount;
+			&queueFamilyIndex,															// const deUint32*			pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED,													// VkImageLayout			initialLayout;
+		};
+
+		m_colorImage			= createImage(vk, vkDevice, &colorImageParams);
+
+		// Allocate and bind color image memory
+		m_colorImageAlloc		= memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_colorImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_colorImage, m_colorImageAlloc->getMemory(), m_colorImageAlloc->getOffset()));
+	}
+
+	// Create resolve image
+	{
+		const VkImageCreateInfo resolveImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,											// VkStructureType			sType;
+			DE_NULL,																		// const void*				pNext;
+			0u,																				// VkImageCreateFlags		flags;
+			VK_IMAGE_TYPE_2D,																// VkImageType				imageType;
+			m_colorFormat,																	// VkFormat					format;
+			{ (deUint32)m_renderSize.x(), (deUint32)m_renderSize.y(), 1u },					// VkExtent3D				extent;
+			1u,																				// deUint32					mipLevels;
+			1u,																				// deUint32					arrayLayers;
+			VK_SAMPLE_COUNT_1_BIT,															// VkSampleCountFlagBits	samples;
+			VK_IMAGE_TILING_OPTIMAL,														// VkImageTiling			tiling;
+			VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT |			// VkImageUsageFlags		usage;
+				VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+			VK_SHARING_MODE_EXCLUSIVE,														// VkSharingMode			sharingMode;
+			1u,																				// deUint32					queueFamilyIndexCount;
+			&queueFamilyIndex,																// const deUint32*			pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED														// VkImageLayout			initialLayout;
+		};
+
+		m_resolveImage = createImage(vk, vkDevice, &resolveImageParams);
+
+		// Allocate and bind resolve image memory
+		m_resolveImageAlloc = memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_resolveImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_resolveImage, m_resolveImageAlloc->getMemory(), m_resolveImageAlloc->getOffset()));
+	}
+
+	// Create color attachment view
+	{
+		const VkImageViewCreateInfo colorAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,		// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			0u,												// VkImageViewCreateFlags	flags;
+			*m_colorImage,									// VkImage					image;
+			VK_IMAGE_VIEW_TYPE_2D,							// VkImageViewType			viewType;
+			m_colorFormat,									// VkFormat					format;
+			componentMappingRGBA,							// VkComponentMapping		components;
+			{ VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u }	// VkImageSubresourceRange	subresourceRange;
+		};
+
+		m_colorAttachmentView = createImageView(vk, vkDevice, &colorAttachmentViewParams);
+	}
+
+	// Create resolve attachment view
+	{
+		const VkImageViewCreateInfo resolveAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,		// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			0u,												// VkImageViewCreateFlags	flags;
+			*m_resolveImage,								// VkImage					image;
+			VK_IMAGE_VIEW_TYPE_2D,							// VkImageViewType			viewType;
+			m_colorFormat,									// VkFormat					format;
+			componentMappingRGBA,							// VkComponentMapping		components;
+			{ VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u }	// VkImageSubresourceRange	subresourceRange;
+		};
+
+		m_resolveAttachmentView = createImageView(vk, vkDevice, &resolveAttachmentViewParams);
+	}
+
+	// Create render pass
+	{
+		const VkAttachmentDescription attachmentDescriptions[2] =
+		{
+			{
+				0u,													// VkAttachmentDescriptionFlags		flags;
+				m_colorFormat,										// VkFormat							format;
+				m_multisampleStateParams.rasterizationSamples,		// VkSampleCountFlagBits			samples;
+				VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp				loadOp;
+				VK_ATTACHMENT_STORE_OP_STORE,						// VkAttachmentStoreOp				storeOp;
+				VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// VkAttachmentLoadOp				stencilLoadOp;
+				VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp				stencilStoreOp;
+				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout					initialLayout;
+				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout					finalLayout;
+			},
+			{
+				0u,													// VkAttachmentDescriptionFlags		flags;
+				m_colorFormat,										// VkFormat							format;
+				VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits			samples;
+				VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp				loadOp;
+				VK_ATTACHMENT_STORE_OP_STORE,						// VkAttachmentStoreOp				storeOp;
+				VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// VkAttachmentLoadOp				stencilLoadOp;
+				VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp				stencilStoreOp;
+				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout					initialLayout;
+				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout					finalLayout;
+			}
+		};
+
+		const VkAttachmentReference colorAttachmentReference =
+		{
+			0u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout	layout;
+		};
+
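+		// Attachment 1 receives the single-sample resolved result at the end of the subpass.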
+		const VkAttachmentReference resolveAttachmentReference =
+		{
+			1u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout	layout;
+		};
+
+		const VkSubpassDescription subpassDescription =
+		{
+			0u,														// VkSubpassDescriptionFlags	flags;
+			VK_PIPELINE_BIND_POINT_GRAPHICS,						// VkPipelineBindPoint			pipelineBindPoint;
+			0u,														// deUint32						inputAttachmentCount;
+			DE_NULL,												// const VkAttachmentReference*	pInputAttachments;
+			1u,														// deUint32						colorAttachmentCount;
+			&colorAttachmentReference,								// const VkAttachmentReference*	pColorAttachments;
+			&resolveAttachmentReference,							// const VkAttachmentReference*	pResolveAttachments;
+			DE_NULL,												// const VkAttachmentReference*	pDepthStencilAttachment;
+			0u,														// deUint32						preserveAttachmentCount;
+			DE_NULL													// const VkAttachmentReference*	pPreserveAttachments;
+		};
+
+		const VkRenderPassCreateInfo renderPassParams =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,			// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkRenderPassCreateFlags			flags;
+			2u,													// deUint32							attachmentCount;
+			attachmentDescriptions,								// const VkAttachmentDescription*	pAttachments;
+			1u,													// deUint32							subpassCount;
+			&subpassDescription,								// const VkSubpassDescription*		pSubpasses;
+			0u,													// deUint32							dependencyCount;
+			DE_NULL												// const VkSubpassDependency*		pDependencies;
+		};
+
+		m_renderPass = createRenderPass(vk, vkDevice, &renderPassParams);
+	}
+
+	// Create framebuffer
+	{
+		const VkImageView attachments[2] =
+		{
+			*m_colorAttachmentView,
+			*m_resolveAttachmentView
+		};
+
+		const VkFramebufferCreateInfo framebufferParams =
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,			// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			0u,													// VkFramebufferCreateFlags		flags;
+			*m_renderPass,										// VkRenderPass					renderPass;
+			2u,													// deUint32						attachmentCount;
+			attachments,										// const VkImageView*			pAttachments;
+			(deUint32)m_renderSize.x(),							// deUint32						width;
+			(deUint32)m_renderSize.y(),							// deUint32						height;
+			1u													// deUint32						layers;
+		};
+
+		m_framebuffer = createFramebuffer(vk, vkDevice, &framebufferParams);
+	}
+
+	// Create pipeline layout
+	{
+		const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkPipelineLayoutCreateFlags		flags;
+			0u,													// deUint32							setLayoutCount;
+			DE_NULL,											// const VkDescriptorSetLayout*		pSetLayouts;
+			0u,													// deUint32							pushConstantRangeCount;
+			DE_NULL												// const VkPushConstantRange*		pPushConstantRanges;
+		};
+
+		m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	m_vertexShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("color_vert"), 0);
+	m_fragmentShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("color_frag"), 0);
+
+	// Create pipeline
+	{
+		const VkPipelineShaderStageCreateInfo shaderStageParams[2] =
+		{
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				0u,															// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_VERTEX_BIT,									// VkShaderStageFlagBits				stage;
+				*m_vertexShaderModule,										// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			},
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				0u,															// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_FRAGMENT_BIT,								// VkShaderStageFlagBits				stage;
+				*m_fragmentShaderModule,									// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			}
+		};
+
+		const VkVertexInputBindingDescription vertexInputBindingDescription =
+		{
+			0u,									// deUint32				binding;
+			sizeof(Vertex4RGBA),				// deUint32				stride;
+			VK_VERTEX_INPUT_RATE_VERTEX			// VkVertexInputRate	inputRate;
+		};
+
+		const VkVertexInputAttributeDescription vertexInputAttributeDescriptions[2] =
+		{
+			{
+				0u,									// deUint32	location;
+				0u,									// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat	format;
+				0u									// deUint32	offset;
+			},
+			{
+				1u,									// deUint32	location;
+				0u,									// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat	format;
+				DE_OFFSET_OF(Vertex4RGBA, color),	// deUint32	offset;
+			}
+		};
+
+		const VkPipelineVertexInputStateCreateInfo vertexInputStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineVertexInputStateCreateFlags	flags;
+			1u,																// deUint32									vertexBindingDescriptionCount;
+			&vertexInputBindingDescription,									// const VkVertexInputBindingDescription*	pVertexBindingDescriptions;
+			2u,																// deUint32									vertexAttributeDescriptionCount;
+			vertexInputAttributeDescriptions								// const VkVertexInputAttributeDescription*	pVertexAttributeDescriptions;
+		};
+
+		const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineInputAssemblyStateCreateFlags	flags;
+			topology,														// VkPrimitiveTopology						topology;
+			false															// VkBool32									primitiveRestartEnable;
+		};
+
+		const VkViewport viewport =
+		{
+			0.0f,						// float	x;
+			0.0f,						// float	y;
+			(float)m_renderSize.x(),	// float	width;
+			(float)m_renderSize.y(),	// float	height;
+			0.0f,						// float	minDepth;
+			1.0f						// float	maxDepth;
+		};
+
+		const VkRect2D scissor =
+		{
+			{ 0, 0 },													// VkOffset2D  offset;
+			{ (deUint32)m_renderSize.x(), (deUint32)m_renderSize.y() }	// VkExtent2D  extent;
+		};
+
+		const VkPipelineViewportStateCreateInfo viewportStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,			// VkStructureType						sType;
+			DE_NULL,														// const void*							pNext;
+			0u,																// VkPipelineViewportStateCreateFlags	flags;
+			1u,																// deUint32								viewportCount;
+			&viewport,														// const VkViewport*					pViewports;
+			1u,																// deUint32								scissorCount;
+			&scissor														// const VkRect2D*						pScissors;
+		};
+
+		const VkPipelineRasterizationStateCreateInfo rasterStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineRasterizationStateCreateFlags	flags;
+			false,															// VkBool32									depthClampEnable;
+			false,															// VkBool32									rasterizerDiscardEnable;
+			VK_POLYGON_MODE_FILL,											// VkPolygonMode							polygonMode;
+			VK_CULL_MODE_NONE,												// VkCullModeFlags							cullMode;
+			VK_FRONT_FACE_COUNTER_CLOCKWISE,								// VkFrontFace								frontFace;
+			VK_FALSE,														// VkBool32									depthBiasEnable;
+			0.0f,															// float									depthBiasConstantFactor;
+			0.0f,															// float									depthBiasClamp;
+			0.0f,															// float									depthBiasSlopeFactor;
+			1.0f															// float									lineWidth;
+		};
+
+		const VkPipelineColorBlendStateCreateInfo colorBlendStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	// VkStructureType								sType;
+			DE_NULL,													// const void*									pNext;
+			0u,															// VkPipelineColorBlendStateCreateFlags			flags;
+			false,														// VkBool32										logicOpEnable;
+			VK_LOGIC_OP_COPY,											// VkLogicOp									logicOp;
+			1u,															// deUint32										attachmentCount;
+			&m_colorBlendState,											// const VkPipelineColorBlendAttachmentState*	pAttachments;
+			{ 0.0f, 0.0f, 0.0f, 0.0f }									// float										blendConstants[4];
+		};
+
+		const VkPipelineDynamicStateCreateInfo	dynamicStateParams		=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,		// VkStructureType						sType;
+			DE_NULL,													// const void*							pNext;
+			0u,															// VkPipelineDynamicStateCreateFlags	flags;
+			0u,															// deUint32								dynamicStateCount;
+			DE_NULL														// const VkDynamicState*				pDynamicStates;
+		};
+
+		const VkPipelineDepthStencilStateCreateInfo depthStencilStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineDepthStencilStateCreateFlags	flags;
+			false,														// VkBool32									depthTestEnable;
+			false,														// VkBool32									depthWriteEnable;
+			VK_COMPARE_OP_LESS,											// VkCompareOp								depthCompareOp;
+			false,														// VkBool32									depthBoundsTestEnable;
+			false,														// VkBool32									stencilTestEnable;
+			// VkStencilOpState	front;
+			{
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	failOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	passOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	depthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	compareOp;
+				0u,						// deUint32		compareMask;
+				0u,						// deUint32		writeMask;
+				0u,						// deUint32		reference;
+			},
+			// VkStencilOpState	back;
+			{
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	failOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	passOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	depthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	compareOp;
+				0u,						// deUint32		compareMask;
+				0u,						// deUint32		writeMask;
+				0u,						// deUint32		reference;
+			},
+			-1.0f,														// float			minDepthBounds;
+			+1.0f,														// float			maxDepthBounds;
+		};
+
+		const VkGraphicsPipelineCreateInfo graphicsPipelineParams =
+		{
+			VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,	// VkStructureType									sType;
+			DE_NULL,											// const void*										pNext;
+			0u,													// VkPipelineCreateFlags							flags;
+			2u,													// deUint32											stageCount;
+			shaderStageParams,									// const VkPipelineShaderStageCreateInfo*			pStages;
+			&vertexInputStateParams,							// const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+			&inputAssemblyStateParams,							// const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+			DE_NULL,											// const VkPipelineTessellationStateCreateInfo*		pTessellationState;
+			&viewportStateParams,								// const VkPipelineViewportStateCreateInfo*			pViewportState;
+			&rasterStateParams,									// const VkPipelineRasterizationStateCreateInfo*	pRasterizationState;
+			&m_multisampleStateParams,							// const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+			&depthStencilStateParams,							// const VkPipelineDepthStencilStateCreateInfo*		pDepthStencilState;
+			&colorBlendStateParams,								// const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+			&dynamicStateParams,								// const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+			*m_pipelineLayout,									// VkPipelineLayout									layout;
+			*m_renderPass,										// VkRenderPass										renderPass;
+			0u,													// deUint32											subpass;
+			0u,													// VkPipeline										basePipelineHandle;
+			0u													// deInt32											basePipelineIndex;
+		};
+
+		m_graphicsPipeline	= createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+	}
+
+	// Create vertex buffer
+	{
+		const VkBufferCreateInfo vertexBufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			1024u,										// VkDeviceSize			size;
+			VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_vertexBuffer		= createBuffer(vk, vkDevice, &vertexBufferParams);
+		m_vertexBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_vertexBuffer), MemoryRequirement::HostVisible);
+
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_vertexBuffer, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset()));
+
+		// Load vertices into vertex buffer
+		deMemcpy(m_vertexBufferAlloc->getHostPtr(), vertices.data(), vertices.size() * sizeof(Vertex4RGBA));
+		flushMappedMemoryRange(vk, vkDevice, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset(), vertexBufferParams.size);
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,										// const void*					pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCommandPoolCreateFlags		flags;
+			queueFamilyIndex,								// deUint32						queueFamilyIndex;
+		};
+
+		m_cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel	level;
+			1u												// deUint32				bufferCount;
+			1u												// deUint32				commandBufferCount;
+
+		const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+			DE_NULL,										// const void*						pNext;
+			0u,												// VkCommandBufferUsageFlags		flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,	// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+		};
+
+		VkClearValue colorClearValue;
+		colorClearValue.color.float32[0] = 0.0f;
+		colorClearValue.color.float32[1] = 0.0f;
+		colorClearValue.color.float32[2] = 0.0f;
+		colorClearValue.color.float32[3] = 0.0f;
+
+		const VkClearValue clearValues[2] =
+		{
+			colorClearValue,
+			colorClearValue
+		};
+
+		const VkRenderPassBeginInfo renderPassBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,				// VkStructureType		sType;
+			DE_NULL,												// const void*			pNext;
+			*m_renderPass,											// VkRenderPass			renderPass;
+			*m_framebuffer,											// VkFramebuffer		framebuffer;
+			{
+				{ 0, 0 },
+				{ (deUint32)m_renderSize.x(), (deUint32)m_renderSize.y() }
+			},														// VkRect2D				renderArea;
+			2,														// deUint32				clearValueCount;
+			clearValues												// const VkClearValue*	pClearValues;
+		};
+
+		m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
+
+		VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+		vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+		VkDeviceSize vertexBufferOffset = 0u;
+
+		vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipeline);
+		vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), &vertexBufferOffset);
+		vk.cmdDraw(*m_cmdBuffer, (deUint32)vertices.size(), 1, 0, 0);
+
+		vk.cmdEndRenderPass(*m_cmdBuffer);
+
+		VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+
+		m_fence = createFence(vk, vkDevice, &fenceParams);
+	}
+}
+
+MultisampleRenderer::~MultisampleRenderer (void)
+{
+}
+
+de::MovePtr<tcu::TextureLevel> MultisampleRenderer::render (void)
+{
+	const DeviceInterface&		vk					= m_context.getDeviceInterface();
+	const VkDevice				vkDevice			= m_context.getDevice();
+	const VkQueue				queue				= m_context.getUniversalQueue();
+	const deUint32				queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				allocator			(vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
+	const VkSubmitInfo			submitInfo	=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity */));
+
+	return readColorAttachment(vk, vkDevice, queue, queueFamilyIndex, allocator, *m_resolveImage, m_colorFormat, m_renderSize.cast<deUint32>());
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createMultisampleTests (tcu::TestContext& testCtx)
+{
+	const VkSampleCountFlagBits samples[] =
+	{
+		VK_SAMPLE_COUNT_2_BIT,
+		VK_SAMPLE_COUNT_4_BIT,
+		VK_SAMPLE_COUNT_8_BIT,
+		VK_SAMPLE_COUNT_16_BIT,
+		VK_SAMPLE_COUNT_32_BIT,
+		VK_SAMPLE_COUNT_64_BIT
+	};
+
+	de::MovePtr<tcu::TestCaseGroup> multisampleTests (new tcu::TestCaseGroup(testCtx, "multisample", ""));
+
+	// Rasterization samples tests
+	{
+		de::MovePtr<tcu::TestCaseGroup> rasterizationSamplesTests(new tcu::TestCaseGroup(testCtx, "raster_samples", ""));
+
+		for (int samplesNdx = 0; samplesNdx < DE_LENGTH_OF_ARRAY(samples); samplesNdx++)
+		{
+			std::ostringstream caseName;
+			caseName << "samples_" << samples[samplesNdx];
+
+			de::MovePtr<tcu::TestCaseGroup> samplesTests	(new tcu::TestCaseGroup(testCtx, caseName.str().c_str(), ""));
+
+			samplesTests->addChild(new RasterizationSamplesTest(testCtx, "primitive_triangle", "", samples[samplesNdx], GEOMETRY_TYPE_OPAQUE_TRIANGLE));
+			samplesTests->addChild(new RasterizationSamplesTest(testCtx, "primitive_line", "", samples[samplesNdx], GEOMETRY_TYPE_OPAQUE_LINE));
+			samplesTests->addChild(new RasterizationSamplesTest(testCtx, "primitive_point", "", samples[samplesNdx], GEOMETRY_TYPE_OPAQUE_POINT));
+
+			rasterizationSamplesTests->addChild(samplesTests.release());
+		}
+
+		multisampleTests->addChild(rasterizationSamplesTests.release());
+	}
+
+	// Raster samples consistency check
+	{
+		de::MovePtr<tcu::TestCaseGroup> rasterSamplesConsistencyTests(new tcu::TestCaseGroup(testCtx, "raster_samples_consistency", ""));
+
+		addFunctionCaseWithPrograms(rasterSamplesConsistencyTests.get(),
+									"unique_colors_check",
+									"",
+									initMultisamplePrograms,
+									testRasterSamplesConsistency,
+									GEOMETRY_TYPE_OPAQUE_TRIANGLE);
+
+		multisampleTests->addChild(rasterSamplesConsistencyTests.release());
+	}
+
+	// minSampleShading tests
+	{
+		struct TestConfig
+		{
+			const char*	name;
+			float		minSampleShading;
+		};
+
+		const TestConfig testConfigs[] =
+		{
+			{ "min_0_0",	0.0f },
+			{ "min_0_25",	0.25f },
+			{ "min_0_5",	0.5f },
+			{ "min_0_75",	0.75f },
+			{ "min_1_0",	1.0f }
+		};
+
+		de::MovePtr<tcu::TestCaseGroup> minSampleShadingTests(new tcu::TestCaseGroup(testCtx, "min_sample_shading", ""));
+
+		for (int configNdx = 0; configNdx < DE_LENGTH_OF_ARRAY(testConfigs); configNdx++)
+		{
+			const TestConfig&				testConfig				= testConfigs[configNdx];
+			de::MovePtr<tcu::TestCaseGroup>	minShadingValueTests	(new tcu::TestCaseGroup(testCtx, testConfigs[configNdx].name, ""));
+
+			for (int samplesNdx = 0; samplesNdx < DE_LENGTH_OF_ARRAY(samples); samplesNdx++)
+			{
+				std::ostringstream caseName;
+				caseName << "samples_" << samples[samplesNdx];
+
+				de::MovePtr<tcu::TestCaseGroup> samplesTests	(new tcu::TestCaseGroup(testCtx, caseName.str().c_str(), ""));
+
+				samplesTests->addChild(new MinSampleShadingTest(testCtx, "primitive_triangle", "", samples[samplesNdx], testConfig.minSampleShading, GEOMETRY_TYPE_OPAQUE_TRIANGLE));
+				samplesTests->addChild(new MinSampleShadingTest(testCtx, "primitive_line", "", samples[samplesNdx], testConfig.minSampleShading, GEOMETRY_TYPE_OPAQUE_LINE));
+				samplesTests->addChild(new MinSampleShadingTest(testCtx, "primitive_point", "", samples[samplesNdx], testConfig.minSampleShading, GEOMETRY_TYPE_OPAQUE_POINT));
+
+				minShadingValueTests->addChild(samplesTests.release());
+			}
+
+			minSampleShadingTests->addChild(minShadingValueTests.release());
+		}
+
+		multisampleTests->addChild(minSampleShadingTests.release());
+	}
+
+	// pSampleMask tests
+	{
+		struct TestConfig
+		{
+			const char*		name;
+			const char*		description;
+			VkSampleMask	sampleMask;
+		};
+
+		const TestConfig testConfigs[] =
+		{
+			{ "mask_all_off",	"All mask bits are off",			0x0 },
+			{ "mask_all_on",	"All mask bits are on",				0xFFFFFFFF },
+			{ "mask_one",		"All mask elements are 0x1",		0x1 },
+			{ "mask_random",	"All mask elements are 0xAAAAAAAA",	0xAAAAAAAA },
+		};
+
+		de::MovePtr<tcu::TestCaseGroup> sampleMaskTests(new tcu::TestCaseGroup(testCtx, "sample_mask", ""));
+
+		for (int configNdx = 0; configNdx < DE_LENGTH_OF_ARRAY(testConfigs); configNdx++)
+		{
+			const TestConfig&				testConfig				= testConfigs[configNdx];
+			de::MovePtr<tcu::TestCaseGroup>	sampleMaskValueTests	(new tcu::TestCaseGroup(testCtx, testConfig.name, testConfig.description));
+
+			for (int samplesNdx = 0; samplesNdx < DE_LENGTH_OF_ARRAY(samples); samplesNdx++)
+			{
+				std::ostringstream caseName;
+				caseName << "samples_" << samples[samplesNdx];
+
+				const deUint32					sampleMaskCount	= (samples[samplesNdx] + 31) / 32;
+				de::MovePtr<tcu::TestCaseGroup> samplesTests	(new tcu::TestCaseGroup(testCtx, caseName.str().c_str(), ""));
+
+				std::vector<VkSampleMask> mask;
+				for (deUint32 maskNdx = 0; maskNdx < sampleMaskCount; maskNdx++)
+					mask.push_back(testConfig.sampleMask);
+
+				samplesTests->addChild(new SampleMaskTest(testCtx, "primitive_triangle", "", samples[samplesNdx], mask, GEOMETRY_TYPE_OPAQUE_TRIANGLE));
+				samplesTests->addChild(new SampleMaskTest(testCtx, "primitive_line", "", samples[samplesNdx], mask, GEOMETRY_TYPE_OPAQUE_LINE));
+				samplesTests->addChild(new SampleMaskTest(testCtx, "primitive_point", "", samples[samplesNdx], mask, GEOMETRY_TYPE_OPAQUE_POINT));
+
+				sampleMaskValueTests->addChild(samplesTests.release());
+			}
+
+			sampleMaskTests->addChild(sampleMaskValueTests.release());
+		}
+
+		multisampleTests->addChild(sampleMaskTests.release());
+	}
+
+	// AlphaToOne tests
+	{
+		de::MovePtr<tcu::TestCaseGroup> alphaToOneTests(new tcu::TestCaseGroup(testCtx, "alpha_to_one", ""));
+
+		for (int samplesNdx = 0; samplesNdx < DE_LENGTH_OF_ARRAY(samples); samplesNdx++)
+		{
+			std::ostringstream caseName;
+			caseName << "samples_" << samples[samplesNdx];
+
+			alphaToOneTests->addChild(new AlphaToOneTest(testCtx, caseName.str(), "", samples[samplesNdx]));
+		}
+
+		multisampleTests->addChild(alphaToOneTests.release());
+	}
+
+	// AlphaToCoverageEnable tests
+	{
+		de::MovePtr<tcu::TestCaseGroup> alphaToCoverageTests (new tcu::TestCaseGroup(testCtx, "alpha_to_coverage", ""));
+
+		for (int samplesNdx = 0; samplesNdx < DE_LENGTH_OF_ARRAY(samples); samplesNdx++)
+		{
+			std::ostringstream caseName;
+			caseName << "samples_" << samples[samplesNdx];
+
+			de::MovePtr<tcu::TestCaseGroup> samplesTests	(new tcu::TestCaseGroup(testCtx, caseName.str().c_str(), ""));
+
+			samplesTests->addChild(new AlphaToCoverageTest(testCtx, "alpha_opaque", "", samples[samplesNdx], GEOMETRY_TYPE_OPAQUE_QUAD));
+			samplesTests->addChild(new AlphaToCoverageTest(testCtx, "alpha_translucent", "", samples[samplesNdx], GEOMETRY_TYPE_TRANSLUCENT_QUAD));
+			samplesTests->addChild(new AlphaToCoverageTest(testCtx, "alpha_invisible", "", samples[samplesNdx], GEOMETRY_TYPE_INVISIBLE_QUAD));
+
+			alphaToCoverageTests->addChild(samplesTests.release());
+		}
+		multisampleTests->addChild(alphaToCoverageTests.release());
+	}
+
+	return multisampleTests.release();
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineMultisampleTests.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineMultisampleTests.hpp
new file mode 100644
index 0000000..db5ed3b
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineMultisampleTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTPIPELINEMULTISAMPLETESTS_HPP
+#define _VKTPIPELINEMULTISAMPLETESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Multisample Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+tcu::TestCaseGroup* createMultisampleTests (tcu::TestContext& testCtx);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINEMULTISAMPLETESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelinePushConstantTests.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelinePushConstantTests.cpp
new file mode 100644
index 0000000..1b9400d
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelinePushConstantTests.cpp
@@ -0,0 +1,1584 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 ARM Limited.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief PushConstant Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelinePushConstantTests.hpp"
+#include "vktPipelineClearUtil.hpp"
+#include "vktPipelineImageUtil.hpp"
+#include "vktPipelineVertexUtil.hpp"
+#include "vktPipelineReferenceRenderer.hpp"
+#include "vktTestCase.hpp"
+#include "vkImageUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkBuilderUtil.hpp"
+#include "vkTypeUtil.hpp"
+#include "tcuImageCompare.hpp"
+#include "deMemory.h"
+#include "deRandom.hpp"
+#include "deStringUtil.hpp"
+#include "deUniquePtr.hpp"
+
+#include <algorithm>
+#include <sstream>
+#include <vector>
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+
+namespace
+{
+
+enum
+{
+	TRIANGLE_COUNT	= 2,
+	MAX_RANGE_COUNT	= 5
+};
+
+enum RangeSizeCase
+{
+	SIZE_CASE_4	= 0,
+	SIZE_CASE_16,
+	SIZE_CASE_32,
+	SIZE_CASE_48,
+	SIZE_CASE_128,
+	SIZE_CASE_UNSUPPORTED
+};
+
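+// Describes one push constant configuration: 'range' is what the pipeline layout
+// declares, while 'update' is the region actually written with vkCmdPushConstants.
+// Making the update smaller than the range lets the tests cover partial updates.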
+struct PushConstantData
+{
+	struct PushConstantRange
+	{
+		VkShaderStageFlags		shaderStage;
+		deUint32				offset;
+		deUint32				size;
+	} range;
+	struct PushConstantUpdate
+	{
+		deUint32				offset;
+		deUint32				size;
+	} update;
+};
+
+class PushConstantGraphicsTest : public vkt::TestCase
+{
+public:
+							PushConstantGraphicsTest	(tcu::TestContext&			testContext,
+														 const std::string&			name,
+														 const std::string&			description,
+														 const deUint32				rangeCount,
+														 const PushConstantData		pushConstantRange[MAX_RANGE_COUNT],
+														 const deBool				multipleUpdate);
+	virtual					~PushConstantGraphicsTest	(void);
+	virtual void			initPrograms				(SourceCollections& sourceCollections) const;
+	virtual TestInstance*	createInstance				(Context& context) const;
+	RangeSizeCase			getRangeSizeCase			(deUint32 rangeSize) const;
+
+private:
+	const deUint32			m_rangeCount;
+	PushConstantData		m_pushConstantRange[MAX_RANGE_COUNT];
+	const deBool			m_multipleUpdate;
+};
+
+class PushConstantGraphicsTestInstance : public vkt::TestInstance
+{
+public:
+								PushConstantGraphicsTestInstance	(Context&					context,
+																	 const deUint32				rangeCount,
+																	 const PushConstantData		pushConstantRange[MAX_RANGE_COUNT],
+																	 const deBool				multipleUpdate);
+	virtual						~PushConstantGraphicsTestInstance	(void);
+	virtual tcu::TestStatus		iterate								(void);
+
+	void						createShaderStage					(const DeviceInterface&		vk,
+																	 VkDevice					device,
+																	 const BinaryCollection&	programCollection,
+																	 const char*				name,
+																	 VkShaderStageFlagBits		stage,
+																	 Move<VkShaderModule>*		module);
+	std::vector<Vertex4RGBA>	createQuad							(const float size);
+
+private:
+	tcu::TestStatus				verifyImage							(void);
+
+private:
+	const tcu::UVec2								m_renderSize;
+	const VkFormat									m_colorFormat;
+	const deUint32									m_rangeCount;
+	PushConstantData								m_pushConstantRange[MAX_RANGE_COUNT];
+	const deBool									m_multipleUpdate;
+
+	VkImageCreateInfo								m_colorImageCreateInfo;
+	Move<VkImage>									m_colorImage;
+	de::MovePtr<Allocation>							m_colorImageAlloc;
+	Move<VkImageView>								m_colorAttachmentView;
+	Move<VkRenderPass>								m_renderPass;
+	Move<VkFramebuffer>								m_framebuffer;
+
+	Move<VkShaderModule>							m_vertexShaderModule;
+	Move<VkShaderModule>							m_fragmentShaderModule;
+	Move<VkShaderModule>							m_geometryShaderModule;
+	Move<VkShaderModule>							m_tessControlShaderModule;
+	Move<VkShaderModule>							m_tessEvaluationShaderModule;
+
+	VkShaderStageFlags								m_shaderFlags;
+	std::vector<VkPipelineShaderStageCreateInfo>	m_shaderStage;
+
+	Move<VkBuffer>									m_vertexBuffer;
+	std::vector<Vertex4RGBA>						m_vertices;
+	de::MovePtr<Allocation>							m_vertexBufferAlloc;
+
+	Move<VkBuffer>									m_uniformBuffer;
+	de::MovePtr<Allocation>							m_uniformBufferAlloc;
+	Move<VkDescriptorPool>							m_descriptorPool;
+	Move<VkDescriptorSetLayout>						m_descriptorSetLayout;
+	Move<VkDescriptorSet>							m_descriptorSet;
+
+	Move<VkPipelineLayout>							m_pipelineLayout;
+	Move<VkPipeline>								m_graphicsPipelines;
+
+	Move<VkCommandPool>								m_cmdPool;
+	Move<VkCommandBuffer>							m_cmdBuffer;
+
+	Move<VkFence>									m_fence;
+};
+
+PushConstantGraphicsTest::PushConstantGraphicsTest (tcu::TestContext&			testContext,
+													const std::string&			name,
+													const std::string&			description,
+													const deUint32				rangeCount,
+													const PushConstantData		pushConstantRange[MAX_RANGE_COUNT],
+													const deBool				multipleUpdate)
+	: vkt::TestCase		(testContext, name, description)
+	, m_rangeCount		(rangeCount)
+	, m_multipleUpdate	(multipleUpdate)
+{
+	deMemcpy(m_pushConstantRange, pushConstantRange, sizeof(PushConstantData) * MAX_RANGE_COUNT);
+}
+
+PushConstantGraphicsTest::~PushConstantGraphicsTest (void)
+{
+}
+
+TestInstance* PushConstantGraphicsTest::createInstance (Context& context) const
+{
+	return new PushConstantGraphicsTestInstance(context, m_rangeCount, m_pushConstantRange, m_multipleUpdate);
+}
+
+RangeSizeCase PushConstantGraphicsTest::getRangeSizeCase (deUint32 rangeSize) const
+{
+	switch (rangeSize)
+	{
+		case 4:
+			return SIZE_CASE_4;
+		case 16:
+			return SIZE_CASE_16;
+		case 32:
+			return SIZE_CASE_32;
+		case 48:
+			return SIZE_CASE_48;
+		case 128:
+			return SIZE_CASE_128;
+		default:
+			DE_FATAL("Range size not supported yet");
+			return SIZE_CASE_UNSUPPORTED;
+	}
+}
+
+void PushConstantGraphicsTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	std::ostringstream	vertexSrc;
+	std::ostringstream	fragmentSrc;
+	std::ostringstream	geometrySrc;
+	std::ostringstream	tessControlSrc;
+	std::ostringstream	tessEvaluationSrc;
+
+	for (size_t rangeNdx = 0; rangeNdx < m_rangeCount; rangeNdx++)
+	{
+		if (m_pushConstantRange[rangeNdx].range.shaderStage & VK_SHADER_STAGE_VERTEX_BIT)
+		{
+			vertexSrc << "#version 450\n"
+					  << "layout(location = 0) in highp vec4 position;\n"
+					  << "layout(location = 1) in highp vec4 color;\n"
+					  << "layout(location = 0) out highp vec4 vtxColor;\n"
+					  << "layout(push_constant) uniform Material {\n";
+
+			switch (getRangeSizeCase(m_pushConstantRange[rangeNdx].range.size))
+			{
+				case SIZE_CASE_4:
+					vertexSrc << "int kind;\n"
+							  << "} matInst;\n";
+					break;
+				case SIZE_CASE_16:
+					vertexSrc << "vec4 color;\n"
+							  << "} matInst;\n"
+							  << "layout(std140, binding = 0) uniform UniformBuf {\n"
+							  << "vec4 element;\n"
+							  << "} uniformBuf;\n";
+					break;
+				case SIZE_CASE_32:
+					vertexSrc << "vec4 color[2];\n"
+							  << "} matInst;\n";
+					break;
+				case SIZE_CASE_48:
+					vertexSrc << "int dummy1;\n"
+							  << "vec4 dummy2;\n"
+							  << "vec4 color;\n"
+							  << "} matInst;\n";
+					break;
+				case SIZE_CASE_128:
+					vertexSrc << "vec4 color[8];\n"
+							  << "} matInst;\n";
+					break;
+				default:
+					DE_FATAL("Not implemented yet");
+					break;
+			}
+
+			vertexSrc << "void main()\n"
+					  << "{\n"
+					  << "	gl_Position = position;\n";
+
+			switch (getRangeSizeCase(m_pushConstantRange[rangeNdx].range.size))
+			{
+				case SIZE_CASE_4:
+					vertexSrc << "switch (matInst.kind) {\n"
+							  << "case 0: vtxColor = vec4(0.0, 1.0, 0, 1.0); break;\n"
+							  << "case 1: vtxColor = vec4(0.0, 0.0, 1.0, 1.0); break;\n"
+							  << "case 2: vtxColor = vec4(1.0, 0.0, 0, 1.0); break;\n"
+							  << "default: vtxColor = color; break;}\n"
+							  << "}\n";
+					break;
+				case SIZE_CASE_16:
+					vertexSrc << "vtxColor = (matInst.color + uniformBuf.element) * 0.5;\n"
+							  << "}\n";
+					break;
+				case SIZE_CASE_32:
+					vertexSrc << "vtxColor = (matInst.color[0] + matInst.color[1]) * 0.5;\n"
+							  << "}\n";
+					break;
+				case SIZE_CASE_48:
+					vertexSrc << "vtxColor = matInst.color;\n"
+							  << "}\n";
+					break;
+				case SIZE_CASE_128:
+					vertexSrc << "vec4 color = vec4(0.0, 0.0, 0.0, 0.0);\n"
+							  << "for (int i = 0; i < 8; i++)\n"
+							  << "{\n"
+							  << "  color = color + matInst.color[i];\n"
+							  << "}\n"
+							  << "vtxColor = color * 0.125;\n"
+							  << "}\n";
+					break;
+				default:
+					DE_FATAL("Not implemented yet");
+					break;
+			}
+
+			sourceCollections.glslSources.add("color_vert") << glu::VertexSource(vertexSrc.str());
+		}
+
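+		// The explicit layout(offset = N) qualifiers used by the stages below pin
+		// where each stage reads within the push constant block, so the per-stage
+		// members occupy distinct regions of the same block.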
+		if (m_pushConstantRange[rangeNdx].range.shaderStage & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
+		{
+			tessControlSrc << "#version 450\n"
+						   << "layout (vertices = 3) out;\n"
+						   << "layout(push_constant) uniform TessLevel {\n"
+						   << "    layout(offset = 24) int level;\n"
+						   << "} tessLevel;\n"
+						   << "layout(location = 0) in highp vec4 color[];\n"
+						   << "layout(location = 0) out highp vec4 vtxColor[];\n"
+						   << "void main()\n"
+						   << "{\n"
+						   << "  gl_TessLevelInner[0] = tessLevel.level;\n"
+						   << "  gl_TessLevelOuter[0] = tessLevel.level;\n"
+						   << "  gl_TessLevelOuter[1] = tessLevel.level;\n"
+						   << "  gl_TessLevelOuter[2] = tessLevel.level;\n"
+						   << "  gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
+						   << "  vtxColor[gl_InvocationID] = color[gl_InvocationID];\n"
+						   << "}\n";
+
+			sourceCollections.glslSources.add("color_tesc") << glu::TessellationControlSource(tessControlSrc.str());
+		}
+
+		if (m_pushConstantRange[rangeNdx].range.shaderStage & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
+		{
+			tessEvaluationSrc << "#version 450\n"
+							  << "layout (triangles) in;\n"
+							  << "layout(push_constant) uniform Material {\n"
+							  << "    layout(offset = 32) vec4 color;\n"
+							  << "} matInst;\n"
+							  << "layout(location = 0) in highp vec4 color[];\n"
+							  << "layout(location = 0) out highp vec4 vtxColor;\n"
+							  << "void main()\n"
+							  << "{\n"
+							  << "  gl_Position = gl_TessCoord.x * gl_in[0].gl_Position + gl_TessCoord.y * gl_in[1].gl_Position + gl_TessCoord.z * gl_in[2].gl_Position;\n"
+							  << "  vtxColor = matInst.color;\n"
+							  << "}\n";
+
+			sourceCollections.glslSources.add("color_tese") << glu::TessellationEvaluationSource(tessEvaluationSrc.str());
+		}
+
+		if (m_pushConstantRange[rangeNdx].range.shaderStage & VK_SHADER_STAGE_GEOMETRY_BIT)
+		{
+			geometrySrc << "#version 450\n"
+						<< "layout(triangles) in;\n"
+						<< "layout(triangle_strip, max_vertices=3) out;\n"
+						<< "layout(push_constant) uniform Material {\n"
+						<< "    layout(offset = 20) int kind;\n"
+						<< "} matInst;\n"
+						<< "layout(location = 0) in highp vec4 color[];\n"
+						<< "layout(location = 0) out highp vec4 vtxColor;\n"
+						<< "void main()\n"
+						<< "{\n"
+						<< "  for(int i=0; i<3; i++)\n"
+						<< "  {\n"
+						<< "    gl_Position.xyz = gl_in[i].gl_Position.xyz / matInst.kind;\n"
+						<< "    gl_Position.w = gl_in[i].gl_Position.w;\n"
+						<< "    vtxColor = color[i];\n"
+						<< "    EmitVertex();\n"
+						<< "  }\n"
+						<< "  EndPrimitive();\n"
+						<< "}\n";
+
+			sourceCollections.glslSources.add("color_geom") << glu::GeometrySource(geometrySrc.str());
+		}
+
+		if (m_pushConstantRange[rangeNdx].range.shaderStage & VK_SHADER_STAGE_FRAGMENT_BIT)
+		{
+			fragmentSrc << "#version 450\n"
+						<< "layout(location = 0) in highp vec4 vtxColor;\n"
+						<< "layout(location = 0) out highp vec4 fragColor;\n"
+						<< "layout(push_constant) uniform Material {\n";
+
+			if (m_pushConstantRange[rangeNdx].range.shaderStage & VK_SHADER_STAGE_VERTEX_BIT)
+			{
+				fragmentSrc << "    layout(offset = 0) int kind;\n"
+							<< "} matInst;\n";
+			}
+			else
+			{
+				fragmentSrc << "    layout(offset = 16) int kind;\n"
+							<< "} matInst;\n";
+			}
+
+			fragmentSrc << "void main (void)\n"
+						<< "{\n"
+						<< "    switch (matInst.kind) {\n"
+						<< "    case 0: fragColor = vec4(0, 1.0, 0, 1.0); break;\n"
+						<< "    case 1: fragColor = vec4(0, 0.0, 1.0, 1.0); break;\n"
+						<< "    case 2: fragColor = vtxColor; break;\n"
+						<< "    default: fragColor = vec4(1.0, 1.0, 1.0, 1.0); break;}\n"
+						<< "}\n";
+
+			sourceCollections.glslSources.add("color_frag") << glu::FragmentSource(fragmentSrc.str());
+		}
+	}
+
+	// Add a pass-through fragment shader if the fragment stage is not covered by any push constant range
+	if (fragmentSrc.str().empty())
+	{
+		fragmentSrc << "#version 450\n"
+					<< "layout(location = 0) in highp vec4 vtxColor;\n"
+					<< "layout(location = 0) out highp vec4 fragColor;\n"
+					<< "void main (void)\n"
+					<< "{\n"
+					<< "	fragColor = vtxColor;\n"
+					<< "}\n";
+
+		sourceCollections.glslSources.add("color_frag") << glu::FragmentSource(fragmentSrc.str());
+	}
+}
+
+void PushConstantGraphicsTestInstance::createShaderStage (const DeviceInterface&	vk,
+														  VkDevice					device,
+														  const BinaryCollection&	programCollection,
+														  const char*				name,
+														  VkShaderStageFlagBits		stage,
+														  Move<VkShaderModule>*		module)
+{
+	*module = createShaderModule(vk, device, programCollection.get(name), 0);
+
+	const vk::VkPipelineShaderStageCreateInfo	stageCreateInfo	=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// VkStructureType						sType;
+		DE_NULL,												// const void*							pNext;
+		0u,														// VkPipelineShaderStageCreateFlags		flags;
+		stage,													// VkShaderStageFlagBits				stage;
+		**module,												// VkShaderModule						module;
+		"main",													// const char*							pName;
+		DE_NULL													// const VkSpecializationInfo*			pSpecializationInfo;
+	};
+
+	m_shaderStage.push_back(stageCreateInfo);
+}
+
+std::vector<Vertex4RGBA> PushConstantGraphicsTestInstance::createQuad(const float size)
+{
+	std::vector<Vertex4RGBA>	vertices;
+
+	const tcu::Vec4				color				= tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f);
+	const Vertex4RGBA			lowerLeftVertex		= {tcu::Vec4(-size, -size, 0.0f, 1.0f), color};
+	const Vertex4RGBA			lowerRightVertex	= {tcu::Vec4(size, -size, 0.0f, 1.0f), color};
+	const Vertex4RGBA			upperLeftVertex		= {tcu::Vec4(-size, size, 0.0f, 1.0f), color};
+	const Vertex4RGBA			upperRightVertex	= {tcu::Vec4(size, size, 0.0f, 1.0f), color};
+
+	vertices.push_back(lowerLeftVertex);
+	vertices.push_back(lowerRightVertex);
+	vertices.push_back(upperLeftVertex);
+	vertices.push_back(upperLeftVertex);
+	vertices.push_back(lowerRightVertex);
+	vertices.push_back(upperRightVertex);
+
+	return vertices;
+}
+
+PushConstantGraphicsTestInstance::PushConstantGraphicsTestInstance (Context&					context,
+																	const deUint32				rangeCount,
+																	const PushConstantData		pushConstantRange[MAX_RANGE_COUNT],
+																	deBool						multipleUpdate)
+	: vkt::TestInstance		(context)
+	, m_renderSize			(32, 32)
+	, m_colorFormat			(VK_FORMAT_R8G8B8A8_UNORM)
+	, m_rangeCount			(rangeCount)
+	, m_multipleUpdate		(multipleUpdate)
+	, m_shaderFlags			(VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT)
+{
+	const DeviceInterface&		vk						= context.getDeviceInterface();
+	const VkDevice				vkDevice				= context.getDevice();
+	const deUint32				queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc				(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+	const VkComponentMapping	componentMappingRGBA	= { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A };
+
+	deMemcpy(m_pushConstantRange, pushConstantRange, sizeof(PushConstantData) * MAX_RANGE_COUNT);
+
+	// Create color image
+	{
+		const VkImageCreateInfo colorImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,										// VkStructureType			sType;
+			DE_NULL,																	// const void*				pNext;
+			0u,																			// VkImageCreateFlags		flags;
+			VK_IMAGE_TYPE_2D,															// VkImageType				imageType;
+			m_colorFormat,																// VkFormat					format;
+			{ m_renderSize.x(), m_renderSize.y(), 1u },									// VkExtent3D				extent;
+			1u,																			// deUint32					mipLevels;
+			1u,																			// deUint32					arrayLayers;
+			VK_SAMPLE_COUNT_1_BIT,														// VkSampleCountFlagBits	samples;
+			VK_IMAGE_TILING_OPTIMAL,													// VkImageTiling			tiling;
+			VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,		// VkImageUsageFlags		usage;
+			VK_SHARING_MODE_EXCLUSIVE,													// VkSharingMode			sharingMode;
+			1u,																			// deUint32					queueFamilyIndexCount;
+			&queueFamilyIndex,															// const deUint32*			pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED,													// VkImageLayout			initialLayout;
+		};
+
+		m_colorImageCreateInfo	= colorImageParams;
+		m_colorImage			= createImage(vk, vkDevice, &m_colorImageCreateInfo);
+
+		// Allocate and bind color image memory
+		m_colorImageAlloc		= memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_colorImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_colorImage, m_colorImageAlloc->getMemory(), m_colorImageAlloc->getOffset()));
+	}
+
+	// Create color attachment view
+	{
+		const VkImageViewCreateInfo colorAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,			// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			0u,													// VkImageViewCreateFlags		flags;
+			*m_colorImage,										// VkImage						image;
+			VK_IMAGE_VIEW_TYPE_2D,								// VkImageViewType				viewType;
+			m_colorFormat,										// VkFormat						format;
+			componentMappingRGBA,								// VkComponentMapping			components;
+			{ VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u },		// VkImageSubresourceRange		subresourceRange;
+		};
+
+		m_colorAttachmentView = createImageView(vk, vkDevice, &colorAttachmentViewParams);
+	}
+
+	// Create render pass
+	{
+		const VkAttachmentDescription colorAttachmentDescription =
+		{
+			0u,													// VkAttachmentDescriptionFlags		flags;
+			m_colorFormat,										// VkFormat							format;
+			VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits			samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp				loadOp;
+			VK_ATTACHMENT_STORE_OP_STORE,						// VkAttachmentStoreOp				storeOp;
+			VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// VkAttachmentLoadOp				stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp				stencilStoreOp;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout					initialLayout;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout					finalLayout;
+		};
+
+		const VkAttachmentDescription attachments[1] =
+		{
+			colorAttachmentDescription
+		};
+
+		const VkAttachmentReference colorAttachmentReference =
+		{
+			0u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout	layout;
+		};
+
+		const VkAttachmentReference depthAttachmentReference =
+		{
+			VK_ATTACHMENT_UNUSED,								// deUint32			attachment;
+			VK_IMAGE_LAYOUT_UNDEFINED							// VkImageLayout	layout;
+		};
+
+		const VkSubpassDescription subpassDescription =
+		{
+			0u,													// VkSubpassDescriptionFlags		flags;
+			VK_PIPELINE_BIND_POINT_GRAPHICS,					// VkPipelineBindPoint				pipelineBindPoint;
+			0u,													// deUint32							inputAttachmentCount;
+			DE_NULL,											// const VkAttachmentReference*		pInputAttachments;
+			1u,													// deUint32							colorAttachmentCount;
+			&colorAttachmentReference,							// const VkAttachmentReference*		pColorAttachments;
+			DE_NULL,											// const VkAttachmentReference*		pResolveAttachments;
+			&depthAttachmentReference,							// const VkAttachmentReference*		pDepthStencilAttachment;
+			0u,													// deUint32							preserveAttachmentCount;
+			DE_NULL												// const VkAttachmentReference*		pPreserveAttachments;
+		};
+
+		const VkRenderPassCreateInfo renderPassParams =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,			// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkRenderPassCreateFlags			flags;
+			1u,													// deUint32							attachmentCount;
+			attachments,										// const VkAttachmentDescription*	pAttachments;
+			1u,													// deUint32							subpassCount;
+			&subpassDescription,								// const VkSubpassDescription*		pSubpasses;
+			0u,													// deUint32							dependencyCount;
+			DE_NULL												// const VkSubpassDependency*		pDependencies;
+		};
+
+		m_renderPass = createRenderPass(vk, vkDevice, &renderPassParams);
+	}
+
+	// Create framebuffer
+	{
+		const VkImageView attachmentBindInfos[1] =
+		{
+			*m_colorAttachmentView
+		};
+
+		const VkFramebufferCreateInfo framebufferParams =
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,			// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			0u,													// VkFramebufferCreateFlags		flags;
+			*m_renderPass,										// VkRenderPass					renderPass;
+			1u,													// deUint32						attachmentCount;
+			attachmentBindInfos,								// const VkImageView*			pAttachments;
+			(deUint32)m_renderSize.x(),							// deUint32						width;
+			(deUint32)m_renderSize.y(),							// deUint32						height;
+			1u													// deUint32						layers;
+		};
+
+		m_framebuffer = createFramebuffer(vk, vkDevice, &framebufferParams);
+	}
+
+	// Create pipeline layout
+	{
+		// create push constant range
+		VkPushConstantRange	pushConstantRanges[MAX_RANGE_COUNT];
+		for (size_t rangeNdx = 0; rangeNdx < m_rangeCount; rangeNdx++)
+		{
+			pushConstantRanges[rangeNdx].stageFlags	= m_pushConstantRange[rangeNdx].range.shaderStage;
+			pushConstantRanges[rangeNdx].offset		= m_pushConstantRange[rangeNdx].range.offset;
+			pushConstantRanges[rangeNdx].size		= m_pushConstantRange[rangeNdx].range.size;
+		}
+
+		// create descriptor set layout
+		m_descriptorSetLayout = DescriptorSetLayoutBuilder().addSingleBinding(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_SHADER_STAGE_VERTEX_BIT).build(vk, vkDevice);
+
+		// create descriptor pool
+		m_descriptorPool = DescriptorPoolBuilder().addType(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1u).build(vk, vkDevice, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+
+		// create uniform buffer
+		const VkBufferCreateInfo uniformBufferCreateInfo =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,						// VkStructureType		sType;
+			DE_NULL,													// const void*			pNext;
+			0u,															// VkBufferCreateFlags	flags
+			16u,														// VkDeviceSize			size;
+			VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,							// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,									// VkSharingMode		sharingMode;
+			1u,															// deUint32				queueFamilyCount;
+			&queueFamilyIndex											// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_uniformBuffer			= createBuffer(vk, vkDevice, &uniformBufferCreateInfo);
+		m_uniformBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_uniformBuffer), MemoryRequirement::HostVisible);
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_uniformBuffer, m_uniformBufferAlloc->getMemory(), m_uniformBufferAlloc->getOffset()));
+
+		tcu::Vec4	value	= tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f);
+		deMemcpy(m_uniformBufferAlloc->getHostPtr(), &value, 16u);
+		flushMappedMemoryRange(vk, vkDevice, m_uniformBufferAlloc->getMemory(), m_uniformBufferAlloc->getOffset(), 16u);
+
+		// create and update descriptor set
+		const VkDescriptorSetAllocateInfo allocInfo =
+		{
+			VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,				// VkStructureType                             sType;
+			DE_NULL,													// const void*                                 pNext;
+			*m_descriptorPool,											// VkDescriptorPool                            descriptorPool;
+			1u,															// uint32_t                                    descriptorSetCount;
+			&(*m_descriptorSetLayout),									// const VkDescriptorSetLayout*                pSetLayouts;
+		};
+		m_descriptorSet	= allocateDescriptorSet(vk, vkDevice, &allocInfo);
+
+		const VkDescriptorBufferInfo descriptorInfo = makeDescriptorBufferInfo(*m_uniformBuffer, (VkDeviceSize)0u, (VkDeviceSize)16u);
+
+		DescriptorSetUpdateBuilder()
+			.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, &descriptorInfo)
+			.update(vk, vkDevice);
+
+		// create pipeline layout
+		const VkPipelineLayoutCreateInfo	pipelineLayoutParams	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			0u,													// VkPipelineLayoutCreateFlags	flags;
+			1u,													// deUint32						setLayoutCount;
+			&(*m_descriptorSetLayout),							// const VkDescriptorSetLayout*	pSetLayouts;
+			m_rangeCount,										// deUint32						pushConstantRangeCount;
+			pushConstantRanges									// const VkPushConstantRange*	pPushConstantRanges;
+		};
+
+		m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	// Create shaders
+	{
+		for (size_t rangeNdx = 0; rangeNdx < m_rangeCount; rangeNdx++)
+		{
+			if (m_pushConstantRange[rangeNdx].range.shaderStage & VK_SHADER_STAGE_GEOMETRY_BIT)
+			{
+				m_shaderFlags |= VK_SHADER_STAGE_GEOMETRY_BIT;
+			}
+			if (m_pushConstantRange[rangeNdx].range.shaderStage & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
+			{
+				m_shaderFlags |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
+			}
+			if (m_pushConstantRange[rangeNdx].range.shaderStage & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
+			{
+				m_shaderFlags |= VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
+			}
+		}
+
+		VkPhysicalDeviceFeatures features = m_context.getDeviceFeatures();
+
+		createShaderStage(vk, vkDevice, m_context.getBinaryCollection(), "color_vert", VK_SHADER_STAGE_VERTEX_BIT , &m_vertexShaderModule);
+		if (m_shaderFlags & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT || m_shaderFlags & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
+		{
+			if (features.tessellationShader == VK_FALSE)
+			{
+				TCU_THROW(NotSupportedError, "Tessellation Not Supported");
+			}
+			createShaderStage(vk, vkDevice, m_context.getBinaryCollection(), "color_tesc", VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, &m_tessControlShaderModule);
+			createShaderStage(vk, vkDevice, m_context.getBinaryCollection(), "color_tese", VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, &m_tessEvaluationShaderModule);
+		}
+		if (m_shaderFlags & VK_SHADER_STAGE_GEOMETRY_BIT)
+		{
+			if (features.geometryShader == VK_FALSE)
+			{
+				TCU_THROW(NotSupportedError, "Geometry Not Supported");
+			}
+			createShaderStage(vk, vkDevice, m_context.getBinaryCollection(), "color_geom", VK_SHADER_STAGE_GEOMETRY_BIT, &m_geometryShaderModule);
+		}
+		createShaderStage(vk, vkDevice, m_context.getBinaryCollection(), "color_frag", VK_SHADER_STAGE_FRAGMENT_BIT, &m_fragmentShaderModule);
+	}
+
+	// Create pipeline
+	{
+		const VkVertexInputBindingDescription vertexInputBindingDescription =
+		{
+			0u,										// deUint32					binding;
+			sizeof(Vertex4RGBA),					// deUint32					stride;
+			VK_VERTEX_INPUT_RATE_VERTEX				// VkVertexInputRate		inputRate;
+		};
+
+		const VkVertexInputAttributeDescription vertexInputAttributeDescriptions[] =
+		{
+			{
+				0u,									// deUint32	location;
+				0u,									// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat	format;
+				0u									// deUint32	offset;
+			},
+			{
+				1u,									// deUint32	location;
+				0u,									// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat	format;
+				DE_OFFSET_OF(Vertex4RGBA, color),	// deUint32	offset;
+			}
+		};
+
+		const VkPipelineVertexInputStateCreateInfo vertexInputStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineVertexInputStateCreateFlags	flags;
+			1u,																// deUint32									bindingCount;
+			&vertexInputBindingDescription,									// const VkVertexInputBindingDescription*	pVertexBindingDescriptions;
+			2u,																// deUint32									attributeCount;
+			vertexInputAttributeDescriptions								// const VkVertexInputAttributeDescription*	pVertexAttributeDescriptions;
+		};
+
+		const VkPrimitiveTopology topology = (m_shaderFlags & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT) ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
+		const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			(VkPipelineInputAssemblyStateCreateFlags)0u,					// VkPipelineInputAssemblyStateCreateFlags	flags;
+			topology,														// VkPrimitiveTopology						topology;
+			false															// VkBool32									primitiveRestartEnable;
+		};
+
+		const VkViewport viewport =
+		{
+			0.0f,						// float	originX;
+			0.0f,						// float	originY;
+			(float)m_renderSize.x(),	// float	width;
+			(float)m_renderSize.y(),	// float	height;
+			0.0f,						// float	minDepth;
+			1.0f						// float	maxDepth;
+		};
+
+		const VkRect2D scissor = { { 0, 0 }, { m_renderSize.x(), m_renderSize.y() } };
+
+		const VkPipelineViewportStateCreateInfo viewportStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,			// VkStructureType						sType;
+			DE_NULL,														// const void*							pNext;
+			(VkPipelineViewportStateCreateFlags)0u,							// VkPipelineViewportStateCreateFlags	flags;
+			1u,																// deUint32								viewportCount;
+			&viewport,														// const VkViewport*					pViewports;
+			1u,																// deUint32								scissorCount;
+			&scissor,														// const VkRect2D*						pScissors;
+		};
+
+		const VkPipelineRasterizationStateCreateInfo rasterStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineRasterizationStateCreateFlags	flags;
+			false,															// VkBool32									depthClampEnable;
+			false,															// VkBool32									rasterizerDiscardEnable;
+			VK_POLYGON_MODE_FILL,											// VkPolygonMode							polygonMode;
+			VK_CULL_MODE_NONE,												// VkCullModeFlags							cullMode;
+			VK_FRONT_FACE_COUNTER_CLOCKWISE,								// VkFrontFace								frontFace;
+			VK_FALSE,														// VkBool32									depthBiasEnable;
+			0.0f,															// float									depthBiasConstantFactor;
+			0.0f,															// float									depthBiasClamp;
+			0.0f,															// float									depthBiasSlopeFactor;
+			1.0f,															// float									lineWidth;
+		};
+
+		const VkPipelineColorBlendAttachmentState colorBlendAttachmentState =
+		{
+			false,															// VkBool32					blendEnable;
+			VK_BLEND_FACTOR_ONE,											// VkBlendFactor			srcColorBlendFactor;
+			VK_BLEND_FACTOR_ZERO,											// VkBlendFactor			dstColorBlendFactor;
+			VK_BLEND_OP_ADD,												// VkBlendOp				colorBlendOp;
+			VK_BLEND_FACTOR_ONE,											// VkBlendFactor			srcAlphaBlendFactor;
+			VK_BLEND_FACTOR_ZERO,											// VkBlendFactor			dstAlphaBlendFactor;
+			VK_BLEND_OP_ADD,												// VkBlendOp				alphaBlendOp;
+			VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |			// VkColorComponentFlags	colorWriteMask;
+				VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT
+		};
+
+		const VkPipelineColorBlendStateCreateInfo colorBlendStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	// VkStructureType								sType;
+			DE_NULL,													// const void*									pNext;
+			0,															// VkPipelineColorBlendStateCreateFlags			flags;
+			false,														// VkBool32										logicOpEnable;
+			VK_LOGIC_OP_COPY,											// VkLogicOp									logicOp;
+			1u,															// deUint32										attachmentCount;
+			&colorBlendAttachmentState,									// const VkPipelineColorBlendAttachmentState*	pAttachments;
+			{ 0.0f, 0.0f, 0.0f, 0.0f },									// float										blendConstants[4];
+		};
+
+		const VkPipelineMultisampleStateCreateInfo	multisampleStateParams	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+			VK_SAMPLE_COUNT_1_BIT,										// VkSampleCountFlagBits					rasterizationSamples;
+			false,														// VkBool32									sampleShadingEnable;
+			0.0f,														// float									minSampleShading;
+			DE_NULL,													// const VkSampleMask*						pSampleMask;
+			false,														// VkBool32									alphaToCoverageEnable;
+			false														// VkBool32									alphaToOneEnable;
+		};
+
+		const VkPipelineDynamicStateCreateInfo dynamicStateParams		=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,		// VkStructureType						sType;
+			DE_NULL,													// const void*							pNext;
+			0u,															// VkPipelineDynamicStateCreateFlags	flags;
+			0u,															// deUint32								dynamicStateCount;
+			DE_NULL														// const VkDynamicState*				pDynamicStates;
+		};
+
+		VkPipelineDepthStencilStateCreateInfo depthStencilStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineDepthStencilStateCreateFlags	flags;
+			false,														// VkBool32									depthTestEnable;
+			false,														// VkBool32									depthWriteEnable;
+			VK_COMPARE_OP_LESS,											// VkCompareOp								depthCompareOp;
+			false,														// VkBool32									depthBoundsTestEnable;
+			false,														// VkBool32									stencilTestEnable;
+			// VkStencilOpState	front;
+			{
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	stencilFailOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	stencilPassOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	stencilDepthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	stencilCompareOp;
+				0u,						// deUint32		stencilCompareMask;
+				0u,						// deUint32		stencilWriteMask;
+				0u,						// deUint32		stencilReference;
+			},
+			// VkStencilOpState	back;
+			{
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	stencilFailOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	stencilPassOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	stencilDepthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	stencilCompareOp;
+				0u,						// deUint32		stencilCompareMask;
+				0u,						// deUint32		stencilWriteMask;
+				0u,						// deUint32		stencilReference;
+			},
+			-1.0f,														// float			minDepthBounds;
+			+1.0f,														// float			maxDepthBounds;
+		};
+
+		const VkPipelineTessellationStateCreateInfo tessellationStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,		// VkStructureType                             sType;
+			DE_NULL,														// const void*                                 pNext;
+			0u,																// VkPipelineTessellationStateCreateFlags      flags;
+			3u,																// uint32_t                                    patchControlPoints;
+		};
+
+		const VkGraphicsPipelineCreateInfo graphicsPipelineParams =
+		{
+			VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,	// VkStructureType									sType;
+			DE_NULL,											// const void*										pNext;
+			0u,													// VkPipelineCreateFlags							flags;
+			(deUint32)m_shaderStage.size(),						// deUint32											stageCount;
+			&m_shaderStage[0],									// const VkPipelineShaderStageCreateInfo*			pStages;
+			&vertexInputStateParams,							// const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+			&inputAssemblyStateParams,							// const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+			(m_shaderFlags & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ? &tessellationStateParams: DE_NULL),			// const VkPipelineTessellationStateCreateInfo*		pTessellationState;
+			&viewportStateParams,								// const VkPipelineViewportStateCreateInfo*			pViewportState;
+			&rasterStateParams,									// const VkPipelineRasterizationStateCreateInfo*	pRasterizationState;
+			&multisampleStateParams,							// const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+			&depthStencilStateParams,							// const VkPipelineDepthStencilStateCreateInfo*		pDepthStencilState;
+			&colorBlendStateParams,								// const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+			&dynamicStateParams,								// const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+			*m_pipelineLayout,									// VkPipelineLayout									layout;
+			*m_renderPass,										// VkRenderPass										renderPass;
+			0u,													// deUint32											subpass;
+			0u,													// VkPipeline										basePipelineHandle;
+			0u													// deInt32											basePipelineIndex;
+		};
+
+		m_graphicsPipelines = createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+	}
+
+	// Create vertex buffer
+	{
+		m_vertices			= createQuad(1.0f);
+
+		const VkBufferCreateInfo vertexBufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,						// VkStructureType		sType;
+			DE_NULL,													// const void*			pNext;
+			0u,															// VkBufferCreateFlags	flags;
+			(VkDeviceSize)(sizeof(Vertex4RGBA) * m_vertices.size()),	// VkDeviceSize			size;
+			VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,							// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,									// VkSharingMode		sharingMode;
+			1u,															// deUint32				queueFamilyCount;
+			&queueFamilyIndex											// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_vertexBuffer		= createBuffer(vk, vkDevice, &vertexBufferParams);
+		m_vertexBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_vertexBuffer), MemoryRequirement::HostVisible);
+
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_vertexBuffer, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset()));
+
+		// Load vertices into vertex buffer
+		deMemcpy(m_vertexBufferAlloc->getHostPtr(), m_vertices.data(), m_vertices.size() * sizeof(Vertex4RGBA));
+		flushMappedMemoryRange(vk, vkDevice, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset(), vertexBufferParams.size);
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,		// VkCommandPoolCreateFlags	flags;
+			queueFamilyIndex							// deUint32				queueFamilyIndex;
+		};
+		m_cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u												// deUint32					commandBufferCount;
+		};
+
+		const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+			DE_NULL,										// const void*						pNext;
+			0u,												// VkCommandBufferUsageFlags		flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,
+		};
+
+		const VkClearValue attachmentClearValues[] =
+		{
+			defaultClearValue(m_colorFormat)
+		};
+
+		const VkRenderPassBeginInfo renderPassBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,				// VkStructureType		sType;
+			DE_NULL,												// const void*			pNext;
+			*m_renderPass,											// VkRenderPass			renderPass;
+			*m_framebuffer,											// VkFramebuffer		framebuffer;
+			{ { 0, 0 } , { m_renderSize.x(), m_renderSize.y() } },	// VkRect2D				renderArea;
+			1,														// deUint32				clearValueCount;
+			attachmentClearValues									// const VkClearValue*	pClearValues;
+		};
+
+		m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
+
+		VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+		vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+		// update push constant
+		std::vector<tcu::Vec4> color(8, tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f));
+		std::vector<tcu::Vec4> allOnes(8, tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f));
+
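+		// kind = 2 drives the 4-byte ranges: the vertex shader emits red, the fragment
+		// shader forwards the interpolated color, and the geometry shader scales the
+		// quad by 1/2 (matching the half-size reference quad used in verifyImage).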
+		const deUint32	kind	= 2u;
+		const void*		value	= DE_NULL;
+		for (size_t rangeNdx = 0; rangeNdx < m_rangeCount; rangeNdx++)
+		{
+			value = (m_pushConstantRange[rangeNdx].range.size == 4u) ? (void*)(&kind) : (void*)(&color[0]);
+
+			vk.cmdPushConstants(*m_cmdBuffer, *m_pipelineLayout, m_pushConstantRange[rangeNdx].range.shaderStage, m_pushConstantRange[rangeNdx].range.offset, m_pushConstantRange[rangeNdx].range.size, value);
+
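+			// If only part of the declared range is meant to be updated, overwrite the
+			// update sub-range with all-ones; verifyImage turns the reference vertex
+			// colors white for these cases.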
+			if (m_pushConstantRange[rangeNdx].update.size < m_pushConstantRange[rangeNdx].range.size)
+			{
+				value = (void*)(&allOnes[0]);
+				vk.cmdPushConstants(*m_cmdBuffer, *m_pipelineLayout, m_pushConstantRange[rangeNdx].range.shaderStage, m_pushConstantRange[rangeNdx].update.offset, m_pushConstantRange[rangeNdx].update.size, value);
+			}
+		}
+
+		// draw quad
+		const VkDeviceSize	triangleOffset	= (m_vertices.size() / TRIANGLE_COUNT) * sizeof(Vertex4RGBA);
+		for (int triangleNdx = 0; triangleNdx < TRIANGLE_COUNT; triangleNdx++)
+		{
+			VkDeviceSize vertexBufferOffset = triangleOffset * triangleNdx;
+
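+			// In multiple-update mode the first range is re-pushed with the triangle
+			// index before each draw, so the two triangles render green then blue
+			// (kind = 0 and kind = 1 in the shaders).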
+			if (m_multipleUpdate)
+			{
+				vk.cmdPushConstants(*m_cmdBuffer, *m_pipelineLayout, m_pushConstantRange[0].range.shaderStage, m_pushConstantRange[0].range.offset, m_pushConstantRange[0].range.size, &triangleNdx);
+			}
+
+			vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipelines);
+			vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), &vertexBufferOffset);
+			vk.cmdBindDescriptorSets(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0, 1, &(*m_descriptorSet), 0, DE_NULL);
+
+			vk.cmdDraw(*m_cmdBuffer, (deUint32)(m_vertices.size() / TRIANGLE_COUNT), 1, 0, 0);
+		}
+
+		vk.cmdEndRenderPass(*m_cmdBuffer);
+		VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+
+		m_fence = createFence(vk, vkDevice, &fenceParams);
+	}
+}
+
+PushConstantGraphicsTestInstance::~PushConstantGraphicsTestInstance (void)
+{
+}
+
+tcu::TestStatus PushConstantGraphicsTestInstance::iterate (void)
+{
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const VkQueue				queue		= m_context.getUniversalQueue();
+	const VkSubmitInfo			submitInfo	=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,
+		1u,								// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity*/));
+
+	return verifyImage();
+}
+
+tcu::TestStatus PushConstantGraphicsTestInstance::verifyImage (void)
+{
+	const tcu::TextureFormat	tcuColorFormat	= mapVkFormat(m_colorFormat);
+	const tcu::TextureFormat	tcuDepthFormat	= tcu::TextureFormat();
+	const ColorVertexShader		vertexShader;
+	const ColorFragmentShader	fragmentShader	(tcuColorFormat, tcuDepthFormat);
+	const rr::Program			program			(&vertexShader, &fragmentShader);
+	ReferenceRenderer			refRenderer		(m_renderSize.x(), m_renderSize.y(), 1, tcuColorFormat, tcuDepthFormat, &program);
+	bool						compareOk		= false;
+
+	// Render reference image
+	{
+		if (m_shaderFlags & VK_SHADER_STAGE_GEOMETRY_BIT)
+		{
+			m_vertices = createQuad(0.5f);
+		}
+
+		for (size_t rangeNdx = 0; rangeNdx < m_rangeCount; rangeNdx++)
+		{
+			if (m_pushConstantRange[rangeNdx].update.size < m_pushConstantRange[rangeNdx].range.size)
+			{
+				for (size_t vertexNdx = 0; vertexNdx < m_vertices.size(); vertexNdx++)
+				{
+					m_vertices[vertexNdx].color.xyzw() = tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f);
+				}
+			}
+		}
+
+		if (m_multipleUpdate)
+		{
+			for (size_t vertexNdx = 0; vertexNdx < 3; vertexNdx++)
+			{
+				m_vertices[vertexNdx].color.xyz() = tcu::Vec3(0.0f, 1.0f, 0.0f);
+			}
+			for (size_t vertexNdx = 3; vertexNdx < m_vertices.size(); vertexNdx++)
+			{
+				m_vertices[vertexNdx].color.xyz() = tcu::Vec3(0.0f, 0.0f, 1.0f);
+			}
+		}
+
+		for (int triangleNdx = 0; triangleNdx < TRIANGLE_COUNT; triangleNdx++)
+		{
+			rr::RenderState renderState(refRenderer.getViewportState());
+
+			refRenderer.draw(renderState,
+							 rr::PRIMITIVETYPE_TRIANGLES,
+							 std::vector<Vertex4RGBA>(m_vertices.begin() + triangleNdx * 3,
+													  m_vertices.begin() + (triangleNdx + 1) * 3));
+		}
+	}
+
+	// Compare result with reference image
+	{
+		const DeviceInterface&			vk					= m_context.getDeviceInterface();
+		const VkDevice					vkDevice			= m_context.getDevice();
+		const VkQueue					queue				= m_context.getUniversalQueue();
+		const deUint32					queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+		SimpleAllocator					allocator			(vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
+		de::MovePtr<tcu::TextureLevel>	result				= readColorAttachment(vk, vkDevice, queue, queueFamilyIndex, allocator, *m_colorImage, m_colorFormat, m_renderSize);
+
+		compareOk = tcu::intThresholdPositionDeviationCompare(m_context.getTestContext().getLog(),
+															  "IntImageCompare",
+															  "Image comparison",
+															  refRenderer.getAccess(),
+															  result->getAccess(),
+															  tcu::UVec4(2, 2, 2, 2),
+															  tcu::IVec3(1, 1, 0),
+															  true,
+															  tcu::COMPARE_LOG_RESULT);
+	}
+
+	if (compareOk)
+		return tcu::TestStatus::pass("Result image matches reference");
+	else
+		return tcu::TestStatus::fail("Image mismatch");
+}
+
+class PushConstantComputeTest : public vkt::TestCase
+{
+public:
+							PushConstantComputeTest		(tcu::TestContext&		testContext,
+														 const std::string&		name,
+														 const std::string&		description,
+														 const PushConstantData	pushConstantRange);
+	virtual					~PushConstantComputeTest	(void);
+	virtual void			initPrograms				(SourceCollections& sourceCollections) const;
+	virtual TestInstance*	createInstance				(Context& context) const;
+
+private:
+	const PushConstantData	m_pushConstantRange;
+};
+
+class PushConstantComputeTestInstance : public vkt::TestInstance
+{
+public:
+							PushConstantComputeTestInstance		(Context&				context,
+																 const PushConstantData	pushConstantRange);
+	virtual					~PushConstantComputeTestInstance	(void);
+	virtual tcu::TestStatus	iterate								(void);
+
+private:
+	const PushConstantData			m_pushConstantRange;
+
+	Move<VkBuffer>					m_outBuffer;
+	de::MovePtr<Allocation>			m_outBufferAlloc;
+	Move<VkDescriptorPool>			m_descriptorPool;
+	Move<VkDescriptorSetLayout>		m_descriptorSetLayout;
+	Move<VkDescriptorSet>			m_descriptorSet;
+
+	Move<VkPipelineLayout>			m_pipelineLayout;
+	Move<VkPipeline>				m_computePipelines;
+
+	Move<VkShaderModule>			m_computeShaderModule;
+
+	Move<VkCommandPool>				m_cmdPool;
+	Move<VkCommandBuffer>			m_cmdBuffer;
+
+	Move<VkFence>					m_fence;
+};
+
+PushConstantComputeTest::PushConstantComputeTest (tcu::TestContext&			testContext,
+												  const std::string&		name,
+												  const std::string&		description,
+												  const PushConstantData	pushConstantRange)
+	: vkt::TestCase			(testContext, name, description)
+	, m_pushConstantRange	(pushConstantRange)
+{
+}
+
+PushConstantComputeTest::~PushConstantComputeTest (void)
+{
+}
+
+TestInstance* PushConstantComputeTest::createInstance (Context& context) const
+{
+	return new PushConstantComputeTestInstance(context, m_pushConstantRange);
+}
+
+void PushConstantComputeTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	std::ostringstream	computeSrc;
+
+	computeSrc << "#version 450\n"
+			   << "layout(local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n"
+			   << "layout(std140, set = 0, binding = 0) writeonly buffer Output {\n"
+			   << "  vec4 elements[];\n"
+			   << "} outData;\n"
+			   << "layout(push_constant) uniform Material{\n"
+			   << "  vec4 element;\n"
+			   << "} matInst;\n"
+			   << "void main (void)\n"
+			   << "{\n"
+			   << "  outData.elements[gl_GlobalInvocationID.x] = matInst.element;\n"
+			   << "}\n";
+
+	sourceCollections.glslSources.add("compute") << glu::ComputeSource(computeSrc.str());
+}
+
+PushConstantComputeTestInstance::PushConstantComputeTestInstance (Context&					context,
+																  const PushConstantData	pushConstantRange)
+	: vkt::TestInstance		(context)
+	, m_pushConstantRange	(pushConstantRange)
+{
+	const DeviceInterface&		vk					= context.getDeviceInterface();
+	const VkDevice				vkDevice			= context.getDevice();
+	const deUint32				queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc			(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+
+	// Create pipeline layout
+	{
+		// create push constant range
+		VkPushConstantRange	pushConstantRanges;
+		pushConstantRanges.stageFlags	= m_pushConstantRange.range.shaderStage;
+		pushConstantRanges.offset		= m_pushConstantRange.range.offset;
+		pushConstantRanges.size			= m_pushConstantRange.range.size;
+
+		// create descriptor set layout
+		m_descriptorSetLayout = DescriptorSetLayoutBuilder().addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT).build(vk, vkDevice);
+
+		// create descriptor pool
+		m_descriptorPool = DescriptorPoolBuilder().addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1u).build(vk, vkDevice, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+
+		// create output (storage) buffer written by the compute shader
+		const VkDeviceSize			bufferSize			= sizeof(tcu::Vec4) * 8;
+		const VkBufferCreateInfo	bufferCreateInfo	=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,						// VkStructureType		sType;
+			DE_NULL,													// const void*			pNext;
+			0u,															// VkBufferCreateFlags	flags
+			bufferSize,													// VkDeviceSize			size;
+			VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,							// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,									// VkSharingMode		sharingMode;
+			1u,															// deUint32				queueFamilyCount;
+			&queueFamilyIndex											// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_outBuffer			= createBuffer(vk, vkDevice, &bufferCreateInfo);
+		m_outBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_outBuffer), MemoryRequirement::HostVisible);
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_outBuffer, m_outBufferAlloc->getMemory(), m_outBufferAlloc->getOffset()));
+
+		// create and update descriptor set
+		const VkDescriptorSetAllocateInfo allocInfo =
+		{
+			VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,				// VkStructureType                             sType;
+			DE_NULL,													// const void*                                 pNext;
+			*m_descriptorPool,											// VkDescriptorPool                            descriptorPool;
+			1u,															// uint32_t                                    descriptorSetCount;
+			&(*m_descriptorSetLayout),									// const VkDescriptorSetLayout*                pSetLayouts;
+		};
+		m_descriptorSet	= allocateDescriptorSet(vk, vkDevice, &allocInfo);
+
+		const VkDescriptorBufferInfo descriptorInfo = makeDescriptorBufferInfo(*m_outBuffer, (VkDeviceSize)0u, bufferSize);
+
+		DescriptorSetUpdateBuilder()
+			.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &descriptorInfo)
+			.update(vk, vkDevice);
+
+		// create pipeline layout
+		const VkPipelineLayoutCreateInfo	pipelineLayoutParams	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			0u,													// VkPipelineLayoutCreateFlags	flags;
+			1u,													// deUint32						setLayoutCount;
+			&(*m_descriptorSetLayout),							// const VkDescriptorSetLayout*	pSetLayouts;
+			1u,													// deUint32						pushConstantRangeCount;
+			&pushConstantRanges									// const VkPushConstantRange*	pPushConstantRanges;
+		};
+
+		m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	// create pipeline
+	{
+		m_computeShaderModule = createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("compute"), 0);
+
+		const VkPipelineShaderStageCreateInfo	stageCreateInfo	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// VkStructureType						sType;
+			DE_NULL,												// const void*							pNext;
+			0u,														// VkPipelineShaderStageCreateFlags		flags;
+			VK_SHADER_STAGE_COMPUTE_BIT,							// VkShaderStageFlagBits				stage;
+			*m_computeShaderModule,									// VkShaderModule						module;
+			"main",													// const char*							pName;
+			DE_NULL													// const VkSpecializationInfo*			pSpecializationInfo;
+		};
+
+		const VkComputePipelineCreateInfo		createInfo	=
+		{
+			VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,			// VkStructureType                             sType;
+			DE_NULL,												// const void*                                 pNext;
+			0u,														// VkPipelineCreateFlags                       flags;
+			stageCreateInfo,										// VkPipelineShaderStageCreateInfo             stage;
+			*m_pipelineLayout,										// VkPipelineLayout                            layout;
+			(VkPipeline)0,											// VkPipeline                                  basePipelineHandle;
+			0u,														// int32_t                                     basePipelineIndex;
+		};
+
+		m_computePipelines = createComputePipeline(vk, vkDevice, (vk::VkPipelineCache)0u, &createInfo);
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,										// const void*			pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCommandPoolCreateFlags	flags;
+			queueFamilyIndex								// deUint32				queueFamilyIndex;
+		};
+		m_cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u												// deUint32					commandBufferCount;
+		};
+
+		m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
+
+		const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+			DE_NULL,										// const void*						pNext;
+			0u,												// VkCommandBufferUsageFlags		flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,	// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+		};
+
+		VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+
+		vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_computePipelines);
+		vk.cmdBindDescriptorSets(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0, 1, &(*m_descriptorSet), 0, DE_NULL);
+
+		// update push constant
+		tcu::Vec4	value	= tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f);
+		vk.cmdPushConstants(*m_cmdBuffer, *m_pipelineLayout, m_pushConstantRange.range.shaderStage, m_pushConstantRange.range.offset, m_pushConstantRange.range.size, &value);
+
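+		// Dispatch eight invocations; each one is expected to write the pushed vec4 into the output buffer (verified in iterate()).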
+		vk.cmdDispatch(*m_cmdBuffer, 8, 1, 1);
+
+		VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+
+		m_fence = createFence(vk, vkDevice, &fenceParams);
+	}
+}
+
+PushConstantComputeTestInstance::~PushConstantComputeTestInstance (void)
+{
+}
+
+tcu::TestStatus PushConstantComputeTestInstance::iterate (void)
+{
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const VkQueue				queue		= m_context.getUniversalQueue();
+	const VkSubmitInfo			submitInfo	=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity */));
+
+	// verify result
+	std::vector<tcu::Vec4>	expectValue(8, tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f));
+	if (deMemCmp((void*)(&expectValue[0]), m_outBufferAlloc->getHostPtr(), (size_t)(sizeof(tcu::Vec4) * 8)))
+	{
+		return tcu::TestStatus::fail("Output buffer mismatch");
+	}
+	return tcu::TestStatus::pass("Output buffer matches reference");
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createPushConstantTests (tcu::TestContext& testCtx)
+{
+	static const struct
+	{
+		const char*			name;
+		const char*			description;
+		deUint32			count;
+		PushConstantData	range[MAX_RANGE_COUNT];
+		deBool				hasMultipleUpdates;
+	} graphicsParams[] =
+	{
+		// test range size from minimum valid size to maximum
+		{
+			"range_size_4",
+			"test range size is 4 bytes (minimum valid size)",
+			1u,
+			{ { { VK_SHADER_STAGE_VERTEX_BIT, 0, 4 }, { 0, 4 } } },
+			false
+		},
+		{
+			"range_size_16",
+			"test range size is 16 bytes, used together with a normal uniform",
+			1u,
+			{ { { VK_SHADER_STAGE_VERTEX_BIT, 0, 16 }, { 0, 16 } } },
+			false
+		},
+		{
+			"range_size_128",
+			"test range size is 128 bytes (maximum valid size)",
+			1u,
+			{ { { VK_SHADER_STAGE_VERTEX_BIT, 0, 128 }, { 0, 128 } } },
+			false
+		},
+		// test range count, including all valid shader stages in the graphics pipeline, and multiple shader stages sharing a single range
+		{
+			"count_2_shader_VF",
+			"test range count is 2, use vertex and fragment shaders",
+			2u,
+			{
+				{ { VK_SHADER_STAGE_VERTEX_BIT, 0, 16 }, { 0, 16 } },
+				{ { VK_SHADER_STAGE_FRAGMENT_BIT, 16, 4 }, { 16, 4 } },
+			},
+			false
+		},
+		{
+			"count_3shader_VGF",
+			"test range count is 3, use vertex, geometry and fragment shaders",
+			3u,
+			{
+				{ { VK_SHADER_STAGE_VERTEX_BIT, 0, 16 }, { 0, 16 } },
+				{ { VK_SHADER_STAGE_FRAGMENT_BIT, 16, 4 }, { 16, 4 } },
+				{ { VK_SHADER_STAGE_GEOMETRY_BIT, 20, 4 }, { 20, 4 } },
+			},
+			false
+		},
+		{
+			"count_5_shader_VTGF",
+			"test range count is 5, use vertex, tessellation control, tessellation evaluation, geometry and fragment shaders",
+			5u,
+			{
+				{ { VK_SHADER_STAGE_VERTEX_BIT, 0, 16 }, { 0, 16 } },
+				{ { VK_SHADER_STAGE_FRAGMENT_BIT, 16, 4 }, { 16, 4 } },
+				{ { VK_SHADER_STAGE_GEOMETRY_BIT, 20, 4 }, { 20, 4 } },
+				{ { VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, 24, 4 }, { 24, 4 } },
+				{ { VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, 32, 16 }, { 32, 16 } },
+			},
+			false
+		},
+		{
+			"count_1_shader_VF",
+			"test range count is 1, vertex and fragment shaders share one range",
+			1u,
+			{ { { VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, 0, 4 }, { 0, 4 } } },
+			false
+		},
+		// test data partial update and multiple times update
+		{
+			"data_update_partial_1",
+			"test partial update of the values",
+			1u,
+			{ { { VK_SHADER_STAGE_VERTEX_BIT, 0, 32 }, { 4, 24 } } },
+			false
+		},
+		{
+			"data_update_partial_2",
+			"test partial update of the values",
+			1u,
+			{ { { VK_SHADER_STAGE_VERTEX_BIT, 0, 48 }, { 32, 16 } } },
+			false
+		},
+		{
+			"data_update_multiple",
+			"test updating the values multiple times",
+			1u,
+			{ { { VK_SHADER_STAGE_VERTEX_BIT, 0, 4 }, { 0, 4 } } },
+			true
+		},
+	};
+
+	static const struct
+	{
+		const char*			name;
+		const char*			description;
+		PushConstantData	range;
+	} computeParams[] =
+	{
+		{
+			"simple_test",
+			"test compute pipeline",
+			{ { VK_SHADER_STAGE_COMPUTE_BIT, 0, 16 }, { 0, 16 } },
+		},
+	};
+
+	de::MovePtr<tcu::TestCaseGroup>	pushConstantTests	(new tcu::TestCaseGroup(testCtx, "push_constant", "PushConstant tests"));
+
+	de::MovePtr<tcu::TestCaseGroup>	graphicsTests	(new tcu::TestCaseGroup(testCtx, "graphics_pipeline", "graphics pipeline"));
+	for (int ndx = 0; ndx < DE_LENGTH_OF_ARRAY(graphicsParams); ndx++)
+	{
+		graphicsTests->addChild(new PushConstantGraphicsTest(testCtx, graphicsParams[ndx].name, graphicsParams[ndx].description, graphicsParams[ndx].count, graphicsParams[ndx].range, graphicsParams[ndx].hasMultipleUpdates));
+	}
+	pushConstantTests->addChild(graphicsTests.release());
+
+	de::MovePtr<tcu::TestCaseGroup>	computeTests	(new tcu::TestCaseGroup(testCtx, "compute_pipeline", "compute pipeline"));
+	computeTests->addChild(new PushConstantComputeTest(testCtx, computeParams[0].name, computeParams[0].description, computeParams[0].range));
+	pushConstantTests->addChild(computeTests.release());
+
+	return pushConstantTests.release();
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelinePushConstantTests.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelinePushConstantTests.hpp
new file mode 100644
index 0000000..e4f7c14
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelinePushConstantTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTPIPELINEPUSHCONSTANTTESTS_HPP
+#define _VKTPIPELINEPUSHCONSTANTTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 ARM Limited.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief PushConstant Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+tcu::TestCaseGroup* createPushConstantTests (tcu::TestContext& testCtx);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINEPUSHCONSTANTTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineReferenceRenderer.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineReferenceRenderer.cpp
new file mode 100644
index 0000000..68376f9
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineReferenceRenderer.cpp
@@ -0,0 +1,343 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Reference renderer.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineReferenceRenderer.hpp"
+#include "vktPipelineClearUtil.hpp"
+#include "rrShadingContext.hpp"
+#include "rrVertexAttrib.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+
+rr::BlendFunc mapVkBlendFactor (VkBlendFactor blend)
+{
+	switch (blend)
+	{
+		case VK_BLEND_FACTOR_ZERO:						return rr::BLENDFUNC_ZERO;
+		case VK_BLEND_FACTOR_ONE:						return rr::BLENDFUNC_ONE;
+		case VK_BLEND_FACTOR_SRC_COLOR:					return rr::BLENDFUNC_SRC_COLOR;
+		case VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR:		return rr::BLENDFUNC_ONE_MINUS_SRC_COLOR;
+		case VK_BLEND_FACTOR_DST_COLOR:					return rr::BLENDFUNC_DST_COLOR;
+		case VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR:		return rr::BLENDFUNC_ONE_MINUS_DST_COLOR;
+		case VK_BLEND_FACTOR_SRC_ALPHA:					return rr::BLENDFUNC_SRC_ALPHA;
+		case VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA:		return rr::BLENDFUNC_ONE_MINUS_SRC_ALPHA;
+		case VK_BLEND_FACTOR_DST_ALPHA:					return rr::BLENDFUNC_DST_ALPHA;
+		case VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA:		return rr::BLENDFUNC_ONE_MINUS_DST_ALPHA;
+		case VK_BLEND_FACTOR_CONSTANT_COLOR:			return rr::BLENDFUNC_CONSTANT_COLOR;
+		case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR:	return rr::BLENDFUNC_ONE_MINUS_CONSTANT_COLOR;
+		case VK_BLEND_FACTOR_CONSTANT_ALPHA:			return rr::BLENDFUNC_CONSTANT_ALPHA;
+		case VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA:	return rr::BLENDFUNC_ONE_MINUS_CONSTANT_ALPHA;
+		case VK_BLEND_FACTOR_SRC_ALPHA_SATURATE:		return rr::BLENDFUNC_SRC_ALPHA_SATURATE;
+		case VK_BLEND_FACTOR_SRC1_COLOR:				return rr::BLENDFUNC_SRC1_COLOR;
+		case VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR:		return rr::BLENDFUNC_ONE_MINUS_SRC1_COLOR;
+		case VK_BLEND_FACTOR_SRC1_ALPHA:				return rr::BLENDFUNC_SRC1_ALPHA;
+		case VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA:		return rr::BLENDFUNC_ONE_MINUS_SRC1_ALPHA;
+		default:
+			DE_ASSERT(false);
+	}
+	return rr::BLENDFUNC_LAST;
+}
+
+rr::BlendEquation mapVkBlendOp (VkBlendOp blendOp)
+{
+	switch (blendOp)
+	{
+		case VK_BLEND_OP_ADD:					return rr::BLENDEQUATION_ADD;
+		case VK_BLEND_OP_SUBTRACT:				return rr::BLENDEQUATION_SUBTRACT;
+		case VK_BLEND_OP_REVERSE_SUBTRACT:		return rr::BLENDEQUATION_REVERSE_SUBTRACT;
+		case VK_BLEND_OP_MIN:					return rr::BLENDEQUATION_MIN;
+		case VK_BLEND_OP_MAX:					return rr::BLENDEQUATION_MAX;
+		default:
+			DE_ASSERT(false);
+	}
+	return rr::BLENDEQUATION_LAST;
+}
+
+tcu::BVec4 mapVkColorComponentFlags (VkColorComponentFlags flags)
+{
+	return tcu::BVec4((flags & VK_COLOR_COMPONENT_R_BIT) != 0,
+					  (flags & VK_COLOR_COMPONENT_G_BIT) != 0,
+					  (flags & VK_COLOR_COMPONENT_B_BIT) != 0,
+					  (flags & VK_COLOR_COMPONENT_A_BIT) != 0);
+}
+
+rr::TestFunc mapVkCompareOp (VkCompareOp compareFunc)
+{
+	switch (compareFunc)
+	{
+		case VK_COMPARE_OP_NEVER:				return rr::TESTFUNC_NEVER;
+		case VK_COMPARE_OP_LESS:				return rr::TESTFUNC_LESS;
+		case VK_COMPARE_OP_EQUAL:				return rr::TESTFUNC_EQUAL;
+		case VK_COMPARE_OP_LESS_OR_EQUAL:		return rr::TESTFUNC_LEQUAL;
+		case VK_COMPARE_OP_GREATER:				return rr::TESTFUNC_GREATER;
+		case VK_COMPARE_OP_NOT_EQUAL:			return rr::TESTFUNC_NOTEQUAL;
+		case VK_COMPARE_OP_GREATER_OR_EQUAL:	return rr::TESTFUNC_GEQUAL;
+		case VK_COMPARE_OP_ALWAYS:				return rr::TESTFUNC_ALWAYS;
+		default:
+			DE_ASSERT(false);
+	}
+	return rr::TESTFUNC_LAST;
+}
+
+rr::PrimitiveType mapVkPrimitiveTopology (VkPrimitiveTopology primitiveTopology)
+{
+	switch (primitiveTopology)
+	{
+		case VK_PRIMITIVE_TOPOLOGY_POINT_LIST:						return rr::PRIMITIVETYPE_POINTS;
+		case VK_PRIMITIVE_TOPOLOGY_LINE_LIST:						return rr::PRIMITIVETYPE_LINES;
+		case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:						return rr::PRIMITIVETYPE_LINE_STRIP;
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:					return rr::PRIMITIVETYPE_TRIANGLES;
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:					return rr::PRIMITIVETYPE_TRIANGLE_FAN;
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:					return rr::PRIMITIVETYPE_TRIANGLE_STRIP;
+		case VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY:		return rr::PRIMITIVETYPE_LINES_ADJACENCY;
+		case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY:		return rr::PRIMITIVETYPE_LINE_STRIP_ADJACENCY;
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY:	return rr::PRIMITIVETYPE_TRIANGLES_ADJACENCY;
+		case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY:	return rr::PRIMITIVETYPE_TRIANGLE_STRIP_ADJACENCY;
+		default:
+			DE_ASSERT(false);
+	}
+	return rr::PRIMITIVETYPE_LAST;
+}
+
+rr::StencilOp mapVkStencilOp (vk::VkStencilOp stencilOp)
+{
+	switch (stencilOp)
+	{
+		case VK_STENCIL_OP_KEEP:					return rr::STENCILOP_KEEP;
+		case VK_STENCIL_OP_ZERO:					return rr::STENCILOP_ZERO;
+		case VK_STENCIL_OP_REPLACE:					return rr::STENCILOP_REPLACE;
+		case VK_STENCIL_OP_INCREMENT_AND_CLAMP:		return rr::STENCILOP_INCR;
+		case VK_STENCIL_OP_DECREMENT_AND_CLAMP:		return rr::STENCILOP_DECR;
+		case VK_STENCIL_OP_INVERT:					return rr::STENCILOP_INVERT;
+		case VK_STENCIL_OP_INCREMENT_AND_WRAP:		return rr::STENCILOP_INCR_WRAP;
+		case VK_STENCIL_OP_DECREMENT_AND_WRAP:		return rr::STENCILOP_DECR_WRAP;
+		default:
+			DE_ASSERT(false);
+	}
+	return rr::STENCILOP_LAST;
+}
+
+tcu::Vec4 swizzle (const tcu::Vec4& color, const tcu::UVec4& swizzle)
+{
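+	// Index 0 selects constant 0.0, index 1 selects constant 1.0, and indices 2..5 select the R, G, B and A channels of the input color.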
+	const float channelValues[] =
+	{
+		0.0f,
+		1.0f,
+		color.x(),
+		color.y(),
+		color.z(),
+		color.w()
+	};
+
+	return tcu::Vec4(channelValues[swizzle.x()],
+					 channelValues[swizzle.y()],
+					 channelValues[swizzle.z()],
+					 channelValues[swizzle.w()]);
+}
+
+ReferenceRenderer::ReferenceRenderer(int						surfaceWidth,
+									 int						surfaceHeight,
+									 int						numSamples,
+									 const tcu::TextureFormat&	colorFormat,
+									 const tcu::TextureFormat&	depthStencilFormat,
+									 const rr::Program* const	program)
+	: m_surfaceWidth		(surfaceWidth)
+	, m_surfaceHeight		(surfaceHeight)
+	, m_numSamples			(numSamples)
+	, m_colorFormat			(colorFormat)
+	, m_depthStencilFormat	(depthStencilFormat)
+	, m_program				(program)
+{
+	const tcu::TextureChannelClass	formatClass				= tcu::getTextureChannelClass(colorFormat.type);
+	const bool						hasDepthStencil			= (m_depthStencilFormat.order != tcu::TextureFormat::CHANNELORDER_LAST);
+	const bool						hasDepthBufferOnly		= (m_depthStencilFormat.order == tcu::TextureFormat::D);
+	const bool						hasStencilBufferOnly	= (m_depthStencilFormat.order == tcu::TextureFormat::S);
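+	// Integer formats cannot be resolved by averaging samples, so render them single-sampled.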
+	const int						actualSamples			= (formatClass == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER || formatClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER) ? 1 : m_numSamples;
+
+	m_colorBuffer.setStorage(m_colorFormat, actualSamples, m_surfaceWidth, m_surfaceHeight);
+	m_resolveColorBuffer.setStorage(m_colorFormat, m_surfaceWidth, m_surfaceHeight);
+
+	if (formatClass == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER)
+	{
+		tcu::clear(m_colorBuffer.getAccess(), defaultClearColorInt(m_colorFormat));
+		tcu::clear(m_resolveColorBuffer.getAccess(), defaultClearColorInt(m_colorFormat));
+	}
+	else if (formatClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)
+	{
+		tcu::clear(m_colorBuffer.getAccess(), defaultClearColorUint(m_colorFormat));
+		tcu::clear(m_resolveColorBuffer.getAccess(), defaultClearColorUint(m_colorFormat));
+	}
+	else
+	{
+		tcu::Vec4 clearColor = defaultClearColor(m_colorFormat);
+
+		if (isSRGB(m_colorFormat))
+			clearColor = tcu::linearToSRGB(clearColor);
+
+		tcu::clear(m_colorBuffer.getAccess(), clearColor);
+		tcu::clear(m_resolveColorBuffer.getAccess(), clearColor);
+	}
+
+	if (hasDepthStencil)
+	{
+		if (hasDepthBufferOnly)
+		{
+			m_depthStencilBuffer.setStorage(m_depthStencilFormat, actualSamples, surfaceWidth, surfaceHeight);
+			tcu::clearDepth(m_depthStencilBuffer.getAccess(), defaultClearDepth());
+
+			m_renderTarget = new rr::RenderTarget(rr::MultisamplePixelBufferAccess::fromMultisampleAccess(m_colorBuffer.getAccess()),
+												  rr::MultisamplePixelBufferAccess::fromMultisampleAccess(m_depthStencilBuffer.getAccess()));
+		}
+		else if (hasStencilBufferOnly)
+		{
+			m_depthStencilBuffer.setStorage(m_depthStencilFormat, actualSamples, surfaceWidth, surfaceHeight);
+			tcu::clearStencil(m_depthStencilBuffer.getAccess(), defaultClearStencil());
+
+			m_renderTarget = new rr::RenderTarget(rr::MultisamplePixelBufferAccess::fromMultisampleAccess(m_colorBuffer.getAccess()),
+												  rr::MultisamplePixelBufferAccess(),
+												  rr::MultisamplePixelBufferAccess::fromMultisampleAccess(m_depthStencilBuffer.getAccess()));
+		}
+		else
+		{
+			m_depthStencilBuffer.setStorage(m_depthStencilFormat, actualSamples, surfaceWidth, surfaceHeight);
+
+			tcu::clearDepth(m_depthStencilBuffer.getAccess(), defaultClearDepth());
+			tcu::clearStencil(m_depthStencilBuffer.getAccess(), defaultClearStencil());
+
+			m_renderTarget = new rr::RenderTarget(rr::MultisamplePixelBufferAccess::fromMultisampleAccess(m_colorBuffer.getAccess()),
+												  rr::MultisamplePixelBufferAccess::fromMultisampleAccess(m_depthStencilBuffer.getAccess()),
+												  rr::MultisamplePixelBufferAccess::fromMultisampleAccess(m_depthStencilBuffer.getAccess()));
+		}
+	}
+	else
+	{
+		m_renderTarget = new rr::RenderTarget(rr::MultisamplePixelBufferAccess::fromMultisampleAccess(m_colorBuffer.getAccess()));
+	}
+}
+
+ReferenceRenderer::~ReferenceRenderer (void)
+{
+	delete m_renderTarget;
+}
+
+void ReferenceRenderer::colorClear (const tcu::Vec4& color)
+{
+	tcu::clear(m_colorBuffer.getAccess(), color);
+	tcu::clear(m_resolveColorBuffer.getAccess(), color);
+}
+
+void ReferenceRenderer::draw (const rr::RenderState&			renderState,
+							  const rr::PrimitiveType			primitive,
+							  const std::vector<Vertex4RGBA>&	vertexBuffer)
+{
+	const rr::PrimitiveList primitives(primitive, (int)vertexBuffer.size(), 0);
+
+	std::vector<tcu::Vec4> positions;
+	std::vector<tcu::Vec4> colors;
+
+	for (size_t vertexNdx = 0; vertexNdx < vertexBuffer.size(); vertexNdx++)
+	{
+		const Vertex4RGBA& v = vertexBuffer[vertexNdx];
+		positions.push_back(v.position);
+		colors.push_back(v.color);
+	}
+
+	rr::VertexAttrib vertexAttribs[2];
+
+	// Position attribute
+	vertexAttribs[0].type		= rr::VERTEXATTRIBTYPE_FLOAT;
+	vertexAttribs[0].size		= 4;
+	vertexAttribs[0].pointer	= positions.data();
+	// Color attribute
+	vertexAttribs[1].type		= rr::VERTEXATTRIBTYPE_FLOAT;
+	vertexAttribs[1].size		= 4;
+	vertexAttribs[1].pointer	= colors.data();
+
+	rr::DrawCommand drawQuadCommand(renderState, *m_renderTarget, *m_program, 2, vertexAttribs, primitives);
+
+	m_renderer.draw(drawQuadCommand);
+}
+
+void ReferenceRenderer::draw (const rr::RenderState&			renderState,
+							  const rr::PrimitiveType			primitive,
+							  const std::vector<Vertex4Tex4>&	vertexBuffer)
+{
+	const rr::PrimitiveList primitives(primitive, (int)vertexBuffer.size(), 0);
+
+	std::vector<tcu::Vec4> positions;
+	std::vector<tcu::Vec4> texCoords;
+
+	for (size_t vertexNdx = 0; vertexNdx < vertexBuffer.size(); vertexNdx++)
+	{
+		const Vertex4Tex4& v = vertexBuffer[vertexNdx];
+		positions.push_back(v.position);
+		texCoords.push_back(v.texCoord);
+	}
+
+	rr::VertexAttrib vertexAttribs[2];
+
+	// Position attribute
+	vertexAttribs[0].type		= rr::VERTEXATTRIBTYPE_FLOAT;
+	vertexAttribs[0].size		= 4;
+	vertexAttribs[0].pointer	= positions.data();
+	// UV attribute
+	vertexAttribs[1].type		= rr::VERTEXATTRIBTYPE_FLOAT;
+	vertexAttribs[1].size		= 4;
+	vertexAttribs[1].pointer	= texCoords.data();
+
+	rr::DrawCommand drawQuadCommand(renderState, *m_renderTarget, *m_program, 2, vertexAttribs, primitives);
+
+	m_renderer.draw(drawQuadCommand);
+}
+
+tcu::PixelBufferAccess ReferenceRenderer::getAccess (void)
+{
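+	// Resolve the (possibly multisampled) color buffer into the single-sampled resolve buffer before exposing it.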
+	rr::MultisampleConstPixelBufferAccess multiSampleAccess = rr::MultisampleConstPixelBufferAccess::fromMultisampleAccess(m_colorBuffer.getAccess());
+	rr::resolveMultisampleColorBuffer(m_resolveColorBuffer.getAccess(), multiSampleAccess);
+
+	return m_resolveColorBuffer.getAccess();
+}
+
+const rr::ViewportState ReferenceRenderer::getViewportState (void) const
+{
+	return rr::ViewportState(rr::WindowRectangle(0, 0, m_surfaceWidth, m_surfaceHeight));
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineReferenceRenderer.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineReferenceRenderer.hpp
new file mode 100644
index 0000000..3de3cf1
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineReferenceRenderer.hpp
@@ -0,0 +1,351 @@
+#ifndef _VKTPIPELINEREFERENCERENDERER_HPP
+#define _VKTPIPELINEREFERENCERENDERER_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Reference renderer.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "vktPipelineVertexUtil.hpp"
+#include "tcuVector.hpp"
+#include "tcuVectorType.hpp"
+#include "tcuTexture.hpp"
+#include "tcuTextureUtil.hpp"
+#include "rrRenderState.hpp"
+#include "rrRenderer.hpp"
+#include <cstring>
+
+namespace vkt
+{
+
+namespace pipeline
+{
+
+tcu::Vec4	swizzle		(const tcu::Vec4& color, const tcu::UVec4& swizzle);
+
+class ColorVertexShader : public rr::VertexShader
+{
+public:
+	ColorVertexShader (void) : rr::VertexShader(2, 2)
+	{
+		m_inputs[0].type	= rr::GENERICVECTYPE_FLOAT;
+		m_inputs[1].type	= rr::GENERICVECTYPE_FLOAT;
+
+		m_outputs[0].type	= rr::GENERICVECTYPE_FLOAT;
+		m_outputs[1].type	= rr::GENERICVECTYPE_FLOAT;
+	}
+
+	virtual ~ColorVertexShader (void) {}
+
+	virtual void shadeVertices (const rr::VertexAttrib*		inputs,
+								rr::VertexPacket* const*	packets,
+								const int					numPackets) const
+	{
+		tcu::Vec4 position;
+		tcu::Vec4 color;
+
+		for (int packetNdx = 0; packetNdx < numPackets; packetNdx++)
+		{
+			rr::VertexPacket* const packet	= packets[packetNdx];
+
+			readVertexAttrib(position, inputs[0], packet->instanceNdx, packet->vertexNdx);
+			readVertexAttrib(color, inputs[1], packet->instanceNdx, packet->vertexNdx);
+
+			packet->outputs[0]	= position;
+			packet->outputs[1]	= color;
+			packet->position	= position;
+		}
+	}
+};
+
+class TexCoordVertexShader : public rr::VertexShader
+{
+public:
+	TexCoordVertexShader (void) : rr::VertexShader(2, 2)
+	{
+		m_inputs[0].type	= rr::GENERICVECTYPE_FLOAT;
+		m_inputs[1].type	= rr::GENERICVECTYPE_FLOAT;
+
+		m_outputs[0].type	= rr::GENERICVECTYPE_FLOAT;
+		m_outputs[1].type	= rr::GENERICVECTYPE_FLOAT;
+	}
+
+	virtual ~TexCoordVertexShader (void) {}
+
+	virtual void shadeVertices (const rr::VertexAttrib*		inputs,
+								rr::VertexPacket* const*	packets,
+								const int					numPackets) const
+	{
+		tcu::Vec4 position;
+		tcu::Vec4 texCoord;
+
+		for (int packetNdx = 0; packetNdx < numPackets; packetNdx++)
+		{
+			rr::VertexPacket* const packet	= packets[packetNdx];
+
+			readVertexAttrib(position, inputs[0], packet->instanceNdx, packet->vertexNdx);
+			readVertexAttrib(texCoord, inputs[1], packet->instanceNdx, packet->vertexNdx);
+
+			packet->outputs[0]	= position;
+			packet->outputs[1]	= texCoord;
+			packet->position	= position;
+		}
+	}
+};
+
+class ColorFragmentShader : public rr::FragmentShader
+{
+private:
+	const tcu::TextureFormat		m_colorFormat;
+	const tcu::TextureFormat		m_depthStencilFormat;
+
+public:
+	ColorFragmentShader (const tcu::TextureFormat& colorFormat,
+						 const tcu::TextureFormat& depthStencilFormat)
+		: rr::FragmentShader	(2, 1)
+		, m_colorFormat			(colorFormat)
+		, m_depthStencilFormat	(depthStencilFormat)
+	{
+		const tcu::TextureChannelClass channelClass = tcu::getTextureChannelClass(m_colorFormat.type);
+
+		m_inputs[0].type	= rr::GENERICVECTYPE_FLOAT;
+		m_inputs[1].type	= rr::GENERICVECTYPE_FLOAT;
+		m_outputs[0].type	= (channelClass == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER)? rr::GENERICVECTYPE_INT32 :
+							  (channelClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)? rr::GENERICVECTYPE_UINT32
+							  : rr::GENERICVECTYPE_FLOAT;
+	}
+
+	virtual ~ColorFragmentShader (void) {}
+
+	virtual void shadeFragments (rr::FragmentPacket*				packets,
+								 const int							numPackets,
+								 const rr::FragmentShadingContext&	context) const
+	{
+		for (int packetNdx = 0; packetNdx < numPackets; packetNdx++)
+		{
+			const rr::FragmentPacket& packet = packets[packetNdx];
+
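+			// If a depth attachment is present, write the interpolated vertex position's Z as the fragment depth.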
+			if (m_depthStencilFormat.order == tcu::TextureFormat::D || m_depthStencilFormat.order == tcu::TextureFormat::DS)
+			{
+				for (int fragNdx = 0; fragNdx < 4; fragNdx++)
+				{
+					const tcu::Vec4 vtxPosition = rr::readVarying<float>(packet, context, 0, fragNdx);
+					rr::writeFragmentDepth(context, packetNdx, fragNdx, 0, vtxPosition.z());
+				}
+			}
+
+			for (int fragNdx = 0; fragNdx < 4; fragNdx++)
+			{
+				const tcu::Vec4 vtxColor = rr::readVarying<float>(packet, context, 1, fragNdx);
+				rr::writeFragmentOutput(context, packetNdx, fragNdx, 0, vtxColor);
+			}
+		}
+	}
+};
+
+template<typename TextureType>
+class SamplerFragmentShader : public rr::FragmentShader
+{
+private:
+	const tcu::TextureFormat		m_colorFormat;
+	const tcu::TextureFormatInfo	m_colorFormatInfo;
+	const TextureType				m_texture;
+	const tcu::Sampler				m_sampler;
+	const float						m_lod;
+	const tcu::Vec4					m_lookupScale;
+	const tcu::Vec4					m_lookupBias;
+	const tcu::UVec4				m_swizzle;
+
+public:
+	SamplerFragmentShader (const tcu::TextureFormat& colorFormat, const TextureType& texture, const tcu::Sampler& sampler, float lod, const tcu::Vec4& lookupScale, const tcu::Vec4& lookupBias, const tcu::UVec4& swizzle)
+		: rr::FragmentShader	(2, 1)
+		, m_colorFormat			(colorFormat)
+		, m_colorFormatInfo		(tcu::getTextureFormatInfo(m_colorFormat))
+		, m_texture				(texture)
+		, m_sampler				(sampler)
+		, m_lod					(lod)
+		, m_lookupScale			(lookupScale)
+		, m_lookupBias			(lookupBias)
+		, m_swizzle				(swizzle)
+	{
+		const tcu::TextureChannelClass channelClass = tcu::getTextureChannelClass(m_colorFormat.type);
+		m_inputs[0].type	= rr::GENERICVECTYPE_FLOAT;
+		m_inputs[1].type	= rr::GENERICVECTYPE_FLOAT;
+		m_outputs[0].type	= (channelClass == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER)? rr::GENERICVECTYPE_INT32 :
+							  (channelClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)? rr::GENERICVECTYPE_UINT32
+							  : rr::GENERICVECTYPE_FLOAT;
+	}
+
+	virtual ~SamplerFragmentShader (void)
+	{
+	}
+
+	static tcu::Vec4 sampleTexture (const tcu::Texture1D& texture, const tcu::Sampler& sampler, const tcu::Vec4& texCoord, float lod)
+	{
+		return texture.sample(sampler, texCoord.x(), lod);
+	}
+
+	static tcu::Vec4 sampleTexture (const tcu::Texture1DArray& texture, const tcu::Sampler& sampler, const tcu::Vec4& texCoord, float lod)
+	{
+		return texture.sample(sampler, texCoord.x(), texCoord.y(), lod);
+	}
+
+	static tcu::Vec4 sampleTexture (const tcu::Texture2D& texture, const tcu::Sampler& sampler, const tcu::Vec4& texCoord, float lod)
+	{
+		return texture.sample(sampler, texCoord.x(), texCoord.y(), lod);
+	}
+
+	static tcu::Vec4 sampleTexture (const tcu::Texture2DArray& texture, const tcu::Sampler& sampler, const tcu::Vec4& texCoord, float lod)
+	{
+		return texture.sample(sampler, texCoord.x(), texCoord.y(), texCoord.z(), lod);
+	}
+
+	static tcu::Vec4 sampleTexture (const tcu::Texture3D& texture, const tcu::Sampler& sampler, const tcu::Vec4& texCoord, float lod)
+	{
+		return texture.sample(sampler, texCoord.x(), texCoord.y(), texCoord.z(), lod);
+	}
+
+	static tcu::Vec4 sampleTexture (const tcu::TextureCube& texture, const tcu::Sampler& sampler, const tcu::Vec4& texCoord, float lod)
+	{
+		return texture.sample(sampler, texCoord.x(), texCoord.y(), texCoord.z(), lod);
+	}
+
+	static tcu::Vec4 sampleTexture (const tcu::TextureCubeArray& texture, const tcu::Sampler& sampler, const tcu::Vec4& texCoord, float lod)
+	{
+		return texture.sample(sampler, texCoord.x(), texCoord.y(), texCoord.z(), texCoord.w(), lod);
+	}
+
+	virtual void shadeFragments (rr::FragmentPacket*				packets,
+								 const int							numPackets,
+								 const rr::FragmentShadingContext&	context) const
+	{
+		for (int packetNdx = 0; packetNdx < numPackets; packetNdx++)
+		{
+			const rr::FragmentPacket& packet = packets[packetNdx];
+
+			for (int fragNdx = 0; fragNdx < 4; fragNdx++)
+			{
+				const tcu::Vec4	vtxTexCoord	= rr::readVarying<float>(packet, context, 1, fragNdx);
+				const tcu::Vec4	texColor	= sampleTexture(m_texture, m_sampler, vtxTexCoord, m_lod);
+				const tcu::Vec4	normColor	= texColor * m_lookupScale + m_lookupBias;
+				const tcu::Vec4 swizColor	= swizzle(normColor, m_swizzle);
+				const tcu::Vec4	color		= (swizColor + m_colorFormatInfo.lookupBias) / m_colorFormatInfo.lookupScale;
+				rr::writeFragmentOutput(context, packetNdx, fragNdx, 0, color);
+			}
+		}
+	}
+};
+
+class Program
+{
+public:
+	virtual ~Program (void) { }
+
+	virtual rr::Program getReferenceProgram (void) const = 0;
+};
+
+template<typename TextureType>
+class SamplerProgram: public Program
+{
+private:
+	TexCoordVertexShader				m_vertexShader;
+	SamplerFragmentShader<TextureType>	m_fragmentShader;
+public:
+	SamplerProgram (const tcu::TextureFormat& colorFormat, const TextureType& texture, const tcu::Sampler& sampler, float lod, const tcu::Vec4& lookupScale, const tcu::Vec4& lookupBias, const tcu::UVec4& swizzle)
+		: m_vertexShader	()
+		, m_fragmentShader	(colorFormat, texture, sampler, lod, lookupScale, lookupBias, swizzle)
+	{
+	}
+
+	virtual ~SamplerProgram (void) { }
+
+	virtual rr::Program getReferenceProgram (void) const
+	{
+		return rr::Program(&m_vertexShader, &m_fragmentShader);
+	}
+};
+
+class ReferenceRenderer
+{
+public:
+								ReferenceRenderer		(int							surfaceWidth,
+														 int							surfaceHeight,
+														 int							numSamples,
+														 const tcu::TextureFormat&		colorFormat,
+														 const tcu::TextureFormat&		depthStencilFormat,
+														 const rr::Program* const		program);
+
+	virtual						~ReferenceRenderer		(void);
+
+	void						colorClear				(const tcu::Vec4& color);
+
+	void						draw					(const rr::RenderState&				renderState,
+														 const rr::PrimitiveType			primitive,
+														 const std::vector<Vertex4RGBA>&	vertexBuffer);
+
+	void						draw					(const rr::RenderState&				renderState,
+														 const rr::PrimitiveType			primitive,
+														 const std::vector<Vertex4Tex4>&	vertexBuffer);
+
+	tcu::PixelBufferAccess		getAccess				(void);
+	const rr::ViewportState		getViewportState		(void) const;
+
+private:
+	rr::Renderer				m_renderer;
+
+	const int					m_surfaceWidth;
+	const int					m_surfaceHeight;
+	const int					m_numSamples;
+
+	const tcu::TextureFormat	m_colorFormat;
+	const tcu::TextureFormat	m_depthStencilFormat;
+
+	tcu::TextureLevel			m_colorBuffer;
+	tcu::TextureLevel			m_resolveColorBuffer;
+	tcu::TextureLevel			m_depthStencilBuffer;
+
+	rr::RenderTarget*			m_renderTarget;
+	const rr::Program*			m_program;
+};
+
+rr::TestFunc					mapVkCompareOp				(vk::VkCompareOp compareFunc);
+rr::PrimitiveType				mapVkPrimitiveTopology		(vk::VkPrimitiveTopology primitiveTopology);
+rr::BlendFunc					mapVkBlendFactor			(vk::VkBlendFactor blendFactor);
+rr::BlendEquation				mapVkBlendOp				(vk::VkBlendOp blendOp);
+tcu::BVec4						mapVkColorComponentFlags	(vk::VkColorComponentFlags flags);
+rr::StencilOp					mapVkStencilOp				(vk::VkStencilOp stencilOp);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINEREFERENCERENDERER_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineSamplerTests.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineSamplerTests.cpp
new file mode 100644
index 0000000..de60cef
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineSamplerTests.cpp
@@ -0,0 +1,924 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Sampler Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineSamplerTests.hpp"
+#include "vktPipelineImageSamplingInstance.hpp"
+#include "vktPipelineImageUtil.hpp"
+#include "vktPipelineVertexUtil.hpp"
+#include "vktTestCase.hpp"
+#include "vkImageUtil.hpp"
+#include "vkPrograms.hpp"
+#include "tcuPlatform.hpp"
+#include "tcuTextureUtil.hpp"
+#include "deStringUtil.hpp"
+#include "deMemory.h"
+
+#include <iomanip>
+#include <sstream>
+#include <vector>
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+using de::MovePtr;
+
+namespace
+{
+
+class SamplerTest : public vkt::TestCase
+{
+public:
+										SamplerTest				(tcu::TestContext&	testContext,
+																 const char*		name,
+																 const char*		description,
+																 VkImageViewType	imageViewType,
+																 VkFormat			imageFormat,
+																 int				imageSize,
+																 float				samplerLod);
+	virtual								~SamplerTest			(void) {}
+
+	virtual void						initPrograms			(SourceCollections& sourceCollections) const;
+	virtual TestInstance*				createInstance			(Context& context) const;
+	virtual tcu::UVec2					getRenderSize			(VkImageViewType viewType) const;
+	virtual std::vector<Vertex4Tex4>	createVertices			(void) const;
+	virtual VkSamplerCreateInfo			getSamplerCreateInfo	(void) const;
+
+	static std::string					getGlslSamplerType		(const tcu::TextureFormat& format, VkImageViewType type);
+	static tcu::IVec3					getImageSize			(VkImageViewType viewType, int size);
+	static int							getArraySize			(VkImageViewType viewType);
+
+protected:
+	VkImageViewType						m_imageViewType;
+	VkFormat							m_imageFormat;
+	int									m_imageSize;
+	VkImageViewCreateInfo				m_imageViewParams;
+	VkSamplerCreateInfo					m_samplerParams;
+	float								m_samplerLod;
+};
+
+class SamplerMagFilterTest : public SamplerTest
+{
+public:
+									SamplerMagFilterTest	(tcu::TestContext&	testContext,
+															 const char*		name,
+															 const char*		description,
+															 VkImageViewType	imageViewType,
+															 VkFormat			imageFormat,
+															 VkFilter			magFilter);
+	virtual							~SamplerMagFilterTest	(void) {}
+	virtual VkSamplerCreateInfo		getSamplerCreateInfo	(void) const;
+
+private:
+	VkFilter						m_magFilter;
+};
+
+class SamplerMinFilterTest : public SamplerTest
+{
+public:
+									SamplerMinFilterTest	(tcu::TestContext&	testContext,
+															 const char*		name,
+															 const char*		description,
+															 VkImageViewType	imageViewType,
+															 VkFormat			imageFormat,
+															 VkFilter			minFilter);
+	virtual							~SamplerMinFilterTest	(void) {}
+	virtual VkSamplerCreateInfo		getSamplerCreateInfo	(void) const;
+
+private:
+	VkFilter						m_minFilter;
+};
+
+class SamplerLodTest : public SamplerTest
+{
+public:
+									SamplerLodTest			(tcu::TestContext&		testContext,
+															 const char*			name,
+															 const char*			description,
+															 VkImageViewType		imageViewType,
+															 VkFormat				imageFormat,
+															 VkSamplerMipmapMode	mipmapMode,
+															 float					minLod,
+															 float					maxLod,
+															 float					mipLodBias,
+															 float					samplerLod);
+	virtual							~SamplerLodTest			(void) {}
+	virtual VkSamplerCreateInfo		getSamplerCreateInfo	(void) const;
+
+private:
+	VkSamplerMipmapMode				m_mipmapMode;
+	float							m_minLod;
+	float							m_maxLod;
+	float							m_mipLodBias;
+};
+
+class SamplerAddressModesTest : public SamplerTest
+{
+public:
+										SamplerAddressModesTest		(tcu::TestContext&		testContext,
+																	 const char*			name,
+																	 const char*			description,
+																	 VkImageViewType		imageViewType,
+																	 VkFormat				imageFormat,
+																	 VkSamplerAddressMode	addressU,
+																	 VkSamplerAddressMode	addressV,
+																	 VkSamplerAddressMode	addressW,
+																	 VkBorderColor			borderColor);
+	virtual								~SamplerAddressModesTest	(void) {}
+	virtual tcu::UVec2					getRenderSize				(VkImageViewType viewType) const;
+	virtual std::vector<Vertex4Tex4>	createVertices				(void) const;
+	virtual VkSamplerCreateInfo			getSamplerCreateInfo		(void) const;
+
+private:
+	VkSamplerAddressMode				m_addressU;
+	VkSamplerAddressMode				m_addressV;
+	VkSamplerAddressMode				m_addressW;
+	VkBorderColor						m_borderColor;
+};
+
+
+// SamplerTest
+
+SamplerTest::SamplerTest (tcu::TestContext&	testContext,
+						  const char*		name,
+						  const char*		description,
+						  VkImageViewType	imageViewType,
+						  VkFormat			imageFormat,
+						  int				imageSize,
+						  float				samplerLod)
+	: vkt::TestCase		(testContext, name, description)
+	, m_imageViewType	(imageViewType)
+	, m_imageFormat		(imageFormat)
+	, m_imageSize		(imageSize)
+	, m_samplerLod		(samplerLod)
+{
+}
+
+void SamplerTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	std::ostringstream				vertexSrc;
+	std::ostringstream				fragmentSrc;
+	const char*						texCoordSwizzle	= DE_NULL;
+	tcu::TextureFormat				format			= (isCompressedFormat(m_imageFormat)) ? tcu::getUncompressedFormat(mapVkCompressedFormat(m_imageFormat))
+																						  : mapVkFormat(m_imageFormat);
+
+	// \note We don't want to perform normalization on any compressed formats.
+	//		 For non-sRGB LDR ASTC it would lead to a lack of coverage, as the
+	//		 uncompressed format for it is f16 but the values are already in
+	//		 the range 0..1.
+	const tcu::TextureFormatInfo	formatInfo		= (!isCompressedFormat(m_imageFormat) ? tcu::getTextureFormatInfo(format)
+																						  : tcu::getTextureFormatInfo(tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8)));
+
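+	// Select the texture coordinate components consumed by the sampler type of the image view.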
+	switch (m_imageViewType)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D:
+			texCoordSwizzle = "x";
+			break;
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+		case VK_IMAGE_VIEW_TYPE_2D:
+			texCoordSwizzle = "xy";
+			break;
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+		case VK_IMAGE_VIEW_TYPE_3D:
+		case VK_IMAGE_VIEW_TYPE_CUBE:
+			texCoordSwizzle = "xyz";
+			break;
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			texCoordSwizzle = "xyzw";
+			break;
+		default:
+			DE_ASSERT(false);
+			break;
+	}
+
+	vertexSrc << "#version 440\n"
+			  << "layout(location = 0) in vec4 position;\n"
+			  << "layout(location = 1) in vec4 texCoords;\n"
+			  << "layout(location = 0) out highp vec4 vtxTexCoords;\n"
+			  << "out gl_PerVertex {\n"
+			  << "	vec4 gl_Position;\n"
+			  << "};\n"
+			  << "void main (void)\n"
+			  << "{\n"
+			  << "	gl_Position = position;\n"
+			  << "	vtxTexCoords = texCoords;\n"
+			  << "}\n";
+
+	fragmentSrc << "#version 440\n"
+				<< "layout(set = 0, binding = 0) uniform highp " << getGlslSamplerType(format, m_imageViewType) << " texSampler;\n"
+				<< "layout(location = 0) in highp vec4 vtxTexCoords;\n"
+				<< "layout(location = 0) out highp vec4 fragColor;\n"
+				<< "void main (void)\n"
+				<< "{\n"
+				<< "	fragColor = ";
+
+	if (m_samplerLod > 0.0f)
+		fragmentSrc << "textureLod(texSampler, vtxTexCoords." << texCoordSwizzle << ", " << std::fixed << m_samplerLod << ")";
+	else
+		fragmentSrc << "texture(texSampler, vtxTexCoords." << texCoordSwizzle << ")" << std::fixed;
+
+	fragmentSrc << " * vec4" << std::scientific << formatInfo.lookupScale << " + vec4" << formatInfo.lookupBias << ";\n"
+				<< "}\n";
+
+	sourceCollections.glslSources.add("tex_vert") << glu::VertexSource(vertexSrc.str());
+	sourceCollections.glslSources.add("tex_frag") << glu::FragmentSource(fragmentSrc.str());
+}
+
+TestInstance* SamplerTest::createInstance (Context& context) const
+{
+	const tcu::UVec2				renderSize			= getRenderSize(m_imageViewType);
+	const std::vector<Vertex4Tex4>	vertices			= createVertices();
+	const VkSamplerCreateInfo		samplerParams		= getSamplerCreateInfo();
+	const VkComponentMapping		componentMapping	= getFormatComponentMapping(m_imageFormat);
+	const VkImageSubresourceRange	subresourceRange	=
+	{
+		VK_IMAGE_ASPECT_COLOR_BIT,								// VkImageAspectFlags	aspectMask;
+		0u,														// deUint32				baseMipLevel;
+		(deUint32)deLog2Floor32(m_imageSize) + 1,				// deUint32				levelCount;
+		0u,														// deUint32				baseArrayLayer;
+		(deUint32)SamplerTest::getArraySize(m_imageViewType)	// deUint32				layerCount;
+	};
+
+	return new ImageSamplingInstance(context, renderSize, m_imageViewType, m_imageFormat,
+									 getImageSize(m_imageViewType, m_imageSize),
+									 getArraySize(m_imageViewType),
+									 componentMapping, subresourceRange,
+									 samplerParams, m_samplerLod, vertices);
+}
+
+tcu::UVec2 SamplerTest::getRenderSize (VkImageViewType viewType) const
+{
+	if (viewType == VK_IMAGE_VIEW_TYPE_1D || viewType == VK_IMAGE_VIEW_TYPE_2D)
+	{
+		return tcu::UVec2(16u, 16u);
+	}
+	else
+	{
+		return tcu::UVec2(16u * 3u, 16u * 2u);
+	}
+}
+
+std::vector<Vertex4Tex4> SamplerTest::createVertices (void) const
+{
+	std::vector<Vertex4Tex4> vertices = createTestQuadMosaic(m_imageViewType);
+	// Adjust texture coordinate to avoid doing NEAREST filtering exactly on texel boundaries.
+	// TODO: Would be nice to base this on number of texels and subtexel precision. But this
+	// seems to work.
+	for (size_t i = 0; i < vertices.size(); ++i)
+	{
+		vertices[i].texCoord += tcu::Vec4(0.001f, 0.001f, 0.001f, 0.0f);
+	}
+	return vertices;
+}
+
+VkSamplerCreateInfo SamplerTest::getSamplerCreateInfo (void) const
+{
+	const VkSamplerCreateInfo defaultSamplerParams =
+	{
+		VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,									// VkStructureType			sType;
+		DE_NULL,																// const void*				pNext;
+		0u,																		// VkSamplerCreateFlags		flags;
+		VK_FILTER_NEAREST,														// VkFilter					magFilter;
+		VK_FILTER_NEAREST,														// VkFilter					minFilter;
+		VK_SAMPLER_MIPMAP_MODE_NEAREST,											// VkSamplerMipmapMode		mipmapMode;
+		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,									// VkSamplerAddressMode		addressModeU;
+		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,									// VkSamplerAddressMode		addressModeV;
+		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,									// VkSamplerAddressMode		addressModeW;
+		0.0f,																	// float					mipLodBias;
+		VK_FALSE,																// VkBool32					anisotropyEnable;
+		1.0f,																	// float					maxAnisotropy;
+		VK_FALSE,																// VkBool32					compareEnable;
+		VK_COMPARE_OP_NEVER,													// VkCompareOp				compareOp;
+		0.0f,																	// float					minLod;
+		0.25f,																	// float					maxLod;
+		getFormatBorderColor(BORDER_COLOR_TRANSPARENT_BLACK, m_imageFormat),	// VkBorderColor			borderColor;
+		VK_FALSE																// VkBool32					unnormalizedCoordinates;
+	};
+
+	return defaultSamplerParams;
+}
+
+std::string SamplerTest::getGlslSamplerType (const tcu::TextureFormat& format, VkImageViewType type)
+{
+	std::ostringstream samplerType;
+
+	if (tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)
+		samplerType << "u";
+	else if (tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER)
+		samplerType << "i";
+
+	switch (type)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D:
+			samplerType << "sampler1D";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+			samplerType << "sampler1DArray";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_2D:
+			samplerType << "sampler2D";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+			samplerType << "sampler2DArray";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_3D:
+			samplerType << "sampler3D";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_CUBE:
+			samplerType << "samplerCube";
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			samplerType << "samplerCubeArray";
+			break;
+
+		default:
+			DE_FATAL("Unknown image view type");
+			break;
+	}
+
+	return samplerType.str();
+}
+
+tcu::IVec3 SamplerTest::getImageSize (VkImageViewType viewType, int size)
+{
+	switch (viewType)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D:
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+			return tcu::IVec3(size, 1, 1);
+
+		case VK_IMAGE_VIEW_TYPE_3D:
+			return tcu::IVec3(size, size, 4);
+
+		default:
+			break;
+	}
+
+	return tcu::IVec3(size, size, 1);
+}
+
+int SamplerTest::getArraySize (VkImageViewType viewType)
+{
+	switch (viewType)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+		case VK_IMAGE_VIEW_TYPE_CUBE:
+			return 6;
+
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			return 36;
+
+		default:
+			break;
+	}
+
+	return 1;
+}
+
+
+// SamplerMagFilterTest
+
+SamplerMagFilterTest::SamplerMagFilterTest (tcu::TestContext&	testContext,
+											const char*			name,
+											const char*			description,
+											VkImageViewType		imageViewType,
+											VkFormat			imageFormat,
+											VkFilter			magFilter)
+	: SamplerTest	(testContext, name, description, imageViewType, imageFormat, 8, 0.0f)
+	, m_magFilter	(magFilter)
+{
+}
+
+VkSamplerCreateInfo SamplerMagFilterTest::getSamplerCreateInfo (void) const
+{
+	VkSamplerCreateInfo samplerParams = SamplerTest::getSamplerCreateInfo();
+	samplerParams.magFilter = m_magFilter;
+
+	return samplerParams;
+}
+
+
+// SamplerMinFilterTest
+
+SamplerMinFilterTest::SamplerMinFilterTest (tcu::TestContext&	testContext,
+											const char*			name,
+											const char*			description,
+											VkImageViewType		imageViewType,
+											VkFormat			imageFormat,
+											VkFilter			minFilter)
+	: SamplerTest	(testContext, name, description, imageViewType, imageFormat, 32, 0.0f)
+	, m_minFilter	(minFilter)
+{
+}
+
+VkSamplerCreateInfo SamplerMinFilterTest::getSamplerCreateInfo (void) const
+{
+	VkSamplerCreateInfo samplerParams = SamplerTest::getSamplerCreateInfo();
+	samplerParams.minFilter = m_minFilter;
+	// Set minLod to a small non-zero value to force use of the minFilter
+	samplerParams.minLod = 0.01f;
+
+	return samplerParams;
+}
+
+
+// SamplerLodTest
+
+SamplerLodTest::SamplerLodTest (tcu::TestContext&	testContext,
+								const char*			name,
+								const char*			description,
+								VkImageViewType		imageViewType,
+								VkFormat			imageFormat,
+								VkSamplerMipmapMode	mipmapMode,
+								float				minLod,
+								float				maxLod,
+								float				mipLodBias,
+								float				samplerLod)
+	: SamplerTest	(testContext, name, description, imageViewType, imageFormat, 32, samplerLod)
+	, m_mipmapMode	(mipmapMode)
+	, m_minLod		(minLod)
+	, m_maxLod		(maxLod)
+	, m_mipLodBias	(mipLodBias)
+{
+}
+
+VkSamplerCreateInfo SamplerLodTest::getSamplerCreateInfo (void) const
+{
+	VkSamplerCreateInfo samplerParams = SamplerTest::getSamplerCreateInfo();
+
+	samplerParams.mipmapMode	= m_mipmapMode;
+	samplerParams.minLod		= m_minLod;
+	samplerParams.maxLod		= m_maxLod;
+	samplerParams.mipLodBias	= m_mipLodBias;
+
+	return samplerParams;
+}
+
+
+// SamplerAddressModesTest
+
+SamplerAddressModesTest::SamplerAddressModesTest (tcu::TestContext&		testContext,
+												  const char*			name,
+												  const char*			description,
+												  VkImageViewType		imageViewType,
+												  VkFormat				imageFormat,
+												  VkSamplerAddressMode	addressU,
+												  VkSamplerAddressMode	addressV,
+												  VkSamplerAddressMode	addressW,
+												  VkBorderColor			borderColor)
+	: SamplerTest	(testContext, name, description, imageViewType, imageFormat, 8, 0.0f)
+	, m_addressU	(addressU)
+	, m_addressV	(addressV)
+	, m_addressW	(addressW)
+	, m_borderColor	(borderColor)
+{
+}
+
+tcu::UVec2 SamplerAddressModesTest::getRenderSize (VkImageViewType viewType) const
+{
+	return 4u * SamplerTest::getRenderSize(viewType);
+}
+
+std::vector<Vertex4Tex4> SamplerAddressModesTest::createVertices (void) const
+{
+	std::vector<Vertex4Tex4> vertices = SamplerTest::createVertices();
+
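+	// For 1D, 2D and 3D views, remap texture coordinates from [0, 1] to [-2, 2] so that sampling outside the image exercises the address modes.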
+	switch (m_imageViewType)
+	{
+		case VK_IMAGE_VIEW_TYPE_1D: case VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+			for (size_t vertexNdx = 0; vertexNdx < vertices.size(); vertexNdx++)
+				vertices[vertexNdx].texCoord.x() = (vertices[vertexNdx].texCoord.x() - 0.5f) * 4.0f;
+
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_2D:
+		case VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+			for (size_t vertexNdx = 0; vertexNdx < vertices.size(); vertexNdx++)
+				vertices[vertexNdx].texCoord.xy() = (vertices[vertexNdx].texCoord.swizzle(0, 1) - tcu::Vec2(0.5f)) * 4.0f;
+
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_3D:
+			for (size_t vertexNdx = 0; vertexNdx < vertices.size(); vertexNdx++)
+				vertices[vertexNdx].texCoord.xyz() = (vertices[vertexNdx].texCoord.swizzle(0, 1, 2) - tcu::Vec3(0.5f)) * 4.0f;
+
+			break;
+
+		case VK_IMAGE_VIEW_TYPE_CUBE:
+		case VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			break;
+
+		default:
+			DE_ASSERT(false);
+	}
+
+	return vertices;
+}
+
+VkSamplerCreateInfo SamplerAddressModesTest::getSamplerCreateInfo (void) const
+{
+	VkSamplerCreateInfo samplerParams = SamplerTest::getSamplerCreateInfo();
+	samplerParams.addressModeU	= m_addressU;
+	samplerParams.addressModeV	= m_addressV;
+	samplerParams.addressModeW	= m_addressW;
+	samplerParams.borderColor	= m_borderColor;
+
+	return samplerParams;
+}
+
+
+// Utilities to create test nodes
+
+std::string getFormatCaseName (const VkFormat format)
+{
+	const std::string fullName = getFormatName(format);
+
+	DE_ASSERT(de::beginsWith(fullName, "VK_FORMAT_"));
+
+	return de::toLower(fullName.substr(10));
+}
+
+MovePtr<tcu::TestCaseGroup> createSamplerMagFilterTests (tcu::TestContext& testCtx, VkImageViewType imageViewType, VkFormat imageFormat)
+{
+	MovePtr<tcu::TestCaseGroup> samplerMagFilterTests (new tcu::TestCaseGroup(testCtx, "mag_filter", "Tests for magnification filter"));
+
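+	// Linear filtering is not required for integer formats, so the linear case is only added for compressed and non-integer formats.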
+	if (isCompressedFormat(imageFormat) || (!isIntFormat(imageFormat) && !isUintFormat(imageFormat)))
+		samplerMagFilterTests->addChild(new SamplerMagFilterTest(testCtx, "linear", "Magnifies image using VK_FILTER_LINEAR", imageViewType, imageFormat, VK_FILTER_LINEAR));
+	samplerMagFilterTests->addChild(new SamplerMagFilterTest(testCtx, "nearest", "Magnifies image using VK_FILTER_NEAREST", imageViewType, imageFormat, VK_FILTER_NEAREST));
+
+	return samplerMagFilterTests;
+}
+
+MovePtr<tcu::TestCaseGroup> createSamplerMinFilterTests (tcu::TestContext& testCtx, VkImageViewType imageViewType, VkFormat imageFormat)
+{
+	MovePtr<tcu::TestCaseGroup> samplerMinFilterTests (new tcu::TestCaseGroup(testCtx, "min_filter", "Tests for minification filter"));
+
+	if (isCompressedFormat(imageFormat) || (!isIntFormat(imageFormat) && !isUintFormat(imageFormat)))
+		samplerMinFilterTests->addChild(new SamplerMinFilterTest(testCtx, "linear", "Minifies image using VK_FILTER_LINEAR", imageViewType, imageFormat, VK_FILTER_LINEAR));
+	samplerMinFilterTests->addChild(new SamplerMinFilterTest(testCtx, "nearest", "Minifies image using VK_FILTER_NEAREST", imageViewType, imageFormat, VK_FILTER_NEAREST));
+
+	return samplerMinFilterTests;
+}
+
+MovePtr<tcu::TestCaseGroup> createSamplerLodTests (tcu::TestContext& testCtx, VkImageViewType imageViewType, VkFormat imageFormat, VkSamplerMipmapMode mipmapMode)
+{
+	struct TestCaseConfig
+	{
+		const char*	name;
+		const char*	description;
+		float		minLod;
+		float		maxLod;
+		float		mipLodBias;
+		float		lod;
+	};
+
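+	// The sampled LOD is effectively clamp(lod + mipLodBias, minLod, maxLod); each configuration
+	// below isolates one of those inputs (equal min/max, min clamp, max clamp, bias).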
+	const TestCaseConfig testCaseConfigs[] =
+	{
+		{ "equal_min_3_max_3",		"minLod = 3, maxLod = 3, mipLodBias = 0, lod = 0",		3.0f, 3.0f, 0.0f, 0.0f },
+		{ "select_min_1",			"minLod = 1, maxLod = 5, mipLodBias = 0, lod = 0",		1.0f, 5.0f, 0.0f, 0.0f },
+		{ "select_max_4",			"minLod = 0, maxLod = 4, mipLodBias = 0, lod = 5",		0.0f, 4.0f, 0.0f, 5.0f },
+		{ "select_bias_2_1",		"minLod = 0, maxLod = 2.1, mipLodBias = 5.0, lod = 0",	0.0f, 2.1f, 5.0f, 0.0f },
+		{ "select_bias_2_5",		"minLod = 0, maxLod = 5, mipLodBias = 2.5, lod = 0",	0.0f, 5.0f, 2.5f, 0.00001f },
+		{ "select_bias_3_1",		"minLod = 0, maxLod = 5, mipLodBias = -0.9, lod = 4.0",	0.0f, 5.0f, -0.9f, 4.0f },
+		{ "select_bias_3_7",		"minLod = 0, maxLod = 5, mipLodBias = 3.0, lod = 0.7",	0.0f, 5.0f, 3.0f, 0.7f },
+	};
+
+	MovePtr<tcu::TestCaseGroup> samplerLodTests (new tcu::TestCaseGroup(testCtx, "lod", "Tests for sampler LOD"));
+
+	for (int configNdx = 0; configNdx < DE_LENGTH_OF_ARRAY(testCaseConfigs); configNdx++)
+	{
+		const TestCaseConfig& config = testCaseConfigs[configNdx];
+
+		samplerLodTests->addChild(new SamplerLodTest(testCtx, config.name, config.description, imageViewType, imageFormat, mipmapMode, config.minLod, config.maxLod, config.mipLodBias, config.lod));
+	}
+
+	return samplerLodTests;
+}
+
+MovePtr<tcu::TestCaseGroup> createSamplerMipmapTests (tcu::TestContext& testCtx, VkImageViewType imageViewType, VkFormat imageFormat)
+{
+	MovePtr<tcu::TestCaseGroup> samplerMipmapTests (new tcu::TestCaseGroup(testCtx, "mipmap", "Tests for mipmap modes"));
+
+	// Mipmap mode: nearest
+	MovePtr<tcu::TestCaseGroup> mipmapNearestTests (new tcu::TestCaseGroup(testCtx, "nearest", "Uses VK_SAMPLER_MIPMAP_MODE_NEAREST"));
+	mipmapNearestTests->addChild(createSamplerLodTests(testCtx, imageViewType, imageFormat, VK_SAMPLER_MIPMAP_MODE_NEAREST).release());
+	samplerMipmapTests->addChild(mipmapNearestTests.release());
+
+	// Mipmap mode: linear
+	if (isCompressedFormat(imageFormat) || (!isIntFormat(imageFormat) && !isUintFormat(imageFormat)))
+	{
+		MovePtr<tcu::TestCaseGroup> mipmapLinearTests (new tcu::TestCaseGroup(testCtx, "linear", "Uses VK_SAMPLER_MIPMAP_MODE_LINEAR"));
+		mipmapLinearTests->addChild(createSamplerLodTests(testCtx, imageViewType, imageFormat, VK_SAMPLER_MIPMAP_MODE_LINEAR).release());
+		samplerMipmapTests->addChild(mipmapLinearTests.release());
+	}
+
+	return samplerMipmapTests;
+}
+
+std::string getAddressModesCaseName (VkSamplerAddressMode u, VkSamplerAddressMode v, VkSamplerAddressMode w, BorderColor border)
+{
+	static const char* borderColorNames[BORDER_COLOR_COUNT] =
+	{
+		"opaque_black",
+		"opaque_white",
+		"transparent_black",
+	};
+
+	std::ostringstream caseName;
+
+	if (u == v && v == w)
+	{
+		const std::string fullName = getSamplerAddressModeName(u);
+		DE_ASSERT(de::beginsWith(fullName, "VK_SAMPLER_ADDRESS_"));
+
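+		// substr(19) drops the "VK_SAMPLER_ADDRESS_" prefix, producing case names such as
+		// "all_mode_repeat" or "all_mode_clamp_to_border_opaque_white".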
+		caseName << "all_";
+		caseName << de::toLower(fullName.substr(19));
+
+		if (u == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER)
+		{
+			caseName << "_" << borderColorNames[border];
+		}
+	}
+	else
+	{
+		const std::string fullNameU = getSamplerAddressModeName(u);
+		const std::string fullNameV = getSamplerAddressModeName(v);
+		const std::string fullNameW = getSamplerAddressModeName(w);
+
+		DE_ASSERT(de::beginsWith(fullNameU, "VK_SAMPLER_ADDRESS_"));
+		DE_ASSERT(de::beginsWith(fullNameV, "VK_SAMPLER_ADDRESS_"));
+		DE_ASSERT(de::beginsWith(fullNameW, "VK_SAMPLER_ADDRESS_"));
+
+		caseName << "uvw"
+				 << "_" << de::toLower(fullNameU.substr(19))
+				 << "_" << de::toLower(fullNameV.substr(19))
+				 << "_" << de::toLower(fullNameW.substr(19));
+	}
+
+	return caseName.str();
+}
+
+MovePtr<tcu::TestCaseGroup> createSamplerAddressModesTests (tcu::TestContext& testCtx, VkImageViewType imageViewType, VkFormat imageFormat)
+{
+	struct TestCaseConfig
+	{
+		VkSamplerAddressMode	u;
+		VkSamplerAddressMode	v;
+		VkSamplerAddressMode	w;
+		BorderColor				border;
+	};
+
+	const TestCaseConfig testCaseConfigs[] =
+	{
+		// All address modes equal
+		{ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,			BORDER_COLOR_TRANSPARENT_BLACK },
+		{ VK_SAMPLER_ADDRESS_MODE_REPEAT,				VK_SAMPLER_ADDRESS_MODE_REPEAT,					VK_SAMPLER_ADDRESS_MODE_REPEAT,					BORDER_COLOR_TRANSPARENT_BLACK },
+		{ VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		BORDER_COLOR_TRANSPARENT_BLACK },
+		{ VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	BORDER_COLOR_TRANSPARENT_BLACK },
+
+		// All address modes equal using border color
+		{ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		BORDER_COLOR_TRANSPARENT_BLACK },
+		{ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		BORDER_COLOR_OPAQUE_BLACK },
+		{ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		BORDER_COLOR_OPAQUE_WHITE },
+
+		// Pairwise combinations of address modes not covered by previous tests
+		{ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	VK_SAMPLER_ADDRESS_MODE_REPEAT,					BORDER_COLOR_OPAQUE_WHITE},
+		{ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_REPEAT,					VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,			BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		VK_SAMPLER_ADDRESS_MODE_REPEAT,					BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,			VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	VK_SAMPLER_ADDRESS_MODE_REPEAT,					VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,			BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_REPEAT,				VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		VK_SAMPLER_ADDRESS_MODE_REPEAT,					VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,			VK_SAMPLER_ADDRESS_MODE_REPEAT,					BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_REPEAT,				VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,			BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_REPEAT,				VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_REPEAT,				VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,			VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,			BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,		VK_SAMPLER_ADDRESS_MODE_REPEAT,					BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		VK_SAMPLER_ADDRESS_MODE_REPEAT,					VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,			BORDER_COLOR_OPAQUE_WHITE },
+		{ VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,	VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,		BORDER_COLOR_OPAQUE_WHITE },
+	};
+
+	MovePtr<tcu::TestCaseGroup> samplerAddressModesTests (new tcu::TestCaseGroup(testCtx, "address_modes", "Tests for address modes"));
+
+	for (int configNdx = 0; configNdx < DE_LENGTH_OF_ARRAY(testCaseConfigs); configNdx++)
+	{
+		const TestCaseConfig& config = testCaseConfigs[configNdx];
+
+		samplerAddressModesTests->addChild(new SamplerAddressModesTest(testCtx,
+																	   getAddressModesCaseName(config.u, config.v, config.w, config.border).c_str(),
+																	   "",
+																	   imageViewType,
+																	   imageFormat,
+																	   config.u, config.v, config.w,
+																	   getFormatBorderColor(config.border, imageFormat)));
+	}
+
+	return samplerAddressModesTests;
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createSamplerTests (tcu::TestContext& testCtx)
+{
+	const struct
+	{
+		VkImageViewType		type;
+		const char*			name;
+	}
+	imageViewTypes[] =
+	{
+		{ VK_IMAGE_VIEW_TYPE_1D,			"1d" },
+		{ VK_IMAGE_VIEW_TYPE_1D_ARRAY,		"1d_array" },
+		{ VK_IMAGE_VIEW_TYPE_2D,			"2d" },
+		{ VK_IMAGE_VIEW_TYPE_2D_ARRAY,		"2d_array" },
+		{ VK_IMAGE_VIEW_TYPE_3D,			"3d" },
+		{ VK_IMAGE_VIEW_TYPE_CUBE,			"cube" },
+		{ VK_IMAGE_VIEW_TYPE_CUBE_ARRAY,	"cube_array" }
+	};
+
+	const VkFormat formats[] =
+	{
+		// Packed formats
+		VK_FORMAT_R4G4_UNORM_PACK8,
+		VK_FORMAT_R4G4B4A4_UNORM_PACK16,
+		VK_FORMAT_R5G6B5_UNORM_PACK16,
+		VK_FORMAT_R5G5B5A1_UNORM_PACK16,
+		VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+		VK_FORMAT_A2R10G10B10_UINT_PACK32,
+		VK_FORMAT_B10G11R11_UFLOAT_PACK32,
+		VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
+		VK_FORMAT_B4G4R4A4_UNORM_PACK16,
+		VK_FORMAT_B5G5R5A1_UNORM_PACK16,
+
+		// Pairwise combinations of 8-bit channel formats, UNORM/SNORM/SINT/UINT/SRGB type x 1-to-4 channels x RGBA/BGRA order
+		VK_FORMAT_R8_SRGB,
+		VK_FORMAT_R8G8B8_UINT,
+		VK_FORMAT_B8G8R8A8_SINT,
+		VK_FORMAT_R8G8_UNORM,
+		VK_FORMAT_B8G8R8_SNORM,
+		VK_FORMAT_R8G8B8A8_SNORM,
+		VK_FORMAT_R8G8_UINT,
+		VK_FORMAT_R8_SINT,
+		VK_FORMAT_R8G8B8A8_SRGB,
+		VK_FORMAT_R8G8B8A8_UNORM,
+		VK_FORMAT_B8G8R8A8_UNORM,
+		VK_FORMAT_B8G8R8_SRGB,
+		VK_FORMAT_R8G8_SRGB,
+		VK_FORMAT_R8_UINT,
+		VK_FORMAT_R8G8B8A8_UINT,
+		VK_FORMAT_R8G8_SINT,
+		VK_FORMAT_R8_SNORM,
+		VK_FORMAT_B8G8R8_SINT,
+		VK_FORMAT_R8G8_SNORM,
+		VK_FORMAT_B8G8R8_UNORM,
+		VK_FORMAT_R8_UNORM,
+
+		// Pairwise combinations of 16/32-bit channel formats x SINT/UINT/SFLOAT type x 1-to-4 channels
+		VK_FORMAT_R32G32_SFLOAT,
+		VK_FORMAT_R32G32B32_UINT,
+		VK_FORMAT_R16G16B16A16_SFLOAT,
+		VK_FORMAT_R16G16_UINT,
+		VK_FORMAT_R32G32B32A32_SINT,
+		VK_FORMAT_R16G16B16_SINT,
+		VK_FORMAT_R16_SFLOAT,
+		VK_FORMAT_R32_SINT,
+		VK_FORMAT_R32_UINT,
+		VK_FORMAT_R16G16B16_SFLOAT,
+		VK_FORMAT_R16G16_SINT,
+
+		// Scaled formats
+		VK_FORMAT_R8G8B8A8_SSCALED,
+		VK_FORMAT_A2R10G10B10_USCALED_PACK32,
+
+		// Compressed formats
+		VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
+		VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
+		VK_FORMAT_EAC_R11_UNORM_BLOCK,
+		VK_FORMAT_EAC_R11_SNORM_BLOCK,
+		VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
+		VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
+		VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
+		VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
+		VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
+		VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
+		VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
+		VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
+		VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
+		VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
+		VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
+		VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
+	};
+
+	de::MovePtr<tcu::TestCaseGroup> samplerTests		(new tcu::TestCaseGroup(testCtx, "sampler", "Sampler tests"));
+	de::MovePtr<tcu::TestCaseGroup> viewTypeTests		(new tcu::TestCaseGroup(testCtx, "view_type", ""));
+
+	for (int viewTypeNdx = 0; viewTypeNdx < DE_LENGTH_OF_ARRAY(imageViewTypes); viewTypeNdx++)
+	{
+		const VkImageViewType			viewType		= imageViewTypes[viewTypeNdx].type;
+		de::MovePtr<tcu::TestCaseGroup>	viewTypeGroup	(new tcu::TestCaseGroup(testCtx, imageViewTypes[viewTypeNdx].name, (std::string("Uses a ") + imageViewTypes[viewTypeNdx].name + " view").c_str()));
+		de::MovePtr<tcu::TestCaseGroup>	formatTests		(new tcu::TestCaseGroup(testCtx, "format", "Tests samplable formats"));
+
+		for (int formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(formats); formatNdx++)
+		{
+			const VkFormat	format			= formats[formatNdx];
+			const bool		isCompressed	= isCompressedFormat(format);
+
+			if (isCompressed)
+			{
+				// Do not use compressed formats with 1D and 1D array textures.
+				// Compressed formats are last in the format list, so the remaining formats can be skipped entirely.
+				if (viewType == VK_IMAGE_VIEW_TYPE_1D || viewType == VK_IMAGE_VIEW_TYPE_1D_ARRAY)
+					break;
+			}
+
+			de::MovePtr<tcu::TestCaseGroup>	formatGroup	(new tcu::TestCaseGroup(testCtx,
+																				getFormatCaseName(format).c_str(),
+																				(std::string("Samples a texture of format ") + getFormatName(format)).c_str()));
+
+			if (!isCompressed)
+			{
+				// Do not include minFilter tests with compressed formats.
+				// Randomly generated compressed textures are too noisy and would result in false positives.
+				de::MovePtr<tcu::TestCaseGroup>	minFilterTests		= createSamplerMinFilterTests(testCtx, viewType, format);
+				formatGroup->addChild(minFilterTests.release());
+			}
+
+			de::MovePtr<tcu::TestCaseGroup>	magFilterTests		= createSamplerMagFilterTests(testCtx, viewType, format);
+			de::MovePtr<tcu::TestCaseGroup>	mipmapTests			= createSamplerMipmapTests(testCtx, viewType, format);
+
+			formatGroup->addChild(magFilterTests.release());
+			formatGroup->addChild(mipmapTests.release());
+
+			if (viewType != VK_IMAGE_VIEW_TYPE_CUBE && viewType != VK_IMAGE_VIEW_TYPE_CUBE_ARRAY)
+			{
+				de::MovePtr<tcu::TestCaseGroup>	addressModesTests	= createSamplerAddressModesTests(testCtx, viewType, format);
+				formatGroup->addChild(addressModesTests.release());
+			}
+
+			formatTests->addChild(formatGroup.release());
+		}
+
+		viewTypeGroup->addChild(formatTests.release());
+		viewTypeTests->addChild(viewTypeGroup.release());
+	}
+
+	samplerTests->addChild(viewTypeTests.release());
+
+	return samplerTests.release();
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineSamplerTests.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineSamplerTests.hpp
new file mode 100644
index 0000000..223bce2
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineSamplerTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTPIPELINESAMPLERTESTS_HPP
+#define _VKTPIPELINESAMPLERTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Sampler Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+tcu::TestCaseGroup* createSamplerTests (tcu::TestContext& testCtx);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINESAMPLERTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineStencilTests.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineStencilTests.cpp
new file mode 100644
index 0000000..e8ff69a
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineStencilTests.cpp
@@ -0,0 +1,1150 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Stencil Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineStencilTests.hpp"
+#include "vktPipelineClearUtil.hpp"
+#include "vktPipelineImageUtil.hpp"
+#include "vktPipelineVertexUtil.hpp"
+#include "vktPipelineReferenceRenderer.hpp"
+#include "vktPipelineUniqueRandomIterator.hpp"
+#include "vktTestCase.hpp"
+#include "vkImageUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "tcuImageCompare.hpp"
+#include "deMemory.h"
+#include "deRandom.hpp"
+#include "deStringUtil.hpp"
+#include "deUniquePtr.hpp"
+
+#include <algorithm>
+#include <sstream>
+#include <vector>
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+
+namespace
+{
+
+bool isSupportedDepthStencilFormat (const InstanceInterface& instanceInterface, VkPhysicalDevice device, VkFormat format)
+{
+	VkFormatProperties formatProps;
+
+	instanceInterface.getPhysicalDeviceFormatProperties(device, format, &formatProps);
+
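+	// Only optimal tiling is checked here, since the stencil attachment is created with VK_IMAGE_TILING_OPTIMAL.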
+	return (formatProps.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) != 0;
+}
+
+class StencilOpStateUniqueRandomIterator : public UniqueRandomIterator<VkStencilOpState>
+{
+public:
+								StencilOpStateUniqueRandomIterator	(int seed);
+	virtual						~StencilOpStateUniqueRandomIterator	(void) {}
+	virtual VkStencilOpState	getIndexedValue (deUint32 index);
+
+private:
+	const static VkStencilOp	m_stencilOps[];
+	const static VkCompareOp	m_compareOps[];
+
+	// Pre-calculated constants
+	const static deUint32		m_stencilOpsLength;
+	const static deUint32		m_stencilOpsLength2;
+	const static deUint32		m_stencilOpsLength3;
+	const static deUint32		m_compareOpsLength;
+
+	// Total number of cross-combinations of (stencilFailOp x stencilPassOp x stencilDepthFailOp x stencilCompareOp)
+	const static deUint32		m_totalStencilOpStates;
+};
+
+
+class StencilTest : public vkt::TestCase
+{
+public:
+	enum
+	{
+		QUAD_COUNT = 4
+	};
+
+	struct StencilStateConfig
+	{
+		deUint32	frontReadMask;
+		deUint32	frontWriteMask;
+		deUint32	frontRef;
+
+		deUint32	backReadMask;
+		deUint32	backWriteMask;
+		deUint32	backRef;
+	};
+
+	const static StencilStateConfig			s_stencilStateConfigs[QUAD_COUNT];
+	const static float						s_quadDepths[QUAD_COUNT];
+
+
+											StencilTest				(tcu::TestContext&			testContext,
+																	 const std::string&			name,
+																	 const std::string&			description,
+																	 VkFormat					stencilFormat,
+																	 const VkStencilOpState&	stencilOpStateFront,
+																	 const VkStencilOpState&	stencilOpStateBack);
+	virtual									~StencilTest			(void);
+	virtual void							initPrograms			(SourceCollections& sourceCollections) const;
+	virtual TestInstance*					createInstance			(Context& context) const;
+
+private:
+	VkFormat								m_stencilFormat;
+	const VkStencilOpState					m_stencilOpStateFront;
+	const VkStencilOpState					m_stencilOpStateBack;
+};
+
+class StencilTestInstance : public vkt::TestInstance
+{
+public:
+										StencilTestInstance		(Context&					context,
+																 VkFormat					stencilFormat,
+																 const VkStencilOpState&	stencilOpStatesFront,
+																 const VkStencilOpState&	stencilOpStatesBack);
+	virtual								~StencilTestInstance	(void);
+	virtual tcu::TestStatus				iterate					(void);
+
+private:
+	tcu::TestStatus						verifyImage				(void);
+
+	VkStencilOpState					m_stencilOpStateFront;
+	VkStencilOpState					m_stencilOpStateBack;
+	const tcu::UVec2					m_renderSize;
+	const VkFormat						m_colorFormat;
+	const VkFormat						m_stencilFormat;
+
+	VkImageCreateInfo					m_colorImageCreateInfo;
+	Move<VkImage>						m_colorImage;
+	de::MovePtr<Allocation>				m_colorImageAlloc;
+	Move<VkImage>						m_stencilImage;
+	de::MovePtr<Allocation>				m_stencilImageAlloc;
+	Move<VkImageView>					m_colorAttachmentView;
+	Move<VkImageView>					m_stencilAttachmentView;
+	Move<VkRenderPass>					m_renderPass;
+	Move<VkFramebuffer>					m_framebuffer;
+
+	Move<VkShaderModule>				m_vertexShaderModule;
+	Move<VkShaderModule>				m_fragmentShaderModule;
+
+	Move<VkBuffer>						m_vertexBuffer;
+	std::vector<Vertex4RGBA>			m_vertices;
+	de::MovePtr<Allocation>				m_vertexBufferAlloc;
+
+	Move<VkPipelineLayout>				m_pipelineLayout;
+	Move<VkPipeline>					m_graphicsPipelines[StencilTest::QUAD_COUNT];
+
+	Move<VkCommandPool>					m_cmdPool;
+	Move<VkCommandBuffer>				m_cmdBuffer;
+
+	Move<VkFence>						m_fence;
+};
+
+
+// StencilOpStateUniqueRandomIterator
+
+const VkStencilOp StencilOpStateUniqueRandomIterator::m_stencilOps[] =
+{
+	VK_STENCIL_OP_KEEP,
+	VK_STENCIL_OP_ZERO,
+	VK_STENCIL_OP_REPLACE,
+	VK_STENCIL_OP_INCREMENT_AND_CLAMP,
+	VK_STENCIL_OP_DECREMENT_AND_CLAMP,
+	VK_STENCIL_OP_INVERT,
+	VK_STENCIL_OP_INCREMENT_AND_WRAP,
+	VK_STENCIL_OP_DECREMENT_AND_WRAP
+};
+
+const VkCompareOp StencilOpStateUniqueRandomIterator::m_compareOps[] =
+{
+	VK_COMPARE_OP_NEVER,
+	VK_COMPARE_OP_LESS,
+	VK_COMPARE_OP_EQUAL,
+	VK_COMPARE_OP_LESS_OR_EQUAL,
+	VK_COMPARE_OP_GREATER,
+	VK_COMPARE_OP_NOT_EQUAL,
+	VK_COMPARE_OP_GREATER_OR_EQUAL,
+	VK_COMPARE_OP_ALWAYS
+};
+
+const deUint32 StencilOpStateUniqueRandomIterator::m_stencilOpsLength		= DE_LENGTH_OF_ARRAY(m_stencilOps);
+const deUint32 StencilOpStateUniqueRandomIterator::m_stencilOpsLength2		= m_stencilOpsLength * m_stencilOpsLength;
+const deUint32 StencilOpStateUniqueRandomIterator::m_stencilOpsLength3		= m_stencilOpsLength2 * m_stencilOpsLength;
+const deUint32 StencilOpStateUniqueRandomIterator::m_compareOpsLength		= DE_LENGTH_OF_ARRAY(m_compareOps);
+const deUint32 StencilOpStateUniqueRandomIterator::m_totalStencilOpStates	= m_stencilOpsLength3 * m_compareOpsLength;
+
+StencilOpStateUniqueRandomIterator::StencilOpStateUniqueRandomIterator (int seed)
+	: UniqueRandomIterator<VkStencilOpState>(m_totalStencilOpStates, m_totalStencilOpStates, seed)
+{
+}
+
+VkStencilOpState StencilOpStateUniqueRandomIterator::getIndexedValue (deUint32 index)
+{
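+	// Decode the flat index as a mixed-radix number: the most significant digit selects the
+	// compare op and the remaining three digits select depthFailOp, passOp and failOp.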
+	const deUint32 stencilCompareOpIndex = index / m_stencilOpsLength3;
+	const deUint32 stencilCompareOpSeqIndex = stencilCompareOpIndex * m_stencilOpsLength3;
+
+	const deUint32 stencilDepthFailOpIndex = (index - stencilCompareOpSeqIndex) / m_stencilOpsLength2;
+	const deUint32 stencilDepthFailOpSeqIndex = stencilDepthFailOpIndex * m_stencilOpsLength2;
+
+	const deUint32 stencilPassOpIndex = (index - stencilCompareOpSeqIndex - stencilDepthFailOpSeqIndex) / m_stencilOpsLength;
+	const deUint32 stencilPassOpSeqIndex = stencilPassOpIndex * m_stencilOpsLength;
+
+	const deUint32 stencilFailOpIndex = index - stencilCompareOpSeqIndex - stencilDepthFailOpSeqIndex - stencilPassOpSeqIndex;
+
+	const VkStencilOpState stencilOpState =
+	{
+		m_stencilOps[stencilFailOpIndex],		// VkStencilOp	failOp;
+		m_stencilOps[stencilPassOpIndex],		// VkStencilOp	passOp;
+		m_stencilOps[stencilDepthFailOpIndex],	// VkStencilOp	depthFailOp;
+		m_compareOps[stencilCompareOpIndex],	// VkCompareOp	compareOp;
+		0x0,									// deUint32		compareMask;
+		0x0,									// deUint32		writeMask;
+		0x0										// deUint32		reference;
+	};
+
+	return stencilOpState;
+}
+
+
+// StencilTest
+
+const StencilTest::StencilStateConfig StencilTest::s_stencilStateConfigs[QUAD_COUNT] =
+{
+	//	frontReadMask	frontWriteMask		frontRef		backReadMask	backWriteMask	backRef
+	{	0xFF,			0xFF,				0xAB,			0xF0,			0xFF,			0xFF	},
+	{	0xFF,			0xF0,				0xCD,			0xF0,			0xF0,			0xEF	},
+	{	0xF0,			0x0F,				0xEF,			0xFF,			0x0F,			0xCD	},
+	{	0xF0,			0x01,				0xFF,			0xFF,			0x01,			0xAB	}
+};
+
+const float StencilTest::s_quadDepths[QUAD_COUNT] =
+{
+	0.1f,
+	0.0f,
+	0.3f,
+	0.2f
+};
+
+StencilTest::StencilTest (tcu::TestContext&			testContext,
+						  const std::string&		name,
+						  const std::string&		description,
+						  VkFormat					stencilFormat,
+						  const VkStencilOpState&	stencilOpStateFront,
+						  const VkStencilOpState&	stencilOpStateBack)
+	: vkt::TestCase			(testContext, name, description)
+	, m_stencilFormat		(stencilFormat)
+	, m_stencilOpStateFront	(stencilOpStateFront)
+	, m_stencilOpStateBack	(stencilOpStateBack)
+{
+}
+
+StencilTest::~StencilTest (void)
+{
+}
+
+TestInstance* StencilTest::createInstance (Context& context) const
+{
+	return new StencilTestInstance(context, m_stencilFormat, m_stencilOpStateFront, m_stencilOpStateBack);
+}
+
+void StencilTest::initPrograms (SourceCollections& sourceCollections) const
+{
+	sourceCollections.glslSources.add("color_vert") << glu::VertexSource(
+		"#version 310 es\n"
+		"layout(location = 0) in vec4 position;\n"
+		"layout(location = 1) in vec4 color;\n"
+		"layout(location = 0) out highp vec4 vtxColor;\n"
+		"void main (void)\n"
+		"{\n"
+		"	gl_Position = position;\n"
+		"	vtxColor = color;\n"
+		"}\n");
+
+	sourceCollections.glslSources.add("color_frag") << glu::FragmentSource(
+		"#version 310 es\n"
+		"layout(location = 0) in highp vec4 vtxColor;\n"
+		"layout(location = 0) out highp vec4 fragColor;\n"
+		"void main (void)\n"
+		"{\n"
+		"	fragColor = vtxColor;\n"
+		"}\n");
+}
+
+
+// StencilTestInstance
+
+StencilTestInstance::StencilTestInstance (Context&					context,
+										  VkFormat					stencilFormat,
+										  const VkStencilOpState&	stencilOpStateFront,
+										  const VkStencilOpState&	stencilOpStateBack)
+	: vkt::TestInstance		(context)
+	, m_stencilOpStateFront	(stencilOpStateFront)
+	, m_stencilOpStateBack	(stencilOpStateBack)
+	, m_renderSize			(32, 32)
+	, m_colorFormat			(VK_FORMAT_R8G8B8A8_UNORM)
+	, m_stencilFormat		(stencilFormat)
+{
+	const DeviceInterface&		vk						= context.getDeviceInterface();
+	const VkDevice				vkDevice				= context.getDevice();
+	const deUint32				queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc				(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+	const VkComponentMapping	componentMappingRGBA	= { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A };
+
+	// Create color image
+	{
+		const VkImageCreateInfo colorImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,										// VkStructureType			sType;
+			DE_NULL,																	// const void*				pNext;
+			0u,																			// VkImageCreateFlags		flags;
+			VK_IMAGE_TYPE_2D,															// VkImageType				imageType;
+			m_colorFormat,																// VkFormat					format;
+			{ m_renderSize.x(), m_renderSize.y(), 1u },									// VkExtent3D				extent;
+			1u,																			// deUint32					mipLevels;
+			1u,																			// deUint32					arrayLayers;
+			VK_SAMPLE_COUNT_1_BIT,														// VkSampleCountFlagBits	samples;
+			VK_IMAGE_TILING_OPTIMAL,													// VkImageTiling			tiling;
+			VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,		// VkImageUsageFlags		usage;
+			VK_SHARING_MODE_EXCLUSIVE,													// VkSharingMode			sharingMode;
+			1u,																			// deUint32					queueFamilyIndexCount;
+			&queueFamilyIndex,															// const deUint32*			pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED													// VkImageLayout			initialLayout;
+		};
+
+		m_colorImageCreateInfo	= colorImageParams;
+		m_colorImage			= createImage(vk, vkDevice, &m_colorImageCreateInfo);
+
+		// Allocate and bind color image memory
+		m_colorImageAlloc		= memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_colorImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_colorImage, m_colorImageAlloc->getMemory(), m_colorImageAlloc->getOffset()));
+	}
+
+	// Create stencil image
+	{
+		// Check format support
+		if (!isSupportedDepthStencilFormat(context.getInstanceInterface(), context.getPhysicalDevice(), m_stencilFormat))
+			throw tcu::NotSupportedError(std::string("Unsupported depth/stencil format: ") + getFormatName(m_stencilFormat));
+
+		const VkImageCreateInfo stencilImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			0u,												// VkImageCreateFlags		flags;
+			VK_IMAGE_TYPE_2D,								// VkImageType				imageType;
+			m_stencilFormat,								// VkFormat					format;
+			{ m_renderSize.x(), m_renderSize.y(), 1u },		// VkExtent3D				extent;
+			1u,												// deUint32					mipLevels;
+			1u,												// deUint32					arrayLayers;
+			VK_SAMPLE_COUNT_1_BIT,							// VkSampleCountFlagBits	samples;
+			VK_IMAGE_TILING_OPTIMAL,						// VkImageTiling			tiling;
+			VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,	// VkImageUsageFlags		usage;
+			VK_SHARING_MODE_EXCLUSIVE,						// VkSharingMode			sharingMode;
+			1u,												// deUint32					queueFamilyIndexCount;
+			&queueFamilyIndex,								// const deUint32*			pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED						// VkImageLayout			initialLayout;
+		};
+
+		m_stencilImage = createImage(vk, vkDevice, &stencilImageParams);
+
+		// Allocate and bind stencil image memory
+		m_stencilImageAlloc = memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_stencilImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_stencilImage, m_stencilImageAlloc->getMemory(), m_stencilImageAlloc->getOffset()));
+	}
+
+	// Create color attachment view
+	{
+		const VkImageViewCreateInfo colorAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			0u,													// VkImageViewCreateFlags	flags;
+			*m_colorImage,										// VkImage					image;
+			VK_IMAGE_VIEW_TYPE_2D,								// VkImageViewType			viewType;
+			m_colorFormat,										// VkFormat					format;
+			componentMappingRGBA,								// VkComponentMapping		components;
+			{ VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u }		// VkImageSubresourceRange	subresourceRange;
+		};
+
+		m_colorAttachmentView = createImageView(vk, vkDevice, &colorAttachmentViewParams);
+	}
+
+	// Create stencil attachment view
+	{
+		const VkImageViewCreateInfo stencilAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			0u,													// VkImageViewCreateFlags	flags;
+			*m_stencilImage,									// VkImage					image;
+			VK_IMAGE_VIEW_TYPE_2D,								// VkImageViewType			viewType;
+			m_stencilFormat,									// VkFormat					format;
+			componentMappingRGBA,								// VkComponentMapping		components;
+			{ VK_IMAGE_ASPECT_STENCIL_BIT, 0u, 1u, 0u, 1u },	// VkImageSubresourceRange	subresourceRange;
+		};
+
+		m_stencilAttachmentView = createImageView(vk, vkDevice, &stencilAttachmentViewParams);
+	}
+
+	// Create render pass
+	{
+		const VkAttachmentDescription colorAttachmentDescription =
+		{
+			0u,													// VkAttachmentDescriptionFlags	flags;
+			m_colorFormat,										// VkFormat						format;
+			VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits		samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp			loadOp;
+			VK_ATTACHMENT_STORE_OP_STORE,						// VkAttachmentStoreOp			storeOp;
+			VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// VkAttachmentLoadOp			stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp			stencilStoreOp;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout				initialLayout;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout				finalLayout;
+		};
+
+		const VkAttachmentDescription stencilAttachmentDescription =
+		{
+			0u,													// VkAttachmentDescriptionFlags	flags;
+			m_stencilFormat,									// VkFormat						format;
+			VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits		samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp			loadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp			storeOp;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp			stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp			stencilStoreOp;
+			VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,	// VkImageLayout				initialLayout;
+			VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,	// VkImageLayout				finalLayout;
+		};
+
+		const VkAttachmentDescription attachments[2] =
+		{
+			colorAttachmentDescription,
+			stencilAttachmentDescription
+		};
+
+		const VkAttachmentReference colorAttachmentReference =
+		{
+			0u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout	layout;
+		};
+
+		const VkAttachmentReference stencilAttachmentReference =
+		{
+			1u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL	// VkImageLayout	layout;
+		};
+
+		const VkSubpassDescription subpassDescription =
+		{
+			0u,													// VkSubpassDescriptionFlags	flags;
+			VK_PIPELINE_BIND_POINT_GRAPHICS,					// VkPipelineBindPoint			pipelineBindPoint;
+			0u,													// deUint32						inputAttachmentCount;
+			DE_NULL,											// const VkAttachmentReference*	pInputAttachments;
+			1u,													// deUint32						colorAttachmentCount;
+			&colorAttachmentReference,							// const VkAttachmentReference*	pColorAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pResolveAttachments;
+			&stencilAttachmentReference,						// const VkAttachmentReference*	pDepthStencilAttachment;
+			0u,													// deUint32						preserveAttachmentCount;
+			DE_NULL												// const VkAttachmentReference*	pPreserveAttachments;
+		};
+
+		const VkRenderPassCreateInfo renderPassParams =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,			// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkRenderPassCreateFlags			flags;
+			2u,													// deUint32							attachmentCount;
+			attachments,										// const VkAttachmentDescription*	pAttachments;
+			1u,													// deUint32							subpassCount;
+			&subpassDescription,								// const VkSubpassDescription*		pSubpasses;
+			0u,													// deUint32							dependencyCount;
+			DE_NULL												// const VkSubpassDependency*		pDependencies;
+		};
+
+		m_renderPass = createRenderPass(vk, vkDevice, &renderPassParams);
+	}
+
+	// Create framebuffer
+	{
+		const VkImageView attachmentBindInfos[2] = { *m_colorAttachmentView, *m_stencilAttachmentView };
+
+		const VkFramebufferCreateInfo framebufferParams =
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			0u,													// VkFramebufferCreateFlags	flags;
+			*m_renderPass,										// VkRenderPass				renderPass;
+			2u,													// deUint32					attachmentCount;
+			attachmentBindInfos,								// const VkImageView*		pAttachments;
+			(deUint32)m_renderSize.x(),							// deUint32					width;
+			(deUint32)m_renderSize.y(),							// deUint32					height;
+			1u													// deUint32					layers;
+		};
+
+		m_framebuffer = createFramebuffer(vk, vkDevice, &framebufferParams);
+	}
+
+	// Create pipeline layout
+	{
+		const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			0u,													// VkPipelineLayoutCreateFlags	flags;
+			0u,													// deUint32						setLayoutCount;
+			DE_NULL,											// const VkDescriptorSetLayout*	pSetLayouts;
+			0u,													// deUint32						pushConstantRangeCount;
+			DE_NULL												// const VkPushConstantRange*	pPushConstantRanges;
+		};
+
+		m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	m_vertexShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("color_vert"), 0);
+	m_fragmentShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("color_frag"), 0);
+
+	// Create pipeline
+	{
+		const VkPipelineShaderStageCreateInfo shaderStages[2] =
+		{
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				0u,															// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_VERTEX_BIT,									// VkShaderStageFlagBits				stage;
+				*m_vertexShaderModule,										// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			},
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				0u,															// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_FRAGMENT_BIT,								// VkShaderStageFlagBits				stage;
+				*m_fragmentShaderModule,									// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			}
+		};
+
+		const VkVertexInputBindingDescription vertexInputBindingDescription =
+		{
+			0u,										// deUint32					binding;
+			sizeof(Vertex4RGBA),					// deUint32				stride;
+			VK_VERTEX_INPUT_RATE_VERTEX				// VkVertexInputRate		inputRate;
+		};
+
+		const VkVertexInputAttributeDescription vertexInputAttributeDescriptions[2] =
+		{
+			{
+				0u,									// deUint32	location;
+				0u,									// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat	format;
+				0u									// deUint32	offset;
+			},
+			{
+				1u,									// deUint32	location;
+				0u,									// deUint32	binding;
+				VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat	format;
+				DE_OFFSET_OF(Vertex4RGBA, color),	// deUint32	offset;
+			}
+		};
+
+		const VkPipelineVertexInputStateCreateInfo vertexInputStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineVertexInputStateCreateFlags	flags;
+			1u,																// deUint32									vertexBindingDescriptionCount;
+			&vertexInputBindingDescription,									// const VkVertexInputBindingDescription*	pVertexBindingDescriptions;
+			2u,																// deUint32									vertexAttributeDescriptionCount;
+			vertexInputAttributeDescriptions								// const VkVertexInputAttributeDescription*	pVertexAttributeDescriptions;
+		};
+
+		const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineInputAssemblyStateCreateFlags	flags;
+			VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,							// VkPrimitiveTopology						topology;
+			false															// VkBool32									primitiveRestartEnable;
+		};
+
+		const VkViewport viewport =
+		{
+			0.0f,						// float	x;
+			0.0f,						// float	y;
+			(float)m_renderSize.x(),	// float	width;
+			(float)m_renderSize.y(),	// float	height;
+			0.0f,						// float	minDepth;
+			1.0f						// float	maxDepth;
+		};
+
+		const VkRect2D scissor = { { 0, 0 }, { m_renderSize.x(), m_renderSize.y() } };
+
+		const VkPipelineViewportStateCreateInfo viewportStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,			// VkStructureType						sType;
+			DE_NULL,														// const void*							pNext;
+			0u,																// VkPipelineViewportStateCreateFlags	flags;
+			1u,																// deUint32								viewportCount;
+			&viewport,														// const VkViewport*					pViewports;
+			1u,																// deUint32								scissorCount;
+			&scissor,														// const VkRect2D*						pScissors;
+		};
+
+		const VkPipelineRasterizationStateCreateInfo rasterStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineRasterizationStateCreateFlags	flags;
+			false,															// VkBool32									depthClampEnable;
+			false,															// VkBool32									rasterizerDiscardEnable;
+			VK_POLYGON_MODE_FILL,											// VkPolygonMode							polygonMode;
+			VK_CULL_MODE_NONE,												// VkCullModeFlags							cullMode;
+			VK_FRONT_FACE_COUNTER_CLOCKWISE,								// VkFrontFace								frontFace;
+			false,															// VkBool32									depthBiasEnable;
+			0.0f,															// float									depthBiasConstantFactor;
+			0.0f,															// float									depthBiasClamp;
+			0.0f,															// float									depthBiasSlopeFactor;
+			1.0f															// float									lineWidth;
+		};
+
+		const VkPipelineColorBlendAttachmentState colorBlendAttachmentState =
+		{
+			false,															// VkBool32					blendEnable;
+			VK_BLEND_FACTOR_ONE,											// VkBlendFactor			srcColorBlendFactor;
+			VK_BLEND_FACTOR_ZERO,											// VkBlendFactor			dstColorBlendFactor;
+			VK_BLEND_OP_ADD,												// VkBlendOp				colorBlendOp;
+			VK_BLEND_FACTOR_ONE,											// VkBlendFactor			srcAlphaBlendFactor;
+			VK_BLEND_FACTOR_ZERO,											// VkBlendFactor			dstAlphaBlendFactor;
+			VK_BLEND_OP_ADD,												// VkBlendOp				alphaBlendOp;
+			VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |			// VkColorComponentFlags	colorWriteMask;
+				VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT
+		};
+
+		const VkPipelineColorBlendStateCreateInfo colorBlendStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	// VkStructureType								sType;
+			DE_NULL,													// const void*									pNext;
+			0u,															// VkPipelineColorBlendStateCreateFlags			flags;
+			false,														// VkBool32										logicOpEnable;
+			VK_LOGIC_OP_COPY,											// VkLogicOp									logicOp;
+			1u,															// deUint32										attachmentCount;
+			&colorBlendAttachmentState,									// const VkPipelineColorBlendAttachmentState*	pAttachments;
+			{ 0.0f, 0.0f, 0.0f, 0.0f }									// float										blendConstants[4];
+		};
+
+		const VkPipelineMultisampleStateCreateInfo multisampleStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+			VK_SAMPLE_COUNT_1_BIT,										// VkSampleCountFlagBits					rasterizationSamples;
+			false,														// VkBool32									sampleShadingEnable;
+			0.0f,														// float									minSampleShading;
+			DE_NULL,													// const VkSampleMask*						pSampleMask;
+			false,														// VkBool32									alphaToCoverageEnable;
+			false														// VkBool32									alphaToOneEnable;
+		};
+
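+		// Enable depth test/write only for combined depth/stencil formats; a pure stencil
+		// format (channel order S) has no depth aspect to test against.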
+		const bool isDepthEnabled = (vk::mapVkFormat(m_stencilFormat).order != tcu::TextureFormat::S);
+
+		VkPipelineDepthStencilStateCreateInfo depthStencilStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineDepthStencilStateCreateFlags	flags;
+			isDepthEnabled,												// VkBool32									depthTestEnable;
+			isDepthEnabled,												// VkBool32									depthWriteEnable;
+			VK_COMPARE_OP_LESS,											// VkCompareOp								depthCompareOp;
+			false,														// VkBool32									depthBoundsTestEnable;
+			true,														// VkBool32									stencilTestEnable;
+			m_stencilOpStateFront,										// VkStencilOpState							front;
+			m_stencilOpStateBack,										// VkStencilOpState							back;
+			-1.0f,														// float									minDepthBounds;
+			+1.0f														// float									maxDepthBounds;
+		};
+
+		const VkPipelineDynamicStateCreateInfo dynamicStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,		// VkStructureType						sType;
+			DE_NULL,													// const void*							pNext;
+			0u,															// VkPipelineDynamicStateCreateFlags	flags;
+			0u,															// deUint32								dynamicStateCount;
+			DE_NULL														// const VkDynamicState*				pDynamicStates;
+		};
+
+		const VkGraphicsPipelineCreateInfo graphicsPipelineParams =
+		{
+			VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,	// VkStructureType									sType;
+			DE_NULL,											// const void*										pNext;
+			0u,													// VkPipelineCreateFlags							flags;
+			2u,													// deUint32											stageCount;
+			shaderStages,										// const VkPipelineShaderStageCreateInfo*			pStages;
+			&vertexInputStateParams,							// const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+			&inputAssemblyStateParams,							// const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+			DE_NULL,											// const VkPipelineTessellationStateCreateInfo*		pTessellationState;
+			&viewportStateParams,								// const VkPipelineViewportStateCreateInfo*			pViewportState;
+			&rasterStateParams,									// const VkPipelineRasterizationStateCreateInfo*	pRasterizationState;
+			&multisampleStateParams,							// const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+			&depthStencilStateParams,							// const VkPipelineDepthStencilStateCreateInfo*		pDepthStencilState;
+			&colorBlendStateParams,								// const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+			&dynamicStateParams,								// const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+			*m_pipelineLayout,									// VkPipelineLayout									layout;
+			*m_renderPass,										// VkRenderPass										renderPass;
+			0u,													// deUint32											subpass;
+			0u,													// VkPipeline										basePipelineHandle;
+			0u													// deInt32											basePipelineIndex;
+		};
+
+		// Setup different stencil masks and refs in each quad
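+		// graphicsPipelineParams references depthStencilStateParams by pointer, so the per-quad
+		// front/back values written below are picked up by each pipeline creation call.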
+		for (int quadNdx = 0; quadNdx < StencilTest::QUAD_COUNT; quadNdx++)
+		{
+			const StencilTest::StencilStateConfig&	config	= StencilTest::s_stencilStateConfigs[quadNdx];
+			VkStencilOpState&						front	= depthStencilStateParams.front;
+			VkStencilOpState&						back	= depthStencilStateParams.back;
+
+			front.compareMask	= config.frontReadMask;
+			front.writeMask		= config.frontWriteMask;
+			front.reference		= config.frontRef;
+
+			back.compareMask	= config.backReadMask;
+			back.writeMask		= config.backWriteMask;
+			back.reference		= config.backRef;
+
+			m_graphicsPipelines[quadNdx] = createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+		}
+	}
+
+
+	// Create vertex buffer
+	{
+		const VkBufferCreateInfo vertexBufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			1024u,										// VkDeviceSize			size;
+			VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_vertices			= createOverlappingQuads();
+		m_vertexBuffer		= createBuffer(vk, vkDevice, &vertexBufferParams);
+		m_vertexBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_vertexBuffer), MemoryRequirement::HostVisible);
+
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_vertexBuffer, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset()));
+
+		// Adjust depths
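+		// Each quad gets a distinct depth so that, with combined depth/stencil formats, the depth
+		// test outcome varies between the overlapping quads and the depthFailOp paths are exercised too.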
+		for (int quadNdx = 0; quadNdx < 4; quadNdx++)
+			for (int vertexNdx = 0; vertexNdx < 6; vertexNdx++)
+				m_vertices[quadNdx * 6 + vertexNdx].position.z() = StencilTest::s_quadDepths[quadNdx];
+
+		// Load vertices into vertex buffer
+		deMemcpy(m_vertexBufferAlloc->getHostPtr(), m_vertices.data(), m_vertices.size() * sizeof(Vertex4RGBA));
+
+		const VkMappedMemoryRange flushRange =
+		{
+				VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,	// VkStructureType	sType;
+				DE_NULL,								// const void*		pNext;
+				m_vertexBufferAlloc->getMemory(),		// VkDeviceMemory	memory;
+				m_vertexBufferAlloc->getOffset(),		// VkDeviceSize		offset;
+				vertexBufferParams.size					// VkDeviceSize		size;
+		};
+
+		vk.flushMappedMemoryRanges(vkDevice, 1, &flushRange);
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCommandPoolCreateFlags	flags;
+			queueFamilyIndex,								// deUint32					queueFamilyIndex;
+		};
+
+		m_cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType		sType;
+			DE_NULL,										// const void*			pNext;
+			*m_cmdPool,										// VkCommandPool		commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel	level;
+			1u												// deUint32				bufferCount;
+		};
+
+		const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+			DE_NULL,										// const void*						pNext;
+			0u,												// VkCommandBufferUsageFlags		flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,	// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+		};
+
+		const VkClearValue attachmentClearValues[2] =
+		{
+			defaultClearValue(m_colorFormat),
+			defaultClearValue(m_stencilFormat)
+		};
+
+		const VkRenderPassBeginInfo renderPassBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,				// VkStructureType		sType;
+			DE_NULL,												// const void*			pNext;
+			*m_renderPass,											// VkRenderPass			renderPass;
+			*m_framebuffer,											// VkFramebuffer		framebuffer;
+			{ { 0, 0 } , { m_renderSize.x(), m_renderSize.y() } },	// VkRect2D				renderArea;
+			2,														// deUint32				clearValueCount;
+			attachmentClearValues									// const VkClearValue*	pClearValues;
+		};
+
+		m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
+
+		VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+		vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+		const VkDeviceSize		quadOffset		= (m_vertices.size() / StencilTest::QUAD_COUNT) * sizeof(Vertex4RGBA);
+
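+		// Each quad occupies a contiguous six-vertex slice of the vertex buffer; bind the buffer
+		// at that quad's byte offset and draw the slice with its dedicated pipeline.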
+		for (int quadNdx = 0; quadNdx < StencilTest::QUAD_COUNT; quadNdx++)
+		{
+			VkDeviceSize vertexBufferOffset = quadOffset * quadNdx;
+
+			vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipelines[quadNdx]);
+			vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, 1, &m_vertexBuffer.get(), &vertexBufferOffset);
+			vk.cmdDraw(*m_cmdBuffer, (deUint32)(m_vertices.size() / StencilTest::QUAD_COUNT), 1, 0, 0);
+		}
+
+		vk.cmdEndRenderPass(*m_cmdBuffer);
+		VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+
+		m_fence = createFence(vk, vkDevice, &fenceParams);
+	}
+}
+
+StencilTestInstance::~StencilTestInstance (void)
+{
+}
+
+tcu::TestStatus StencilTestInstance::iterate (void)
+{
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const VkQueue				queue		= m_context.getUniversalQueue();
+	const VkSubmitInfo			submitInfo	=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity */));
+
+	return verifyImage();
+}
+
+tcu::TestStatus StencilTestInstance::verifyImage (void)
+{
+	const tcu::TextureFormat	tcuColorFormat		= mapVkFormat(m_colorFormat);
+	const tcu::TextureFormat	tcuStencilFormat	= mapVkFormat(m_stencilFormat);
+	const ColorVertexShader		vertexShader;
+	const ColorFragmentShader	fragmentShader		(tcuColorFormat, tcuStencilFormat);
+	const rr::Program			program				(&vertexShader, &fragmentShader);
+	ReferenceRenderer			refRenderer			(m_renderSize.x(), m_renderSize.y(), 1, tcuColorFormat, tcuStencilFormat, &program);
+	bool						compareOk			= false;
+
+	// Render reference image
+	{
+		// Set depth state
+		rr::RenderState renderState(refRenderer.getViewportState());
+
+		renderState.fragOps.depthTestEnabled	= true;
+		renderState.fragOps.depthFunc			= mapVkCompareOp(VK_COMPARE_OP_LESS);
+		renderState.fragOps.stencilTestEnabled	= true;
+
+		rr::StencilState& refStencilFront	= renderState.fragOps.stencilStates[rr::FACETYPE_FRONT];
+		rr::StencilState& refStencilBack	= renderState.fragOps.stencilStates[rr::FACETYPE_BACK];
+
+		refStencilFront.sFail		= mapVkStencilOp(m_stencilOpStateFront.failOp);
+		refStencilFront.dpFail		= mapVkStencilOp(m_stencilOpStateFront.depthFailOp);
+		refStencilFront.dpPass		= mapVkStencilOp(m_stencilOpStateFront.passOp);
+		refStencilFront.func		= mapVkCompareOp(m_stencilOpStateFront.compareOp);
+
+		refStencilBack.sFail		= mapVkStencilOp(m_stencilOpStateBack.failOp);
+		refStencilBack.dpFail		= mapVkStencilOp(m_stencilOpStateBack.depthFailOp);
+		refStencilBack.dpPass		= mapVkStencilOp(m_stencilOpStateBack.passOp);
+		refStencilBack.func			= mapVkCompareOp(m_stencilOpStateBack.compareOp);
+
+		// Reverse winding of vertices, as Vulkan screen coordinates start at upper left
+		std::vector<Vertex4RGBA> cwVertices(m_vertices);
+		for (size_t vertexNdx = 0; vertexNdx < cwVertices.size() - 2; vertexNdx += 3)
+		{
+			const Vertex4RGBA cwVertex1	= cwVertices[vertexNdx + 1];
+
+			cwVertices[vertexNdx + 1]	= cwVertices[vertexNdx + 2];
+			cwVertices[vertexNdx + 2]	= cwVertex1;
+		}
+
+		for (int quadNdx = 0; quadNdx < StencilTest::QUAD_COUNT; quadNdx++)
+		{
+			refStencilFront.ref			= (int)StencilTest::s_stencilStateConfigs[quadNdx].frontRef;
+			refStencilFront.compMask	= StencilTest::s_stencilStateConfigs[quadNdx].frontReadMask;
+			refStencilFront.writeMask	= StencilTest::s_stencilStateConfigs[quadNdx].frontWriteMask;
+
+			refStencilBack.ref			= (int)StencilTest::s_stencilStateConfigs[quadNdx].backRef;
+			refStencilBack.compMask		= StencilTest::s_stencilStateConfigs[quadNdx].backReadMask;
+			refStencilBack.writeMask	= StencilTest::s_stencilStateConfigs[quadNdx].backWriteMask;
+
+			refRenderer.draw(renderState,
+							 rr::PRIMITIVETYPE_TRIANGLES,
+							 std::vector<Vertex4RGBA>(cwVertices.begin() + quadNdx * 6,
+													  cwVertices.begin() + (quadNdx + 1) * 6));
+		}
+	}
+
+	// Compare result with reference image
+	{
+		const DeviceInterface&				vk					= m_context.getDeviceInterface();
+		const VkDevice						vkDevice			= m_context.getDevice();
+		const VkQueue						queue				= m_context.getUniversalQueue();
+		const deUint32						queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+		SimpleAllocator						allocator			(vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
+		de::UniquePtr<tcu::TextureLevel>	result				(readColorAttachment(vk, vkDevice, queue, queueFamilyIndex, allocator, *m_colorImage, m_colorFormat, m_renderSize).release());
+
+		compareOk = tcu::intThresholdPositionDeviationCompare(m_context.getTestContext().getLog(),
+															  "IntImageCompare",
+															  "Image comparison",
+															  refRenderer.getAccess(),
+															  result->getAccess(),
+															  tcu::UVec4(2, 2, 2, 2),
+															  tcu::IVec3(1, 1, 0),
+															  true,
+															  tcu::COMPARE_LOG_RESULT);
+	}
+
+	if (compareOk)
+		return tcu::TestStatus::pass("Result image matches reference");
+	else
+		return tcu::TestStatus::fail("Image mismatch");
+}
+
+
+// Utilities for test names
+
+std::string getShortName (VkCompareOp compareOp)
+{
+	const std::string  fullName = getCompareOpName(compareOp);
+
+	DE_ASSERT(de::beginsWith(fullName, "VK_COMPARE_OP_"));
+
+	return de::toLower(fullName.substr(14));
+}
+
+const char* getShortName (VkStencilOp stencilOp)
+{
+	switch (stencilOp)
+	{
+		case VK_STENCIL_OP_KEEP:					return "keep";
+		case VK_STENCIL_OP_ZERO:					return "zero";
+		case VK_STENCIL_OP_REPLACE:					return "repl";
+		case VK_STENCIL_OP_INCREMENT_AND_CLAMP:		return "incc";
+		case VK_STENCIL_OP_DECREMENT_AND_CLAMP:		return "decc";
+		case VK_STENCIL_OP_INVERT:					return "inv";
+		case VK_STENCIL_OP_INCREMENT_AND_WRAP:		return "wrap";
+		case VK_STENCIL_OP_DECREMENT_AND_WRAP:		return "decw";
+
+		default:
+			DE_FATAL("Invalid VkStencilOp value");
+	}
+	return DE_NULL;
+}
+
+std::string getStencilName(const VkStencilOpState& stencilOpState)
+{
+	std::ostringstream name;
+
+	name << "fail_" << getShortName(stencilOpState.failOp)
+		 << "_pass_" << getShortName(stencilOpState.passOp)
+		 << "_dfail_" << getShortName(stencilOpState.depthFailOp)
+		 << "_comp_" << getShortName(stencilOpState.compareOp);
+
+	return name.str();
+}
+
+std::string getStencilStateSetName(const VkStencilOpState& stencilOpStateFront,
+								   const VkStencilOpState& stencilOpStateBack)
+{
+	std::ostringstream name;
+
+	name << "front_" << getStencilName(stencilOpStateFront)
+		 << "_back_" << getStencilName(stencilOpStateBack);
+
+	return name.str();
+}
+
+std::string getStencilStateSetDescription(const VkStencilOpState& stencilOpStateFront,
+										  const VkStencilOpState& stencilOpStateBack)
+{
+	std::ostringstream desc;
+
+	desc << "\nFront faces:\n" << stencilOpStateFront;
+	desc << "Back faces:\n" << stencilOpStateBack;
+
+	return desc.str();
+}
+
+std::string getFormatCaseName (VkFormat format)
+{
+	const std::string fullName = getFormatName(format);
+
+	DE_ASSERT(de::beginsWith(fullName, "VK_FORMAT_"));
+
+	return de::toLower(fullName.substr(10));
+}
+
+} // anonymous
+
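+// Test hierarchy: stencil > format > <format_name> > states, with one case per front/back stencil state pair.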
+tcu::TestCaseGroup* createStencilTests (tcu::TestContext& testCtx)
+{
+	const VkFormat stencilFormats[] =
+	{
+		VK_FORMAT_S8_UINT,
+		VK_FORMAT_D16_UNORM_S8_UINT,
+		VK_FORMAT_D24_UNORM_S8_UINT,
+		VK_FORMAT_D32_SFLOAT_S8_UINT
+	};
+
+	de::MovePtr<tcu::TestCaseGroup>		stencilTests	(new tcu::TestCaseGroup(testCtx, "stencil", "Stencil tests"));
+	de::MovePtr<tcu::TestCaseGroup>		formatTests		(new tcu::TestCaseGroup(testCtx, "format", "Uses different stencil formats"));
+	StencilOpStateUniqueRandomIterator	stencilOpItr	(123);
+
+	for (size_t formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(stencilFormats); formatNdx++)
+	{
+		const VkFormat					stencilFormat	= stencilFormats[formatNdx];
+		de::MovePtr<tcu::TestCaseGroup>	formatTest		(new tcu::TestCaseGroup(testCtx,
+																				getFormatCaseName(stencilFormat).c_str(),
+																				(std::string("Uses format ") + getFormatName(stencilFormat)).c_str()));
+
+		de::MovePtr<tcu::TestCaseGroup>	stencilStateTests;
+		{
+			std::ostringstream desc;
+			desc << "Draws 4 quads with the following depths and dynamic stencil states: ";
+			for (int quadNdx = 0; quadNdx < StencilTest::QUAD_COUNT; quadNdx++)
+			{
+				const StencilTest::StencilStateConfig& stencilConfig = StencilTest::s_stencilStateConfigs[quadNdx];
+
+				desc << "(" << quadNdx << ") "
+					 << "z = " << StencilTest::s_quadDepths[quadNdx] << ", "
+					 << "frontReadMask = " << stencilConfig.frontReadMask << ", "
+					 << "frontWriteMask = " << stencilConfig.frontWriteMask << ", "
+					 << "frontRef = " << stencilConfig.frontRef << ", "
+					 << "backReadMask = " << stencilConfig.backReadMask << ", "
+					 << "backWriteMask = " << stencilConfig.backWriteMask << ", "
+					 << "backRef = " << stencilConfig.backRef;
+			}
+
+			stencilStateTests = de::MovePtr<tcu::TestCaseGroup>(new tcu::TestCaseGroup(testCtx, "states", desc.str().c_str()));
+		}
+
+		stencilOpItr.reset();
+
+		VkStencilOpState		prevStencilState	= stencilOpItr.next();
+		const VkStencilOpState	firstStencilState	= prevStencilState;
+
+		while (stencilOpItr.hasNext())
+		{
+			const VkStencilOpState stencilState = stencilOpItr.next();
+
+			// Use the current stencil state for front faces and the previous state for back faces
+			stencilStateTests->addChild(new StencilTest(testCtx,
+														getStencilStateSetName(stencilState, prevStencilState),
+														getStencilStateSetDescription(stencilState, prevStencilState),
+														stencilFormat,
+														stencilState,
+														prevStencilState));
+
+			prevStencilState = stencilState;
+		}
+
+		// Pair the first stencil state with the last one so that every state is exercised on both front and back faces.
+		stencilStateTests->addChild(new StencilTest(testCtx,
+													getStencilStateSetName(firstStencilState, prevStencilState),
+													getStencilStateSetDescription(firstStencilState, prevStencilState),
+													stencilFormat,
+													firstStencilState,
+													prevStencilState));
+
+		formatTest->addChild(stencilStateTests.release());
+		formatTests->addChild(formatTest.release());
+	}
+	stencilTests->addChild(formatTests.release());
+
+	return stencilTests.release();
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineStencilTests.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineStencilTests.hpp
new file mode 100644
index 0000000..fe0ec20
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineStencilTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTPIPELINESTENCILTESTS_HPP
+#define _VKTPIPELINESTENCILTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Stencil Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+tcu::TestCaseGroup* createStencilTests (tcu::TestContext& testCtx);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINESTENCILTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineTests.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineTests.cpp
new file mode 100644
index 0000000..abd4978
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineTests.cpp
@@ -0,0 +1,83 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Pipeline Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineTests.hpp"
+#include "vktPipelineStencilTests.hpp"
+#include "vktPipelineBlendTests.hpp"
+#include "vktPipelineDepthTests.hpp"
+#include "vktPipelineImageTests.hpp"
+#include "vktPipelineInputAssemblyTests.hpp"
+#include "vktPipelineSamplerTests.hpp"
+#include "vktPipelineImageViewTests.hpp"
+#include "vktPipelinePushConstantTests.hpp"
+#include "vktPipelineMultisampleTests.hpp"
+#include "vktPipelineVertexInputTests.hpp"
+#include "vktPipelineTimestampTests.hpp"
+#include "vktTestGroupUtil.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+namespace
+{
+
+void createChildren (tcu::TestCaseGroup* pipelineTests)
+{
+	tcu::TestContext&	testCtx	= pipelineTests->getTestContext();
+
+	pipelineTests->addChild(createStencilTests		(testCtx));
+	pipelineTests->addChild(createBlendTests		(testCtx));
+	pipelineTests->addChild(createDepthTests		(testCtx));
+	pipelineTests->addChild(createImageTests		(testCtx));
+	pipelineTests->addChild(createSamplerTests		(testCtx));
+	pipelineTests->addChild(createImageViewTests	(testCtx));
+	pipelineTests->addChild(createPushConstantTests	(testCtx));
+	pipelineTests->addChild(createMultisampleTests	(testCtx));
+	pipelineTests->addChild(createVertexInputTests	(testCtx));
+	pipelineTests->addChild(createInputAssemblyTests(testCtx));
+	pipelineTests->addChild(createTimestampTests	(testCtx));
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx)
+{
+	return createTestGroup(testCtx, "pipeline", "Pipeline Tests", createChildren);
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineTests.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineTests.hpp
new file mode 100644
index 0000000..cb1c470
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTPIPELINETESTS_HPP
+#define _VKTPIPELINETESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Pipeline Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+tcu::TestCaseGroup*		createTests			(tcu::TestContext& testCtx);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINETESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineTimestampTests.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineTimestampTests.cpp
new file mode 100644
index 0000000..6189e94
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineTimestampTests.cpp
@@ -0,0 +1,2245 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 ARM Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Timestamp Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineTimestampTests.hpp"
+#include "vktPipelineClearUtil.hpp"
+#include "vktPipelineImageUtil.hpp"
+#include "vktPipelineVertexUtil.hpp"
+#include "vktPipelineReferenceRenderer.hpp"
+#include "vktTestCase.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkBuilderUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "tcuImageCompare.hpp"
+#include "deUniquePtr.hpp"
+#include "deStringUtil.hpp"
+#include "deMemory.h"
+#include "vkTypeUtil.hpp"
+
+#include <sstream>
+#include <vector>
+#include <cctype>
+#include <locale>
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+
+namespace
+{
+typedef std::vector<VkPipelineStageFlagBits> StageFlagVector;
+
+// helper functions
+#define GEN_DESC_STRING(name,postfix)                                      \
+		do {                                                               \
+		   for (std::string::size_type ndx = 0; ndx<strlen(#name); ++ndx)  \
+			 if(isDescription && #name[ndx] == '_')                        \
+			   desc << " ";                                                \
+			 else                                                          \
+			   desc << std::tolower(#name[ndx],loc);                       \
+		   if (isDescription)                                              \
+			 desc << " " << #postfix;                                      \
+		   else                                                            \
+			 desc << "_" << #postfix;                                      \
+		} while (deGetFalse())
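+// For example, GEN_DESC_STRING(VERTEX_SHADER, stage) produces "vertex_shader_stage" when building
+// test names (isDescription == false) and "vertex shader stage" when building descriptions.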
+
+std::string getPipelineStageFlagStr (const VkPipelineStageFlagBits stage,
+									 bool                          isDescription)
+{
+	std::ostringstream desc;
+	std::locale loc;
+	switch(stage)
+	{
+#define STAGE_CASE(p)                              \
+		case VK_PIPELINE_STAGE_##p##_BIT:          \
+		{                                          \
+			GEN_DESC_STRING(p, stage);             \
+			break;                                 \
+		}
+		STAGE_CASE(TOP_OF_PIPE);
+		STAGE_CASE(DRAW_INDIRECT);
+		STAGE_CASE(VERTEX_INPUT);
+		STAGE_CASE(VERTEX_SHADER);
+		STAGE_CASE(TESSELLATION_CONTROL_SHADER);
+		STAGE_CASE(TESSELLATION_EVALUATION_SHADER);
+		STAGE_CASE(GEOMETRY_SHADER);
+		STAGE_CASE(FRAGMENT_SHADER);
+		STAGE_CASE(EARLY_FRAGMENT_TESTS);
+		STAGE_CASE(LATE_FRAGMENT_TESTS);
+		STAGE_CASE(COLOR_ATTACHMENT_OUTPUT);
+		STAGE_CASE(COMPUTE_SHADER);
+		STAGE_CASE(TRANSFER);
+		STAGE_CASE(HOST);
+		STAGE_CASE(ALL_GRAPHICS);
+		STAGE_CASE(ALL_COMMANDS);
+#undef STAGE_CASE
+	  default:
+		desc << "unknown stage!";
+		DE_FATAL("Unknown stage!");
+		break;
+	};
+
+	return desc.str();
+}
+
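+// Transfer operations that the transfer timestamp tests can record before writing a timestamp; each value maps to one vkCmd* transfer command.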
+enum TransferMethod
+{
+	TRANSFER_METHOD_COPY_BUFFER = 0,
+	TRANSFER_METHOD_COPY_IMAGE,
+	TRANSFER_METHOD_BLIT_IMAGE,
+	TRANSFER_METHOD_COPY_BUFFER_TO_IMAGE,
+	TRANSFER_METHOD_COPY_IMAGE_TO_BUFFER,
+	TRANSFER_METHOD_UPDATE_BUFFER,
+	TRANSFER_METHOD_FILL_BUFFER,
+	TRANSFER_METHOD_CLEAR_COLOR_IMAGE,
+	TRANSFER_METHOD_CLEAR_DEPTH_STENCIL_IMAGE,
+	TRANSFER_METHOD_RESOLVE_IMAGE,
+	TRANSFER_METHOD_COPY_QUERY_POOL_RESULTS,
+	TRANSFER_METHOD_LAST
+};
+
+std::string getTransferMethodStr(const TransferMethod method,
+								 bool                 isDescription)
+{
+	std::ostringstream desc;
+	std::locale loc;
+	switch(method)
+	{
+#define METHOD_CASE(p)                             \
+		case TRANSFER_METHOD_##p:                  \
+		{                                          \
+			GEN_DESC_STRING(p, method);            \
+			break;                                 \
+		}
+	  METHOD_CASE(COPY_BUFFER)
+	  METHOD_CASE(COPY_IMAGE)
+	  METHOD_CASE(BLIT_IMAGE)
+	  METHOD_CASE(COPY_BUFFER_TO_IMAGE)
+	  METHOD_CASE(COPY_IMAGE_TO_BUFFER)
+	  METHOD_CASE(UPDATE_BUFFER)
+	  METHOD_CASE(FILL_BUFFER)
+	  METHOD_CASE(CLEAR_COLOR_IMAGE)
+	  METHOD_CASE(CLEAR_DEPTH_STENCIL_IMAGE)
+	  METHOD_CASE(RESOLVE_IMAGE)
+	  METHOD_CASE(COPY_QUERY_POOL_RESULTS)
+#undef METHOD_CASE
+	  default:
+		desc << "unknown method!";
+		DE_FATAL("Unknown method!");
+		break;
+	};
+
+	return desc.str();
+}
+
+// helper classes
+class TimestampTestParam
+{
+public:
+							  TimestampTestParam      (const VkPipelineStageFlagBits* stages,
+													   const deUint32                 stageCount,
+													   const bool                     inRenderPass);
+							  ~TimestampTestParam     (void);
+	virtual const std::string generateTestName        (void) const;
+	virtual const std::string generateTestDescription (void) const;
+	StageFlagVector           getStageVector          (void) const { return m_stageVec; }
+	bool                      getInRenderPass         (void) const { return m_inRenderPass; }
+	void                      toggleInRenderPass      (void)       { m_inRenderPass = !m_inRenderPass; }
+protected:
+	StageFlagVector           m_stageVec;
+	bool                      m_inRenderPass;
+};
+
+TimestampTestParam::TimestampTestParam(const VkPipelineStageFlagBits* stages,
+									   const deUint32                 stageCount,
+									   const bool                     inRenderPass)
+	: m_inRenderPass(inRenderPass)
+{
+	for (deUint32 ndx = 0; ndx < stageCount; ndx++)
+	{
+		m_stageVec.push_back(stages[ndx]);
+	}
+}
+
+TimestampTestParam::~TimestampTestParam(void)
+{
+}
+
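+// Example: stages { TOP_OF_PIPE, VERTEX_SHADER } with inRenderPass == true yield the test name
+// "vertex_shader_stage_in_render_pass"; TOP_OF_PIPE entries are skipped when building the name.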
+const std::string TimestampTestParam::generateTestName(void) const
+{
+	std::string result("");
+
+	for (StageFlagVector::const_iterator it = m_stageVec.begin(); it != m_stageVec.end(); it++)
+	{
+		if(*it != VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT)
+		{
+			result += getPipelineStageFlagStr(*it, false) + '_';
+		}
+	}
+	if(m_inRenderPass)
+		result += "in_render_pass";
+	else
+		result += "out_of_render_pass";
+
+	return result;
+}
+
+const std::string TimestampTestParam::generateTestDescription(void) const
+{
+	std::string result("Record timestamp after ");
+
+	for (StageFlagVector::const_iterator it = m_stageVec.begin(); it != m_stageVec.end(); it++)
+	{
+		if(*it != VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT)
+		{
+			result += getPipelineStageFlagStr(*it, true) + ' ';
+		}
+	}
+	if(m_inRenderPass)
+		result += " in the render pass";
+	else
+		result += " out of the render pass";
+
+	return result;
+}
+
+class TransferTimestampTestParam : public TimestampTestParam
+{
+public:
+					  TransferTimestampTestParam  (const VkPipelineStageFlagBits* stages,
+												   const deUint32                 stageCount,
+												   const bool                     inRenderPass,
+												   const deUint32                 methodNdx);
+					  ~TransferTimestampTestParam (void)       { }
+	const std::string generateTestName            (void) const;
+	const std::string generateTestDescription     (void) const;
+	TransferMethod    getMethod                   (void) const { return m_method; }
+protected:
+	TransferMethod    m_method;
+};
+
+TransferTimestampTestParam::TransferTimestampTestParam(const VkPipelineStageFlagBits* stages,
+													   const deUint32                 stageCount,
+													   const bool                     inRenderPass,
+													   const deUint32                 methodNdx)
+	: TimestampTestParam(stages, stageCount, inRenderPass)
+{
+	DE_ASSERT(methodNdx < (deUint32)TRANSFER_METHOD_LAST);
+
+	m_method = (TransferMethod)methodNdx;
+}
+
+const std::string TransferTimestampTestParam::generateTestName(void) const
+{
+	std::string result("");
+
+	for (StageFlagVector::const_iterator it = m_stageVec.begin(); it != m_stageVec.end(); it++)
+	{
+		if(*it != VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT)
+		{
+			result += getPipelineStageFlagStr(*it, false) + '_';
+		}
+	}
+
+	result += "with_" + getTransferMethodStr(m_method, false);
+
+	return result;
+
+}
+
+const std::string TransferTimestampTestParam::generateTestDescription(void) const
+{
+	std::string result("");
+
+	for (StageFlagVector::const_iterator it = m_stageVec.begin(); it != m_stageVec.end(); it++)
+	{
+		if(*it != VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT)
+		{
+			result += getPipelineStageFlagStr(*it, true) + ' ';
+		}
+	}
+
+	result += "with " + getTransferMethodStr(m_method, true);
+
+	return result;
+
+}
+
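+// Gathers shader stages, fills in simple fixed-function defaults and builds a single graphics pipeline for the given render pass.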
+class SimpleGraphicsPipelineBuilder
+{
+public:
+					 SimpleGraphicsPipelineBuilder  (Context&              context);
+					 ~SimpleGraphicsPipelineBuilder (void) { }
+	void             bindShaderStage                (VkShaderStageFlagBits stage,
+													 const char*           source_name,
+													 const char*           entry_name);
+	void             enableTessellationStage        (deUint32              patchControlPoints);
+	Move<VkPipeline> buildPipeline                  (tcu::UVec2            renderSize,
+													 VkRenderPass          renderPass);
+protected:
+	enum
+	{
+		VK_MAX_SHADER_STAGES = 6,
+	};
+
+	Context&                            m_context;
+
+	Move<VkShaderModule>                m_shaderModules[VK_MAX_SHADER_STAGES];
+	deUint32                            m_shaderStageCount;
+	VkPipelineShaderStageCreateInfo     m_shaderStageInfo[VK_MAX_SHADER_STAGES];
+
+	deUint32                            m_patchControlPoints;
+
+	Move<VkPipelineLayout>              m_pipelineLayout;
+	Move<VkPipeline>                    m_graphicsPipelines;
+
+};
+
+SimpleGraphicsPipelineBuilder::SimpleGraphicsPipelineBuilder(Context& context)
+	: m_context(context)
+{
+	m_patchControlPoints = 0;
+	m_shaderStageCount   = 0;
+}
+
+void SimpleGraphicsPipelineBuilder::bindShaderStage(VkShaderStageFlagBits stage,
+													const char*           source_name,
+													const char*           entry_name)
+{
+	const DeviceInterface&  vk        = m_context.getDeviceInterface();
+	const VkDevice          vkDevice  = m_context.getDevice();
+
+	// Create shader module
+	const deUint32*         pCode     = (const deUint32*)m_context.getBinaryCollection().get(source_name).getBinary();
+	deUint32                codeSize  = (deUint32)m_context.getBinaryCollection().get(source_name).getSize();
+
+	const VkShaderModuleCreateInfo moduleCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,                // VkStructureType             sType;
+		DE_NULL,                                                    // const void*                 pNext;
+		0u,                                                         // VkShaderModuleCreateFlags   flags;
+		codeSize,                                                   // deUintptr                   codeSize;
+		pCode,                                                      // const deUint32*             pCode;
+	};
+
+	m_shaderModules[m_shaderStageCount] = createShaderModule(vk, vkDevice, &moduleCreateInfo);
+
+	// Prepare shader stage info
+	m_shaderStageInfo[m_shaderStageCount].sType               = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+	m_shaderStageInfo[m_shaderStageCount].pNext               = DE_NULL;
+	m_shaderStageInfo[m_shaderStageCount].flags               = 0u;
+	m_shaderStageInfo[m_shaderStageCount].stage               = stage;
+	m_shaderStageInfo[m_shaderStageCount].module              = *m_shaderModules[m_shaderStageCount];
+	m_shaderStageInfo[m_shaderStageCount].pName               = entry_name;
+	m_shaderStageInfo[m_shaderStageCount].pSpecializationInfo = DE_NULL;
+
+	m_shaderStageCount++;
+}
+
+Move<VkPipeline> SimpleGraphicsPipelineBuilder::buildPipeline(tcu::UVec2 renderSize, VkRenderPass renderPass)
+{
+	const DeviceInterface&      vk                  = m_context.getDeviceInterface();
+	const VkDevice              vkDevice            = m_context.getDevice();
+
+	// Create pipeline layout
+	{
+		const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,      // VkStructureType                  sType;
+			DE_NULL,                                            // const void*                      pNext;
+			0u,                                                 // VkPipelineLayoutCreateFlags      flags;
+			0u,                                                 // deUint32                         setLayoutCount;
+			DE_NULL,                                            // const VkDescriptorSetLayout*     pSetLayouts;
+			0u,                                                 // deUint32                         pushConstantRangeCount;
+			DE_NULL                                             // const VkPushConstantRange*       pPushConstantRanges;
+		};
+
+		m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	// Create pipeline
+	const VkVertexInputBindingDescription vertexInputBindingDescription =
+	{
+		0u,                                 // deUint32                 binding;
+		sizeof(Vertex4RGBA),                // deUint32                 strideInBytes;
+		VK_VERTEX_INPUT_RATE_VERTEX,        // VkVertexInputRate        inputRate;
+	};
+
+	const VkVertexInputAttributeDescription vertexInputAttributeDescriptions[2] =
+	{
+		{
+			0u,                                 // deUint32 location;
+			0u,                                 // deUint32 binding;
+			VK_FORMAT_R32G32B32A32_SFLOAT,      // VkFormat format;
+			0u                                  // deUint32 offsetInBytes;
+		},
+		{
+			1u,                                 // deUint32 location;
+			0u,                                 // deUint32 binding;
+			VK_FORMAT_R32G32B32A32_SFLOAT,      // VkFormat format;
+			DE_OFFSET_OF(Vertex4RGBA, color),   // deUint32 offsetInBytes;
+		}
+	};
+
+	const VkPipelineVertexInputStateCreateInfo vertexInputStateParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,      // VkStructureType                          sType;
+		DE_NULL,                                                        // const void*                              pNext;
+		0u,                                                             // VkPipelineVertexInputStateCreateFlags    flags;
+		1u,                                                             // deUint32                                 vertexBindingDescriptionCount;
+		&vertexInputBindingDescription,                                 // const VkVertexInputBindingDescription*   pVertexBindingDescriptions;
+		2u,                                                             // deUint32                                 vertexAttributeDescriptionCount;
+		vertexInputAttributeDescriptions,                               // const VkVertexInputAttributeDescription* pVertexAttributeDescriptions;
+	};
+
+	VkPrimitiveTopology primitiveTopology = (m_patchControlPoints > 0) ? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST : VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
+	const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,    // VkStructureType                          sType;
+		DE_NULL,                                                        // const void*                              pNext;
+		0u,                                                             // VkPipelineInputAssemblyStateCreateFlags  flags;
+		primitiveTopology,                                              // VkPrimitiveTopology                      topology;
+		VK_FALSE,                                                       // VkBool32                                 primitiveRestartEnable;
+	};
+
+	const VkViewport viewport =
+	{
+		0.0f,                       // float    originX;
+		0.0f,                       // float    originY;
+		(float)renderSize.x(),      // float    width;
+		(float)renderSize.y(),      // float    height;
+		0.0f,                       // float    minDepth;
+		1.0f                        // float    maxDepth;
+	};
+	const VkRect2D scissor =
+	{
+		{ 0u, 0u },                                                     // VkOffset2D  offset;
+		{ renderSize.x(), renderSize.y() }                              // VkExtent2D  extent;
+	};
+	const VkPipelineViewportStateCreateInfo viewportStateParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,          // VkStructureType                      sType;
+		DE_NULL,                                                        // const void*                          pNext;
+		0u,                                                             // VkPipelineViewportStateCreateFlags   flags;
+		1u,                                                             // deUint32                             viewportCount;
+		&viewport,                                                      // const VkViewport*                    pViewports;
+		1u,                                                             // deUint32                             scissorCount;
+		&scissor                                                        // const VkRect2D*                      pScissors;
+	};
+
+	const VkPipelineRasterizationStateCreateInfo rasterStateParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,     // VkStructureType                          sType;
+		DE_NULL,                                                        // const void*                              pNext;
+		0u,                                                             // VkPipelineRasterizationStateCreateFlags  flags;
+		VK_FALSE,                                                       // VkBool32                                 depthClampEnable;
+		VK_FALSE,                                                       // VkBool32                                 rasterizerDiscardEnable;
+		VK_POLYGON_MODE_FILL,                                           // VkPolygonMode                            polygonMode;
+		VK_CULL_MODE_NONE,                                              // VkCullModeFlags                          cullMode;
+		VK_FRONT_FACE_COUNTER_CLOCKWISE,                                // VkFrontFace                              frontFace;
+		VK_FALSE,                                                       // VkBool32                                 depthBiasEnable;
+		0.0f,                                                           // float                                    depthBiasConstantFactor;
+		0.0f,                                                           // float                                    depthBiasClamp;
+		0.0f,                                                           // float                                    depthBiasSlopeFactor;
+		1.0f,                                                           // float                                    lineWidth;
+	};
+
+	const VkPipelineColorBlendAttachmentState colorBlendAttachmentState =
+	{
+		VK_FALSE,                                                                   // VkBool32                 blendEnable;
+		VK_BLEND_FACTOR_ONE,                                                        // VkBlendFactor            srcColorBlendFactor;
+		VK_BLEND_FACTOR_ZERO,                                                       // VkBlendFactor            dstColorBlendFactor;
+		VK_BLEND_OP_ADD,                                                            // VkBlendOp                colorBlendOp;
+		VK_BLEND_FACTOR_ONE,                                                        // VkBlendFactor            srcAlphaBlendFactor;
+		VK_BLEND_FACTOR_ZERO,                                                       // VkBlendFactor            dstAlphaBlendFactor;
+		VK_BLEND_OP_ADD,                                                            // VkBlendOp                alphaBlendOp;
+		VK_COLOR_COMPONENT_R_BIT |
+		VK_COLOR_COMPONENT_G_BIT |
+		VK_COLOR_COMPONENT_B_BIT |
+		VK_COLOR_COMPONENT_A_BIT                                                    // VkColorComponentFlags    colorWriteMask;
+	};
+
+	const VkPipelineColorBlendStateCreateInfo colorBlendStateParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,   // VkStructureType                              sType;
+		DE_NULL,                                                    // const void*                                  pNext;
+		0u,                                                         // VkPipelineColorBlendStateCreateFlags         flags;
+		VK_FALSE,                                                   // VkBool32                                     logicOpEnable;
+		VK_LOGIC_OP_COPY,                                           // VkLogicOp                                    logicOp;
+		1u,                                                         // deUint32                                     attachmentCount;
+		&colorBlendAttachmentState,                                 // const VkPipelineColorBlendAttachmentState*   pAttachments;
+		{ 0.0f, 0.0f, 0.0f, 0.0f },                                 // float                                        blendConst[4];
+	};
+
+	const VkPipelineMultisampleStateCreateInfo  multisampleStateParams  =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,   // VkStructureType                          sType;
+		DE_NULL,                                                    // const void*                              pNext;
+		0u,                                                         // VkPipelineMultisampleStateCreateFlags    flags;
+		VK_SAMPLE_COUNT_1_BIT,                                      // VkSampleCountFlagBits                    rasterizationSamples;
+		VK_FALSE,                                                   // VkBool32                                 sampleShadingEnable;
+		0.0f,                                                       // float                                    minSampleShading;
+		DE_NULL,                                                    // const VkSampleMask*                      pSampleMask;
+		VK_FALSE,                                                   // VkBool32                                 alphaToCoverageEnable;
+		VK_FALSE,                                                   // VkBool32                                 alphaToOneEnable;
+	};
+
+	const VkPipelineDynamicStateCreateInfo  dynamicStateParams      =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,       // VkStructureType                      sType;
+		DE_NULL,                                                    // const void*                          pNext;
+		0u,                                                         // VkPipelineDynamicStateCreateFlags    flags;
+		0u,                                                         // deUint32                             dynamicStateCount;
+		DE_NULL,                                                    // const VkDynamicState*                pDynamicStates;
+	};
+
+	VkPipelineDepthStencilStateCreateInfo depthStencilStateParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO, // VkStructureType                          sType;
+		DE_NULL,                                                    // const void*                              pNext;
+		0u,                                                         // VkPipelineDepthStencilStateCreateFlags   flags;
+		VK_TRUE,                                                    // VkBool32                                 depthTestEnable;
+		VK_TRUE,                                                    // VkBool32                                 depthWriteEnable;
+		VK_COMPARE_OP_LESS_OR_EQUAL,                                // VkCompareOp                              depthCompareOp;
+		VK_FALSE,                                                   // VkBool32                                 depthBoundsTestEnable;
+		VK_FALSE,                                                   // VkBool32                                 stencilTestEnable;
+		// VkStencilOpState front;
+		{
+			VK_STENCIL_OP_KEEP,     // VkStencilOp  failOp;
+			VK_STENCIL_OP_KEEP,     // VkStencilOp  passOp;
+			VK_STENCIL_OP_KEEP,     // VkStencilOp  depthFailOp;
+			VK_COMPARE_OP_NEVER,    // VkCompareOp  compareOp;
+			0u,                     // deUint32     compareMask;
+			0u,                     // deUint32     writeMask;
+			0u,                     // deUint32     reference;
+		},
+		// VkStencilOpState back;
+		{
+			VK_STENCIL_OP_KEEP,     // VkStencilOp  failOp;
+			VK_STENCIL_OP_KEEP,     // VkStencilOp  passOp;
+			VK_STENCIL_OP_KEEP,     // VkStencilOp  depthFailOp;
+			VK_COMPARE_OP_NEVER,    // VkCompareOp  compareOp;
+			0u,                     // deUint32     compareMask;
+			0u,                     // deUint32     writeMask;
+			0u,                     // deUint32     reference;
+		},
+		-1.0f,                                                      // float                                    minDepthBounds;
+		+1.0f,                                                      // float                                    maxDepthBounds;
+	};
+
+	const VkPipelineTessellationStateCreateInfo*	pTessCreateInfo		= DE_NULL;
+	const VkPipelineTessellationStateCreateInfo		tessStateCreateInfo	=
+	{
+			VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,  // VkStructureType                          sType;
+			DE_NULL,                                                    // const void*                              pNext;
+			0u,                                                         // VkPipelineTessellationStateCreateFlags   flags;
+			m_patchControlPoints,                                       // deUint32                                 patchControlPoints;
+	};
+
+	if (m_patchControlPoints > 0)
+		pTessCreateInfo = &tessStateCreateInfo;
+
+	const VkGraphicsPipelineCreateInfo graphicsPipelineParams =
+	{
+		VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,    // VkStructureType                                  sType;
+		DE_NULL,                                            // const void*                                      pNext;
+		0u,                                                 // VkPipelineCreateFlags                            flags;
+		m_shaderStageCount,                                 // deUint32                                         stageCount;
+		m_shaderStageInfo,                                  // const VkPipelineShaderStageCreateInfo*           pStages;
+		&vertexInputStateParams,                            // const VkPipelineVertexInputStateCreateInfo*      pVertexInputState;
+		&inputAssemblyStateParams,                          // const VkPipelineInputAssemblyStateCreateInfo*    pInputAssemblyState;
+		pTessCreateInfo,                                    // const VkPipelineTessellationStateCreateInfo*     pTessellationState;
+		&viewportStateParams,                               // const VkPipelineViewportStateCreateInfo*         pViewportState;
+		&rasterStateParams,                                 // const VkPipelineRasterizationStateCreateInfo*    pRasterState;
+		&multisampleStateParams,                            // const VkPipelineMultisampleStateCreateInfo*      pMultisampleState;
+		&depthStencilStateParams,                           // const VkPipelineDepthStencilStateCreateInfo*     pDepthStencilState;
+		&colorBlendStateParams,                             // const VkPipelineColorBlendStateCreateInfo*       pColorBlendState;
+		&dynamicStateParams,                                // const VkPipelineDynamicStateCreateInfo*          pDynamicState;
+		*m_pipelineLayout,                                  // VkPipelineLayout                                 layout;
+		renderPass,                                         // VkRenderPass                                     renderPass;
+		0u,                                                 // deUint32                                         subpass;
+		0u,                                                 // VkPipeline                                       basePipelineHandle;
+		0,                                                  // deInt32                                          basePipelineIndex;
+	};
+
+	return createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+}
+
+void SimpleGraphicsPipelineBuilder::enableTessellationStage(deUint32 patchControlPoints)
+{
+	m_patchControlPoints = patchControlPoints;
+}
+
+template <class Test>
+vkt::TestCase* newTestCase(tcu::TestContext&     testContext,
+						   TimestampTestParam*   testParam)
+{
+	return new Test(testContext,
+					testParam->generateTestName().c_str(),
+					testParam->generateTestDescription().c_str(),
+					testParam);
+}
+
+// Test Classes
+class TimestampTest : public vkt::TestCase
+{
+public:
+	enum
+	{
+		ENTRY_COUNT = 8
+	};
+
+						  TimestampTest(tcu::TestContext&         testContext,
+										const std::string&        name,
+										const std::string&        description,
+										const TimestampTestParam* param)
+							  : vkt::TestCase  (testContext, name, description)
+							  , m_stages       (param->getStageVector())
+							  , m_inRenderPass (param->getInRenderPass())
+							  { }
+	virtual               ~TimestampTest (void) { }
+	virtual void          initPrograms   (SourceCollections&      programCollection) const;
+	virtual TestInstance* createInstance (Context&                context) const;
+protected:
+	const StageFlagVector m_stages;
+	const bool            m_inRenderPass;
+};
+
+class TimestampTestInstance : public vkt::TestInstance
+{
+public:
+							TimestampTestInstance      (Context&                 context,
+														const StageFlagVector&   stages,
+														const bool               inRenderPass);
+	virtual                 ~TimestampTestInstance     (void);
+	virtual tcu::TestStatus iterate                    (void);
+protected:
+	virtual tcu::TestStatus verifyTimestamp            (void);
+	virtual void            configCommandBuffer        (void);
+	Move<VkBuffer>          createBufferAndBindMemory  (VkDeviceSize             size,
+														VkBufferUsageFlags       usage,
+														de::MovePtr<Allocation>* pAlloc);
+	Move<VkImage>           createImage2DAndBindMemory (VkFormat                 format,
+														deUint32                 width,
+														deUint32                 height,
+														VkImageUsageFlags        usage,
+														VkSampleCountFlagBits    sampleCount,
+														de::MovePtr<Allocation>* pAlloc);
+protected:
+	const StageFlagVector   m_stages;
+	bool                    m_inRenderPass;
+
+	Move<VkCommandPool>     m_cmdPool;
+	Move<VkCommandBuffer>   m_cmdBuffer;
+	Move<VkFence>           m_fence;
+	Move<VkQueryPool>       m_queryPool;
+	deUint64*               m_timestampValues;
+};
+
+void TimestampTest::initPrograms(SourceCollections& programCollection) const
+{
+	vkt::TestCase::initPrograms(programCollection);
+}
+
+TestInstance* TimestampTest::createInstance(Context& context) const
+{
+	return new TimestampTestInstance(context, m_stages, m_inRenderPass);
+}
+
+TimestampTestInstance::TimestampTestInstance(Context&                context,
+											 const StageFlagVector&  stages,
+											 const bool              inRenderPass)
+	: TestInstance  (context)
+	, m_stages      (stages)
+	, m_inRenderPass(inRenderPass)
+{
+	const DeviceInterface&      vk                  = context.getDeviceInterface();
+	const VkDevice              vkDevice            = context.getDevice();
+	const deUint32              queueFamilyIndex    = context.getUniversalQueueFamilyIndex();
+
+	// Check support for timestamp queries
+	{
+		const std::vector<VkQueueFamilyProperties>   queueProperties = vk::getPhysicalDeviceQueueFamilyProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice());
+
+		DE_ASSERT(queueFamilyIndex < (deUint32)queueProperties.size());
+
+		if (!queueProperties[queueFamilyIndex].timestampValidBits)
+			throw tcu::NotSupportedError("Universal queue does not support timestamps");
+	}
+
+	// Create Query Pool
+	{
+		const VkQueryPoolCreateInfo queryPoolParams =
+		{
+		   VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,    // VkStructureType               sType;
+		   DE_NULL,                                     // const void*                   pNext;
+		   0u,                                          // VkQueryPoolCreateFlags        flags;
+		   VK_QUERY_TYPE_TIMESTAMP,                     // VkQueryType                   queryType;
+		   TimestampTest::ENTRY_COUNT,                  // deUint32                      entryCount;
+		   0u,                                          // VkQueryPipelineStatisticFlags pipelineStatistics;
+		};
+
+		m_queryPool = createQueryPool(vk, vkDevice, &queryPoolParams);
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,   // VkStructureType      sType;
+			DE_NULL,                                      // const void*          pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,         // VkCmdPoolCreateFlags flags;
+			queueFamilyIndex,                             // deUint32             queueFamilyIndex;
+		};
+
+		m_cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdAllocateParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, // VkStructureType         sType;
+			DE_NULL,                                        // const void*             pNext;
+			*m_cmdPool,                                     // VkCommandPool           cmdPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,                // VkCommandBufferLevel    level;
+			1u,                                             // deUint32                bufferCount;
+		};
+
+		m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdAllocateParams);
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,        // VkStructureType      sType;
+			DE_NULL,                                    // const void*          pNext;
+			0u,                                         // VkFenceCreateFlags   flags;
+		};
+
+		m_fence = createFence(vk, vkDevice, &fenceParams);
+	}
+
+	// Allocate storage for the timestamp values
+	m_timestampValues = new deUint64[m_stages.size()];
+}
+
+TimestampTestInstance::~TimestampTestInstance(void)
+{
+	if(m_timestampValues)
+	{
+		delete[] m_timestampValues;
+		m_timestampValues = DE_NULL;
+	}
+}
+
+void TimestampTestInstance::configCommandBuffer(void)
+{
+	const DeviceInterface&      vk                  = m_context.getDeviceInterface();
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,    // VkStructureType                  sType;
+		DE_NULL,                                        // const void*                      pNext;
+		0u,                                             // VkCommandBufferUsageFlags        flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL, // const VkCommandBufferInheritanceInfo*    pInheritanceInfo;
+	};
+
+	VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+
+	vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
+
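+	// Write one timestamp per requested stage; query indices follow the order of m_stages.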
+	deUint32 timestampEntry = 0;
+	for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
+	{
+		vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
+	}
+
+	VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+}
+
+tcu::TestStatus TimestampTestInstance::iterate(void)
+{
+	const DeviceInterface&      vk          = m_context.getDeviceInterface();
+	const VkDevice              vkDevice    = m_context.getDevice();
+	const VkQueue               queue       = m_context.getUniversalQueue();
+
+	configCommandBuffer();
+
+	VK_CHECK(vk.resetFences(vkDevice, 1u, &m_fence.get()));
+
+	const VkSubmitInfo          submitInfo =
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,                      // VkStructureType         sType;
+		DE_NULL,                                            // const void*             pNext;
+		0u,                                                 // deUint32                waitSemaphoreCount;
+		DE_NULL,                                            // const VkSemaphore*      pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,               // const VkPipelineStageFlags* pWaitDstStageMask;
+		1u,                                                 // deUint32                commandBufferCount;
+		&m_cmdBuffer.get(),                                 // const VkCommandBuffer*  pCommandBuffers;
+		0u,                                                 // deUint32                signalSemaphoreCount;
+		DE_NULL,                                            // const VkSemaphore*      pSignalSemaphores;
+	};
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *m_fence));
+
+	VK_CHECK(vk.waitForFences(vkDevice, 1u, &m_fence.get(), true, ~(0ull) /* infinity */));
+
+	// Generate the timestamp mask: only timestampValidBits bits of each query result are meaningful,
+	// so mask off the undefined high bits before comparing values.
+	deUint64                    timestampMask;
+	const deUint32              queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
+	const std::vector<VkQueueFamilyProperties>   queueProperties = vk::getPhysicalDeviceQueueFamilyProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice());
+	if (queueProperties[queueFamilyIndex].timestampValidBits == 0)
+	{
+		return tcu::TestStatus::fail("Universal queue does not support timestamps!");
+	}
+	else if (queueProperties[queueFamilyIndex].timestampValidBits == 64)
+	{
+		timestampMask = 0xFFFFFFFFFFFFFFFFull;
+	}
+	else
+	{
+		timestampMask = ((deUint64)1 << queueProperties[queueFamilyIndex].timestampValidBits) - 1;
+	}
+
+	// Get timestamp values from the query pool (64-bit results, wait until they are available)
+	const deUint32              stageSize = (deUint32)m_stages.size();
+
+	VK_CHECK(vk.getQueryPoolResults(vkDevice, *m_queryPool, 0u, stageSize, sizeof(deUint64) * stageSize, (void*)m_timestampValues, sizeof(deUint64), VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT));
+
+	for (deUint32 ndx = 0; ndx < stageSize; ndx++)
+	{
+		m_timestampValues[ndx] &= timestampMask;
+	}
+
+	return verifyTimestamp();
+}
+
+tcu::TestStatus TimestampTestInstance::verifyTimestamp(void)
+{
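+	// Timestamps are written in command order, so each value must be at least as large as every value written before it.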
+	for (deUint32 first = 0; first < m_stages.size(); first++)
+	{
+		for (deUint32 second = 0; second < first; second++)
+		{
+			if(m_timestampValues[first] < m_timestampValues[second])
+			{
+				return tcu::TestStatus::fail("Timestamp from a later stage is smaller than one from an earlier stage.");
+			}
+		}
+	}
+
+	return tcu::TestStatus::pass("Timestamps increase monotonically.");
+}
+
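+// Creates a buffer backed by freshly allocated host-visible memory and returns the allocation through pAlloc so the caller keeps it alive.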
+Move<VkBuffer> TimestampTestInstance::createBufferAndBindMemory(VkDeviceSize size, VkBufferUsageFlags usage, de::MovePtr<Allocation>* pAlloc)
+{
+	const DeviceInterface&      vk                  = m_context.getDeviceInterface();
+	const VkDevice              vkDevice            = m_context.getDevice();
+	const deUint32              queueFamilyIndex    = m_context.getUniversalQueueFamilyIndex();
+	SimpleAllocator             memAlloc            (vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
+
+	const VkBufferCreateInfo vertexBufferParams =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,       // VkStructureType      sType;
+		DE_NULL,                                    // const void*          pNext;
+		0u,                                         // VkBufferCreateFlags  flags;
+		size,                                       // VkDeviceSize         size;
+		usage,                                      // VkBufferUsageFlags   usage;
+		VK_SHARING_MODE_EXCLUSIVE,                  // VkSharingMode        sharingMode;
+		1u,                                         // deUint32             queueFamilyIndexCount;
+		&queueFamilyIndex                           // const deUint32*      pQueueFamilyIndices;
+	};
+
+	Move<VkBuffer> vertexBuffer = createBuffer(vk, vkDevice, &vertexBufferParams);
+	de::MovePtr<Allocation> vertexBufferAlloc = memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *vertexBuffer), MemoryRequirement::HostVisible);
+
+	VK_CHECK(vk.bindBufferMemory(vkDevice, *vertexBuffer, vertexBufferAlloc->getMemory(), vertexBufferAlloc->getOffset()));
+
+	DE_ASSERT(pAlloc);
+	*pAlloc = vertexBufferAlloc;
+
+	return vertexBuffer;
+}
+
+Move<VkImage> TimestampTestInstance::createImage2DAndBindMemory(VkFormat                          format,
+																deUint32                          width,
+																deUint32                          height,
+																VkImageUsageFlags                 usage,
+																VkSampleCountFlagBits             sampleCount,
+																de::details::MovePtr<Allocation>* pAlloc)
+{
+	const DeviceInterface&      vk                  = m_context.getDeviceInterface();
+	const VkDevice              vkDevice            = m_context.getDevice();
+	const deUint32              queueFamilyIndex    = m_context.getUniversalQueueFamilyIndex();
+	SimpleAllocator             memAlloc            (vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
+
+	// Optimal tiling feature check
+	VkFormatProperties          formatProperty;
+	m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), format, &formatProperty);
+	if((usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(formatProperty.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
+	{
+		// Remove color attachment usage if the optimal tiling feature does not support it
+		usage &= ~VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+	}
+	if((usage & VK_IMAGE_USAGE_STORAGE_BIT) && !(formatProperty.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
+	{
+		// Remove storage usage if the optimal tiling feature does not support it
+		usage &= ~VK_IMAGE_USAGE_STORAGE_BIT;
+	}
+
+	const VkImageCreateInfo colorImageParams =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,                                        // VkStructureType      sType;
+		DE_NULL,                                                                    // const void*          pNext;
+		0u,                                                                         // VkImageCreateFlags   flags;
+		VK_IMAGE_TYPE_2D,                                                           // VkImageType          imageType;
+		format,                                                                     // VkFormat             format;
+		{ width, height, 1u },                                                      // VkExtent3D           extent;
+		1u,                                                                         // deUint32             mipLevels;
+		1u,                                                                         // deUint32             arrayLayers;
+		sampleCount,                                                                // VkSampleCountFlagBits samples;
+		VK_IMAGE_TILING_OPTIMAL,                                                    // VkImageTiling        tiling;
+		usage,                                                                      // VkImageUsageFlags    usage;
+		VK_SHARING_MODE_EXCLUSIVE,                                                  // VkSharingMode        sharingMode;
+		1u,                                                                         // deUint32             queueFamilyIndexCount;
+		&queueFamilyIndex,                                                          // const deUint32*      pQueueFamilyIndices;
+		VK_IMAGE_LAYOUT_UNDEFINED,                                                  // VkImageLayout        initialLayout;
+	};
+
+	Move<VkImage> image = createImage(vk, vkDevice, &colorImageParams);
+
+	// Allocate and bind image memory
+	de::MovePtr<Allocation> colorImageAlloc = memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *image), MemoryRequirement::Any);
+	VK_CHECK(vk.bindImageMemory(vkDevice, *image, colorImageAlloc->getMemory(), colorImageAlloc->getOffset()));
+
+	DE_ASSERT(pAlloc);
+	*pAlloc = colorImageAlloc;
+
+	return image;
+}
+
+class BasicGraphicsTest : public TimestampTest
+{
+public:
+						  BasicGraphicsTest(tcu::TestContext&         testContext,
+											const std::string&        name,
+											const std::string&        description,
+											const TimestampTestParam* param)
+							  : TimestampTest (testContext, name, description, param)
+							  { }
+	virtual               ~BasicGraphicsTest (void) { }
+	virtual void          initPrograms       (SourceCollections&      programCollection) const;
+	virtual TestInstance* createInstance     (Context&                context) const;
+};
+
+class BasicGraphicsTestInstance : public TimestampTestInstance
+{
+public:
+	enum
+	{
+		VK_MAX_SHADER_STAGES = 6,
+	};
+				 BasicGraphicsTestInstance  (Context&              context,
+											 const StageFlagVector stages,
+											 const bool            inRenderPass);
+	virtual      ~BasicGraphicsTestInstance (void);
+protected:
+	virtual void configCommandBuffer        (void);
+	virtual void buildVertexBuffer          (void);
+	virtual void buildRenderPass            (VkFormat colorFormat,
+											 VkFormat depthFormat);
+	virtual void buildFrameBuffer           (tcu::UVec2 renderSize,
+											 VkFormat colorFormat,
+											 VkFormat depthFormat);
+protected:
+	const tcu::UVec2                    m_renderSize;
+	const VkFormat                      m_colorFormat;
+	const VkFormat                      m_depthFormat;
+
+	Move<VkImage>                       m_colorImage;
+	de::MovePtr<Allocation>             m_colorImageAlloc;
+	Move<VkImage>                       m_depthImage;
+	de::MovePtr<Allocation>             m_depthImageAlloc;
+	Move<VkImageView>                   m_colorAttachmentView;
+	Move<VkImageView>                   m_depthAttachmentView;
+	Move<VkRenderPass>                  m_renderPass;
+	Move<VkFramebuffer>                 m_framebuffer;
+
+	de::MovePtr<Allocation>             m_vertexBufferAlloc;
+	Move<VkBuffer>                      m_vertexBuffer;
+	std::vector<Vertex4RGBA>            m_vertices;
+
+	SimpleGraphicsPipelineBuilder       m_pipelineBuilder;
+	Move<VkPipeline>                    m_graphicsPipelines;
+};
+
+void BasicGraphicsTest::initPrograms (SourceCollections& programCollection) const
+{
+	programCollection.glslSources.add("color_vert") << glu::VertexSource(
+		"#version 310 es\n"
+		"layout(location = 0) in vec4 position;\n"
+		"layout(location = 1) in vec4 color;\n"
+		"layout(location = 0) out highp vec4 vtxColor;\n"
+		"void main (void)\n"
+		"{\n"
+		"  gl_Position = position;\n"
+		"  vtxColor = color;\n"
+		"}\n");
+
+	programCollection.glslSources.add("color_frag") << glu::FragmentSource(
+		"#version 310 es\n"
+		"layout(location = 0) in highp vec4 vtxColor;\n"
+		"layout(location = 0) out highp vec4 fragColor;\n"
+		"void main (void)\n"
+		"{\n"
+		"  fragColor = vtxColor;\n"
+		"}\n");
+}
+
+TestInstance* BasicGraphicsTest::createInstance(Context& context) const
+{
+	return new BasicGraphicsTestInstance(context,m_stages,m_inRenderPass);
+}
+
+void BasicGraphicsTestInstance::buildVertexBuffer(void)
+{
+	const DeviceInterface&      vk       = m_context.getDeviceInterface();
+	const VkDevice              vkDevice = m_context.getDevice();
+
+	// Create vertex buffer
+	{
+		m_vertexBuffer = createBufferAndBindMemory(1024u, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, &m_vertexBufferAlloc);
+
+		m_vertices          = createOverlappingQuads();
+		// Load vertices into vertex buffer
+		deMemcpy(m_vertexBufferAlloc->getHostPtr(), m_vertices.data(), m_vertices.size() * sizeof(Vertex4RGBA));
+		flushMappedMemoryRange(vk, vkDevice, m_vertexBufferAlloc->getMemory(), m_vertexBufferAlloc->getOffset(), 1024u);
+	}
+}
+
+void BasicGraphicsTestInstance::buildRenderPass(VkFormat colorFormat, VkFormat depthFormat)
+{
+	const DeviceInterface&      vk       = m_context.getDeviceInterface();
+	const VkDevice              vkDevice = m_context.getDevice();
+
+	// Create render pass
+	{
+		const VkAttachmentDescription colorAttachmentDescription =
+		{
+			0u,                                                 // VkAttachmentDescriptionFlags    flags;
+			colorFormat,                                        // VkFormat                        format;
+			VK_SAMPLE_COUNT_1_BIT,                              // VkSampleCountFlagBits           samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,                        // VkAttachmentLoadOp              loadOp;
+			VK_ATTACHMENT_STORE_OP_STORE,                       // VkAttachmentStoreOp             storeOp;
+			VK_ATTACHMENT_LOAD_OP_DONT_CARE,                    // VkAttachmentLoadOp              stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,                   // VkAttachmentStoreOp             stencilStoreOp;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,           // VkImageLayout                   initialLayout;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,           // VkImageLayout                   finalLayout;
+		};
+
+		const VkAttachmentDescription depthAttachmentDescription =
+		{
+			0u,                                                 // VkAttachmentDescriptionFlags flags;
+			depthFormat,                                        // VkFormat                     format;
+			VK_SAMPLE_COUNT_1_BIT,                              // VkSampleCountFlagBits        samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,                        // VkAttachmentLoadOp           loadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,                   // VkAttachmentStoreOp          storeOp;
+			VK_ATTACHMENT_LOAD_OP_DONT_CARE,                    // VkAttachmentLoadOp           stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,                   // VkAttachmentStoreOp          stencilStoreOp;
+			VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,   // VkImageLayout                initialLayout;
+			VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,   // VkImageLayout                finalLayout;
+		};
+
+		const VkAttachmentDescription attachments[2] =
+		{
+			colorAttachmentDescription,
+			depthAttachmentDescription
+		};
+
+		const VkAttachmentReference colorAttachmentReference =
+		{
+			0u,                                                 // deUint32         attachment;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL            // VkImageLayout    layout;
+		};
+
+		const VkAttachmentReference depthAttachmentReference =
+		{
+			1u,                                                 // deUint32         attachment;
+			VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL    // VkImageLayout    layout;
+		};
+
+		const VkSubpassDescription subpassDescription =
+		{
+			0u,                                                 // VkSubpassDescriptionFlags        flags;
+			VK_PIPELINE_BIND_POINT_GRAPHICS,                    // VkPipelineBindPoint              pipelineBindPoint;
+			0u,                                                 // deUint32                         inputAttachmentCount;
+			DE_NULL,                                            // const VkAttachmentReference*     pInputAttachments;
+			1u,                                                 // deUint32                         colorAttachmentCount;
+			&colorAttachmentReference,                          // const VkAttachmentReference*     pColorAttachments;
+			DE_NULL,                                            // const VkAttachmentReference*     pResolveAttachments;
+			&depthAttachmentReference,                          // const VkAttachmentReference*     pDepthStencilAttachment;
+			0u,                                                 // deUint32                         preserveAttachmentCount;
+			DE_NULL                                             // const VkAttachmentReference*     pPreserveAttachments;
+		};
+
+		const VkRenderPassCreateInfo renderPassParams =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,          // VkStructureType                  sType;
+			DE_NULL,                                            // const void*                      pNext;
+			0u,                                                 // VkRenderPassCreateFlags          flags;
+			2u,                                                 // deUint32                         attachmentCount;
+			attachments,                                        // const VkAttachmentDescription*   pAttachments;
+			1u,                                                 // deUint32                         subpassCount;
+			&subpassDescription,                                // const VkSubpassDescription*      pSubpasses;
+			0u,                                                 // deUint32                         dependencyCount;
+			DE_NULL                                             // const VkSubpassDependency*       pDependencies;
+		};
+
+		m_renderPass = createRenderPass(vk, vkDevice, &renderPassParams);
+	}
+
+}
+
+void BasicGraphicsTestInstance::buildFrameBuffer(tcu::UVec2 renderSize, VkFormat colorFormat, VkFormat depthFormat)
+{
+	const DeviceInterface&      vk                   = m_context.getDeviceInterface();
+	const VkDevice              vkDevice             = m_context.getDevice();
+	const VkComponentMapping    ComponentMappingRGBA = { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A};
+
+	// Create color image
+	{
+		m_colorImage = createImage2DAndBindMemory(colorFormat,
+												  renderSize.x(),
+												  renderSize.y(),
+												  VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+												  VK_SAMPLE_COUNT_1_BIT,
+												  &m_colorImageAlloc);
+	}
+
+	// Create depth image
+	{
+		m_depthImage = createImage2DAndBindMemory(depthFormat,
+												  renderSize.x(),
+												  renderSize.y(),
+												  VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+												  VK_SAMPLE_COUNT_1_BIT,
+												  &m_depthImageAlloc);
+	}
+
+	// Create color attachment view
+	{
+		const VkImageViewCreateInfo colorAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,       // VkStructureType          sType;
+			DE_NULL,                                        // const void*              pNext;
+			0u,                                             // VkImageViewCreateFlags   flags;
+			*m_colorImage,                                  // VkImage                  image;
+			VK_IMAGE_VIEW_TYPE_2D,                          // VkImageViewType          viewType;
+			colorFormat,                                    // VkFormat                 format;
+			ComponentMappingRGBA,                           // VkComponentMapping       components;
+			{ VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u },  // VkImageSubresourceRange  subresourceRange;
+		};
+
+		m_colorAttachmentView = createImageView(vk, vkDevice, &colorAttachmentViewParams);
+	}
+
+	// Create depth attachment view
+	{
+		const VkImageViewCreateInfo depthAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,       // VkStructureType          sType;
+			DE_NULL,                                        // const void*              pNext;
+			0u,                                             // VkImageViewCreateFlags   flags;
+			*m_depthImage,                                  // VkImage                  image;
+			VK_IMAGE_VIEW_TYPE_2D,                          // VkImageViewType          viewType;
+			depthFormat,                                    // VkFormat                 format;
+			ComponentMappingRGBA,                           // VkComponentMapping       components;
+			{ VK_IMAGE_ASPECT_DEPTH_BIT, 0u, 1u, 0u, 1u },  // VkImageSubresourceRange  subresourceRange;
+		};
+
+		m_depthAttachmentView = createImageView(vk, vkDevice, &depthAttachmentViewParams);
+	}
+
+	// Create framebuffer
+	{
+		const VkImageView attachmentBindInfos[2] =
+		{
+			*m_colorAttachmentView,
+			*m_depthAttachmentView,
+		};
+
+		const VkFramebufferCreateInfo framebufferParams =
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,          // VkStructureType              sType;
+			DE_NULL,                                            // const void*                  pNext;
+			0u,                                                 // VkFramebufferCreateFlags     flags;
+			*m_renderPass,                                      // VkRenderPass                 renderPass;
+			2u,                                                 // deUint32                     attachmentCount;
+			attachmentBindInfos,                                // const VkImageView*           pAttachments;
+			(deUint32)renderSize.x(),                           // deUint32                     width;
+			(deUint32)renderSize.y(),                           // deUint32                     height;
+			1u,                                                 // deUint32                     layers;
+		};
+
+		m_framebuffer = createFramebuffer(vk, vkDevice, &framebufferParams);
+	}
+
+}
+
+BasicGraphicsTestInstance::BasicGraphicsTestInstance(Context&              context,
+													 const StageFlagVector stages,
+													 const bool            inRenderPass)
+													 : TimestampTestInstance (context,stages,inRenderPass)
+													 , m_renderSize  (32, 32)
+													 , m_colorFormat (VK_FORMAT_R8G8B8A8_UNORM)
+													 , m_depthFormat (VK_FORMAT_D16_UNORM)
+													 , m_pipelineBuilder (context)
+{
+	buildVertexBuffer();
+
+	buildRenderPass(m_colorFormat, m_depthFormat);
+
+	buildFrameBuffer(m_renderSize, m_colorFormat, m_depthFormat);
+
+	m_pipelineBuilder.bindShaderStage(VK_SHADER_STAGE_VERTEX_BIT, "color_vert", "main");
+	m_pipelineBuilder.bindShaderStage(VK_SHADER_STAGE_FRAGMENT_BIT, "color_frag", "main");
+
+	m_graphicsPipelines = m_pipelineBuilder.buildPipeline(m_renderSize, *m_renderPass);
+
+}
+
+BasicGraphicsTestInstance::~BasicGraphicsTestInstance(void)
+{
+}
+
+void BasicGraphicsTestInstance::configCommandBuffer(void)
+{
+	const DeviceInterface&      vk                  = m_context.getDeviceInterface();
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,    // VkStructureType                  sType;
+		DE_NULL,                                        // const void*                      pNext;
+		0u,                                             // VkCommandBufferUsageFlags        flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL, // const VkCommandBufferInheritanceInfo* pInheritanceInfo;
+	};
+
+	const VkClearValue attachmentClearValues[2] =
+	{
+		defaultClearValue(m_colorFormat),
+		defaultClearValue(m_depthFormat),
+	};
+
+	const VkRenderPassBeginInfo renderPassBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,               // VkStructureType      sType;
+		DE_NULL,                                                // const void*          pNext;
+		*m_renderPass,                                          // VkRenderPass         renderPass;
+		*m_framebuffer,                                         // VkFramebuffer        framebuffer;
+		{ { 0u, 0u }, { m_renderSize.x(), m_renderSize.y() } }, // VkRect2D             renderArea;
+		2u,                                                     // deUint32             clearValueCount;
+		attachmentClearValues                                   // const VkClearValue*  pClearValues;
+	};
+
+	VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+
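+	// Queries must be reset before new timestamp values can be written to them.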
+	vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
+
+	vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+	vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipelines);
+	VkDeviceSize offsets = 0u;
+	vk.cmdBindVertexBuffers(*m_cmdBuffer, 0u, 1u, &m_vertexBuffer.get(), &offsets);
+	vk.cmdDraw(*m_cmdBuffer, (deUint32)m_vertices.size(), 1u, 0u, 0u);
+
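+	// Depending on the test variant, the timestamps are written either inside the render pass or after it has ended.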
+	if(m_inRenderPass)
+	{
+	  deUint32 timestampEntry = 0u;
+	  for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
+	  {
+		  vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
+	  }
+	}
+
+	vk.cmdEndRenderPass(*m_cmdBuffer);
+
+	if(!m_inRenderPass)
+	{
+	  deUint32 timestampEntry = 0u;
+	  for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
+	  {
+		  vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
+	  }
+	}
+
+	VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+}
+
+class AdvGraphicsTest : public BasicGraphicsTest
+{
+public:
+						  AdvGraphicsTest  (tcu::TestContext&         testContext,
+											const std::string&        name,
+											const std::string&        description,
+											const TimestampTestParam* param)
+							  : BasicGraphicsTest(testContext, name, description, param)
+							  { }
+	virtual               ~AdvGraphicsTest (void) { }
+	virtual void          initPrograms     (SourceCollections&        programCollection) const;
+	virtual TestInstance* createInstance   (Context&                  context) const;
+};
+
+class AdvGraphicsTestInstance : public BasicGraphicsTestInstance
+{
+public:
+				 AdvGraphicsTestInstance  (Context&              context,
+										   const StageFlagVector stages,
+										   const bool            inRenderPass);
+	virtual      ~AdvGraphicsTestInstance (void);
+	virtual void configCommandBuffer      (void);
+protected:
+	virtual void featureSupportCheck      (void);
+protected:
+	VkPhysicalDeviceFeatures m_features;
+	deUint32                 m_draw_count;
+	de::MovePtr<Allocation>  m_indirectBufferAlloc;
+	Move<VkBuffer>           m_indirectBuffer;
+};
+
+void AdvGraphicsTest::initPrograms(SourceCollections& programCollection) const
+{
+	BasicGraphicsTest::initPrograms(programCollection);
+
+	programCollection.glslSources.add("dummy_geo") << glu::GeometrySource(
+		"#version 450 \n"
+		"layout (triangles) in;\n"
+		"layout (triangle_strip, max_vertices = 3) out;\n"
+		"void main (void)\n"
+		"{\n"
+		"  for(int ndx=0; ndx<3; ndx++)\n"
+		"  {\n"
+		"    gl_Position = gl_in[ndx].gl_Position;\n"
+		"    EmitVertex();\n"
+		"  }\n"
+		"  EndPrimitive();\n"
+		"}\n");
+
+	programCollection.glslSources.add("basic_tcs") << glu::TessellationControlSource(
+		"#version 450 \n"
+		"layout(vertices = 3) out;\n"
+		"layout(location = 0) in highp vec4 color[];\n"
+		"layout(location = 0) out highp vec4 vtxColor[];\n"
+		"void main()\n"
+		"{\n"
+		"  gl_TessLevelOuter[0] = 4.0;\n"
+		"  gl_TessLevelOuter[1] = 4.0;\n"
+		"  gl_TessLevelOuter[2] = 4.0;\n"
+		"  gl_TessLevelInner[0] = 4.0;\n"
+		"  gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
+		"  vtxColor[gl_InvocationID] = color[gl_InvocationID];\n"
+		"}\n");
+
+	programCollection.glslSources.add("basic_tes") << glu::TessellationEvaluationSource(
+		"#version 450 \n"
+		"layout(triangles, fractional_even_spacing, ccw) in;\n"
+		"layout(location = 0) in highp vec4 colors[];\n"
+		"layout(location = 0) out highp vec4 vtxColor;\n"
+		"void main() \n"
+		"{\n"
+		"  float u = gl_TessCoord.x;\n"
+		"  float v = gl_TessCoord.y;\n"
+		"  float w = gl_TessCoord.z;\n"
+		"  vec4 pos = vec4(0);\n"
+		"  vec4 color = vec4(0);\n"
+		"  pos.xyz += u * gl_in[0].gl_Position.xyz;\n"
+		"  color.xyz += u * colors[0].xyz;\n"
+		"  pos.xyz += v * gl_in[1].gl_Position.xyz;\n"
+		"  color.xyz += v * colors[1].xyz;\n"
+		"  pos.xyz += w * gl_in[2].gl_Position.xyz;\n"
+		"  color.xyz += w * colors[2].xyz;\n"
+		"  pos.w = 1.0;\n"
+		"  color.w = 1.0;\n"
+		"  gl_Position = pos;\n"
+		"  vtxColor = color;\n"
+		"}\n");
+}
+
+TestInstance* AdvGraphicsTest::createInstance(Context& context) const
+{
+	return new AdvGraphicsTestInstance(context,m_stages,m_inRenderPass);
+}
+
+void AdvGraphicsTestInstance::featureSupportCheck(void)
+{
+	for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
+	{
+		switch(*it)
+		{
+			case VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT:
+				if (m_features.geometryShader == VK_FALSE)
+				{
+					TCU_THROW(NotSupportedError, "Geometry Shader Not Supported");
+				}
+				break;
+			case VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT:
+			case VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT:
+				if (m_features.tessellationShader == VK_FALSE)
+				{
+					TCU_THROW(NotSupportedError, "Tessellation Not Supported");
+				}
+				break;
+			case VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT:
+			default:
+				break;
+		};
+	}
+}
+
+AdvGraphicsTestInstance::AdvGraphicsTestInstance(Context&              context,
+												 const StageFlagVector stages,
+												 const bool            inRenderPass)
+	: BasicGraphicsTestInstance(context, stages, inRenderPass)
+{
+	m_features = m_context.getDeviceFeatures();
+
+	// If necessary feature is not supported, throw error and fail current test
+	featureSupportCheck();
+
+	if(m_features.geometryShader == VK_TRUE)
+	{
+		m_pipelineBuilder.bindShaderStage(VK_SHADER_STAGE_GEOMETRY_BIT, "dummy_geo", "main");
+	}
+
+	if(m_features.tessellationShader == VK_TRUE)
+	{
+		m_pipelineBuilder.bindShaderStage(VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, "basic_tcs", "main");
+		m_pipelineBuilder.bindShaderStage(VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, "basic_tes", "main");
+		m_pipelineBuilder.enableTessellationStage(3);
+	}
+
+	m_graphicsPipelines = m_pipelineBuilder.buildPipeline(m_renderSize, *m_renderPass);
+
+	// Prepare the indirect draw buffer
+	const DeviceInterface&      vk                  = m_context.getDeviceInterface();
+	const VkDevice              vkDevice            = m_context.getDevice();
+
+	if(m_features.multiDrawIndirect == VK_TRUE)
+	{
+		m_draw_count = 2;
+	}
+	else
+	{
+		m_draw_count = 1;
+	}
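+	// Two indirect commands are recorded when multiDrawIndirect is supported; otherwise only the first command is consumed.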
+	m_indirectBuffer = createBufferAndBindMemory(32u, VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT, &m_indirectBufferAlloc);
+
+	const VkDrawIndirectCommand indirectCmds[] =
+	{
+		{
+			12u,                    // deUint32    vertexCount;
+			1u,                     // deUint32    instanceCount;
+			0u,                     // deUint32    firstVertex;
+			0u,                     // deUint32    firstInstance;
+		},
+		{
+			12u,                    // deUint32    vertexCount;
+			1u,                     // deUint32    instanceCount;
+			11u,                    // deUint32    firstVertex;
+			0u,                     // deUint32    firstInstance;
+		},
+	};
+	// Load data into indirect draw buffer
+	deMemcpy(m_indirectBufferAlloc->getHostPtr(), indirectCmds, m_draw_count * sizeof(VkDrawIndirectCommand));
+	flushMappedMemoryRange(vk, vkDevice, m_indirectBufferAlloc->getMemory(), m_indirectBufferAlloc->getOffset(), 32u);
+
+}
+
+AdvGraphicsTestInstance::~AdvGraphicsTestInstance(void)
+{
+}
+
+void AdvGraphicsTestInstance::configCommandBuffer(void)
+{
+	const DeviceInterface&      vk                  = m_context.getDeviceInterface();
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,    // VkStructureType              sType;
+		DE_NULL,                                        // const void*                  pNext;
+		0u,                                             // VkCommandBufferUsageFlags    flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL, // const VkCommandBufferInheritanceInfo* pInheritanceInfo;
+	};
+
+	const VkClearValue attachmentClearValues[2] =
+	{
+		defaultClearValue(m_colorFormat),
+		defaultClearValue(m_depthFormat),
+	};
+
+	const VkRenderPassBeginInfo renderPassBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,               // VkStructureType      sType;
+		DE_NULL,                                                // const void*          pNext;
+		*m_renderPass,                                          // VkRenderPass         renderPass;
+		*m_framebuffer,                                         // VkFramebuffer        framebuffer;
+		{ { 0u, 0u }, { m_renderSize.x(), m_renderSize.y() } }, // VkRect2D             renderArea;
+		2u,                                                     // deUint32             clearValueCount;
+		attachmentClearValues                                   // const VkClearValue*  pClearValues;
+	};
+
+	VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+
+	vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
+
+	vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+	vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipelines);
+
+	VkDeviceSize offsets = 0u;
+	vk.cmdBindVertexBuffers(*m_cmdBuffer, 0u, 1u, &m_vertexBuffer.get(), &offsets);
+
+	vk.cmdDrawIndirect(*m_cmdBuffer, *m_indirectBuffer, 0u, m_draw_count, sizeof(VkDrawIndirectCommand));
+
+	if(m_inRenderPass)
+	{
+	  deUint32 timestampEntry = 0u;
+	  for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
+	  {
+		  vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
+	  }
+	}
+
+	vk.cmdEndRenderPass(*m_cmdBuffer);
+
+	if(!m_inRenderPass)
+	{
+	  deUint32 timestampEntry = 0u;
+	  for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
+	  {
+		  vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
+	  }
+	}
+
+	VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+
+}
+
+class BasicComputeTest : public TimestampTest
+{
+public:
+						  BasicComputeTest  (tcu::TestContext&         testContext,
+											 const std::string&        name,
+											 const std::string&        description,
+											 const TimestampTestParam* param)
+							  : TimestampTest(testContext, name, description, param)
+							  { }
+	virtual               ~BasicComputeTest (void) { }
+	virtual void          initPrograms      (SourceCollections&        programCollection) const;
+	virtual TestInstance* createInstance    (Context&                  context) const;
+};
+
+class BasicComputeTestInstance : public TimestampTestInstance
+{
+public:
+				 BasicComputeTestInstance  (Context&              context,
+											const StageFlagVector stages,
+											const bool            inRenderPass);
+	virtual      ~BasicComputeTestInstance (void);
+	virtual void configCommandBuffer       (void);
+protected:
+	de::MovePtr<Allocation>     m_inputBufAlloc;
+	Move<VkBuffer>              m_inputBuf;
+	de::MovePtr<Allocation>     m_outputBufAlloc;
+	Move<VkBuffer>              m_outputBuf;
+
+	Move<VkDescriptorPool>      m_descriptorPool;
+	Move<VkDescriptorSet>       m_descriptorSet;
+	Move<VkDescriptorSetLayout> m_descriptorSetLayout;
+
+	Move<VkPipelineLayout>      m_pipelineLayout;
+	Move<VkShaderModule>        m_computeShaderModule;
+	Move<VkPipeline>            m_computePipelines;
+};
+
+void BasicComputeTest::initPrograms(SourceCollections& programCollection) const
+{
+	TimestampTest::initPrograms(programCollection);
+
+	programCollection.glslSources.add("basic_compute") << glu::ComputeSource(
+		"#version 310 es\n"
+		"layout(local_size_x = 128) in;\n"
+		"layout(std430) buffer;\n"
+		"layout(binding = 0) readonly buffer Input0\n"
+		"{\n"
+		"  vec4 elements[];\n"
+		"} input_data0;\n"
+		"layout(binding = 1) writeonly buffer Output\n"
+		"{\n"
+		"  vec4 elements[];\n"
+		"} output_data;\n"
+		"void main()\n"
+		"{\n"
+		"  uint ident = gl_GlobalInvocationID.x;\n"
+		"  output_data.elements[ident] = input_data0.elements[ident] * input_data0.elements[ident];\n"
+		"}");
+}
+
+TestInstance* BasicComputeTest::createInstance(Context& context) const
+{
+	return new BasicComputeTestInstance(context,m_stages,m_inRenderPass);
+}
+
+BasicComputeTestInstance::BasicComputeTestInstance(Context&              context,
+												   const StageFlagVector stages,
+												   const bool            inRenderPass)
+	: TimestampTestInstance(context, stages, inRenderPass)
+{
+	const DeviceInterface&      vk                  = context.getDeviceInterface();
+	const VkDevice              vkDevice            = context.getDevice();
+
+	// Create buffer object, allocate storage, and generate input data
+	const VkDeviceSize          size                = sizeof(tcu::Vec4) * 128u;
+	m_inputBuf = createBufferAndBindMemory(size, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, &m_inputBufAlloc);
+	// Load vertices into buffer
+	tcu::Vec4* pVec = reinterpret_cast<tcu::Vec4*>(m_inputBufAlloc->getHostPtr());
+	for (deUint32 ndx = 0u; ndx < 128u; ndx++)
+	{
+		for (deUint32 component = 0u; component < 4u; component++)
+		{
+			pVec[ndx][component]= (float)(ndx * (component + 1u));
+		}
+	}
+	flushMappedMemoryRange(vk, vkDevice, m_inputBufAlloc->getMemory(), m_inputBufAlloc->getOffset(), size);
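+	// The explicit flush makes the host writes visible to the device even if the allocation is not host-coherent.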
+
+	m_outputBuf = createBufferAndBindMemory(size, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT, &m_outputBufAlloc);
+
+	std::vector<VkDescriptorBufferInfo>        descriptorInfos;
+	descriptorInfos.push_back(makeDescriptorBufferInfo(*m_inputBuf, 0u, sizeof(tcu::Vec4) * 128u));
+	descriptorInfos.push_back(makeDescriptorBufferInfo(*m_outputBuf, 0u, sizeof(tcu::Vec4) * 128u));
+
+	// Create descriptor set layout
+	DescriptorSetLayoutBuilder descLayoutBuilder;
+
+	for (deUint32 bindingNdx = 0u; bindingNdx < 2u; bindingNdx++)
+	{
+		descLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT);
+	}
+
+	m_descriptorSetLayout = descLayoutBuilder.build(vk, vkDevice);
+
+	// Create descriptor pool
+	m_descriptorPool = DescriptorPoolBuilder().addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 2).build(vk, vkDevice, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+
+	// Create descriptor set
+	const VkDescriptorSetAllocateInfo descriptorSetAllocInfo =
+	{
+		VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,     // VkStructureType                 sType;
+		DE_NULL,                                            // const void*                     pNext;
+		*m_descriptorPool,                                  // VkDescriptorPool                descriptorPool;
+		1u,                                                 // deUint32                        descriptorSetCount;
+		&m_descriptorSetLayout.get(),                       // const VkDescriptorSetLayout*    pSetLayouts;
+	};
+	m_descriptorSet   = allocateDescriptorSet(vk, vkDevice, &descriptorSetAllocInfo);
+
+	DescriptorSetUpdateBuilder  builder;
+	for (deUint32 descriptorNdx = 0u; descriptorNdx < 2u; descriptorNdx++)
+	{
+		builder.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(descriptorNdx), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &descriptorInfos[descriptorNdx]);
+	}
+	builder.update(vk, vkDevice);
+
+	// Create compute pipeline layout
+	const VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,  // VkStructureType                 sType;
+		DE_NULL,                                        // const void*                     pNext;
+		0u,                                             // VkPipelineLayoutCreateFlags     flags;
+		1u,                                             // deUint32                        setLayoutCount;
+		&m_descriptorSetLayout.get(),                   // const VkDescriptorSetLayout*    pSetLayouts;
+		0u,                                             // deUint32                        pushConstantRangeCount;
+		DE_NULL,                                        // const VkPushConstantRange*      pPushConstantRanges;
+	};
+
+	m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutCreateInfo);
+
+	// Create compute shader
+	VkShaderModuleCreateInfo shaderModuleCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,                        // VkStructureType             sType;
+		DE_NULL,                                                            // const void*                 pNext;
+		0u,                                                                 // VkShaderModuleCreateFlags   flags;
+		m_context.getBinaryCollection().get("basic_compute").getSize(),     // deUintptr                   codeSize;
+		(const deUint32*)m_context.getBinaryCollection().get("basic_compute").getBinary(),  // const deUint32*             pCode;
+	};
+
+	m_computeShaderModule = createShaderModule(vk, vkDevice, &shaderModuleCreateInfo);
+
+	// Create compute pipeline
+	const VkPipelineShaderStageCreateInfo stageCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO, // VkStructureType                     sType;
+		DE_NULL,                                             // const void*                         pNext;
+		0u,                                                  // VkPipelineShaderStageCreateFlags    flags;
+		VK_SHADER_STAGE_COMPUTE_BIT,                         // VkShaderStageFlagBits               stage;
+		*m_computeShaderModule,                              // VkShaderModule                      module;
+		"main",                                              // const char*                         pName;
+		DE_NULL,                                             // const VkSpecializationInfo*         pSpecializationInfo;
+	};
+
+	const VkComputePipelineCreateInfo pipelineCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,      // VkStructureType                 sType;
+		DE_NULL,                                             // const void*                     pNext;
+		0u,                                                  // VkPipelineCreateFlags           flags;
+		stageCreateInfo,                                     // VkPipelineShaderStageCreateInfo stage;
+		*m_pipelineLayout,                                   // VkPipelineLayout                layout;
+		(VkPipeline)0,                                       // VkPipeline                      basePipelineHandle;
+		0u,                                                  // deInt32                         basePipelineIndex;
+	};
+
+	m_computePipelines = createComputePipeline(vk, vkDevice, (VkPipelineCache)0u, &pipelineCreateInfo);
+
+}
+
+BasicComputeTestInstance::~BasicComputeTestInstance(void)
+{
+}
+
+void BasicComputeTestInstance::configCommandBuffer(void)
+{
+	const DeviceInterface&     vk                 = m_context.getDeviceInterface();
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,    // VkStructureType          sType;
+		DE_NULL,                                        // const void*              pNext;
+		0u,                                             // VkCommandBufferUsageFlags flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL, // const VkCommandBufferInheritanceInfo* pInheritanceInfo;
+	};
+
+	VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+
+	vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
+
+	vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_computePipelines);
+	vk.cmdBindDescriptorSets(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &m_descriptorSet.get(), 0u, DE_NULL);
+	vk.cmdDispatch(*m_cmdBuffer, 128u, 1u, 1u);
+
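+	// Compute work is recorded outside a render pass, so the timestamps are always written after the dispatch.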
+	deUint32 timestampEntry = 0u;
+	for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
+	{
+		vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
+	}
+
+	VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+
+}
+
+class TransferTest : public TimestampTest
+{
+public:
+						  TransferTest   (tcu::TestContext&          testContext,
+										  const std::string&         name,
+										  const std::string&         description,
+										  const TimestampTestParam*  param);
+	virtual               ~TransferTest  (void) { }
+	virtual void          initPrograms   (SourceCollections&         programCollection) const;
+	virtual TestInstance* createInstance (Context&                   context) const;
+protected:
+	TransferMethod        m_method;
+};
+
+class TransferTestInstance : public TimestampTestInstance
+{
+public:
+					TransferTestInstance	(Context&					context,
+											 const StageFlagVector		stages,
+											 const bool					inRenderPass,
+											 const TransferMethod		method);
+	virtual         ~TransferTestInstance	(void);
+	virtual void    configCommandBuffer		(void);
+	virtual void	initialImageTransition	(VkCommandBuffer			cmdBuffer,
+											 VkImage					image,
+											 VkImageSubresourceRange	subRange,
+											 VkImageLayout				layout);
+protected:
+	TransferMethod			m_method;
+
+	VkDeviceSize			m_bufSize;
+	Move<VkBuffer>			m_srcBuffer;
+	Move<VkBuffer>			m_dstBuffer;
+	de::MovePtr<Allocation> m_srcBufferAlloc;
+	de::MovePtr<Allocation> m_dstBufferAlloc;
+
+	VkFormat				m_imageFormat;
+	deInt32					m_imageWidth;
+	deInt32					m_imageHeight;
+	VkDeviceSize			m_imageSize;
+	Move<VkImage>			m_srcImage;
+	Move<VkImage>			m_dstImage;
+	Move<VkImage>			m_depthImage;
+	Move<VkImage>			m_msImage;
+	de::MovePtr<Allocation>	m_srcImageAlloc;
+	de::MovePtr<Allocation>	m_dstImageAlloc;
+	de::MovePtr<Allocation>	m_depthImageAlloc;
+	de::MovePtr<Allocation>	m_msImageAlloc;
+};
+
+TransferTest::TransferTest(tcu::TestContext&                 testContext,
+						   const std::string&                name,
+						   const std::string&                description,
+						   const TimestampTestParam*         param)
+	: TimestampTest(testContext, name, description, param)
+{
+	const TransferTimestampTestParam* transferParam = dynamic_cast<const TransferTimestampTestParam*>(param);
+	m_method = transferParam->getMethod();
+}
+
+void TransferTest::initPrograms(SourceCollections& programCollection) const
+{
+	TimestampTest::initPrograms(programCollection);
+}
+
+TestInstance* TransferTest::createInstance(Context& context) const
+{
+	return new TransferTestInstance(context, m_stages, m_inRenderPass, m_method);
+}
+
+TransferTestInstance::TransferTestInstance(Context&              context,
+										   const StageFlagVector stages,
+										   const bool            inRenderPass,
+										   const TransferMethod  method)
+	: TimestampTestInstance(context, stages, inRenderPass)
+	, m_method(method)
+	, m_bufSize(256u)
+	, m_imageFormat(VK_FORMAT_R8G8B8A8_UNORM)
+	, m_imageWidth(4u)
+	, m_imageHeight(4u)
+	, m_imageSize(256u)
+{
+	const DeviceInterface&      vk                  = context.getDeviceInterface();
+	const VkDevice              vkDevice            = context.getDevice();
+
+	// Create src buffer
+	m_srcBuffer = createBufferAndBindMemory(m_bufSize, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_SRC_BIT, &m_srcBufferAlloc);
+
+	// Init the source buffer memory
+	char* pBuf = reinterpret_cast<char*>(m_srcBufferAlloc->getHostPtr());
+	memset(pBuf, 0xFF, sizeof(char)*(size_t)m_bufSize);
+	flushMappedMemoryRange(vk, vkDevice, m_srcBufferAlloc->getMemory(), m_srcBufferAlloc->getOffset(), m_bufSize);
+
+	// Create dst buffer
+	m_dstBuffer = createBufferAndBindMemory(m_bufSize, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT, &m_dstBufferAlloc);
+
+	// Create src/dst/depth image
+	m_srcImage   = createImage2DAndBindMemory(m_imageFormat, m_imageWidth, m_imageHeight,
+											  VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+											  VK_SAMPLE_COUNT_1_BIT,
+											  &m_srcImageAlloc);
+	m_dstImage   = createImage2DAndBindMemory(m_imageFormat, m_imageWidth, m_imageHeight,
+											  VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+											  VK_SAMPLE_COUNT_1_BIT,
+											  &m_dstImageAlloc);
+	m_depthImage = createImage2DAndBindMemory(VK_FORMAT_D16_UNORM, m_imageWidth, m_imageHeight,
+											  VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+											  VK_SAMPLE_COUNT_1_BIT,
+											  &m_depthImageAlloc);
+	m_msImage    = createImage2DAndBindMemory(m_imageFormat, m_imageWidth, m_imageHeight,
+											  VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+											  VK_SAMPLE_COUNT_4_BIT,
+											  &m_msImageAlloc);
+}
+
+TransferTestInstance::~TransferTestInstance(void)
+{
+}
+
+void TransferTestInstance::configCommandBuffer(void)
+{
+	const DeviceInterface&      vk                  = m_context.getDeviceInterface();
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,    // VkStructureType                  sType;
+		DE_NULL,                                        // const void*                      pNext;
+		0u,                                             // VkCommandBufferUsageFlags        flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL, // const VkCommandBufferInheritanceInfo* pInheritanceInfo;
+	};
+
+	VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+
+	// Initialize buffer/image
+	vk.cmdFillBuffer(*m_cmdBuffer, *m_dstBuffer, 0u, m_bufSize, 0x0);
+
+	const VkClearColorValue srcClearValue =
+	{
+		{1.0f, 1.0f, 1.0f, 1.0f}
+	};
+	const VkClearColorValue dstClearValue =
+	{
+		{0.0f, 0.0f, 0.0f, 0.0f}
+	};
+	const struct VkImageSubresourceRange subRangeColor =
+	{
+		VK_IMAGE_ASPECT_COLOR_BIT,  // VkImageAspectFlags  aspectMask;
+		0u,                         // deUint32            baseMipLevel;
+		1u,                         // deUint32            levelCount;
+		0u,                         // deUint32            baseArrayLayer;
+		1u,                         // deUint32            layerCount;
+	};
+	const struct VkImageSubresourceRange subRangeDepth =
+	{
+		VK_IMAGE_ASPECT_DEPTH_BIT,  // VkImageAspectFlags  aspectMask;
+		0u,                         // deUint32            baseMipLevel;
+		1u,                         // deUint32            levelCount;
+		0u,                         // deUint32            baseArrayLayer;
+		1u,                         // deUint32            layerCount;
+	};
+
+	initialImageTransition(*m_cmdBuffer, *m_srcImage, subRangeColor, VK_IMAGE_LAYOUT_GENERAL);
+	initialImageTransition(*m_cmdBuffer, *m_dstImage, subRangeColor, VK_IMAGE_LAYOUT_GENERAL);
+
+	vk.cmdClearColorImage(*m_cmdBuffer, *m_srcImage, VK_IMAGE_LAYOUT_GENERAL, &srcClearValue, 1u, &subRangeColor);
+	vk.cmdClearColorImage(*m_cmdBuffer, *m_dstImage, VK_IMAGE_LAYOUT_GENERAL, &dstClearValue, 1u, &subRangeColor);
+
+	vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, TimestampTest::ENTRY_COUNT);
+
+	// Copy Operations
+	const VkImageSubresourceLayers imgSubResCopy =
+	{
+		VK_IMAGE_ASPECT_COLOR_BIT,              // VkImageAspectFlags  aspectMask;
+		0u,                                     // deUint32            mipLevel;
+		0u,                                     // deUint32            baseArrayLayer;
+		1u,                                     // deUint32            layerCount;
+	};
+
+	const VkOffset3D nullOffset  = {0u, 0u, 0u};
+	const VkExtent3D imageExtent = {(deUint32)m_imageWidth, (deUint32)m_imageHeight, 1u};
+	const VkOffset3D imageOffset = {(int)m_imageWidth, (int)m_imageHeight, 1};
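+	// imageOffset marks the far corner of the blit region so the whole image is covered.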
+	switch(m_method)
+	{
+		case TRANSFER_METHOD_COPY_BUFFER:
+			{
+				const VkBufferCopy  copyBufRegion =
+				{
+					0u,           // VkDeviceSize    srcOffset;
+					0u,           // VkDeviceSize    dstOffset;
+					m_bufSize,    // VkDeviceSize    size; copy the whole 256-byte buffer
+				};
+				vk.cmdCopyBuffer(*m_cmdBuffer, *m_srcBuffer, *m_dstBuffer, 1u, &copyBufRegion);
+				break;
+			}
+		case TRANSFER_METHOD_COPY_IMAGE:
+			{
+				const VkImageCopy copyImageRegion =
+				{
+					imgSubResCopy,                          // VkImageSubresourceLayers  srcSubresource;
+					nullOffset,                             // VkOffset3D                srcOffset;
+					imgSubResCopy,                          // VkImageSubresourceLayers  dstSubresource;
+					nullOffset,                             // VkOffset3D                dstOffset;
+					imageExtent,                            // VkExtent3D                extent;
+				};
+				vk.cmdCopyImage(*m_cmdBuffer, *m_srcImage, VK_IMAGE_LAYOUT_GENERAL, *m_dstImage, VK_IMAGE_LAYOUT_GENERAL, 1u, &copyImageRegion);
+				break;
+			}
+		case TRANSFER_METHOD_COPY_BUFFER_TO_IMAGE:
+			{
+				const VkBufferImageCopy bufImageCopy =
+				{
+					0u,                                     // VkDeviceSize            bufferOffset;
+					0u,                                     // deUint32                  bufferRowLength; 0 means tightly packed
+					0u,                                     // deUint32                  bufferImageHeight; 0 means tightly packed
+					imgSubResCopy,                          // VkImageSubresourceLayers  imageSubresource;
+					nullOffset,                             // VkOffset3D              imageOffset;
+					imageExtent,                            // VkExtent3D              imageExtent;
+				};
+				vk.cmdCopyBufferToImage(*m_cmdBuffer, *m_srcBuffer, *m_dstImage, VK_IMAGE_LAYOUT_GENERAL, 1u, &bufImageCopy);
+				break;
+			}
+		case TRANSFER_METHOD_COPY_IMAGE_TO_BUFFER:
+			{
+				const VkBufferImageCopy imgBufferCopy =
+				{
+					0u,                                     // VkDeviceSize            bufferOffset;
+					0u,                                     // deUint32                  bufferRowLength; 0 means tightly packed
+					0u,                                     // deUint32                  bufferImageHeight; 0 means tightly packed
+					imgSubResCopy,                          // VkImageSubresourceLayers  imageSubresource;
+					nullOffset,                             // VkOffset3D              imageOffset;
+					imageExtent,                            // VkExtent3D              imageExtent;
+				};
+				vk.cmdCopyImageToBuffer(*m_cmdBuffer, *m_srcImage, VK_IMAGE_LAYOUT_GENERAL, *m_dstBuffer, 1u, &imgBufferCopy);
+				break;
+			}
+		case TRANSFER_METHOD_BLIT_IMAGE:
+			{
+				const VkImageBlit imageBlt =
+				{
+					imgSubResCopy,                          // VkImageSubresourceLayers  srcSubresource;
+					{
+						nullOffset,
+						imageOffset,
+					},
+					imgSubResCopy,                          // VkImageSubresourceLayers  dstSubresource;
+					{
+						nullOffset,
+						imageOffset,
+					}
+				};
+				vk.cmdBlitImage(*m_cmdBuffer, *m_srcImage, VK_IMAGE_LAYOUT_GENERAL, *m_dstImage, VK_IMAGE_LAYOUT_GENERAL, 1u, &imageBlt, VK_FILTER_NEAREST);
+				break;
+			}
+		case TRANSFER_METHOD_CLEAR_COLOR_IMAGE:
+			{
+				vk.cmdClearColorImage(*m_cmdBuffer, *m_dstImage, VK_IMAGE_LAYOUT_GENERAL, &srcClearValue, 1u, &subRangeColor);
+				break;
+			}
+		case TRANSFER_METHOD_CLEAR_DEPTH_STENCIL_IMAGE:
+			{
+				initialImageTransition(*m_cmdBuffer, *m_depthImage, subRangeDepth, VK_IMAGE_LAYOUT_GENERAL);
+				const VkClearDepthStencilValue clearDSValue =
+				{
+					1.0f,                                   // float       depth;
+					0u,                                     // deUint32    stencil;
+				};
+				vk.cmdClearDepthStencilImage(*m_cmdBuffer, *m_depthImage, VK_IMAGE_LAYOUT_GENERAL, &clearDSValue, 1u, &subRangeDepth);
+				break;
+			}
+		case TRANSFER_METHOD_FILL_BUFFER:
+			{
+				vk.cmdFillBuffer(*m_cmdBuffer, *m_dstBuffer, 0u, m_bufSize, 0x0);
+				break;
+			}
+		case TRANSFER_METHOD_UPDATE_BUFFER:
+			{
+				const deUint32 data[] =
+				{
+					0xdeadbeef, 0xabcdef00, 0x12345678
+				};
+				vk.cmdUpdateBuffer(*m_cmdBuffer, *m_dstBuffer, 0x10, sizeof(data), data);
+				break;
+			}
+		case TRANSFER_METHOD_COPY_QUERY_POOL_RESULTS:
+			{
+				vk.cmdWriteTimestamp(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, *m_queryPool, 0u);
+				vk.cmdCopyQueryPoolResults(*m_cmdBuffer, *m_queryPool, 0u, 1u, *m_dstBuffer, 0u, 8u, VK_QUERY_RESULT_64_BIT | VK_QUERY_RESULT_WAIT_BIT);
+				vk.cmdResetQueryPool(*m_cmdBuffer, *m_queryPool, 0u, 1u);
+				break;
+			}
+		case TRANSFER_METHOD_RESOLVE_IMAGE:
+			{
+				const VkImageResolve imageResolve =
+				{
+					imgSubResCopy,                              // VkImageSubresourceLayers  srcSubresource;
+					nullOffset,                                 // VkOffset3D                srcOffset;
+					imgSubResCopy,                              // VkImageSubresourceLayers  dstSubresource;
+					nullOffset,                                 // VkOffset3D                dstOffset;
+					imageExtent,                                // VkExtent3D                extent;
+				};
+				initialImageTransition(*m_cmdBuffer, *m_msImage, subRangeColor, VK_IMAGE_LAYOUT_GENERAL);
+				vk.cmdClearColorImage(*m_cmdBuffer, *m_msImage, VK_IMAGE_LAYOUT_GENERAL, &srcClearValue, 1u, &subRangeColor);
+				vk.cmdResolveImage(*m_cmdBuffer, *m_msImage, VK_IMAGE_LAYOUT_GENERAL, *m_dstImage, VK_IMAGE_LAYOUT_GENERAL, 1u, &imageResolve);
+				break;
+			}
+		default:
+			DE_FATAL("Unknown Transfer Method!");
+			break;
+	};
+
+	deUint32 timestampEntry = 0u;
+	for (StageFlagVector::const_iterator it = m_stages.begin(); it != m_stages.end(); it++)
+	{
+		vk.cmdWriteTimestamp(*m_cmdBuffer, *it, *m_queryPool, timestampEntry++);
+	}
+
+	VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+}
+
+void TransferTestInstance::initialImageTransition (VkCommandBuffer cmdBuffer, VkImage image, VkImageSubresourceRange subRange, VkImageLayout layout)
+{
+	const DeviceInterface&		vk				= m_context.getDeviceInterface();
+	const VkImageMemoryBarrier	imageMemBarrier	=
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // VkStructureType          sType;
+		DE_NULL,                                // const void*              pNext;
+		0u,                                     // VkAccessFlags            srcAccessMask;
+		0u,                                     // VkAccessFlags            dstAccessMask;
+		VK_IMAGE_LAYOUT_UNDEFINED,              // VkImageLayout            oldLayout;
+		layout,                                 // VkImageLayout            newLayout;
+		VK_QUEUE_FAMILY_IGNORED,                // uint32_t                 srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,                // uint32_t                 dstQueueFamilyIndex;
+		image,                                  // VkImage                  image;
+		subRange                                // VkImageSubresourceRange  subresourceRange;
+	};
+
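+	// The UNDEFINED old layout discards previous contents; srcStageMask must not be zero, and the transfer stage is used as the destination so the transition completes before the subsequent clears and copies.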
+	vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0u, 0u, DE_NULL, 0u, DE_NULL, 1u, &imageMemBarrier);
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createTimestampTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> timestampTests (new tcu::TestCaseGroup(testCtx, "timestamp", "timestamp tests"));
+
+	// Basic Graphics Tests
+	{
+		de::MovePtr<tcu::TestCaseGroup> basicGraphicsTests (new tcu::TestCaseGroup(testCtx, "basic_graphics_tests", "Record timestamp in different pipeline stages of basic graphics tests"));
+
+		const VkPipelineStageFlagBits basicGraphicsStages0[][2] =
+		{
+		  {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,   VK_PIPELINE_STAGE_VERTEX_INPUT_BIT},
+		  {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,   VK_PIPELINE_STAGE_VERTEX_SHADER_BIT},
+		  {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,   VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT},
+		  {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,   VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT},
+		  {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,   VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT},
+		  {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,   VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT},
+		  {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,   VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT},
+		  {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,   VK_PIPELINE_STAGE_ALL_COMMANDS_BIT},
+		};
+		for (deUint32 stageNdx = 0u; stageNdx < DE_LENGTH_OF_ARRAY(basicGraphicsStages0); stageNdx++)
+		{
+			TimestampTestParam param(basicGraphicsStages0[stageNdx], 2u, true);
+			basicGraphicsTests->addChild(newTestCase<BasicGraphicsTest>(testCtx, &param));
+			param.toggleInRenderPass();
+			basicGraphicsTests->addChild(newTestCase<BasicGraphicsTest>(testCtx, &param));
+		}
+
+		const VkPipelineStageFlagBits basicGraphicsStages1[][3] =
+		{
+		  {VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,      VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT},
+		  {VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,  VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT},
+		};
+		for (deUint32 stageNdx = 0u; stageNdx < DE_LENGTH_OF_ARRAY(basicGraphicsStages1); stageNdx++)
+		{
+			TimestampTestParam param(basicGraphicsStages1[stageNdx], 3u, true);
+			basicGraphicsTests->addChild(newTestCase<BasicGraphicsTest>(testCtx, &param));
+			param.toggleInRenderPass();
+			basicGraphicsTests->addChild(newTestCase<BasicGraphicsTest>(testCtx, &param));
+		}
+
+		timestampTests->addChild(basicGraphicsTests.release());
+	}
+
+	// Advanced Graphics Tests
+	{
+		de::MovePtr<tcu::TestCaseGroup> advGraphicsTests (new tcu::TestCaseGroup(testCtx, "advanced_graphics_tests", "Record timestamp in different pipeline stages of advanced graphics tests"));
+
+		const VkPipelineStageFlagBits advGraphicsStages[][2] =
+		{
+			{VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT},
+			{VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT},
+			{VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT},
+			{VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT},
+		};
+		for (deUint32 stageNdx = 0u; stageNdx < DE_LENGTH_OF_ARRAY(advGraphicsStages); stageNdx++)
+		{
+			TimestampTestParam param(advGraphicsStages[stageNdx], 2u, true);
+			advGraphicsTests->addChild(newTestCase<AdvGraphicsTest>(testCtx, &param));
+			param.toggleInRenderPass();
+			advGraphicsTests->addChild(newTestCase<AdvGraphicsTest>(testCtx, &param));
+		}
+
+		timestampTests->addChild(advGraphicsTests.release());
+	}
+
+	// Basic Compute Tests
+	{
+		de::MovePtr<tcu::TestCaseGroup> basicComputeTests (new tcu::TestCaseGroup(testCtx, "basic_compute_tests", "Record timestamp for compute stages"));
+
+		const VkPipelineStageFlagBits basicComputeStages[][2] =
+		{
+			{VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT},
+			{VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT},
+		};
+		for (deUint32 stageNdx = 0u; stageNdx < DE_LENGTH_OF_ARRAY(basicComputeStages); stageNdx++)
+		{
+			TimestampTestParam param(basicComputeStages[stageNdx], 2u, false);
+			basicComputeTests->addChild(newTestCase<BasicComputeTest>(testCtx, &param));
+		}
+
+		timestampTests->addChild(basicComputeTests.release());
+	}
+
+	// Transfer Tests
+	{
+		de::MovePtr<tcu::TestCaseGroup> transferTests (new tcu::TestCaseGroup(testCtx, "transfer_tests", "Record timestamp for transfer stages"));
+
+		const VkPipelineStageFlagBits transferStages[][2] =
+		{
+			{VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT},
+			{VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_HOST_BIT},
+		};
+		for (deUint32 stageNdx = 0u; stageNdx < DE_LENGTH_OF_ARRAY(transferStages); stageNdx++)
+		{
+			for (deUint32 method = 0u; method < TRANSFER_METHOD_LAST; method++)
+			{
+				TransferTimestampTestParam param(transferStages[stageNdx], 2u, false, method);
+				transferTests->addChild(newTestCase<TransferTest>(testCtx, &param));
+			}
+		}
+
+		timestampTests->addChild(transferTests.release());
+	}
+
+	// Misc Tests
+	{
+		de::MovePtr<tcu::TestCaseGroup> miscTests (new tcu::TestCaseGroup(testCtx, "misc_tests", "Misc tests that cannot be categorized into other groups."));
+
+		const VkPipelineStageFlagBits miscStages[] = {VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT};
+		TimestampTestParam param(miscStages, 1u, false);
+		miscTests->addChild(new TimestampTest(testCtx,
+											  "timestamp_only",
+											  "Only write timestamp command in the command buffer",
+											  &param));
+
+		timestampTests->addChild(miscTests.release());
+	}
+
+	return timestampTests.release();
+}
+
+} // pipeline
+
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineTimestampTests.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineTimestampTests.hpp
new file mode 100644
index 0000000..47b0641
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineTimestampTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTPIPELINETIMESTAMPTESTS_HPP
+#define _VKTPIPELINETIMESTAMPTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 ARM Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Timestamp Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+tcu::TestCaseGroup* createTimestampTests (tcu::TestContext& testCtx);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINETIMESTAMPTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineUniqueRandomIterator.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineUniqueRandomIterator.hpp
new file mode 100644
index 0000000..da67bf2
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineUniqueRandomIterator.hpp
@@ -0,0 +1,122 @@
+#ifndef _VKTPIPELINEUNIQUERANDOMITERATOR_HPP
+#define _VKTPIPELINEUNIQUERANDOMITERATOR_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Iterator over a unique sequence of items
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "deRandom.hpp"
+#include <set>
+#include <vector>
+
+namespace vkt
+{
+namespace pipeline
+{
+
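+// Iterates over 'numItems' unique indices chosen from [0, numValues) in shuffled order;
+// subclasses map each index to a concrete value through getIndexedValue().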
+template <typename T>
+class UniqueRandomIterator
+{
+public:
+							UniqueRandomIterator	(deUint32 numItems, deUint32 numValues, int seed);
+	virtual					~UniqueRandomIterator	(void) {}
+	bool					hasNext					(void) const;
+	T						next					(void);
+	void					reset					(void);
+
+protected:
+	virtual T				getIndexedValue			(deUint32 index) = 0;
+
+private:
+	std::vector<deUint32>	m_indices;
+	size_t					m_currentIndex;
+};
+
+template <typename T>
+UniqueRandomIterator<T>::UniqueRandomIterator (deUint32 numItems, deUint32 numValues, int seed)
+{
+	de::Random rnd(seed);
+
+	DE_ASSERT(numItems <= numValues);
+
+	if (numItems == numValues)
+	{
+		// Fast way to populate the index sequence
+		m_indices = std::vector<deUint32>(numItems);
+
+		for (deUint32 itemNdx = 0; itemNdx < numItems; itemNdx++)
+			m_indices[itemNdx] = itemNdx;
+	}
+	else
+	{
+		std::set<deUint32> uniqueIndices;
+
+		// Populate set with "numItems" unique values between 0 and numValues - 1
+		while (uniqueIndices.size() < numItems)
+			uniqueIndices.insert(rnd.getUint32() % numValues);
+
+		// Copy set into index sequence
+		m_indices = std::vector<deUint32>(uniqueIndices.begin(), uniqueIndices.end());
+	}
+
+	// Scramble the indices
+	rnd.shuffle(m_indices.begin(), m_indices.end());
+
+	reset();
+}
+
+template <typename T>
+bool UniqueRandomIterator<T>::hasNext (void) const
+{
+	return m_currentIndex < m_indices.size();
+}
+
+template <typename T>
+T UniqueRandomIterator<T>::next (void)
+{
+	DE_ASSERT(m_currentIndex < m_indices.size());
+
+	return getIndexedValue(m_indices[m_currentIndex++]);
+}
+
+template <typename T>
+void UniqueRandomIterator<T>::reset (void)
+{
+	m_currentIndex = 0;
+}
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINEUNIQUERANDOMITERATOR_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexInputTests.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexInputTests.cpp
new file mode 100644
index 0000000..801a831
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexInputTests.cpp
@@ -0,0 +1,1701 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vertex Input Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineVertexInputTests.hpp"
+#include "vktPipelineCombinationsIterator.hpp"
+#include "vktPipelineClearUtil.hpp"
+#include "vktPipelineImageUtil.hpp"
+#include "vktPipelineVertexUtil.hpp"
+#include "vktPipelineReferenceRenderer.hpp"
+#include "vktTestCase.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "tcuFloat.hpp"
+#include "tcuImageCompare.hpp"
+#include "deFloat16.h"
+#include "deMemory.h"
+#include "deStringUtil.hpp"
+#include "deUniquePtr.hpp"
+
+#include <sstream>
+#include <vector>
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+
+namespace
+{
+
+bool isSupportedVertexFormat (Context& context, VkFormat format)
+{
+	if (isVertexFormatDouble(format) && !context.getDeviceFeatures().shaderFloat64)
+		return false;
+
+	VkFormatProperties  formatProps;
+	deMemset(&formatProps, 0, sizeof(VkFormatProperties));
+	context.getInstanceInterface().getPhysicalDeviceFormatProperties(context.getPhysicalDevice(), format, &formatProps);
+
+	return (formatProps.bufferFeatures & VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT) != 0u;
+}
+
+float getRepresentableDifferenceUnorm (VkFormat format)
+{
+	DE_ASSERT(isVertexFormatUnorm(format) || isVertexFormatSRGB(format));
+
+	return 1.0f / float((1 << (getVertexFormatComponentSize(format) * 8)) - 1);
+}
+
+float getRepresentableDifferenceSnorm (VkFormat format)
+{
+	DE_ASSERT(isVertexFormatSnorm(format));
+
+	return 1.0f / float((1 << (getVertexFormatComponentSize(format) * 8 - 1)) - 1);
+}
+
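+// Returns the padding needed to round 'value' up to the next multiple of 'divisor' (0 if it is already a multiple)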
+deUint32 getNextMultipleOffset (deUint32 divisor, deUint32 value)
+{
+	if (value % divisor == 0)
+		return 0;
+	else
+		return divisor - (value % divisor);
+}
+
+class VertexInputTest : public vkt::TestCase
+{
+public:
+	enum GlslType
+	{
+		GLSL_TYPE_INT,
+		GLSL_TYPE_IVEC2,
+		GLSL_TYPE_IVEC3,
+		GLSL_TYPE_IVEC4,
+
+		GLSL_TYPE_UINT,
+		GLSL_TYPE_UVEC2,
+		GLSL_TYPE_UVEC3,
+		GLSL_TYPE_UVEC4,
+
+		GLSL_TYPE_FLOAT,
+		GLSL_TYPE_VEC2,
+		GLSL_TYPE_VEC3,
+		GLSL_TYPE_VEC4,
+		GLSL_TYPE_MAT2,
+		GLSL_TYPE_MAT3,
+		GLSL_TYPE_MAT4,
+
+		GLSL_TYPE_DOUBLE,
+		GLSL_TYPE_DVEC2,
+		GLSL_TYPE_DVEC3,
+		GLSL_TYPE_DVEC4,
+		GLSL_TYPE_DMAT2,
+		GLSL_TYPE_DMAT3,
+		GLSL_TYPE_DMAT4,
+
+		GLSL_TYPE_COUNT
+	};
+
+	enum GlslBasicType
+	{
+		GLSL_BASIC_TYPE_INT,
+		GLSL_BASIC_TYPE_UINT,
+		GLSL_BASIC_TYPE_FLOAT,
+		GLSL_BASIC_TYPE_DOUBLE
+	};
+
+	enum BindingMapping
+	{
+		BINDING_MAPPING_ONE_TO_ONE,	// Vertex input bindings will not contain data for more than one attribute.
+		BINDING_MAPPING_ONE_TO_MANY	// Vertex input bindings can contain data for more than one attribute.
+	};
+
+	struct AttributeInfo
+	{
+		GlslType				glslType;
+		VkFormat				vkType;
+		VkVertexInputRate		inputRate;
+	};
+
+	struct GlslTypeDescription
+	{
+		const char*		name;
+		int				vertexInputComponentCount;
+		int				vertexInputCount;
+		GlslBasicType	basicType;
+	};
+
+	static const GlslTypeDescription		s_glslTypeDescriptions[GLSL_TYPE_COUNT];
+
+											VertexInputTest				(tcu::TestContext&					testContext,
+																		 const std::string&					name,
+																		 const std::string&					description,
+																		 const std::vector<AttributeInfo>&	attributeInfos,
+																		 BindingMapping						bindingMapping);
+
+	virtual									~VertexInputTest			(void) {}
+	virtual void							initPrograms				(SourceCollections& programCollection) const;
+	virtual TestInstance*					createInstance				(Context& context) const;
+	static bool								isCompatibleType			(VkFormat format, GlslType glslType);
+
+private:
+	std::string								getGlslInputDeclarations	(void) const;
+	std::string								getGlslVertexCheck			(void) const;
+	std::string								getGlslAttributeConditions	(const AttributeInfo& attributeInfo, deUint32 attributeIndex) const;
+	static tcu::Vec4						getFormatThreshold			(VkFormat format);
+
+	const std::vector<AttributeInfo>		m_attributeInfos;
+	const BindingMapping					m_bindingMapping;
+	bool									m_usesDoubleType;
+};
+
+class GlslTypeCombinationsIterator : public CombinationsIterator< std::vector<VertexInputTest::GlslType> >
+{
+public:
+													GlslTypeCombinationsIterator	(deUint32 numValues, deUint32 combinationSize);
+	virtual											~GlslTypeCombinationsIterator	(void) {}
+
+protected:
+	virtual std::vector<VertexInputTest::GlslType>	getCombinationValue				(const std::vector<deUint32>& combination);
+
+private:
+	std::vector<VertexInputTest::GlslType>			m_combinationValue;
+};
+
+class VertexInputInstance : public vkt::TestInstance
+{
+public:
+	struct VertexInputAttributeDescription
+	{
+		VertexInputTest::GlslType			glslType;
+		int									vertexInputIndex;
+		VkVertexInputAttributeDescription	vkDescription;
+	};
+
+	typedef	std::vector<VertexInputAttributeDescription>	AttributeDescriptionList;
+
+											VertexInputInstance			(Context&												context,
+																		 const AttributeDescriptionList&						attributeDescriptions,
+																		 const std::vector<VkVertexInputBindingDescription>&	bindingDescriptions,
+																		 const std::vector<VkDeviceSize>&						bindingOffsets);
+
+	virtual									~VertexInputInstance		(void);
+	virtual tcu::TestStatus					iterate						(void);
+
+
+	static void								writeVertexInputData		(deUint8* destPtr, const VkVertexInputBindingDescription& bindingDescription, const VkDeviceSize bindingOffset, const AttributeDescriptionList& attributes);
+	static void								writeVertexInputValue		(deUint8* destPtr, const VertexInputAttributeDescription& attribute, int indexId);
+
+private:
+	tcu::TestStatus							verifyImage					(void);
+
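+	// For BGR-ordered formats, swap the indices of components 0 and 2 so the written memory layout matches the RGB values expected by the shader.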
+private:
+	std::vector<VkBuffer>					m_vertexBuffers;
+	std::vector<Allocation*>				m_vertexBufferAllocs;
+
+	const tcu::UVec2						m_renderSize;
+	const VkFormat							m_colorFormat;
+
+	Move<VkImage>							m_colorImage;
+	de::MovePtr<Allocation>					m_colorImageAlloc;
+	Move<VkImage>							m_depthImage;
+	Move<VkImageView>						m_colorAttachmentView;
+	Move<VkRenderPass>						m_renderPass;
+	Move<VkFramebuffer>						m_framebuffer;
+
+	Move<VkShaderModule>					m_vertexShaderModule;
+	Move<VkShaderModule>					m_fragmentShaderModule;
+
+	Move<VkPipelineLayout>					m_pipelineLayout;
+	Move<VkPipeline>						m_graphicsPipeline;
+
+	Move<VkCommandPool>						m_cmdPool;
+	Move<VkCommandBuffer>					m_cmdBuffer;
+
+	Move<VkFence>							m_fence;
+};
+
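+// GlslTypeDescription fields: { GLSL type name, components per vertex input, number of vertex inputs (matrix columns), basic component type }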
+const VertexInputTest::GlslTypeDescription VertexInputTest::s_glslTypeDescriptions[GLSL_TYPE_COUNT] =
+{
+	{ "int",	1, 1, GLSL_BASIC_TYPE_INT },
+	{ "ivec2",	2, 1, GLSL_BASIC_TYPE_INT },
+	{ "ivec3",	3, 1, GLSL_BASIC_TYPE_INT },
+	{ "ivec4",	4, 1, GLSL_BASIC_TYPE_INT },
+
+	{ "uint",	1, 1, GLSL_BASIC_TYPE_UINT },
+	{ "uvec2",	2, 1, GLSL_BASIC_TYPE_UINT },
+	{ "uvec3",	3, 1, GLSL_BASIC_TYPE_UINT },
+	{ "uvec4",	4, 1, GLSL_BASIC_TYPE_UINT },
+
+	{ "float",	1, 1, GLSL_BASIC_TYPE_FLOAT },
+	{ "vec2",	2, 1, GLSL_BASIC_TYPE_FLOAT },
+	{ "vec3",	3, 1, GLSL_BASIC_TYPE_FLOAT },
+	{ "vec4",	4, 1, GLSL_BASIC_TYPE_FLOAT },
+	{ "mat2",	2, 2, GLSL_BASIC_TYPE_FLOAT },
+	{ "mat3",	3, 3, GLSL_BASIC_TYPE_FLOAT },
+	{ "mat4",	4, 4, GLSL_BASIC_TYPE_FLOAT },
+
+	{ "double",	1, 1, GLSL_BASIC_TYPE_DOUBLE },
+	{ "dvec2",	2, 1, GLSL_BASIC_TYPE_DOUBLE },
+	{ "dvec3",	3, 1, GLSL_BASIC_TYPE_DOUBLE },
+	{ "dvec4",	4, 1, GLSL_BASIC_TYPE_DOUBLE },
+	{ "dmat2",	2, 2, GLSL_BASIC_TYPE_DOUBLE },
+	{ "dmat3",	3, 3, GLSL_BASIC_TYPE_DOUBLE },
+	{ "dmat4",	4, 4, GLSL_BASIC_TYPE_DOUBLE }
+};
+
+
+VertexInputTest::VertexInputTest (tcu::TestContext&						testContext,
+								  const std::string&					name,
+								  const std::string&					description,
+								  const std::vector<AttributeInfo>&		attributeInfos,
+								  BindingMapping						bindingMapping)
+
+	: vkt::TestCase			(testContext, name, description)
+	, m_attributeInfos		(attributeInfos)
+	, m_bindingMapping		(bindingMapping)
+{
+	m_usesDoubleType = false;
+
+	for (size_t attributeNdx = 0; attributeNdx < m_attributeInfos.size(); attributeNdx++)
+	{
+		if (s_glslTypeDescriptions[m_attributeInfos[attributeNdx].glslType].basicType == GLSL_BASIC_TYPE_DOUBLE)
+		{
+			m_usesDoubleType = true;
+			break;
+		}
+	}
+}
+
+TestInstance* VertexInputTest::createInstance (Context& context) const
+{
+	// Create two binding descriptions per attribute; each binding gets its own offset (4 * bindingNdx)
+	std::vector<VkVertexInputBindingDescription>	bindingDescriptions;
+	std::vector<VkDeviceSize>						bindingOffsets;
+
+	for (size_t bindingNdx = 0; bindingNdx < m_attributeInfos.size() * 2; bindingNdx++)
+	{
+		// Use STEP_RATE_VERTEX in even bindings and STEP_RATE_INSTANCE in odd bindings
+		const VkVertexInputRate						inputRate			= (bindingNdx % 2 == 0) ? VK_VERTEX_INPUT_RATE_VERTEX : VK_VERTEX_INPUT_RATE_INSTANCE;
+
+		// .strideInBytes will be updated when creating the attribute descriptions
+		const VkVertexInputBindingDescription	bindingDescription	=
+		{
+			(deUint32)bindingNdx,	// deUint32				binding;
+			0,						// deUint32				stride;
+			inputRate				// VkVertexInputRate	inputRate;
+		};
+
+		bindingDescriptions.push_back(bindingDescription);
+		bindingOffsets.push_back(4 * bindingNdx);
+	}
+
+	// Create attribute descriptions, assign them to bindings and update .strideInBytes
+	std::vector<VertexInputInstance::VertexInputAttributeDescription>	attributeDescriptions;
+	deUint32															attributeLocation		= 0;
+	std::vector<deUint32>												attributeOffsets		(bindingDescriptions.size(), 0);
+
+	for (size_t attributeNdx = 0; attributeNdx < m_attributeInfos.size(); attributeNdx++)
+	{
+		const AttributeInfo&		attributeInfo			= m_attributeInfos[attributeNdx];
+		const GlslTypeDescription&	glslTypeDescription		= s_glslTypeDescriptions[attributeInfo.glslType];
+		const deUint32				inputSize				= getVertexFormatSize(attributeInfo.vkType);
+		deUint32					attributeBinding;
+
+		if (m_bindingMapping == BINDING_MAPPING_ONE_TO_ONE)
+		{
+			if (attributeInfo.inputRate == VK_VERTEX_INPUT_RATE_VERTEX)
+			{
+				attributeBinding = (deUint32)attributeNdx * 2; // Even binding number
+			}
+			else // attributeInfo.inputRate == VK_VERTEX_INPUT_RATE_INSTANCE
+			{
+				attributeBinding = (deUint32)attributeNdx * 2 + 1; // Odd binding number
+			}
+		}
+		else // m_bindingMapping == BINDING_MAPPING_ONE_TO_MANY
+		{
+			if (attributeInfo.inputRate == VK_VERTEX_INPUT_RATE_VERTEX)
+			{
+				attributeBinding = 0;
+			}
+			else // attributeInfo.inputRate == VK_VERTEX_INPUT_RATE_INSTANCE
+			{
+				attributeBinding = 1;
+			}
+		}
+
+		for (int descNdx = 0; descNdx < glslTypeDescription.vertexInputCount; descNdx++)
+		{
+			const deUint32	offsetToComponentAlignment	= getNextMultipleOffset(getVertexFormatComponentSize(attributeInfo.vkType),
+																				(deUint32)bindingOffsets[attributeBinding] + attributeOffsets[attributeBinding]);
+
+			attributeOffsets[attributeBinding] += offsetToComponentAlignment;
+
+			const VertexInputInstance::VertexInputAttributeDescription attributeDescription =
+			{
+				attributeInfo.glslType,							// GlslType	glslType;
+				descNdx,										// int		index;
+				{
+					attributeLocation,							// deUint32	location;
+					attributeBinding,							// deUint32	binding;
+					attributeInfo.vkType,						// VkFormat	format;
+					attributeOffsets[attributeBinding],			// deUint32	offset;
+				},
+			};
+
+			bindingDescriptions[attributeBinding].stride	+= offsetToComponentAlignment + inputSize;
+			attributeOffsets[attributeBinding]				+= inputSize;
+
+			// Double-precision columns with more than two components consume two attribute locations each
+			const GlslType type = attributeInfo.glslType;
+			if ((type == GLSL_TYPE_DMAT2 || type == GLSL_TYPE_DMAT3 || type == GLSL_TYPE_DMAT4) &&
+				(attributeInfo.vkType == VK_FORMAT_R64G64B64_SFLOAT || attributeInfo.vkType == VK_FORMAT_R64G64B64A64_SFLOAT))
+			{
+				attributeLocation += 2;
+			}
+			else
+				attributeLocation++;
+
+			attributeDescriptions.push_back(attributeDescription);
+		}
+	}
+
+	return new VertexInputInstance(context, attributeDescriptions, bindingDescriptions, bindingOffsets);
+}
+
+void VertexInputTest::initPrograms (SourceCollections& programCollection) const
+{
+	std::ostringstream vertexSrc;
+
+	vertexSrc << "#version 440\n"
+			  << getGlslInputDeclarations()
+			  << "layout(location = 0) out highp vec4 vtxColor;\n"
+			  << "out gl_PerVertex {\n"
+			  << "  vec4 gl_Position;\n"
+			  << "};\n";
+
+	// NOTE: double abs(double x) does not appear to be defined in glslang, so provide it here
+	if (m_usesDoubleType)
+		vertexSrc << "double abs (double x) { if (x < 0.0LF) return -x; else return x; }\n";
+
+	vertexSrc << "void main (void)\n"
+			  << "{\n"
+			  << getGlslVertexCheck()
+			  << "}\n";
+
+	programCollection.glslSources.add("attribute_test_vert") << glu::VertexSource(vertexSrc.str());
+
+	programCollection.glslSources.add("attribute_test_frag") << glu::FragmentSource(
+		"#version 440\n"
+		"layout(location = 0) in highp vec4 vtxColor;\n"
+		"layout(location = 0) out highp vec4 fragColor;\n"
+		"void main (void)\n"
+		"{\n"
+		"	fragColor = vtxColor;\n"
+		"}\n");
+}
+
+std::string VertexInputTest::getGlslInputDeclarations (void) const
+{
+	std::ostringstream	glslInputs;
+	deUint32			location = 0;
+
+	for (size_t attributeNdx = 0; attributeNdx < m_attributeInfos.size(); attributeNdx++)
+	{
+		const GlslTypeDescription& glslTypeDesc = s_glslTypeDescriptions[m_attributeInfos[attributeNdx].glslType];
+
+		glslInputs << "layout(location = " << location << ") in highp " << glslTypeDesc.name << " attr" << attributeNdx << ";\n";
+		location += glslTypeDesc.vertexInputCount;
+	}
+
+	return glslInputs.str();
+}
+
+std::string VertexInputTest::getGlslVertexCheck (void) const
+{
+	std::ostringstream	glslCode;
+	int					totalInputComponentCount	= 0;
+
+
+	glslCode << "	int okCount = 0;\n";
+
+	for (size_t attributeNdx = 0; attributeNdx < m_attributeInfos.size(); attributeNdx++)
+	{
+		glslCode << getGlslAttributeConditions(m_attributeInfos[attributeNdx], (deUint32)attributeNdx);
+
+		const int vertexInputCount	= VertexInputTest::s_glslTypeDescriptions[m_attributeInfos[attributeNdx].glslType].vertexInputCount;
+		totalInputComponentCount	+= vertexInputCount * VertexInputTest::s_glslTypeDescriptions[m_attributeInfos[attributeNdx].glslType].vertexInputComponentCount;
+	}
+
+	glslCode <<
+		"	if (okCount == " << totalInputComponentCount << ")\n"
+		"	{\n"
+		"		if (gl_InstanceIndex == 0)\n"
+		"			vtxColor = vec4(1.0, 0.0, 0.0, 1.0);\n"
+		"		else\n"
+		"			vtxColor = vec4(0.0, 0.0, 1.0, 1.0);\n"
+		"	}\n"
+		"	else\n"
+		"	{\n"
+		"		vtxColor = vec4(okCount / float(" << totalInputComponentCount << "), 0.0f, 0.0f, 1.0);\n" <<
+		"	}\n\n"
+		"	if (gl_InstanceIndex == 0)\n"
+		"	{\n"
+		"		if (gl_VertexIndex == 0) gl_Position = vec4(-1.0, -1.0, 0.0, 1.0);\n"
+		"		else if (gl_VertexIndex == 1) gl_Position = vec4(0.0, -1.0, 0.0, 1.0);\n"
+		"		else if (gl_VertexIndex == 2) gl_Position = vec4(-1.0, 1.0, 0.0, 1.0);\n"
+		"		else if (gl_VertexIndex == 3) gl_Position = vec4(0.0, 1.0, 0.0, 1.0);\n"
+		"		else gl_Position = vec4(0.0);\n"
+		"	}\n"
+		"	else\n"
+		"	{\n"
+		"		if (gl_VertexIndex == 0) gl_Position = vec4(0.0, -1.0, 0.0, 1.0);\n"
+		"		else if (gl_VertexIndex == 1) gl_Position = vec4(1.0, -1.0, 0.0, 1.0);\n"
+		"		else if (gl_VertexIndex == 2) gl_Position = vec4(0.0, 1.0, 0.0, 1.0);\n"
+		"		else if (gl_VertexIndex == 3) gl_Position = vec4(1.0, 1.0, 0.0, 1.0);\n"
+		"		else gl_Position = vec4(0.0);\n"
+		"	}\n";
+
+	return glslCode.str();
+}
+
+std::string VertexInputTest::getGlslAttributeConditions (const AttributeInfo& attributeInfo, deUint32 attributeIndex) const
+{
+	std::ostringstream	glslCode;
+	std::ostringstream	attributeVar;
+	const std::string	indexId				= (attributeInfo.inputRate == VK_VERTEX_INPUT_RATE_VERTEX) ? "gl_VertexIndex" : "gl_InstanceIndex";
+	const int			componentCount		= VertexInputTest::s_glslTypeDescriptions[attributeInfo.glslType].vertexInputComponentCount;
+	const int			vertexInputCount	= VertexInputTest::s_glslTypeDescriptions[attributeInfo.glslType].vertexInputCount;
+	const deUint32		totalComponentCount	= componentCount * vertexInputCount;
+	const tcu::Vec4		threshold			= getFormatThreshold(attributeInfo.vkType);
+	deUint32			componentIndex		= 0;
+
+	attributeVar << "attr" << attributeIndex;
+
+	glslCode << std::fixed;
+
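+	// For every component of the attribute, emit a check that its value matches the pattern written by
+	// writeVertexInputValue() (within the format threshold), incrementing okCount when it does.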
+	for (int columnNdx = 0; columnNdx < vertexInputCount; columnNdx++)
+	{
+		for (int rowNdx = 0; rowNdx < componentCount; rowNdx++)
+		{
+			std::string accessStr;
+			{
+				// Build string representing the access to the attribute component
+				std::ostringstream accessStream;
+				accessStream << attributeVar.str();
+
+				if (vertexInputCount == 1)
+				{
+					if (componentCount > 1)
+						accessStream << "[" << rowNdx << "]";
+				}
+				else
+				{
+					accessStream << "[" << columnNdx << "][" << rowNdx << "]";
+				}
+
+				accessStr = accessStream.str();
+			}
+
+			if (isVertexFormatSint(attributeInfo.vkType))
+			{
+				glslCode << "\tif (" << accessStr << " == -(" << totalComponentCount << " * " << indexId << " + " << componentIndex << "))\n";
+			}
+			else if (isVertexFormatUint(attributeInfo.vkType))
+			{
+				glslCode << "\tif (" << accessStr << " == uint(" << totalComponentCount << " * " << indexId << " + " << componentIndex << "))\n";
+			}
+			else if (isVertexFormatSfloat(attributeInfo.vkType))
+			{
+				if (VertexInputTest::s_glslTypeDescriptions[attributeInfo.glslType].basicType == VertexInputTest::GLSL_BASIC_TYPE_DOUBLE)
+				{
+					glslCode << "\tif (abs(" << accessStr << " + double(0.01 * (" << totalComponentCount << ".0 * float(" << indexId << ") + " << componentIndex << ".0))) < double(" << threshold[rowNdx] << "))\n";
+				}
+				else
+				{
+					glslCode << "\tif (abs(" << accessStr << " + (0.01 * (" << totalComponentCount << ".0 * float(" << indexId << ") + " << componentIndex << ".0))) < " << threshold[rowNdx] << ")\n";
+				}
+			}
+			else if (isVertexFormatSscaled(attributeInfo.vkType))
+			{
+				glslCode << "\tif (abs(" << accessStr << " + (" << totalComponentCount << ".0 * float(" << indexId << ") + " << componentIndex << ".0)) < " << threshold[rowNdx] << ")\n";
+			}
+			else if (isVertexFormatUscaled(attributeInfo.vkType))
+			{
+				glslCode << "\t if (abs(" << accessStr << " - (" << totalComponentCount << ".0 * float(" << indexId << ") + " << componentIndex << ".0)) < " << threshold[rowNdx] << ")\n";
+			}
+			else if (isVertexFormatSnorm(attributeInfo.vkType))
+			{
+				const float representableDiff = getRepresentableDifferenceSnorm(attributeInfo.vkType);
+
+				glslCode << "\tif (abs(" << accessStr << " - (-1.0 + " << representableDiff << " * (" << totalComponentCount << ".0 * float(" << indexId << ") + " << componentIndex << ".0))) < " << threshold[rowNdx] << ")\n";
+			}
+			else if (isVertexFormatUnorm(attributeInfo.vkType) || isVertexFormatSRGB(attributeInfo.vkType))
+			{
+				const float representableDiff = getRepresentableDifferenceUnorm(attributeInfo.vkType);
+
+				glslCode << "\tif (abs(" << accessStr << " - " << "(" << representableDiff << " * (" << totalComponentCount << ".0 * float(" << indexId << ") + " << componentIndex << ".0))) < " << threshold[rowNdx] << ")\n";
+			}
+			else
+			{
+				DE_ASSERT(false);
+			}
+
+			glslCode << "\t\tokCount++;\n\n";
+
+			componentIndex++;
+		}
+	}
+	return glslCode.str();
+}
+
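+// Comparison threshold used by the generated checks: tight for 32/64-bit floats, 1.5x the smallest
+// representable step for normalized formats, and a loose default otherwise.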
+tcu::Vec4 VertexInputTest::getFormatThreshold (VkFormat format)
+{
+	using tcu::Vec4;
+
+	switch (format)
+	{
+		case VK_FORMAT_R32_SFLOAT:
+		case VK_FORMAT_R32G32_SFLOAT:
+		case VK_FORMAT_R32G32B32_SFLOAT:
+		case VK_FORMAT_R32G32B32A32_SFLOAT:
+		case VK_FORMAT_R64_SFLOAT:
+		case VK_FORMAT_R64G64_SFLOAT:
+		case VK_FORMAT_R64G64B64_SFLOAT:
+		case VK_FORMAT_R64G64B64A64_SFLOAT:
+			return Vec4(0.00001f);
+
+		default:
+			break;
+	}
+
+	if (isVertexFormatSnorm(format))
+	{
+		return Vec4(1.5f * getRepresentableDifferenceSnorm(format));
+	}
+	else if (isVertexFormatUnorm(format))
+	{
+		return Vec4(1.5f * getRepresentableDifferenceUnorm(format));
+	}
+
+	return Vec4(0.001f);
+}
+
+GlslTypeCombinationsIterator::GlslTypeCombinationsIterator (deUint32 numValues, deUint32 combinationSize)
+	: CombinationsIterator< std::vector<VertexInputTest::GlslType> >	(numValues, combinationSize)
+	, m_combinationValue												(std::vector<VertexInputTest::GlslType>(combinationSize))
+{
+	DE_ASSERT(numValues <= VertexInputTest::GLSL_TYPE_COUNT);
+}
+
+std::vector<VertexInputTest::GlslType> GlslTypeCombinationsIterator::getCombinationValue (const std::vector<deUint32>& combination)
+{
+	for (size_t combinationItemNdx = 0; combinationItemNdx < combination.size(); combinationItemNdx++)
+		m_combinationValue[combinationItemNdx] = (VertexInputTest::GlslType)combination[combinationItemNdx];
+
+	return m_combinationValue;
+}
+
+VertexInputInstance::VertexInputInstance (Context&												context,
+										  const AttributeDescriptionList&						attributeDescriptions,
+										  const std::vector<VkVertexInputBindingDescription>&	bindingDescriptions,
+										  const std::vector<VkDeviceSize>&						bindingOffsets)
+	: vkt::TestInstance			(context)
+	, m_renderSize				(16, 16)
+	, m_colorFormat				(VK_FORMAT_R8G8B8A8_UNORM)
+{
+	DE_ASSERT(bindingDescriptions.size() == bindingOffsets.size());
+
+	const DeviceInterface&		vk						= context.getDeviceInterface();
+	const VkDevice				vkDevice				= context.getDevice();
+	const deUint32				queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+	SimpleAllocator				memAlloc				(vk, vkDevice, getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()));
+	const VkComponentMapping	componentMappingRGBA	= { VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G, VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A };
+
+	// Create color image
+	{
+		const VkImageCreateInfo colorImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,										// VkStructureType			sType;
+			DE_NULL,																	// const void*				pNext;
+			0u,																			// VkImageCreateFlags		flags;
+			VK_IMAGE_TYPE_2D,															// VkImageType				imageType;
+			m_colorFormat,																// VkFormat					format;
+			{ m_renderSize.x(), m_renderSize.y(), 1u },									// VkExtent3D				extent;
+			1u,																			// deUint32					mipLevels;
+			1u,																			// deUint32					arrayLayers;
+			VK_SAMPLE_COUNT_1_BIT,														// VkSampleCountFlagBits	samples;
+			VK_IMAGE_TILING_OPTIMAL,													// VkImageTiling			tiling;
+			VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,		// VkImageUsageFlags		usage;
+			VK_SHARING_MODE_EXCLUSIVE,													// VkSharingMode			sharingMode;
+			1u,																			// deUint32					queueFamilyIndexCount;
+			&queueFamilyIndex,															// const deUint32*			pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED,													// VkImageLayout			initialLayout;
+		};
+
+		m_colorImage			= createImage(vk, vkDevice, &colorImageParams);
+
+		// Allocate and bind color image memory
+		m_colorImageAlloc		= memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_colorImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_colorImage, m_colorImageAlloc->getMemory(), m_colorImageAlloc->getOffset()));
+	}
+
+	// Create color attachment view
+	{
+		const VkImageViewCreateInfo colorAttachmentViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,		// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			0u,												// VkImageViewCreateFlags	flags;
+			*m_colorImage,									// VkImage					image;
+			VK_IMAGE_VIEW_TYPE_2D,							// VkImageViewType			viewType;
+			m_colorFormat,									// VkFormat					format;
+			componentMappingRGBA,							// VkComponentMapping		components;
+			{ VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u },  // VkImageSubresourceRange	subresourceRange;
+		};
+
+		m_colorAttachmentView = createImageView(vk, vkDevice, &colorAttachmentViewParams);
+	}
+
+	// Create render pass
+	{
+		const VkAttachmentDescription colorAttachmentDescription =
+		{
+			0u,													// VkAttachmentDescriptionFlags		flags;
+			m_colorFormat,										// VkFormat							format;
+			VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits			samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp				loadOp;
+			VK_ATTACHMENT_STORE_OP_STORE,						// VkAttachmentStoreOp				storeOp;
+			VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// VkAttachmentLoadOp				stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp				stencilStoreOp;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout					initialLayout;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout					finalLayout;
+		};
+
+		const VkAttachmentReference colorAttachmentReference =
+		{
+			0u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout	layout;
+		};
+
+		const VkSubpassDescription subpassDescription =
+		{
+			0u,													// VkSubpassDescriptionFlags	flags;
+			VK_PIPELINE_BIND_POINT_GRAPHICS,					// VkPipelineBindPoint			pipelineBindPoint;
+			0u,													// deUint32						inputAttachmentCount;
+			DE_NULL,											// const VkAttachmentReference*	pInputAttachments;
+			1u,													// deUint32						colorAttachmentCount;
+			&colorAttachmentReference,							// const VkAttachmentReference*	pColorAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pResolveAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pDepthStencilAttachment;
+			0u,													// deUint32						preserveAttachmentCount;
+			DE_NULL												// const VkAttachmentReference*	pPreserveAttachments;
+		};
+
+		const VkRenderPassCreateInfo renderPassParams =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,			// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkRenderPassCreateFlags			flags;
+			1u,													// deUint32							attachmentCount;
+			&colorAttachmentDescription,						// const VkAttachmentDescription*	pAttachments;
+			1u,													// deUint32							subpassCount;
+			&subpassDescription,								// const VkSubpassDescription*		pSubpasses;
+			0u,													// deUint32							dependencyCount;
+			DE_NULL												// const VkSubpassDependency*		pDependencies;
+		};
+
+		m_renderPass = createRenderPass(vk, vkDevice, &renderPassParams);
+	}
+
+	// Create framebuffer
+	{
+		const VkFramebufferCreateInfo framebufferParams =
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			0u,													// VkFramebufferCreateFlags	flags;
+			*m_renderPass,										// VkRenderPass				renderPass;
+			1u,													// deUint32					attachmentCount;
+			&m_colorAttachmentView.get(),						// const VkImageView*		pAttachments;
+			(deUint32)m_renderSize.x(),							// deUint32					width;
+			(deUint32)m_renderSize.y(),							// deUint32					height;
+			1u													// deUint32					layers;
+		};
+
+		m_framebuffer = createFramebuffer(vk, vkDevice, &framebufferParams);
+	}
+
+	// Create pipeline layout
+	{
+		const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkPipelineLayoutCreateFlags		flags;
+			0u,													// deUint32							setLayoutCount;
+			DE_NULL,											// const VkDescriptorSetLayout*		pSetLayouts;
+			0u,													// deUint32							pushConstantRangeCount;
+			DE_NULL												// const VkPushConstantRange*		pPushConstantRanges;
+		};
+
+		m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	m_vertexShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("attribute_test_vert"), 0);
+	m_fragmentShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("attribute_test_frag"), 0);
+
+
+	// Create pipeline
+	{
+		const VkPipelineShaderStageCreateInfo shaderStageParams[2] =
+		{
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				0u,															// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_VERTEX_BIT,									// VkShaderStageFlagBits				stage;
+				*m_vertexShaderModule,										// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			},
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				0u,															// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_FRAGMENT_BIT,								// VkShaderStageFlagBits				stage;
+				*m_fragmentShaderModule,									// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			}
+		};
+
+		// Create vertex attribute array and check if their VK formats are supported
+		std::vector<VkVertexInputAttributeDescription> vkAttributeDescriptions;
+		for (size_t attributeNdx = 0; attributeNdx < attributeDescriptions.size(); attributeNdx++)
+		{
+			const VkVertexInputAttributeDescription& attributeDescription = attributeDescriptions[attributeNdx].vkDescription;
+
+			if (!isSupportedVertexFormat(context, attributeDescription.format))
+				throw tcu::NotSupportedError(std::string("Unsupported format for vertex input: ") + getFormatName(attributeDescription.format));
+
+			vkAttributeDescriptions.push_back(attributeDescription);
+		}
+
+		const VkPipelineVertexInputStateCreateInfo	vertexInputStateParams	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineVertexInputStateCreateFlags	flags;
+			(deUint32)bindingDescriptions.size(),							// deUint32									vertexBindingDescriptionCount;
+			bindingDescriptions.data(),										// const VkVertexInputBindingDescription*	pVertexBindingDescriptions;
+			(deUint32)vkAttributeDescriptions.size(),						// deUint32									vertexAttributeDescriptionCount;
+			vkAttributeDescriptions.data()									// const VkVertexInputAttributeDescription*	pVertexAttributeDescriptions;
+		};
+
+		const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineInputAssemblyStateCreateFlags	flags;
+			VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,							// VkPrimitiveTopology						topology;
+			false															// VkBool32									primitiveRestartEnable;
+		};
+
+		const VkViewport viewport =
+		{
+			0.0f,						// float	x;
+			0.0f,						// float	y;
+			(float)m_renderSize.x(),	// float	width;
+			(float)m_renderSize.y(),	// float	height;
+			0.0f,						// float	minDepth;
+			1.0f						// float	maxDepth;
+		};
+
+		const VkRect2D scissor = { { 0, 0 }, { m_renderSize.x(), m_renderSize.y() } };
+
+		const VkPipelineViewportStateCreateInfo viewportStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,			// VkStructureType						sType;
+			DE_NULL,														// const void*							pNext;
+			0u,																// VkPipelineViewportStateCreateFlags	flags;
+			1u,																// deUint32								viewportCount;
+			&viewport,														// const VkViewport*					pViewports;
+			1u,																// deUint32								scissorCount;
+			&scissor														// const VkRect2D*						pScissors;
+		};
+
+		const VkPipelineRasterizationStateCreateInfo rasterStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineRasterizationStateCreateFlags	flags;
+			false,															// VkBool32									depthClampEnable;
+			false,															// VkBool32									rasterizerDiscardEnable;
+			VK_POLYGON_MODE_FILL,											// VkPolygonMode							polygonMode;
+			VK_CULL_MODE_NONE,												// VkCullModeFlags							cullMode;
+			VK_FRONT_FACE_COUNTER_CLOCKWISE,								// VkFrontFace								frontFace;
+			VK_FALSE,														// VkBool32									depthBiasEnable;
+			0.0f,															// float									depthBiasConstantFactor;
+			0.0f,															// float									depthBiasClamp;
+			0.0f,															// float									depthBiasSlopeFactor;
+			1.0f,															// float									lineWidth;
+		};
+
+		const VkPipelineColorBlendAttachmentState colorBlendAttachmentState =
+		{
+			false,																		// VkBool32					blendEnable;
+			VK_BLEND_FACTOR_ONE,														// VkBlendFactor			srcColorBlendFactor;
+			VK_BLEND_FACTOR_ZERO,														// VkBlendFactor			dstColorBlendFactor;
+			VK_BLEND_OP_ADD,															// VkBlendOp				colorBlendOp;
+			VK_BLEND_FACTOR_ONE,														// VkBlendFactor			srcAlphaBlendFactor;
+			VK_BLEND_FACTOR_ZERO,														// VkBlendFactor			dstAlphaBlendFactor;
+			VK_BLEND_OP_ADD,															// VkBlendOp				alphaBlendOp;
+			VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |						// VkColorComponentFlags	colorWriteMask;
+				VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT
+		};
+
+		const VkPipelineColorBlendStateCreateInfo colorBlendStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	// VkStructureType								sType;
+			DE_NULL,													// const void*									pNext;
+			0u,															// VkPipelineColorBlendStateCreateFlags			flags;
+			false,														// VkBool32										logicOpEnable;
+			VK_LOGIC_OP_COPY,											// VkLogicOp									logicOp;
+			1u,															// deUint32										attachmentCount;
+			&colorBlendAttachmentState,									// const VkPipelineColorBlendAttachmentState*	pAttachments;
+			{ 0.0f, 0.0f, 0.0f, 0.0f },									// float										blendConstants[4];
+		};
+
+		const VkPipelineMultisampleStateCreateInfo	multisampleStateParams	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+			VK_SAMPLE_COUNT_1_BIT,										// VkSampleCountFlagBits					rasterizationSamples;
+			false,														// VkBool32									sampleShadingEnable;
+			0.0f,														// float									minSampleShading;
+			DE_NULL,													// const VkSampleMask*						pSampleMask;
+			false,														// VkBool32									alphaToCoverageEnable;
+			false														// VkBool32									alphaToOneEnable;
+		};
+
+		const VkPipelineDynamicStateCreateInfo	dynamicStateParams		=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,		// VkStructureType						sType;
+			DE_NULL,													// const void*							pNext;
+			0u,															// VkPipelineDynamicStateCreateFlags	flags;
+			0u,															// deUint32								dynamicStateCount;
+			DE_NULL														// const VkDynamicState*				pDynamicStates;
+		};
+
+		VkPipelineDepthStencilStateCreateInfo depthStencilStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,													// const void*								pNext;
+			0u,															// VkPipelineDepthStencilStateCreateFlags	flags;
+			false,														// VkBool32									depthTestEnable;
+			false,														// VkBool32									depthWriteEnable;
+			VK_COMPARE_OP_LESS,											// VkCompareOp								depthCompareOp;
+			false,														// VkBool32									depthBoundsTestEnable;
+			false,														// VkBool32									stencilTestEnable;
+			// VkStencilOpState	front;
+			{
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	failOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	passOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	depthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	compareOp;
+				0u,						// deUint32		compareMask;
+				0u,						// deUint32		writeMask;
+				0u,						// deUint32		reference;
+			},
+			// VkStencilOpState	back;
+			{
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	failOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	passOp;
+				VK_STENCIL_OP_KEEP,		// VkStencilOp	depthFailOp;
+				VK_COMPARE_OP_NEVER,	// VkCompareOp	compareOp;
+				0u,						// deUint32		compareMask;
+				0u,						// deUint32		writeMask;
+				0u,						// deUint32		reference;
+			},
+			-1.0f,														// float			minDepthBounds;
+			+1.0f,														// float			maxDepthBounds;
+		};
+
+		const VkGraphicsPipelineCreateInfo graphicsPipelineParams =
+		{
+			VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,	// VkStructureType									sType;
+			DE_NULL,											// const void*										pNext;
+			0u,													// VkPipelineCreateFlags							flags;
+			2u,													// deUint32											stageCount;
+			shaderStageParams,									// const VkPipelineShaderStageCreateInfo*			pStages;
+			&vertexInputStateParams,							// const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+			&inputAssemblyStateParams,							// const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+			DE_NULL,											// const VkPipelineTessellationStateCreateInfo*		pTessellationState;
+			&viewportStateParams,								// const VkPipelineViewportStateCreateInfo*			pViewportState;
+			&rasterStateParams,									// const VkPipelineRasterizationStateCreateInfo*	pRasterizationState;
+			&multisampleStateParams,							// const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+			&depthStencilStateParams,							// const VkPipelineDepthStencilStateCreateInfo*		pDepthStencilState;
+			&colorBlendStateParams,								// const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+			&dynamicStateParams,								// const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+			*m_pipelineLayout,									// VkPipelineLayout									layout;
+			*m_renderPass,										// VkRenderPass										renderPass;
+			0u,													// deUint32											subpass;
+			0u,													// VkPipeline										basePipelineHandle;
+			0u													// deInt32											basePipelineIndex;
+		};
+
+		m_graphicsPipeline	= createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+	}
+
+	// Create vertex buffer
+	{
+		const VkBufferCreateInfo vertexBufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			4096u,										// VkDeviceSize			size;
+			VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyIndexCount;
+			&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		// Upload data for each vertex input binding
+		for (deUint32 bindingNdx = 0; bindingNdx < bindingDescriptions.size(); bindingNdx++)
+		{
+			Move<VkBuffer>			vertexBuffer		= createBuffer(vk, vkDevice, &vertexBufferParams);
+			de::MovePtr<Allocation>	vertexBufferAlloc	= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *vertexBuffer), MemoryRequirement::HostVisible);
+
+			VK_CHECK(vk.bindBufferMemory(vkDevice, *vertexBuffer, vertexBufferAlloc->getMemory(), vertexBufferAlloc->getOffset()));
+
+			writeVertexInputData((deUint8*)vertexBufferAlloc->getHostPtr(), bindingDescriptions[bindingNdx], bindingOffsets[bindingNdx], attributeDescriptions);
+			flushMappedMemoryRange(vk, vkDevice, vertexBufferAlloc->getMemory(), vertexBufferAlloc->getOffset(), vertexBufferParams.size);
+
+			m_vertexBuffers.push_back(vertexBuffer.disown());
+			m_vertexBufferAllocs.push_back(vertexBufferAlloc.release());
+		}
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,	// VkStructureType				sType;
+			DE_NULL,									// const void*					pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,		// VkCommandPoolCreateFlags		flags;
+			queueFamilyIndex,							// deUint32						queueFamilyIndex;
+		};
+
+		m_cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u												// deUint32					bufferCount;
+		};
+
+		const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+			DE_NULL,										// const void*						pNext;
+			0u,												// VkCommandBufferUsageFlags		flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,
+		};
+
+		const VkClearValue attachmentClearValue = defaultClearValue(m_colorFormat);
+
+		const VkRenderPassBeginInfo renderPassBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,				// VkStructureType		sType;
+			DE_NULL,												// const void*			pNext;
+			*m_renderPass,											// VkRenderPass			renderPass;
+			*m_framebuffer,											// VkFramebuffer		framebuffer;
+			{ { 0, 0 }, { m_renderSize.x(), m_renderSize.y() } },	// VkRect2D				renderArea;
+			1u,														// deUint32				clearValueCount;
+			&attachmentClearValue									// const VkClearValue*	pClearValues;
+		};
+
+		m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
+
+		VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+		vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+		vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipeline);
+
+		std::vector<VkBuffer> vertexBuffers;
+		for (size_t bufferNdx = 0; bufferNdx < m_vertexBuffers.size(); bufferNdx++)
+			vertexBuffers.push_back(m_vertexBuffers[bufferNdx]);
+
+		if (vertexBuffers.size() <= 1)
+		{
+			// One vertex buffer
+			vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, (deUint32)vertexBuffers.size(), vertexBuffers.data(), bindingOffsets.data());
+		}
+		else
+		{
+			// Smoke-test vkCmdBindVertexBuffers(..., startBinding, ... )
+
+			const deUint32 firstHalfLength = (deUint32)vertexBuffers.size() / 2;
+			const deUint32 secondHalfLength = firstHalfLength + (deUint32)(vertexBuffers.size() % 2);
+
+			// Bind first half of vertex buffers
+			vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, firstHalfLength, vertexBuffers.data(), bindingOffsets.data());
+
+			// Bind second half of vertex buffers
+			vk.cmdBindVertexBuffers(*m_cmdBuffer, firstHalfLength, secondHalfLength,
+									vertexBuffers.data() + firstHalfLength,
+									bindingOffsets.data() + firstHalfLength);
+		}
+
+		vk.cmdDraw(*m_cmdBuffer, 4, 2, 0, 0);
+
+		vk.cmdEndRenderPass(*m_cmdBuffer);
+		VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+
+		m_fence = createFence(vk, vkDevice, &fenceParams);
+	}
+}
+
+VertexInputInstance::~VertexInputInstance (void)
+{
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+
+	for (size_t bufferNdx = 0; bufferNdx < m_vertexBuffers.size(); bufferNdx++)
+		vk.destroyBuffer(vkDevice, m_vertexBuffers[bufferNdx], DE_NULL);
+
+	for (size_t allocNdx = 0; allocNdx < m_vertexBufferAllocs.size(); allocNdx++)
+		delete m_vertexBufferAllocs[allocNdx];
+}
+
+void VertexInputInstance::writeVertexInputData(deUint8* destPtr, const VkVertexInputBindingDescription& bindingDescription, const VkDeviceSize bindingOffset, const AttributeDescriptionList& attributes)
+{
+	const deUint32 vertexCount = (bindingDescription.inputRate == VK_VERTEX_INPUT_RATE_VERTEX) ? (4 * 2) : 2;
+
+	deUint8* destOffsetPtr = ((deUint8 *)destPtr) + bindingOffset;
+	for (deUint32 vertexNdx = 0; vertexNdx < vertexCount; vertexNdx++)
+	{
+		for (size_t attributeNdx = 0; attributeNdx < attributes.size(); attributeNdx++)
+		{
+			const VertexInputAttributeDescription& attribDesc = attributes[attributeNdx];
+
+			// Only write vertex input data to bindings referenced by attribute descriptions
+			if (attribDesc.vkDescription.binding == bindingDescription.binding)
+			{
+				writeVertexInputValue(destOffsetPtr + attribDesc.vkDescription.offset, attribDesc, vertexNdx);
+			}
+		}
+		destOffsetPtr += bindingDescription.stride;
+	}
+}
+
+void writeVertexInputValueSint (deUint8* destPtr, VkFormat format, int componentNdx, deInt32 value)
+{
+	const deUint32	componentSize	= getVertexFormatComponentSize(format);
+	deUint8*		destFormatPtr	= ((deUint8*)destPtr) + componentSize * componentNdx;
+
+	switch (componentSize)
+	{
+		case 1:
+			*((deInt8*)destFormatPtr) = (deInt8)value;
+			break;
+
+		case 2:
+			*((deInt16*)destFormatPtr) = (deInt16)value;
+			break;
+
+		case 4:
+			*((deInt32*)destFormatPtr) = (deInt32)value;
+			break;
+
+		default:
+			DE_ASSERT(false);
+	}
+}
+
+void writeVertexInputValueUint (deUint8* destPtr, VkFormat format, int componentNdx, deUint32 value)
+{
+	const deUint32	componentSize	= getVertexFormatComponentSize(format);
+	deUint8*		destFormatPtr	= ((deUint8*)destPtr) + componentSize * componentNdx;
+
+	switch (componentSize)
+	{
+		case 1:
+			*((deUint8 *)destFormatPtr) = (deUint8)value;
+			break;
+
+		case 2:
+			*((deUint16 *)destFormatPtr) = (deUint16)value;
+			break;
+
+		case 4:
+			*((deUint32 *)destFormatPtr) = (deUint32)value;
+			break;
+
+		default:
+			DE_ASSERT(false);
+	}
+}
+
+void writeVertexInputValueSfloat (deUint8* destPtr, VkFormat format, int componentNdx, float value)
+{
+	const deUint32	componentSize	= getVertexFormatComponentSize(format);
+	deUint8*		destFormatPtr	= ((deUint8*)destPtr) + componentSize * componentNdx;
+
+	switch (componentSize)
+	{
+		case 2:
+		{
+			deFloat16 f16 = deFloat32To16(value);
+			deMemcpy(destFormatPtr, &f16, sizeof(f16));
+			break;
+		}
+
+		case 4:
+			deMemcpy(destFormatPtr, &value, sizeof(value));
+			break;
+
+		default:
+			DE_ASSERT(false);
+	}
+}
+
+void VertexInputInstance::writeVertexInputValue (deUint8* destPtr, const VertexInputAttributeDescription& attribute, int indexId)
+{
+	const int		vertexInputCount	= VertexInputTest::s_glslTypeDescriptions[attribute.glslType].vertexInputCount;
+	const int		componentCount		= VertexInputTest::s_glslTypeDescriptions[attribute.glslType].vertexInputComponentCount;
+	const deUint32	totalComponentCount	= componentCount * vertexInputCount;
+	const deUint32	vertexInputIndex	= indexId * totalComponentCount + attribute.vertexInputIndex * componentCount;
+	const bool		hasBGROrder			= isVertexFormatComponentOrderBGR(attribute.vkDescription.format);
+	int				swizzledNdx;
+
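+	// BGR-ordered formats store their red and blue channels swapped in memory; swizzledNdx compensates
+	// so that each memory component receives the value computed for its logical (RGBA) position.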
+	for (int componentNdx = 0; componentNdx < componentCount; componentNdx++)
+	{
+		if (hasBGROrder)
+		{
+			if (componentNdx == 0)
+				swizzledNdx = 2;
+			else if (componentNdx == 2)
+				swizzledNdx = 0;
+			else
+				swizzledNdx = componentNdx;
+		}
+		else
+			swizzledNdx = componentNdx;
+
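+		// The written value is derived from the vertex/instance index and component position: negated for
+		// signed integer, signed scaled and float formats, positive for unsigned/unorm/sRGB formats, and
+		// offset from the most negative representable value for SNORM formats.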
+		switch (attribute.glslType)
+		{
+			case VertexInputTest::GLSL_TYPE_INT:
+			case VertexInputTest::GLSL_TYPE_IVEC2:
+			case VertexInputTest::GLSL_TYPE_IVEC3:
+			case VertexInputTest::GLSL_TYPE_IVEC4:
+				writeVertexInputValueSint(destPtr, attribute.vkDescription.format, componentNdx, -(deInt32)(vertexInputIndex + swizzledNdx));
+				break;
+
+			case VertexInputTest::GLSL_TYPE_UINT:
+			case VertexInputTest::GLSL_TYPE_UVEC2:
+			case VertexInputTest::GLSL_TYPE_UVEC3:
+			case VertexInputTest::GLSL_TYPE_UVEC4:
+				writeVertexInputValueUint(destPtr, attribute.vkDescription.format, componentNdx, vertexInputIndex + swizzledNdx);
+				break;
+
+			case VertexInputTest::GLSL_TYPE_FLOAT:
+			case VertexInputTest::GLSL_TYPE_VEC2:
+			case VertexInputTest::GLSL_TYPE_VEC3:
+			case VertexInputTest::GLSL_TYPE_VEC4:
+			case VertexInputTest::GLSL_TYPE_MAT2:
+			case VertexInputTest::GLSL_TYPE_MAT3:
+			case VertexInputTest::GLSL_TYPE_MAT4:
+				if (isVertexFormatSfloat(attribute.vkDescription.format))
+				{
+					writeVertexInputValueSfloat(destPtr, attribute.vkDescription.format, componentNdx, -(0.01f * (float)(vertexInputIndex + swizzledNdx)));
+				}
+				else if (isVertexFormatSscaled(attribute.vkDescription.format))
+				{
+					writeVertexInputValueSint(destPtr, attribute.vkDescription.format, componentNdx, -(deInt32)(vertexInputIndex + swizzledNdx));
+				}
+				else if (isVertexFormatUscaled(attribute.vkDescription.format) || isVertexFormatUnorm(attribute.vkDescription.format) || isVertexFormatSRGB(attribute.vkDescription.format))
+				{
+					writeVertexInputValueUint(destPtr, attribute.vkDescription.format, componentNdx, vertexInputIndex + swizzledNdx);
+				}
+				else if (isVertexFormatSnorm(attribute.vkDescription.format))
+				{
+					const deInt32 minIntValue = -((1 << (getVertexFormatComponentSize(attribute.vkDescription.format) * 8 - 1))) + 1;
+					writeVertexInputValueSint(destPtr, attribute.vkDescription.format, componentNdx, minIntValue + (vertexInputIndex + swizzledNdx));
+				}
+				else
+					DE_ASSERT(false);
+				break;
+
+			case VertexInputTest::GLSL_TYPE_DOUBLE:
+			case VertexInputTest::GLSL_TYPE_DVEC2:
+			case VertexInputTest::GLSL_TYPE_DVEC3:
+			case VertexInputTest::GLSL_TYPE_DVEC4:
+			case VertexInputTest::GLSL_TYPE_DMAT2:
+			case VertexInputTest::GLSL_TYPE_DMAT3:
+			case VertexInputTest::GLSL_TYPE_DMAT4:
+				*(reinterpret_cast<double *>(destPtr) + componentNdx) = -0.01 * (vertexInputIndex + swizzledNdx);
+
+				break;
+
+			default:
+				DE_ASSERT(false);
+		}
+	}
+}
+
+tcu::TestStatus VertexInputInstance::iterate (void)
+{
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const VkQueue				queue		= m_context.getUniversalQueue();
+	const VkSubmitInfo			submitInfo	=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&m_cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity*/));
+
+	return verifyImage();
+}
+
+bool VertexInputTest::isCompatibleType (VkFormat format, GlslType glslType)
+{
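+	// A format is compatible with a GLSL type when the component counts match and the format's numeric
+	// class maps to the GLSL basic type: SINT to int, UINT to uint, any SFLOAT/SNORM/UNORM/SSCALED/
+	// USCALED/SRGB format with components of at most 32 bits to float, and 64-bit SFLOAT to double.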
+	const GlslTypeDescription glslTypeDesc = s_glslTypeDescriptions[glslType];
+
+	if ((deUint32)s_glslTypeDescriptions[glslType].vertexInputComponentCount == getVertexFormatComponentCount(format))
+	{
+		switch (glslTypeDesc.basicType)
+		{
+			case GLSL_BASIC_TYPE_INT:
+				return isVertexFormatSint(format);
+
+			case GLSL_BASIC_TYPE_UINT:
+				return isVertexFormatUint(format);
+
+			case GLSL_BASIC_TYPE_FLOAT:
+				return getVertexFormatComponentSize(format) <= 4 && (isVertexFormatSfloat(format) || isVertexFormatSnorm(format) || isVertexFormatUnorm(format) || isVertexFormatSscaled(format) || isVertexFormatUscaled(format) || isVertexFormatSRGB(format));
+
+			case GLSL_BASIC_TYPE_DOUBLE:
+				return isVertexFormatSfloat(format) && getVertexFormatComponentSize(format) == 8;
+
+			default:
+				DE_ASSERT(false);
+				return false;
+		}
+	}
+	else
+		return false;
+}
+
+tcu::TestStatus VertexInputInstance::verifyImage (void)
+{
+	bool							compareOk			= false;
+	const tcu::TextureFormat		tcuColorFormat		= mapVkFormat(m_colorFormat);
+	tcu::TextureLevel				reference			(tcuColorFormat, m_renderSize.x(), m_renderSize.y());
+	const tcu::PixelBufferAccess	refRedSubregion		(tcu::getSubregion(reference.getAccess(),
+																		   deRoundFloatToInt32((float)m_renderSize.x() * 0.0f),
+																		   deRoundFloatToInt32((float)m_renderSize.y() * 0.0f),
+																		   deRoundFloatToInt32((float)m_renderSize.x() * 0.5f),
+																		   deRoundFloatToInt32((float)m_renderSize.y() * 1.0f)));
+	const tcu::PixelBufferAccess	refBlueSubregion	(tcu::getSubregion(reference.getAccess(),
+																		   deRoundFloatToInt32((float)m_renderSize.x() * 0.5f),
+																		   deRoundFloatToInt32((float)m_renderSize.y() * 0.0f),
+																		   deRoundFloatToInt32((float)m_renderSize.x() * 0.5f),
+																		   deRoundFloatToInt32((float)m_renderSize.y() * 1.0f)));
+
+	// Create reference image: left half red, right half blue
+	tcu::clear(reference.getAccess(), defaultClearColor(tcuColorFormat));
+	tcu::clear(refRedSubregion, tcu::Vec4(1.0f, 0.0f, 0.0f, 1.0f));
+	tcu::clear(refBlueSubregion, tcu::Vec4(0.0f, 0.0f, 1.0f, 1.0f));
+
+	// Compare result with reference image
+	{
+		const DeviceInterface&			vk					= m_context.getDeviceInterface();
+		const VkDevice					vkDevice			= m_context.getDevice();
+		const VkQueue					queue				= m_context.getUniversalQueue();
+		const deUint32					queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+		SimpleAllocator					allocator			(vk, vkDevice, getPhysicalDeviceMemoryProperties(m_context.getInstanceInterface(), m_context.getPhysicalDevice()));
+		de::MovePtr<tcu::TextureLevel>	result				= readColorAttachment(vk, vkDevice, queue, queueFamilyIndex, allocator, *m_colorImage, m_colorFormat, m_renderSize);
+
+		compareOk = tcu::intThresholdPositionDeviationCompare(m_context.getTestContext().getLog(),
+															  "IntImageCompare",
+															  "Image comparison",
+															  reference.getAccess(),
+															  result->getAccess(),
+															  tcu::UVec4(2, 2, 2, 2),
+															  tcu::IVec3(1, 1, 0),
+															  true,
+															  tcu::COMPARE_LOG_RESULT);
+	}
+
+	if (compareOk)
+		return tcu::TestStatus::pass("Result image matches reference");
+	else
+		return tcu::TestStatus::fail("Image mismatch");
+}
+
+std::string getAttributeInfoCaseName (const VertexInputTest::AttributeInfo& attributeInfo)
+{
+	std::ostringstream	caseName;
+	const std::string	formatName	= getFormatName(attributeInfo.vkType);
+
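+	// Strip the "VK_FORMAT_" prefix (10 characters) and lowercase the rest for the case name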
+	caseName << VertexInputTest::s_glslTypeDescriptions[attributeInfo.glslType].name << "_as_" << de::toLower(formatName.substr(10)) << "_rate_";
+
+	if (attributeInfo.inputRate == VK_VERTEX_INPUT_RATE_VERTEX)
+		caseName <<  "vertex";
+	else
+		caseName <<  "instance";
+
+	return caseName.str();
+}
+
+std::string getAttributeInfosCaseName (const std::vector<VertexInputTest::AttributeInfo>& attributeInfos)
+{
+	std::ostringstream caseName;
+
+	for (size_t attributeNdx = 0; attributeNdx < attributeInfos.size(); attributeNdx++)
+	{
+		caseName << getAttributeInfoCaseName(attributeInfos[attributeNdx]);
+
+		if (attributeNdx < attributeInfos.size() - 1)
+			caseName << "-";
+	}
+
+	return caseName.str();
+}
+
+std::string getAttributeInfoDescription (const VertexInputTest::AttributeInfo& attributeInfo)
+{
+	std::ostringstream caseDesc;
+
+	caseDesc << std::string(VertexInputTest::s_glslTypeDescriptions[attributeInfo.glslType].name) << " read from format " << getFormatName(attributeInfo.vkType) << " with ";
+
+	if (attributeInfo.inputRate == VK_VERTEX_INPUT_RATE_VERTEX)
+		caseDesc <<  "vertex input rate ";
+	else
+		caseDesc <<  "instance input rate ";
+
+	return caseDesc.str();
+}
+
+std::string getAttributeInfosDescription (const std::vector<VertexInputTest::AttributeInfo>& attributeInfos)
+{
+	std::ostringstream caseDesc;
+
+	caseDesc << "Uses vertex attributes:\n";
+
+	for (size_t attributeNdx = 0; attributeNdx < attributeInfos.size(); attributeNdx++)
+		caseDesc << "\t- " << getAttributeInfoDescription (attributeInfos[attributeNdx]) << "\n";
+
+	return caseDesc.str();
+}
+
+struct CompatibleFormats
+{
+	VertexInputTest::GlslType	glslType;
+	std::vector<VkFormat>		compatibleVkFormats;
+};
+
+de::MovePtr<tcu::TestCaseGroup> createSingleAttributeTests (tcu::TestContext& testCtx)
+{
+	const VkFormat vertexFormats[] =
+	{
+		// Required, unpacked
+		VK_FORMAT_R8_UNORM,
+		VK_FORMAT_R8_SNORM,
+		VK_FORMAT_R8_UINT,
+		VK_FORMAT_R8_SINT,
+		VK_FORMAT_R8G8_UNORM,
+		VK_FORMAT_R8G8_SNORM,
+		VK_FORMAT_R8G8_UINT,
+		VK_FORMAT_R8G8_SINT,
+		VK_FORMAT_R8G8B8A8_UNORM,
+		VK_FORMAT_R8G8B8A8_SNORM,
+		VK_FORMAT_R8G8B8A8_UINT,
+		VK_FORMAT_R8G8B8A8_SINT,
+		VK_FORMAT_B8G8R8A8_UNORM,
+		VK_FORMAT_R16_UNORM,
+		VK_FORMAT_R16_SNORM,
+		VK_FORMAT_R16_UINT,
+		VK_FORMAT_R16_SINT,
+		VK_FORMAT_R16_SFLOAT,
+		VK_FORMAT_R16G16_UNORM,
+		VK_FORMAT_R16G16_SNORM,
+		VK_FORMAT_R16G16_UINT,
+		VK_FORMAT_R16G16_SINT,
+		VK_FORMAT_R16G16_SFLOAT,
+		VK_FORMAT_R16G16B16A16_UNORM,
+		VK_FORMAT_R16G16B16A16_SNORM,
+		VK_FORMAT_R16G16B16A16_UINT,
+		VK_FORMAT_R16G16B16A16_SINT,
+		VK_FORMAT_R16G16B16A16_SFLOAT,
+		VK_FORMAT_R32_UINT,
+		VK_FORMAT_R32_SINT,
+		VK_FORMAT_R32_SFLOAT,
+		VK_FORMAT_R32G32_UINT,
+		VK_FORMAT_R32G32_SINT,
+		VK_FORMAT_R32G32_SFLOAT,
+		VK_FORMAT_R32G32B32_UINT,
+		VK_FORMAT_R32G32B32_SINT,
+		VK_FORMAT_R32G32B32_SFLOAT,
+		VK_FORMAT_R32G32B32A32_UINT,
+		VK_FORMAT_R32G32B32A32_SINT,
+		VK_FORMAT_R32G32B32A32_SFLOAT,
+
+		// Scaled formats
+		VK_FORMAT_R8G8_USCALED,
+		VK_FORMAT_R8G8_SSCALED,
+		VK_FORMAT_R16_USCALED,
+		VK_FORMAT_R16_SSCALED,
+		VK_FORMAT_R8G8B8_USCALED,
+		VK_FORMAT_R8G8B8_SSCALED,
+		VK_FORMAT_B8G8R8_USCALED,
+		VK_FORMAT_B8G8R8_SSCALED,
+		VK_FORMAT_R8G8B8A8_USCALED,
+		VK_FORMAT_R8G8B8A8_SSCALED,
+		VK_FORMAT_B8G8R8A8_USCALED,
+		VK_FORMAT_B8G8R8A8_SSCALED,
+		VK_FORMAT_R16G16_USCALED,
+		VK_FORMAT_R16G16_SSCALED,
+		VK_FORMAT_R16G16B16_USCALED,
+		VK_FORMAT_R16G16B16_SSCALED,
+		VK_FORMAT_R16G16B16A16_USCALED,
+		VK_FORMAT_R16G16B16A16_SSCALED,
+
+		// SRGB formats
+		VK_FORMAT_R8_SRGB,
+		VK_FORMAT_R8G8_SRGB,
+		VK_FORMAT_R8G8B8_SRGB,
+		VK_FORMAT_B8G8R8_SRGB,
+		VK_FORMAT_R8G8B8A8_SRGB,
+		VK_FORMAT_B8G8R8A8_SRGB,
+
+		// Double formats
+		VK_FORMAT_R64_SFLOAT,
+		VK_FORMAT_R64G64_SFLOAT,
+		VK_FORMAT_R64G64B64_SFLOAT,
+		VK_FORMAT_R64G64B64A64_SFLOAT,
+	};
+
+	de::MovePtr<tcu::TestCaseGroup>	singleAttributeTests (new tcu::TestCaseGroup(testCtx, "single_attribute", "Uses one attribute"));
+
+	for (int formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(vertexFormats); formatNdx++)
+	{
+		for (int glslTypeNdx = 0; glslTypeNdx < VertexInputTest::GLSL_TYPE_COUNT; glslTypeNdx++)
+		{
+			if (VertexInputTest::isCompatibleType(vertexFormats[formatNdx], (VertexInputTest::GlslType)glslTypeNdx))
+			{
+				// Create test case for RATE_VERTEX
+				VertexInputTest::AttributeInfo attributeInfo;
+				attributeInfo.vkType	= vertexFormats[formatNdx];
+				attributeInfo.glslType	= (VertexInputTest::GlslType)glslTypeNdx;
+				attributeInfo.inputRate	= VK_VERTEX_INPUT_RATE_VERTEX;
+
+				singleAttributeTests->addChild(new VertexInputTest(testCtx,
+																   getAttributeInfoCaseName(attributeInfo),
+																   getAttributeInfoDescription(attributeInfo),
+																   std::vector<VertexInputTest::AttributeInfo>(1, attributeInfo),
+																   VertexInputTest::BINDING_MAPPING_ONE_TO_ONE));
+
+				// Create test case for RATE_INSTANCE
+				attributeInfo.inputRate	= VK_VERTEX_INPUT_RATE_INSTANCE;
+
+				singleAttributeTests->addChild(new VertexInputTest(testCtx,
+																   getAttributeInfoCaseName(attributeInfo),
+																   getAttributeInfoDescription(attributeInfo),
+																   std::vector<VertexInputTest::AttributeInfo>(1, attributeInfo),
+																   VertexInputTest::BINDING_MAPPING_ONE_TO_ONE));
+			}
+		}
+	}
+
+	return singleAttributeTests;
+}
+
+de::MovePtr<tcu::TestCaseGroup> createMultipleAttributeTests (tcu::TestContext& testCtx)
+{
+	// Required vertex formats, unpacked
+	const VkFormat vertexFormats[] =
+	{
+		VK_FORMAT_R8_UNORM,
+		VK_FORMAT_R8_SNORM,
+		VK_FORMAT_R8_UINT,
+		VK_FORMAT_R8_SINT,
+		VK_FORMAT_R8G8_UNORM,
+		VK_FORMAT_R8G8_SNORM,
+		VK_FORMAT_R8G8_UINT,
+		VK_FORMAT_R8G8_SINT,
+		VK_FORMAT_R8G8B8A8_UNORM,
+		VK_FORMAT_R8G8B8A8_SNORM,
+		VK_FORMAT_R8G8B8A8_UINT,
+		VK_FORMAT_R8G8B8A8_SINT,
+		VK_FORMAT_B8G8R8A8_UNORM,
+		VK_FORMAT_R16_UNORM,
+		VK_FORMAT_R16_SNORM,
+		VK_FORMAT_R16_UINT,
+		VK_FORMAT_R16_SINT,
+		VK_FORMAT_R16_SFLOAT,
+		VK_FORMAT_R16G16_UNORM,
+		VK_FORMAT_R16G16_SNORM,
+		VK_FORMAT_R16G16_UINT,
+		VK_FORMAT_R16G16_SINT,
+		VK_FORMAT_R16G16_SFLOAT,
+		VK_FORMAT_R16G16B16A16_UNORM,
+		VK_FORMAT_R16G16B16A16_SNORM,
+		VK_FORMAT_R16G16B16A16_UINT,
+		VK_FORMAT_R16G16B16A16_SINT,
+		VK_FORMAT_R16G16B16A16_SFLOAT,
+		VK_FORMAT_R32_UINT,
+		VK_FORMAT_R32_SINT,
+		VK_FORMAT_R32_SFLOAT,
+		VK_FORMAT_R32G32_UINT,
+		VK_FORMAT_R32G32_SINT,
+		VK_FORMAT_R32G32_SFLOAT,
+		VK_FORMAT_R32G32B32_UINT,
+		VK_FORMAT_R32G32B32_SINT,
+		VK_FORMAT_R32G32B32_SFLOAT,
+		VK_FORMAT_R32G32B32A32_UINT,
+		VK_FORMAT_R32G32B32A32_SINT,
+		VK_FORMAT_R32G32B32A32_SFLOAT
+	};
+
+	de::MovePtr<tcu::TestCaseGroup>	multipleAttributeTests (new tcu::TestCaseGroup(testCtx, "multiple_attributes", "Uses more than one attribute"));
+
+	// Find compatible VK formats for each GLSL vertex type
+	CompatibleFormats compatibleFormats[VertexInputTest::GLSL_TYPE_COUNT];
+	{
+		for (int glslTypeNdx = 0; glslTypeNdx < VertexInputTest::GLSL_TYPE_COUNT; glslTypeNdx++)
+		{
+			for (int formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(vertexFormats); formatNdx++)
+			{
+				if (VertexInputTest::isCompatibleType(vertexFormats[formatNdx], (VertexInputTest::GlslType)glslTypeNdx))
+					compatibleFormats[glslTypeNdx].compatibleVkFormats.push_back(vertexFormats[formatNdx]);
+			}
+		}
+	}
+
+	de::Random						randomFunc				(102030);
+	GlslTypeCombinationsIterator	glslTypeCombinationsItr	(VertexInputTest::GLSL_TYPE_DOUBLE, 3); // Exclude double values, which are not included in vertexFormats
+	de::MovePtr<tcu::TestCaseGroup> oneToOneAttributeTests	(new tcu::TestCaseGroup(testCtx, "attributes", ""));
+	de::MovePtr<tcu::TestCaseGroup> oneToManyAttributeTests	(new tcu::TestCaseGroup(testCtx, "attributes", ""));
+
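+	// For every combination of three (non-double) GLSL types, pick a random compatible format for each
+	// attribute and alternate between per-vertex and per-instance input rates.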
+	while (glslTypeCombinationsItr.hasNext())
+	{
+		const std::vector<VertexInputTest::GlslType>	glslTypes		= glslTypeCombinationsItr.next();
+		std::vector<VertexInputTest::AttributeInfo>		attributeInfos	(glslTypes.size());
+
+		for (size_t attributeNdx = 0; attributeNdx < attributeInfos.size(); attributeNdx++)
+		{
+			DE_ASSERT(!compatibleFormats[glslTypes[attributeNdx]].compatibleVkFormats.empty());
+
+			// Select a random compatible format
+			const std::vector<VkFormat>& formats = compatibleFormats[glslTypes[attributeNdx]].compatibleVkFormats;
+			const VkFormat format = formats[randomFunc.getUint32() % formats.size()];
+
+			attributeInfos[attributeNdx].glslType	= glslTypes[attributeNdx];
+			attributeInfos[attributeNdx].inputRate	= (attributeNdx % 2 == 0) ? VK_VERTEX_INPUT_RATE_VERTEX : VK_VERTEX_INPUT_RATE_INSTANCE;
+			attributeInfos[attributeNdx].vkType		= format;
+		}
+
+		const std::string	caseName	= getAttributeInfosCaseName(attributeInfos);
+		const std::string	caseDesc	= getAttributeInfosDescription(attributeInfos);
+
+		oneToOneAttributeTests->addChild(new VertexInputTest(testCtx, caseName, caseDesc, attributeInfos, VertexInputTest::BINDING_MAPPING_ONE_TO_ONE));
+		oneToManyAttributeTests->addChild(new VertexInputTest(testCtx, caseName, caseDesc, attributeInfos, VertexInputTest::BINDING_MAPPING_ONE_TO_MANY));
+	}
+
+	de::MovePtr<tcu::TestCaseGroup> bindingOneToOneTests	(new tcu::TestCaseGroup(testCtx, "binding_one_to_one", "Each attribute uses a unique binding"));
+	bindingOneToOneTests->addChild(oneToOneAttributeTests.release());
+	multipleAttributeTests->addChild(bindingOneToOneTests.release());
+
+	de::MovePtr<tcu::TestCaseGroup> bindingOneToManyTests	(new tcu::TestCaseGroup(testCtx, "binding_one_to_many", "Attributes share the same binding"));
+	bindingOneToManyTests->addChild(oneToManyAttributeTests.release());
+	multipleAttributeTests->addChild(bindingOneToManyTests.release());
+
+	return multipleAttributeTests;
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createVertexInputTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	vertexInputTests (new tcu::TestCaseGroup(testCtx, "vertex_input", ""));
+
+	vertexInputTests->addChild(createSingleAttributeTests(testCtx).release());
+	vertexInputTests->addChild(createMultipleAttributeTests(testCtx).release());
+
+	return vertexInputTests.release();
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexInputTests.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexInputTests.hpp
new file mode 100644
index 0000000..161efca
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexInputTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTPIPELINEVERTEXINPUTTESTS_HPP
+#define _VKTPIPELINEVERTEXINPUTTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vertex Input Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+tcu::TestCaseGroup* createVertexInputTests (tcu::TestContext& testCtx);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINEVERTEXINPUTTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexUtil.cpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexUtil.cpp
new file mode 100644
index 0000000..13b9fd4
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexUtil.cpp
@@ -0,0 +1,967 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Utilities for vertex buffers.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktPipelineVertexUtil.hpp"
+#include "vkStrUtil.hpp"
+#include "tcuVectorUtil.hpp"
+#include "deStringUtil.hpp"
+
+namespace vkt
+{
+namespace pipeline
+{
+
+using namespace vk;
+
+deUint32 getVertexFormatSize (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_R8_UNORM:
+		case VK_FORMAT_R8_SNORM:
+		case VK_FORMAT_R8_USCALED:
+		case VK_FORMAT_R8_SSCALED:
+		case VK_FORMAT_R8_UINT:
+		case VK_FORMAT_R8_SINT:
+		case VK_FORMAT_R8_SRGB:
+		case VK_FORMAT_R4G4_UNORM_PACK8:
+			return 1;
+
+		case VK_FORMAT_R8G8_UNORM:
+		case VK_FORMAT_R8G8_SNORM:
+		case VK_FORMAT_R8G8_USCALED:
+		case VK_FORMAT_R8G8_SSCALED:
+		case VK_FORMAT_R8G8_UINT:
+		case VK_FORMAT_R8G8_SINT:
+		case VK_FORMAT_R8G8_SRGB:
+		case VK_FORMAT_R16_UNORM:
+		case VK_FORMAT_R16_SNORM:
+		case VK_FORMAT_R16_USCALED:
+		case VK_FORMAT_R16_SSCALED:
+		case VK_FORMAT_R16_UINT:
+		case VK_FORMAT_R16_SINT:
+		case VK_FORMAT_R16_SFLOAT:
+		case VK_FORMAT_R5G6B5_UNORM_PACK16:
+		case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
+			return 2;
+
+		case VK_FORMAT_R8G8B8_UNORM:
+		case VK_FORMAT_R8G8B8_SNORM:
+		case VK_FORMAT_R8G8B8_USCALED:
+		case VK_FORMAT_R8G8B8_SSCALED:
+		case VK_FORMAT_R8G8B8_UINT:
+		case VK_FORMAT_R8G8B8_SINT:
+		case VK_FORMAT_R8G8B8_SRGB:
+		case VK_FORMAT_B8G8R8_UNORM:
+		case VK_FORMAT_B8G8R8_SNORM:
+		case VK_FORMAT_B8G8R8_USCALED:
+		case VK_FORMAT_B8G8R8_SSCALED:
+		case VK_FORMAT_B8G8R8_UINT:
+		case VK_FORMAT_B8G8R8_SINT:
+		case VK_FORMAT_B8G8R8_SRGB:
+			return 3;
+
+		case VK_FORMAT_R8G8B8A8_UNORM:
+		case VK_FORMAT_R8G8B8A8_SNORM:
+		case VK_FORMAT_R8G8B8A8_USCALED:
+		case VK_FORMAT_R8G8B8A8_SSCALED:
+		case VK_FORMAT_R8G8B8A8_UINT:
+		case VK_FORMAT_R8G8B8A8_SINT:
+		case VK_FORMAT_R8G8B8A8_SRGB:
+		case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
+		case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
+		case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
+		case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
+		case VK_FORMAT_A2R10G10B10_UINT_PACK32:
+		case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+		case VK_FORMAT_R16G16_UNORM:
+		case VK_FORMAT_R16G16_SNORM:
+		case VK_FORMAT_R16G16_USCALED:
+		case VK_FORMAT_R16G16_SSCALED:
+		case VK_FORMAT_R16G16_UINT:
+		case VK_FORMAT_R16G16_SINT:
+		case VK_FORMAT_R16G16_SFLOAT:
+		case VK_FORMAT_R32_UINT:
+		case VK_FORMAT_R32_SINT:
+		case VK_FORMAT_R32_SFLOAT:
+		case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
+		case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
+		case VK_FORMAT_B8G8R8A8_UNORM:
+		case VK_FORMAT_B8G8R8A8_SNORM:
+		case VK_FORMAT_B8G8R8A8_USCALED:
+		case VK_FORMAT_B8G8R8A8_SSCALED:
+		case VK_FORMAT_B8G8R8A8_UINT:
+		case VK_FORMAT_B8G8R8A8_SINT:
+		case VK_FORMAT_B8G8R8A8_SRGB:
+		case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+		case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+		case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
+		case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
+		case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+		case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+			return 4;
+
+		case VK_FORMAT_R16G16B16_UNORM:
+		case VK_FORMAT_R16G16B16_SNORM:
+		case VK_FORMAT_R16G16B16_USCALED:
+		case VK_FORMAT_R16G16B16_SSCALED:
+		case VK_FORMAT_R16G16B16_UINT:
+		case VK_FORMAT_R16G16B16_SINT:
+		case VK_FORMAT_R16G16B16_SFLOAT:
+			return 6;
+
+		case VK_FORMAT_R16G16B16A16_UNORM:
+		case VK_FORMAT_R16G16B16A16_SNORM:
+		case VK_FORMAT_R16G16B16A16_USCALED:
+		case VK_FORMAT_R16G16B16A16_SSCALED:
+		case VK_FORMAT_R16G16B16A16_UINT:
+		case VK_FORMAT_R16G16B16A16_SINT:
+		case VK_FORMAT_R16G16B16A16_SFLOAT:
+		case VK_FORMAT_R32G32_UINT:
+		case VK_FORMAT_R32G32_SINT:
+		case VK_FORMAT_R32G32_SFLOAT:
+		case VK_FORMAT_R64_SFLOAT:
+			return 8;
+
+		case VK_FORMAT_R32G32B32_UINT:
+		case VK_FORMAT_R32G32B32_SINT:
+		case VK_FORMAT_R32G32B32_SFLOAT:
+			return 12;
+
+		case VK_FORMAT_R32G32B32A32_UINT:
+		case VK_FORMAT_R32G32B32A32_SINT:
+		case VK_FORMAT_R32G32B32A32_SFLOAT:
+		case VK_FORMAT_R64G64_SFLOAT:
+			return 16;
+
+		case VK_FORMAT_R64G64B64_SFLOAT:
+			return 24;
+
+		case VK_FORMAT_R64G64B64A64_SFLOAT:
+			return 32;
+
+		default:
+			break;
+	}
+
+	DE_ASSERT(false);
+	return 0;
+}
+
+deUint32 getVertexFormatComponentCount (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_R8_USCALED:
+		case VK_FORMAT_R8_UNORM:
+		case VK_FORMAT_R8_UINT:
+		case VK_FORMAT_R8_SSCALED:
+		case VK_FORMAT_R8_SRGB:
+		case VK_FORMAT_R8_SNORM:
+		case VK_FORMAT_R8_SINT:
+		case VK_FORMAT_R16_USCALED:
+		case VK_FORMAT_R16_UNORM:
+		case VK_FORMAT_R16_UINT:
+		case VK_FORMAT_R16_SSCALED:
+		case VK_FORMAT_R16_SNORM:
+		case VK_FORMAT_R16_SINT:
+		case VK_FORMAT_R16_SFLOAT:
+		case VK_FORMAT_R32_UINT:
+		case VK_FORMAT_R32_SINT:
+		case VK_FORMAT_R32_SFLOAT:
+		case VK_FORMAT_R64_SFLOAT:
+			return 1;
+
+		case VK_FORMAT_R4G4_UNORM_PACK8:
+		case VK_FORMAT_R8G8_UNORM:
+		case VK_FORMAT_R8G8_SNORM:
+		case VK_FORMAT_R8G8_USCALED:
+		case VK_FORMAT_R8G8_SSCALED:
+		case VK_FORMAT_R8G8_UINT:
+		case VK_FORMAT_R8G8_SINT:
+		case VK_FORMAT_R8G8_SRGB:
+		case VK_FORMAT_R16G16_UNORM:
+		case VK_FORMAT_R16G16_SNORM:
+		case VK_FORMAT_R16G16_USCALED:
+		case VK_FORMAT_R16G16_SSCALED:
+		case VK_FORMAT_R16G16_UINT:
+		case VK_FORMAT_R16G16_SINT:
+		case VK_FORMAT_R16G16_SFLOAT:
+		case VK_FORMAT_R32G32_UINT:
+		case VK_FORMAT_R32G32_SINT:
+		case VK_FORMAT_R32G32_SFLOAT:
+		case VK_FORMAT_R64G64_SFLOAT:
+			return 2;
+
+		case VK_FORMAT_R8G8B8_UNORM:
+		case VK_FORMAT_R8G8B8_SNORM:
+		case VK_FORMAT_R8G8B8_USCALED:
+		case VK_FORMAT_R8G8B8_SSCALED:
+		case VK_FORMAT_R8G8B8_UINT:
+		case VK_FORMAT_R8G8B8_SINT:
+		case VK_FORMAT_R8G8B8_SRGB:
+		case VK_FORMAT_B8G8R8_UNORM:
+		case VK_FORMAT_B8G8R8_SNORM:
+		case VK_FORMAT_B8G8R8_USCALED:
+		case VK_FORMAT_B8G8R8_SSCALED:
+		case VK_FORMAT_B8G8R8_UINT:
+		case VK_FORMAT_B8G8R8_SINT:
+		case VK_FORMAT_B8G8R8_SRGB:
+		case VK_FORMAT_R16G16B16_UNORM:
+		case VK_FORMAT_R16G16B16_SNORM:
+		case VK_FORMAT_R16G16B16_USCALED:
+		case VK_FORMAT_R16G16B16_SSCALED:
+		case VK_FORMAT_R16G16B16_UINT:
+		case VK_FORMAT_R16G16B16_SINT:
+		case VK_FORMAT_R16G16B16_SFLOAT:
+		case VK_FORMAT_R32G32B32_UINT:
+		case VK_FORMAT_R32G32B32_SINT:
+		case VK_FORMAT_R32G32B32_SFLOAT:
+		case VK_FORMAT_R64G64B64_SFLOAT:
+		case VK_FORMAT_R5G6B5_UNORM_PACK16:
+		case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
+		case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
+			return 3;
+
+		case VK_FORMAT_R8G8B8A8_UNORM:
+		case VK_FORMAT_R8G8B8A8_SNORM:
+		case VK_FORMAT_R8G8B8A8_USCALED:
+		case VK_FORMAT_R8G8B8A8_SSCALED:
+		case VK_FORMAT_R8G8B8A8_UINT:
+		case VK_FORMAT_R8G8B8A8_SINT:
+		case VK_FORMAT_R8G8B8A8_SRGB:
+		case VK_FORMAT_B8G8R8A8_UNORM:
+		case VK_FORMAT_B8G8R8A8_SNORM:
+		case VK_FORMAT_B8G8R8A8_USCALED:
+		case VK_FORMAT_B8G8R8A8_SSCALED:
+		case VK_FORMAT_B8G8R8A8_UINT:
+		case VK_FORMAT_B8G8R8A8_SINT:
+		case VK_FORMAT_B8G8R8A8_SRGB:
+		case VK_FORMAT_R16G16B16A16_UNORM:
+		case VK_FORMAT_R16G16B16A16_SNORM:
+		case VK_FORMAT_R16G16B16A16_USCALED:
+		case VK_FORMAT_R16G16B16A16_SSCALED:
+		case VK_FORMAT_R16G16B16A16_UINT:
+		case VK_FORMAT_R16G16B16A16_SINT:
+		case VK_FORMAT_R16G16B16A16_SFLOAT:
+		case VK_FORMAT_R32G32B32A32_UINT:
+		case VK_FORMAT_R32G32B32A32_SINT:
+		case VK_FORMAT_R32G32B32A32_SFLOAT:
+		case VK_FORMAT_R64G64B64A64_SFLOAT:
+		case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
+		case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
+		case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
+		case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
+		case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
+		case VK_FORMAT_A2R10G10B10_UINT_PACK32:
+		case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+		case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+		case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+		case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
+		case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
+		case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+		case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+			return 4;
+
+		default:
+			break;
+	}
+
+	DE_ASSERT(false);
+	return 0;
+}
+
+deUint32 getVertexFormatComponentSize (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_R8_UNORM:
+		case VK_FORMAT_R8_SNORM:
+		case VK_FORMAT_R8_USCALED:
+		case VK_FORMAT_R8_SSCALED:
+		case VK_FORMAT_R8_UINT:
+		case VK_FORMAT_R8_SINT:
+		case VK_FORMAT_R8_SRGB:
+		case VK_FORMAT_R8G8_UNORM:
+		case VK_FORMAT_R8G8_SNORM:
+		case VK_FORMAT_R8G8_USCALED:
+		case VK_FORMAT_R8G8_SSCALED:
+		case VK_FORMAT_R8G8_UINT:
+		case VK_FORMAT_R8G8_SINT:
+		case VK_FORMAT_R8G8_SRGB:
+		case VK_FORMAT_R8G8B8_UNORM:
+		case VK_FORMAT_R8G8B8_SNORM:
+		case VK_FORMAT_R8G8B8_USCALED:
+		case VK_FORMAT_R8G8B8_SSCALED:
+		case VK_FORMAT_R8G8B8_UINT:
+		case VK_FORMAT_R8G8B8_SINT:
+		case VK_FORMAT_R8G8B8_SRGB:
+		case VK_FORMAT_B8G8R8_UNORM:
+		case VK_FORMAT_B8G8R8_SNORM:
+		case VK_FORMAT_B8G8R8_USCALED:
+		case VK_FORMAT_B8G8R8_SSCALED:
+		case VK_FORMAT_B8G8R8_UINT:
+		case VK_FORMAT_B8G8R8_SINT:
+		case VK_FORMAT_B8G8R8_SRGB:
+		case VK_FORMAT_R8G8B8A8_UNORM:
+		case VK_FORMAT_R8G8B8A8_SNORM:
+		case VK_FORMAT_R8G8B8A8_USCALED:
+		case VK_FORMAT_R8G8B8A8_SSCALED:
+		case VK_FORMAT_R8G8B8A8_UINT:
+		case VK_FORMAT_R8G8B8A8_SINT:
+		case VK_FORMAT_R8G8B8A8_SRGB:
+		case VK_FORMAT_B8G8R8A8_UNORM:
+		case VK_FORMAT_B8G8R8A8_SNORM:
+		case VK_FORMAT_B8G8R8A8_USCALED:
+		case VK_FORMAT_B8G8R8A8_SSCALED:
+		case VK_FORMAT_B8G8R8A8_UINT:
+		case VK_FORMAT_B8G8R8A8_SINT:
+		case VK_FORMAT_B8G8R8A8_SRGB:
+			return 1;
+
+		case VK_FORMAT_R16_UNORM:
+		case VK_FORMAT_R16_SNORM:
+		case VK_FORMAT_R16_USCALED:
+		case VK_FORMAT_R16_SSCALED:
+		case VK_FORMAT_R16_UINT:
+		case VK_FORMAT_R16_SINT:
+		case VK_FORMAT_R16_SFLOAT:
+		case VK_FORMAT_R16G16_UNORM:
+		case VK_FORMAT_R16G16_SNORM:
+		case VK_FORMAT_R16G16_USCALED:
+		case VK_FORMAT_R16G16_SSCALED:
+		case VK_FORMAT_R16G16_UINT:
+		case VK_FORMAT_R16G16_SINT:
+		case VK_FORMAT_R16G16_SFLOAT:
+		case VK_FORMAT_R16G16B16_UNORM:
+		case VK_FORMAT_R16G16B16_SNORM:
+		case VK_FORMAT_R16G16B16_USCALED:
+		case VK_FORMAT_R16G16B16_SSCALED:
+		case VK_FORMAT_R16G16B16_UINT:
+		case VK_FORMAT_R16G16B16_SINT:
+		case VK_FORMAT_R16G16B16_SFLOAT:
+		case VK_FORMAT_R16G16B16A16_UNORM:
+		case VK_FORMAT_R16G16B16A16_SNORM:
+		case VK_FORMAT_R16G16B16A16_USCALED:
+		case VK_FORMAT_R16G16B16A16_SSCALED:
+		case VK_FORMAT_R16G16B16A16_UINT:
+		case VK_FORMAT_R16G16B16A16_SINT:
+		case VK_FORMAT_R16G16B16A16_SFLOAT:
+			return 2;
+
+		case VK_FORMAT_R32_UINT:
+		case VK_FORMAT_R32_SINT:
+		case VK_FORMAT_R32_SFLOAT:
+		case VK_FORMAT_R32G32_UINT:
+		case VK_FORMAT_R32G32_SINT:
+		case VK_FORMAT_R32G32_SFLOAT:
+		case VK_FORMAT_R32G32B32_UINT:
+		case VK_FORMAT_R32G32B32_SINT:
+		case VK_FORMAT_R32G32B32_SFLOAT:
+		case VK_FORMAT_R32G32B32A32_UINT:
+		case VK_FORMAT_R32G32B32A32_SINT:
+		case VK_FORMAT_R32G32B32A32_SFLOAT:
+			return 4;
+
+		case VK_FORMAT_R64_SFLOAT:
+		case VK_FORMAT_R64G64_SFLOAT:
+		case VK_FORMAT_R64G64B64_SFLOAT:
+		case VK_FORMAT_R64G64B64A64_SFLOAT:
+			return 8;
+
+		default:
+			break;
+	}
+
+	DE_ASSERT(false);
+	return 0;
+}
+
+bool isVertexFormatComponentOrderBGR (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_B8G8R8_UNORM:
+		case VK_FORMAT_B8G8R8_SNORM:
+		case VK_FORMAT_B8G8R8_USCALED:
+		case VK_FORMAT_B8G8R8_SSCALED:
+		case VK_FORMAT_B8G8R8_UINT:
+		case VK_FORMAT_B8G8R8_SINT:
+		case VK_FORMAT_B8G8R8_SRGB:
+		case VK_FORMAT_B8G8R8A8_UNORM:
+		case VK_FORMAT_B8G8R8A8_SNORM:
+		case VK_FORMAT_B8G8R8A8_USCALED:
+		case VK_FORMAT_B8G8R8A8_SSCALED:
+		case VK_FORMAT_B8G8R8A8_UINT:
+		case VK_FORMAT_B8G8R8A8_SINT:
+		case VK_FORMAT_B8G8R8A8_SRGB:
+		case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+		case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+		case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
+		case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
+		case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+		case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+			return true;
+
+		default:
+			break;
+	}
+	return false;
+}
+
+bool isVertexFormatSint (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_R8_SINT:
+		case VK_FORMAT_R8G8_SINT:
+		case VK_FORMAT_R16_SINT:
+		case VK_FORMAT_R8G8B8_SINT:
+		case VK_FORMAT_B8G8R8_SINT:
+		case VK_FORMAT_R8G8B8A8_SINT:
+		case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+		case VK_FORMAT_R16G16_SINT:
+		case VK_FORMAT_R32_SINT:
+		case VK_FORMAT_B8G8R8A8_SINT:
+		case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+		case VK_FORMAT_R16G16B16_SINT:
+		case VK_FORMAT_R16G16B16A16_SINT:
+		case VK_FORMAT_R32G32_SINT:
+		case VK_FORMAT_R32G32B32_SINT:
+		case VK_FORMAT_R32G32B32A32_SINT:
+			return true;
+
+		default:
+			break;
+	}
+
+	return false;
+}
+
+bool isVertexFormatUint (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_R8_UINT:
+		case VK_FORMAT_R8G8_UINT:
+		case VK_FORMAT_R16_UINT:
+		case VK_FORMAT_R8G8B8_UINT:
+		case VK_FORMAT_B8G8R8_UINT:
+		case VK_FORMAT_R8G8B8A8_UINT:
+		case VK_FORMAT_A2R10G10B10_UINT_PACK32:
+		case VK_FORMAT_R16G16_UINT:
+		case VK_FORMAT_R32_UINT:
+		case VK_FORMAT_B8G8R8A8_UINT:
+		case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+		case VK_FORMAT_R16G16B16_UINT:
+		case VK_FORMAT_R16G16B16A16_UINT:
+		case VK_FORMAT_R32G32_UINT:
+		case VK_FORMAT_R32G32B32_UINT:
+		case VK_FORMAT_R32G32B32A32_UINT:
+			return true;
+
+		default:
+			break;
+	}
+
+	return false;
+
+}
+
+bool isVertexFormatSfloat (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_R16_SFLOAT:
+		case VK_FORMAT_R16G16_SFLOAT:
+		case VK_FORMAT_R32_SFLOAT:
+		case VK_FORMAT_R16G16B16_SFLOAT:
+		case VK_FORMAT_R16G16B16A16_SFLOAT:
+		case VK_FORMAT_R32G32_SFLOAT:
+		case VK_FORMAT_R64_SFLOAT:
+		case VK_FORMAT_R32G32B32_SFLOAT:
+		case VK_FORMAT_R32G32B32A32_SFLOAT:
+		case VK_FORMAT_R64G64_SFLOAT:
+		case VK_FORMAT_R64G64B64_SFLOAT:
+		case VK_FORMAT_R64G64B64A64_SFLOAT:
+			return true;
+
+		default:
+			break;
+	}
+
+	return false;
+
+}
+
+bool isVertexFormatUfloat (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
+		case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
+			return true;
+
+		default:
+			break;
+	}
+
+	return false;
+
+}
+
+bool isVertexFormatUnorm (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_R8_UNORM:
+		case VK_FORMAT_R4G4_UNORM_PACK8:
+		case VK_FORMAT_R8G8_UNORM:
+		case VK_FORMAT_R16_UNORM:
+		case VK_FORMAT_R5G6B5_UNORM_PACK16:
+		case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
+		case VK_FORMAT_R8G8B8_UNORM:
+		case VK_FORMAT_B8G8R8_UNORM:
+		case VK_FORMAT_R8G8B8A8_UNORM:
+		case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
+		case VK_FORMAT_R16G16_UNORM:
+		case VK_FORMAT_B8G8R8A8_UNORM:
+		case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+		case VK_FORMAT_R16G16B16_UNORM:
+		case VK_FORMAT_R16G16B16A16_UNORM:
+			return true;
+
+		default:
+			break;
+	}
+
+	return false;
+
+}
+
+bool isVertexFormatSnorm (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_R8_SNORM:
+		case VK_FORMAT_R8G8_SNORM:
+		case VK_FORMAT_R16_SNORM:
+		case VK_FORMAT_R8G8B8_SNORM:
+		case VK_FORMAT_B8G8R8_SNORM:
+		case VK_FORMAT_R8G8B8A8_SNORM:
+		case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
+		case VK_FORMAT_R16G16_SNORM:
+		case VK_FORMAT_B8G8R8A8_SNORM:
+		case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+		case VK_FORMAT_R16G16B16_SNORM:
+		case VK_FORMAT_R16G16B16A16_SNORM:
+			return true;
+
+		default:
+			break;
+	}
+
+	return false;
+
+}
+
+bool isVertexFormatSRGB (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_R8_SRGB:
+		case VK_FORMAT_R8G8_SRGB:
+		case VK_FORMAT_R8G8B8_SRGB:
+		case VK_FORMAT_B8G8R8_SRGB:
+		case VK_FORMAT_R8G8B8A8_SRGB:
+		case VK_FORMAT_B8G8R8A8_SRGB:
+			return true;
+
+		default:
+			break;
+	}
+
+	return false;
+
+}
+
+bool isVertexFormatSscaled (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_R8_SSCALED:
+		case VK_FORMAT_R8G8_SSCALED:
+		case VK_FORMAT_R16_SSCALED:
+		case VK_FORMAT_R8G8B8_SSCALED:
+		case VK_FORMAT_B8G8R8_SSCALED:
+		case VK_FORMAT_R8G8B8A8_SSCALED:
+		case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
+		case VK_FORMAT_R16G16_SSCALED:
+		case VK_FORMAT_B8G8R8A8_SSCALED:
+		case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
+		case VK_FORMAT_R16G16B16_SSCALED:
+		case VK_FORMAT_R16G16B16A16_SSCALED:
+			return true;
+
+		default:
+			break;
+	}
+
+	return false;
+
+}
+
+bool isVertexFormatUscaled (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_R8_USCALED:
+		case VK_FORMAT_R8G8_USCALED:
+		case VK_FORMAT_R16_USCALED:
+		case VK_FORMAT_R8G8B8_USCALED:
+		case VK_FORMAT_B8G8R8_USCALED:
+		case VK_FORMAT_R8G8B8A8_USCALED:
+		case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
+		case VK_FORMAT_R16G16_USCALED:
+		case VK_FORMAT_B8G8R8A8_USCALED:
+		case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
+		case VK_FORMAT_R16G16B16_USCALED:
+		case VK_FORMAT_R16G16B16A16_USCALED:
+			return true;
+
+		default:
+			break;
+	}
+
+	return false;
+
+}
+
+bool isVertexFormatDouble (VkFormat format)
+{
+	switch (format)
+	{
+		case VK_FORMAT_R64_UINT:
+		case VK_FORMAT_R64_SINT:
+		case VK_FORMAT_R64_SFLOAT:
+		case VK_FORMAT_R64G64_UINT:
+		case VK_FORMAT_R64G64_SINT:
+		case VK_FORMAT_R64G64_SFLOAT:
+		case VK_FORMAT_R64G64B64_UINT:
+		case VK_FORMAT_R64G64B64_SINT:
+		case VK_FORMAT_R64G64B64_SFLOAT:
+		case VK_FORMAT_R64G64B64A64_UINT:
+		case VK_FORMAT_R64G64B64A64_SINT:
+		case VK_FORMAT_R64G64B64A64_SFLOAT:
+			return true;
+
+		default:
+			break;
+	}
+	return false;
+}
+
+std::vector<Vertex4RGBA> createOverlappingQuads (void)
+{
+	using tcu::Vec2;
+	using tcu::Vec4;
+
+	std::vector<Vertex4RGBA> vertices;
+
+	const Vec2 translations[4] =
+	{
+		Vec2(-0.25f, -0.25f),
+		Vec2(-1.0f, -0.25f),
+		Vec2(-1.0f, -1.0f),
+		Vec2(-0.25f, -1.0f)
+	};
+
+	const Vec4 quadColors[4] =
+	{
+		Vec4(1.0f, 0.0f, 0.0f, 1.0),
+		Vec4(0.0f, 1.0f, 0.0f, 1.0),
+		Vec4(0.0f, 0.0f, 1.0f, 1.0),
+		Vec4(1.0f, 0.0f, 1.0f, 1.0)
+	};
+
+	const float quadSize = 1.25f;
+
+	for (int quadNdx = 0; quadNdx < 4; quadNdx++)
+	{
+		const Vec2&	translation	= translations[quadNdx];
+		const Vec4&	color		= quadColors[quadNdx];
+
+		const Vertex4RGBA lowerLeftVertex =
+		{
+			Vec4(translation.x(), translation.y(), 0.0f, 1.0f),
+			color
+		};
+		const Vertex4RGBA upperLeftVertex =
+		{
+			Vec4(translation.x(), translation.y() + quadSize, 0.0f, 1.0f),
+			color
+		};
+		const Vertex4RGBA lowerRightVertex =
+		{
+			Vec4(translation.x() + quadSize, translation.y(), 0.0f, 1.0f),
+			color
+		};
+		const Vertex4RGBA upperRightVertex =
+		{
+			Vec4(translation.x() + quadSize, translation.y() + quadSize, 0.0f, 1.0f),
+			color
+		};
+
+		// Triangle 1, CCW
+		vertices.push_back(lowerLeftVertex);
+		vertices.push_back(lowerRightVertex);
+		vertices.push_back(upperLeftVertex);
+
+		// Triangle 2, CW
+		vertices.push_back(lowerRightVertex);
+		vertices.push_back(upperLeftVertex);
+		vertices.push_back(upperRightVertex);
+	}
+
+	return vertices;
+}
+
+std::vector<Vertex4Tex4> createFullscreenQuad (void)
+{
+	using tcu::Vec4;
+
+	const Vertex4Tex4 lowerLeftVertex =
+	{
+		Vec4(-1.0f, -1.0f, 0.0f, 1.0f),
+		Vec4(0.0f, 0.0f, 0.0f, 0.0f)
+	};
+	const Vertex4Tex4 upperLeftVertex =
+	{
+		Vec4(-1.0f, 1.0f, 0.0f, 1.0f),
+		Vec4(0.0f, 1.0f, 0.0f, 0.0f)
+	};
+	const Vertex4Tex4 lowerRightVertex =
+	{
+		Vec4(1.0f, -1.0f, 0.0f, 1.0f),
+		Vec4(1.0f, 0.0f, 0.0f, 0.0f)
+	};
+	const Vertex4Tex4 upperRightVertex =
+	{
+		Vec4(1.0f, 1.0f, 0.0f, 1.0f),
+		Vec4(1.0f, 1.0f, 0.0f, 0.0f)
+	};
+
+	const Vertex4Tex4 vertices[6] =
+	{
+		lowerLeftVertex,
+		lowerRightVertex,
+		upperLeftVertex,
+
+		upperLeftVertex,
+		lowerRightVertex,
+		upperRightVertex
+	};
+
+	return std::vector<Vertex4Tex4>(vertices, vertices + DE_LENGTH_OF_ARRAY(vertices));
+}
+
+std::vector<Vertex4Tex4> createQuadMosaic (int rows, int columns)
+{
+	using tcu::Vec4;
+
+	DE_ASSERT(rows >= 1);
+	DE_ASSERT(columns >= 1);
+
+	std::vector<Vertex4Tex4>	vertices;
+	const float					rowSize		= 2.0f / (float)rows;
+	const float					columnSize	= 2.0f / (float)columns;
+	int							arrayIndex	= 0;
+
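+	// Each quad stores its index in the z texture coordinate so that callers can use it as an array
+	// layer or 3D slice selector (see createTestQuadMosaic).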
+	for (int rowNdx = 0; rowNdx < rows; rowNdx++)
+	{
+		for (int columnNdx = 0; columnNdx < columns; columnNdx++)
+		{
+			const Vertex4Tex4 lowerLeftVertex =
+			{
+				Vec4(-1.0f + (float)columnNdx * columnSize, -1.0f + (float)rowNdx * rowSize, 0.0f, 1.0f),
+				Vec4(0.0f, 0.0f, (float)arrayIndex, 0.0f)
+			};
+			const Vertex4Tex4 upperLeftVertex =
+			{
+				Vec4(lowerLeftVertex.position.x(), lowerLeftVertex.position.y() + rowSize, 0.0f, 1.0f),
+				Vec4(0.0f, 1.0f, (float)arrayIndex, 0.0f)
+			};
+			const Vertex4Tex4 lowerRightVertex =
+			{
+				Vec4(lowerLeftVertex.position.x() + columnSize, lowerLeftVertex.position.y(), 0.0f, 1.0f),
+				Vec4(1.0f, 0.0f, (float)arrayIndex, 0.0f)
+			};
+			const Vertex4Tex4 upperRightVertex =
+			{
+				Vec4(lowerLeftVertex.position.x() + columnSize, lowerLeftVertex.position.y() + rowSize, 0.0f, 1.0f),
+				Vec4(1.0f, 1.0f, (float)arrayIndex, 0.0f)
+			};
+
+			vertices.push_back(lowerLeftVertex);
+			vertices.push_back(lowerRightVertex);
+			vertices.push_back(upperLeftVertex);
+			vertices.push_back(upperLeftVertex);
+			vertices.push_back(lowerRightVertex);
+			vertices.push_back(upperRightVertex);
+
+			arrayIndex++;
+		}
+	}
+
+	return vertices;
+}
+
+std::vector<Vertex4Tex4> createQuadMosaicCube (void)
+{
+	using tcu::Vec3;
+
+	static const Vec3 texCoordsCube[8] =
+	{
+		Vec3(-1.0f, -1.0f, -1.0f),	// 0: -X, -Y, -Z
+		Vec3(1.0f, -1.0f, -1.0f),	// 1:  X, -Y, -Z
+		Vec3(1.0f, -1.0f, 1.0f),	// 2:  X, -Y,  Z
+		Vec3(-1.0f, -1.0f, 1.0f),	// 3: -X, -Y,  Z
+
+		Vec3(-1.0f, 1.0f, -1.0f),	// 4: -X,  Y, -Z
+		Vec3(1.0f, 1.0f, -1.0f),	// 5:  X,  Y, -Z
+		Vec3(1.0f, 1.0f, 1.0f),		// 6:  X,  Y,  Z
+		Vec3(-1.0f, 1.0f, 1.0f),	// 7: -X,  Y,  Z
+	};
+
+	static const int texCoordCubeIndices[6][6] =
+	{
+		{ 6, 5, 2, 2, 5, 1 },		// +X face
+		{ 3, 0, 7, 7, 0, 4 },		// -X face
+		{ 4, 5, 7, 7, 5, 6 },		// +Y face
+		{ 3, 2, 0, 0, 2, 1 },		// -Y face
+		{ 2, 3, 6, 6, 3, 7 },		// +Z face
+		{ 0, 1, 4, 4, 1, 5 }		// -Z face
+	};
+
+	// Create 6 quads and set appropriate texture coordinates for cube mapping
+
+	std::vector<Vertex4Tex4>			vertices	= createQuadMosaic(2, 3);
+	std::vector<Vertex4Tex4>::iterator	vertexItr	= vertices.begin();
+
+	for (int quadNdx = 0; quadNdx < 6; quadNdx++)
+	{
+		for (int vertexNdx = 0; vertexNdx < 6; vertexNdx++)
+		{
+			vertexItr->texCoord.xyz() = texCoordsCube[texCoordCubeIndices[quadNdx][vertexNdx]];
+			vertexItr++;
+		}
+	}
+
+	return vertices;
+}
+
+std::vector<Vertex4Tex4> createQuadMosaicCubeArray (int faceArrayIndices[6])
+{
+	std::vector<Vertex4Tex4>			vertices	= createQuadMosaicCube();
+	std::vector<Vertex4Tex4>::iterator	vertexItr	= vertices.begin();
+
+	for (int quadNdx = 0; quadNdx < 6; quadNdx++)
+	{
+		for (int vertexNdx = 0; vertexNdx < 6; vertexNdx++)
+		{
+			vertexItr->texCoord.w() = (float)faceArrayIndices[quadNdx];
+			vertexItr++;
+		}
+	}
+
+	return vertices;
+}
+
+std::vector<Vertex4Tex4> createTestQuadMosaic (vk::VkImageViewType viewType)
+{
+	std::vector<Vertex4Tex4> vertices;
+
+	switch (viewType)
+	{
+		case vk::VK_IMAGE_VIEW_TYPE_1D:
+		case vk::VK_IMAGE_VIEW_TYPE_2D:
+			vertices = createFullscreenQuad();
+			break;
+
+		case vk::VK_IMAGE_VIEW_TYPE_1D_ARRAY:
+			vertices = createQuadMosaic(2, 3);
+
+			// Set up array indices
+			for (size_t quadNdx = 0; quadNdx < 6; quadNdx++)
+				for (size_t vertexNdx = 0; vertexNdx < 6; vertexNdx++)
+					vertices[quadNdx * 6 + vertexNdx].texCoord.y() = (float)quadNdx;
+
+			break;
+
+		case vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY:
+			vertices = createQuadMosaic(2, 3);
+			break;
+
+		case vk::VK_IMAGE_VIEW_TYPE_3D:
+			vertices = createQuadMosaic(2, 3);
+
+			// Use z between 0.0 and 1.0.
+			for (size_t vertexNdx = 0; vertexNdx < vertices.size(); vertexNdx++)
+				vertices[vertexNdx].texCoord.z() /= 5.0f;
+
+			break;
+
+		case vk::VK_IMAGE_VIEW_TYPE_CUBE:
+			vertices = createQuadMosaicCube();
+			break;
+
+		case vk::VK_IMAGE_VIEW_TYPE_CUBE_ARRAY:
+			{
+				int faceArrayIndices[6] = { 0, 1, 2, 3, 4, 5 };
+				vertices = createQuadMosaicCubeArray(faceArrayIndices);
+			}
+			break;
+
+		default:
+			DE_ASSERT(false);
+			break;
+	}
+
+	return vertices;
+}
+
+} // pipeline
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexUtil.hpp b/external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexUtil.hpp
new file mode 100644
index 0000000..c00e9fe
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/pipeline/vktPipelineVertexUtil.hpp
@@ -0,0 +1,94 @@
+#ifndef _VKTPIPELINEVERTEXUTIL_HPP
+#define _VKTPIPELINEVERTEXUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Imagination Technologies Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Utilities for vertex buffers.
+ *//*--------------------------------------------------------------------*/
+
+#include "vkDefs.hpp"
+#include "tcuDefs.hpp"
+#include "tcuVectorUtil.hpp"
+
+#include <vector>
+
+namespace vkt
+{
+namespace pipeline
+{
+
+struct Vertex4RGBA
+{
+	tcu::Vec4 position;
+	tcu::Vec4 color;
+};
+
+struct Vertex4Tex4
+{
+	tcu::Vec4 position;
+	tcu::Vec4 texCoord;
+};
+
+deUint32					getVertexFormatSize				(vk::VkFormat format);
+deUint32					getVertexFormatComponentCount	(vk::VkFormat format);
+deUint32					getVertexFormatComponentSize	(vk::VkFormat format);
+bool						isVertexFormatComponentOrderBGR	(vk::VkFormat format);
+bool						isVertexFormatSint				(vk::VkFormat format);
+bool						isVertexFormatUint				(vk::VkFormat format);
+bool						isVertexFormatSfloat			(vk::VkFormat format);
+bool						isVertexFormatUfloat			(vk::VkFormat format);
+bool						isVertexFormatUnorm				(vk::VkFormat format);
+bool						isVertexFormatSnorm				(vk::VkFormat format);
+bool						isVertexFormatSRGB				(vk::VkFormat format);
+bool						isVertexFormatSscaled			(vk::VkFormat format);
+bool						isVertexFormatUscaled			(vk::VkFormat format);
+bool						isVertexFormatDouble			(vk::VkFormat format);
+
+/*! \brief Creates a pattern of 4 overlapping quads.
+ *
+ *  The quads are aligned along the plane Z = 0, with X, Y taking values between -1 and 1.
+ *  Each quad covers one of the quadrants of the scene and partially extends to the other 3 quadrants.
+ *  The triangles of each quad have different winding orders (CW/CCW).
+ */
+std::vector<Vertex4RGBA>	createOverlappingQuads		(void);
+
+std::vector<Vertex4Tex4>	createFullscreenQuad		(void);
+std::vector<Vertex4Tex4>	createQuadMosaic			(int rows, int columns);
+std::vector<Vertex4Tex4>	createQuadMosaicCube		(void);
+std::vector<Vertex4Tex4>	createQuadMosaicCubeArray	(int faceArrayIndices[6]);
+
+std::vector<Vertex4Tex4>	createTestQuadMosaic		(vk::VkImageViewType viewType);
+
+} // pipeline
+} // vkt
+
+#endif // _VKTPIPELINEVERTEXUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/query_pool/CMakeLists.txt b/external/vulkancts/modules/vulkan/query_pool/CMakeLists.txt
new file mode 100644
index 0000000..e9a68a1
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/query_pool/CMakeLists.txt
@@ -0,0 +1,24 @@
+include_directories(..)
+
+set(DEQP_VK_QUERY_POOL_SRCS
+	vktQueryPoolTests.hpp
+	vktQueryPoolTests.cpp
+	vktQueryPoolOcclusionTests.hpp
+	vktQueryPoolOcclusionTests.cpp
+	vktQueryPoolCreateInfoUtil.hpp
+	vktQueryPoolCreateInfoUtil.cpp
+	vktQueryPoolBufferObjectUtil.hpp
+	vktQueryPoolBufferObjectUtil.cpp
+	vktQueryPoolImageObjectUtil.hpp
+	vktQueryPoolImageObjectUtil.cpp
+)
+
+set(DEQP_VK_QUERY_POOL_LIBS
+	deqp-vk-common
+	tcutil
+	vkutil
+)
+
+add_library(deqp-vk-query-pool STATIC ${DEQP_VK_QUERY_POOL_SRCS})
+target_link_libraries(deqp-vk-query-pool ${DEQP_VK_QUERY_POOL_LIBS})
+
diff --git a/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolBufferObjectUtil.cpp b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolBufferObjectUtil.cpp
new file mode 100644
index 0000000..27d2d4a
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolBufferObjectUtil.cpp
@@ -0,0 +1,83 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Buffer Object Util
+ *//*--------------------------------------------------------------------*/
+
+#include "vktQueryPoolBufferObjectUtil.hpp"
+
+#include "vkQueryUtil.hpp"
+
+namespace vkt
+{
+namespace QueryPool
+{
+
+Buffer::Buffer (const vk::DeviceInterface& vk, vk::VkDevice device, vk::Move<vk::VkBuffer> object_)
+	: m_allocation  (DE_NULL)
+	, m_object		(object_)
+	, m_vk			(vk)
+	, m_device		(device)
+{
+}
+
+void Buffer::bindMemory (de::MovePtr<vk::Allocation> allocation)
+{
+	DE_ASSERT(allocation);
+	VK_CHECK(m_vk.bindBufferMemory(m_device, *m_object, allocation->getMemory(), allocation->getOffset()));
+
+	DE_ASSERT(!m_allocation);
+	m_allocation = allocation;
+}
+
+de::SharedPtr<Buffer> Buffer::createAndAlloc (const vk::DeviceInterface& vk,
+											  vk::VkDevice device,
+											  const vk::VkBufferCreateInfo &createInfo,
+											  vk::Allocator &allocator,
+											  vk::MemoryRequirement memoryRequirement)
+{
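+	// Create the buffer first, then allocate device memory matching its requirements and bind it.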
+	de::SharedPtr<Buffer> ret = create(vk, device, createInfo);
+
+	vk::VkMemoryRequirements bufferRequirements = vk::getBufferMemoryRequirements(vk, device, ret->object());
+	ret->bindMemory(allocator.allocate(bufferRequirements, memoryRequirement));
+	return ret;
+}
+
+de::SharedPtr<Buffer> Buffer::create (const vk::DeviceInterface& vk,
+									  vk::VkDevice device,
+									  const vk::VkBufferCreateInfo& createInfo)
+{
+	return de::SharedPtr<Buffer>(new Buffer(vk, device, vk::createBuffer(vk, device, &createInfo)));
+}
+
+} // QueryPool
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolBufferObjectUtil.hpp b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolBufferObjectUtil.hpp
new file mode 100644
index 0000000..f008d1d
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolBufferObjectUtil.hpp
@@ -0,0 +1,82 @@
+#ifndef _VKTQUERYPOOLBUFFEROBJECTUTIL_HPP
+#define _VKTQUERYPOOLBUFFEROBJECTUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Buffer Object Util
+ *//*--------------------------------------------------------------------*/
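+
+/* A minimal usage sketch (illustrative only; assumes the DeviceInterface,
+ * VkDevice and Allocator provided by the test Context, plus the
+ * BufferCreateInfo helper from vktQueryPoolCreateInfoUtil.hpp):
+ *
+ *	de::SharedPtr<Buffer> buffer = Buffer::createAndAlloc(
+ *		vk, device,
+ *		BufferCreateInfo(1024u, vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT),
+ *		allocator, vk::MemoryRequirement::HostVisible);
+ *	const vk::VkBuffer bufferHandle = buffer->object();
+ */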
+
+#include "vkMemUtil.hpp"
+#include "vkRefUtil.hpp"
+
+#include "deSharedPtr.hpp"
+
+namespace vkt
+{
+namespace QueryPool
+{
+
+class Buffer
+{
+public:
+
+	static de::SharedPtr<Buffer> create			(const vk::DeviceInterface& vk, vk::VkDevice device, const vk::VkBufferCreateInfo &createInfo);
+
+	static de::SharedPtr<Buffer> createAndAlloc (const vk::DeviceInterface&		vk,
+												 vk::VkDevice					device,
+												 const vk::VkBufferCreateInfo&	createInfo,
+												 vk::Allocator&					allocator,
+												 vk::MemoryRequirement			allocationMemoryProperties = vk::MemoryRequirement::Any);
+
+								Buffer			(const vk::DeviceInterface &vk, vk::VkDevice device, vk::Move<vk::VkBuffer> object);
+
+	void						bindMemory		(de::MovePtr<vk::Allocation> allocation);
+
+	vk::VkBuffer				object			(void) const								{ return *m_object;		}
+	vk::Allocation&				getBoundMemory	(void) const								{ return *m_allocation;	}
+
+private:
+
+	Buffer										(const Buffer& other);	// Not allowed!
+	Buffer&						operator=		(const Buffer& other);	// Not allowed!
+
+	de::MovePtr<vk::Allocation>		m_allocation;
+	vk::Unique<vk::VkBuffer>		m_object;
+
+	const vk::DeviceInterface&		m_vk;
+	vk::VkDevice					m_device;
+};
+
+} // QueryPool
+} // vkt
+
+#endif // _VKTQUERYPOOLBUFFEROBJECTUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolCreateInfoUtil.cpp b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolCreateInfoUtil.cpp
new file mode 100644
index 0000000..93f840e
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolCreateInfoUtil.cpp
@@ -0,0 +1,1204 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief CreateInfo utilities
+ *//*--------------------------------------------------------------------*/
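+
+/* The *CreateInfo wrappers defined here pre-fill sType/pNext where the
+ * wrapped struct has them and, for most array parameters, keep a copy of
+ * the data alive in member vectors so the structs can be built inline in
+ * test code. A small sketch (illustrative only; the attachment parameters
+ * are arbitrary):
+ *
+ *	RenderPassCreateInfo renderPassCreateInfo;
+ *	renderPassCreateInfo.addAttachment(AttachmentDescription(
+ *		vk::VK_FORMAT_R8G8B8A8_UNORM, vk::VK_SAMPLE_COUNT_1_BIT,
+ *		vk::VK_ATTACHMENT_LOAD_OP_CLEAR, vk::VK_ATTACHMENT_STORE_OP_STORE,
+ *		vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE, vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,
+ *		vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ *		vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL));
+ */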
+
+#include "vktQueryPoolCreateInfoUtil.hpp"
+
+#include "vkImageUtil.hpp"
+
+namespace vkt
+{
+namespace QueryPool
+{
+
+ImageSubresourceRange::ImageSubresourceRange (vk::VkImageAspectFlags	_aspectMask,
+											  deUint32					_baseMipLevel,
+											  deUint32					_levelCount,
+											  deUint32					_baseArrayLayer,
+											  deUint32					_layerCount)
+{
+	aspectMask		= _aspectMask;
+	baseMipLevel	= _baseMipLevel;
+	levelCount		= _levelCount;
+	baseArrayLayer	= _baseArrayLayer;
+	layerCount		= _layerCount;
+}
+
+ComponentMapping::ComponentMapping (vk::VkComponentSwizzle _r,
+								    vk::VkComponentSwizzle _g,
+								    vk::VkComponentSwizzle _b,
+								    vk::VkComponentSwizzle _a)
+{
+	r = _r;
+	g = _g;
+	b = _b;
+	a = _a;
+}
+
+ImageViewCreateInfo::ImageViewCreateInfo (vk::VkImage							_image,
+										  vk::VkImageViewType					_viewType,
+										  vk::VkFormat							_format,
+										  const vk::VkImageSubresourceRange&	_subresourceRange,
+										  const vk::VkComponentMapping&			_components,
+										  vk::VkImageViewCreateFlags			_flags)
+{
+	sType = vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+	pNext = DE_NULL;
+	flags				= 0u;
+	image				= _image;
+	viewType			= _viewType;
+	format				= _format;
+	components.r		= _components.r;
+	components.g		= _components.g;
+	components.b		= _components.b;
+	components.a		= _components.a;
+	subresourceRange	= _subresourceRange;
+	flags				= _flags;
+}
+
+ImageViewCreateInfo::ImageViewCreateInfo (vk::VkImage					_image,
+										  vk::VkImageViewType			_viewType,
+										  vk::VkFormat					_format,
+										  const vk::VkComponentMapping&	_components,
+										  vk::VkImageViewCreateFlags	_flags)
+{
+	sType = vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
+	pNext = DE_NULL;
+	flags			= 0u;
+	image			= _image;
+	viewType		= _viewType;
+	format			= _format;
+	components.r	= _components.r;
+	components.g	= _components.g;
+	components.b	= _components.b;
+	components.a	= _components.a;
+
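+	// Derive the aspect mask from the view format: depth and/or stencil
+	// formats select the corresponding aspect bits, everything else is color.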
+	vk::VkImageAspectFlags aspectFlags;
+	const tcu::TextureFormat tcuFormat = vk::mapVkFormat(_format);
+
+	switch (tcuFormat.order)
+	{
+		case tcu::TextureFormat::D:
+			aspectFlags = vk::VK_IMAGE_ASPECT_DEPTH_BIT;
+			break;
+		case tcu::TextureFormat::S:
+			aspectFlags = vk::VK_IMAGE_ASPECT_STENCIL_BIT;
+			break;
+		case tcu::TextureFormat::DS:
+			aspectFlags = vk::VK_IMAGE_ASPECT_STENCIL_BIT | vk::VK_IMAGE_ASPECT_DEPTH_BIT;
+			break;
+		default:
+			aspectFlags = vk::VK_IMAGE_ASPECT_COLOR_BIT;
+			break;
+	}
+
+	subresourceRange = ImageSubresourceRange(aspectFlags);
+	flags = _flags;
+}
+
+BufferViewCreateInfo::BufferViewCreateInfo (vk::VkBuffer	_buffer,
+											vk::VkFormat		_format,
+											vk::VkDeviceSize _offset,
+											vk::VkDeviceSize _range)
+{
+	sType = vk::VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
+	pNext = DE_NULL;
+
+	flags	= 0u;
+	buffer	= _buffer;
+	format	= _format;
+	offset	= _offset;
+	range	= _range;
+}
+
+BufferCreateInfo::BufferCreateInfo (vk::VkDeviceSize		_size,
+									vk::VkBufferUsageFlags	_usage,
+									vk::VkSharingMode		_sharingMode,
+									deUint32				_queueFamilyIndexCount,
+									const deUint32*			_pQueueFamilyIndices,
+									vk::VkBufferCreateFlags _flags)
+{
+	sType = vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
+	pNext = DE_NULL;
+	size					= _size;
+	usage					= _usage;
+	flags					= _flags;
+	sharingMode				= _sharingMode;
+	queueFamilyIndexCount	= _queueFamilyIndexCount;
+
+	if (_queueFamilyIndexCount)
+	{
+		m_queueFamilyIndices = std::vector<deUint32>(
+			_pQueueFamilyIndices, _pQueueFamilyIndices + _queueFamilyIndexCount);
+		pQueueFamilyIndices = &m_queueFamilyIndices[0];
+	}
+	else
+	{
+		pQueueFamilyIndices = _pQueueFamilyIndices;
+	}
+}
+
+BufferCreateInfo::BufferCreateInfo (const BufferCreateInfo &other)
+{
+	sType					= other.sType;
+	pNext					= other.pNext;
+	size					= other.size;
+	usage					= other.usage;
+	flags					= other.flags;
+	sharingMode				= other.sharingMode;
+	queueFamilyIndexCount	= other.queueFamilyIndexCount;
+
+	m_queueFamilyIndices	= other.m_queueFamilyIndices;
+	DE_ASSERT(m_queueFamilyIndices.size() == queueFamilyIndexCount);
+
+	if (m_queueFamilyIndices.size())
+	{
+		pQueueFamilyIndices = &m_queueFamilyIndices[0];
+	}
+	else
+	{
+		pQueueFamilyIndices = DE_NULL;
+	}
+}
+
+BufferCreateInfo & BufferCreateInfo::operator= (const BufferCreateInfo &other)
+{
+	sType						= other.sType;
+	pNext						= other.pNext;
+	size						= other.size;
+	usage						= other.usage;
+	flags						= other.flags;
+	sharingMode					= other.sharingMode;
+	queueFamilyIndexCount		= other.queueFamilyIndexCount;
+
+	m_queueFamilyIndices		= other.m_queueFamilyIndices;
+
+	DE_ASSERT(m_queueFamilyIndices.size() == queueFamilyIndexCount);
+
+	if (m_queueFamilyIndices.size())
+	{
+		pQueueFamilyIndices = &m_queueFamilyIndices[0];
+	}
+	else
+	{
+		pQueueFamilyIndices = DE_NULL;
+	}
+
+	return *this;
+}
+
+ImageCreateInfo::ImageCreateInfo (vk::VkImageType			_imageType,
+								  vk::VkFormat				_format,
+								  vk::VkExtent3D			_extent,
+								  deUint32					_mipLevels,
+								  deUint32					_arrayLayers,
+								  vk::VkSampleCountFlagBits	_samples,
+								  vk::VkImageTiling			_tiling,
+								  vk::VkImageUsageFlags		_usage,
+								  vk::VkSharingMode			_sharingMode,
+								  deUint32					_queueFamilyIndexCount,
+								  const deUint32*			_pQueueFamilyIndices,
+								  vk::VkImageCreateFlags	_flags,
+								  vk::VkImageLayout			_initialLayout)
+{
+	if (_queueFamilyIndexCount)
+	{
+		m_queueFamilyIndices = std::vector<deUint32>(_pQueueFamilyIndices, _pQueueFamilyIndices + _queueFamilyIndexCount);
+	}
+
+	sType = vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= _flags;
+	imageType				= _imageType;
+	format					= _format;
+	extent					= _extent;
+	mipLevels				= _mipLevels;
+	arrayLayers				= _arrayLayers;
+	samples					= _samples;
+	tiling					= _tiling;
+	usage					= _usage;
+	sharingMode				= _sharingMode;
+	queueFamilyIndexCount	= _queueFamilyIndexCount;
+
+	if (m_queueFamilyIndices.size())
+	{
+		pQueueFamilyIndices = &m_queueFamilyIndices[0];
+	}
+	else
+	{
+		pQueueFamilyIndices = DE_NULL;
+	}
+	initialLayout	= _initialLayout;
+}
+
+FramebufferCreateInfo::FramebufferCreateInfo (vk::VkRenderPass						_renderPass,
+											  const std::vector<vk::VkImageView>&	attachments,
+											  deUint32								_width,
+											  deUint32								_height,
+											  deUint32								_layers)
+{
+	sType = vk::VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0u;
+
+	renderPass		= _renderPass;
+	attachmentCount	= static_cast<deUint32>(attachments.size());
+
+	if (attachmentCount)
+	{
+		pAttachments = const_cast<vk::VkImageView *>(&attachments[0]);
+	}
+
+	width	= _width;
+	height	= _height;
+	layers	= _layers;
+}
+
+RenderPassCreateInfo::RenderPassCreateInfo (const std::vector<vk::VkAttachmentDescription>&	attachments,
+											const std::vector<vk::VkSubpassDescription>&	subpasses,
+											const std::vector<vk::VkSubpassDependency>&		dependiences)
+
+	: m_attachments			(attachments.begin(), attachments.end())
+	, m_subpasses			(subpasses.begin(), subpasses.end())
+	, m_dependiences		(dependiences.begin(), dependiences.end())
+	, m_attachmentsStructs	(m_attachments.begin(), m_attachments.end())
+	, m_subpassesStructs	(m_subpasses.begin(), m_subpasses.end())
+	, m_dependiencesStructs	(m_dependiences.begin(), m_dependiences.end())
+{
+	sType = vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0;
+
+	attachmentCount = static_cast<deUint32>(m_attachments.size());
+	pAttachments	= &m_attachmentsStructs[0];
+	subpassCount	= static_cast<deUint32>(m_subpasses.size());
+	pSubpasses		= &m_subpassesStructs[0];
+	dependencyCount = static_cast<deUint32>(m_dependiences.size());
+	pDependencies	= &m_dependiencesStructs[0];
+}
+
+RenderPassCreateInfo::RenderPassCreateInfo (deUint32							_attachmentCount,
+											const vk::VkAttachmentDescription*	_pAttachments,
+											deUint32							_subpassCount,
+											const vk::VkSubpassDescription*		_pSubpasses,
+											deUint32							_dependencyCount,
+											const vk::VkSubpassDependency*		_pDependiences)
+{
+
+	m_attachments	= std::vector<AttachmentDescription>(_pAttachments, _pAttachments + _attachmentCount);
+	m_subpasses		= std::vector<SubpassDescription>(_pSubpasses, _pSubpasses + _subpassCount);
+	m_dependiences	= std::vector<SubpassDependency>(_pDependiences, _pDependiences + _dependencyCount);
+
+	m_attachmentsStructs	= std::vector<vk::VkAttachmentDescription>	(m_attachments.begin(),		m_attachments.end());
+	m_subpassesStructs		= std::vector<vk::VkSubpassDescription>		(m_subpasses.begin(),		m_subpasses.end());
+	m_dependiencesStructs	= std::vector<vk::VkSubpassDependency>		(m_dependiences.begin(),	m_dependiences.end());
+
+	sType = vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0;
+
+	attachmentCount = static_cast<deUint32>(m_attachments.size());
+
+	if (attachmentCount)
+	{
+		pAttachments = &m_attachmentsStructs[0];
+	}
+	else
+	{
+		pAttachments = DE_NULL;
+	}
+
+	subpassCount = static_cast<deUint32>(m_subpasses.size());
+
+	if (subpassCount)
+	{
+		pSubpasses = &m_subpassesStructs[0];
+	}
+	else
+	{
+		pSubpasses = DE_NULL;
+	}
+
+	dependencyCount = static_cast<deUint32>(m_dependiences.size());
+
+	if (dependencyCount)
+	{
+		pDependencies = &m_dependiencesStructs[0];
+	}
+	else
+	{
+		pDependencies = DE_NULL;
+	}
+}
+
+void
+RenderPassCreateInfo::addAttachment (vk::VkAttachmentDescription attachment)
+{
+
+	m_attachments.push_back(attachment);
+	m_attachmentsStructs	= std::vector<vk::VkAttachmentDescription>(m_attachments.begin(), m_attachments.end());
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachmentsStructs[0];
+}
+
+void
+RenderPassCreateInfo::addSubpass (vk::VkSubpassDescription subpass)
+{
+
+	m_subpasses.push_back(subpass);
+	m_subpassesStructs	= std::vector<vk::VkSubpassDescription>(m_subpasses.begin(), m_subpasses.end());
+	subpassCount		= static_cast<deUint32>(m_subpasses.size());
+	pSubpasses			= &m_subpassesStructs[0];
+}
+
+void
+RenderPassCreateInfo::addDependency (vk::VkSubpassDependency dependency)
+{
+
+	m_dependiences.push_back(dependency);
+	m_dependiencesStructs	= std::vector<vk::VkSubpassDependency>(m_dependiences.begin(), m_dependiences.end());
+
+	dependencyCount			= static_cast<deUint32>(m_dependiences.size());
+	pDependencies			= &m_dependiencesStructs[0];
+}
+
+RenderPassBeginInfo::RenderPassBeginInfo (vk::VkRenderPass						_renderPass,
+										  vk::VkFramebuffer						_framebuffer,
+										  vk::VkRect2D							_renderArea,
+										  const std::vector<vk::VkClearValue>&	_clearValues)
+{
+
+	m_clearValues	= _clearValues;
+
+	sType			= vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
+	pNext			= DE_NULL;
+	renderPass		= _renderPass;
+	framebuffer		= _framebuffer;
+	renderArea		= _renderArea;
+	clearValueCount = static_cast<deUint32>(m_clearValues.size());
+	pClearValues	= m_clearValues.size() ? &m_clearValues[0] : DE_NULL;
+}
+
+CmdPoolCreateInfo::CmdPoolCreateInfo (deUint32 _queueFamilyIndex, vk::VkCommandPoolCreateFlags _flags)
+{
+	sType = vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
+	pNext = DE_NULL;
+
+	queueFamilyIndex = _queueFamilyIndex;
+	flags				= _flags;
+}
+
+AttachmentDescription::AttachmentDescription (vk::VkFormat				_format,
+											  vk::VkSampleCountFlagBits	_samples,
+											  vk::VkAttachmentLoadOp	_loadOp,
+											  vk::VkAttachmentStoreOp	_storeOp,
+											  vk::VkAttachmentLoadOp	_stencilLoadOp,
+											  vk::VkAttachmentStoreOp	_stencilStoreOp,
+											  vk::VkImageLayout			_initialLayout,
+											  vk::VkImageLayout			_finalLayout)
+{
+	flags = 0;
+	format			= _format;
+	samples			= _samples;
+	loadOp			= _loadOp;
+	storeOp			= _storeOp;
+	stencilLoadOp	= _stencilLoadOp;
+	stencilStoreOp	= _stencilStoreOp;
+	initialLayout	= _initialLayout;
+	finalLayout		= _finalLayout;
+}
+
+AttachmentDescription::AttachmentDescription (const vk::VkAttachmentDescription& rhs)
+{
+	flags			= rhs.flags;
+	format			= rhs.format;
+	samples			= rhs.samples;
+	loadOp			= rhs.loadOp;
+	storeOp			= rhs.storeOp;
+	stencilLoadOp	= rhs.stencilLoadOp;
+	stencilStoreOp	= rhs.stencilStoreOp;
+	initialLayout	= rhs.initialLayout;
+	finalLayout		= rhs.finalLayout;
+}
+
+AttachmentReference::AttachmentReference (deUint32 _attachment, vk::VkImageLayout _layout)
+{
+	attachment	= _attachment;
+	layout		= _layout;
+}
+
+AttachmentReference::AttachmentReference (void)
+{
+	attachment = vk::VK_ATTACHMENT_UNUSED;
+	layout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
+}
+
+SubpassDescription::SubpassDescription (vk::VkPipelineBindPoint				_pipelineBindPoint,
+										vk::VkSubpassDescriptionFlags		_flags,
+										deUint32							_inputAttachmentCount,
+										const vk::VkAttachmentReference*	_inputAttachments,
+										deUint32							_colorAttachmentCount,
+										const vk::VkAttachmentReference*	_colorAttachments,
+										const vk::VkAttachmentReference*	_resolveAttachments,
+										vk::VkAttachmentReference			depthStencilAttachment,
+										deUint32							_preserveAttachmentCount,
+										const deUint32*						_preserveAttachments)
+{
+	m_inputAttachments = std::vector<vk::VkAttachmentReference>(_inputAttachments, _inputAttachments + _inputAttachmentCount);
+	m_colorAttachments = std::vector<vk::VkAttachmentReference>(_colorAttachments, _colorAttachments + _colorAttachmentCount);
+
+	if (_resolveAttachments)
+		m_resolveAttachments = std::vector<vk::VkAttachmentReference>(_resolveAttachments, _resolveAttachments + _colorAttachmentCount);
+
+	m_preserveAttachments = std::vector<deUint32>(_preserveAttachments, _preserveAttachments + _preserveAttachmentCount);
+
+	m_depthStencilAttachment = depthStencilAttachment;
+
+	flags					= _flags;
+	pipelineBindPoint		= _pipelineBindPoint;
+	inputAttachmentCount	= _inputAttachmentCount;
+	pInputAttachments		= DE_NULL;
+	colorAttachmentCount	= _colorAttachmentCount;
+	pColorAttachments		= DE_NULL;
+	pResolveAttachments		= DE_NULL;
+	pDepthStencilAttachment	= &m_depthStencilAttachment;
+	pPreserveAttachments	= DE_NULL;
+	preserveAttachmentCount	= _preserveAttachmentCount;
+
+	if (!m_inputAttachments.empty())
+		pInputAttachments = &m_inputAttachments[0];
+
+	if (!m_colorAttachments.empty())
+		pColorAttachments = &m_colorAttachments[0];
+
+	if (!m_resolveAttachments.empty())
+		pResolveAttachments = &m_resolveAttachments[0];
+
+	if (!m_preserveAttachments.empty())
+		pPreserveAttachments = &m_preserveAttachments[0];
+}
+
+SubpassDescription::SubpassDescription (const vk::VkSubpassDescription& rhs)
+{
+	*static_cast<vk::VkSubpassDescription*>(this) = rhs;
+
+	m_inputAttachments = std::vector<vk::VkAttachmentReference>(
+		rhs.pInputAttachments, rhs.pInputAttachments + rhs.inputAttachmentCount);
+
+	m_colorAttachments = std::vector<vk::VkAttachmentReference>(
+		rhs.pColorAttachments, rhs.pColorAttachments + rhs.colorAttachmentCount);
+
+	if (rhs.pResolveAttachments)
+		m_resolveAttachments = std::vector<vk::VkAttachmentReference>(
+			rhs.pResolveAttachments, rhs.pResolveAttachments + rhs.colorAttachmentCount);
+
+	m_preserveAttachments = std::vector<deUint32>(
+		rhs.pPreserveAttachments, rhs.pPreserveAttachments + rhs.preserveAttachmentCount);
+
+	if (rhs.pDepthStencilAttachment)
+		m_depthStencilAttachment = *rhs.pDepthStencilAttachment;
+
+	if (!m_inputAttachments.empty())
+		pInputAttachments = &m_inputAttachments[0];
+
+	if (!m_colorAttachments.empty())
+		pColorAttachments = &m_colorAttachments[0];
+
+	if (!m_resolveAttachments.empty())
+		pResolveAttachments = &m_resolveAttachments[0];
+
+	pDepthStencilAttachment = &m_depthStencilAttachment;
+
+	if (!m_preserveAttachments.empty())
+		pPreserveAttachments = &m_preserveAttachments[0];
+}
+
+SubpassDescription::SubpassDescription (const SubpassDescription& rhs)
+{
+	*this = rhs;
+}
+
+SubpassDescription& SubpassDescription::operator= (const SubpassDescription& rhs)
+{
+	*static_cast<vk::VkSubpassDescription*>(this) = rhs;
+
+	m_inputAttachments		= rhs.m_inputAttachments;
+	m_colorAttachments		= rhs.m_colorAttachments;
+	m_resolveAttachments	= rhs.m_resolveAttachments;
+	m_preserveAttachments	= rhs.m_preserveAttachments;
+	m_depthStencilAttachment = rhs.m_depthStencilAttachment;
+
+	if (!m_inputAttachments.empty())
+		pInputAttachments = &m_inputAttachments[0];
+
+	if (!m_colorAttachments.empty())
+		pColorAttachments = &m_colorAttachments[0];
+
+	if (!m_resolveAttachments.empty())
+		pResolveAttachments = &m_resolveAttachments[0];
+
+	pDepthStencilAttachment = &m_depthStencilAttachment;
+
+	if (!m_preserveAttachments.empty())
+		pPreserveAttachments = &m_preserveAttachments[0];
+
+	return *this;
+}
+
+SubpassDependency::SubpassDependency (deUint32					_srcSubpass,
+									  deUint32					_dstSubpass,
+									  vk::VkPipelineStageFlags	_srcStageMask,
+									  vk::VkPipelineStageFlags	_dstStageMask,
+									  vk::VkAccessFlags			_srcAccessMask,
+									  vk::VkAccessFlags			_dstAccessMask,
+									  vk::VkDependencyFlags		_dependencyFlags)
+{
+	srcSubpass		= _srcSubpass;
+	dstSubpass		= _dstSubpass;
+	srcStageMask	= _srcStageMask;
+	dstStageMask	= _dstStageMask;
+	srcAccessMask	= _srcAccessMask;
+	dstAccessMask	= _dstAccessMask;
+	dependencyFlags	= _dependencyFlags;
+}
+
+SubpassDependency::SubpassDependency (const vk::VkSubpassDependency& rhs)
+{
+	srcSubpass		= rhs.srcSubpass;
+	dstSubpass		= rhs.dstSubpass;
+	srcStageMask	= rhs.srcStageMask;
+	dstStageMask	= rhs.dstStageMask;
+	srcAccessMask	= rhs.srcAccessMask;
+	dstAccessMask	= rhs.dstAccessMask;
+	dependencyFlags	= rhs.dependencyFlags;
+}
+
+CmdBufferBeginInfo::CmdBufferBeginInfo (vk::VkCommandBufferUsageFlags _flags)
+{
+	sType				= vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
+	pNext				= DE_NULL;
+	flags				= _flags;
+	pInheritanceInfo	= DE_NULL;
+}
+
+DescriptorPoolCreateInfo::DescriptorPoolCreateInfo (const std::vector<vk::VkDescriptorPoolSize>&	poolSizeCounts,
+													vk::VkDescriptorPoolCreateFlags					_flags,
+													deUint32										_maxSets)
+	: m_poolSizeCounts(poolSizeCounts)
+{
+	sType = vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+	pNext = DE_NULL;
+	flags			= _flags;
+	maxSets			= _maxSets;
+	poolSizeCount	= static_cast<deUint32>(m_poolSizeCounts.size());
+	pPoolSizes		= &m_poolSizeCounts[0];
+}
+
+DescriptorPoolCreateInfo& DescriptorPoolCreateInfo::addDescriptors (vk::VkDescriptorType type, deUint32 count)
+{
+	vk::VkDescriptorPoolSize descriptorTypeCount = { type, count };
+	m_poolSizeCounts.push_back(descriptorTypeCount);
+
+	poolSizeCount	= static_cast<deUint32>(m_poolSizeCounts.size());
+	pPoolSizes		= &m_poolSizeCounts[0];
+
+	return *this;
+}
+
+DescriptorSetLayoutCreateInfo::DescriptorSetLayoutCreateInfo (deUint32 _bindingCount, const vk::VkDescriptorSetLayoutBinding* _pBindings)
+{
+	sType = vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0;
+	bindingCount = _bindingCount;
+	pBindings	 = _pBindings;
+}
+
+PipelineLayoutCreateInfo::PipelineLayoutCreateInfo (deUint32							_descriptorSetCount,
+													const vk::VkDescriptorSetLayout*	_pSetLayouts,
+													deUint32							_pushConstantRangeCount,
+													const vk::VkPushConstantRange*		_pPushConstantRanges)
+	: m_pushConstantRanges(_pPushConstantRanges, _pPushConstantRanges + _pushConstantRangeCount)
+{
+	for (unsigned int i = 0; i < _descriptorSetCount; i++)
+	{
+		m_setLayouts.push_back(_pSetLayouts[i]);
+	}
+
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+	pNext = DE_NULL;
+
+	flags					= 0u;
+	setLayoutCount			= static_cast<deUint32>(m_setLayouts.size());
+	pSetLayouts				= setLayoutCount > 0 ? &m_setLayouts[0] : DE_NULL;
+	pushConstantRangeCount	= static_cast<deUint32>(m_pushConstantRanges.size());
+
+	if (m_pushConstantRanges.size())
+	{
+		pPushConstantRanges = &m_pushConstantRanges[0];
+	}
+	else
+	{
+		pPushConstantRanges = DE_NULL;
+	}
+}
+
+PipelineLayoutCreateInfo::PipelineLayoutCreateInfo (const std::vector<vk::VkDescriptorSetLayout>&	setLayouts,
+													deUint32										_pushConstantRangeCount,
+													const vk::VkPushConstantRange*					_pPushConstantRanges)
+	: m_setLayouts			(setLayouts)
+	, m_pushConstantRanges	(_pPushConstantRanges, _pPushConstantRanges + _pushConstantRangeCount)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
+	pNext = DE_NULL;
+
+	flags			= 0u;
+	setLayoutCount	= static_cast<deUint32>(m_setLayouts.size());
+
+	if (setLayoutCount)
+	{
+		pSetLayouts = &m_setLayouts[0];
+	}
+	else
+	{
+		pSetLayouts = DE_NULL;
+	}
+
+	pushConstantRangeCount = static_cast<deUint32>(m_pushConstantRanges.size());
+	if (pushConstantRangeCount)
+	{
+		pPushConstantRanges = &m_pushConstantRanges[0];
+	}
+	else
+	{
+		pPushConstantRanges = DE_NULL;
+	}
+}
+
+PipelineCreateInfo::PipelineShaderStage::PipelineShaderStage (vk::VkShaderModule _module, const char* _pName, vk::VkShaderStageFlagBits _stage)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0u;
+	stage				= _stage;
+	module				= _module;
+	pName				= _pName;
+	pSpecializationInfo = DE_NULL;
+}
+
+PipelineCreateInfo::VertexInputState::VertexInputState (deUint32										_vertexBindingDescriptionCount,
+														const vk::VkVertexInputBindingDescription*		_pVertexBindingDescriptions,
+														deUint32										_vertexAttributeDescriptionCount,
+														const vk::VkVertexInputAttributeDescription*	_pVertexAttributeDescriptions)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags							= 0u;
+	vertexBindingDescriptionCount	= _vertexBindingDescriptionCount;
+	pVertexBindingDescriptions		= _pVertexBindingDescriptions;
+	vertexAttributeDescriptionCount	= _vertexAttributeDescriptionCount;
+	pVertexAttributeDescriptions	= _pVertexAttributeDescriptions;
+}
+
+PipelineCreateInfo::InputAssemblerState::InputAssemblerState (vk::VkPrimitiveTopology	_topology,
+															  vk::VkBool32				_primitiveRestartEnable)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	topology				= _topology;
+	primitiveRestartEnable	= _primitiveRestartEnable;
+}
+
+PipelineCreateInfo::TessellationState::TessellationState (deUint32 _patchControlPoints)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags				= 0u;
+	patchControlPoints	= _patchControlPoints;
+}
+
+PipelineCreateInfo::ViewportState::ViewportState (deUint32						_viewportCount,
+												  std::vector<vk::VkViewport>	_viewports,
+												  std::vector<vk::VkRect2D>		_scissors)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags			= 0u;
+	viewportCount	= _viewportCount;
+	scissorCount	= _viewportCount;
+
+	if (!_viewports.size())
+	{
+		m_viewports.resize(viewportCount);
+		deMemset(&m_viewports[0], 0, sizeof(m_viewports[0]) * m_viewports.size());
+	}
+	else
+	{
+		m_viewports = _viewports;
+	}
+
+	if (!_scissors.size())
+	{
+		m_scissors.resize(scissorCount);
+		deMemset(&m_scissors[0], 0, sizeof(m_scissors[0]) * m_scissors.size());
+	}
+	else
+	{
+		m_scissors = _scissors;
+	}
+
+	pViewports	= &m_viewports[0];
+	pScissors	= &m_scissors[0];
+}
+
+PipelineCreateInfo::ViewportState::ViewportState (const ViewportState& other)
+{
+	sType			= other.sType;
+	pNext			= other.pNext;
+	flags			= other.flags;
+	viewportCount	= other.viewportCount;
+	scissorCount	= other.scissorCount;
+
+	m_viewports = std::vector<vk::VkViewport>(other.pViewports, other.pViewports + viewportCount);
+	m_scissors	= std::vector<vk::VkRect2D>(other.pScissors, other.pScissors + scissorCount);
+
+	pViewports	= &m_viewports[0];
+	pScissors	= &m_scissors[0];
+}
+
+PipelineCreateInfo::ViewportState& PipelineCreateInfo::ViewportState::operator= (const ViewportState& other)
+{
+	sType			= other.sType;
+	pNext			= other.pNext;
+	flags			= other.flags;
+	viewportCount	= other.viewportCount;
+	scissorCount	= other.scissorCount;
+
+	m_viewports		= std::vector<vk::VkViewport>(other.pViewports, other.pViewports + viewportCount);
+	m_scissors		= std::vector<vk::VkRect2D>(other.pScissors, other.pScissors + scissorCount);
+
+	pViewports		= &m_viewports[0];
+	pScissors		= &m_scissors[0];
+	return *this;
+}
+
+PipelineCreateInfo::RasterizerState::RasterizerState (vk::VkBool32			_depthClampEnable,
+													  vk::VkBool32			_rasterizerDiscardEnable,
+													  vk::VkPolygonMode		_polygonMode,
+													  vk::VkCullModeFlags	_cullMode,
+													  vk::VkFrontFace		_frontFace,
+													  vk::VkBool32			_depthBiasEnable,
+													  float					_depthBiasConstantFactor,
+													  float					_depthBiasClamp,
+													  float					_depthBiasSlopeFactor,
+													  float					_lineWidth)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	depthClampEnable		= _depthClampEnable;
+	rasterizerDiscardEnable = _rasterizerDiscardEnable;
+	polygonMode				= _polygonMode;
+	cullMode				= _cullMode;
+	frontFace				= _frontFace;
+
+	depthBiasEnable			= _depthBiasEnable;
+	depthBiasConstantFactor	= _depthBiasConstantFactor;
+	depthBiasClamp			= _depthBiasClamp;
+	depthBiasSlopeFactor	= _depthBiasSlopeFactor;
+	lineWidth				= _lineWidth;
+}
+
+PipelineCreateInfo::MultiSampleState::MultiSampleState (vk::VkSampleCountFlagBits				_rasterizationSamples,
+														vk::VkBool32							_sampleShadingEnable,
+														float									_minSampleShading,
+														const std::vector<vk::VkSampleMask>&	_sampleMask,
+														bool									_alphaToCoverageEnable,
+														bool									_alphaToOneEnable)
+	: m_sampleMask(_sampleMask)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	rasterizationSamples	= _rasterizationSamples;
+	sampleShadingEnable		= _sampleShadingEnable;
+	minSampleShading		= _minSampleShading;
+	pSampleMask				= &m_sampleMask[0];
+	alphaToCoverageEnable   = _alphaToCoverageEnable;
+	alphaToOneEnable		= _alphaToOneEnable;
+}
+
+PipelineCreateInfo::MultiSampleState::MultiSampleState (const MultiSampleState& other)
+{
+	sType					= other.sType;
+	pNext					= other.pNext;
+	flags					= other.flags;
+	rasterizationSamples	= other.rasterizationSamples;
+	sampleShadingEnable		= other.sampleShadingEnable;
+	minSampleShading		= other.minSampleShading;
+
+	const size_t sampleMaskArrayLen = (sizeof(vk::VkSampleMask) * 8 + other.rasterizationSamples) / (sizeof(vk::VkSampleMask) * 8);
+
+	m_sampleMask	= std::vector<vk::VkSampleMask>(other.pSampleMask, other.pSampleMask + sampleMaskArrayLen);
+	pSampleMask		= &m_sampleMask[0];
+}
+
+PipelineCreateInfo::MultiSampleState& PipelineCreateInfo::MultiSampleState::operator= (const MultiSampleState& other)
+{
+	sType = other.sType;
+	pNext = other.pNext;
+	flags					= other.flags;
+	rasterizationSamples	= other.rasterizationSamples;
+	sampleShadingEnable		= other.sampleShadingEnable;
+	minSampleShading		= other.minSampleShading;
+
+	const size_t sampleMaskArrayLen = (sizeof(vk::VkSampleMask) * 8 + other.rasterizationSamples) / (sizeof(vk::VkSampleMask) * 8);
+
+	m_sampleMask	= std::vector<vk::VkSampleMask>(other.pSampleMask, other.pSampleMask + sampleMaskArrayLen);
+	pSampleMask		= &m_sampleMask[0];
+
+	return *this;
+}
+
+PipelineCreateInfo::ColorBlendState::ColorBlendState (const std::vector<vk::VkPipelineColorBlendAttachmentState>&	_attachments,
+													  vk::VkBool32													_logicOpEnable,
+													  vk::VkLogicOp													_logicOp)
+	: m_attachments(_attachments)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	logicOpEnable			= _logicOpEnable;
+	logicOp					= _logicOp;
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachments[0];
+}
+
+PipelineCreateInfo::ColorBlendState::ColorBlendState (deUint32											_attachmentCount,
+													  const vk::VkPipelineColorBlendAttachmentState*	_attachments,
+													  vk::VkBool32										_logicOpEnable,
+													  vk::VkLogicOp										_logicOp)
+	: m_attachments(_attachments, _attachments + _attachmentCount)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+	pNext	= DE_NULL;
+	flags					= 0u;
+	logicOpEnable			= _logicOpEnable;
+	logicOp					= _logicOp;
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachments[0];
+}
+
+PipelineCreateInfo::ColorBlendState::ColorBlendState (const vk::VkPipelineColorBlendStateCreateInfo& createInfo)
+	: m_attachments (createInfo.pAttachments, createInfo.pAttachments + createInfo.attachmentCount)
+{
+	sType = createInfo.sType;
+	pNext = createInfo.pNext;
+	flags					= createInfo.flags;
+	logicOpEnable			= createInfo.logicOpEnable;
+	logicOp					= createInfo.logicOp;
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachments[0];
+}
+
+PipelineCreateInfo::ColorBlendState::ColorBlendState (const ColorBlendState& createInfo, std::vector<float> _blendConstants)
+	: m_attachments (createInfo.pAttachments, createInfo.pAttachments + createInfo.attachmentCount)
+{
+	sType = createInfo.sType;
+	pNext = createInfo.pNext;
+	flags					= createInfo.flags;
+	logicOpEnable			= createInfo.logicOpEnable;
+	logicOp					= createInfo.logicOp;
+	attachmentCount			= static_cast<deUint32>(m_attachments.size());
+	pAttachments			= &m_attachments[0];
+	deMemcpy(blendConstants, &_blendConstants[0], 4 * sizeof(float));
+}
+
+PipelineCreateInfo::ColorBlendState::Attachment::Attachment (vk::VkBool32		_blendEnable,
+															 vk::VkBlendFactor	_srcColorBlendFactor,
+															 vk::VkBlendFactor	_dstColorBlendFactor,
+															 vk::VkBlendOp		_colorBlendOp,
+															 vk::VkBlendFactor	_srcAlphaBlendFactor,
+															 vk::VkBlendFactor	_dstAlphaBlendFactor,
+															 vk::VkBlendOp		_alphaBlendOp,
+															 deUint8			_colorWriteMask)
+{
+	blendEnable			= _blendEnable;
+	srcColorBlendFactor	= _srcColorBlendFactor;
+	dstColorBlendFactor	= _dstColorBlendFactor;
+	colorBlendOp		= _colorBlendOp;
+	srcAlphaBlendFactor	= _srcAlphaBlendFactor;
+	dstAlphaBlendFactor	= _dstAlphaBlendFactor;
+	alphaBlendOp		= _alphaBlendOp;
+	colorWriteMask		= _colorWriteMask;
+}
+
+PipelineCreateInfo::DepthStencilState::StencilOpState::StencilOpState (vk::VkStencilOp	_failOp,
+																	   vk::VkStencilOp	_passOp,
+																	   vk::VkStencilOp	_depthFailOp,
+																	   vk::VkCompareOp	_compareOp,
+																	   deUint32			_compareMask,
+																	   deUint32			_writeMask,
+																	   deUint32			_reference)
+{
+	failOp		= _failOp;
+	passOp		= _passOp;
+	depthFailOp	= _depthFailOp;
+	compareOp	= _compareOp;
+
+	compareMask	= _compareMask;
+	writeMask	= _writeMask;
+	reference	= _reference;
+}
+
+PipelineCreateInfo::DepthStencilState::DepthStencilState (vk::VkBool32		_depthTestEnable,
+														  vk::VkBool32		_depthWriteEnable,
+														  vk::VkCompareOp	_depthCompareOp,
+														  vk::VkBool32		_depthBoundsTestEnable,
+														  vk::VkBool32		_stencilTestEnable,
+														  StencilOpState	_front,
+														  StencilOpState	_back,
+														  float				_minDepthBounds,
+														  float				_maxDepthBounds)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags					= 0u;
+	depthTestEnable			= _depthTestEnable;
+	depthWriteEnable		= _depthWriteEnable;
+	depthCompareOp			= _depthCompareOp;
+	depthBoundsTestEnable	= _depthBoundsTestEnable;
+	stencilTestEnable		= _stencilTestEnable;
+	front	= _front;
+	back	= _back;
+
+	minDepthBounds = _minDepthBounds;
+	maxDepthBounds = _maxDepthBounds;
+}
+
+PipelineCreateInfo::DynamicState::DynamicState (const std::vector<vk::VkDynamicState>& _dynamicStates)
+{
+	sType = vk::VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags = 0u;
+
+	if (!_dynamicStates.size())
+	{
+		for (size_t i = 0; i < vk::VK_DYNAMIC_STATE_LAST; ++i)
+		{
+			m_dynamicStates.push_back(static_cast<vk::VkDynamicState>(i));
+		}
+	}
+	else
+		m_dynamicStates = _dynamicStates;
+
+	dynamicStateCount = static_cast<deUint32>(m_dynamicStates.size());
+	pDynamicStates = &m_dynamicStates[0];
+}
+
+PipelineCreateInfo::DynamicState::DynamicState (const DynamicState &other)
+{
+	sType = other.sType;
+	pNext = other.pNext;
+	flags = other.flags;
+
+	dynamicStateCount = other.dynamicStateCount;
+
+	m_dynamicStates = std::vector<vk::VkDynamicState>(other.pDynamicStates, other.pDynamicStates + dynamicStateCount);
+	pDynamicStates = &m_dynamicStates[0];
+}
+
+PipelineCreateInfo::DynamicState& PipelineCreateInfo::DynamicState::operator= (const DynamicState& other)
+{
+	sType = other.sType;
+	pNext = other.pNext;
+	flags = other.flags;
+
+	dynamicStateCount = other.dynamicStateCount;
+
+	m_dynamicStates = std::vector<vk::VkDynamicState>(other.pDynamicStates, other.pDynamicStates + dynamicStateCount);
+	pDynamicStates = &m_dynamicStates[0];
+
+	return *this;
+}
+
+PipelineCreateInfo::PipelineCreateInfo (vk::VkPipelineLayout		_layout,
+										vk::VkRenderPass			_renderPass,
+										int							_subpass,
+										vk::VkPipelineCreateFlags	_flags)
+{
+	deMemset(static_cast<vk::VkGraphicsPipelineCreateInfo *>(this), 0,
+		sizeof(vk::VkGraphicsPipelineCreateInfo));
+
+	sType = vk::VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
+	pNext = DE_NULL;
+	flags				= _flags;
+	renderPass			= _renderPass;
+	subpass				= _subpass;
+	layout				= _layout;
+	basePipelineHandle	= DE_NULL;
+	basePipelineIndex	= 0;
+	pDynamicState		= DE_NULL;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addShader (const vk::VkPipelineShaderStageCreateInfo& shader)
+{
+	m_shaders.push_back(shader);
+
+	stageCount	= static_cast<deUint32>(m_shaders.size());
+	pStages		= &m_shaders[0];
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineVertexInputStateCreateInfo& state)
+{
+	m_vertexInputState	= state;
+	pVertexInputState	= &m_vertexInputState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineInputAssemblyStateCreateInfo& state)
+{
+	m_inputAssemblyState = state;
+	pInputAssemblyState = &m_inputAssemblyState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineColorBlendStateCreateInfo& state)
+{
+	m_colorBlendStateAttachments	= std::vector<vk::VkPipelineColorBlendAttachmentState>(state.pAttachments, state.pAttachments + state.attachmentCount);
+	m_colorBlendState				= state;
+	m_colorBlendState.pAttachments	= &m_colorBlendStateAttachments[0];
+	pColorBlendState				= &m_colorBlendState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineViewportStateCreateInfo& state)
+{
+	m_viewports					= std::vector<vk::VkViewport>(state.pViewports, state.pViewports + state.viewportCount);
+	m_scissors					= std::vector<vk::VkRect2D>(state.pScissors, state.pScissors + state.scissorCount);
+	m_viewportState				= state;
+	m_viewportState.pViewports	= &m_viewports[0];
+	m_viewportState.pScissors	= &m_scissors[0];
+	pViewportState				= &m_viewportState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineDepthStencilStateCreateInfo& state)
+{
+	m_dynamicDepthStencilState	= state;
+	pDepthStencilState			= &m_dynamicDepthStencilState;
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineTessellationStateCreateInfo& state)
+{
+	m_tessState			= state;
+	pTessellationState	= &m_tessState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineRasterizationStateCreateInfo& state)
+{
+	m_rasterState		= state;
+	pRasterizationState	= &m_rasterState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineMultisampleStateCreateInfo& state)
+{
+	const size_t sampleMaskArrayLen = (sizeof(vk::VkSampleMask) * 8 + state.rasterizationSamples) / (sizeof(vk::VkSampleMask) * 8);
+	m_multisampleStateSampleMask	= std::vector<vk::VkSampleMask>(state.pSampleMask, state.pSampleMask + sampleMaskArrayLen);
+	m_multisampleState				= state;
+	m_multisampleState.pSampleMask	= &m_multisampleStateSampleMask[0];
+	pMultisampleState				= &m_multisampleState;
+
+	return *this;
+}
+
+PipelineCreateInfo& PipelineCreateInfo::addState (const vk::VkPipelineDynamicStateCreateInfo& state)
+{
+	m_dynamicStates					= std::vector<vk::VkDynamicState>(state.pDynamicStates, state.pDynamicStates + state.dynamicStateCount);
+	m_dynamicState					= state;
+	m_dynamicState.pDynamicStates	= &m_dynamicStates[0];
+	pDynamicState					= &m_dynamicState;
+
+	return *this;
+}
+
+SamplerCreateInfo::SamplerCreateInfo (vk::VkFilter				_magFilter,
+									  vk::VkFilter				_minFilter,
+									  vk::VkSamplerMipmapMode	_mipmapMode,
+									  vk::VkSamplerAddressMode	_addressModeU,
+									  vk::VkSamplerAddressMode	_addressModeV,
+									  vk::VkSamplerAddressMode	_addressModeW,
+									  float						_mipLodBias,
+									  vk::VkBool32				_anisotropyEnable,
+									  float						_maxAnisotropy,
+									  vk::VkBool32				_compareEnable,
+									  vk::VkCompareOp			_compareOp,
+									  float						_minLod,
+									  float						_maxLod,
+									  vk::VkBorderColor			_borderColor,
+									  vk::VkBool32				_unnormalizedCoordinates)
+{
+	sType					= vk::VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
+	pNext					= DE_NULL;
+	flags					= 0u;
+	magFilter				= _magFilter;
+	minFilter				= _minFilter;
+	mipmapMode				= _mipmapMode;
+	addressModeU			= _addressModeU;
+	addressModeV			= _addressModeV;
+	addressModeW			= _addressModeW;
+	mipLodBias				= _mipLodBias;
+	anisotropyEnable		= _anisotropyEnable;
+	maxAnisotropy			= _maxAnisotropy;
+	compareEnable			= _compareEnable;
+	compareOp				= _compareOp;
+	minLod					= _minLod;
+	maxLod					= _maxLod;
+	borderColor				= _borderColor;
+	unnormalizedCoordinates = _unnormalizedCoordinates;
+}
+
+} // QueryPool
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolCreateInfoUtil.hpp b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolCreateInfoUtil.hpp
new file mode 100644
index 0000000..1c1f7e5
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolCreateInfoUtil.hpp
@@ -0,0 +1,520 @@
+#ifndef _VKTQUERYPOOLCREATEINFOUTIL_HPP
+#define _VKTQUERYPOOLCREATEINFOUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief CreateInfo utilities
+ *//*--------------------------------------------------------------------*/
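+
+/* PipelineCreateInfo below acts as a small builder on top of
+ * VkGraphicsPipelineCreateInfo: addShader()/addState() copy the supplied
+ * state into members and re-point the corresponding pointer fields.
+ * A sketch of the intended use (illustrative only; the pipeline layout,
+ * render pass and shader modules are assumed to exist):
+ *
+ *	PipelineCreateInfo pipelineCreateInfo(*pipelineLayout, *renderPass, 0, 0);
+ *	pipelineCreateInfo
+ *		.addShader(PipelineCreateInfo::PipelineShaderStage(*vertexModule, "main", vk::VK_SHADER_STAGE_VERTEX_BIT))
+ *		.addShader(PipelineCreateInfo::PipelineShaderStage(*fragmentModule, "main", vk::VK_SHADER_STAGE_FRAGMENT_BIT))
+ *		.addState(PipelineCreateInfo::VertexInputState())
+ *		.addState(PipelineCreateInfo::InputAssemblerState(vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST))
+ *		.addState(PipelineCreateInfo::ViewportState(1))
+ *		.addState(PipelineCreateInfo::RasterizerState())
+ *		.addState(PipelineCreateInfo::MultiSampleState())
+ *		.addState(PipelineCreateInfo::DepthStencilState());
+ */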
+
+#include "vkDefs.hpp"
+#include "tcuVector.hpp"
+#include "deSharedPtr.hpp"
+#include <vector>
+
+namespace vkt
+{
+namespace QueryPool
+{
+
+class ImageSubresourceRange : public vk::VkImageSubresourceRange
+{
+public:
+	ImageSubresourceRange		(vk::VkImageAspectFlags	aspectMask,
+								 deUint32				baseMipLevel	= 0,
+								 deUint32				levelCount		= 1,
+								 deUint32				baseArrayLayer	= 0,
+								 deUint32				layerCount		= 1);
+};
+
+class ComponentMapping : public vk::VkComponentMapping
+{
+public:
+	ComponentMapping			(vk::VkComponentSwizzle r = vk::VK_COMPONENT_SWIZZLE_R,
+								 vk::VkComponentSwizzle g = vk::VK_COMPONENT_SWIZZLE_G,
+								 vk::VkComponentSwizzle b = vk::VK_COMPONENT_SWIZZLE_B,
+								 vk::VkComponentSwizzle a = vk::VK_COMPONENT_SWIZZLE_A);
+};
+
+class ImageViewCreateInfo : public vk::VkImageViewCreateInfo
+{
+public:
+	ImageViewCreateInfo			(vk::VkImage						image,
+								 vk::VkImageViewType				viewType,
+								 vk::VkFormat						format,
+								 const vk::VkImageSubresourceRange&	subresourceRange,
+								 const vk::VkComponentMapping&		components			= ComponentMapping(),
+								 vk::VkImageViewCreateFlags			flags				= 0);
+
+	ImageViewCreateInfo			(vk::VkImage						image,
+								 vk::VkImageViewType				viewType,
+								 vk::VkFormat						format,
+								 const vk::VkComponentMapping&		components			= ComponentMapping(),
+								 vk::VkImageViewCreateFlags			flags				= 0);
+};
+
+class BufferViewCreateInfo : public vk::VkBufferViewCreateInfo
+{
+public:
+	BufferViewCreateInfo		 (vk::VkBuffer		buffer,
+								  vk::VkFormat		format,
+								  vk::VkDeviceSize	offset,
+								  vk::VkDeviceSize	range);
+};
+
+class BufferCreateInfo : public vk::VkBufferCreateInfo
+{
+public:
+	BufferCreateInfo			(vk::VkDeviceSize			size,
+								 vk::VkBufferUsageFlags		usage,
+								 vk::VkSharingMode			sharingMode				= vk::VK_SHARING_MODE_EXCLUSIVE,
+								 deUint32					queueFamilyIndexCount	= 0,
+								 const deUint32*			pQueueFamilyIndices		= DE_NULL,
+								 vk::VkBufferCreateFlags	flags					= 0);
+
+	BufferCreateInfo			(const BufferCreateInfo&	other);
+	BufferCreateInfo& operator=	(const BufferCreateInfo&	other);
+
+private:
+	std::vector<deUint32> m_queueFamilyIndices;
+};
+
+class ImageCreateInfo : public vk::VkImageCreateInfo
+{
+public:
+	ImageCreateInfo				(vk::VkImageType			imageType,
+								 vk::VkFormat				format,
+								 vk::VkExtent3D				extent,
+								 deUint32					mipLevels,
+								 deUint32					arrayLayers,
+								 vk::VkSampleCountFlagBits	samples,
+								 vk::VkImageTiling			tiling,
+								 vk::VkImageUsageFlags		usage,
+								 vk::VkSharingMode			sharingMode				= vk::VK_SHARING_MODE_EXCLUSIVE,
+								 deUint32					queueFamilyIndexCount	= 0,
+								 const deUint32*			pQueueFamilyIndices		= DE_NULL,
+								 vk::VkImageCreateFlags		flags					= 0,
+								 vk::VkImageLayout			initialLayout			= vk::VK_IMAGE_LAYOUT_UNDEFINED);
+
+private:
+	ImageCreateInfo				(const ImageCreateInfo&		other);
+	ImageCreateInfo& operator=	(const ImageCreateInfo&		other);
+
+	std::vector<deUint32> m_queueFamilyIndices;
+};
+
+class FramebufferCreateInfo : public vk::VkFramebufferCreateInfo
+{
+public:
+	FramebufferCreateInfo		(vk::VkRenderPass						renderPass,
+								 const std::vector<vk::VkImageView>&	attachments,
+								 deUint32								width,
+								 deUint32								height,
+								 deUint32								layers);
+};
+
+class AttachmentDescription : public vk::VkAttachmentDescription
+{
+public:
+	AttachmentDescription	(vk::VkFormat				format,
+							 vk::VkSampleCountFlagBits	samples,
+							 vk::VkAttachmentLoadOp		loadOp,
+							 vk::VkAttachmentStoreOp	storeOp,
+							 vk::VkAttachmentLoadOp		stencilLoadOp,
+							 vk::VkAttachmentStoreOp	stencilStoreOp,
+							 vk::VkImageLayout			initialLayout,
+							 vk::VkImageLayout			finalLayout);
+
+	AttachmentDescription	(const vk::VkAttachmentDescription &);
+};
+
+class AttachmentReference : public vk::VkAttachmentReference
+{
+public:
+	AttachmentReference		(deUint32 attachment, vk::VkImageLayout layout);
+	AttachmentReference		(void);
+};
+
+class SubpassDescription : public vk::VkSubpassDescription
+{
+public:
+	SubpassDescription				(vk::VkPipelineBindPoint			pipelineBindPoint,
+									 vk::VkSubpassDescriptionFlags		flags,
+									 deUint32							inputAttachmentCount,
+									 const vk::VkAttachmentReference*	inputAttachments,
+									 deUint32							colorAttachmentCount,
+									 const vk::VkAttachmentReference*	colorAttachments,
+									 const vk::VkAttachmentReference*	resolveAttachments,
+									 vk::VkAttachmentReference			depthStencilAttachment,
+									 deUint32							preserveAttachmentCount,
+									 const deUint32*					preserveAttachments);
+
+	SubpassDescription				(const vk::VkSubpassDescription&	other);
+	SubpassDescription				(const SubpassDescription&			other);
+	SubpassDescription& operator=	(const SubpassDescription&			other);
+
+private:
+	std::vector<vk::VkAttachmentReference>	m_inputAttachments;
+	std::vector<vk::VkAttachmentReference>	m_colorAttachments;
+	std::vector<vk::VkAttachmentReference>	m_resolveAttachments;
+	std::vector<deUint32>					m_preserveAttachments;
+
+	vk::VkAttachmentReference				m_depthStencilAttachment;
+};
+
+class SubpassDependency : public vk::VkSubpassDependency
+{
+public:
+	SubpassDependency (	deUint32					srcSubpass,
+						deUint32					dstSubpass,
+						vk::VkPipelineStageFlags	srcStageMask,
+						vk::VkPipelineStageFlags	dstStageMask,
+						vk::VkAccessFlags			srcAccessMask,
+						vk::VkAccessFlags			dstAccessMask,
+						vk::VkDependencyFlags		dependencyFlags);
+
+	SubpassDependency (const vk::VkSubpassDependency& other);
+};
+
+class RenderPassCreateInfo : public vk::VkRenderPassCreateInfo
+{
+public:
+	RenderPassCreateInfo (const std::vector<vk::VkAttachmentDescription>&	attachments,
+						  const std::vector<vk::VkSubpassDescription>&		subpasses,
+						  const std::vector<vk::VkSubpassDependency>&		dependiences		= std::vector<vk::VkSubpassDependency>());
+
+	RenderPassCreateInfo (deUint32											attachmentCount	= 0,
+						  const vk::VkAttachmentDescription*				pAttachments	= DE_NULL,
+						  deUint32											subpassCount	= 0,
+						  const vk::VkSubpassDescription*					pSubpasses		= DE_NULL,
+						  deUint32											dependencyCount	= 0,
+						  const vk::VkSubpassDependency*					pDependiences	= DE_NULL);
+
+	void addAttachment	(vk::VkAttachmentDescription						attachment);
+	void addSubpass		(vk::VkSubpassDescription							subpass);
+	void addDependency	(vk::VkSubpassDependency							dependency);
+
+private:
+	std::vector<AttachmentDescription>			m_attachments;
+	std::vector<SubpassDescription>				m_subpasses;
+	std::vector<SubpassDependency>				m_dependiences;
+
+	std::vector<vk::VkAttachmentDescription>	m_attachmentsStructs;
+	std::vector<vk::VkSubpassDescription>		m_subpassesStructs;
+	std::vector<vk::VkSubpassDependency>		m_dependiencesStructs;
+
+	RenderPassCreateInfo			(const RenderPassCreateInfo &other); //Not allowed!
+	RenderPassCreateInfo& operator= (const RenderPassCreateInfo &other); //Not allowed!
+};
+
+class RenderPassBeginInfo : public vk::VkRenderPassBeginInfo
+{
+public:
+	RenderPassBeginInfo (vk::VkRenderPass						renderPass,
+						 vk::VkFramebuffer						framebuffer,
+						 vk::VkRect2D							renderArea,
+						 const std::vector<vk::VkClearValue>&	clearValues = std::vector<vk::VkClearValue>());
+
+private:
+	std::vector<vk::VkClearValue> m_clearValues;
+
+	RenderPassBeginInfo				(const RenderPassBeginInfo&	other); //Not allowed!
+	RenderPassBeginInfo& operator=	(const RenderPassBeginInfo&	other); //Not allowed!
+};
+
+class CmdPoolCreateInfo : public vk::VkCommandPoolCreateInfo
+{
+public:
+	CmdPoolCreateInfo (deUint32						queueFamilyIndex,
+					   vk::VkCommandPoolCreateFlags flags				= vk::VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
+};
+
+class CmdBufferBeginInfo : public vk::VkCommandBufferBeginInfo
+{
+public:
+	CmdBufferBeginInfo (vk::VkCommandBufferUsageFlags		flags					= 0);
+};
+
+class DescriptorPoolSize : public vk::VkDescriptorPoolSize
+{
+public:
+	DescriptorPoolSize (vk::VkDescriptorType _type, deUint32 _descriptorCount)
+	{
+		type			= _type;
+		descriptorCount = _descriptorCount;
+	}
+};
+
+class DescriptorPoolCreateInfo : public vk::VkDescriptorPoolCreateInfo
+{
+public:
+	DescriptorPoolCreateInfo (const std::vector<vk::VkDescriptorPoolSize>&	poolSizeCounts,
+							  vk::VkDescriptorPoolCreateFlags				flags,
+							  deUint32										maxSets);
+
+	DescriptorPoolCreateInfo& addDescriptors (vk::VkDescriptorType type, deUint32 count);
+
+private:
+	std::vector<vk::VkDescriptorPoolSize> m_poolSizeCounts;
+};
+
+class DescriptorSetLayoutCreateInfo : public vk::VkDescriptorSetLayoutCreateInfo
+{
+public:
+	DescriptorSetLayoutCreateInfo (deUint32 bindingCount, const vk::VkDescriptorSetLayoutBinding* pBindings);
+};
+
+class PipelineLayoutCreateInfo : public vk::VkPipelineLayoutCreateInfo
+{
+public:
+	PipelineLayoutCreateInfo (deUint32										descriptorSetCount,
+							  const vk::VkDescriptorSetLayout*				pSetLayouts,
+							  deUint32										pushConstantRangeCount	= 0,
+							  const vk::VkPushConstantRange*				pPushConstantRanges		= DE_NULL);
+
+	PipelineLayoutCreateInfo (const std::vector<vk::VkDescriptorSetLayout>&	setLayouts				= std::vector<vk::VkDescriptorSetLayout>(),
+							  deUint32										pushConstantRangeCount	= 0,
+							  const vk::VkPushConstantRange*				pPushConstantRanges		= DE_NULL);
+
+private:
+	std::vector<vk::VkDescriptorSetLayout>	m_setLayouts;
+	std::vector<vk::VkPushConstantRange>	m_pushConstantRanges;
+};
+
+class PipelineCreateInfo : public vk::VkGraphicsPipelineCreateInfo
+{
+public:
+	class VertexInputState : public vk::VkPipelineVertexInputStateCreateInfo
+	{
+	public:
+		VertexInputState (deUint32										vertexBindingDescriptionCount	= 0,
+						  const vk::VkVertexInputBindingDescription*	pVertexBindingDescriptions		= NULL,
+						  deUint32										vertexAttributeDescriptionCount	= 0,
+						  const vk::VkVertexInputAttributeDescription*	pVertexAttributeDescriptions	= NULL);
+	};
+
+	class InputAssemblerState : public vk::VkPipelineInputAssemblyStateCreateInfo
+	{
+	public:
+		InputAssemblerState (vk::VkPrimitiveTopology topology, vk::VkBool32 primitiveRestartEnable = false);
+	};
+
+	class TessellationState : public vk::VkPipelineTessellationStateCreateInfo
+	{
+	public:
+		TessellationState (deUint32 patchControlPoints = 0);
+	};
+
+	class ViewportState : public vk::VkPipelineViewportStateCreateInfo
+	{
+	public:
+		ViewportState				(deUint32						viewportCount,
+									 std::vector<vk::VkViewport>	viewports		= std::vector<vk::VkViewport>(0),
+									 std::vector<vk::VkRect2D>		scissors		= std::vector<vk::VkRect2D>(0));
+
+		ViewportState				(const ViewportState&			other);
+		ViewportState& operator=	(const ViewportState&			other);
+
+		std::vector<vk::VkViewport> m_viewports;
+		std::vector<vk::VkRect2D>	m_scissors;
+	};
+
+	class RasterizerState : public vk::VkPipelineRasterizationStateCreateInfo
+	{
+	public:
+		RasterizerState (vk::VkBool32			depthClampEnable		= false,
+						 vk::VkBool32			rasterizerDiscardEnable = false,
+						 vk::VkPolygonMode		polygonMode				= vk::VK_POLYGON_MODE_FILL,
+						 vk::VkCullModeFlags	cullMode				= vk::VK_CULL_MODE_NONE,
+						 vk::VkFrontFace		frontFace				= vk::VK_FRONT_FACE_CLOCKWISE,
+						 vk::VkBool32			depthBiasEnable			= true,
+						 float					depthBiasConstantFactor	= 0.0f,
+						 float					depthBiasClamp			= 0.0f,
+						 float					depthBiasSlopeFactor	= 0.0f,
+						 float					lineWidth				= 1.0f);
+	};
+
+	class MultiSampleState : public vk::VkPipelineMultisampleStateCreateInfo
+	{
+	public:
+		MultiSampleState			(vk::VkSampleCountFlagBits				rasterizationSamples		= vk::VK_SAMPLE_COUNT_1_BIT,
+									 vk::VkBool32							sampleShadingEnable			= false,
+									 float									minSampleShading			= 0.0f,
+									 const std::vector<vk::VkSampleMask>&	sampleMask					= std::vector<vk::VkSampleMask>(1, 0xffffffff),
+									 bool									alphaToCoverageEnable		= false,
+									 bool									alphaToOneEnable			= false);
+
+		MultiSampleState			(const MultiSampleState&				other);
+		MultiSampleState& operator= (const MultiSampleState&				other);
+
+	private:
+		std::vector<vk::VkSampleMask> m_sampleMask;
+	};
+
+	class ColorBlendState : public vk::VkPipelineColorBlendStateCreateInfo
+	{
+	public:
+		class Attachment : public vk::VkPipelineColorBlendAttachmentState
+		{
+		public:
+			Attachment (vk::VkBool32		blendEnable			= false,
+						vk::VkBlendFactor	srcColorBlendFactor	= vk::VK_BLEND_FACTOR_SRC_COLOR,
+						vk::VkBlendFactor	dstColorBlendFactor	= vk::VK_BLEND_FACTOR_DST_COLOR,
+						vk::VkBlendOp		colorBlendOp		= vk::VK_BLEND_OP_ADD,
+						vk::VkBlendFactor	srcAlphaBlendFactor	= vk::VK_BLEND_FACTOR_SRC_COLOR,
+						vk::VkBlendFactor	dstAlphaBlendFactor	= vk::VK_BLEND_FACTOR_DST_COLOR,
+						vk::VkBlendOp		alphaBlendOp		= vk::VK_BLEND_OP_ADD,
+						deUint8				colorWriteMask		= 0xff);
+		};
+
+		ColorBlendState (const std::vector<vk::VkPipelineColorBlendAttachmentState>&	attachments,
+						 vk::VkBool32													alphaToCoverageEnable	= false,
+						 vk::VkLogicOp													logicOp					= vk::VK_LOGIC_OP_COPY);
+
+		ColorBlendState (deUint32														attachmentCount,
+						 const vk::VkPipelineColorBlendAttachmentState*					attachments,
+						 vk::VkBool32													logicOpEnable			= false,
+						 vk::VkLogicOp													logicOp					= vk::VK_LOGIC_OP_COPY);
+
+		ColorBlendState (const vk::VkPipelineColorBlendStateCreateInfo&					createInfo);
+		ColorBlendState (const ColorBlendState&											createInfo,
+						 std::vector<float>												blendConstants			= std::vector<float>(4));
+
+	private:
+		std::vector<vk::VkPipelineColorBlendAttachmentState> m_attachments;
+	};
+
+	class DepthStencilState : public vk::VkPipelineDepthStencilStateCreateInfo
+	{
+	public:
+		class StencilOpState : public vk::VkStencilOpState
+		{
+		public:
+			StencilOpState (vk::VkStencilOp failOp					= vk::VK_STENCIL_OP_REPLACE,
+							vk::VkStencilOp passOp					= vk::VK_STENCIL_OP_REPLACE,
+							vk::VkStencilOp depthFailOp				= vk::VK_STENCIL_OP_REPLACE,
+							vk::VkCompareOp compareOp				= vk::VK_COMPARE_OP_ALWAYS,
+							deUint32		compareMask				= 0xffffffffu,
+							deUint32		writeMask				= 0xffffffffu,
+							deUint32		reference				= 0);
+		};
+
+		DepthStencilState (vk::VkBool32		depthTestEnable			= false,
+						   vk::VkBool32		depthWriteEnable		= false,
+						   vk::VkCompareOp	depthCompareOp			= vk::VK_COMPARE_OP_ALWAYS,
+						   vk::VkBool32		depthBoundsTestEnable	= false,
+						   vk::VkBool32		stencilTestEnable		= false,
+						   StencilOpState	front					= StencilOpState(),
+						   StencilOpState	back					= StencilOpState(),
+						   float			minDepthBounds			= -1.0f,
+						   float			maxDepthBounds			= 1.0f);
+	};
+
+	class PipelineShaderStage : public vk::VkPipelineShaderStageCreateInfo
+	{
+	public:
+		PipelineShaderStage (vk::VkShaderModule shaderModule, const char* pName, vk::VkShaderStageFlagBits stage);
+	};
+
+	class DynamicState : public vk::VkPipelineDynamicStateCreateInfo
+	{
+	public:
+		DynamicState			(const std::vector<vk::VkDynamicState>& dynamicStates = std::vector<vk::VkDynamicState>(0));
+
+		DynamicState			(const DynamicState& other);
+		DynamicState& operator= (const DynamicState& other);
+
+		std::vector<vk::VkDynamicState> m_dynamicStates;
+	};
+
+	PipelineCreateInfo				(vk::VkPipelineLayout								layout,
+								     vk::VkRenderPass									renderPass,
+									 int												subpass,
+									 vk::VkPipelineCreateFlags							flags);
+
+	PipelineCreateInfo& addShader	(const vk::VkPipelineShaderStageCreateInfo&			shader);
+
+	PipelineCreateInfo& addState	(const vk::VkPipelineVertexInputStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineInputAssemblyStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineColorBlendStateCreateInfo&		state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineViewportStateCreateInfo&		state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineDepthStencilStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineTessellationStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineRasterizationStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineMultisampleStateCreateInfo&	state);
+	PipelineCreateInfo& addState	(const vk::VkPipelineDynamicStateCreateInfo&		state);
+
+private:
+	std::vector<vk::VkPipelineShaderStageCreateInfo>		m_shaders;
+
+	vk::VkPipelineVertexInputStateCreateInfo				m_vertexInputState;
+	vk::VkPipelineInputAssemblyStateCreateInfo				m_inputAssemblyState;
+	std::vector<vk::VkPipelineColorBlendAttachmentState>	m_colorBlendStateAttachments;
+	vk::VkPipelineColorBlendStateCreateInfo					m_colorBlendState;
+	vk::VkPipelineViewportStateCreateInfo					m_viewportState;
+	vk::VkPipelineDepthStencilStateCreateInfo				m_dynamicDepthStencilState;
+	vk::VkPipelineTessellationStateCreateInfo				m_tessState;
+	vk::VkPipelineRasterizationStateCreateInfo				m_rasterState;
+	vk::VkPipelineMultisampleStateCreateInfo				m_multisampleState;
+	vk::VkPipelineDynamicStateCreateInfo					m_dynamicState;
+
+	std::vector<vk::VkDynamicState>							m_dynamicStates;
+
+	std::vector<vk::VkViewport>								m_viewports;
+	std::vector<vk::VkRect2D>								m_scissors;
+
+	std::vector<vk::VkSampleMask>							m_multisampleStateSampleMask;
+};
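+
+// Illustrative sketch only: the wrappers above fill in sType/pNext and keep the arrays
+// they point at alive, so a graphics pipeline can be assembled by chaining addShader()
+// and addState() calls. Identifiers such as pipelineLayout, renderPass, vs and vkd are
+// placeholders, not names defined in this file:
+//
+//	PipelineCreateInfo createInfo(*pipelineLayout, *renderPass, 0, 0);
+//	createInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*vs, "main", vk::VK_SHADER_STAGE_VERTEX_BIT));
+//	createInfo.addState(PipelineCreateInfo::VertexInputState());
+//	createInfo.addState(PipelineCreateInfo::InputAssemblerState(vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST));
+//	// ... remaining states (viewport, rasterizer, color blend, ...) added the same way,
+//	// then the struct is passed to something like vk::createGraphicsPipeline().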
+
+class SamplerCreateInfo : public vk::VkSamplerCreateInfo
+{
+public:
+	SamplerCreateInfo (vk::VkFilter				magFilter				= vk::VK_FILTER_NEAREST,
+					   vk::VkFilter				minFilter				= vk::VK_FILTER_NEAREST,
+					   vk::VkSamplerMipmapMode	mipmapMode				= vk::VK_SAMPLER_MIPMAP_MODE_NEAREST,
+					   vk::VkSamplerAddressMode	addressU				= vk::VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+					   vk::VkSamplerAddressMode	addressV				= vk::VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+					   vk::VkSamplerAddressMode	addressW				= vk::VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+					   float					mipLodBias				= 0.0f,
+					   vk::VkBool32				anisotropyEnable		= vk::VK_FALSE,
+					   float					maxAnisotropy			= 1.0f,
+					   vk::VkBool32				compareEnable			= false,
+					   vk::VkCompareOp			compareOp				= vk::VK_COMPARE_OP_ALWAYS,
+					   float					minLod					= 0.0f,
+					   float					maxLod					= 16.0f,
+					   vk::VkBorderColor		borderColor				= vk::VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
+					   vk::VkBool32				unnormalizedCoordinates	= false);
+};
+
+} // QueryPool
+} // vkt
+
+#endif // _VKTQUERYPOOLCREATEINFOUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolImageObjectUtil.cpp b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolImageObjectUtil.cpp
new file mode 100644
index 0000000..4967248
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolImageObjectUtil.cpp
@@ -0,0 +1,942 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image Object Util
+ *//*--------------------------------------------------------------------*/
+
+#include "vktQueryPoolImageObjectUtil.hpp"
+
+#include "tcuSurface.hpp"
+#include "tcuVectorUtil.hpp"
+
+#include "vkRefUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "vktQueryPoolCreateInfoUtil.hpp"
+#include "vktQueryPoolBufferObjectUtil.hpp"
+
+#include "tcuTextureUtil.hpp"
+
+namespace vkt
+{
+namespace QueryPool
+{
+
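+// MemoryOp::pack copies a tightly packed source buffer (row stride pixelSize * width,
+// slice stride rowStride * height) into a destination laid out with the given pitches;
+// MemoryOp::unpack performs the inverse copy. Passing 0 for either pitch selects the
+// tightly packed value, which also enables the single-memcpy fast path below.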
+void MemoryOp::pack (int				pixelSize,
+					 int				width,
+					 int				height,
+					 int				depth,
+					 vk::VkDeviceSize	rowPitchOrZero,
+					 vk::VkDeviceSize	depthPitchOrZero,
+					 const void *		srcBuffer,
+					 void *				destBuffer)
+{
+	vk::VkDeviceSize rowPitch	= rowPitchOrZero;
+	vk::VkDeviceSize depthPitch	= depthPitchOrZero;
+
+	if (rowPitch == 0)
+		rowPitch = width * pixelSize;
+
+	if (depthPitch == 0)
+		depthPitch = rowPitch * height;
+
+	const vk::VkDeviceSize size = depthPitch * depth;
+
+	const char*	srcRow		= reinterpret_cast<const char*>(srcBuffer);
+	const char*	srcStart	= srcRow;
+	char*		dstRow		= reinterpret_cast<char*>(destBuffer);
+	char*		dstStart	= dstRow;
+
+	if (rowPitch == static_cast<vk::VkDeviceSize>(width * pixelSize) &&
+		depthPitch == static_cast<vk::VkDeviceSize>(rowPitch * height))
+	{
+		// fast path
+		deMemcpy(dstRow, srcRow, static_cast<size_t>(size));
+	}
+	else
+	{
+		// slower, per row path
+		for (int d = 0; d < depth; d++)
+		{
+			vk::VkDeviceSize offsetDepthDst = d * depthPitch;
+			vk::VkDeviceSize offsetDepthSrc = d * (pixelSize * width * height);
+			srcRow = srcStart + offsetDepthSrc;
+			dstRow = dstStart + offsetDepthDst;
+			for (int r = 0; r < height; ++r)
+			{
+				deMemcpy(dstRow, srcRow, static_cast<size_t>(pixelSize * width));
+				srcRow += pixelSize * width;
+				dstRow += rowPitch;
+			}
+		}
+	}
+}
+
+void MemoryOp::unpack (int					pixelSize,
+					   int					width,
+					   int					height,
+					   int					depth,
+					   vk::VkDeviceSize		rowPitchOrZero,
+					   vk::VkDeviceSize		depthPitchOrZero,
+					   const void *			srcBuffer,
+					   void *				destBuffer)
+{
+	vk::VkDeviceSize rowPitch	= rowPitchOrZero;
+	vk::VkDeviceSize depthPitch = depthPitchOrZero;
+
+	if (rowPitch == 0)
+		rowPitch = width * pixelSize;
+
+	if (depthPitch == 0)
+		depthPitch = rowPitch * height;
+
+	const vk::VkDeviceSize size = depthPitch * depth;
+
+	const char*	srcRow		= reinterpret_cast<const char*>(srcBuffer);
+	const char*	srcStart	= srcRow;
+	char*		dstRow		= reinterpret_cast<char*>(destBuffer);
+	char*		dstStart	= dstRow;
+
+	if (rowPitch == static_cast<vk::VkDeviceSize>(width * pixelSize) &&
+		depthPitch == static_cast<vk::VkDeviceSize>(rowPitch * height))
+	{
+		// fast path
+		deMemcpy(dstRow, srcRow, static_cast<size_t>(size));
+	}
+	else
+	{
+		// slower, per row path
+		for (size_t d = 0; d < (size_t)depth; d++)
+		{
+			vk::VkDeviceSize offsetDepthDst = d * (pixelSize * width * height);
+			vk::VkDeviceSize offsetDepthSrc = d * depthPitch;
+			srcRow = srcStart + offsetDepthSrc;
+			dstRow = dstStart + offsetDepthDst;
+			for (int r = 0; r < height; ++r)
+			{
+				deMemcpy(dstRow, srcRow, static_cast<size_t>(pixelSize * width));
+				srcRow += rowPitch;
+				dstRow += pixelSize * width;
+			}
+		}
+	}
+}
+
+Image::Image (const vk::DeviceInterface& vk,
+			  vk::VkDevice				device,
+			  vk::VkFormat				format,
+			  const vk::VkExtent3D&		extend,
+			  deUint32					levelCount,
+			  deUint32					layerCount,
+			  vk::Move<vk::VkImage>		object_)
+	: m_allocation		(DE_NULL)
+	, m_object			(object_)
+	, m_format			(format)
+	, m_extent			(extend)
+	, m_levelCount		(levelCount)
+	, m_layerCount		(layerCount)
+	, m_vk(vk)
+	, m_device(device)
+{
+}
+
+tcu::ConstPixelBufferAccess Image::readSurface (vk::VkQueue					queue,
+												vk::Allocator&				allocator,
+												vk::VkImageLayout			layout,
+												vk::VkOffset3D				offset,
+												int							width,
+												int							height,
+												vk::VkImageAspectFlagBits	aspect,
+												unsigned int				mipLevel,
+												unsigned int				arrayElement)
+{
+	m_pixelAccessData.resize(width * height * vk::mapVkFormat(m_format).getPixelSize());
+	deMemset(m_pixelAccessData.data(), 0, m_pixelAccessData.size());
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		read(queue, allocator, layout, offset, width, height, 1, mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_2D,
+		m_pixelAccessData.data());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		readUsingBuffer(queue, allocator, layout, offset, width, height, 1, mipLevel, arrayElement, aspect, m_pixelAccessData.data());
+	}
+	return tcu::ConstPixelBufferAccess(vk::mapVkFormat(m_format), width, height, 1, m_pixelAccessData.data());
+}
+
+tcu::ConstPixelBufferAccess Image::readVolume (vk::VkQueue					queue,
+											   vk::Allocator&				allocator,
+											   vk::VkImageLayout			layout,
+											   vk::VkOffset3D				offset,
+											   int							width,
+											   int							height,
+											   int							depth,
+											   vk::VkImageAspectFlagBits	aspect,
+											   unsigned int					mipLevel,
+											   unsigned int					arrayElement)
+{
+	m_pixelAccessData.resize(width * height * depth * vk::mapVkFormat(m_format).getPixelSize());
+	deMemset(m_pixelAccessData.data(), 0, m_pixelAccessData.size());
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		read(queue, allocator, layout, offset, width, height, depth, mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_3D,
+		m_pixelAccessData.data());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		readUsingBuffer(queue, allocator, layout, offset, width, height, depth, mipLevel, arrayElement, aspect, m_pixelAccessData.data());
+	}
+	return tcu::ConstPixelBufferAccess(vk::mapVkFormat(m_format), width, height, depth, m_pixelAccessData.data());
+}
+
+tcu::ConstPixelBufferAccess Image::readSurface1D(vk::VkQueue				queue,
+												 vk::Allocator&				allocator,
+												 vk::VkImageLayout			layout,
+												 vk::VkOffset3D				offset,
+												 int						width,
+												 vk::VkImageAspectFlagBits	aspect,
+												 unsigned int				mipLevel,
+												 unsigned int				arrayElement)
+{
+	m_pixelAccessData.resize(width * vk::mapVkFormat(m_format).getPixelSize());
+	deMemset(m_pixelAccessData.data(), 0, m_pixelAccessData.size());
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		read(queue, allocator, layout, offset, width, 1, 1, mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_1D,
+		m_pixelAccessData.data());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		readUsingBuffer(queue, allocator, layout, offset, width, 1, 1, mipLevel, arrayElement, aspect,
+		m_pixelAccessData.data());
+	}
+	return tcu::ConstPixelBufferAccess(vk::mapVkFormat(m_format), width, 1, 1, m_pixelAccessData.data());
+}
+
+void Image::read (vk::VkQueue					queue,
+				  vk::Allocator&				allocator,
+				  vk::VkImageLayout				layout,
+				  vk::VkOffset3D				offset,
+				  int							width,
+				  int							height,
+				  int							depth,
+				  unsigned int					mipLevel,
+				  unsigned int					arrayElement,
+				  vk::VkImageAspectFlagBits		aspect,
+				  vk::VkImageType				type,
+				  void *						data)
+{
+	DE_ASSERT(layout == vk::VK_IMAGE_LAYOUT_GENERAL || layout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+	de::SharedPtr<Image> stagingResource = copyToLinearImage(queue, allocator, layout, offset, width,
+															 height, depth, mipLevel, arrayElement, aspect, type);
+	const vk::VkOffset3D zeroOffset = {0, 0, 0};
+	stagingResource->readLinear(zeroOffset, width, height, depth, 0, 0, aspect, data);
+}
+
+void Image::readUsingBuffer (vk::VkQueue				queue,
+							 vk::Allocator&				allocator,
+							 vk::VkImageLayout			layout,
+							 vk::VkOffset3D				offset,
+							 int						width,
+							 int						height,
+							 int						depth,
+							 unsigned int				mipLevel,
+							 unsigned int				arrayElement,
+							 vk::VkImageAspectFlagBits	aspect,
+							 void *						data)
+{
+	DE_ASSERT(layout == vk::VK_IMAGE_LAYOUT_GENERAL || layout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+	de::SharedPtr<Buffer> stagingResource;
+
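+	// For combined depth/stencil formats only one aspect is copied at a time, so the
+	// staging buffer is sized with the per-aspect texel size used by this utility
+	// (2/3/4 bytes for the depth aspect depending on format, 1 byte for stencil)
+	// rather than the full texel size of the combined format.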
+	bool isCombinedType = isCombinedDepthStencilType(vk::mapVkFormat(m_format).type);
+	vk::VkDeviceSize bufferSize = 0;
+
+	if (!isCombinedType)
+		bufferSize = vk::mapVkFormat(m_format).getPixelSize() * width * height * depth;
+
+	if (isCombinedType)
+	{
+		int pixelSize = 0;
+		switch (m_format)
+		{
+			case vk::VK_FORMAT_D16_UNORM_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 2 : 1;
+				break;
+			case  vk::VK_FORMAT_D32_SFLOAT_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 4 : 1;
+				break;
+			case vk::VK_FORMAT_X8_D24_UNORM_PACK32:
+			case vk::VK_FORMAT_D24_UNORM_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 3 : 1;
+				break;
+
+			default:
+				DE_FATAL("Not implemented");
+		}
+		bufferSize = pixelSize*width*height*depth;
+	}
+
+	BufferCreateInfo stagingBufferResourceCreateInfo(bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT | vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
+	stagingResource = Buffer::createAndAlloc(m_vk, m_device, stagingBufferResourceCreateInfo, allocator, vk::MemoryRequirement::HostVisible);
+
+	{
+		//todo [scygan] get proper queueFamilyIndex
+		CmdPoolCreateInfo copyCmdPoolCreateInfo(0);
+		vk::Unique<vk::VkCommandPool> copyCmdPool(vk::createCommandPool(m_vk, m_device, &copyCmdPoolCreateInfo));
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*copyCmdPool,										// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					bufferCount;
+		};
+		vk::Unique<vk::VkCommandBuffer> copyCmdBuffer(vk::allocateCommandBuffer(m_vk, m_device, &cmdBufferAllocateInfo));
+
+		CmdBufferBeginInfo beginInfo;
+		VK_CHECK(m_vk.beginCommandBuffer(*copyCmdBuffer, &beginInfo));
+
+		if (layout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
+		{
+			layout = vk::VK_IMAGE_LAYOUT_GENERAL;
+
+			vk::VkImageMemoryBarrier barrier;
+			barrier.sType = vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+			barrier.pNext = DE_NULL;
+			barrier.srcAccessMask = 0;
+			barrier.dstAccessMask = 0;
+			barrier.oldLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
+			barrier.newLayout = vk::VK_IMAGE_LAYOUT_GENERAL;
+			barrier.srcQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.dstQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.image = object();
+
+			barrier.subresourceRange.aspectMask = aspect;
+			barrier.subresourceRange.baseMipLevel = 0;
+			barrier.subresourceRange.levelCount = m_levelCount;
+			barrier.subresourceRange.baseArrayLayer = 0;
+			barrier.subresourceRange.layerCount = m_layerCount;
+
+			m_vk.cmdPipelineBarrier(*copyCmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+									0, (const vk::VkMemoryBarrier*)DE_NULL,
+									0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+									1, &barrier);
+		}
+
+		const vk::VkBufferImageCopy region =
+		{
+			0, 0, 0,
+			{ aspect, mipLevel, arrayElement, 1 },
+			offset,
+			{ (deUint32)width, (deUint32)height, (deUint32)depth }
+		};
+
+		m_vk.cmdCopyImageToBuffer(*copyCmdBuffer, object(), layout, stagingResource->object(), 1, &region);
+		VK_CHECK(m_vk.endCommandBuffer(*copyCmdBuffer));
+
+		const vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,
+			1,									// deUint32					commandBufferCount;
+			&copyCmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// TODO: make this less intrusive
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+	}
+
+	char* destPtr = reinterpret_cast<char*>(stagingResource->getBoundMemory().getHostPtr());
+	deMemcpy(data, destPtr, static_cast<size_t>(bufferSize));
+}
+
+tcu::ConstPixelBufferAccess Image::readSurfaceLinear (vk::VkOffset3D				offset,
+													  int							width,
+													  int							height,
+													  int							depth,
+													  vk::VkImageAspectFlagBits		aspect,
+													  unsigned int					mipLevel,
+													  unsigned int					arrayElement)
+{
+	m_pixelAccessData.resize(width * height * vk::mapVkFormat(m_format).getPixelSize());
+	readLinear(offset, width, height, depth, mipLevel, arrayElement, aspect, m_pixelAccessData.data());
+	return tcu::ConstPixelBufferAccess(vk::mapVkFormat(m_format), width, height, 1, m_pixelAccessData.data());
+}
+
+void Image::readLinear (vk::VkOffset3D				offset,
+						int							width,
+						int							height,
+						int							depth,
+						unsigned int				mipLevel,
+						unsigned int				arrayElement,
+						vk::VkImageAspectFlagBits	aspect,
+						void *						data)
+{
+	vk::VkImageSubresource imageSubResource = { aspect, mipLevel, arrayElement };
+
+	vk::VkSubresourceLayout imageLayout;
+	deMemset(&imageLayout, 0, sizeof(imageLayout));
+
+	m_vk.getImageSubresourceLayout(m_device, object(), &imageSubResource, &imageLayout);
+
+	const char* srcPtr = reinterpret_cast<const char*>(getBoundMemory().getHostPtr());
+	srcPtr += imageLayout.offset + getPixelOffset(offset, imageLayout.rowPitch, imageLayout.depthPitch, mipLevel, arrayElement);
+
+	MemoryOp::unpack(vk::mapVkFormat(m_format).getPixelSize(), width, height, depth,
+		imageLayout.rowPitch, imageLayout.depthPitch, srcPtr, data);
+}
+
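+// copyToLinearImage records a vkCmdCopyImage from this image into a freshly created
+// host-visible, linearly tiled staging image of the requested size and waits for the
+// queue to go idle; read() then maps the staging image and unpacks it with readLinear().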
+de::SharedPtr<Image> Image::copyToLinearImage (vk::VkQueue					queue,
+											   vk::Allocator&				allocator,
+											   vk::VkImageLayout			layout,
+											   vk::VkOffset3D				offset,
+											   int							width,
+											   int							height,
+											   int							depth,
+											   unsigned int					mipLevel,
+											   unsigned int					arrayElement,
+											   vk::VkImageAspectFlagBits	aspect,
+											   vk::VkImageType				type)
+{
+	de::SharedPtr<Image> stagingResource;
+	{
+		vk::VkExtent3D stagingExtent = {(deUint32)width, (deUint32)height, (deUint32)depth};
+		ImageCreateInfo stagingResourceCreateInfo(type, m_format, stagingExtent, 1, 1, vk::VK_SAMPLE_COUNT_1_BIT,
+												  vk::VK_IMAGE_TILING_LINEAR, vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT);
+
+		stagingResource = Image::createAndAlloc(m_vk, m_device, stagingResourceCreateInfo, allocator,
+												vk::MemoryRequirement::HostVisible);
+
+		//todo [scygan] get proper queueFamilyIndex
+		CmdPoolCreateInfo copyCmdPoolCreateInfo(0);
+		vk::Unique<vk::VkCommandPool> copyCmdPool(vk::createCommandPool(m_vk, m_device, &copyCmdPoolCreateInfo));
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*copyCmdPool,										// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					bufferCount;
+		};
+		vk::Unique<vk::VkCommandBuffer> copyCmdBuffer(vk::allocateCommandBuffer(m_vk, m_device, &cmdBufferAllocateInfo));
+
+		CmdBufferBeginInfo beginInfo;
+		VK_CHECK(m_vk.beginCommandBuffer(*copyCmdBuffer, &beginInfo));
+
+		transition2DImage(m_vk, *copyCmdBuffer, stagingResource->object(), aspect, vk::VK_IMAGE_LAYOUT_UNDEFINED, vk::VK_IMAGE_LAYOUT_GENERAL);
+
+		const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+		vk::VkImageCopy region = { {aspect, mipLevel, arrayElement, 1}, offset, {aspect, 0, 0, 1}, zeroOffset, {(deUint32)width, (deUint32)height, (deUint32)depth} };
+
+		m_vk.cmdCopyImage(*copyCmdBuffer, object(), layout, stagingResource->object(), vk::VK_IMAGE_LAYOUT_GENERAL, 1, &region);
+		VK_CHECK(m_vk.endCommandBuffer(*copyCmdBuffer));
+
+		const vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,
+			1,									// deUint32					commandBufferCount;
+			&copyCmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// TODO: make this less intrusive
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+	}
+	return stagingResource;
+}
+
+void Image::uploadVolume(const tcu::ConstPixelBufferAccess&	access,
+						 vk::VkQueue						queue,
+						 vk::Allocator&						allocator,
+						 vk::VkImageLayout					layout,
+						 vk::VkOffset3D						offset,
+						 vk::VkImageAspectFlagBits			aspect,
+						 unsigned int						mipLevel,
+						 unsigned int						arrayElement)
+{
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		upload(queue, allocator, layout, offset, access.getWidth(),
+		access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_3D,
+		access.getDataPtr());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		uploadUsingBuffer(queue, allocator, layout, offset, access.getWidth(),
+		access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, access.getDataPtr());
+	}
+}
+
+void Image::uploadSurface (const tcu::ConstPixelBufferAccess&	access,
+						   vk::VkQueue							queue,
+						   vk::Allocator&						allocator,
+						   vk::VkImageLayout					layout,
+						   vk::VkOffset3D						offset,
+						   vk::VkImageAspectFlagBits			aspect,
+						   unsigned int							mipLevel,
+						   unsigned int							arrayElement)
+{
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		upload(queue, allocator, layout, offset, access.getWidth(),
+			access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_2D,
+			access.getDataPtr());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		uploadUsingBuffer(queue, allocator, layout, offset, access.getWidth(),
+			access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, access.getDataPtr());
+	}
+}
+
+void Image::uploadSurface1D (const tcu::ConstPixelBufferAccess&	access,
+							 vk::VkQueue						queue,
+							 vk::Allocator&						allocator,
+							 vk::VkImageLayout					layout,
+							 vk::VkOffset3D						offset,
+							 vk::VkImageAspectFlagBits			aspect,
+							 unsigned int						mipLevel,
+							 unsigned int						arrayElement)
+{
+	if (aspect == vk::VK_IMAGE_ASPECT_COLOR_BIT)
+	{
+		upload(queue, allocator, layout, offset, access.getWidth(),
+			access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, vk::VK_IMAGE_TYPE_1D,
+			access.getDataPtr());
+	}
+	if (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT || aspect == vk::VK_IMAGE_ASPECT_STENCIL_BIT)
+	{
+		uploadUsingBuffer(queue, allocator, layout, offset, access.getWidth(),
+			access.getHeight(), access.getDepth(), mipLevel, arrayElement, aspect, access.getDataPtr());
+	}
+}
+
+void Image::uploadSurfaceLinear (const tcu::ConstPixelBufferAccess&	access,
+								 vk::VkOffset3D						offset,
+								 int								width,
+								 int								height,
+								 int								depth,
+								 vk::VkImageAspectFlagBits			aspect,
+								 unsigned int						mipLevel,
+								 unsigned int						arrayElement)
+{
+	uploadLinear(offset, width, height, depth, mipLevel, arrayElement, aspect, access.getDataPtr());
+}
+
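+// upload() stages the source data through a host-visible, linearly tiled image and then
+// records a vkCmdCopyImage into the target; uploadUsingBuffer() below is the analogous
+// path through a staging buffer and vkCmdCopyBufferToImage, used for depth/stencil data.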
+void Image::upload (vk::VkQueue					queue,
+					vk::Allocator&				allocator,
+					vk::VkImageLayout			layout,
+					vk::VkOffset3D				offset,
+					int							width,
+					int							height,
+					int							depth,
+					unsigned int				mipLevel,
+					unsigned int				arrayElement,
+					vk::VkImageAspectFlagBits	aspect,
+					vk::VkImageType				type,
+					const void *				data)
+{
+	DE_ASSERT(layout == vk::VK_IMAGE_LAYOUT_GENERAL || layout == vk::VK_IMAGE_LAYOUT_UNDEFINED || layout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+	de::SharedPtr<Image> stagingResource;
+	vk::VkExtent3D extent = {(deUint32)width, (deUint32)height, (deUint32)depth};
+	ImageCreateInfo stagingResourceCreateInfo(
+		type, m_format, extent, 1, 1, vk::VK_SAMPLE_COUNT_1_BIT,
+		vk::VK_IMAGE_TILING_LINEAR, vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+
+	stagingResource = Image::createAndAlloc(m_vk, m_device, stagingResourceCreateInfo, allocator,
+								vk::MemoryRequirement::HostVisible);
+
+	const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+	stagingResource->uploadLinear(zeroOffset, width, height, depth, 0, 0, aspect, data);
+
+	{
+		//todo [scygan] get proper queueFamilyIndex
+		CmdPoolCreateInfo copyCmdPoolCreateInfo(0);
+		vk::Unique<vk::VkCommandPool> copyCmdPool(vk::createCommandPool(m_vk, m_device, &copyCmdPoolCreateInfo));
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*copyCmdPool,										// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					bufferCount;
+		};
+
+		vk::Unique<vk::VkCommandBuffer> copyCmdBuffer(vk::allocateCommandBuffer(m_vk, m_device, &cmdBufferAllocateInfo));
+
+		CmdBufferBeginInfo beginInfo;
+		VK_CHECK(m_vk.beginCommandBuffer(*copyCmdBuffer, &beginInfo));
+
+		if (layout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
+		{
+			layout = vk::VK_IMAGE_LAYOUT_GENERAL;
+
+			vk::VkImageMemoryBarrier barrier;
+			barrier.sType = vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+			barrier.pNext = DE_NULL;
+			barrier.srcAccessMask = 0;
+			barrier.dstAccessMask = 0;
+			barrier.oldLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
+			barrier.newLayout = vk::VK_IMAGE_LAYOUT_GENERAL;
+			barrier.srcQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.dstQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.image = object();
+
+			barrier.subresourceRange.aspectMask = aspect;
+			barrier.subresourceRange.baseMipLevel = 0;
+			barrier.subresourceRange.levelCount = m_levelCount;
+			barrier.subresourceRange.baseArrayLayer = 0;
+			barrier.subresourceRange.layerCount = m_layerCount;
+
+			m_vk.cmdPipelineBarrier(*copyCmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+									0, (const vk::VkMemoryBarrier*)DE_NULL,
+									0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+									1, &barrier);
+		}
+
+		transition2DImage(m_vk, *copyCmdBuffer, stagingResource->object(), aspect, vk::VK_IMAGE_LAYOUT_UNDEFINED, vk::VK_IMAGE_LAYOUT_GENERAL);
+
+		vk::VkImageCopy region = {{aspect, 0, 0, 1},
+									zeroOffset,
+									{aspect, mipLevel, arrayElement, 1},
+									offset,
+									{(deUint32)width, (deUint32)height, (deUint32)depth}};
+
+		m_vk.cmdCopyImage(*copyCmdBuffer, stagingResource->object(),
+								vk::VK_IMAGE_LAYOUT_GENERAL, object(), layout, 1, &region);
+		VK_CHECK(m_vk.endCommandBuffer(*copyCmdBuffer));
+
+		const vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,
+			1,									// deUint32					commandBufferCount;
+			&copyCmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// TODO: make this less intrusive
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+	}
+}
+
+void Image::uploadUsingBuffer (vk::VkQueue					queue,
+							   vk::Allocator&				allocator,
+							   vk::VkImageLayout			layout,
+							   vk::VkOffset3D				offset,
+							   int							width,
+							   int							height,
+							   int							depth,
+							   unsigned int					mipLevel,
+							   unsigned int					arrayElement,
+							   vk::VkImageAspectFlagBits	aspect,
+							   const void *					data)
+{
+	DE_ASSERT(layout == vk::VK_IMAGE_LAYOUT_GENERAL || layout == vk::VK_IMAGE_LAYOUT_UNDEFINED || layout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+	de::SharedPtr<Buffer> stagingResource;
+	bool isCombinedType = isCombinedDepthStencilType(vk::mapVkFormat(m_format).type);
+	vk::VkDeviceSize bufferSize = 0;
+	if (!isCombinedType)
+		bufferSize = vk::mapVkFormat(m_format).getPixelSize() *width*height*depth;
+	if (isCombinedType)
+	{
+		int pixelSize = 0;
+		switch (m_format)
+		{
+			case vk::VK_FORMAT_D16_UNORM_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 2 : 1;
+				break;
+			case  vk::VK_FORMAT_D32_SFLOAT_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 4 : 1;
+				break;
+			case vk::VK_FORMAT_X8_D24_UNORM_PACK32:
+			case vk::VK_FORMAT_D24_UNORM_S8_UINT:
+				pixelSize = (aspect == vk::VK_IMAGE_ASPECT_DEPTH_BIT) ? 3 : 1;
+			break;
+
+			default:
+				DE_FATAL("Not implemented");
+		}
+		bufferSize = pixelSize*width*height*depth;
+	}
+	BufferCreateInfo stagingBufferResourceCreateInfo(bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT | vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
+	stagingResource = Buffer::createAndAlloc(m_vk, m_device, stagingBufferResourceCreateInfo, allocator, vk::MemoryRequirement::HostVisible);
+	char* destPtr = reinterpret_cast<char*>(stagingResource->getBoundMemory().getHostPtr());
+	deMemcpy(destPtr, data, static_cast<size_t>(bufferSize));
+	vk::flushMappedMemoryRange(m_vk, m_device, stagingResource->getBoundMemory().getMemory(), stagingResource->getBoundMemory().getOffset(), bufferSize);
+	{
+		//todo [scygan] get proper queueFamilyIndex
+		CmdPoolCreateInfo copyCmdPoolCreateInfo(0);
+		vk::Unique<vk::VkCommandPool> copyCmdPool(vk::createCommandPool(m_vk, m_device, &copyCmdPoolCreateInfo));
+
+		const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			*copyCmdPool,										// VkCommandPool			commandPool;
+			vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,													// deUint32					bufferCount;
+		};
+		vk::Unique<vk::VkCommandBuffer> copyCmdBuffer(vk::allocateCommandBuffer(m_vk, m_device, &cmdBufferAllocateInfo));
+
+		CmdBufferBeginInfo beginInfo;
+		VK_CHECK(m_vk.beginCommandBuffer(*copyCmdBuffer, &beginInfo));
+
+		if (layout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
+		{
+			layout = vk::VK_IMAGE_LAYOUT_GENERAL;
+
+			vk::VkImageMemoryBarrier barrier;
+			barrier.sType = vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+			barrier.pNext = DE_NULL;
+			barrier.srcAccessMask = 0;
+			barrier.dstAccessMask = 0;
+			barrier.oldLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
+			barrier.newLayout = vk::VK_IMAGE_LAYOUT_GENERAL;
+			barrier.srcQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.dstQueueFamilyIndex = vk::VK_QUEUE_FAMILY_IGNORED;
+			barrier.image = object();
+
+			barrier.subresourceRange.aspectMask = aspect;
+			barrier.subresourceRange.baseMipLevel = 0;
+			barrier.subresourceRange.levelCount = m_levelCount;
+			barrier.subresourceRange.baseArrayLayer = 0;
+			barrier.subresourceRange.layerCount = m_layerCount;
+
+			m_vk.cmdPipelineBarrier(*copyCmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+									0, (const vk::VkMemoryBarrier*)DE_NULL,
+									0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+									1, &barrier);
+		}
+
+		vk::VkBufferImageCopy region =
+		{
+			0, 0, 0,
+			{ aspect, mipLevel, arrayElement, 1 },
+			offset,
+			{ (deUint32)width, (deUint32)height, (deUint32)depth }
+		};
+
+		m_vk.cmdCopyBufferToImage(*copyCmdBuffer, stagingResource->object(),
+			object(), layout, 1, &region);
+		VK_CHECK(m_vk.endCommandBuffer(*copyCmdBuffer));
+
+		vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,
+			1,									// deUint32					commandBufferCount;
+			&copyCmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		m_vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+		// TODO: make this less intrusive
+		VK_CHECK(m_vk.queueWaitIdle(queue));
+	}
+}
+
+void Image::uploadLinear (vk::VkOffset3D			offset,
+						  int						width,
+						  int						height,
+						  int						depth,
+						  unsigned int				mipLevel,
+						  unsigned int				arrayElement,
+						  vk::VkImageAspectFlagBits	aspect,
+						  const void *				data)
+{
+	vk::VkSubresourceLayout imageLayout;
+
+	vk::VkImageSubresource imageSubResource = {aspect, mipLevel, arrayElement};
+
+	m_vk.getImageSubresourceLayout(m_device, object(), &imageSubResource,
+													&imageLayout);
+
+	char* destPtr = reinterpret_cast<char*>(getBoundMemory().getHostPtr());
+
+	destPtr += imageLayout.offset + getPixelOffset(offset, imageLayout.rowPitch, imageLayout.depthPitch, mipLevel, arrayElement);
+
+	MemoryOp::pack(vk::mapVkFormat(m_format).getPixelSize(), width, height, depth,
+		imageLayout.rowPitch, imageLayout.depthPitch, data, destPtr);
+}
+
+vk::VkDeviceSize Image::getPixelOffset (vk::VkOffset3D		offset,
+										vk::VkDeviceSize	rowPitch,
+										vk::VkDeviceSize	depthPitch,
+										unsigned int		level,
+										unsigned int		layer)
+{
+	DE_ASSERT(level < m_levelCount);
+	DE_ASSERT(layer < m_layerCount);
+
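+	// Walk the mip chain accumulating the byte size of each level so that the offset of
+	// (layer, level) can be computed; rowPitch and depthPitch come from the linear-tiling
+	// subresource layout queried by the caller.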
+	vk::VkDeviceSize mipLevelSizes[32];
+	vk::VkDeviceSize mipLevelRectSizes[32];
+	tcu::IVec3 mipExtend = tcu::IVec3(m_extent.width, m_extent.height, m_extent.depth);
+
+	vk::VkDeviceSize arrayElemSize = 0;
+	for (unsigned int i = 0; i < m_levelCount && (mipExtend[0] > 1 || mipExtend[1] > 1 || mipExtend[2] > 1); ++i)
+	{
+		// Level size is the full 3D size of the level; rect size is one 2D slice of it.
+		mipLevelSizes[i]	 = mipExtend[2] * depthPitch;
+		mipLevelRectSizes[i] = mipExtend[1] * rowPitch;
+
+		arrayElemSize += mipLevelSizes[i];
+
+		mipExtend = tcu::max(mipExtend / 2, tcu::IVec3(1));
+	}
+
+	vk::VkDeviceSize pixelOffset = layer * arrayElemSize;
+	for (size_t i = 0; i < level; ++i)
+	{
+		pixelOffset += mipLevelSizes[i];
+	}
+	pixelOffset += offset.z * mipLevelRectSizes[level];
+	pixelOffset += offset.y * rowPitch;
+	pixelOffset += offset.x;
+
+	return pixelOffset;
+}
+
+void Image::bindMemory (de::MovePtr<vk::Allocation> allocation)
+{
+	DE_ASSERT(allocation);
+	VK_CHECK(m_vk.bindImageMemory(m_device, *m_object, allocation->getMemory(), allocation->getOffset()));
+
+	DE_ASSERT(!m_allocation);
+	m_allocation = allocation;
+}
+
+de::SharedPtr<Image> Image::createAndAlloc(const vk::DeviceInterface&	vk,
+										   vk::VkDevice					device,
+										   const vk::VkImageCreateInfo& createInfo,
+										   vk::Allocator&				allocator,
+										   vk::MemoryRequirement		memoryRequirement)
+{
+	de::SharedPtr<Image> ret = create(vk, device, createInfo);
+
+	vk::VkMemoryRequirements imageRequirements = vk::getImageMemoryRequirements(vk, device, ret->object());
+	ret->bindMemory(allocator.allocate(imageRequirements, memoryRequirement));
+	return ret;
+}
+
+de::SharedPtr<Image> Image::create(const vk::DeviceInterface&	vk,
+								   vk::VkDevice					device,
+								   const vk::VkImageCreateInfo	&createInfo)
+{
+	return de::SharedPtr<Image>(new Image(vk, device, createInfo.format, createInfo.extent,
+								createInfo.mipLevels, createInfo.arrayLayers,
+								vk::createImage(vk, device, &createInfo)));
+}
+
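+// transition2DImage records a full-subresource-range image memory barrier that moves the
+// image between the given layouts; the initialTransition* helpers below wrap it for the
+// common UNDEFINED -> target transitions of the individual aspects.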
+void transition2DImage (const vk::DeviceInterface&	vk,
+						vk::VkCommandBuffer				cmdBuffer,
+						vk::VkImage					image,
+						vk::VkImageAspectFlags		aspectMask,
+						vk::VkImageLayout			oldLayout,
+						vk::VkImageLayout			newLayout)
+{
+	vk::VkImageMemoryBarrier barrier;
+	barrier.sType					= vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+	barrier.pNext					= DE_NULL;
+	barrier.srcAccessMask				= 0;
+	barrier.dstAccessMask				= 0;
+	barrier.oldLayout				= oldLayout;
+	barrier.newLayout				= newLayout;
+	barrier.srcQueueFamilyIndex		= vk::VK_QUEUE_FAMILY_IGNORED;
+	barrier.dstQueueFamilyIndex	= vk::VK_QUEUE_FAMILY_IGNORED;
+	barrier.image					= image;
+	barrier.subresourceRange.aspectMask		= aspectMask;
+	barrier.subresourceRange.baseMipLevel	= 0;
+	barrier.subresourceRange.levelCount		= 1;
+	barrier.subresourceRange.baseArrayLayer = 0;
+	barrier.subresourceRange.layerCount		= 1;
+
+	vk.cmdPipelineBarrier(cmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (vk::VkDependencyFlags)0,
+						  0, (const vk::VkMemoryBarrier*)DE_NULL,
+						  0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+						  1, &barrier);
+}
+
+void initialTransitionColor2DImage (const vk::DeviceInterface &vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout)
+{
+	transition2DImage(vk, cmdBuffer, image, vk::VK_IMAGE_ASPECT_COLOR_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, layout);
+}
+
+void initialTransitionDepth2DImage (const vk::DeviceInterface &vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout)
+{
+	transition2DImage(vk, cmdBuffer, image, vk::VK_IMAGE_ASPECT_DEPTH_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, layout);
+}
+
+void initialTransitionStencil2DImage (const vk::DeviceInterface &vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout)
+{
+	transition2DImage(vk, cmdBuffer, image, vk::VK_IMAGE_ASPECT_STENCIL_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, layout);
+}
+
+void initialTransitionDepthStencil2DImage (const vk::DeviceInterface &vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout)
+{
+	transition2DImage(vk, cmdBuffer, image, vk::VK_IMAGE_ASPECT_DEPTH_BIT | vk::VK_IMAGE_ASPECT_STENCIL_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, layout);
+}
+
+} // QueryPool
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolImageObjectUtil.hpp b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolImageObjectUtil.hpp
new file mode 100644
index 0000000..60759a8
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolImageObjectUtil.hpp
@@ -0,0 +1,287 @@
+#ifndef _VKTQUERYPOOLIMAGEOBJECTUTIL_HPP
+#define _VKTQUERYPOOLIMAGEOBJECTUTIL_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Image Object Util
+ *//*--------------------------------------------------------------------*/
+
+#include "vkMemUtil.hpp"
+#include "vkRefUtil.hpp"
+
+#include "deSharedPtr.hpp"
+
+#include "tcuTexture.hpp"
+
+namespace vkt
+{
+namespace QueryPool
+{
+
+class MemoryOp
+{
+public:
+	static void pack	(int					pixelSize,
+						 int					width,
+						 int					height,
+						 int					depth,
+						 vk::VkDeviceSize		rowPitchOrZero,
+						 vk::VkDeviceSize		depthPitchOrZero,
+						 const void *			srcBuffer,
+						 void *					destBuffer);
+
+	static void unpack	(int					pixelSize,
+						 int					width,
+						 int					height,
+						 int					depth,
+						 vk::VkDeviceSize		rowPitchOrZero,
+						 vk::VkDeviceSize		depthPitchOrZero,
+						 const void *			srcBuffer,
+						 void *					destBuffer);
+};
+
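+// Image bundles a VkImage with its backing allocation and provides upload/readback
+// helpers that stage data either through a linearly tiled image or through a buffer.
+// Typical use (illustrative sketch only; vkd, createInfo, access, zeroOffset, width and
+// height are placeholders):
+//
+//	de::SharedPtr<Image> image = Image::createAndAlloc(vkd, device, createInfo, allocator);
+//	image->uploadSurface(access, queue, allocator, vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset,
+//						 vk::VK_IMAGE_ASPECT_COLOR_BIT);
+//	tcu::ConstPixelBufferAccess result = image->readSurface(queue, allocator,
+//		vk::VK_IMAGE_LAYOUT_GENERAL, zeroOffset, width, height, vk::VK_IMAGE_ASPECT_COLOR_BIT);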
+class Image
+{
+public:
+	static de::SharedPtr<Image> create				(const vk::DeviceInterface& vk, vk::VkDevice device, const vk::VkImageCreateInfo& createInfo);
+
+	static de::SharedPtr<Image> createAndAlloc		(const vk::DeviceInterface&				vk,
+													 vk::VkDevice							device,
+													 const vk::VkImageCreateInfo&			createInfo,
+													 vk::Allocator&							allocator,
+													 vk::MemoryRequirement					memoryRequirement = vk::MemoryRequirement::Any);
+
+	tcu::ConstPixelBufferAccess readSurface			(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	tcu::ConstPixelBufferAccess readSurface1D		(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	tcu::ConstPixelBufferAccess readVolume			(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	tcu::ConstPixelBufferAccess readSurfaceLinear	(vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	void						read				(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 vk::VkImageType						type,
+													 void *									data);
+
+	void						readUsingBuffer		(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 void *									data);
+
+	void						readLinear			(vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 void *									data);
+
+	void						uploadVolume		(const tcu::ConstPixelBufferAccess&		access,
+													 vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	void						uploadSurface		 (const tcu::ConstPixelBufferAccess&	access,
+														vk::VkQueue							queue,
+														vk::Allocator&						allocator,
+														vk::VkImageLayout					layout,
+														vk::VkOffset3D						offset,
+														vk::VkImageAspectFlagBits			aspect,
+														unsigned int						mipLevel = 0,
+														unsigned int						arrayElement = 0);
+
+	void						uploadSurface1D		(const tcu::ConstPixelBufferAccess&		access,
+													 vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	void						uploadSurfaceLinear	(const tcu::ConstPixelBufferAccess&		access,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 vk::VkImageAspectFlagBits				aspect,
+													 unsigned int							mipLevel = 0,
+													 unsigned int							arrayElement = 0);
+
+	void						upload				(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 vk::VkImageType						type,
+													 const void *							data);
+
+	void						uploadUsingBuffer	(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 const void *							data);
+
+	void						uploadLinear		(vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 const void *							data);
+
+	de::SharedPtr<Image>		copyToLinearImage	(vk::VkQueue							queue,
+													 vk::Allocator&							allocator,
+													 vk::VkImageLayout						layout,
+													 vk::VkOffset3D							offset,
+													 int									width,
+													 int									height,
+													 int									depth,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement,
+													 vk::VkImageAspectFlagBits				aspect,
+													 vk::VkImageType						type);
+
+	const vk::VkFormat&			getFormat			(void) const											{ return m_format;		}
+	vk::VkImage					object				(void) const											{ return *m_object;		}
+	void						bindMemory			(de::MovePtr<vk::Allocation>			allocation);
+	vk::Allocation				getBoundMemory		(void) const											{ return *m_allocation; }
+
+private:
+	vk::VkDeviceSize			getPixelOffset		(vk::VkOffset3D							offset,
+													 vk::VkDeviceSize						rowPitch,
+													 vk::VkDeviceSize						depthPitch,
+													 unsigned int							mipLevel,
+													 unsigned int							arrayElement);
+
+								Image				(const vk::DeviceInterface&				vk,
+													 vk::VkDevice							device,
+													 vk::VkFormat							format,
+													 const vk::VkExtent3D&					extend,
+													 deUint32								levelCount,
+													 deUint32								layerCount,
+													 vk::Move<vk::VkImage>					object);
+
+	Image											(const Image& other);	// Not allowed!
+	Image&						operator=			(const Image& other);	// Not allowed!
+
+	de::MovePtr<vk::Allocation>	m_allocation;
+	vk::Unique<vk::VkImage>		m_object;
+
+	vk::VkFormat				m_format;
+	vk::VkExtent3D				m_extent;
+	deUint32					m_levelCount;
+	deUint32					m_layerCount;
+
+	std::vector<deUint8>		m_pixelAccessData;
+
+	const vk::DeviceInterface&	m_vk;
+	vk::VkDevice				m_device;
+};
+
+void transition2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageAspectFlags aspectMask, vk::VkImageLayout oldLayout, vk::VkImageLayout newLayout);
+
+void initialTransitionColor2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout);
+
+void initialTransitionDepth2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout);
+
+void initialTransitionStencil2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout);
+
+void initialTransitionDepthStencil2DImage (const vk::DeviceInterface& vk, vk::VkCommandBuffer cmdBuffer, vk::VkImage image, vk::VkImageLayout layout);
+
+} //QueryPool
+} //vkt
+
+#endif // _VKTQUERYPOOLIMAGEOBJECTUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolOcclusionTests.cpp b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolOcclusionTests.cpp
new file mode 100644
index 0000000..72270bc
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolOcclusionTests.cpp
@@ -0,0 +1,1229 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Occlusion Query Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktQueryPoolOcclusionTests.hpp"
+
+#include "vktTestCase.hpp"
+
+#include "vktQueryPoolImageObjectUtil.hpp"
+#include "vktQueryPoolBufferObjectUtil.hpp"
+#include "vktQueryPoolCreateInfoUtil.hpp"
+#include "vkBuilderUtil.hpp"
+#include "vkRefUtil.hpp"
+#include "vkPrograms.hpp"
+
+#include "tcuTestLog.hpp"
+#include "tcuResource.hpp"
+#include "tcuImageCompare.hpp"
+#include "tcuCommandLine.hpp"
+
+using namespace vkt::QueryPool;
+
+namespace
+{
+
+struct StateObjects
+{
+			StateObjects	(const vk::DeviceInterface& vk, vkt::Context &context, const int numVertices, vk::VkPrimitiveTopology primitive);
+	void	setVertices		(const vk::DeviceInterface& vk, std::vector<tcu::Vec4> vertices);
+
+	enum
+	{
+		WIDTH	= 128,
+		HEIGHT	= 128
+	};
+
+	vkt::Context &m_context;
+
+	vk::Move<vk::VkPipeline>		m_pipeline;
+	vk::Move<vk::VkPipelineLayout>	m_pipelineLayout;
+
+	de::SharedPtr<Image>			m_colorAttachmentImage, m_DepthImage;
+	vk::Move<vk::VkImageView>		m_attachmentView;
+	vk::Move<vk::VkImageView>		m_depthView;
+
+	vk::Move<vk::VkRenderPass>		m_renderPass;
+	vk::Move<vk::VkFramebuffer>		m_framebuffer;
+
+	de::SharedPtr<Buffer>			m_vertexBuffer;
+
+	vk::VkFormat					m_colorAttachmentFormat;
+};
+
+StateObjects::StateObjects (const vk::DeviceInterface& vk, vkt::Context &context, const int numVertices, vk::VkPrimitiveTopology primitive)
+	: m_context(context)
+	, m_colorAttachmentFormat(vk::VK_FORMAT_R8G8B8A8_UNORM)
+
+{
+	vk::VkFormat		depthFormat = vk::VK_FORMAT_D16_UNORM;
+	const vk::VkDevice	device		= m_context.getDevice();
+
+	//attachment images and views
+	{
+		vk::VkExtent3D imageExtent =
+		{
+			WIDTH,	// width;
+			HEIGHT,	// height;
+			1		// depth;
+		};
+
+		const ImageCreateInfo colorImageCreateInfo(vk::VK_IMAGE_TYPE_2D, m_colorAttachmentFormat, imageExtent, 1, 1, vk::VK_SAMPLE_COUNT_1_BIT, vk::VK_IMAGE_TILING_OPTIMAL,
+												   vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT);
+
+		m_colorAttachmentImage	= Image::createAndAlloc(vk, device, colorImageCreateInfo, m_context.getDefaultAllocator());
+
+		const ImageViewCreateInfo attachmentViewInfo(m_colorAttachmentImage->object(), vk::VK_IMAGE_VIEW_TYPE_2D, m_colorAttachmentFormat);
+		m_attachmentView		= vk::createImageView(vk, device, &attachmentViewInfo);
+
+		ImageCreateInfo depthImageCreateInfo(vk::VK_IMAGE_TYPE_2D, depthFormat, imageExtent, 1, 1, vk::VK_SAMPLE_COUNT_1_BIT, vk::VK_IMAGE_TILING_OPTIMAL,
+			vk::VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT);
+
+		m_DepthImage			= Image::createAndAlloc(vk, device, depthImageCreateInfo, m_context.getDefaultAllocator());
+
+		// Construct a depth view from the depth image
+		const ImageViewCreateInfo depthViewInfo(m_DepthImage->object(), vk::VK_IMAGE_VIEW_TYPE_2D, depthFormat);
+		m_depthView				= vk::createImageView(vk, device, &depthViewInfo);
+	}
+
+	{
+		// Renderpass and Framebuffer
+
+		RenderPassCreateInfo renderPassCreateInfo;
+		renderPassCreateInfo.addAttachment(AttachmentDescription(m_colorAttachmentFormat,									// format
+																	vk::VK_SAMPLE_COUNT_1_BIT,								// samples
+																	vk::VK_ATTACHMENT_LOAD_OP_CLEAR,						// loadOp
+																	vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,					// storeOp
+																	vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// stencilLoadOp
+																	vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,					// stencilStoreOp
+																	vk::VK_IMAGE_LAYOUT_GENERAL,							// initialLayout
+																	vk::VK_IMAGE_LAYOUT_GENERAL));							// finalLayout
+
+		renderPassCreateInfo.addAttachment(AttachmentDescription(depthFormat,												// format
+																 vk::VK_SAMPLE_COUNT_1_BIT,									// samples
+																 vk::VK_ATTACHMENT_LOAD_OP_CLEAR,							// loadOp
+																 vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,						// storeOp
+																 vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,						// stencilLoadOp
+																 vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,						// stencilStoreOp
+																 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,		// initialLayout
+																 vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL));	// finalLayout
+
+		const vk::VkAttachmentReference colorAttachmentReference =
+		{
+			0,															// attachment
+			vk::VK_IMAGE_LAYOUT_GENERAL									// layout
+		};
+
+		const vk::VkAttachmentReference depthAttachmentReference =
+		{
+			1,															// attachment
+			vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL		// layout
+		};
+
+		renderPassCreateInfo.addSubpass(SubpassDescription(vk::VK_PIPELINE_BIND_POINT_GRAPHICS,					// pipelineBindPoint
+														   0,													// flags
+														   0,													// inputCount
+														   DE_NULL,												// pInputAttachments
+														   1,													// colorCount
+														   &colorAttachmentReference,							// pColorAttachments
+														   DE_NULL,												// pResolveAttachments
+														   depthAttachmentReference,							// depthStencilAttachment
+														   0,													// preserveCount
+														   DE_NULL));											// preserveAttachments
+
+		m_renderPass = vk::createRenderPass(vk, device, &renderPassCreateInfo);
+
+		std::vector<vk::VkImageView> attachments(2);
+		attachments[0] = *m_attachmentView;
+		attachments[1] = *m_depthView;
+
+		FramebufferCreateInfo framebufferCreateInfo(*m_renderPass, attachments, WIDTH, HEIGHT, 1);
+		m_framebuffer = vk::createFramebuffer(vk, device, &framebufferCreateInfo);
+	}
+
+	{
+		// Pipeline
+
+		vk::Unique<vk::VkShaderModule> vs(vk::createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
+		vk::Unique<vk::VkShaderModule> fs(vk::createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
+
+		const PipelineCreateInfo::ColorBlendState::Attachment attachmentState;
+
+		const PipelineLayoutCreateInfo pipelineLayoutCreateInfo;
+		m_pipelineLayout = vk::createPipelineLayout(vk, device, &pipelineLayoutCreateInfo);
+
+		const vk::VkVertexInputBindingDescription vf_binding_desc		=
+		{
+			0,																// binding;
+			4 * (deUint32)sizeof(float),									// stride;
+			vk::VK_VERTEX_INPUT_RATE_VERTEX									// inputRate
+		};
+
+		const vk::VkVertexInputAttributeDescription vf_attribute_desc	=
+		{
+			0,																// location;
+			0,																// binding;
+			vk::VK_FORMAT_R32G32B32A32_SFLOAT,								// format;
+			0																// offset;
+		};
+
+		const vk::VkPipelineVertexInputStateCreateInfo vf_info			=
+		{
+			vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,	// sType;
+			NULL,															// pNext;
+			0u,																// flags;
+			1,																// vertexBindingDescriptionCount;
+			&vf_binding_desc,												// pVertexBindingDescriptions;
+			1,																// vertexAttributeDescriptionCount;
+			&vf_attribute_desc												// pVertexAttributeDescriptions;
+		};
+
+		PipelineCreateInfo pipelineCreateInfo(*m_pipelineLayout, *m_renderPass, 0, 0);
+		pipelineCreateInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*vs, "main", vk::VK_SHADER_STAGE_VERTEX_BIT));
+		pipelineCreateInfo.addShader(PipelineCreateInfo::PipelineShaderStage(*fs, "main", vk::VK_SHADER_STAGE_FRAGMENT_BIT));
+		pipelineCreateInfo.addState(PipelineCreateInfo::InputAssemblerState(primitive));
+		pipelineCreateInfo.addState(PipelineCreateInfo::ColorBlendState(1, &attachmentState));
+		const vk::VkViewport viewport	=
+		{
+			0,		// float x;
+			0,		// float y;
+			WIDTH,	// float width;
+			HEIGHT,	// float height;
+			0.0f,	// float minDepth;
+			1.0f	// float maxDepth;
+		};
+
+		const vk::VkRect2D scissor		=
+		{
+			{
+				0,		// deInt32 x
+				0,		// deInt32 y
+			},		// VkOffset2D	offset;
+			{
+				WIDTH,	// deInt32 width;
+				HEIGHT,	// deInt32 height
+			},		// VkExtent2D	extent;
+		};
+		pipelineCreateInfo.addState(PipelineCreateInfo::ViewportState(1, std::vector<vk::VkViewport>(1, viewport), std::vector<vk::VkRect2D>(1, scissor)));
+		pipelineCreateInfo.addState(PipelineCreateInfo::DepthStencilState(true, true, vk::VK_COMPARE_OP_GREATER_OR_EQUAL));
+		pipelineCreateInfo.addState(PipelineCreateInfo::RasterizerState());
+		pipelineCreateInfo.addState(PipelineCreateInfo::MultiSampleState());
+		pipelineCreateInfo.addState(vf_info);
+		m_pipeline = vk::createGraphicsPipeline(vk, device, DE_NULL, &pipelineCreateInfo);
+	}
+
+	{
+		// Vertex buffer
+		const size_t kBufferSize = numVertices * sizeof(tcu::Vec4);
+		m_vertexBuffer = Buffer::createAndAlloc(vk, device, BufferCreateInfo(kBufferSize, vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), m_context.getDefaultAllocator(), vk::MemoryRequirement::HostVisible);
+	}
+}
+
+void StateObjects::setVertices (const vk::DeviceInterface& vk, std::vector<tcu::Vec4> vertices)
+{
+	const vk::VkDevice device			= m_context.getDevice();
+
+	tcu::Vec4 *ptr = reinterpret_cast<tcu::Vec4*>(m_vertexBuffer->getBoundMemory().getHostPtr());
+	std::copy(vertices.begin(), vertices.end(), ptr);
+
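+	// Flush the mapped range so the vertex data written by the host becomes visible to the device (the memory is only required to be host-visible, not necessarily host-coherent).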
+	vk::flushMappedMemoryRange(vk, device, m_vertexBuffer->getBoundMemory().getMemory(), m_vertexBuffer->getBoundMemory().getOffset(), vertices.size() * sizeof(vertices[0]));
+}
+
+enum OcclusionQueryResultSize
+{
+	RESULT_SIZE_64_BIT,
+	RESULT_SIZE_32_BIT,
+};
+
+enum OcclusionQueryWait
+{
+	WAIT_QUEUE,
+	WAIT_QUERY,
+	WAIT_NONE
+};
+
+enum OcclusionQueryResultsMode
+{
+	RESULTS_MODE_GET,
+	RESULTS_MODE_COPY
+};
+
+struct OcclusionQueryTestVector
+{
+	vk::VkQueryControlFlags		queryControlFlags;
+	OcclusionQueryResultSize	queryResultSize;
+	OcclusionQueryWait			queryWait;
+	OcclusionQueryResultsMode	queryResultsMode;
+	vk::VkDeviceSize			queryResultsStride;
+	bool						queryResultsAvailability;
+	vk::VkPrimitiveTopology		primitiveTopology;
+};
+
+class BasicOcclusionQueryTestInstance : public vkt::TestInstance
+{
+public:
+					BasicOcclusionQueryTestInstance		(vkt::Context &context, const OcclusionQueryTestVector&  testVector);
+					~BasicOcclusionQueryTestInstance	(void);
+private:
+	tcu::TestStatus	iterate								(void);
+
+	enum
+	{
+		NUM_QUERIES_IN_POOL				= 2,
+		QUERY_INDEX_CAPTURE_EMPTY		= 0,
+		QUERY_INDEX_CAPTURE_DRAWCALL	= 1,
+		NUM_VERTICES_IN_DRAWCALL		= 3
+	};
+
+	OcclusionQueryTestVector	m_testVector;
+	StateObjects*				m_stateObjects;
+	vk::VkQueryPool				m_queryPool;
+};
+
+BasicOcclusionQueryTestInstance::BasicOcclusionQueryTestInstance (vkt::Context &context, const OcclusionQueryTestVector&  testVector)
+	: TestInstance		(context)
+	, m_testVector		(testVector)
+{
+	DE_ASSERT(testVector.queryResultSize			== RESULT_SIZE_64_BIT
+			&& testVector.queryWait					== WAIT_QUEUE
+			&& testVector.queryResultsMode			== RESULTS_MODE_GET
+			&& testVector.queryResultsStride		== sizeof(deUint64)
+			&& testVector.queryResultsAvailability	== false
+			&& testVector.primitiveTopology			== vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST);
+
+	if ((m_testVector.queryControlFlags & vk::VK_QUERY_CONTROL_PRECISE_BIT) && !m_context.getDeviceFeatures().occlusionQueryPrecise)
+		throw tcu::NotSupportedError("Precise occlusion queries are not supported");
+
+	m_stateObjects = new StateObjects(m_context.getDeviceInterface(), m_context, NUM_VERTICES_IN_DRAWCALL, m_testVector.primitiveTopology);
+
+	const vk::VkDevice			device	= m_context.getDevice();
+	const vk::DeviceInterface&	vk		= m_context.getDeviceInterface();
+
+	const vk::VkQueryPoolCreateInfo queryPoolCreateInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
+		DE_NULL,
+		0u,
+		vk::VK_QUERY_TYPE_OCCLUSION,
+		NUM_QUERIES_IN_POOL,
+		0
+	};
+	VK_CHECK(vk.createQueryPool(device, &queryPoolCreateInfo, /*pAllocator*/ DE_NULL, &m_queryPool));
+
+	std::vector<tcu::Vec4> vertices(NUM_VERTICES_IN_DRAWCALL);
+	vertices[0] = tcu::Vec4(0.5, 0.5, 0.0, 1.0);
+	vertices[1] = tcu::Vec4(0.5, 0.0, 0.0, 1.0);
+	vertices[2] = tcu::Vec4(0.0, 0.5, 0.0, 1.0);
+	m_stateObjects->setVertices(vk, vertices);
+}
+
+BasicOcclusionQueryTestInstance::~BasicOcclusionQueryTestInstance (void)
+{
+	if (m_stateObjects)
+		delete m_stateObjects;
+
+	if (m_queryPool != DE_NULL)
+	{
+		const vk::VkDevice device		= m_context.getDevice();
+		const vk::DeviceInterface& vk	= m_context.getDeviceInterface();
+
+		vk.destroyQueryPool(device, m_queryPool, /*pAllocator*/ DE_NULL);
+	}
+}
+
+tcu::TestStatus	BasicOcclusionQueryTestInstance::iterate (void)
+{
+	tcu::TestLog &log				= m_context.getTestContext().getLog();
+	const vk::VkDevice device		= m_context.getDevice();
+	const vk::VkQueue queue			= m_context.getUniversalQueue();
+	const vk::DeviceInterface& vk	= m_context.getDeviceInterface();
+
+	const CmdPoolCreateInfo			cmdPoolCreateInfo	(m_context.getUniversalQueueFamilyIndex());
+	vk::Move<vk::VkCommandPool>		cmdPool				= vk::createCommandPool(vk, device, &cmdPoolCreateInfo);
+
+	const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+		DE_NULL,											// const void*				pNext;
+		*cmdPool,											// VkCommandPool			commandPool;
+		vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+		1u,													// deUint32					bufferCount;
+	};
+	vk::Unique<vk::VkCommandBuffer> cmdBuffer			(vk::allocateCommandBuffer(vk, device, &cmdBufferAllocateInfo));
+	const CmdBufferBeginInfo		beginInfo			(0u);
+
+	vk.beginCommandBuffer(*cmdBuffer, &beginInfo);
+
+	transition2DImage(vk, *cmdBuffer, m_stateObjects->m_colorAttachmentImage->object(), vk::VK_IMAGE_ASPECT_COLOR_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
+	transition2DImage(vk, *cmdBuffer, m_stateObjects->m_DepthImage->object(), vk::VK_IMAGE_ASPECT_DEPTH_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
+
+	std::vector<vk::VkClearValue> renderPassClearValues(2);
+	deMemset(&renderPassClearValues[0], 0, static_cast<int>(renderPassClearValues.size()) * sizeof(vk::VkClearValue));
+
+	const vk::VkRect2D renderArea =
+	{
+		{ 0,					0 },
+		{ StateObjects::WIDTH,	StateObjects::HEIGHT }
+	};
+
+	RenderPassBeginInfo renderPassBegin(*m_stateObjects->m_renderPass, *m_stateObjects->m_framebuffer, renderArea, renderPassClearValues);
+
+	vk.cmdResetQueryPool(*cmdBuffer, m_queryPool, 0, NUM_QUERIES_IN_POOL);
+
+	vk.cmdBeginRenderPass(*cmdBuffer, &renderPassBegin, vk::VK_SUBPASS_CONTENTS_INLINE);
+
+	vk.cmdBindPipeline(*cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_stateObjects->m_pipeline);
+
+	vk::VkBuffer vertexBuffer = m_stateObjects->m_vertexBuffer->object();
+	const vk::VkDeviceSize vertexBufferOffset = 0;
+	vk.cmdBindVertexBuffers(*cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
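+	// First, capture an empty query (no draw call between begin and end) - it must report zero samples passed.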
+	vk.cmdBeginQuery(*cmdBuffer, m_queryPool, QUERY_INDEX_CAPTURE_EMPTY, m_testVector.queryControlFlags);
+	vk.cmdEndQuery(*cmdBuffer, m_queryPool,	QUERY_INDEX_CAPTURE_EMPTY);
+
+	vk.cmdBeginQuery(*cmdBuffer, m_queryPool, QUERY_INDEX_CAPTURE_DRAWCALL, m_testVector.queryControlFlags);
+	vk.cmdDraw(*cmdBuffer, NUM_VERTICES_IN_DRAWCALL, 1, 0, 0);
+	vk.cmdEndQuery(*cmdBuffer, m_queryPool,	QUERY_INDEX_CAPTURE_DRAWCALL);
+
+	vk.cmdEndRenderPass(*cmdBuffer);
+
+	transition2DImage(vk, *cmdBuffer, m_stateObjects->m_colorAttachmentImage->object(), vk::VK_IMAGE_ASPECT_COLOR_BIT, vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+	vk.endCommandBuffer(*cmdBuffer);
+
+	// Submit command buffer
+	const vk::VkSubmitInfo submitInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,							// const void*				pNext;
+		0,									// deUint32					waitSemaphoreCount;
+		DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+		(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1,									// deUint32					commandBufferCount;
+		&cmdBuffer.get(),					// const VkCommandBuffer*	pCommandBuffers;
+		0,									// deUint32					signalSemaphoreCount;
+		DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+	};
+	vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+
+	VK_CHECK(vk.queueWaitIdle(queue));
+
+	deUint64 queryResults[NUM_QUERIES_IN_POOL] = { 0 };
+	size_t queryResultsSize		= sizeof(queryResults);
+
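+	// Read both query results as tightly packed 64-bit values; the queue is idle, so the results are expected to be available.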
+	vk::VkResult queryResult	= vk.getQueryPoolResults(device, m_queryPool, 0, NUM_QUERIES_IN_POOL, queryResultsSize, queryResults, sizeof(queryResults[0]), vk::VK_QUERY_RESULT_64_BIT);
+
+	if (queryResult == vk::VK_NOT_READY)
+	{
+		TCU_FAIL("Query result not available, but vkQueueWaitIdle() was called.");
+	}
+
+	VK_CHECK(queryResult);
+
+	log << tcu::TestLog::Section("OcclusionQueryResults",
+		"Occlusion query results");
+	for (int ndx = 0; ndx < DE_LENGTH_OF_ARRAY(queryResults); ++ndx)
+	{
+		log << tcu::TestLog::Message << "query[ slot == " << ndx
+			<< "] result == " << queryResults[ndx] << tcu::TestLog::EndMessage;
+	}
+
+	bool passed = true;
+
+	for (int queryNdx = 0; queryNdx < DE_LENGTH_OF_ARRAY(queryResults); ++queryNdx)
+	{
+
+		deUint64 expectedValue;
+
+		switch (queryNdx)
+		{
+			case QUERY_INDEX_CAPTURE_EMPTY:
+				expectedValue = 0;
+				break;
+			case QUERY_INDEX_CAPTURE_DRAWCALL:
+				expectedValue = NUM_VERTICES_IN_DRAWCALL;
+				break;
+		}
+
+		if ((m_testVector.queryControlFlags & vk::VK_QUERY_CONTROL_PRECISE_BIT) || expectedValue == 0)
+		{
+			// require precise value
+			if (queryResults[queryNdx] != expectedValue)
+			{
+				log << tcu::TestLog::Message << "vkGetQueryPoolResults returned "
+					"wrong value of query for index "
+					<< queryNdx << ", expected " << expectedValue << ", got "
+					<< queryResults[queryNdx] << "." << tcu::TestLog::EndMessage;
+				passed = false;
+			}
+		}
+		else
+		{
+			// require imprecise value > 0
+			if (queryResults[queryNdx] == 0)
+			{
+				log << tcu::TestLog::Message << "vkGetQueryPoolResults returned "
+					"wrong value of query for index "
+					<< queryNdx << ", expected any non-zero value, got "
+					<< queryResults[queryNdx] << "." << tcu::TestLog::EndMessage;
+				passed = false;
+			}
+		}
+	}
+	log << tcu::TestLog::EndSection;
+
+	const vk::VkOffset3D zeroOffset = { 0, 0, 0 };
+
+	tcu::ConstPixelBufferAccess resultImageAccess = m_stateObjects->m_colorAttachmentImage->readSurface(
+				queue, m_context.getDefaultAllocator(), vk::VK_IMAGE_LAYOUT_GENERAL,
+				zeroOffset,  StateObjects::HEIGHT, StateObjects::WIDTH, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+	log << tcu::TestLog::Image("Result", "Result", resultImageAccess);
+
+	if (passed)
+	{
+		return tcu::TestStatus(QP_TEST_RESULT_PASS, "Query result verification passed");
+	}
+	return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Query result verification failed");
+}
+
+class OcclusionQueryTestInstance : public vkt::TestInstance
+{
+public:
+	OcclusionQueryTestInstance		(vkt::Context &context, const OcclusionQueryTestVector& testVector);
+	~OcclusionQueryTestInstance		(void);
+private:
+	tcu::TestStatus					iterate							(void);
+
+	vk::Move<vk::VkCommandBuffer>	recordRender					(vk::VkCommandPool commandPool);
+	vk::Move<vk::VkCommandBuffer>	recordCopyResults				(vk::VkCommandPool commandPool);
+
+	void							captureResults					(deUint64*			retResults,	deUint64*		retAvailability,	bool	allowNotReady);
+	void							logResults						(const deUint64*	results,	const deUint64* availability);
+	bool							validateResults					(const deUint64*	results,	const deUint64* availability,		bool	allowUnavailable,	vk::VkPrimitiveTopology primitiveTopology);
+	void							logRenderTarget					(void);
+
+	enum
+	{
+		NUM_QUERIES_IN_POOL							= 3,
+		QUERY_INDEX_CAPTURE_ALL						= 0,
+		QUERY_INDEX_CAPTURE_PARTIALLY_OCCLUDED		= 1,
+		QUERY_INDEX_CAPTURE_OCCLUDED				= 2
+	};
+	enum
+	{
+		NUM_VERTICES_IN_DRAWCALL					= 3,
+		NUM_VERTICES_IN_PARTIALLY_OCCLUDED_DRAWCALL	= 3,
+		NUM_VERTICES_IN_OCCLUDER_DRAWCALL			= 3,
+		NUM_VERTICES								= NUM_VERTICES_IN_DRAWCALL + NUM_VERTICES_IN_PARTIALLY_OCCLUDED_DRAWCALL + NUM_VERTICES_IN_OCCLUDER_DRAWCALL
+	};
+	enum
+	{
+		START_VERTEX								= 0,
+		START_VERTEX_PARTIALLY_OCCLUDED				= START_VERTEX + NUM_VERTICES_IN_DRAWCALL,
+		START_VERTEX_OCCLUDER						= START_VERTEX_PARTIALLY_OCCLUDED + NUM_VERTICES_IN_PARTIALLY_OCCLUDED_DRAWCALL
+	};
+
+	OcclusionQueryTestVector		m_testVector;
+
+	const vk::VkQueryResultFlags	m_queryResultFlags;
+
+	StateObjects*					m_stateObjects;
+	vk::VkQueryPool					m_queryPool;
+	de::SharedPtr<Buffer>			m_queryPoolResultsBuffer;
+
+	vk::Move<vk::VkCommandPool>		m_commandPool;
+	vk::Move<vk::VkCommandBuffer>	m_renderCommandBuffer;
+	vk::Move<vk::VkCommandBuffer>	m_copyResultsCommandBuffer;
+};
+
+OcclusionQueryTestInstance::OcclusionQueryTestInstance (vkt::Context &context, const OcclusionQueryTestVector& testVector)
+	: vkt::TestInstance		(context)
+	, m_testVector			(testVector)
+	, m_queryResultFlags	((m_testVector.queryWait == WAIT_QUERY					? vk::VK_QUERY_RESULT_WAIT_BIT				: 0)
+							| (m_testVector.queryResultSize == RESULT_SIZE_64_BIT	? vk::VK_QUERY_RESULT_64_BIT				: 0)
+							| (m_testVector.queryResultsAvailability				? vk::VK_QUERY_RESULT_WITH_AVAILABILITY_BIT	: 0))
+{
+	const vk::VkDevice			device				= m_context.getDevice();
+	const vk::DeviceInterface&	vk					= m_context.getDeviceInterface();
+
+	if ((m_testVector.queryControlFlags & vk::VK_QUERY_CONTROL_PRECISE_BIT) && !m_context.getDeviceFeatures().occlusionQueryPrecise)
+		throw tcu::NotSupportedError("Precise occlusion queries are not supported");
+
+	m_stateObjects  = new StateObjects(m_context.getDeviceInterface(), m_context, NUM_VERTICES_IN_DRAWCALL + NUM_VERTICES_IN_PARTIALLY_OCCLUDED_DRAWCALL + NUM_VERTICES_IN_OCCLUDER_DRAWCALL, m_testVector.primitiveTopology);
+
+	const vk::VkQueryPoolCreateInfo queryPoolCreateInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
+		DE_NULL,
+		0u,
+		vk::VK_QUERY_TYPE_OCCLUSION,
+		NUM_QUERIES_IN_POOL,
+		0
+	};
+
+	VK_CHECK(vk.createQueryPool(device, &queryPoolCreateInfo, /*pAllocator*/ DE_NULL, &m_queryPool));
+
+	if (m_testVector.queryResultsMode == RESULTS_MODE_COPY)
+	{
+		const vk::VkDeviceSize	resultsBufferSize			= m_testVector.queryResultsStride * NUM_QUERIES_IN_POOL;
+								m_queryPoolResultsBuffer	= Buffer::createAndAlloc(vk, device, BufferCreateInfo(resultsBufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT), m_context.getDefaultAllocator(), vk::MemoryRequirement::HostVisible);
+	}
+
+	const CmdPoolCreateInfo	cmdPoolCreateInfo		(m_context.getUniversalQueueFamilyIndex());
+							m_commandPool			= vk::createCommandPool(vk, device, &cmdPoolCreateInfo);
+							m_renderCommandBuffer	= recordRender(*m_commandPool);
+
+	if (m_testVector.queryWait == WAIT_QUEUE && m_testVector.queryResultsMode == RESULTS_MODE_COPY)
+	{
+		m_copyResultsCommandBuffer = recordCopyResults(*m_commandPool);
+	}
+}
+
+OcclusionQueryTestInstance::~OcclusionQueryTestInstance (void)
+{
+	const vk::VkDevice device = m_context.getDevice();
+
+	if (m_stateObjects)
+		delete m_stateObjects;
+
+	if (m_queryPool != DE_NULL)
+	{
+		const vk::DeviceInterface& vk = m_context.getDeviceInterface();
+		vk.destroyQueryPool(device, m_queryPool, /*pAllocator*/ DE_NULL);
+	}
+}
+
+tcu::TestStatus OcclusionQueryTestInstance::iterate (void)
+{
+	const vk::VkQueue			queue		= m_context.getUniversalQueue();
+	const vk::DeviceInterface&	vk			= m_context.getDeviceInterface();
+	tcu::TestLog&				log			= m_context.getTestContext().getLog();
+	std::vector<tcu::Vec4>		vertices	(NUM_VERTICES);
+
+	// 1st triangle
+	vertices[START_VERTEX + 0] = tcu::Vec4( 0.5,  0.5, 0.5, 1.0);
+	vertices[START_VERTEX + 1] = tcu::Vec4( 0.5, -0.5, 0.5, 1.0);
+	vertices[START_VERTEX + 2] = tcu::Vec4(-0.5,  0.5, 0.5, 1.0);
+	// 2nd triangle - partially occluding the scene
+	vertices[START_VERTEX_PARTIALLY_OCCLUDED + 0] = tcu::Vec4(-0.5, -0.5, 1.0, 1.0);
+	vertices[START_VERTEX_PARTIALLY_OCCLUDED + 1] = tcu::Vec4( 0.5, -0.5, 1.0, 1.0);
+	vertices[START_VERTEX_PARTIALLY_OCCLUDED + 2] = tcu::Vec4(-0.5,  0.5, 1.0, 1.0);
+	// 3rd triangle - fully occluding the scene
+	vertices[START_VERTEX_OCCLUDER + 0] = tcu::Vec4( 0.5,  0.5, 1.0, 1.0);
+	vertices[START_VERTEX_OCCLUDER + 1] = tcu::Vec4( 0.5, -0.5, 1.0, 1.0);
+	vertices[START_VERTEX_OCCLUDER + 2] = tcu::Vec4(-0.5,  0.5, 1.0, 1.0);
+
+	m_stateObjects->setVertices(vk, vertices);
+
+	{
+		const vk::VkSubmitInfo submitInfo =
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,							// const void*				pNext;
+			0,									// deUint32					waitSemaphoreCount;
+			DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1,									// deUint32					commandBufferCount;
+			&m_renderCommandBuffer.get(),		// const VkCommandBuffer*	pCommandBuffers;
+			0,									// deUint32					signalSemaphoreCount;
+			DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+		};
+		vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+	}
+
+	if (m_testVector.queryWait == WAIT_QUEUE)
+	{
+		VK_CHECK(vk.queueWaitIdle(queue));
+
+		if (m_testVector.queryResultsMode == RESULTS_MODE_COPY)
+		{
+			// In the WAIT_QUEUE test variant the previously submitted m_renderCommandBuffer did not
+			// contain vkCmdCopyQueryPoolResults, so an additional command buffer is needed.
+
+			// In the case of WAIT_NONE or WAIT_QUERY, vkCmdCopyQueryPoolResults is recorded in m_renderCommandBuffer.
+
+			const vk::VkSubmitInfo submitInfo =
+			{
+				vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+				DE_NULL,							// const void*				pNext;
+				0,									// deUint32					waitSemaphoreCount;
+				DE_NULL,							// const VkSemaphore*		pWaitSemaphores;
+				(const vk::VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+				1,									// deUint32					commandBufferCount;
+				&m_copyResultsCommandBuffer.get(),	// const VkCommandBuffer*	pCommandBuffers;
+				0,									// deUint32					signalSemaphoreCount;
+				DE_NULL								// const VkSemaphore*		pSignalSemaphores;
+			};
+			vk.queueSubmit(queue, 1, &submitInfo, DE_NULL);
+		}
+	}
+
+	if (m_testVector.queryResultsMode == RESULTS_MODE_COPY)
+	{
+		// If vkCmdCopyQueryPoolResults is used, the test must always wait for it
+		// to complete before the result buffer can be read.
+
+		VK_CHECK(vk.queueWaitIdle(queue));
+	}
+
+	deUint64 queryResults		[NUM_QUERIES_IN_POOL];
+	deUint64 queryAvailability	[NUM_QUERIES_IN_POOL];
+
+	// Allow not ready results only if nobody waited before getting the query results
+	bool	allowNotReady		= (m_testVector.queryWait == WAIT_NONE);
+
+	captureResults(queryResults, queryAvailability, allowNotReady);
+
+	log << tcu::TestLog::Section("OcclusionQueryResults", "Occlusion query results");
+
+	logResults(queryResults, queryAvailability);
+	bool passed = validateResults(queryResults, queryAvailability, allowNotReady, m_testVector.primitiveTopology);
+
+	log << tcu::TestLog::EndSection;
+
+	logRenderTarget();
+
+	if (passed)
+	{
+		return tcu::TestStatus(QP_TEST_RESULT_PASS, "Query result verification passed");
+	}
+	return tcu::TestStatus(QP_TEST_RESULT_FAIL, "Query result verification failed");
+}
+
+vk::Move<vk::VkCommandBuffer> OcclusionQueryTestInstance::recordRender (vk::VkCommandPool cmdPool)
+{
+	const vk::VkDevice				device		= m_context.getDevice();
+	const vk::DeviceInterface&		vk			= m_context.getDeviceInterface();
+
+	const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+		DE_NULL,											// const void*				pNext;
+		cmdPool,											// VkCommandPool			commandPool;
+		vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+		1u,													// deUint32					bufferCount;
+	};
+	vk::Move<vk::VkCommandBuffer>	cmdBuffer	(vk::allocateCommandBuffer(vk, device, &cmdBufferAllocateInfo));
+	CmdBufferBeginInfo				beginInfo	(0u);
+
+	vk.beginCommandBuffer(*cmdBuffer, &beginInfo);
+
+	transition2DImage(vk, *cmdBuffer, m_stateObjects->m_colorAttachmentImage->object(), vk::VK_IMAGE_ASPECT_COLOR_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
+	transition2DImage(vk, *cmdBuffer, m_stateObjects->m_DepthImage->object(), vk::VK_IMAGE_ASPECT_DEPTH_BIT, vk::VK_IMAGE_LAYOUT_UNDEFINED, vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL);
+
+	std::vector<vk::VkClearValue>	renderPassClearValues(2);
+	deMemset(&renderPassClearValues[0], 0, static_cast<int>(renderPassClearValues.size()) * sizeof(vk::VkClearValue));
+
+	const vk::VkRect2D renderArea =
+	{
+		{ 0,					0 },
+		{ StateObjects::WIDTH,	StateObjects::HEIGHT }
+	};
+
+	RenderPassBeginInfo renderPassBegin(*m_stateObjects->m_renderPass, *m_stateObjects->m_framebuffer, renderArea, renderPassClearValues);
+
+	vk.cmdResetQueryPool(*cmdBuffer, m_queryPool, 0, NUM_QUERIES_IN_POOL);
+
+	vk.cmdBeginRenderPass(*cmdBuffer, &renderPassBegin, vk::VK_SUBPASS_CONTENTS_INLINE);
+
+	vk.cmdBindPipeline(*cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS,	*m_stateObjects->m_pipeline);
+
+	vk::VkBuffer vertexBuffer = m_stateObjects->m_vertexBuffer->object();
+	const vk::VkDeviceSize vertexBufferOffset = 0;
+	vk.cmdBindVertexBuffers(*cmdBuffer, 0, 1, &vertexBuffer, &vertexBufferOffset);
+
+	// Draw un-occluded geometry
+	vk.cmdBeginQuery(*cmdBuffer, m_queryPool, QUERY_INDEX_CAPTURE_ALL, m_testVector.queryControlFlags);
+	vk.cmdDraw(*cmdBuffer, NUM_VERTICES_IN_DRAWCALL, 1, START_VERTEX, 0);
+	vk.cmdEndQuery(*cmdBuffer, m_queryPool,	QUERY_INDEX_CAPTURE_ALL);
+
+	// Partially occlude geometry
+	vk.cmdDraw(*cmdBuffer, NUM_VERTICES_IN_PARTIALLY_OCCLUDED_DRAWCALL, 1, START_VERTEX_PARTIALLY_OCCLUDED, 0);
+
+	// Draw partially-occluded geometry
+	vk.cmdBeginQuery(*cmdBuffer, m_queryPool, QUERY_INDEX_CAPTURE_PARTIALLY_OCCLUDED, m_testVector.queryControlFlags);
+	vk.cmdDraw(*cmdBuffer, NUM_VERTICES_IN_DRAWCALL, 1, START_VERTEX, 0);
+	vk.cmdEndQuery(*cmdBuffer, m_queryPool, QUERY_INDEX_CAPTURE_PARTIALLY_OCCLUDED);
+
+	// Occlude geometry
+	vk.cmdDraw(*cmdBuffer, NUM_VERTICES_IN_OCCLUDER_DRAWCALL, 1, START_VERTEX_OCCLUDER, 0);
+
+	// Draw occluded geometry
+	vk.cmdBeginQuery(*cmdBuffer, m_queryPool, QUERY_INDEX_CAPTURE_OCCLUDED, m_testVector.queryControlFlags);
+	vk.cmdDraw(*cmdBuffer, NUM_VERTICES_IN_DRAWCALL, 1, START_VERTEX, 0);
+	vk.cmdEndQuery(*cmdBuffer, m_queryPool,	QUERY_INDEX_CAPTURE_OCCLUDED);
+
+	if (m_testVector.queryWait != WAIT_QUEUE)
+	{
+		// For WAIT_QUEUE a separate command buffer with vkCmdCopyQueryPoolResults is submitted later (see recordCopyResults)
+		if (m_testVector.queryResultsMode == RESULTS_MODE_COPY)
+		{
+			vk.cmdCopyQueryPoolResults(*cmdBuffer, m_queryPool, 0, NUM_QUERIES_IN_POOL, m_queryPoolResultsBuffer->object(), /*dstOffset*/ 0, m_testVector.queryResultsStride, m_queryResultFlags);
+		}
+	}
+
+	vk.cmdEndRenderPass(*cmdBuffer);
+
+	transition2DImage(vk, *cmdBuffer, m_stateObjects->m_colorAttachmentImage->object(), vk::VK_IMAGE_ASPECT_COLOR_BIT, vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
+
+	vk.endCommandBuffer(*cmdBuffer);
+
+	return cmdBuffer;
+}
+
+vk::Move<vk::VkCommandBuffer> OcclusionQueryTestInstance::recordCopyResults (vk::VkCommandPool cmdPool)
+{
+	const vk::VkDevice				device		= m_context.getDevice();
+	const vk::DeviceInterface&		vk			= m_context.getDeviceInterface();
+
+	const vk::VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+		DE_NULL,											// const void*				pNext;
+		cmdPool,											// VkCommandPool			commandPool;
+		vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+		1u,													// deUint32					bufferCount;
+	};
+	vk::Move<vk::VkCommandBuffer>	cmdBuffer	(vk::allocateCommandBuffer(vk, device, &cmdBufferAllocateInfo));
+	const CmdBufferBeginInfo		beginInfo	(0u);
+
+	vk.beginCommandBuffer(*cmdBuffer, &beginInfo);
+	vk.cmdCopyQueryPoolResults(*cmdBuffer, m_queryPool, 0, NUM_QUERIES_IN_POOL, m_queryPoolResultsBuffer->object(), /*dstOffset*/ 0, m_testVector.queryResultsStride, m_queryResultFlags);
+	vk.endCommandBuffer(*cmdBuffer);
+
+	return cmdBuffer;
+}
+
+void OcclusionQueryTestInstance::captureResults (deUint64* retResults, deUint64* retAvailability, bool allowNotReady)
+{
+
+	const vk::VkDevice			device			= m_context.getDevice();
+	const vk::DeviceInterface&	vk				= m_context.getDeviceInterface();
+	std::vector<deUint8>		resultsBuffer	(static_cast<size_t>(m_testVector.queryResultsStride) * NUM_QUERIES_IN_POOL);
+
+	if (m_testVector.queryResultsMode == RESULTS_MODE_GET)
+	{
+		const vk::VkResult queryResult = vk.getQueryPoolResults(device, m_queryPool, 0, NUM_QUERIES_IN_POOL, resultsBuffer.size(), &resultsBuffer[0], m_testVector.queryResultsStride, m_queryResultFlags);
+		if (queryResult == vk::VK_NOT_READY && !allowNotReady)
+		{
+			TCU_FAIL("getQueryPoolResults returned VK_NOT_READY, but results should already be available.");
+		}
+		else
+		{
+			VK_CHECK(queryResult);
+		}
+	}
+	else if (m_testVector.queryResultsMode == RESULTS_MODE_COPY)
+	{
+		const vk::Allocation& allocation = m_queryPoolResultsBuffer->getBoundMemory();
+		const void* allocationData = allocation.getHostPtr();
+
+		vk::invalidateMappedMemoryRange(vk, device, allocation.getMemory(), allocation.getOffset(), resultsBuffer.size());
+
+		deMemcpy(&resultsBuffer[0], allocationData, resultsBuffer.size());
+	}
+
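+	// Unpack the result (and the optional availability value) of each query from the raw buffer, honoring the requested stride and result size.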
+	for (int queryNdx = 0; queryNdx < NUM_QUERIES_IN_POOL; queryNdx++)
+	{
+		const void* srcPtr = &resultsBuffer[queryNdx * static_cast<size_t>(m_testVector.queryResultsStride)];
+		if (m_testVector.queryResultSize == RESULT_SIZE_32_BIT)
+		{
+			const deUint32* srcPtrTyped = static_cast<const deUint32*>(srcPtr);
+			retResults[queryNdx]		= *srcPtrTyped;
+			if (m_testVector.queryResultsAvailability)
+			{
+				retAvailability[queryNdx] = *(srcPtrTyped + 1);
+			}
+		}
+		else if (m_testVector.queryResultSize == RESULT_SIZE_64_BIT)
+		{
+			const deUint64* srcPtrTyped = static_cast<const deUint64*>(srcPtr);
+			retResults[queryNdx]		= *srcPtrTyped;
+
+			if (m_testVector.queryResultsAvailability)
+			{
+				retAvailability[queryNdx] = *(srcPtrTyped + 1);
+			}
+		}
+		else
+		{
+			TCU_FAIL("Wrong m_testVector.queryResultSize");
+		}
+	}
+}
+
+void OcclusionQueryTestInstance::logResults (const deUint64* results, const deUint64* availability)
+{
+	tcu::TestLog& log = m_context.getTestContext().getLog();
+
+	for (int ndx = 0; ndx < NUM_QUERIES_IN_POOL; ++ndx)
+	{
+		if (!m_testVector.queryResultsAvailability)
+		{
+			log << tcu::TestLog::Message << "query[ slot == " << ndx << "] result == " << results[ndx] << tcu::TestLog::EndMessage;
+		}
+		else
+		{
+			log << tcu::TestLog::Message << "query[ slot == " << ndx << "] result == " << results[ndx] << ", availability == " << availability[ndx] << tcu::TestLog::EndMessage;
+		}
+	}
+}
+
+bool OcclusionQueryTestInstance::validateResults (const deUint64* results , const deUint64* availability, bool allowUnavailable, vk::VkPrimitiveTopology primitiveTopology)
+{
+	bool passed			= true;
+	tcu::TestLog& log	= m_context.getTestContext().getLog();
+
+	for (int queryNdx = 0; queryNdx < NUM_QUERIES_IN_POOL; ++queryNdx)
+	{
+		deUint64 expectedValueMin = 0;
+		deUint64 expectedValueMax = 0;
+
+		if (m_testVector.queryResultsAvailability && availability[queryNdx] == 0)
+		{
+			// query result was not available
+			if (!allowUnavailable)
+			{
+				log << tcu::TestLog::Message << "query results availability was 0 for index "
+					<< queryNdx << ", expected any value greater than 0." << tcu::TestLog::EndMessage;
+				passed = false;
+				continue;
+			}
+		}
+		else
+		{
+			// query is available, so expect proper result values
+			if (primitiveTopology == vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST)
+			{
+				switch (queryNdx)
+				{
+					case QUERY_INDEX_CAPTURE_OCCLUDED:
+						expectedValueMin = 0;
+						expectedValueMax = 0;
+						break;
+					case QUERY_INDEX_CAPTURE_PARTIALLY_OCCLUDED:
+						expectedValueMin = 1;
+						expectedValueMax = 1;
+						break;
+					case QUERY_INDEX_CAPTURE_ALL:
+						expectedValueMin = NUM_VERTICES_IN_DRAWCALL;
+						expectedValueMax = NUM_VERTICES_IN_DRAWCALL;
+						break;
+				}
+			}
+			else if (primitiveTopology == vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST)
+			{
+				switch (queryNdx)
+				{
+					case QUERY_INDEX_CAPTURE_OCCLUDED:
+						expectedValueMin = 0;
+						expectedValueMax = 0;
+						break;
+					case QUERY_INDEX_CAPTURE_PARTIALLY_OCCLUDED:
+					case QUERY_INDEX_CAPTURE_ALL:
+						{
+							const int primWidth		= StateObjects::WIDTH  / 2;
+							const int primHeight	= StateObjects::HEIGHT / 2;
+							const int primArea		= primWidth * primHeight / 2;
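+							// Allow roughly +/-3% tolerance on the expected number of covered samples.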
+							expectedValueMin		= (int)(0.97f * primArea);
+							expectedValueMax		= (int)(1.03f * primArea);
+						}
+				}
+			}
+			else
+			{
+				TCU_FAIL("Unsupported primitive topology");
+			}
+		}
+
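+		// With VK_QUERY_CONTROL_PRECISE_BIT the implementation must report the exact sample count; without it any non-zero count is acceptable for visible geometry, while fully occluded geometry must still report zero.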
+		if ((m_testVector.queryControlFlags & vk::VK_QUERY_CONTROL_PRECISE_BIT) || (expectedValueMin == 0 && expectedValueMax == 0))
+		{
+			// require precise value
+			if (results[queryNdx] < expectedValueMin || results[queryNdx] > expectedValueMax)
+			{
+				log << tcu::TestLog::Message << "wrong value of query for index "
+					<< queryNdx << ", expected a value between " << expectedValueMin << " and " << expectedValueMax << ", got "
+					<< results[queryNdx] << "." << tcu::TestLog::EndMessage;
+				passed = false;
+			}
+		}
+		else
+		{
+			// require imprecise value greater than 0
+			if (results[queryNdx] == 0)
+			{
+				log << tcu::TestLog::Message << "wrong value of query for index "
+					<< queryNdx << ", expected any non-zero value, got "
+					<< results[queryNdx] << "." << tcu::TestLog::EndMessage;
+				passed = false;
+			}
+		}
+	}
+	return passed;
+}
+
+void OcclusionQueryTestInstance::logRenderTarget (void)
+{
+	tcu::TestLog&			log						= m_context.getTestContext().getLog();
+	const vk::VkQueue		queue					= m_context.getUniversalQueue();
+	const vk::VkOffset3D	zeroOffset				= { 0, 0, 0 };
+	tcu::ConstPixelBufferAccess resultImageAccess	= m_stateObjects->m_colorAttachmentImage->readSurface(
+		queue, m_context.getDefaultAllocator(), vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+		zeroOffset, StateObjects::HEIGHT, StateObjects::WIDTH, vk::VK_IMAGE_ASPECT_COLOR_BIT);
+
+	log << tcu::TestLog::Image("Result", "Result", resultImageAccess);
+}
+
+template<class Instance>
+class QueryPoolOcclusionTest : public vkt::TestCase
+{
+public:
+	QueryPoolOcclusionTest (tcu::TestContext &context, const char *name, const char *description, const OcclusionQueryTestVector& testVector)
+		: TestCase			(context, name, description)
+		, m_testVector		(testVector)
+	{
+	}
+private:
+	vkt::TestInstance* createInstance (vkt::Context& context) const
+	{
+		return new Instance(context, m_testVector);
+	}
+
+	void initPrograms(vk::SourceCollections& programCollection) const
+	{
+		programCollection.glslSources.add("frag") << glu::FragmentSource("#version 400\n"
+																	   "layout(location = 0) out vec4 out_FragColor;\n"
+																	   "void main()\n"
+																	   "{\n"
+																	   "	out_FragColor = vec4(0.07, 0.48, 0.75, 1.0);\n"
+																	   "}\n");
+
+		programCollection.glslSources.add("vert") << glu::VertexSource("#version 430\n"
+																		 "layout(location = 0) in vec4 in_Position;\n"
+																		 "void main() {\n"
+																		 "	gl_Position  = in_Position;\n"
+																		 "	gl_PointSize = 1.0;\n"
+																		 "}\n");
+	}
+
+	OcclusionQueryTestVector m_testVector;
+};
+
+} //anonymous
+
+namespace vkt
+{
+
+namespace QueryPool
+{
+
+QueryPoolOcclusionTests::QueryPoolOcclusionTests (tcu::TestContext &testCtx)
+	: TestCaseGroup(testCtx, "occlusion_query", "Tests for occlusion queries")
+{
+	/* Left blank on purpose */
+}
+
+QueryPoolOcclusionTests::~QueryPoolOcclusionTests (void)
+{
+	/* Left blank on purpose */
+}
+
+void QueryPoolOcclusionTests::init (void)
+{
+	OcclusionQueryTestVector baseTestVector;
+	baseTestVector.queryControlFlags		= 0;
+	baseTestVector.queryResultSize			= RESULT_SIZE_64_BIT;
+	baseTestVector.queryWait				= WAIT_QUEUE;
+	baseTestVector.queryResultsMode			= RESULTS_MODE_GET;
+	baseTestVector.queryResultsStride		= sizeof(deUint64);
+	baseTestVector.queryResultsAvailability = false;
+	baseTestVector.primitiveTopology		= vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
+
+	//Basic tests
+	{
+		OcclusionQueryTestVector testVector = baseTestVector;
+		testVector.queryControlFlags = 0;
+		addChild(new QueryPoolOcclusionTest<BasicOcclusionQueryTestInstance>(m_testCtx,	"basic_conservative",	"draw with conservative occlusion query",	testVector));
+		testVector.queryControlFlags = vk::VK_QUERY_CONTROL_PRECISE_BIT;
+		addChild(new QueryPoolOcclusionTest<BasicOcclusionQueryTestInstance>(m_testCtx,	"basic_precise",		"draw with precise occlusion query",		testVector));
+	}
+
+	// Functional test
+	{
+		vk::VkQueryControlFlags	controlFlags[]		= { 0,					vk::VK_QUERY_CONTROL_PRECISE_BIT	};
+		const char*				controlFlagsStr[]	= { "conservative",		"precise"							};
+
+		for (int controlFlagIdx = 0; controlFlagIdx < DE_LENGTH_OF_ARRAY(controlFlags); ++controlFlagIdx)
+		{
+
+			vk::VkPrimitiveTopology	primitiveTopology[]		= { vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST };
+			const char*				primitiveTopologyStr[]	= { "points", "triangles" };
+			for (int primitiveTopologyIdx = 0; primitiveTopologyIdx < DE_LENGTH_OF_ARRAY(primitiveTopology); ++primitiveTopologyIdx)
+			{
+
+				OcclusionQueryResultSize	resultSize[]	= { RESULT_SIZE_32_BIT, RESULT_SIZE_64_BIT };
+				const char*					resultSizeStr[] = { "32",				"64" };
+
+				for (int resultSizeIdx = 0; resultSizeIdx < DE_LENGTH_OF_ARRAY(resultSize); ++resultSizeIdx)
+				{
+
+					OcclusionQueryWait	wait[]		= { WAIT_QUEUE, WAIT_QUERY };
+					const char*			waitStr[]	= { "queue",	"query" };
+
+					for (int waitIdx = 0; waitIdx < DE_LENGTH_OF_ARRAY(wait); ++waitIdx)
+					{
+						OcclusionQueryResultsMode	resultsMode[]		= { RESULTS_MODE_GET,	RESULTS_MODE_COPY };
+						const char*					resultsModeStr[]	= { "get",				"copy" };
+
+						for (int resultsModeIdx = 0; resultsModeIdx < DE_LENGTH_OF_ARRAY(resultsMode); ++resultsModeIdx)
+						{
+
+							bool testAvailability[]				= { false, true };
+							const char* testAvailabilityStr[]	= { "without", "with"};
+
+							for (int testAvailabilityIdx = 0; testAvailabilityIdx < DE_LENGTH_OF_ARRAY(testAvailability); ++testAvailabilityIdx)
+							{
+								OcclusionQueryTestVector testVector			= baseTestVector;
+								testVector.queryControlFlags				= controlFlags[controlFlagIdx];
+								testVector.queryResultSize					= resultSize[resultSizeIdx];
+								testVector.queryWait						= wait[waitIdx];
+								testVector.queryResultsMode					= resultsMode[resultsModeIdx];
+								testVector.queryResultsStride				= (testVector.queryResultSize == RESULT_SIZE_32_BIT ? sizeof(deUint32) : sizeof(deUint64));
+								testVector.queryResultsAvailability			= testAvailability[testAvailabilityIdx];
+								testVector.primitiveTopology				= primitiveTopology[primitiveTopologyIdx];
+
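+								// When availability is requested each query writes two values (result + availability), so the stride must be doubled.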
+								if (testVector.queryResultsAvailability)
+								{
+									testVector.queryResultsStride *= 2;
+								}
+
+								std::ostringstream testName;
+								std::ostringstream testDescr;
+
+								testName << resultsModeStr[resultsModeIdx] << "_results"
+										 << "_" << controlFlagsStr[controlFlagIdx]
+										 << "_size_" << resultSizeStr[resultSizeIdx]
+										 << "_wait_" << waitStr[waitIdx]
+										 << "_" << testAvailabilityStr[testAvailabilityIdx] << "_availability"
+										 << "_draw_" <<  primitiveTopologyStr[primitiveTopologyIdx];
+
+								testDescr << "draw occluded " << primitiveTopologyStr[primitiveTopologyIdx]
+										  << " with " << controlFlagsStr[controlFlagIdx] << ", "
+										  << resultsModeStr[resultsModeIdx] << " results "
+										  << testAvailabilityStr[testAvailabilityIdx] << " availability bit as "
+										  << resultSizeStr[resultSizeIdx] << "-bit variables, "
+										  << "wait for results on " << waitStr[waitIdx];
+
+								addChild(new QueryPoolOcclusionTest<OcclusionQueryTestInstance>(m_testCtx, testName.str().c_str(), testDescr.str().c_str(), testVector));
+							}
+						}
+					}
+				}
+			}
+		}
+	}
+	// Test different strides
+	{
+		OcclusionQueryResultsMode	resultsMode[]		= { RESULTS_MODE_GET,	RESULTS_MODE_COPY	};
+		const char*					resultsModeStr[]	= { "get",				"copy"				};
+
+		for (int resultsModeIdx = 0; resultsModeIdx < DE_LENGTH_OF_ARRAY(resultsMode); ++resultsModeIdx)
+		{
+			OcclusionQueryResultSize	resultSizes[]	= { RESULT_SIZE_32_BIT, RESULT_SIZE_64_BIT };
+			const char*					resultSizeStr[] = { "32", "64" };
+
+			bool testAvailability[]				= { false,		true	};
+			const char* testAvailabilityStr[]	= { "without",	"with"	};
+
+			for (int testAvailabilityIdx = 0; testAvailabilityIdx < DE_LENGTH_OF_ARRAY(testAvailability); ++testAvailabilityIdx)
+			{
+				for (int resultSizeIdx = 0; resultSizeIdx < DE_LENGTH_OF_ARRAY(resultSizes); ++resultSizeIdx)
+				{
+					const vk::VkDeviceSize resultSize	= (resultSizes[resultSizeIdx] == RESULT_SIZE_32_BIT ? sizeof(deUint32) : sizeof(deUint64));
+
+					// \todo [2015-12-18 scygan] Ensure only stride values aligned to resultSize are allowed. Otherwise test should be extended.
+					const vk::VkDeviceSize strides[]	=
+					{
+						1 * resultSize,
+						2 * resultSize,
+						3 * resultSize,
+						4 * resultSize,
+						5 * resultSize,
+						13 * resultSize,
+						1024 * resultSize
+					};
+
+					for (int strideIdx = 0; strideIdx < DE_LENGTH_OF_ARRAY(strides); strideIdx++)
+					{
+						OcclusionQueryTestVector testVector		= baseTestVector;
+						testVector.queryResultsMode				= resultsMode[resultsModeIdx];
+						testVector.queryResultSize				= resultSizes[resultSizeIdx];
+						testVector.queryResultsAvailability		= testAvailability[testAvailabilityIdx];
+						testVector.queryResultsStride			= strides[strideIdx];
+
+						const vk::VkDeviceSize elementSize		= (testVector.queryResultsAvailability ? resultSize * 2 : resultSize);
+
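+						// Skip strides that are too small to hold a full result element (result plus optional availability value).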
+						if (elementSize > testVector.queryResultsStride)
+						{
+							continue;
+						}
+
+						std::ostringstream testName;
+						std::ostringstream testDescr;
+
+						testName << resultsModeStr[resultsModeIdx]
+								 << "_results_size_" << resultSizeStr[resultSizeIdx]
+								 << "_stride_" << strides[strideIdx]
+								 << "_" << testAvailabilityStr[testAvailabilityIdx] << "_availability";
+
+						testDescr << resultsModeStr[resultsModeIdx] << " results "
+								  << testAvailabilityStr[testAvailabilityIdx] << " availability bit as "
+								  << resultSizeStr[resultSizeIdx] << "-bit variables, with stride " << strides[strideIdx];
+
+						addChild(new QueryPoolOcclusionTest<OcclusionQueryTestInstance>(m_testCtx, testName.str().c_str(), testDescr.str().c_str(), testVector));
+					}
+				}
+			}
+		}
+
+	}
+}
+
+} //QueryPool
+} //vkt
+
diff --git a/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolOcclusionTests.hpp b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolOcclusionTests.hpp
new file mode 100644
index 0000000..02f16d2
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolOcclusionTests.hpp
@@ -0,0 +1,60 @@
+#ifndef _VKTQUERYPOOLOCCLUSIONTESTS_HPP
+#define _VKTQUERYPOOLOCCLUSIONTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Occlusion Query Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace QueryPool
+{
+
+class QueryPoolOcclusionTests : public tcu::TestCaseGroup
+{
+public:
+	QueryPoolOcclusionTests		(tcu::TestContext &testCtx);
+	~QueryPoolOcclusionTests	(void);
+	void init					(void);
+
+private:
+	QueryPoolOcclusionTests					(const QueryPoolOcclusionTests &other);
+	QueryPoolOcclusionTests&	operator=	(const QueryPoolOcclusionTests &other);
+};
+
+} // QueryPool
+} // vkt
+
+#endif // _VKTQUERYPOOLOCCLUSIONTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolTests.cpp b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolTests.cpp
new file mode 100644
index 0000000..b480b08
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolTests.cpp
@@ -0,0 +1,64 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Query Pool Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktQueryPoolTests.hpp"
+
+#include "vktTestGroupUtil.hpp"
+#include "vktQueryPoolOcclusionTests.hpp"
+
+namespace vkt
+{
+namespace QueryPool
+{
+
+namespace
+{
+
+void createChildren (tcu::TestCaseGroup* queryPoolTests)
+{
+	tcu::TestContext&	testCtx		= queryPoolTests->getTestContext();
+
+	queryPoolTests->addChild(new QueryPoolOcclusionTests(testCtx));
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx)
+{
+	return createTestGroup(testCtx, "query_pool", "query pool tests", createChildren);
+}
+
+} // QueryPool
+} // vkt
\ No newline at end of file
diff --git a/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolTests.hpp b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolTests.hpp
new file mode 100644
index 0000000..4d8b836
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/query_pool/vktQueryPoolTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTQUERYPOOLTESTS_HPP
+#define _VKTQUERYPOOLTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Query Pool Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace QueryPool
+{
+
+tcu::TestCaseGroup*		createTests			(tcu::TestContext& testCtx);
+
+} // QueryPool
+} // vkt
+
+#endif // _VKTQUERYPOOLTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/CMakeLists.txt b/external/vulkancts/modules/vulkan/shaderexecutor/CMakeLists.txt
new file mode 100644
index 0000000..6f79231
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/CMakeLists.txt
@@ -0,0 +1,27 @@
+include_directories(..)
+
+set(DEQP_VK_SHADEREXECUTOR_SRCS
+   vktShaderExecutor.cpp
+   vktShaderExecutor.hpp
+   vktShaderBuiltinTests.cpp
+   vktShaderBuiltinTests.hpp
+   vktShaderCommonFunctionTests.cpp
+   vktShaderCommonFunctionTests.hpp
+   vktShaderIntegerFunctionTests.cpp
+   vktShaderIntegerFunctionTests.hpp
+   vktShaderPackingFunctionTests.cpp
+   vktShaderPackingFunctionTests.hpp
+   vktShaderBuiltinPrecisionTests.cpp
+   vktShaderBuiltinPrecisionTests.hpp
+   vktOpaqueTypeIndexingTests.cpp
+   vktOpaqueTypeIndexingTests.hpp
+)
+
+set(DEQP_VK_SHADEREXECUTOR_LIBS
+   deqp-vk-common
+   tcutil
+   vkutil
+)
+
+add_library(deqp-vk-shaderexecutor STATIC ${DEQP_VK_SHADEREXECUTOR_SRCS})
+target_link_libraries(deqp-vk-shaderexecutor ${DEQP_VK_SHADEREXECUTOR_LIBS})
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktOpaqueTypeIndexingTests.cpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktOpaqueTypeIndexingTests.cpp
new file mode 100644
index 0000000..72a6983
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktOpaqueTypeIndexingTests.cpp
@@ -0,0 +1,1475 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Opaque type (sampler, buffer, atomic counter, ...) indexing tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktOpaqueTypeIndexingTests.hpp"
+
+#include "tcuTexture.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuVectorUtil.hpp"
+#include "tcuTextureUtil.hpp"
+
+#include "deStringUtil.hpp"
+#include "deRandom.hpp"
+
+#include "vktShaderExecutor.hpp"
+
+#include <sstream>
+
+namespace vkt
+{
+namespace shaderexecutor
+{
+
+namespace
+{
+
+enum IndexExprType
+{
+	INDEX_EXPR_TYPE_CONST_LITERAL	= 0,
+	INDEX_EXPR_TYPE_CONST_EXPRESSION,
+	INDEX_EXPR_TYPE_UNIFORM,
+	INDEX_EXPR_TYPE_DYNAMIC_UNIFORM,
+
+	INDEX_EXPR_TYPE_LAST
+};
+
+enum TextureType
+{
+	TEXTURE_TYPE_1D = 0,
+	TEXTURE_TYPE_2D,
+	TEXTURE_TYPE_CUBE,
+	TEXTURE_TYPE_2D_ARRAY,
+	TEXTURE_TYPE_3D,
+
+	TEXTURE_TYPE_LAST
+};
+
+class OpaqueTypeIndexingCase : public TestCase
+{
+public:
+										OpaqueTypeIndexingCase		(tcu::TestContext&			testCtx,
+																	 const char*				name,
+																	 const char*				description,
+																	 const glu::ShaderType		shaderType,
+																	 const IndexExprType		indexExprType);
+	virtual								~OpaqueTypeIndexingCase		(void);
+	virtual void						initPrograms				(vk::SourceCollections& programCollection) const
+										{
+											m_executor->setShaderSources(programCollection);
+										}
+	virtual TestInstance*				createInstance				(Context& context) const = 0;
+	void								init						(void);
+
+protected:
+	const char*							m_name;
+	const glu::ShaderType				m_shaderType;
+	const IndexExprType					m_indexExprType;
+	ShaderSpec							m_shaderSpec;
+	de::MovePtr<ShaderExecutor>			m_executor;
+	UniformSetup*						m_uniformSetup;
+};
+
+class OpaqueTypeIndexingTestInstance : public TestInstance
+{
+public:
+										OpaqueTypeIndexingTestInstance		(Context&					context,
+																			 const glu::ShaderType		shaderType,
+																			 const ShaderSpec&			shaderSpec,
+																			 ShaderExecutor&			executor,
+																			 const char*				name,
+																			 UniformSetup*				uniformSetup,
+																			 const IndexExprType		indexExprType);
+	virtual								~OpaqueTypeIndexingTestInstance		(void);
+
+	virtual tcu::TestStatus				iterate								(void) = 0;
+
+protected:
+	void								checkSupported						(const VkDescriptorType descriptorType);
+
+protected:
+	tcu::TestContext&					m_testCtx;
+	const glu::ShaderType				m_shaderType;
+	const ShaderSpec&					m_shaderSpec;
+	const char*							m_name;
+	const IndexExprType					m_indexExprType;
+	ShaderExecutor&						m_executor;
+	UniformSetup*						m_uniformSetup;
+};
+
+OpaqueTypeIndexingCase::OpaqueTypeIndexingCase (tcu::TestContext&			testCtx,
+												const char*					name,
+												const char*					description,
+												const glu::ShaderType		shaderType,
+												const IndexExprType			indexExprType)
+	: TestCase			(testCtx, name, description)
+	, m_name			(name)
+	, m_shaderType		(shaderType)
+	, m_indexExprType	(indexExprType)
+	, m_executor		(DE_NULL)
+	, m_uniformSetup	(new UniformSetup())
+{
+}
+
+OpaqueTypeIndexingCase::~OpaqueTypeIndexingCase (void)
+{
+}
+
+void OpaqueTypeIndexingCase::init (void)
+{
+	DE_ASSERT(!m_executor);
+
+	m_executor = de::MovePtr<ShaderExecutor>(createExecutor(m_shaderType, m_shaderSpec));
+	m_testCtx.getLog() << *m_executor;
+}
+
+OpaqueTypeIndexingTestInstance::OpaqueTypeIndexingTestInstance (Context&					context,
+																const glu::ShaderType		shaderType,
+																const ShaderSpec&			shaderSpec,
+																ShaderExecutor&				executor,
+																const char*					name,
+																UniformSetup*				uniformSetup,
+																const IndexExprType			indexExprType)
+	: TestInstance		(context)
+	, m_testCtx			(context.getTestContext())
+	, m_shaderType		(shaderType)
+	, m_shaderSpec		(shaderSpec)
+	, m_name			(name)
+	, m_indexExprType	(indexExprType)
+	, m_executor		(executor)
+	, m_uniformSetup	(uniformSetup)
+{
+}
+
+OpaqueTypeIndexingTestInstance::~OpaqueTypeIndexingTestInstance (void)
+{
+}
+
+void OpaqueTypeIndexingTestInstance::checkSupported (const VkDescriptorType descriptorType)
+{
+	const VkPhysicalDeviceFeatures& deviceFeatures = m_context.getDeviceFeatures();
+
+	if (m_indexExprType != INDEX_EXPR_TYPE_CONST_LITERAL && m_indexExprType != INDEX_EXPR_TYPE_CONST_EXPRESSION)
+	{
+		switch (descriptorType)
+		{
+			case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
+				if (!deviceFeatures.shaderSampledImageArrayDynamicIndexing)
+					TCU_THROW(NotSupportedError, "Dynamic indexing of sampler arrays is not supported");
+				break;
+
+			case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
+				if (!deviceFeatures.shaderUniformBufferArrayDynamicIndexing)
+					TCU_THROW(NotSupportedError, "Dynamic indexing of uniform buffer arrays is not supported");
+				break;
+
+			case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
+				if (!deviceFeatures.shaderStorageBufferArrayDynamicIndexing)
+					TCU_THROW(NotSupportedError, "Dynamic indexing of storage buffer arrays is not supported");
+				break;
+
+			default:
+				break;
+		}
+	}
+}
+
+static deUint32 getFirstFreeBindingLocation (const glu::ShaderType shaderType)
+{
+	deUint32 location;
+
+	switch (shaderType)
+	{
+		case glu::SHADERTYPE_TESSELLATION_CONTROL:
+		case glu::SHADERTYPE_TESSELLATION_EVALUATION:
+		case glu::SHADERTYPE_COMPUTE:
+			// 0 - input buffer
+			// 1 - output buffer
+			location = 2u;
+			break;
+
+		default:
+			location = 0u;
+			break;
+	}
+
+	return location;
+}
+
+static void declareUniformIndexVars (std::ostream& str, const char* varPrefix, int numVars, deUint32& bindingLocation)
+{
+	for (int varNdx = 0; varNdx < numVars; varNdx++)
+		str << "layout(set = 0, binding = " << bindingLocation++ << ") uniform buf" << varNdx << " { highp int " << varPrefix << varNdx << "; }" << ";\n";
+}
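+
+// Illustrative only (hypothetical values): with varPrefix "index", numVars = 2 and an
+// initial bindingLocation of 3, declareUniformIndexVars() above appends declarations of
+// the form:
+//
+//   layout(set = 0, binding = 3) uniform buf0 { highp int index0; };
+//   layout(set = 0, binding = 4) uniform buf1 { highp int index1; };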
+
+static void uploadUniformIndices (UniformSetup* uniformSetup, int numIndices, const int* indices, deUint32& bindingLocation)
+{
+	for (int varNdx = 0; varNdx < numIndices; varNdx++)
+		uniformSetup->addData(new UniformData<int>(bindingLocation++, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, indices[varNdx]));
+}
+
+static TextureType getTextureType (glu::DataType samplerType)
+{
+	switch (samplerType)
+	{
+		case glu::TYPE_SAMPLER_1D:
+		case glu::TYPE_INT_SAMPLER_1D:
+		case glu::TYPE_UINT_SAMPLER_1D:
+		case glu::TYPE_SAMPLER_1D_SHADOW:
+			return TEXTURE_TYPE_1D;
+
+		case glu::TYPE_SAMPLER_2D:
+		case glu::TYPE_INT_SAMPLER_2D:
+		case glu::TYPE_UINT_SAMPLER_2D:
+		case glu::TYPE_SAMPLER_2D_SHADOW:
+			return TEXTURE_TYPE_2D;
+
+		case glu::TYPE_SAMPLER_CUBE:
+		case glu::TYPE_INT_SAMPLER_CUBE:
+		case glu::TYPE_UINT_SAMPLER_CUBE:
+		case glu::TYPE_SAMPLER_CUBE_SHADOW:
+			return TEXTURE_TYPE_CUBE;
+
+		case glu::TYPE_SAMPLER_2D_ARRAY:
+		case glu::TYPE_INT_SAMPLER_2D_ARRAY:
+		case glu::TYPE_UINT_SAMPLER_2D_ARRAY:
+		case glu::TYPE_SAMPLER_2D_ARRAY_SHADOW:
+			return TEXTURE_TYPE_2D_ARRAY;
+
+		case glu::TYPE_SAMPLER_3D:
+		case glu::TYPE_INT_SAMPLER_3D:
+		case glu::TYPE_UINT_SAMPLER_3D:
+			return TEXTURE_TYPE_3D;
+
+		default:
+			throw tcu::InternalError("Invalid sampler type");
+	}
+}
+
+static bool isShadowSampler (glu::DataType samplerType)
+{
+	return samplerType == glu::TYPE_SAMPLER_1D_SHADOW		||
+		   samplerType == glu::TYPE_SAMPLER_2D_SHADOW		||
+		   samplerType == glu::TYPE_SAMPLER_2D_ARRAY_SHADOW	||
+		   samplerType == glu::TYPE_SAMPLER_CUBE_SHADOW;
+}
+
+static glu::DataType getSamplerOutputType (glu::DataType samplerType)
+{
+	switch (samplerType)
+	{
+		case glu::TYPE_SAMPLER_1D:
+		case glu::TYPE_SAMPLER_2D:
+		case glu::TYPE_SAMPLER_CUBE:
+		case glu::TYPE_SAMPLER_2D_ARRAY:
+		case glu::TYPE_SAMPLER_3D:
+			return glu::TYPE_FLOAT_VEC4;
+
+		case glu::TYPE_SAMPLER_1D_SHADOW:
+		case glu::TYPE_SAMPLER_2D_SHADOW:
+		case glu::TYPE_SAMPLER_CUBE_SHADOW:
+		case glu::TYPE_SAMPLER_2D_ARRAY_SHADOW:
+			return glu::TYPE_FLOAT;
+
+		case glu::TYPE_INT_SAMPLER_1D:
+		case glu::TYPE_INT_SAMPLER_2D:
+		case glu::TYPE_INT_SAMPLER_CUBE:
+		case glu::TYPE_INT_SAMPLER_2D_ARRAY:
+		case glu::TYPE_INT_SAMPLER_3D:
+			return glu::TYPE_INT_VEC4;
+
+		case glu::TYPE_UINT_SAMPLER_1D:
+		case glu::TYPE_UINT_SAMPLER_2D:
+		case glu::TYPE_UINT_SAMPLER_CUBE:
+		case glu::TYPE_UINT_SAMPLER_2D_ARRAY:
+		case glu::TYPE_UINT_SAMPLER_3D:
+			return glu::TYPE_UINT_VEC4;
+
+		default:
+			throw tcu::InternalError("Invalid sampler type");
+	}
+}
+
+static tcu::TextureFormat getSamplerTextureFormat (glu::DataType samplerType)
+{
+	const glu::DataType		outType			= getSamplerOutputType(samplerType);
+	const glu::DataType		outScalarType	= glu::getDataTypeScalarType(outType);
+
+	switch (outScalarType)
+	{
+		case glu::TYPE_FLOAT:
+			if (isShadowSampler(samplerType))
+				return tcu::TextureFormat(tcu::TextureFormat::D, tcu::TextureFormat::UNORM_INT16);
+			else
+				return tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8);
+
+		case glu::TYPE_INT:		return tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::SIGNED_INT8);
+		case glu::TYPE_UINT:	return tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNSIGNED_INT8);
+
+		default:
+			throw tcu::InternalError("Invalid sampler type");
+	}
+}
+
+static glu::DataType getSamplerCoordType (glu::DataType samplerType)
+{
+	const TextureType	texType		= getTextureType(samplerType);
+	int					numCoords	= 0;
+
+	switch (texType)
+	{
+		case TEXTURE_TYPE_1D:		numCoords = 1;	break;
+		case TEXTURE_TYPE_2D:		numCoords = 2;	break;
+		case TEXTURE_TYPE_2D_ARRAY:	numCoords = 3;	break;
+		case TEXTURE_TYPE_CUBE:		numCoords = 3;	break;
+		case TEXTURE_TYPE_3D:		numCoords = 3;	break;
+		default:
+			DE_ASSERT(false);
+	}
+
+	if (isShadowSampler(samplerType))
+		numCoords += 1;
+
+	DE_ASSERT(de::inRange(numCoords, 1, 4));
+
+	return numCoords == 1 ? glu::TYPE_FLOAT : glu::getDataTypeFloatVec(numCoords);
+}
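+
+// For example, sampler2DShadow maps to TEXTURE_TYPE_2D (two coordinate components) plus one
+// compare component, so getSamplerCoordType() returns glu::TYPE_FLOAT_VEC3.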
+
+static void fillTextureData (const tcu::PixelBufferAccess& access, de::Random& rnd)
+{
+	DE_ASSERT(access.getHeight() == 1 && access.getDepth() == 1);
+
+	if (access.getFormat().order == tcu::TextureFormat::D)
+	{
+		// \note Texture uses odd values, lookup even values to avoid precision issues.
+		const float values[] = { 0.1f, 0.3f, 0.5f, 0.7f, 0.9f };
+
+		for (int ndx = 0; ndx < access.getWidth(); ndx++)
+			access.setPixDepth(rnd.choose<float>(DE_ARRAY_BEGIN(values), DE_ARRAY_END(values)), ndx, 0);
+	}
+	else
+	{
+		TCU_CHECK_INTERNAL(access.getFormat().order == tcu::TextureFormat::RGBA && access.getFormat().getPixelSize() == 4);
+
+		for (int ndx = 0; ndx < access.getWidth(); ndx++)
+			*((deUint32*)access.getDataPtr() + ndx) = rnd.getUint32();
+	}
+}
+
+static vk::VkImageType getVkImageType (TextureType texType)
+{
+	switch (texType)
+	{
+		case TEXTURE_TYPE_1D:			return vk::VK_IMAGE_TYPE_1D;
+		case TEXTURE_TYPE_2D:
+		case TEXTURE_TYPE_2D_ARRAY:		return vk::VK_IMAGE_TYPE_2D;
+		case TEXTURE_TYPE_CUBE:			return vk::VK_IMAGE_TYPE_2D;
+		case TEXTURE_TYPE_3D:			return vk::VK_IMAGE_TYPE_3D;
+		default:
+			DE_FATAL("Impossible");
+			return (vk::VkImageType)0;
+	}
+}
+
+static vk::VkImageViewType getVkImageViewType (TextureType texType)
+{
+	switch (texType)
+	{
+		case TEXTURE_TYPE_1D:			return vk::VK_IMAGE_VIEW_TYPE_1D;
+		case TEXTURE_TYPE_2D:			return vk::VK_IMAGE_VIEW_TYPE_2D;
+		case TEXTURE_TYPE_2D_ARRAY:		return vk::VK_IMAGE_VIEW_TYPE_2D_ARRAY;
+		case TEXTURE_TYPE_CUBE:			return vk::VK_IMAGE_VIEW_TYPE_CUBE;
+		case TEXTURE_TYPE_3D:			return vk::VK_IMAGE_VIEW_TYPE_3D;
+		default:
+			DE_FATAL("Impossible");
+			return (vk::VkImageViewType)0;
+	}
+}
+
+// SamplerIndexingCaseInstance
+
+class SamplerIndexingCaseInstance : public OpaqueTypeIndexingTestInstance
+{
+public:
+	enum
+	{
+		NUM_INVOCATIONS		= 64,
+		NUM_SAMPLERS		= 8,
+		NUM_LOOKUPS			= 4
+	};
+
+								SamplerIndexingCaseInstance		(Context&					context,
+																 const glu::ShaderType		shaderType,
+																 const ShaderSpec&			shaderSpec,
+																 ShaderExecutor&			executor,
+																 const char*				name,
+																 glu::DataType				samplerType,
+																 const IndexExprType		indexExprType,
+																 UniformSetup*				uniformSetup,
+																 const std::vector<int>&	lookupIndices);
+	virtual						~SamplerIndexingCaseInstance	(void);
+
+	virtual tcu::TestStatus		iterate							(void);
+
+protected:
+	const glu::DataType			m_samplerType;
+	const std::vector<int>&		m_lookupIndices;
+};
+
+SamplerIndexingCaseInstance::SamplerIndexingCaseInstance (Context&						context,
+														  const glu::ShaderType			shaderType,
+														  const ShaderSpec&				shaderSpec,
+														  ShaderExecutor&				executor,
+														  const char*					name,
+														  glu::DataType					samplerType,
+														  const IndexExprType			indexExprType,
+														  UniformSetup*					uniformSetup,
+														  const std::vector<int>&		lookupIndices)
+	: OpaqueTypeIndexingTestInstance	(context, shaderType, shaderSpec, executor, name, uniformSetup, indexExprType)
+	, m_samplerType						(samplerType)
+	, m_lookupIndices					(lookupIndices)
+{
+}
+
+SamplerIndexingCaseInstance::~SamplerIndexingCaseInstance (void)
+{
+}
+
+bool isIntegerFormat (const tcu::TextureFormat& format)
+{
+	const tcu::TextureChannelClass	chnClass	= tcu::getTextureChannelClass(format.type);
+
+	return chnClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER ||
+		   chnClass == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER;
+}
+
+tcu::TestStatus SamplerIndexingCaseInstance::iterate (void)
+{
+	const int						numInvocations		= SamplerIndexingCaseInstance::NUM_INVOCATIONS;
+	const int						numSamplers			= SamplerIndexingCaseInstance::NUM_SAMPLERS;
+	const int						numLookups			= SamplerIndexingCaseInstance::NUM_LOOKUPS;
+	const glu::DataType				coordType			= getSamplerCoordType(m_samplerType);
+	const glu::DataType				outputType			= getSamplerOutputType(m_samplerType);
+	const tcu::TextureFormat		texFormat			= getSamplerTextureFormat(m_samplerType);
+	const int						outLookupStride		= numInvocations*getDataTypeScalarSize(outputType);
+	std::vector<float>				coords;
+	std::vector<deUint32>			outData;
+	std::vector<deUint8>			texData				(numSamplers * texFormat.getPixelSize());
+	const tcu::PixelBufferAccess	refTexAccess		(texFormat, numSamplers, 1, 1, &texData[0]);
+	de::Random						rnd					(deInt32Hash(m_samplerType) ^ deInt32Hash(m_shaderType) ^ deInt32Hash(m_indexExprType));
+	const TextureType				texType				= getTextureType(m_samplerType);
+	const vk::VkImageType			imageType			= getVkImageType(texType);
+	const vk::VkImageViewType		imageViewType		= getVkImageViewType(texType);
+	const tcu::Sampler::FilterMode	filterMode			= (isShadowSampler(m_samplerType) || isIntegerFormat(texFormat)) ? tcu::Sampler::NEAREST : tcu::Sampler::LINEAR;
+	const tcu::Sampler				refSampler			= isShadowSampler(m_samplerType)
+																? tcu::Sampler(tcu::Sampler::CLAMP_TO_EDGE, tcu::Sampler::CLAMP_TO_EDGE, tcu::Sampler::CLAMP_TO_EDGE,
+																				filterMode, filterMode, 0.0f, false /* non-normalized */,
+																				tcu::Sampler::COMPAREMODE_LESS)
+																: tcu::Sampler(tcu::Sampler::CLAMP_TO_EDGE, tcu::Sampler::CLAMP_TO_EDGE, tcu::Sampler::CLAMP_TO_EDGE,
+																				filterMode, filterMode);
+
+	checkSupported(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
+
+	coords.resize(numInvocations * getDataTypeScalarSize(coordType));
+
+	if (texType == TEXTURE_TYPE_CUBE)
+	{
+		if (isShadowSampler(m_samplerType))
+		{
+			for (size_t i = 0; i < coords.size() / 4; i++)
+			{
+				coords[4 * i] = 1.0f;
+				coords[4 * i + 1] = coords[4 * i + 2] = coords[4 * i + 3] = 0.0f;
+			}
+		}
+		else
+		{
+			for (size_t i = 0; i < coords.size() / 3; i++)
+			{
+				coords[3 * i] = 1.0f;
+				coords[3 * i + 1] = coords[3 * i + 2] = 0.0f;
+			}
+		}
+	}
+
+	if (isShadowSampler(m_samplerType))
+	{
+		// Use different comparison value per invocation.
+		// \note Texture uses odd values, comparison even values.
+		const int	numCoordComps	= getDataTypeScalarSize(coordType);
+		const float	cmpValues[]		= { 0.0f, 0.2f, 0.4f, 0.6f, 0.8f, 1.0f };
+
+		for (int invocationNdx = 0; invocationNdx < numInvocations; invocationNdx++)
+			coords[invocationNdx*numCoordComps + (numCoordComps-1)] = rnd.choose<float>(DE_ARRAY_BEGIN(cmpValues), DE_ARRAY_END(cmpValues));
+	}
+
+	fillTextureData(refTexAccess, rnd);
+
+	outData.resize(numLookups*outLookupStride);
+
+	{
+		std::vector<void*>		inputs;
+		std::vector<void*>		outputs;
+		std::vector<int>		expandedIndices;
+		deUint32				bindingLocation		= getFirstFreeBindingLocation(m_shaderType);
+
+		inputs.push_back(&coords[0]);
+
+		m_uniformSetup->addData(new SamplerUniformData(bindingLocation++, (deUint32)numSamplers, refSampler, texFormat, tcu::IVec3(1, 1, 1), imageType, imageViewType, &texData[0]));
+
+		if (m_indexExprType == INDEX_EXPR_TYPE_DYNAMIC_UNIFORM)
+		{
+			expandedIndices.resize(numInvocations * m_lookupIndices.size());
+			for (int lookupNdx = 0; lookupNdx < numLookups; lookupNdx++)
+			{
+				for (int invNdx = 0; invNdx < numInvocations; invNdx++)
+					expandedIndices[lookupNdx*numInvocations + invNdx] = m_lookupIndices[lookupNdx];
+			}
+
+			for (int lookupNdx = 0; lookupNdx < numLookups; lookupNdx++)
+				inputs.push_back(&expandedIndices[lookupNdx*numInvocations]);
+		}
+		else if (m_indexExprType == INDEX_EXPR_TYPE_UNIFORM)
+			uploadUniformIndices(m_uniformSetup, numLookups, &m_lookupIndices[0], bindingLocation);
+
+		for (int lookupNdx = 0; lookupNdx < numLookups; lookupNdx++)
+			outputs.push_back(&outData[outLookupStride*lookupNdx]);
+
+		m_executor.setUniforms(m_uniformSetup);
+
+		m_executor.execute(m_context, numInvocations, &inputs[0], &outputs[0]);
+	}
+
+	{
+		tcu::TestLog&		log				= m_context.getTestContext().getLog();
+		tcu::TestStatus		testResult		= tcu::TestStatus::pass("Pass");
+
+		if (isShadowSampler(m_samplerType))
+		{
+			const int			numCoordComps	= getDataTypeScalarSize(coordType);
+
+			TCU_CHECK_INTERNAL(getDataTypeScalarSize(outputType) == 1);
+
+			// Each invocation may have different results.
+			for (int invocationNdx = 0; invocationNdx < numInvocations; invocationNdx++)
+			{
+				const float	coord	= coords[invocationNdx*numCoordComps + (numCoordComps-1)];
+
+				for (int lookupNdx = 0; lookupNdx < numLookups; lookupNdx++)
+				{
+					const int		texNdx		= m_lookupIndices[lookupNdx];
+					const float		result		= *((const float*)(const deUint8*)&outData[lookupNdx*outLookupStride + invocationNdx]);
+					const float		reference	= refTexAccess.sample2DCompare(refSampler, tcu::Sampler::NEAREST, coord, (float)texNdx, 0.0f, tcu::IVec3(0));
+
+					if (de::abs(result-reference) > 0.005f)
+					{
+						log << tcu::TestLog::Message << "ERROR: at invocation " << invocationNdx << ", lookup " << lookupNdx << ": expected "
+							<< reference << ", got " << result
+							<< tcu::TestLog::EndMessage;
+
+						if (testResult.getCode() == QP_TEST_RESULT_PASS)
+							testResult = tcu::TestStatus::fail("Got invalid lookup result");
+					}
+				}
+			}
+		}
+		else
+		{
+			TCU_CHECK_INTERNAL(getDataTypeScalarSize(outputType) == 4);
+
+			// Validate results from first invocation
+			for (int lookupNdx = 0; lookupNdx < numLookups; lookupNdx++)
+			{
+				const int		texNdx	= m_lookupIndices[lookupNdx];
+				const deUint8*	resPtr	= (const deUint8*)&outData[lookupNdx*outLookupStride];
+				bool			isOk;
+
+				if (outputType == glu::TYPE_FLOAT_VEC4)
+				{
+					const float			threshold		= 1.0f / 256.0f;
+					const tcu::Vec4		reference		= refTexAccess.getPixel(texNdx, 0);
+					const float*		floatPtr		= (const float*)resPtr;
+					const tcu::Vec4		result			(floatPtr[0], floatPtr[1], floatPtr[2], floatPtr[3]);
+
+					isOk = boolAll(lessThanEqual(abs(reference-result), tcu::Vec4(threshold)));
+
+					if (!isOk)
+					{
+						log << tcu::TestLog::Message << "ERROR: at lookup " << lookupNdx << ": expected "
+							<< reference << ", got " << result
+							<< tcu::TestLog::EndMessage;
+					}
+				}
+				else
+				{
+					const tcu::UVec4	reference		= refTexAccess.getPixelUint(texNdx, 0);
+					const deUint32*		uintPtr			= (const deUint32*)resPtr;
+					const tcu::UVec4	result			(uintPtr[0], uintPtr[1], uintPtr[2], uintPtr[3]);
+
+					isOk = boolAll(equal(reference, result));
+
+					if (!isOk)
+					{
+						log << tcu::TestLog::Message << "ERROR: at lookup " << lookupNdx << ": expected "
+							<< reference << ", got " << result
+							<< tcu::TestLog::EndMessage;
+					}
+				}
+
+				if (!isOk && testResult.getCode() == QP_TEST_RESULT_PASS)
+					testResult = tcu::TestStatus::fail("Got invalid lookup result");
+			}
+
+			// Check results of other invocations against first one
+			for (int invocationNdx = 1; invocationNdx < numInvocations; invocationNdx++)
+			{
+				for (int lookupNdx = 0; lookupNdx < numLookups; lookupNdx++)
+				{
+					const deUint32*		refPtr		= &outData[lookupNdx*outLookupStride];
+					const deUint32*		resPtr		= refPtr + invocationNdx*4;
+					bool				isOk		= true;
+
+					for (int ndx = 0; ndx < 4; ndx++)
+						isOk = isOk && (refPtr[ndx] == resPtr[ndx]);
+
+					if (!isOk)
+					{
+						log << tcu::TestLog::Message << "ERROR: invocation " << invocationNdx << " result "
+							<< tcu::formatArray(tcu::Format::HexIterator<deUint32>(resPtr), tcu::Format::HexIterator<deUint32>(resPtr+4))
+							<< " for lookup " << lookupNdx << " doesn't match result from first invocation "
+							<< tcu::formatArray(tcu::Format::HexIterator<deUint32>(refPtr), tcu::Format::HexIterator<deUint32>(refPtr+4))
+							<< tcu::TestLog::EndMessage;
+
+						if (testResult.getCode() == QP_TEST_RESULT_PASS)
+							testResult = tcu::TestStatus::fail("Inconsistent lookup results");
+					}
+				}
+			}
+		}
+
+		return testResult;
+	}
+}
+
+class SamplerIndexingCase : public OpaqueTypeIndexingCase
+{
+public:
+								SamplerIndexingCase			(tcu::TestContext&			testCtx,
+															 const char*				name,
+															 const char*				description,
+															 const glu::ShaderType		shaderType,
+															 glu::DataType				samplerType,
+															 IndexExprType				indexExprType);
+	virtual						~SamplerIndexingCase		(void);
+
+	virtual TestInstance*		createInstance				(Context& ctx) const;
+
+private:
+								SamplerIndexingCase			(const SamplerIndexingCase&);
+	SamplerIndexingCase&		operator=					(const SamplerIndexingCase&);
+
+	void						createShaderSpec			(void);
+
+	const glu::DataType			m_samplerType;
+	const int					m_numSamplers;
+	const int					m_numLookups;
+	std::vector<int>			m_lookupIndices;
+};
+
+SamplerIndexingCase::SamplerIndexingCase (tcu::TestContext&			testCtx,
+										  const char*				name,
+										  const char*				description,
+										  const glu::ShaderType		shaderType,
+										  glu::DataType				samplerType,
+										  IndexExprType				indexExprType)
+	: OpaqueTypeIndexingCase	(testCtx, name, description, shaderType, indexExprType)
+	, m_samplerType				(samplerType)
+	, m_numSamplers				(SamplerIndexingCaseInstance::NUM_SAMPLERS)
+	, m_numLookups				(SamplerIndexingCaseInstance::NUM_LOOKUPS)
+	, m_lookupIndices			(m_numLookups)
+{
+	createShaderSpec();
+	init();
+}
+
+SamplerIndexingCase::~SamplerIndexingCase (void)
+{
+}
+
+TestInstance* SamplerIndexingCase::createInstance (Context& ctx) const
+{
+	return new SamplerIndexingCaseInstance(ctx,
+										   m_shaderType,
+										   m_shaderSpec,
+										   *m_executor,
+										   m_name,
+										   m_samplerType,
+										   m_indexExprType,
+										   m_uniformSetup,
+										   m_lookupIndices);
+}
+
+void SamplerIndexingCase::createShaderSpec (void)
+{
+	de::Random			rnd				(deInt32Hash(m_samplerType) ^ deInt32Hash(m_shaderType) ^ deInt32Hash(m_indexExprType));
+	deUint32			binding			= getFirstFreeBindingLocation(m_shaderType);
+	const char*			samplersName	= "texSampler";
+	const char*			coordsName		= "coords";
+	const char*			indicesPrefix	= "index";
+	const char*			resultPrefix	= "result";
+	const glu::DataType	coordType		= getSamplerCoordType(m_samplerType);
+	const glu::DataType	outType			= getSamplerOutputType(m_samplerType);
+	std::ostringstream	global, code;
+
+	for (int ndx = 0; ndx < m_numLookups; ndx++)
+		m_lookupIndices[ndx] = rnd.getInt(0, m_numSamplers-1);
+
+	m_shaderSpec.inputs.push_back(Symbol(coordsName, glu::VarType(coordType, glu::PRECISION_HIGHP)));
+
+	if (m_indexExprType != INDEX_EXPR_TYPE_CONST_LITERAL)
+		global << "#extension GL_EXT_gpu_shader5 : require\n";
+
+	if (m_indexExprType == INDEX_EXPR_TYPE_CONST_EXPRESSION)
+		global << "const highp int indexBase = 1;\n";
+
+	global <<
+		"layout(set = 0, binding = " << binding++ << ") uniform highp " << getDataTypeName(m_samplerType) << " " << samplersName << "[" << m_numSamplers << "];\n";
+
+	if (m_indexExprType == INDEX_EXPR_TYPE_DYNAMIC_UNIFORM)
+	{
+		for (int lookupNdx = 0; lookupNdx < m_numLookups; lookupNdx++)
+		{
+			const std::string varName = indicesPrefix + de::toString(lookupNdx);
+			m_shaderSpec.inputs.push_back(Symbol(varName, glu::VarType(glu::TYPE_INT, glu::PRECISION_HIGHP)));
+		}
+	}
+	else if (m_indexExprType == INDEX_EXPR_TYPE_UNIFORM)
+		declareUniformIndexVars(global, indicesPrefix, m_numLookups, binding);
+
+	for (int lookupNdx = 0; lookupNdx < m_numLookups; lookupNdx++)
+	{
+		const std::string varName = resultPrefix + de::toString(lookupNdx);
+		m_shaderSpec.outputs.push_back(Symbol(varName, glu::VarType(outType, glu::PRECISION_HIGHP)));
+	}
+
+	for (int lookupNdx = 0; lookupNdx < m_numLookups; lookupNdx++)
+	{
+		code << resultPrefix << lookupNdx << " = texture(" << samplersName << "[";
+
+		if (m_indexExprType == INDEX_EXPR_TYPE_CONST_LITERAL)
+			code << m_lookupIndices[lookupNdx];
+		else if (m_indexExprType == INDEX_EXPR_TYPE_CONST_EXPRESSION)
+			code << "indexBase + " << (m_lookupIndices[lookupNdx]-1);
+		else
+			code << indicesPrefix << lookupNdx;
+
+		code << "], " << coordsName << ");\n";
+	}
+
+	m_shaderSpec.globalDeclarations	= global.str();
+	m_shaderSpec.source				= code.str();
+}
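+
+// Rough sketch of the lookup statements generated above for each index expression type
+// (the literal indices shown are hypothetical):
+//
+//   const_literal:        result0 = texture(texSampler[2], coords);
+//   const_expression:     result0 = texture(texSampler[indexBase + 1], coords);
+//   uniform / dyn. unif.: result0 = texture(texSampler[index0], coords);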
+
+enum BlockType
+{
+	BLOCKTYPE_UNIFORM = 0,
+	BLOCKTYPE_BUFFER,
+
+	BLOCKTYPE_LAST
+};
+
+class BlockArrayIndexingCaseInstance : public OpaqueTypeIndexingTestInstance
+{
+public:
+	enum
+	{
+		NUM_INVOCATIONS		= 32,
+		NUM_INSTANCES		= 4,
+		NUM_READS			= 4
+	};
+
+									BlockArrayIndexingCaseInstance	(Context&						context,
+																	 const glu::ShaderType			shaderType,
+																	 const ShaderSpec&				shaderSpec,
+																	 ShaderExecutor&				executor,
+																	 const char*					name,
+																	 BlockType						blockType,
+																	 const IndexExprType			indexExprType,
+																	 UniformSetup*					uniformSetup,
+																	 const std::vector<int>&		readIndices,
+																	 const std::vector<deUint32>&	inValues);
+	virtual							~BlockArrayIndexingCaseInstance	(void);
+
+	virtual tcu::TestStatus			iterate							(void);
+
+private:
+	const BlockType					m_blockType;
+	const std::vector<int>&			m_readIndices;
+	const std::vector<deUint32>&	m_inValues;
+};
+
+BlockArrayIndexingCaseInstance::BlockArrayIndexingCaseInstance (Context&						context,
+																const glu::ShaderType			shaderType,
+																const ShaderSpec&				shaderSpec,
+																ShaderExecutor&					executor,
+																const char*						name,
+																BlockType						blockType,
+																const IndexExprType				indexExprType,
+																UniformSetup*					uniformSetup,
+																const std::vector<int>&			readIndices,
+																const std::vector<deUint32>&	inValues)
+	: OpaqueTypeIndexingTestInstance	(context, shaderType, shaderSpec, executor, name, uniformSetup, indexExprType)
+	, m_blockType						(blockType)
+	, m_readIndices						(readIndices)
+	, m_inValues						(inValues)
+{
+}
+
+BlockArrayIndexingCaseInstance::~BlockArrayIndexingCaseInstance (void)
+{
+}
+
+tcu::TestStatus BlockArrayIndexingCaseInstance::iterate (void)
+{
+	const int					numInvocations		= NUM_INVOCATIONS;
+	const int					numReads			= NUM_READS;
+	std::vector<deUint32>		outValues			(numInvocations*numReads);
+
+	{
+		tcu::TestLog&			log					= m_context.getTestContext().getLog();
+		tcu::TestStatus			testResult			= tcu::TestStatus::pass("Pass");
+		std::vector<int>		expandedIndices;
+		std::vector<void*>		inputs;
+		std::vector<void*>		outputs;
+		deUint32				bindingLocation		= getFirstFreeBindingLocation(m_shaderType);
+		VkDescriptorType		descriptorType		= m_blockType == BLOCKTYPE_UNIFORM ? VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER : VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+
+		checkSupported(descriptorType);
+
+		for (size_t i = 0 ; i < m_inValues.size(); i++)
+			m_uniformSetup->addData(new UniformData<deUint32>(bindingLocation++, descriptorType, m_inValues[i]));
+
+		if (m_indexExprType == INDEX_EXPR_TYPE_DYNAMIC_UNIFORM)
+		{
+			expandedIndices.resize(numInvocations * m_readIndices.size());
+
+			for (int readNdx = 0; readNdx < numReads; readNdx++)
+			{
+				int* dst = &expandedIndices[numInvocations*readNdx];
+				std::fill(dst, dst+numInvocations, m_readIndices[readNdx]);
+			}
+
+			for (int readNdx = 0; readNdx < numReads; readNdx++)
+				inputs.push_back(&expandedIndices[readNdx*numInvocations]);
+		}
+		else if (m_indexExprType == INDEX_EXPR_TYPE_UNIFORM)
+			uploadUniformIndices(m_uniformSetup, numReads, &m_readIndices[0], bindingLocation);
+
+		for (int readNdx = 0; readNdx < numReads; readNdx++)
+			outputs.push_back(&outValues[readNdx*numInvocations]);
+
+		m_executor.setUniforms(m_uniformSetup);
+
+		m_executor.execute(m_context, numInvocations, inputs.empty() ? DE_NULL : &inputs[0], &outputs[0]);
+
+		for (int invocationNdx = 0; invocationNdx < numInvocations; invocationNdx++)
+		{
+			for (int readNdx = 0; readNdx < numReads; readNdx++)
+			{
+				const deUint32	refValue	= m_inValues[m_readIndices[readNdx]];
+				const deUint32	resValue	= outValues[readNdx*numInvocations + invocationNdx];
+
+				if (refValue != resValue)
+				{
+					log << tcu::TestLog::Message << "ERROR: at invocation " << invocationNdx
+						<< ", read " << readNdx << ": expected "
+						<< tcu::toHex(refValue) << ", got " << tcu::toHex(resValue)
+						<< tcu::TestLog::EndMessage;
+
+					if (testResult.getCode() == QP_TEST_RESULT_PASS)
+						testResult = tcu::TestStatus::fail("Invalid result value");
+				}
+			}
+		}
+
+		return testResult;
+	}
+}
+
+class BlockArrayIndexingCase : public OpaqueTypeIndexingCase
+{
+public:
+								BlockArrayIndexingCase		(tcu::TestContext&			testCtx,
+															 const char*				name,
+															 const char*				description,
+															 BlockType					blockType,
+															 IndexExprType				indexExprType,
+															 const glu::ShaderType		shaderType);
+	virtual						~BlockArrayIndexingCase		(void);
+
+	virtual TestInstance*		createInstance				(Context& ctx) const;
+
+private:
+								BlockArrayIndexingCase		(const BlockArrayIndexingCase&);
+	BlockArrayIndexingCase&		operator=					(const BlockArrayIndexingCase&);
+
+	void						createShaderSpec			(void);
+
+	const BlockType				m_blockType;
+	std::vector<int>			m_readIndices;
+	std::vector<deUint32>		m_inValues;
+};
+
+BlockArrayIndexingCase::BlockArrayIndexingCase (tcu::TestContext&			testCtx,
+												const char*					name,
+												const char*					description,
+												BlockType					blockType,
+												IndexExprType				indexExprType,
+												const glu::ShaderType		shaderType)
+	: OpaqueTypeIndexingCase	(testCtx, name, description, shaderType, indexExprType)
+	, m_blockType				(blockType)
+	, m_readIndices				(BlockArrayIndexingCaseInstance::NUM_READS)
+	, m_inValues				(BlockArrayIndexingCaseInstance::NUM_INSTANCES)
+{
+	createShaderSpec();
+	init();
+}
+
+BlockArrayIndexingCase::~BlockArrayIndexingCase (void)
+{
+}
+
+TestInstance* BlockArrayIndexingCase::createInstance (Context& ctx) const
+{
+	return new BlockArrayIndexingCaseInstance(ctx,
+											  m_shaderType,
+											  m_shaderSpec,
+											  *m_executor,
+											  m_name,
+											  m_blockType,
+											  m_indexExprType,
+											  m_uniformSetup,
+											  m_readIndices,
+											  m_inValues);
+}
+
+void BlockArrayIndexingCase::createShaderSpec (void)
+{
+	const int			numInstances	= BlockArrayIndexingCaseInstance::NUM_INSTANCES;
+	const int			numReads		= BlockArrayIndexingCaseInstance::NUM_READS;
+	de::Random			rnd				(deInt32Hash(m_shaderType) ^ deInt32Hash(m_blockType) ^ deInt32Hash(m_indexExprType));
+	deUint32			binding			= getFirstFreeBindingLocation(m_shaderType);
+	const char*			blockName		= "Block";
+	const char*			instanceName	= "block";
+	const char*			indicesPrefix	= "index";
+	const char*			resultPrefix	= "result";
+	const char*			interfaceName	= m_blockType == BLOCKTYPE_UNIFORM ? "uniform" : "buffer";
+	std::ostringstream	global, code;
+
+	for (int readNdx = 0; readNdx < numReads; readNdx++)
+		m_readIndices[readNdx] = rnd.getInt(0, numInstances-1);
+
+	for (int instanceNdx = 0; instanceNdx < numInstances; instanceNdx++)
+		m_inValues[instanceNdx] = rnd.getUint32();
+
+	if (m_indexExprType != INDEX_EXPR_TYPE_CONST_LITERAL)
+		global << "#extension GL_EXT_gpu_shader5 : require\n";
+
+	if (m_indexExprType == INDEX_EXPR_TYPE_CONST_EXPRESSION)
+		global << "const highp int indexBase = 1;\n";
+
+	global <<
+		"layout(set = 0, binding = " << binding << ") " << interfaceName << " " << blockName << "\n"
+		"{\n"
+		"	highp uint value;\n"
+		"} " << instanceName << "[" << numInstances << "];\n";
+
+	binding += numInstances;
+
+	if (m_indexExprType == INDEX_EXPR_TYPE_DYNAMIC_UNIFORM)
+	{
+		for (int readNdx = 0; readNdx < numReads; readNdx++)
+		{
+			const std::string varName = indicesPrefix + de::toString(readNdx);
+			m_shaderSpec.inputs.push_back(Symbol(varName, glu::VarType(glu::TYPE_INT, glu::PRECISION_HIGHP)));
+		}
+	}
+	else if (m_indexExprType == INDEX_EXPR_TYPE_UNIFORM)
+		declareUniformIndexVars(global, indicesPrefix, numReads, binding);
+
+	for (int readNdx = 0; readNdx < numReads; readNdx++)
+	{
+		const std::string varName = resultPrefix + de::toString(readNdx);
+		m_shaderSpec.outputs.push_back(Symbol(varName, glu::VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP)));
+	}
+
+	for (int readNdx = 0; readNdx < numReads; readNdx++)
+	{
+		code << resultPrefix << readNdx << " = " << instanceName << "[";
+
+		if (m_indexExprType == INDEX_EXPR_TYPE_CONST_LITERAL)
+			code << m_readIndices[readNdx];
+		else if (m_indexExprType == INDEX_EXPR_TYPE_CONST_EXPRESSION)
+			code << "indexBase + " << (m_readIndices[readNdx]-1);
+		else
+			code << indicesPrefix << readNdx;
+
+		code << "].value;\n";
+	}
+
+	m_shaderSpec.globalDeclarations	= global.str();
+	m_shaderSpec.source				= code.str();
+}
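+
+// Rough sketch of the interface and reads generated above (binding number depends on shader
+// stage, "uniform" becomes "buffer" for BLOCKTYPE_BUFFER, indices shown are hypothetical):
+//
+//   layout(set = 0, binding = 0) uniform Block { highp uint value; } block[4];
+//   result0 = block[index0].value;   // or block[2].value / block[indexBase + 1].value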
+
+class AtomicCounterIndexingCaseInstance : public OpaqueTypeIndexingTestInstance
+{
+public:
+	enum
+	{
+		NUM_INVOCATIONS		= 32,
+		NUM_COUNTERS		= 4,
+		NUM_OPS				= 4
+	};
+
+								AtomicCounterIndexingCaseInstance	(Context&					context,
+																	 const glu::ShaderType		shaderType,
+																	 const ShaderSpec&			shaderSpec,
+																	 ShaderExecutor&			executor,
+																	 const char*				name,
+																	 UniformSetup*				uniformSetup,
+																	 const std::vector<int>&	opIndices,
+																	 const IndexExprType		indexExprType);
+	virtual						~AtomicCounterIndexingCaseInstance	(void);
+
+	virtual	tcu::TestStatus		iterate								(void);
+
+private:
+	const std::vector<int>&		m_opIndices;
+};
+
+AtomicCounterIndexingCaseInstance::AtomicCounterIndexingCaseInstance (Context&					context,
+																	  const glu::ShaderType		shaderType,
+																	  const ShaderSpec&			shaderSpec,
+																	  ShaderExecutor&			executor,
+																	  const char*				name,
+																	  UniformSetup*				uniformSetup,
+																	  const std::vector<int>&	opIndices,
+																	  const IndexExprType		indexExprType)
+	: OpaqueTypeIndexingTestInstance	(context, shaderType, shaderSpec, executor, name, uniformSetup, indexExprType)
+	, m_opIndices						(opIndices)
+{
+}
+
+AtomicCounterIndexingCaseInstance::~AtomicCounterIndexingCaseInstance (void)
+{
+}
+
+tcu::TestStatus AtomicCounterIndexingCaseInstance::iterate (void)
+{
+	// \todo [2015-12-02 elecro] Add vertexPipelineStoresAndAtomics feature check.
+	const int					numInvocations		= NUM_INVOCATIONS;
+	const int					numCounters			= NUM_COUNTERS;
+	const int					numOps				= NUM_OPS;
+	std::vector<int>			expandedIndices;
+	std::vector<void*>			inputs;
+	std::vector<void*>			outputs;
+	std::vector<deUint32>		outValues			(numInvocations*numOps);
+	deUint32					bindingLocation		= getFirstFreeBindingLocation(m_shaderType);
+
+	const deUint32 atomicCounterLocation = bindingLocation++;
+
+	checkSupported(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
+
+	{
+		DE_ASSERT(numCounters <= 4);
+		// Add the atomic counters' base value, all zero.
+		m_uniformSetup->addData(new UniformData<tcu::Mat4>(atomicCounterLocation, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, tcu::Mat4(0.0)));
+
+		if (m_indexExprType == INDEX_EXPR_TYPE_DYNAMIC_UNIFORM)
+		{
+			expandedIndices.resize(numInvocations * m_opIndices.size());
+
+			for (int opNdx = 0; opNdx < numOps; opNdx++)
+			{
+				int* dst = &expandedIndices[numInvocations*opNdx];
+				std::fill(dst, dst+numInvocations, m_opIndices[opNdx]);
+			}
+
+			for (int opNdx = 0; opNdx < numOps; opNdx++)
+				inputs.push_back(&expandedIndices[opNdx*numInvocations]);
+		}
+		else if (m_indexExprType == INDEX_EXPR_TYPE_UNIFORM)
+			uploadUniformIndices(m_uniformSetup, numOps, &m_opIndices[0], bindingLocation);
+
+		for (int opNdx = 0; opNdx < numOps; opNdx++)
+			outputs.push_back(&outValues[opNdx*numInvocations]);
+
+		m_executor.setUniforms(m_uniformSetup);
+
+		m_executor.execute(m_context, numInvocations, inputs.empty() ? DE_NULL : &inputs[0], &outputs[0]);
+	}
+
+	{
+		tcu::TestLog&					log				= m_context.getTestContext().getLog();
+		tcu::TestStatus					testResult		= tcu::TestStatus::pass("Pass");
+		std::vector<int>				numHits			(numCounters, 0);	// Number of hits per counter.
+		std::vector<deUint32>			counterValues	(numCounters);
+		std::vector<std::vector<bool> >	counterMasks	(numCounters);
+
+		for (int opNdx = 0; opNdx < numOps; opNdx++)
+			numHits[m_opIndices[opNdx]] += 1;
+
+		// Read counter values
+		{
+			const void* mapPtr = m_executor.getBufferPtr(atomicCounterLocation);
+			DE_ASSERT(mapPtr != DE_NULL);
+			std::copy((const deUint32*)mapPtr, (const deUint32*)mapPtr + numCounters, &counterValues[0]);
+		}
+
+		// Verify counter values
+		for (int counterNdx = 0; counterNdx < numCounters; counterNdx++)
+		{
+			const deUint32		refCount	= (deUint32)(numHits[counterNdx]*numInvocations);
+			const deUint32		resCount	= counterValues[counterNdx];
+
+			if (refCount != resCount)
+			{
+				log << tcu::TestLog::Message << "ERROR: atomic counter " << counterNdx << " has value " << resCount
+					<< ", expected " << refCount
+					<< tcu::TestLog::EndMessage;
+
+				if (testResult.getCode() == QP_TEST_RESULT_PASS)
+					testResult = tcu::TestStatus::fail("Invalid atomic counter value");
+			}
+		}
+
+		// Allocate bitmasks - one bit for each valid result value

+		for (int counterNdx = 0; counterNdx < numCounters; counterNdx++)
+		{
+			const int	counterValue	= numHits[counterNdx]*numInvocations;
+			counterMasks[counterNdx].resize(counterValue, false);
+		}
+
+		// Verify result values from shaders
+		for (int invocationNdx = 0; invocationNdx < numInvocations; invocationNdx++)
+		{
+			for (int opNdx = 0; opNdx < numOps; opNdx++)
+			{
+				const int		counterNdx	= m_opIndices[opNdx];
+				const deUint32	resValue	= outValues[opNdx*numInvocations + invocationNdx];
+				const bool		rangeOk		= de::inBounds(resValue, 0u, (deUint32)counterMasks[counterNdx].size());
+				const bool		notSeen		= rangeOk && !counterMasks[counterNdx][resValue];
+				const bool		isOk		= rangeOk && notSeen;
+
+				if (!isOk)
+				{
+					log << tcu::TestLog::Message << "ERROR: at invocation " << invocationNdx
+						<< ", op " << opNdx << ": got invalid result value "
+						<< resValue
+						<< tcu::TestLog::EndMessage;
+
+					if (testResult.getCode() == QP_TEST_RESULT_PASS)
+						testResult = tcu::TestStatus::fail("Invalid result value");
+				}
+				else
+				{
+					// Mark as used - no other invocation should see this value from same counter.
+					counterMasks[counterNdx][resValue] = true;
+				}
+			}
+		}
+
+		if (testResult.getCode() == QP_TEST_RESULT_PASS)
+		{
+			// Consistency check - all masks should be 1 now
+			for (int counterNdx = 0; counterNdx < numCounters; counterNdx++)
+			{
+				for (std::vector<bool>::const_iterator i = counterMasks[counterNdx].begin(); i != counterMasks[counterNdx].end(); i++)
+					TCU_CHECK_INTERNAL(*i);
+			}
+		}
+
+		return testResult;
+	}
+}
+
+class AtomicCounterIndexingCase : public OpaqueTypeIndexingCase
+{
+public:
+								AtomicCounterIndexingCase	(tcu::TestContext&			testCtx,
+															 const char*				name,
+															 const char*				description,
+															 IndexExprType				indexExprType,
+															 const glu::ShaderType		shaderType);
+	virtual						~AtomicCounterIndexingCase	(void);
+
+	virtual TestInstance*		createInstance				(Context& ctx) const;
+
+private:
+								AtomicCounterIndexingCase	(const AtomicCounterIndexingCase&);
+	AtomicCounterIndexingCase&	operator=					(const AtomicCounterIndexingCase&);
+
+	void						createShaderSpec			(void);
+
+	std::vector<int>			m_opIndices;
+};
+
+AtomicCounterIndexingCase::AtomicCounterIndexingCase (tcu::TestContext&			testCtx,
+													  const char*				name,
+													  const char*				description,
+													  IndexExprType				indexExprType,
+													  const glu::ShaderType		shaderType)
+	: OpaqueTypeIndexingCase	(testCtx, name, description, shaderType, indexExprType)
+	, m_opIndices				(AtomicCounterIndexingCaseInstance::NUM_OPS)
+{
+	createShaderSpec();
+	init();
+}
+
+AtomicCounterIndexingCase::~AtomicCounterIndexingCase (void)
+{
+}
+
+TestInstance* AtomicCounterIndexingCase::createInstance (Context& ctx) const
+{
+	return new AtomicCounterIndexingCaseInstance(ctx,
+												 m_shaderType,
+												 m_shaderSpec,
+												 *m_executor,
+												 m_name,
+												 m_uniformSetup,
+												 m_opIndices,
+												 m_indexExprType);
+}
+
+void AtomicCounterIndexingCase::createShaderSpec (void)
+{
+	const int				numCounters		= AtomicCounterIndexingCaseInstance::NUM_COUNTERS;
+	const int				numOps			= AtomicCounterIndexingCaseInstance::NUM_OPS;
+	deUint32				binding			= getFirstFreeBindingLocation(m_shaderType);
+	de::Random				rnd				(deInt32Hash(m_shaderType) ^ deInt32Hash(m_indexExprType));
+
+	// \note Indices select counters, so they must stay within [0, numCounters-1].
+	for (int opNdx = 0; opNdx < numOps; opNdx++)
+		m_opIndices[opNdx] = rnd.getInt(0, numCounters-1);
+
+	{
+		const char*			indicesPrefix	= "index";
+		const char*			resultPrefix	= "result";
+		std::ostringstream	global, code;
+
+		if (m_indexExprType != INDEX_EXPR_TYPE_CONST_LITERAL)
+			global << "#extension GL_EXT_gpu_shader5 : require\n";
+
+		if (m_indexExprType == INDEX_EXPR_TYPE_CONST_EXPRESSION)
+			global << "const highp int indexBase = 1;\n";
+
+		global <<
+			"layout(set = 0, binding = " << binding++ << ") buffer AtomicBuffer { highp uint counter[" << numCounters << "]; };\n";
+
+		if (m_indexExprType == INDEX_EXPR_TYPE_DYNAMIC_UNIFORM)
+		{
+			for (int opNdx = 0; opNdx < numOps; opNdx++)
+			{
+				const std::string varName = indicesPrefix + de::toString(opNdx);
+				m_shaderSpec.inputs.push_back(Symbol(varName, glu::VarType(glu::TYPE_INT, glu::PRECISION_HIGHP)));
+			}
+		}
+		else if (m_indexExprType == INDEX_EXPR_TYPE_UNIFORM)
+			declareUniformIndexVars(global, indicesPrefix, numOps, binding);
+
+		for (int opNdx = 0; opNdx < numOps; opNdx++)
+		{
+			const std::string varName = resultPrefix + de::toString(opNdx);
+			m_shaderSpec.outputs.push_back(Symbol(varName, glu::VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP)));
+		}
+
+		for (int opNdx = 0; opNdx < numOps; opNdx++)
+		{
+			code << resultPrefix << opNdx << " = atomicAdd(counter[";
+
+			if (m_indexExprType == INDEX_EXPR_TYPE_CONST_LITERAL)
+				code << m_opIndices[opNdx];
+			else if (m_indexExprType == INDEX_EXPR_TYPE_CONST_EXPRESSION)
+				code << "indexBase + " << (m_opIndices[opNdx]-1);
+			else
+				code << indicesPrefix << opNdx;
+
+			code << "], uint(1));\n";
+		}
+
+		m_shaderSpec.globalDeclarations	= global.str();
+		m_shaderSpec.source				= code.str();
+	}
+}
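+
+// Rough sketch of the source generated above (binding number depends on shader stage, the
+// index shown is hypothetical):
+//
+//   layout(set = 0, binding = 2) buffer AtomicBuffer { highp uint counter[4]; };
+//   result0 = atomicAdd(counter[index0], uint(1));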
+
+class OpaqueTypeIndexingTests : public tcu::TestCaseGroup
+{
+public:
+								OpaqueTypeIndexingTests		(tcu::TestContext& testCtx);
+	virtual						~OpaqueTypeIndexingTests	(void);
+
+	virtual void				init						(void);
+
+private:
+								OpaqueTypeIndexingTests		(const OpaqueTypeIndexingTests&);
+	OpaqueTypeIndexingTests&	operator=					(const OpaqueTypeIndexingTests&);
+};
+
+OpaqueTypeIndexingTests::OpaqueTypeIndexingTests (tcu::TestContext& testCtx)
+	: tcu::TestCaseGroup(testCtx, "opaque_type_indexing", "Opaque Type Indexing Tests")
+{
+}
+
+OpaqueTypeIndexingTests::~OpaqueTypeIndexingTests (void)
+{
+}
+
+void OpaqueTypeIndexingTests::init (void)
+{
+	static const struct
+	{
+		IndexExprType	type;
+		const char*		name;
+		const char*		description;
+	} indexingTypes[] =
+	{
+		{ INDEX_EXPR_TYPE_CONST_LITERAL,	"const_literal",		"Indexing by constant literal"					},
+		{ INDEX_EXPR_TYPE_CONST_EXPRESSION,	"const_expression",		"Indexing by constant expression"				},
+		{ INDEX_EXPR_TYPE_UNIFORM,			"uniform",				"Indexing by uniform value"						},
+		{ INDEX_EXPR_TYPE_DYNAMIC_UNIFORM,	"dynamically_uniform",	"Indexing by dynamically uniform expression"	}
+	};
+
+	static const struct
+	{
+		glu::ShaderType	type;
+		const char*		name;
+	} shaderTypes[] =
+	{
+		{ glu::SHADERTYPE_VERTEX,		"vertex"	},
+		{ glu::SHADERTYPE_FRAGMENT,		"fragment"	},
+		{ glu::SHADERTYPE_COMPUTE,		"compute"	}
+	};
+
+	// .sampler
+	{
+		static const glu::DataType samplerTypes[] =
+		{
+			// \note 1D images will be added by a later extension.
+//			glu::TYPE_SAMPLER_1D,
+			glu::TYPE_SAMPLER_2D,
+			glu::TYPE_SAMPLER_CUBE,
+			glu::TYPE_SAMPLER_2D_ARRAY,
+			glu::TYPE_SAMPLER_3D,
+//			glu::TYPE_SAMPLER_1D_SHADOW,
+			glu::TYPE_SAMPLER_2D_SHADOW,
+			glu::TYPE_SAMPLER_CUBE_SHADOW,
+			glu::TYPE_SAMPLER_2D_ARRAY_SHADOW,
+//			glu::TYPE_INT_SAMPLER_1D,
+			glu::TYPE_INT_SAMPLER_2D,
+			glu::TYPE_INT_SAMPLER_CUBE,
+			glu::TYPE_INT_SAMPLER_2D_ARRAY,
+			glu::TYPE_INT_SAMPLER_3D,
+//			glu::TYPE_UINT_SAMPLER_1D,
+			glu::TYPE_UINT_SAMPLER_2D,
+			glu::TYPE_UINT_SAMPLER_CUBE,
+			glu::TYPE_UINT_SAMPLER_2D_ARRAY,
+			glu::TYPE_UINT_SAMPLER_3D,
+		};
+
+		tcu::TestCaseGroup* const samplerGroup = new tcu::TestCaseGroup(m_testCtx, "sampler", "Sampler Array Indexing Tests");
+		addChild(samplerGroup);
+
+		for (int indexTypeNdx = 0; indexTypeNdx < DE_LENGTH_OF_ARRAY(indexingTypes); indexTypeNdx++)
+		{
+			const IndexExprType			indexExprType	= indexingTypes[indexTypeNdx].type;
+			tcu::TestCaseGroup* const	indexGroup		= new tcu::TestCaseGroup(m_testCtx, indexingTypes[indexTypeNdx].name, indexingTypes[indexTypeNdx].description);
+			samplerGroup->addChild(indexGroup);
+
+			for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(shaderTypes); shaderTypeNdx++)
+			{
+				const glu::ShaderType		shaderType		= shaderTypes[shaderTypeNdx].type;
+				tcu::TestCaseGroup* const	shaderGroup		= new tcu::TestCaseGroup(m_testCtx, shaderTypes[shaderTypeNdx].name, "");
+				indexGroup->addChild(shaderGroup);
+
+				for (int samplerTypeNdx = 0; samplerTypeNdx < DE_LENGTH_OF_ARRAY(samplerTypes); samplerTypeNdx++)
+				{
+					const glu::DataType	samplerType	= samplerTypes[samplerTypeNdx];
+					const char*			samplerName	= getDataTypeName(samplerType);
+					const std::string	caseName	= de::toLower(samplerName);
+
+					shaderGroup->addChild(new SamplerIndexingCase(m_testCtx, caseName.c_str(), "", shaderType, samplerType, indexExprType));
+				}
+			}
+		}
+	}
+
+	// .ubo / .ssbo / .atomic_counter
+	{
+		tcu::TestCaseGroup* const	uboGroup	= new tcu::TestCaseGroup(m_testCtx, "ubo",				"Uniform Block Instance Array Indexing Tests");
+		tcu::TestCaseGroup* const	ssboGroup	= new tcu::TestCaseGroup(m_testCtx, "ssbo",				"Buffer Block Instance Array Indexing Tests");
+		tcu::TestCaseGroup* const	acGroup		= new tcu::TestCaseGroup(m_testCtx, "atomic_counter",	"Atomic Counter Array Indexing Tests");
+		addChild(uboGroup);
+		addChild(ssboGroup);
+		addChild(acGroup);
+
+		for (int indexTypeNdx = 0; indexTypeNdx < DE_LENGTH_OF_ARRAY(indexingTypes); indexTypeNdx++)
+		{
+			const IndexExprType		indexExprType		= indexingTypes[indexTypeNdx].type;
+			const char*				indexExprName		= indexingTypes[indexTypeNdx].name;
+			const char*				indexExprDesc		= indexingTypes[indexTypeNdx].description;
+
+			for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(shaderTypes); shaderTypeNdx++)
+			{
+				const glu::ShaderType	shaderType		= shaderTypes[shaderTypeNdx].type;
+				const std::string		name			= std::string(indexExprName) + "_" + shaderTypes[shaderTypeNdx].name;
+
+				uboGroup->addChild	(new BlockArrayIndexingCase		(m_testCtx, name.c_str(), indexExprDesc, BLOCKTYPE_UNIFORM,	indexExprType, shaderType));
+				acGroup->addChild	(new AtomicCounterIndexingCase	(m_testCtx, name.c_str(), indexExprDesc, indexExprType, shaderType));
+
+				if (indexExprType == INDEX_EXPR_TYPE_CONST_LITERAL || indexExprType == INDEX_EXPR_TYPE_CONST_EXPRESSION)
+					ssboGroup->addChild	(new BlockArrayIndexingCase	(m_testCtx, name.c_str(), indexExprDesc, BLOCKTYPE_BUFFER, indexExprType, shaderType));
+			}
+		}
+	}
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createOpaqueTypeIndexingTests (tcu::TestContext& testCtx)
+{
+	return new OpaqueTypeIndexingTests(testCtx);
+}
+
+} // shaderexecutor
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktOpaqueTypeIndexingTests.hpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktOpaqueTypeIndexingTests.hpp
new file mode 100644
index 0000000..6bc63b7
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktOpaqueTypeIndexingTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTOPAQUETYPEINDEXINGTESTS_HPP
+#define _VKTOPAQUETYPEINDEXINGTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Opaque type (sampler, buffer, atomic counter, ...) indexing tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace shaderexecutor
+{
+
+tcu::TestCaseGroup*		createOpaqueTypeIndexingTests	(tcu::TestContext& testCtx);
+
+} // shaderexecutor
+} // vkt
+
+#endif // _VKTOPAQUETYPEINDEXINGTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinPrecisionTests.cpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinPrecisionTests.cpp
new file mode 100644
index 0000000..8400885
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinPrecisionTests.cpp
@@ -0,0 +1,5316 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Precision and range tests for builtins and types.
+ *
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderBuiltinPrecisionTests.hpp"
+#include "vktShaderExecutor.hpp"
+
+#include "deMath.h"
+#include "deMemory.h"
+#include "deDefs.hpp"
+#include "deRandom.hpp"
+#include "deSTLUtil.hpp"
+#include "deStringUtil.hpp"
+#include "deUniquePtr.hpp"
+#include "deSharedPtr.hpp"
+#include "deArrayUtil.hpp"
+
+#include "tcuCommandLine.hpp"
+#include "tcuFloatFormat.hpp"
+#include "tcuInterval.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuVector.hpp"
+#include "tcuMatrix.hpp"
+#include "tcuResultCollector.hpp"
+
+#include "gluContextInfo.hpp"
+#include "gluVarType.hpp"
+#include "gluRenderContext.hpp"
+#include "glwDefs.hpp"
+
+#include <cmath>
+#include <string>
+#include <sstream>
+#include <iostream>
+#include <map>
+#include <utility>
+
+// Uncomment this to get evaluation trace dumps to std::cerr
+// #define GLS_ENABLE_TRACE
+
+// set this to true to dump even passing results
+#define GLS_LOG_ALL_RESULTS false
+
+namespace vkt
+{
+namespace shaderexecutor
+{
+
+using std::string;
+using std::map;
+using std::ostream;
+using std::ostringstream;
+using std::pair;
+using std::vector;
+using std::set;
+
+using de::MovePtr;
+using de::Random;
+using de::SharedPtr;
+using de::UniquePtr;
+using tcu::Interval;
+using tcu::FloatFormat;
+using tcu::MessageBuilder;
+using tcu::TestLog;
+using tcu::Vector;
+using tcu::Matrix;
+using glu::Precision;
+using glu::VarType;
+using glu::DataType;
+using glu::ShaderType;
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Generic singleton creator.
+ *
+ * instance<T>() returns a reference to a unique default-constructed instance
+ * of T. This is mainly used for our GLSL function implementations: each
+ * function is implemented by an object, and each of the objects has a
+ * distinct class. It would be extremely toilsome to maintain a separate
+ * context object that contained individual instances of the function classes,
+ * so we have to resort to global singleton instances.
+ *
+ *//*--------------------------------------------------------------------*/
+template <typename T>
+const T& instance (void)
+{
+	static const T s_instance = T();
+	return s_instance;
+}
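+
+// Minimal usage sketch (Add stands for a hypothetical function-object class of the kind
+// defined later in this file):
+//
+//   const Add& add = instance<Add>();   // always returns the same object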
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Dummy placeholder type for unused template parameters.
+ *
+ * In the precision tests we are dealing with functions of different arities.
+ * To minimize code duplication, we only define templates with the maximum
+ * number of arguments, currently four. If a function's arity is less than the
+ * maximum, Void is used as the type for unused arguments.
+ *
+ * Although Voids are never used at run time, code handling them must still
+ * compile, so Void has to support all the operations that other types do.
+ *
+ *//*--------------------------------------------------------------------*/
+struct Void
+{
+	typedef	Void		Element;
+	enum
+	{
+		SIZE = 0,
+	};
+
+	template <typename T>
+	explicit			Void			(const T&)		{}
+						Void			(void)			{}
+						operator double	(void)	const	{ return TCU_NAN; }
+
+	// These are used to make Voids usable as containers in container-generic code.
+	Void&				operator[]		(int)			{ return *this; }
+	const Void&			operator[]		(int)	const	{ return *this; }
+};
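+
+// Rough usage sketch: a unary GLSL function is declared with e.g.
+// Signature<float, float> (defined further below), which leaves Arg1..Arg3 as
+// Void. The four-argument templates still instantiate, and checks such as
+// isTypeValid<Arg1>() (just below) let printing and evaluation skip the
+// unused slots.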
+
+ostream& operator<< (ostream& os, Void) { return os << "()"; }
+
+//! Returns true for all other types except Void
+template <typename T>	bool isTypeValid		(void)	{ return true;	}
+template <>				bool isTypeValid<Void>	(void)	{ return false;	}
+
+//! Utility function for getting the name of a data type.
+//! This is used in vector and matrix constructors.
+template <typename T>
+const char* dataTypeNameOf (void)
+{
+	return glu::getDataTypeName(glu::dataTypeOf<T>());
+}
+
+template <>
+const char* dataTypeNameOf<Void> (void)
+{
+	DE_FATAL("Impossible");
+	return DE_NULL;
+}
+
+//! A hack to get Void support for VarType.
+template <typename T>
+VarType getVarTypeOf (Precision prec = glu::PRECISION_LAST)
+{
+	return glu::varTypeOf<T>(prec);
+}
+
+template <>
+VarType getVarTypeOf<Void> (Precision)
+{
+	DE_FATAL("Impossible");
+	return VarType();
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Type traits for generalized interval types.
+ *
+ * We are trying to compute sets of acceptable values not only for
+ * float-valued expressions but also for compound values: vectors and
+ * matrices. We approximate a set of vectors as a vector of intervals and
+ * likewise for matrices.
+ *
+ * We now need generalized operations for each type and its interval
+ * approximation. These are given in the type Traits<T>.
+ *
+ * The type Traits<T>::IVal is the approximation of T: it is `Interval` for
+ * scalar types, and a vector or matrix of intervals for container types.
+ *
+ * To allow template inference to take place, there are function wrappers for
+ * the actual operations in Traits<T>. Hence we can just use:
+ *
+ * makeIVal(someFloat)
+ *
+ * instead of:
+ *
+ * Traits<float>::doMakeIVal(value)
+ *
+ *//*--------------------------------------------------------------------*/
+
+template <typename T> struct Traits;
+
+//! Create container from elementwise singleton values.
+template <typename T>
+typename Traits<T>::IVal makeIVal (const T& value)
+{
+	return Traits<T>::doMakeIVal(value);
+}
+
+//! Elementwise union of intervals.
+template <typename T>
+typename Traits<T>::IVal unionIVal (const typename Traits<T>::IVal& a,
+									const typename Traits<T>::IVal& b)
+{
+	return Traits<T>::doUnion(a, b);
+}
+
+//! Returns true iff every element of `ival` contains the corresponding element of `value`.
+template <typename T>
+bool contains (const typename Traits<T>::IVal& ival, const T& value)
+{
+	return Traits<T>::doContains(ival, value);
+}
+
+//! Print out an interval with the precision of `fmt`.
+template <typename T>
+void printIVal (const FloatFormat& fmt, const typename Traits<T>::IVal& ival, ostream& os)
+{
+	Traits<T>::doPrintIVal(fmt, ival, os);
+}
+
+template <typename T>
+string intervalToString (const FloatFormat& fmt, const typename Traits<T>::IVal& ival)
+{
+	ostringstream oss;
+	printIVal<T>(fmt, ival, oss);
+	return oss.str();
+}
+
+//! Print out a value with the precision of `fmt`.
+template <typename T>
+void printValue (const FloatFormat& fmt, const T& value, ostream& os)
+{
+	Traits<T>::doPrintValue(fmt, value, os);
+}
+
+template <typename T>
+string valueToString (const FloatFormat& fmt, const T& val)
+{
+	ostringstream oss;
+	printValue(fmt, val, oss);
+	return oss.str();
+}
+
+//! Approximate `value` elementwise to the float precision defined in `fmt`.
+//! The resulting interval might not be a singleton if rounding in both
+//! directions is allowed.
+template <typename T>
+typename Traits<T>::IVal round (const FloatFormat& fmt, const T& value)
+{
+	return Traits<T>::doRound(fmt, value);
+}
+
+template <typename T>
+typename Traits<T>::IVal convert (const FloatFormat&				fmt,
+								  const typename Traits<T>::IVal&	value)
+{
+	return Traits<T>::doConvert(fmt, value);
+}
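+
+// As a concrete example of the wrappers above: if `v` has type
+// Vector<float, 2>, then Traits<Vector<float, 2> >::IVal is
+// Vector<Interval, 2>, makeIVal(v) produces elementwise singleton intervals,
+// and contains(makeIVal(v), v) holds by construction.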
+
+//! Common traits for scalar types.
+template <typename T>
+struct ScalarTraits
+{
+	typedef				Interval		IVal;
+
+	static Interval		doMakeIVal		(const T& value)
+	{
+		// Thankfully all scalar types have a well-defined conversion to `double`,
+		// hence Interval can represent their ranges without problems.
+		return Interval(double(value));
+	}
+
+	static Interval		doUnion			(const Interval& a, const Interval& b)
+	{
+		return a | b;
+	}
+
+	static bool			doContains		(const Interval& a, T value)
+	{
+		return a.contains(double(value));
+	}
+
+	static Interval		doConvert		(const FloatFormat& fmt, const IVal& ival)
+	{
+		return fmt.convert(ival);
+	}
+
+	static Interval		doRound			(const FloatFormat& fmt, T value)
+	{
+		return fmt.roundOut(double(value), false);
+	}
+};
+
+template<>
+struct Traits<float> : ScalarTraits<float>
+{
+	static void			doPrintIVal		(const FloatFormat&	fmt,
+										 const Interval&	ival,
+										 ostream&			os)
+	{
+		os << fmt.intervalToHex(ival);
+	}
+
+	static void			doPrintValue	(const FloatFormat&	fmt,
+										 const float&		value,
+										 ostream&			os)
+	{
+		os << fmt.floatToHex(value);
+	}
+};
+
+template<>
+struct Traits<bool> : ScalarTraits<bool>
+{
+	static void			doPrintValue	(const FloatFormat&,
+										 const float&		value,
+										 ostream&			os)
+	{
+		os << (value != 0.0f ? "true" : "false");
+	}
+
+	static void			doPrintIVal		(const FloatFormat&,
+										 const Interval&	ival,
+										 ostream&			os)
+	{
+		os << "{";
+		if (ival.contains(false))
+			os << "false";
+		if (ival.contains(false) && ival.contains(true))
+			os << ", ";
+		if (ival.contains(true))
+			os << "true";
+		os << "}";
+	}
+};
+
+template<>
+struct Traits<int> : ScalarTraits<int>
+{
+	static void			doPrintValue	(const FloatFormat&,
+										 const int&			value,
+										 ostream&			os)
+	{
+		os << value;
+	}
+
+	static void			doPrintIVal		(const FloatFormat&,
+										 const Interval&	ival,
+										 ostream&			os)
+	{
+		os << "[" << int(ival.lo()) << ", " << int(ival.hi()) << "]";
+	}
+};
+
+//! Common traits for containers, i.e. vectors and matrices.
+//! T is the container type itself, I is the same type with interval elements.
+template <typename T, typename I>
+struct ContainerTraits
+{
+	typedef typename	T::Element		Element;
+	typedef				I				IVal;
+
+	static IVal			doMakeIVal		(const T& value)
+	{
+		IVal ret;
+
+		for (int ndx = 0; ndx < T::SIZE; ++ndx)
+			ret[ndx] = makeIVal(value[ndx]);
+
+		return ret;
+	}
+
+	static IVal			doUnion			(const IVal& a, const IVal& b)
+	{
+		IVal ret;
+
+		for (int ndx = 0; ndx < T::SIZE; ++ndx)
+			ret[ndx] = unionIVal<Element>(a[ndx], b[ndx]);
+
+		return ret;
+	}
+
+	static bool			doContains		(const IVal& ival, const T& value)
+	{
+		for (int ndx = 0; ndx < T::SIZE; ++ndx)
+			if (!contains(ival[ndx], value[ndx]))
+				return false;
+
+		return true;
+	}
+
+	static void			doPrintIVal		(const FloatFormat& fmt, const IVal ival, ostream& os)
+	{
+		os << "(";
+
+		for (int ndx = 0; ndx < T::SIZE; ++ndx)
+		{
+			if (ndx > 0)
+				os << ", ";
+
+			printIVal<Element>(fmt, ival[ndx], os);
+		}
+
+		os << ")";
+	}
+
+	static void			doPrintValue	(const FloatFormat& fmt, const T& value, ostream& os)
+	{
+		os << dataTypeNameOf<T>() << "(";
+
+		for (int ndx = 0; ndx < T::SIZE; ++ndx)
+		{
+			if (ndx > 0)
+				os << ", ";
+
+			printValue<Element>(fmt, value[ndx], os);
+		}
+
+		os << ")";
+	}
+
+	static IVal			doConvert		(const FloatFormat& fmt, const IVal& value)
+	{
+		IVal ret;
+
+		for (int ndx = 0; ndx < T::SIZE; ++ndx)
+			ret[ndx] = convert<Element>(fmt, value[ndx]);
+
+		return ret;
+	}
+
+	static IVal			doRound			(const FloatFormat& fmt, T value)
+	{
+		IVal ret;
+
+		for (int ndx = 0; ndx < T::SIZE; ++ndx)
+			ret[ndx] = round(fmt, value[ndx]);
+
+		return ret;
+	}
+};
+
+template <typename T, int Size>
+struct Traits<Vector<T, Size> > :
+	ContainerTraits<Vector<T, Size>, Vector<typename Traits<T>::IVal, Size> >
+{
+};
+
+template <typename T, int Rows, int Cols>
+struct Traits<Matrix<T, Rows, Cols> > :
+	ContainerTraits<Matrix<T, Rows, Cols>, Matrix<typename Traits<T>::IVal, Rows, Cols> >
+{
+};
+
+//! Void traits. These are just dummies, but technically valid: a Void is a
+//! unit type with a single possible value.
+template<>
+struct Traits<Void>
+{
+	typedef		Void			IVal;
+
+	static Void	doMakeIVal		(const Void& value)						{ return value; }
+	static Void	doUnion			(const Void&, const Void&)				{ return Void(); }
+	static bool	doContains		(const Void&, Void)						{ return true; }
+	static Void	doRound			(const FloatFormat&, const Void& value)	{ return value; }
+	static Void	doConvert		(const FloatFormat&, const Void& value)	{ return value; }
+
+	static void	doPrintValue	(const FloatFormat&, const Void&, ostream& os)
+	{
+		os << "()";
+	}
+
+	static void	doPrintIVal		(const FloatFormat&, const Void&, ostream& os)
+	{
+		os << "()";
+	}
+};
+
+//! This is needed for container-generic operations.
+//! We want a scalar type T to be its own "one-element vector".
+template <typename T, int Size> struct ContainerOf	{ typedef Vector<T, Size>	Container; };
+
+template <typename T>			struct ContainerOf<T, 1>		{ typedef T		Container; };
+template <int Size>				struct ContainerOf<Void, Size>	{ typedef Void	Container; };
+
+// This is a kludge that is only needed to get the ExprP::operator[] syntactic sugar to work.
+template <typename T>	struct ElementOf		{ typedef	typename T::Element	Element; };
+template <>				struct ElementOf<float>	{ typedef	void				Element; };
+template <>				struct ElementOf<bool>	{ typedef	void				Element; };
+template <>				struct ElementOf<int>	{ typedef	void				Element; };
+
+/*--------------------------------------------------------------------*//*!
+ *
+ * \name Abstract syntax for expressions and statements.
+ *
+ * We represent GLSL programs as syntax objects: an Expr<T> represents an
+ * expression whose GLSL type corresponds to the C++ type T, and a Statement
+ * represents a statement.
+ *
+ * To ease memory management, we use shared pointers to refer to expressions
+ * and statements. ExprP<T> is a shared pointer to an Expr<T>, and StatementP
+ * is a shared pointer to a Statement.
+ *
+ * \{
+ *
+ *//*--------------------------------------------------------------------*/
+
+class ExprBase;
+class ExpandContext;
+class Statement;
+class StatementP;
+class FuncBase;
+template <typename T> class ExprP;
+template <typename T> class Variable;
+template <typename T> class VariableP;
+template <typename T> class DefaultSampling;
+
+typedef set<const FuncBase*> FuncSet;
+
+template <typename T>
+VariableP<T>	variable			(const string& name);
+StatementP		compoundStatement	(const vector<StatementP>& statements);
+
+/*--------------------------------------------------------------------*//*!
+ * \brief A variable environment.
+ *
+ * An Environment object maintains the mapping between variables of the
+ * abstract syntax tree and their values.
+ *
+ * \todo [2014-03-28 lauri] At least run-time type safety.
+ *
+ *//*--------------------------------------------------------------------*/
+class Environment
+{
+public:
+	template<typename T>
+	void						bind	(const Variable<T>&					variable,
+										 const typename Traits<T>::IVal&	value)
+	{
+		deUint8* const data = new deUint8[sizeof(value)];
+
+		deMemcpy(data, &value, sizeof(value));
+		de::insert(m_map, variable.getName(), SharedPtr<deUint8>(data, de::ArrayDeleter<deUint8>()));
+	}
+
+	template<typename T>
+	typename Traits<T>::IVal&	lookup	(const Variable<T>& variable) const
+	{
+		deUint8* const data = de::lookup(m_map, variable.getName()).get();
+
+		return *reinterpret_cast<typename Traits<T>::IVal*>(data);
+	}
+
+private:
+	map<string, SharedPtr<deUint8> >	m_map;
+};
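+
+// Minimal usage sketch: given a Variable<float> x, env.bind(x, Interval(1.0))
+// copies the interval into the map under x's name, and env.lookup(x) returns
+// a reference to that storage, so a later assignment statement can overwrite
+// it in place.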
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Evaluation context.
+ *
+ * The evaluation context contains everything that separates one execution of
+ * an expression from the next. Currently this means the desired floating
+ * point precision and the current variable environment.
+ *
+ *//*--------------------------------------------------------------------*/
+struct EvalContext
+{
+	EvalContext (const FloatFormat&	format_,
+				 Precision			floatPrecision_,
+				 Environment&		env_,
+				 int				callDepth_ = 0)
+		: format			(format_)
+		, floatPrecision	(floatPrecision_)
+		, env				(env_)
+		, callDepth			(callDepth_) {}
+
+	FloatFormat		format;
+	Precision		floatPrecision;
+	Environment&	env;
+	int				callDepth;
+};
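+
+// Note that callDepth is only used for indenting the optional GLS_ENABLE_TRACE
+// output; DerivedFunc bodies (below) are evaluated with a fresh Environment
+// but the same format, precision and call depth.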
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Simple incremental counter.
+ *
+ * This is used to make sure that different ExpandContexts will not produce
+ * overlapping temporary names.
+ *
+ *//*--------------------------------------------------------------------*/
+class Counter
+{
+public:
+			Counter		(int count = 0) : m_count(count) {}
+	int		operator()	(void) { return m_count++; }
+
+private:
+	int		m_count;
+};
+
+class ExpandContext
+{
+public:
+						ExpandContext	(Counter& symCounter) : m_symCounter(symCounter) {}
+						ExpandContext	(const ExpandContext& parent)
+							: m_symCounter(parent.m_symCounter) {}
+
+	template<typename T>
+	VariableP<T>		genSym			(const string& baseName)
+	{
+		return variable<T>(baseName + de::toString(m_symCounter()));
+	}
+
+	void				addStatement	(const StatementP& stmt)
+	{
+		m_statements.push_back(stmt);
+	}
+
+	vector<StatementP>	getStatements	(void) const
+	{
+		return m_statements;
+	}
+private:
+	Counter&			m_symCounter;
+	vector<StatementP>	m_statements;
+};
+
+/*--------------------------------------------------------------------*//*!
+ * \brief A statement or declaration.
+ *
+ * Statements have no values. Instead, they are executed for their side
+ * effects only: the execute() method should modify at least one variable in
+ * the environment.
+ *
+ * As a bit of a kludge, a Statement object can also represent a declaration:
+ * when it is evaluated, it can add a variable binding to the environment
+ * instead of modifying an existing one.
+ *
+ *//*--------------------------------------------------------------------*/
+class Statement
+{
+public:
+	virtual			~Statement		(void)							{								 }
+	//! Execute the statement, modifying the environment of `ctx`
+	void			execute			(EvalContext&	ctx)	const	{ this->doExecute(ctx);			 }
+	void			print			(ostream&		os)		const	{ this->doPrint(os);			 }
+	//! Add the functions used in this statement to `dst`.
+	void			getUsedFuncs	(FuncSet& dst)			const	{ this->doGetUsedFuncs(dst);	 }
+
+protected:
+	virtual void	doPrint			(ostream& os)			const	= 0;
+	virtual void	doExecute		(EvalContext& ctx)		const	= 0;
+	virtual void	doGetUsedFuncs	(FuncSet& dst)			const	= 0;
+};
+
+ostream& operator<<(ostream& os, const Statement& stmt)
+{
+	stmt.print(os);
+	return os;
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Smart pointer for statements (and declarations)
+ *
+ *//*--------------------------------------------------------------------*/
+class StatementP : public SharedPtr<const Statement>
+{
+public:
+	typedef		SharedPtr<const Statement>	Super;
+
+				StatementP			(void) {}
+	explicit	StatementP			(const Statement* ptr)	: Super(ptr) {}
+				StatementP			(const Super& ptr)		: Super(ptr) {}
+};
+
+/*--------------------------------------------------------------------*//*!
+ * \brief A statement that modifies a variable, or a declaration that binds
+ * a variable.
+ *
+ *//*--------------------------------------------------------------------*/
+template <typename T>
+class VariableStatement : public Statement
+{
+public:
+					VariableStatement	(const VariableP<T>& variable, const ExprP<T>& value,
+										 bool isDeclaration)
+						: m_variable		(variable)
+						, m_value			(value)
+						, m_isDeclaration	(isDeclaration) {}
+
+protected:
+	void			doPrint				(ostream& os)							const
+	{
+		if (m_isDeclaration)
+			os << glu::declare(getVarTypeOf<T>(), m_variable->getName());
+		else
+			os << m_variable->getName();
+
+		os << " = " << *m_value << ";\n";
+	}
+
+	void			doExecute			(EvalContext& ctx)						const
+	{
+		if (m_isDeclaration)
+			ctx.env.bind(*m_variable, m_value->evaluate(ctx));
+		else
+			ctx.env.lookup(*m_variable) = m_value->evaluate(ctx);
+	}
+
+	void			doGetUsedFuncs		(FuncSet& dst)							const
+	{
+		m_value->getUsedFuncs(dst);
+	}
+
+	VariableP<T>	m_variable;
+	ExprP<T>		m_value;
+	bool			m_isDeclaration;
+};
+
+template <typename T>
+StatementP variableStatement (const VariableP<T>&	variable,
+							  const ExprP<T>&		value,
+							  bool					isDeclaration)
+{
+	return StatementP(new VariableStatement<T>(variable, value, isDeclaration));
+}
+
+template <typename T>
+StatementP variableDeclaration (const VariableP<T>& variable, const ExprP<T>& definiens)
+{
+	return variableStatement(variable, definiens, true);
+}
+
+template <typename T>
+StatementP variableAssignment (const VariableP<T>& variable, const ExprP<T>& value)
+{
+	return variableStatement(variable, value, false);
+}
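+
+// Putting these together: with VariableP<float> x and y, the statement
+// variableDeclaration(y, x + x) prints as a GLSL declaration of y initialized
+// with the expression, and executing it binds y's value interval in the
+// environment.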
+
+/*--------------------------------------------------------------------*//*!
+ * \brief A compound statement, i.e. a block.
+ *
+ * A compound statement is executed by executing its constituent statements in
+ * sequence.
+ *
+ *//*--------------------------------------------------------------------*/
+class CompoundStatement : public Statement
+{
+public:
+						CompoundStatement	(const vector<StatementP>& statements)
+							: m_statements	(statements) {}
+
+protected:
+	void				doPrint				(ostream&		os)						const
+	{
+		os << "{\n";
+
+		for (size_t ndx = 0; ndx < m_statements.size(); ++ndx)
+			os << *m_statements[ndx];
+
+		os << "}\n";
+	}
+
+	void				doExecute			(EvalContext&	ctx)					const
+	{
+		for (size_t ndx = 0; ndx < m_statements.size(); ++ndx)
+			m_statements[ndx]->execute(ctx);
+	}
+
+	void				doGetUsedFuncs		(FuncSet& dst)							const
+	{
+		for (size_t ndx = 0; ndx < m_statements.size(); ++ndx)
+			m_statements[ndx]->getUsedFuncs(dst);
+	}
+
+	vector<StatementP>	m_statements;
+};
+
+StatementP compoundStatement(const vector<StatementP>& statements)
+{
+	return StatementP(new CompoundStatement(statements));
+}
+
+//! Common base class for all expressions regardless of their type.
+class ExprBase
+{
+public:
+	virtual				~ExprBase		(void)									{}
+	void				printExpr		(ostream& os) const { this->doPrintExpr(os); }
+
+	//! Output the functions that this expression refers to
+	void				getUsedFuncs	(FuncSet& dst) const
+	{
+		this->doGetUsedFuncs(dst);
+	}
+
+protected:
+	virtual void		doPrintExpr		(ostream&)	const	{}
+	virtual void		doGetUsedFuncs	(FuncSet&)	const	{}
+};
+
+//! Type-specific operations for an expression representing type T.
+template <typename T>
+class Expr : public ExprBase
+{
+public:
+	typedef				T				Val;
+	typedef typename	Traits<T>::IVal	IVal;
+
+	IVal				evaluate		(const EvalContext&	ctx) const;
+
+protected:
+	virtual IVal		doEvaluate		(const EvalContext&	ctx) const = 0;
+};
+
+//! Evaluate an expression with the given context, optionally tracing the calls to stderr.
+template <typename T>
+typename Traits<T>::IVal Expr<T>::evaluate (const EvalContext& ctx) const
+{
+#ifdef GLS_ENABLE_TRACE
+	static const FloatFormat	highpFmt	(-126, 127, 23, true,
+											 tcu::MAYBE,
+											 tcu::YES,
+											 tcu::MAYBE);
+	EvalContext					newCtx		(ctx.format, ctx.floatPrecision,
+											 ctx.env, ctx.callDepth + 1);
+	const IVal					ret			= this->doEvaluate(newCtx);
+
+	if (isTypeValid<T>())
+	{
+		std::cerr << string(ctx.callDepth, ' ');
+		this->printExpr(std::cerr);
+		std::cerr << " -> " << intervalToString<T>(highpFmt, ret) << std::endl;
+	}
+	return ret;
+#else
+	return this->doEvaluate(ctx);
+#endif
+}
+
+template <typename T>
+class ExprPBase : public SharedPtr<const Expr<T> >
+{
+public:
+};
+
+ostream& operator<< (ostream& os, const ExprBase& expr)
+{
+	expr.printExpr(os);
+	return os;
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Shared pointer to an expression of a container type.
+ *
+ * Container types (i.e. vectors and matrices) support the subscription
+ * operator. This class provides a bit of syntactic sugar to allow us to use
+ * the C++ subscription operator to create a subscription expression.
+ *//*--------------------------------------------------------------------*/
+template <typename T>
+class ContainerExprPBase : public ExprPBase<T>
+{
+public:
+	ExprP<typename T::Element>	operator[]	(int i) const;
+};
+
+template <typename T>
+class ExprP : public ExprPBase<T> {};
+
+// We treat Voids as containers since the dummy parameters in generalized
+// vector functions are represented as Voids.
+template <>
+class ExprP<Void> : public ContainerExprPBase<Void> {};
+
+template <typename T, int Size>
+class ExprP<Vector<T, Size> > : public ContainerExprPBase<Vector<T, Size> > {};
+
+template <typename T, int Rows, int Cols>
+class ExprP<Matrix<T, Rows, Cols> > : public ContainerExprPBase<Matrix<T, Rows, Cols> > {};
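+
+// With these specializations, if `v` is an ExprP<Vector<float, 4> >, then v[2]
+// yields an ExprP<float> denoting the corresponding GLSL component selection.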
+
+template <typename T> ExprP<T> exprP (void)
+{
+	return ExprP<T>();
+}
+
+template <typename T>
+ExprP<T> exprP (const SharedPtr<const Expr<T> >& ptr)
+{
+	ExprP<T> ret;
+	static_cast<SharedPtr<const Expr<T> >&>(ret) = ptr;
+	return ret;
+}
+
+template <typename T>
+ExprP<T> exprP (const Expr<T>* ptr)
+{
+	return exprP(SharedPtr<const Expr<T> >(ptr));
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief A shared pointer to a variable expression.
+ *
+ * This is just a narrowing of ExprP for the operations that require a variable
+ * instead of an arbitrary expression.
+ *
+ *//*--------------------------------------------------------------------*/
+template <typename T>
+class VariableP : public SharedPtr<const Variable<T> >
+{
+public:
+	typedef		SharedPtr<const Variable<T> >	Super;
+	explicit	VariableP	(const Variable<T>* ptr) : Super(ptr) {}
+				VariableP	(void) {}
+				VariableP	(const Super& ptr) : Super(ptr) {}
+
+	operator	ExprP<T>	(void) const { return exprP(SharedPtr<const Expr<T> >(*this)); }
+};
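+
+// The implicit conversion to ExprP<T> lets a VariableP<float> be used directly
+// as an operand of the expression operators declared below.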
+
+/*--------------------------------------------------------------------*//*!
+ * \name Syntactic sugar operators for expressions.
+ *
+ * @{
+ *
+ * These operators allow the use of C++ syntax to construct GLSL expressions
+ * containing operators: e.g. "a+b" creates an addition expression with
+ * operands a and b, and so on.
+ *
+ *//*--------------------------------------------------------------------*/
+ExprP<float>						operator-(const ExprP<float>&						arg0);
+ExprP<float>						operator+(const ExprP<float>&						arg0,
+											  const ExprP<float>&						arg1);
+ExprP<float>						operator-(const ExprP<float>&						arg0,
+											  const ExprP<float>&						arg1);
+ExprP<float>						operator*(const ExprP<float>&						arg0,
+											  const ExprP<float>&						arg1);
+ExprP<float>						operator/(const ExprP<float>&						arg0,
+											  const ExprP<float>&						arg1);
+template<int Size>
+ExprP<Vector<float, Size> >			operator-(const ExprP<Vector<float, Size> >&		arg0);
+template<int Size>
+ExprP<Vector<float, Size> >			operator*(const ExprP<Vector<float, Size> >&		arg0,
+											  const ExprP<float>&						arg1);
+template<int Size>
+ExprP<Vector<float, Size> >			operator*(const ExprP<Vector<float, Size> >&		arg0,
+											  const ExprP<Vector<float, Size> >&		arg1);
+template<int Size>
+ExprP<Vector<float, Size> >			operator-(const ExprP<Vector<float, Size> >&		arg0,
+											  const ExprP<Vector<float, Size> >&		arg1);
+template<int Left, int Mid, int Right>
+ExprP<Matrix<float, Left, Right> >	operator* (const ExprP<Matrix<float, Left, Mid> >&	left,
+											   const ExprP<Matrix<float, Mid, Right> >&	right);
+template<int Rows, int Cols>
+ExprP<Vector<float, Rows> >			operator* (const ExprP<Vector<float, Cols> >&		left,
+											   const ExprP<Matrix<float, Rows, Cols> >&	right);
+template<int Rows, int Cols>
+ExprP<Vector<float, Cols> >			operator* (const ExprP<Matrix<float, Rows, Cols> >&	left,
+											   const ExprP<Vector<float, Rows> >&		right);
+template<int Rows, int Cols>
+ExprP<Matrix<float, Rows, Cols> >	operator* (const ExprP<Matrix<float, Rows, Cols> >&	left,
+											   const ExprP<float>&						right);
+template<int Rows, int Cols>
+ExprP<Matrix<float, Rows, Cols> >	operator+ (const ExprP<Matrix<float, Rows, Cols> >&	left,
+											   const ExprP<Matrix<float, Rows, Cols> >&	right);
+template<int Rows, int Cols>
+ExprP<Matrix<float, Rows, Cols> >	operator- (const ExprP<Matrix<float, Rows, Cols> >&	mat);
+
+//! @}
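+
+// For instance, with ExprP<float> values a and b, the C++ expression
+// (a + b) * constant(0.5f) builds an AST for the corresponding GLSL expression
+// rather than computing a float; it is evaluated over intervals later via
+// Expr<float>::evaluate(). (constant() is defined below.)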
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Variable expression.
+ *
+ * A variable is evaluated by looking up its range of possible values from an
+ * environment.
+ *//*--------------------------------------------------------------------*/
+template <typename T>
+class Variable : public Expr<T>
+{
+public:
+	typedef typename Expr<T>::IVal IVal;
+
+					Variable	(const string& name) : m_name (name) {}
+	string			getName		(void)							const { return m_name; }
+
+protected:
+	void			doPrintExpr	(ostream& os)					const { os << m_name; }
+	IVal			doEvaluate	(const EvalContext& ctx)		const
+	{
+		return ctx.env.lookup<T>(*this);
+	}
+
+private:
+	string	m_name;
+};
+
+template <typename T>
+VariableP<T> variable (const string& name)
+{
+	return VariableP<T>(new Variable<T>(name));
+}
+
+template <typename T>
+VariableP<T> bindExpression (const string& name, ExpandContext& ctx, const ExprP<T>& expr)
+{
+	VariableP<T> var = ctx.genSym<T>(name);
+	ctx.addStatement(variableDeclaration(var, expr));
+	return var;
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Constant expression.
+ *
+ * A constant is evaluated by rounding it to a set of possible values allowed
+ * by the current floating point precision.
+ *//*--------------------------------------------------------------------*/
+template <typename T>
+class Constant : public Expr<T>
+{
+public:
+	typedef typename Expr<T>::IVal IVal;
+
+			Constant		(const T& value) : m_value(value) {}
+
+protected:
+	void	doPrintExpr		(ostream& os) const			{ os << m_value; }
+	IVal	doEvaluate		(const EvalContext&) const	{ return makeIVal(m_value); }
+
+private:
+	T		m_value;
+};
+
+template <typename T>
+ExprP<T> constant (const T& value)
+{
+	return exprP(new Constant<T>(value));
+}
+
+//! Return a reference to a singleton void constant.
+const ExprP<Void>& voidP (void)
+{
+	static const ExprP<Void> singleton = constant(Void());
+
+	return singleton;
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Four-element tuple.
+ *
+ * This is used wherever we need one value for each possible function
+ * parameter. Currently the maximum supported number of parameters is four.
+ *//*--------------------------------------------------------------------*/
+template <typename T0 = Void, typename T1 = Void, typename T2 = Void, typename T3 = Void>
+struct Tuple4
+{
+	explicit Tuple4 (const T0& e0 = T0(),
+					 const T1& e1 = T1(),
+					 const T2& e2 = T2(),
+					 const T3& e3 = T3())
+		: a	(e0)
+		, b	(e1)
+		, c	(e2)
+		, d	(e3)
+	{
+	}
+
+	T0 a;
+	T1 b;
+	T2 c;
+	T3 d;
+};
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Function signature.
+ *
+ * This is a purely compile-time structure used to bundle all types in a
+ * function signature together. This makes passing the signature around in
+ * templates easier, since we only need to take and pass a single Sig instead
+ * of a bunch of parameter types and a return type.
+ *
+ *//*--------------------------------------------------------------------*/
+template <typename R,
+		  typename P0 = Void, typename P1 = Void,
+		  typename P2 = Void, typename P3 = Void>
+struct Signature
+{
+	typedef R							Ret;
+	typedef P0							Arg0;
+	typedef P1							Arg1;
+	typedef P2							Arg2;
+	typedef P3							Arg3;
+	typedef typename Traits<Ret>::IVal	IRet;
+	typedef typename Traits<Arg0>::IVal	IArg0;
+	typedef typename Traits<Arg1>::IVal	IArg1;
+	typedef typename Traits<Arg2>::IVal	IArg2;
+	typedef typename Traits<Arg3>::IVal	IArg3;
+
+	typedef Tuple4<	const Arg0&,	const Arg1&,	const Arg2&,	const Arg3&>	Args;
+	typedef Tuple4<	const IArg0&,	const IArg1&,	const IArg2&,	const IArg3&>	IArgs;
+	typedef Tuple4<	ExprP<Arg0>,	ExprP<Arg1>,	ExprP<Arg2>,	ExprP<Arg3> >	ArgExprs;
+};
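+
+// For example, Signature<float, float, float> describes a binary float
+// function: Ret, Arg0 and Arg1 are float (so IRet, IArg0 and IArg1 are
+// Intervals), while Arg2 and Arg3 remain Void.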
+
+typedef vector<const ExprBase*> BaseArgExprs;
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Type-independent operations for function objects.
+ *
+ *//*--------------------------------------------------------------------*/
+class FuncBase
+{
+public:
+	virtual			~FuncBase				(void)					{}
+	virtual string	getName					(void)					const = 0;
+	//! Name of the extension that this function requires, or empty if none.
+	virtual string	getRequiredExtension	(void)					const { return ""; }
+	virtual void	print					(ostream&,
+											 const BaseArgExprs&)	const = 0;
+	//! Index of output parameter, or -1 if none of the parameters is output.
+	virtual int		getOutParamIndex		(void)					const { return -1; }
+
+	void			printDefinition			(ostream& os)			const
+	{
+		doPrintDefinition(os);
+	}
+
+	void				getUsedFuncs		(FuncSet& dst) const
+	{
+		this->doGetUsedFuncs(dst);
+	}
+
+protected:
+	virtual void	doPrintDefinition		(ostream& os)			const = 0;
+	virtual void	doGetUsedFuncs			(FuncSet& dst)			const = 0;
+};
+
+typedef Tuple4<string, string, string, string> ParamNames;
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Function objects.
+ *
+ * Each Func object represents a GLSL function. It can be applied to interval
+ * arguments, and it returns an interval that is a conservative
+ * approximation of the image of the GLSL function over the argument
+ * intervals. That is, it is given a set of possible arguments and it returns
+ * the set of possible values.
+ *
+ *//*--------------------------------------------------------------------*/
+template <typename Sig_>
+class Func : public FuncBase
+{
+public:
+	typedef Sig_										Sig;
+	typedef typename Sig::Ret							Ret;
+	typedef typename Sig::Arg0							Arg0;
+	typedef typename Sig::Arg1							Arg1;
+	typedef typename Sig::Arg2							Arg2;
+	typedef typename Sig::Arg3							Arg3;
+	typedef typename Sig::IRet							IRet;
+	typedef typename Sig::IArg0							IArg0;
+	typedef typename Sig::IArg1							IArg1;
+	typedef typename Sig::IArg2							IArg2;
+	typedef typename Sig::IArg3							IArg3;
+	typedef typename Sig::Args							Args;
+	typedef typename Sig::IArgs							IArgs;
+	typedef typename Sig::ArgExprs						ArgExprs;
+
+	void				print			(ostream&			os,
+										 const BaseArgExprs& args)				const
+	{
+		this->doPrint(os, args);
+	}
+
+	IRet				apply			(const EvalContext&	ctx,
+										 const IArg0&		arg0 = IArg0(),
+										 const IArg1&		arg1 = IArg1(),
+										 const IArg2&		arg2 = IArg2(),
+										 const IArg3&		arg3 = IArg3())		const
+	{
+		return this->applyArgs(ctx, IArgs(arg0, arg1, arg2, arg3));
+	}
+	IRet				applyArgs		(const EvalContext&	ctx,
+										 const IArgs&		args)				const
+	{
+		return this->doApply(ctx, args);
+	}
+	ExprP<Ret>			operator()		(const ExprP<Arg0>&		arg0 = voidP(),
+										 const ExprP<Arg1>&		arg1 = voidP(),
+										 const ExprP<Arg2>&		arg2 = voidP(),
+										 const ExprP<Arg3>&		arg3 = voidP())		const;
+
+	const ParamNames&	getParamNames	(void)									const
+	{
+		return this->doGetParamNames();
+	}
+
+protected:
+	virtual IRet		doApply			(const EvalContext&,
+										 const IArgs&)							const = 0;
+	virtual void		doPrint			(ostream& os, const BaseArgExprs& args)	const
+	{
+		os << getName() << "(";
+
+		if (isTypeValid<Arg0>())
+			os << *args[0];
+
+		if (isTypeValid<Arg1>())
+			os << ", " << *args[1];
+
+		if (isTypeValid<Arg2>())
+			os << ", " << *args[2];
+
+		if (isTypeValid<Arg3>())
+			os << ", " << *args[3];
+
+		os << ")";
+	}
+
+	virtual const ParamNames&	doGetParamNames	(void)							const
+	{
+		static ParamNames	names	("a", "b", "c", "d");
+		return names;
+	}
+};
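+
+// Usage sketch: for a binary function object such as Functions::Add (defined
+// below), instance<Functions::Add>().apply(ctx, Interval(1.0), Interval(2.0))
+// returns an interval that conservatively covers every sum a conforming
+// implementation may produce for operands drawn from those input intervals.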
+
+template <typename Sig>
+class Apply : public Expr<typename Sig::Ret>
+{
+public:
+	typedef typename Sig::Ret				Ret;
+	typedef typename Sig::Arg0				Arg0;
+	typedef typename Sig::Arg1				Arg1;
+	typedef typename Sig::Arg2				Arg2;
+	typedef typename Sig::Arg3				Arg3;
+	typedef typename Expr<Ret>::Val			Val;
+	typedef typename Expr<Ret>::IVal		IVal;
+	typedef Func<Sig>						ApplyFunc;
+	typedef typename ApplyFunc::ArgExprs	ArgExprs;
+
+						Apply	(const ApplyFunc&		func,
+								 const ExprP<Arg0>&		arg0 = voidP(),
+								 const ExprP<Arg1>&		arg1 = voidP(),
+								 const ExprP<Arg2>&		arg2 = voidP(),
+								 const ExprP<Arg3>&		arg3 = voidP())
+							: m_func	(func),
+							  m_args	(arg0, arg1, arg2, arg3) {}
+
+						Apply	(const ApplyFunc&	func,
+								 const ArgExprs&	args)
+							: m_func	(func),
+							  m_args	(args) {}
+protected:
+	void				doPrintExpr			(ostream& os) const
+	{
+		BaseArgExprs	args;
+		args.push_back(m_args.a.get());
+		args.push_back(m_args.b.get());
+		args.push_back(m_args.c.get());
+		args.push_back(m_args.d.get());
+		m_func.print(os, args);
+	}
+
+	IVal				doEvaluate		(const EvalContext& ctx) const
+	{
+		return m_func.apply(ctx,
+							m_args.a->evaluate(ctx), m_args.b->evaluate(ctx),
+							m_args.c->evaluate(ctx), m_args.d->evaluate(ctx));
+	}
+
+	void				doGetUsedFuncs	(FuncSet& dst) const
+	{
+		m_func.getUsedFuncs(dst);
+		m_args.a->getUsedFuncs(dst);
+		m_args.b->getUsedFuncs(dst);
+		m_args.c->getUsedFuncs(dst);
+		m_args.d->getUsedFuncs(dst);
+	}
+
+	const ApplyFunc&	m_func;
+	ArgExprs			m_args;
+};
+
+template<typename T>
+class Alternatives : public Func<Signature<T, T, T> >
+{
+public:
+	typedef typename	Alternatives::Sig		Sig;
+
+protected:
+	typedef typename	Alternatives::IRet		IRet;
+	typedef typename	Alternatives::IArgs		IArgs;
+
+	virtual string		getName				(void) const			{ return "alternatives"; }
+	virtual void		doPrintDefinition	(std::ostream&) const	{}
+	void				doGetUsedFuncs		(FuncSet&) const		{}
+
+	virtual IRet		doApply				(const EvalContext&, const IArgs& args) const
+	{
+		return unionIVal<T>(args.a, args.b);
+	}
+
+	virtual void		doPrint				(ostream& os, const BaseArgExprs& args)	const
+	{
+		os << "{" << *args[0] << " | " << *args[1] << "}";
+	}
+};
+
+template <typename Sig>
+ExprP<typename Sig::Ret> createApply (const Func<Sig>&						func,
+									  const typename Func<Sig>::ArgExprs&	args)
+{
+	return exprP(new Apply<Sig>(func, args));
+}
+
+template <typename Sig>
+ExprP<typename Sig::Ret> createApply (
+	const Func<Sig>&			func,
+	const ExprP<typename Sig::Arg0>&	arg0 = voidP(),
+	const ExprP<typename Sig::Arg1>&	arg1 = voidP(),
+	const ExprP<typename Sig::Arg2>&	arg2 = voidP(),
+	const ExprP<typename Sig::Arg3>&	arg3 = voidP())
+{
+	return exprP(new Apply<Sig>(func, arg0, arg1, arg2, arg3));
+}
+
+template <typename Sig>
+ExprP<typename Sig::Ret> Func<Sig>::operator() (const ExprP<typename Sig::Arg0>& arg0,
+												const ExprP<typename Sig::Arg1>& arg1,
+												const ExprP<typename Sig::Arg2>& arg2,
+												const ExprP<typename Sig::Arg3>& arg3) const
+{
+	return createApply(*this, arg0, arg1, arg2, arg3);
+}
+
+template <typename F>
+ExprP<typename F::Ret> app (const ExprP<typename F::Arg0>& arg0 = voidP(),
+							const ExprP<typename F::Arg1>& arg1 = voidP(),
+							const ExprP<typename F::Arg2>& arg2 = voidP(),
+							const ExprP<typename F::Arg3>& arg3 = voidP())
+{
+	return createApply(instance<F>(), arg0, arg1, arg2, arg3);
+}
+
+template <typename F>
+typename F::IRet call (const EvalContext&			ctx,
+					   const typename F::IArg0&		arg0 = Void(),
+					   const typename F::IArg1&		arg1 = Void(),
+					   const typename F::IArg2&		arg2 = Void(),
+					   const typename F::IArg3&		arg3 = Void())
+{
+	return instance<F>().apply(ctx, arg0, arg1, arg2, arg3);
+}
+
+template <typename T>
+ExprP<T> alternatives (const ExprP<T>& arg0,
+					   const ExprP<T>& arg1)
+{
+	return createApply<typename Alternatives<T>::Sig>(instance<Alternatives<T> >(), arg0, arg1);
+}
+
+template <typename Sig>
+class ApplyVar : public Apply<Sig>
+{
+public:
+	typedef typename Sig::Ret				Ret;
+	typedef typename Sig::Arg0				Arg0;
+	typedef typename Sig::Arg1				Arg1;
+	typedef typename Sig::Arg2				Arg2;
+	typedef typename Sig::Arg3				Arg3;
+	typedef typename Expr<Ret>::Val			Val;
+	typedef typename Expr<Ret>::IVal		IVal;
+	typedef Func<Sig>						ApplyFunc;
+	typedef typename ApplyFunc::ArgExprs	ArgExprs;
+
+						ApplyVar	(const ApplyFunc&			func,
+									 const VariableP<Arg0>&		arg0,
+									 const VariableP<Arg1>&		arg1,
+									 const VariableP<Arg2>&		arg2,
+									 const VariableP<Arg3>&		arg3)
+							: Apply<Sig> (func, arg0, arg1, arg2, arg3) {}
+protected:
+	IVal				doEvaluate		(const EvalContext& ctx) const
+	{
+		const Variable<Arg0>&	var0 = static_cast<const Variable<Arg0>&>(*this->m_args.a);
+		const Variable<Arg1>&	var1 = static_cast<const Variable<Arg1>&>(*this->m_args.b);
+		const Variable<Arg2>&	var2 = static_cast<const Variable<Arg2>&>(*this->m_args.c);
+		const Variable<Arg3>&	var3 = static_cast<const Variable<Arg3>&>(*this->m_args.d);
+		return this->m_func.apply(ctx,
+								  ctx.env.lookup(var0), ctx.env.lookup(var1),
+								  ctx.env.lookup(var2), ctx.env.lookup(var3));
+	}
+};
+
+template <typename Sig>
+ExprP<typename Sig::Ret> applyVar (const Func<Sig>&						func,
+								   const VariableP<typename Sig::Arg0>&	arg0,
+								   const VariableP<typename Sig::Arg1>&	arg1,
+								   const VariableP<typename Sig::Arg2>&	arg2,
+								   const VariableP<typename Sig::Arg3>&	arg3)
+{
+	return exprP(new ApplyVar<Sig>(func, arg0, arg1, arg2, arg3));
+}
+
+template <typename Sig_>
+class DerivedFunc : public Func<Sig_>
+{
+public:
+	typedef typename DerivedFunc::ArgExprs		ArgExprs;
+	typedef typename DerivedFunc::IRet			IRet;
+	typedef typename DerivedFunc::IArgs			IArgs;
+	typedef typename DerivedFunc::Ret			Ret;
+	typedef typename DerivedFunc::Arg0			Arg0;
+	typedef typename DerivedFunc::Arg1			Arg1;
+	typedef typename DerivedFunc::Arg2			Arg2;
+	typedef typename DerivedFunc::Arg3			Arg3;
+	typedef typename DerivedFunc::IArg0			IArg0;
+	typedef typename DerivedFunc::IArg1			IArg1;
+	typedef typename DerivedFunc::IArg2			IArg2;
+	typedef typename DerivedFunc::IArg3			IArg3;
+
+protected:
+	void						doPrintDefinition	(ostream& os) const
+	{
+		const ParamNames&	paramNames	= this->getParamNames();
+
+		initialize();
+
+		os << dataTypeNameOf<Ret>() << " " << this->getName()
+			<< "(";
+		if (isTypeValid<Arg0>())
+			os << dataTypeNameOf<Arg0>() << " " << paramNames.a;
+		if (isTypeValid<Arg1>())
+			os << ", " << dataTypeNameOf<Arg1>() << " " << paramNames.b;
+		if (isTypeValid<Arg2>())
+			os << ", " << dataTypeNameOf<Arg2>() << " " << paramNames.c;
+		if (isTypeValid<Arg3>())
+			os << ", " << dataTypeNameOf<Arg3>() << " " << paramNames.d;
+		os << ")\n{\n";
+
+		for (size_t ndx = 0; ndx < m_body.size(); ++ndx)
+			os << *m_body[ndx];
+		os << "return " << *m_ret << ";\n";
+		os << "}\n";
+	}
+
+	IRet						doApply			(const EvalContext&	ctx,
+												 const IArgs&		args) const
+	{
+		Environment	funEnv;
+		IArgs&		mutArgs		= const_cast<IArgs&>(args);
+		IRet		ret;
+
+		initialize();
+
+		funEnv.bind(*m_var0, args.a);
+		funEnv.bind(*m_var1, args.b);
+		funEnv.bind(*m_var2, args.c);
+		funEnv.bind(*m_var3, args.d);
+
+		{
+			EvalContext	funCtx(ctx.format, ctx.floatPrecision, funEnv, ctx.callDepth);
+
+			for (size_t ndx = 0; ndx < m_body.size(); ++ndx)
+				m_body[ndx]->execute(funCtx);
+
+			ret = m_ret->evaluate(funCtx);
+		}
+
+		// \todo [lauri] Store references instead of values in environment
+		const_cast<IArg0&>(mutArgs.a) = funEnv.lookup(*m_var0);
+		const_cast<IArg1&>(mutArgs.b) = funEnv.lookup(*m_var1);
+		const_cast<IArg2&>(mutArgs.c) = funEnv.lookup(*m_var2);
+		const_cast<IArg3&>(mutArgs.d) = funEnv.lookup(*m_var3);
+
+		return ret;
+	}
+
+	void						doGetUsedFuncs	(FuncSet& dst) const
+	{
+		initialize();
+		if (dst.insert(this).second)
+		{
+			for (size_t ndx = 0; ndx < m_body.size(); ++ndx)
+				m_body[ndx]->getUsedFuncs(dst);
+			m_ret->getUsedFuncs(dst);
+		}
+	}
+
+	virtual ExprP<Ret>			doExpand		(ExpandContext& ctx, const ArgExprs& args_) const = 0;
+
+	// These are transparently initialized when first needed. They cannot be
+	// initialized in the constructor because they depend on the doExpand
+	// method of the subclass.
+
+	mutable VariableP<Arg0>		m_var0;
+	mutable VariableP<Arg1>		m_var1;
+	mutable VariableP<Arg2>		m_var2;
+	mutable VariableP<Arg3>		m_var3;
+	mutable vector<StatementP>	m_body;
+	mutable ExprP<Ret>			m_ret;
+
+private:
+
+	void				initialize		(void)	const
+	{
+		if (!m_ret)
+		{
+			const ParamNames&	paramNames	= this->getParamNames();
+			Counter				symCounter;
+			ExpandContext		ctx			(symCounter);
+			ArgExprs			args;
+
+			args.a	= m_var0 = variable<Arg0>(paramNames.a);
+			args.b	= m_var1 = variable<Arg1>(paramNames.b);
+			args.c	= m_var2 = variable<Arg2>(paramNames.c);
+			args.d	= m_var3 = variable<Arg3>(paramNames.d);
+
+			m_ret	= this->doExpand(ctx, args);
+			m_body	= ctx.getStatements();
+		}
+	}
+};
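+
+// Subclasses of DerivedFunc (typically generated with the DEFINE_DERIVED*
+// macros further below) only provide doExpand(); printing, interval evaluation
+// and used-function collection are all derived from the expanded body.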
+
+template <typename Sig>
+class PrimitiveFunc : public Func<Sig>
+{
+public:
+	typedef typename PrimitiveFunc::Ret			Ret;
+	typedef typename PrimitiveFunc::ArgExprs	ArgExprs;
+
+protected:
+	void	doPrintDefinition	(ostream&) const	{}
+	void	doGetUsedFuncs		(FuncSet&) const	{}
+};
+
+template <typename T>
+class Cond : public PrimitiveFunc<Signature<T, bool, T, T> >
+{
+public:
+	typedef typename Cond::IArgs	IArgs;
+	typedef typename Cond::IRet		IRet;
+
+	string	getName	(void) const
+	{
+		return "_cond";
+	}
+
+protected:
+
+	void	doPrint	(ostream& os, const BaseArgExprs& args) const
+	{
+		os << "(" << *args[0] << " ? " << *args[1] << " : " << *args[2] << ")";
+	}
+
+	IRet	doApply	(const EvalContext&, const IArgs& iargs) const
+	{
+		IRet	ret;
+
+		if (iargs.a.contains(true))
+			ret = unionIVal<T>(ret, iargs.b);
+
+		if (iargs.a.contains(false))
+			ret = unionIVal<T>(ret, iargs.c);
+
+		return ret;
+	}
+};
+
+template <typename T>
+class CompareOperator : public PrimitiveFunc<Signature<bool, T, T> >
+{
+public:
+	typedef typename CompareOperator::IArgs	IArgs;
+	typedef typename CompareOperator::IArg0	IArg0;
+	typedef typename CompareOperator::IArg1	IArg1;
+	typedef typename CompareOperator::IRet	IRet;
+
+protected:
+	void			doPrint	(ostream& os, const BaseArgExprs& args) const
+	{
+		os << "(" << *args[0] << getSymbol() << *args[1] << ")";
+	}
+
+	Interval		doApply	(const EvalContext&, const IArgs& iargs) const
+	{
+		const IArg0&	arg0 = iargs.a;
+		const IArg1&	arg1 = iargs.b;
+		IRet	ret;
+
+		if (canSucceed(arg0, arg1))
+			ret |= true;
+		if (canFail(arg0, arg1))
+			ret |= false;
+
+		return ret;
+	}
+
+	virtual string	getSymbol	(void) const = 0;
+	virtual bool	canSucceed	(const IArg0&, const IArg1&) const = 0;
+	virtual bool	canFail		(const IArg0&, const IArg1&) const = 0;
+};
+
+template <typename T>
+class LessThan : public CompareOperator<T>
+{
+public:
+	string	getName		(void) const									{ return "lessThan"; }
+
+protected:
+	string	getSymbol	(void) const									{ return "<";		}
+
+	bool	canSucceed	(const Interval& a, const Interval& b) const
+	{
+		return (a.lo() < b.hi());
+	}
+
+	bool	canFail		(const Interval& a, const Interval& b) const
+	{
+		return !(a.hi() < b.lo());
+	}
+};
+
+template <typename T>
+ExprP<bool> operator< (const ExprP<T>& a, const ExprP<T>& b)
+{
+	return app<LessThan<T> >(a, b);
+}
+
+template <typename T>
+ExprP<T> cond (const ExprP<bool>&	test,
+			   const ExprP<T>&		consequent,
+			   const ExprP<T>&		alternative)
+{
+	return app<Cond<T> >(test, consequent, alternative);
+}
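+
+// E.g. cond(x < y, x, y) mirrors GLSL's (x < y ? x : y); because the condition
+// is evaluated over intervals, the result may be the union of both branches
+// whenever the comparison could go either way.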
+
+/*--------------------------------------------------------------------*//*!
+ *
+ * @}
+ *
+ *//*--------------------------------------------------------------------*/
+
+class FloatFunc1 : public PrimitiveFunc<Signature<float, float> >
+{
+protected:
+	Interval			doApply			(const EvalContext& ctx, const IArgs& iargs) const
+	{
+		return this->applyMonotone(ctx, iargs.a);
+	}
+
+	Interval			applyMonotone	(const EvalContext& ctx, const Interval& iarg0) const
+	{
+		Interval ret;
+
+		TCU_INTERVAL_APPLY_MONOTONE1(ret, arg0, iarg0, val,
+									 TCU_SET_INTERVAL(val, point,
+													  point = this->applyPoint(ctx, arg0)));
+
+		ret |= innerExtrema(ctx, iarg0);
+		ret &= (this->getCodomain() | TCU_NAN);
+
+		return ctx.format.convert(ret);
+	}
+
+	virtual Interval	innerExtrema	(const EvalContext&, const Interval&) const
+	{
+		return Interval(); // empty interval, i.e. no extrema
+	}
+
+	virtual Interval	applyPoint		(const EvalContext& ctx, double arg0) const
+	{
+		const double	exact	= this->applyExact(arg0);
+		const double	prec	= this->precision(ctx, exact, arg0);
+
+		return exact + Interval(-prec, prec);
+	}
+
+	virtual double		applyExact		(double) const
+	{
+		TCU_THROW(InternalError, "Cannot apply");
+	}
+
+	virtual Interval	getCodomain		(void) const
+	{
+		return Interval::unbounded(true);
+	}
+
+	virtual double		precision		(const EvalContext& ctx, double, double) const = 0;
+};
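+
+// Concrete unary functions usually override only applyExact() and
+// precision(); e.g. InverseSqrt below computes 1.0 / sqrt(x) exactly and then
+// allows a 2 ULP error in the current FloatFormat (NaN for non-positive
+// inputs).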
+
+class CFloatFunc1 : public FloatFunc1
+{
+public:
+						CFloatFunc1	(const string& name, tcu::DoubleFunc1& func)
+							: m_name(name), m_func(func) {}
+
+	string				getName		(void) const		{ return m_name; }
+
+protected:
+	double				applyExact	(double x) const	{ return m_func(x); }
+
+	const string		m_name;
+	tcu::DoubleFunc1&	m_func;
+};
+
+class FloatFunc2 : public PrimitiveFunc<Signature<float, float, float> >
+{
+protected:
+	Interval			doApply			(const EvalContext&	ctx, const IArgs& iargs) const
+	{
+		return this->applyMonotone(ctx, iargs.a, iargs.b);
+	}
+
+	Interval			applyMonotone	(const EvalContext&	ctx,
+										 const Interval&	xi,
+										 const Interval&	yi) const
+	{
+		Interval reti;
+
+		TCU_INTERVAL_APPLY_MONOTONE2(reti, x, xi, y, yi, ret,
+									 TCU_SET_INTERVAL(ret, point,
+													  point = this->applyPoint(ctx, x, y)));
+		reti |= innerExtrema(ctx, xi, yi);
+		reti &= (this->getCodomain() | TCU_NAN);
+
+		return ctx.format.convert(reti);
+	}
+
+	virtual Interval	innerExtrema	(const EvalContext&,
+										 const Interval&,
+										 const Interval&) const
+	{
+		return Interval(); // empty interval, i.e. no extrema
+	}
+
+	virtual Interval	applyPoint		(const EvalContext&	ctx,
+										 double				x,
+										 double				y) const
+	{
+		const double exact	= this->applyExact(x, y);
+		const double prec	= this->precision(ctx, exact, x, y);
+
+		return exact + Interval(-prec, prec);
+	}
+
+	virtual double		applyExact		(double, double) const
+	{
+		TCU_THROW(InternalError, "Cannot apply");
+	}
+
+	virtual Interval	getCodomain		(void) const
+	{
+		return Interval::unbounded(true);
+	}
+
+	virtual double		precision		(const EvalContext&	ctx,
+										 double				ret,
+										 double				x,
+										 double				y) const = 0;
+};
+
+class CFloatFunc2 : public FloatFunc2
+{
+public:
+						CFloatFunc2	(const string&		name,
+									 tcu::DoubleFunc2&	func)
+							: m_name(name)
+							, m_func(func)
+	{
+	}
+
+	string				getName		(void) const						{ return m_name; }
+
+protected:
+	double				applyExact	(double x, double y) const			{ return m_func(x, y); }
+
+	const string		m_name;
+	tcu::DoubleFunc2&	m_func;
+};
+
+class InfixOperator : public FloatFunc2
+{
+protected:
+	virtual string	getSymbol		(void) const = 0;
+
+	void			doPrint			(ostream& os, const BaseArgExprs& args) const
+	{
+		os << "(" << *args[0] << " " << getSymbol() << " " << *args[1] << ")";
+	}
+
+	Interval		applyPoint		(const EvalContext&	ctx,
+									 double				x,
+									 double				y) const
+	{
+		const double exact	= this->applyExact(x, y);
+
+		// Allow either representable number on both sides of the exact value,
+		// but require exactly representable values to be preserved.
+		return ctx.format.roundOut(exact, !deIsInf(x) && !deIsInf(y));
+	}
+
+	double			precision		(const EvalContext&, double, double, double) const
+	{
+		return 0.0;
+	}
+};
+
+class FloatFunc3 : public PrimitiveFunc<Signature<float, float, float, float> >
+{
+protected:
+	Interval			doApply			(const EvalContext&	ctx, const IArgs& iargs) const
+	{
+		return this->applyMonotone(ctx, iargs.a, iargs.b, iargs.c);
+	}
+
+	Interval			applyMonotone	(const EvalContext&	ctx,
+										 const Interval&	xi,
+										 const Interval&	yi,
+										 const Interval&	zi) const
+	{
+		Interval reti;
+		TCU_INTERVAL_APPLY_MONOTONE3(reti, x, xi, y, yi, z, zi, ret,
+									 TCU_SET_INTERVAL(ret, point,
+													  point = this->applyPoint(ctx, x, y, z)));
+		return ctx.format.convert(reti);
+	}
+
+	virtual Interval	applyPoint		(const EvalContext&	ctx,
+										 double				x,
+										 double				y,
+										 double				z) const
+	{
+		const double exact	= this->applyExact(x, y, z);
+		const double prec	= this->precision(ctx, exact, x, y, z);
+		return exact + Interval(-prec, prec);
+	}
+
+	virtual double		applyExact		(double, double, double) const
+	{
+		TCU_THROW(InternalError, "Cannot apply");
+	}
+
+	virtual double		precision		(const EvalContext&	ctx,
+										 double				result,
+										 double				x,
+										 double				y,
+										 double				z) const = 0;
+};
+
+// We define syntactic sugar functions for expression constructors. Since
+// these have the same names as ordinary mathematical operations (sin, log
+// etc.), it's better to give them a dedicated namespace.
+namespace Functions
+{
+
+using namespace tcu;
+
+class Add : public InfixOperator
+{
+public:
+	string		getName		(void) const						{ return "add"; }
+	string		getSymbol	(void) const						{ return "+"; }
+
+	Interval	doApply		(const EvalContext&	ctx,
+							 const IArgs&		iargs) const
+	{
+		// Fast-path for common case
+		if (iargs.a.isOrdinary() && iargs.b.isOrdinary())
+		{
+			Interval ret;
+			TCU_SET_INTERVAL_BOUNDS(ret, sum,
+									sum = iargs.a.lo() + iargs.b.lo(),
+									sum = iargs.a.hi() + iargs.b.hi());
+			return ctx.format.convert(ctx.format.roundOut(ret, true));
+		}
+		return this->applyMonotone(ctx, iargs.a, iargs.b);
+	}
+
+protected:
+	double		applyExact	(double x, double y) const			{ return x + y; }
+};
+
+class Mul : public InfixOperator
+{
+public:
+	string		getName		(void) const									{ return "mul"; }
+	string		getSymbol	(void) const									{ return "*"; }
+
+	Interval	doApply		(const EvalContext&	ctx, const IArgs& iargs) const
+	{
+		Interval a = iargs.a;
+		Interval b = iargs.b;
+
+		// Fast-path for common case
+		if (a.isOrdinary() && b.isOrdinary())
+		{
+			Interval ret;
+			if (a.hi() < 0)
+			{
+				a = -a;
+				b = -b;
+			}
+			if (a.lo() >= 0 && b.lo() >= 0)
+			{
+				TCU_SET_INTERVAL_BOUNDS(ret, prod,
+										prod = iargs.a.lo() * iargs.b.lo(),
+										prod = iargs.a.hi() * iargs.b.hi());
+				return ctx.format.convert(ctx.format.roundOut(ret, true));
+			}
+			if (a.lo() >= 0 && b.hi() <= 0)
+			{
+				TCU_SET_INTERVAL_BOUNDS(ret, prod,
+										prod = iargs.a.hi() * iargs.b.lo(),
+										prod = iargs.a.lo() * iargs.b.hi());
+				return ctx.format.convert(ctx.format.roundOut(ret, true));
+			}
+		}
+		return this->applyMonotone(ctx, iargs.a, iargs.b);
+	}
+
+protected:
+	double		applyExact	(double x, double y) const						{ return x * y; }
+
+	Interval	innerExtrema(const EvalContext&, const Interval& xi, const Interval& yi) const
+	{
+		if (((xi.contains(-TCU_INFINITY) || xi.contains(TCU_INFINITY)) && yi.contains(0.0)) ||
+			((yi.contains(-TCU_INFINITY) || yi.contains(TCU_INFINITY)) && xi.contains(0.0)))
+			return Interval(TCU_NAN);
+
+		return Interval();
+	}
+};
+
+class Sub : public InfixOperator
+{
+public:
+	string		getName		(void) const				{ return "sub"; }
+	string		getSymbol	(void) const				{ return "-"; }
+
+	Interval	doApply		(const EvalContext&	ctx, const IArgs& iargs) const
+	{
+		// Fast-path for common case
+		if (iargs.a.isOrdinary() && iargs.b.isOrdinary())
+		{
+			Interval ret;
+
+			TCU_SET_INTERVAL_BOUNDS(ret, diff,
+									diff = iargs.a.lo() - iargs.b.hi(),
+									diff = iargs.a.hi() - iargs.b.lo());
+			return ctx.format.convert(ctx.format.roundOut(ret, true));
+
+		}
+		else
+		{
+			return this->applyMonotone(ctx, iargs.a, iargs.b);
+		}
+	}
+
+protected:
+	double		applyExact	(double x, double y) const	{ return x - y; }
+};
+
+class Negate : public FloatFunc1
+{
+public:
+	string	getName		(void) const									{ return "_negate"; }
+	void	doPrint		(ostream& os, const BaseArgExprs& args) const	{ os << "-" << *args[0]; }
+
+protected:
+	double	precision	(const EvalContext&, double, double) const		{ return 0.0; }
+	double	applyExact	(double x) const								{ return -x; }
+};
+
+class Div : public InfixOperator
+{
+public:
+	string		getName			(void) const						{ return "div"; }
+
+protected:
+	string		getSymbol		(void) const						{ return "/"; }
+
+	Interval	innerExtrema	(const EvalContext&,
+								 const Interval&		nom,
+								 const Interval&		den) const
+	{
+		Interval ret;
+
+		if (den.contains(0.0))
+		{
+			if (nom.contains(0.0))
+				ret |= TCU_NAN;
+
+			if (nom.lo() < 0.0 || nom.hi() > 0.0)
+				ret |= Interval::unbounded();
+		}
+
+		return ret;
+	}
+
+	double		applyExact		(double x, double y) const { return x / y; }
+
+	Interval	applyPoint		(const EvalContext&	ctx, double x, double y) const
+	{
+		Interval ret = FloatFunc2::applyPoint(ctx, x, y);
+
+		if (!deIsInf(x) && !deIsInf(y) && y != 0.0)
+		{
+			const Interval dst = ctx.format.convert(ret);
+			if (dst.contains(-TCU_INFINITY)) ret |= -ctx.format.getMaxValue();
+			if (dst.contains(+TCU_INFINITY)) ret |= +ctx.format.getMaxValue();
+		}
+
+		return ret;
+	}
+
+	double		precision		(const EvalContext& ctx, double ret, double, double den) const
+	{
+		const FloatFormat&	fmt		= ctx.format;
+
+		// \todo [2014-03-05 lauri] Check that the limits in GLSL 3.10 are actually correct.
+		// For now, we assume that division's precision is 2.5 ULP when the value is within
+		// [2^MINEXP, 2^MAXEXP-1]
+
+		if (den == 0.0)
+			return 0.0; // Result must be exactly inf
+		else if (de::inBounds(deAbs(den),
+							  deLdExp(1.0, fmt.getMinExp()),
+							  deLdExp(1.0, fmt.getMaxExp() - 1)))
+			return fmt.ulp(ret, 2.5);
+		else
+			return TCU_INFINITY; // Can be any number, but must be a number.
+	}
+};
+
+class InverseSqrt : public FloatFunc1
+{
+public:
+	string		getName		(void) const							{ return "inversesqrt"; }
+
+protected:
+	double		applyExact	(double x) const						{ return 1.0 / deSqrt(x); }
+
+	double		precision	(const EvalContext& ctx, double ret, double x) const
+	{
+		return x <= 0 ? TCU_NAN : ctx.format.ulp(ret, 2.0);
+	}
+
+	Interval	getCodomain	(void) const
+	{
+		return Interval(0.0, TCU_INFINITY);
+	}
+};
+
+class ExpFunc : public CFloatFunc1
+{
+public:
+				ExpFunc		(const string& name, DoubleFunc1& func)
+					: CFloatFunc1(name, func) {}
+protected:
+	double		precision	(const EvalContext& ctx, double ret, double x) const
+	{
+		switch (ctx.floatPrecision)
+		{
+			case glu::PRECISION_HIGHP:
+				return ctx.format.ulp(ret, 3.0 + 2.0 * deAbs(x));
+			case glu::PRECISION_MEDIUMP:
+				return ctx.format.ulp(ret, 2.0 + 2.0 * deAbs(x));
+			case glu::PRECISION_LOWP:
+				return ctx.format.ulp(ret, 2.0);
+			default:
+				DE_FATAL("Impossible");
+		}
+		return 0;
+	}
+
+	Interval	getCodomain	(void) const
+	{
+		return Interval(0.0, TCU_INFINITY);
+	}
+};
+
+class Exp2	: public ExpFunc	{ public: Exp2 (void)	: ExpFunc("exp2", deExp2) {} };
+class Exp	: public ExpFunc	{ public: Exp (void)	: ExpFunc("exp", deExp) {} };
+
+ExprP<float> exp2	(const ExprP<float>& x)	{ return app<Exp2>(x); }
+ExprP<float> exp	(const ExprP<float>& x)	{ return app<Exp>(x); }
+
+class LogFunc : public CFloatFunc1
+{
+public:
+				LogFunc		(const string& name, DoubleFunc1& func)
+					: CFloatFunc1(name, func) {}
+
+protected:
+	double		precision	(const EvalContext& ctx, double ret, double x) const
+	{
+		if (x <= 0)
+			return TCU_NAN;
+
+		switch (ctx.floatPrecision)
+		{
+			case glu::PRECISION_HIGHP:
+				return (0.5 <= x && x <= 2.0) ? deLdExp(1.0, -21) : ctx.format.ulp(ret, 3.0);
+			case glu::PRECISION_MEDIUMP:
+				return (0.5 <= x && x <= 2.0) ? deLdExp(1.0, -7) : ctx.format.ulp(ret, 2.0);
+			case glu::PRECISION_LOWP:
+				return ctx.format.ulp(ret, 2.0);
+			default:
+				DE_FATAL("Impossible");
+		}
+
+		return 0;
+	}
+};
+
+class Log2	: public LogFunc		{ public: Log2	(void) : LogFunc("log2", deLog2) {} };
+class Log	: public LogFunc		{ public: Log	(void) : LogFunc("log", deLog) {} };
+
+ExprP<float> log2	(const ExprP<float>& x)	{ return app<Log2>(x); }
+ExprP<float> log	(const ExprP<float>& x)	{ return app<Log>(x); }
+
+#define DEFINE_CONSTRUCTOR1(CLASS, TRET, NAME, T0) \
+ExprP<TRET> NAME (const ExprP<T0>& arg0) { return app<CLASS>(arg0); }
+
+#define DEFINE_DERIVED1(CLASS, TRET, NAME, T0, ARG0, EXPANSION)			\
+class CLASS : public DerivedFunc<Signature<TRET, T0> >					\
+{																		\
+public:																	\
+	string			getName		(void) const		{ return #NAME; }	\
+																		\
+protected:																\
+	ExprP<TRET>		doExpand		(ExpandContext&,					\
+									 const CLASS::ArgExprs& args_) const \
+	{																	\
+		const ExprP<float>& ARG0 = args_.a;								\
+		return EXPANSION;												\
+	}																	\
+};																		\
+DEFINE_CONSTRUCTOR1(CLASS, TRET, NAME, T0)
+
+#define DEFINE_DERIVED_FLOAT1(CLASS, NAME, ARG0, EXPANSION) \
+	DEFINE_DERIVED1(CLASS, float, NAME, float, ARG0, EXPANSION)
+
+#define DEFINE_CONSTRUCTOR2(CLASS, TRET, NAME, T0, T1)				\
+ExprP<TRET> NAME (const ExprP<T0>& arg0, const ExprP<T1>& arg1)		\
+{																	\
+	return app<CLASS>(arg0, arg1);									\
+}
+
+#define DEFINE_DERIVED2(CLASS, TRET, NAME, T0, Arg0, T1, Arg1, EXPANSION) \
+class CLASS : public DerivedFunc<Signature<TRET, T0, T1> >				\
+{																		\
+public:																	\
+	string			getName		(void) const		{ return #NAME; }	\
+																		\
+protected:																\
+	ExprP<TRET>		doExpand	(ExpandContext&, const ArgExprs& args_) const \
+	{																	\
+		const ExprP<T0>& Arg0 = args_.a;								\
+		const ExprP<T1>& Arg1 = args_.b;								\
+		return EXPANSION;												\
+	}																	\
+};																		\
+DEFINE_CONSTRUCTOR2(CLASS, TRET, NAME, T0, T1)
+
+#define DEFINE_DERIVED_FLOAT2(CLASS, NAME, Arg0, Arg1, EXPANSION)		\
+	DEFINE_DERIVED2(CLASS, float, NAME, float, Arg0, float, Arg1, EXPANSION)
+
+#define DEFINE_CONSTRUCTOR3(CLASS, TRET, NAME, T0, T1, T2)				\
+ExprP<TRET> NAME (const ExprP<T0>& arg0, const ExprP<T1>& arg1, const ExprP<T2>& arg2) \
+{																		\
+	return app<CLASS>(arg0, arg1, arg2);								\
+}
+
+#define DEFINE_DERIVED3(CLASS, TRET, NAME, T0, ARG0, T1, ARG1, T2, ARG2, EXPANSION) \
+class CLASS : public DerivedFunc<Signature<TRET, T0, T1, T2> >					\
+{																				\
+public:																			\
+	string			getName		(void) const	{ return #NAME; }				\
+																				\
+protected:																		\
+	ExprP<TRET>		doExpand	(ExpandContext&, const ArgExprs& args_) const	\
+	{																			\
+		const ExprP<T0>& ARG0 = args_.a;										\
+		const ExprP<T1>& ARG1 = args_.b;										\
+		const ExprP<T2>& ARG2 = args_.c;										\
+		return EXPANSION;														\
+	}																			\
+};																				\
+DEFINE_CONSTRUCTOR3(CLASS, TRET, NAME, T0, T1, T2)
+
+#define DEFINE_DERIVED_FLOAT3(CLASS, NAME, ARG0, ARG1, ARG2, EXPANSION)			\
+	DEFINE_DERIVED3(CLASS, float, NAME, float, ARG0, float, ARG1, float, ARG2, EXPANSION)
+
+#define DEFINE_CONSTRUCTOR4(CLASS, TRET, NAME, T0, T1, T2, T3)			\
+ExprP<TRET> NAME (const ExprP<T0>& arg0, const ExprP<T1>& arg1,			\
+				  const ExprP<T2>& arg2, const ExprP<T3>& arg3)			\
+{																		\
+	return app<CLASS>(arg0, arg1, arg2, arg3);							\
+}
+
+DEFINE_DERIVED_FLOAT1(Sqrt,		sqrt,		x,		constant(1.0f) / app<InverseSqrt>(x));
+DEFINE_DERIVED_FLOAT2(Pow,		pow,		x,	y,	exp2(y * log2(x)));
+DEFINE_DERIVED_FLOAT1(Radians,	radians,	d,		(constant(DE_PI) / constant(180.0f)) * d);
+DEFINE_DERIVED_FLOAT1(Degrees,	degrees,	r,		(constant(180.0f) / constant(DE_PI)) * r);
+
+class TrigFunc : public CFloatFunc1
+{
+public:
+					TrigFunc		(const string&		name,
+									 DoubleFunc1&		func,
+									 const Interval&	loEx,
+									 const Interval&	hiEx)
+						: CFloatFunc1	(name, func)
+						, m_loExtremum	(loEx)
+						, m_hiExtremum	(hiEx) {}
+
+protected:
+	Interval		innerExtrema	(const EvalContext&, const Interval& angle) const
+	{
+		const double		lo		= angle.lo();
+		const double		hi		= angle.hi();
+		const int			loSlope	= doGetSlope(lo);
+		const int			hiSlope	= doGetSlope(hi);
+
+		// Detect the high and low values the function can take between the
+		// interval endpoints.
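+		// For example, sin over [pi/4, 3*pi/4] first rises and then falls, so
+		// the interior maximum 1.0 must be included even though neither
+		// endpoint attains it.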
+		if (angle.length() >= 2.0 * DE_PI_DOUBLE)
+		{
+			// The interval is longer than a full cycle, so it must get all possible values.
+			return m_hiExtremum | m_loExtremum;
+		}
+		else if (loSlope == 1 && hiSlope == -1)
+		{
+			// The slope can change from positive to negative only at the maximum value.
+			return m_hiExtremum;
+		}
+		else if (loSlope == -1 && hiSlope == 1)
+		{
+			// The slope can change from negative to positive only at the minimum value.
+			return m_loExtremum;
+		}
+		else if (loSlope == hiSlope &&
+				 deIntSign(applyExact(hi) - applyExact(lo)) * loSlope == -1)
+		{
+			// The slope has changed twice between the endpoints, so both extrema are included.
+			return m_hiExtremum | m_loExtremum;
+		}
+
+		return Interval();
+	}
+
+	Interval	getCodomain			(void) const
+	{
+		// Ensure that result is always within [-1, 1], or NaN (for +-inf)
+		return Interval(-1.0, 1.0) | TCU_NAN;
+	}
+
+	double		precision			(const EvalContext& ctx, double ret, double arg) const
+	{
+		if (ctx.floatPrecision == glu::PRECISION_HIGHP)
+		{
+			// Use precision from OpenCL fast relaxed math
+			if (-DE_PI_DOUBLE <= arg && arg <= DE_PI_DOUBLE)
+			{
+				return deLdExp(1.0, -11);
+			}
+			else
+			{
+				// "larger otherwise", let's pick |x| * 2^-12, which is slightly over
+				// 2^-11 at x == pi.
+				return deLdExp(deAbs(arg), -12);
+			}
+		}
+		else if (ctx.floatPrecision == glu::PRECISION_MEDIUMP)
+		{
+			if (-DE_PI_DOUBLE <= arg && arg <= DE_PI_DOUBLE)
+			{
+				// from OpenCL half-float extension specification
+				return ctx.format.ulp(ret, 2.0);
+			}
+			else
+			{
+				// |x| * 2^-10, slightly larger than 2 ULP at x == pi
+				return deLdExp(deAbs(arg), -10);
+			}
+		}
+		else
+		{
+			DE_ASSERT(ctx.floatPrecision == glu::PRECISION_LOWP);
+
+			// from OpenCL half-float extension specification
+			return ctx.format.ulp(ret, 2.0);
+		}
+	}
+
+	virtual int		doGetSlope		(double angle) const = 0;
+
+	Interval		m_loExtremum;
+	Interval		m_hiExtremum;
+};
+
+class Sin : public TrigFunc
+{
+public:
+				Sin			(void) : TrigFunc("sin", deSin, -1.0, 1.0) {}
+
+protected:
+	int			doGetSlope	(double angle) const { return deIntSign(deCos(angle)); }
+};
+
+ExprP<float> sin (const ExprP<float>& x) { return app<Sin>(x); }
+
+class Cos : public TrigFunc
+{
+public:
+				Cos			(void) : TrigFunc("cos", deCos, -1.0, 1.0) {}
+
+protected:
+	int			doGetSlope	(double angle) const { return -deIntSign(deSin(angle)); }
+};
+
+ExprP<float> cos (const ExprP<float>& x) { return app<Cos>(x); }
+
+DEFINE_DERIVED_FLOAT1(Tan, tan, x, sin(x) * (constant(1.0f) / cos(x)));
+
+class ASin : public CFloatFunc1
+{
+public:
+					ASin		(void) : CFloatFunc1("asin", deAsin) {}
+
+protected:
+	double			precision	(const EvalContext& ctx, double, double x) const
+	{
+		if (!de::inBounds(x, -1.0, 1.0))
+			return TCU_NAN;
+
+		if (ctx.floatPrecision == glu::PRECISION_HIGHP)
+		{
+			// Absolute error of 2^-11
+			return deLdExp(1.0, -11);
+		}
+		else
+		{
+			// Absolute error of 2^-8
+			return deLdExp(1.0, -8);
+		}
+
+	}
+};
+
+class ArcTrigFunc : public CFloatFunc1
+{
+public:
+					ArcTrigFunc	(const string&		name,
+								 DoubleFunc1&		func,
+								 double				precisionULPs,
+								 const Interval&	domain,
+								 const Interval&	codomain)
+						: CFloatFunc1		(name, func)
+						, m_precision		(precisionULPs)
+						, m_domain			(domain)
+						, m_codomain		(codomain) {}
+
+protected:
+	double			precision	(const EvalContext& ctx, double ret, double x) const
+	{
+		if (!m_domain.contains(x))
+			return TCU_NAN;
+
+		if (ctx.floatPrecision == glu::PRECISION_HIGHP)
+		{
+			// Use OpenCL's fast relaxed math precision
+			return ctx.format.ulp(ret, m_precision);
+		}
+		else
+		{
+			// Use OpenCL half-float spec
+			return ctx.format.ulp(ret, 2.0);
+		}
+	}
+
+	// We could implement getCodomain with m_codomain, but choose not to,
+	// because it seems too strict with transcendental constants like pi.
+
+	const double	m_precision;
+	const Interval	m_domain;
+	const Interval	m_codomain;
+};
+
+class ACos : public ArcTrigFunc
+{
+public:
+	ACos (void) : ArcTrigFunc("acos", deAcos, 4096.0,
+							  Interval(-1.0, 1.0),
+							  Interval(0.0, DE_PI_DOUBLE)) {}
+};
+
+class ATan : public ArcTrigFunc
+{
+public:
+	ATan (void) : ArcTrigFunc("atan", deAtanOver, 4096.0,
+							  Interval::unbounded(),
+							  Interval(-DE_PI_DOUBLE * 0.5, DE_PI_DOUBLE * 0.5)) {}
+};
+
+class ATan2 : public CFloatFunc2
+{
+public:
+				ATan2			(void) : CFloatFunc2 ("atan", deAtan2) {}
+
+protected:
+	Interval	innerExtrema	(const EvalContext&		ctx,
+								 const Interval&		yi,
+								 const Interval&		xi) const
+	{
+		Interval ret;
+
+		if (yi.contains(0.0))
+		{
+			if (xi.contains(0.0))
+				ret |= TCU_NAN;
+			if (xi.intersects(Interval(-TCU_INFINITY, 0.0)))
+				ret |= Interval(-DE_PI_DOUBLE, DE_PI_DOUBLE);
+		}
+
+		if (ctx.format.hasInf() != YES && (!yi.isFinite() || !xi.isFinite()))
+		{
+			// Infinities may not be supported, allow anything, including NaN
+			ret |= TCU_NAN;
+		}
+
+		return ret;
+	}
+
+	double		precision		(const EvalContext& ctx, double ret, double, double) const
+	{
+		if (ctx.floatPrecision == glu::PRECISION_HIGHP)
+			return ctx.format.ulp(ret, 4096.0);
+		else
+			return ctx.format.ulp(ret, 2.0);
+	}
+
+	// Codomain could be [-pi, pi], but that would probably be too strict.
+};
+
+DEFINE_DERIVED_FLOAT1(Sinh, sinh, x, (exp(x) - exp(-x)) / constant(2.0f));
+DEFINE_DERIVED_FLOAT1(Cosh, cosh, x, (exp(x) + exp(-x)) / constant(2.0f));
+DEFINE_DERIVED_FLOAT1(Tanh, tanh, x, sinh(x) / cosh(x));
+
+// These are not defined as derived forms in the GLSL ES spec, but expanding
+// them this way gives us a reasonable precision estimate.
+DEFINE_DERIVED_FLOAT1(ASinh, asinh, x, log(x + sqrt(x * x + constant(1.0f))));
+DEFINE_DERIVED_FLOAT1(ACosh, acosh, x, log(x + sqrt(alternatives((x + constant(1.0f)) * (x - constant(1.0f)),
+																 (x*x - constant(1.0f))))));
+DEFINE_DERIVED_FLOAT1(ATanh, atanh, x, constant(0.5f) * log((constant(1.0f) + x) /
+															(constant(1.0f) - x)));
+
+template <typename T>
+class GetComponent : public PrimitiveFunc<Signature<typename T::Element, T, int> >
+{
+public:
+	typedef		typename GetComponent::IRet	IRet;
+
+	string		getName		(void) const { return "_getComponent"; }
+
+	void		print		(ostream&				os,
+							 const BaseArgExprs&	args) const
+	{
+		os << *args[0] << "[" << *args[1] << "]";
+	}
+
+protected:
+	IRet		doApply		(const EvalContext&,
+							 const typename GetComponent::IArgs& iargs) const
+	{
+		IRet ret;
+
+		for (int compNdx = 0; compNdx < T::SIZE; ++compNdx)
+		{
+			if (iargs.b.contains(compNdx))
+				ret = unionIVal<typename T::Element>(ret, iargs.a[compNdx]);
+		}
+
+		return ret;
+	}
+
+};
+
+template <typename T>
+ExprP<typename T::Element> getComponent (const ExprP<T>& container, int ndx)
+{
+	DE_ASSERT(0 <= ndx && ndx < T::SIZE);
+	return app<GetComponent<T> >(container, constant(ndx));
+}
+
+template <typename T>	string	vecNamePrefix			(void);
+template <>				string	vecNamePrefix<float>	(void) { return ""; }
+template <>				string	vecNamePrefix<int>		(void) { return "i"; }
+template <>				string	vecNamePrefix<bool>		(void) { return "b"; }
+
+template <typename T, int Size>
+string vecName (void) { return vecNamePrefix<T>() + "vec" + de::toString(Size); }
+
+template <typename T, int Size> class GenVec;
+
+template <typename T>
+class GenVec<T, 1> : public DerivedFunc<Signature<T, T> >
+{
+public:
+	typedef typename GenVec<T, 1>::ArgExprs ArgExprs;
+
+	string		getName		(void) const
+	{
+		return "_" + vecName<T, 1>();
+	}
+
+protected:
+
+	ExprP<T>	doExpand	(ExpandContext&, const ArgExprs& args) const { return args.a; }
+};
+
+template <typename T>
+class GenVec<T, 2> : public PrimitiveFunc<Signature<Vector<T, 2>, T, T> >
+{
+public:
+	typedef typename GenVec::IRet	IRet;
+	typedef typename GenVec::IArgs	IArgs;
+
+	string		getName		(void) const
+	{
+		return vecName<T, 2>();
+	}
+
+protected:
+	IRet		doApply		(const EvalContext&, const IArgs& iargs) const
+	{
+		return IRet(iargs.a, iargs.b);
+	}
+};
+
+template <typename T>
+class GenVec<T, 3> : public PrimitiveFunc<Signature<Vector<T, 3>, T, T, T> >
+{
+public:
+	typedef typename GenVec::IRet	IRet;
+	typedef typename GenVec::IArgs	IArgs;
+
+	string	getName		(void) const
+	{
+		return vecName<T, 3>();
+	}
+
+protected:
+	IRet	doApply		(const EvalContext&, const IArgs& iargs) const
+	{
+		return IRet(iargs.a, iargs.b, iargs.c);
+	}
+};
+
+template <typename T>
+class GenVec<T, 4> : public PrimitiveFunc<Signature<Vector<T, 4>, T, T, T, T> >
+{
+public:
+	typedef typename GenVec::IRet	IRet;
+	typedef typename GenVec::IArgs	IArgs;
+
+	string		getName		(void) const { return vecName<T, 4>(); }
+
+protected:
+	IRet		doApply		(const EvalContext&, const IArgs& iargs) const
+	{
+		return IRet(iargs.a, iargs.b, iargs.c, iargs.d);
+	}
+};
+
+
+
+template <typename T, int Rows, int Columns>
+class GenMat;
+
+template <typename T, int Rows>
+class GenMat<T, Rows, 2> : public PrimitiveFunc<
+	Signature<Matrix<T, Rows, 2>, Vector<T, Rows>, Vector<T, Rows> > >
+{
+public:
+	typedef typename GenMat::Ret	Ret;
+	typedef typename GenMat::IRet	IRet;
+	typedef typename GenMat::IArgs	IArgs;
+
+	string		getName		(void) const
+	{
+		return dataTypeNameOf<Ret>();
+	}
+
+protected:
+
+	IRet		doApply		(const EvalContext&, const IArgs& iargs) const
+	{
+		IRet	ret;
+		ret[0] = iargs.a;
+		ret[1] = iargs.b;
+		return ret;
+	}
+};
+
+template <typename T, int Rows>
+class GenMat<T, Rows, 3> : public PrimitiveFunc<
+	Signature<Matrix<T, Rows, 3>, Vector<T, Rows>, Vector<T, Rows>, Vector<T, Rows> > >
+{
+public:
+	typedef typename GenMat::Ret	Ret;
+	typedef typename GenMat::IRet	IRet;
+	typedef typename GenMat::IArgs	IArgs;
+
+	string	getName	(void) const
+	{
+		return dataTypeNameOf<Ret>();
+	}
+
+protected:
+
+	IRet	doApply	(const EvalContext&, const IArgs& iargs) const
+	{
+		IRet	ret;
+		ret[0] = iargs.a;
+		ret[1] = iargs.b;
+		ret[2] = iargs.c;
+		return ret;
+	}
+};
+
+template <typename T, int Rows>
+class GenMat<T, Rows, 4> : public PrimitiveFunc<
+	Signature<Matrix<T, Rows, 4>,
+			  Vector<T, Rows>, Vector<T, Rows>, Vector<T, Rows>, Vector<T, Rows> > >
+{
+public:
+	typedef typename GenMat::Ret	Ret;
+	typedef typename GenMat::IRet	IRet;
+	typedef typename GenMat::IArgs	IArgs;
+
+	string	getName	(void) const
+	{
+		return dataTypeNameOf<Ret>();
+	}
+
+protected:
+	IRet	doApply	(const EvalContext&, const IArgs& iargs) const
+	{
+		IRet	ret;
+		ret[0] = iargs.a;
+		ret[1] = iargs.b;
+		ret[2] = iargs.c;
+		ret[3] = iargs.d;
+		return ret;
+	}
+};
+
+template <typename T, int Rows>
+ExprP<Matrix<T, Rows, 2> > mat2 (const ExprP<Vector<T, Rows> >& arg0,
+								 const ExprP<Vector<T, Rows> >& arg1)
+{
+	return app<GenMat<T, Rows, 2> >(arg0, arg1);
+}
+
+template <typename T, int Rows>
+ExprP<Matrix<T, Rows, 3> > mat3 (const ExprP<Vector<T, Rows> >& arg0,
+								 const ExprP<Vector<T, Rows> >& arg1,
+								 const ExprP<Vector<T, Rows> >& arg2)
+{
+	return app<GenMat<T, Rows, 3> >(arg0, arg1, arg2);
+}
+
+template <typename T, int Rows>
+ExprP<Matrix<T, Rows, 4> > mat4 (const ExprP<Vector<T, Rows> >& arg0,
+								 const ExprP<Vector<T, Rows> >& arg1,
+								 const ExprP<Vector<T, Rows> >& arg2,
+								 const ExprP<Vector<T, Rows> >& arg3)
+{
+	return app<GenMat<T, Rows, 4> >(arg0, arg1, arg2, arg3);
+}
+
+
+template <int Rows, int Cols>
+class MatNeg : public PrimitiveFunc<Signature<Matrix<float, Rows, Cols>,
+											  Matrix<float, Rows, Cols> > >
+{
+public:
+	typedef typename MatNeg::IRet		IRet;
+	typedef typename MatNeg::IArgs		IArgs;
+
+	string	getName	(void) const
+	{
+		return "_matNeg";
+	}
+
+protected:
+	void	doPrint	(ostream& os, const BaseArgExprs& args) const
+	{
+		os << "-(" << *args[0] << ")";
+	}
+
+	IRet	doApply	(const EvalContext&, const IArgs& iargs)			const
+	{
+		IRet	ret;
+
+		for (int col = 0; col < Cols; ++col)
+		{
+			for (int row = 0; row < Rows; ++row)
+				ret[col][row] = -iargs.a[col][row];
+		}
+
+		return ret;
+	}
+};
+
+template <typename T, typename Sig>
+class CompWiseFunc : public PrimitiveFunc<Sig>
+{
+public:
+	typedef Func<Signature<T, T, T> >	ScalarFunc;
+
+	string				getName			(void)									const
+	{
+		return doGetScalarFunc().getName();
+	}
+protected:
+	void				doPrint			(ostream&				os,
+										 const BaseArgExprs&	args)			const
+	{
+		doGetScalarFunc().print(os, args);
+	}
+
+	virtual
+	const ScalarFunc&	doGetScalarFunc	(void)									const = 0;
+};
+
+template <int Rows, int Cols>
+class CompMatFuncBase : public CompWiseFunc<float, Signature<Matrix<float, Rows, Cols>,
+															 Matrix<float, Rows, Cols>,
+															 Matrix<float, Rows, Cols> > >
+{
+public:
+	typedef typename CompMatFuncBase::IRet		IRet;
+	typedef typename CompMatFuncBase::IArgs		IArgs;
+
+protected:
+
+	IRet	doApply	(const EvalContext& ctx, const IArgs& iargs) const
+	{
+		IRet			ret;
+
+		for (int col = 0; col < Cols; ++col)
+		{
+			for (int row = 0; row < Rows; ++row)
+				ret[col][row] = this->doGetScalarFunc().apply(ctx,
+															  iargs.a[col][row],
+															  iargs.b[col][row]);
+		}
+
+		return ret;
+	}
+};
+
+template <typename F, int Rows, int Cols>
+class CompMatFunc : public CompMatFuncBase<Rows, Cols>
+{
+protected:
+	const typename CompMatFunc::ScalarFunc&	doGetScalarFunc	(void) const
+	{
+		return instance<F>();
+	}
+};
+
+class ScalarMatrixCompMult : public Mul
+{
+public:
+	string	getName	(void) const
+	{
+		return "matrixCompMult";
+	}
+
+	void	doPrint	(ostream& os, const BaseArgExprs& args) const
+	{
+		Func<Sig>::doPrint(os, args);
+	}
+};
+
+template <int Rows, int Cols>
+class MatrixCompMult : public CompMatFunc<ScalarMatrixCompMult, Rows, Cols>
+{
+};
+
+template <int Rows, int Cols>
+class ScalarMatFuncBase : public CompWiseFunc<float, Signature<Matrix<float, Rows, Cols>,
+															   Matrix<float, Rows, Cols>,
+															   float> >
+{
+public:
+	typedef typename ScalarMatFuncBase::IRet	IRet;
+	typedef typename ScalarMatFuncBase::IArgs	IArgs;
+
+protected:
+
+	IRet	doApply	(const EvalContext& ctx, const IArgs& iargs) const
+	{
+		IRet	ret;
+
+		for (int col = 0; col < Cols; ++col)
+		{
+			for (int row = 0; row < Rows; ++row)
+				ret[col][row] = this->doGetScalarFunc().apply(ctx, iargs.a[col][row], iargs.b);
+		}
+
+		return ret;
+	}
+};
+
+template <typename F, int Rows, int Cols>
+class ScalarMatFunc : public ScalarMatFuncBase<Rows, Cols>
+{
+protected:
+	const typename ScalarMatFunc::ScalarFunc&	doGetScalarFunc	(void)	const
+	{
+		return instance<F>();
+	}
+};
+
+template<typename T, int Size> struct GenXType;
+
+template<typename T>
+struct GenXType<T, 1>
+{
+	static ExprP<T>	genXType	(const ExprP<T>& x) { return x; }
+};
+
+template<typename T>
+struct GenXType<T, 2>
+{
+	static ExprP<Vector<T, 2> >	genXType	(const ExprP<T>& x)
+	{
+		return app<GenVec<T, 2> >(x, x);
+	}
+};
+
+template<typename T>
+struct GenXType<T, 3>
+{
+	static ExprP<Vector<T, 3> >	genXType	(const ExprP<T>& x)
+	{
+		return app<GenVec<T, 3> >(x, x, x);
+	}
+};
+
+template<typename T>
+struct GenXType<T, 4>
+{
+	static ExprP<Vector<T, 4> >	genXType	(const ExprP<T>& x)
+	{
+		return app<GenVec<T, 4> >(x, x, x, x);
+	}
+};
+
+//! Returns an expression for a vector of size `Size` (or a scalar if Size == 1),
+//! with each element initialized with the expression `x`.
+template<typename T, int Size>
+ExprP<typename ContainerOf<T, Size>::Container> genXType (const ExprP<T>& x)
+{
+	return GenXType<T, Size>::genXType(x);
+}
+
+typedef GenVec<float, 2> FloatVec2;
+DEFINE_CONSTRUCTOR2(FloatVec2, Vec2, vec2, float, float)
+
+typedef GenVec<float, 3> FloatVec3;
+DEFINE_CONSTRUCTOR3(FloatVec3, Vec3, vec3, float, float, float)
+
+typedef GenVec<float, 4> FloatVec4;
+DEFINE_CONSTRUCTOR4(FloatVec4, Vec4, vec4, float, float, float, float)
+
+template <int Size>
+class Dot : public DerivedFunc<Signature<float, Vector<float, Size>, Vector<float, Size> > >
+{
+public:
+	typedef typename Dot::ArgExprs ArgExprs;
+
+	string			getName		(void) const
+	{
+		return "dot";
+	}
+
+protected:
+	ExprP<float>	doExpand	(ExpandContext&, const ArgExprs& args) const
+	{
+		ExprP<float> val = args.a[0] * args.b[0];
+
+		for (int ndx = 1; ndx < Size; ++ndx)
+			val = val + args.a[ndx] * args.b[ndx];
+
+		return val;
+	}
+};
+
+template <>
+class Dot<1> : public DerivedFunc<Signature<float, float, float> >
+{
+public:
+	string			getName		(void) const
+	{
+		return "dot";
+	}
+
+	ExprP<float>	doExpand	(ExpandContext&, const ArgExprs& args) const
+	{
+		return args.a * args.b;
+	}
+};
+
+template <int Size>
+ExprP<float> dot (const ExprP<Vector<float, Size> >& x, const ExprP<Vector<float, Size> >& y)
+{
+	return app<Dot<Size> >(x, y);
+}
+
+ExprP<float> dot (const ExprP<float>& x, const ExprP<float>& y)
+{
+	return app<Dot<1> >(x, y);
+}
+
+template <int Size>
+class Length : public DerivedFunc<
+	Signature<float, typename ContainerOf<float, Size>::Container> >
+{
+public:
+	typedef typename Length::ArgExprs ArgExprs;
+
+	string			getName		(void) const
+	{
+		return "length";
+	}
+
+protected:
+	ExprP<float>	doExpand	(ExpandContext&, const ArgExprs& args) const
+	{
+		return sqrt(dot(args.a, args.a));
+	}
+};
+
+template <int Size>
+ExprP<float> length (const ExprP<typename ContainerOf<float, Size>::Container>& x)
+{
+	return app<Length<Size> >(x);
+}
+
+template <int Size>
+class Distance : public DerivedFunc<
+	Signature<float,
+			  typename ContainerOf<float, Size>::Container,
+			  typename ContainerOf<float, Size>::Container> >
+{
+public:
+	typedef typename	Distance::Ret		Ret;
+	typedef typename	Distance::ArgExprs	ArgExprs;
+
+	string		getName		(void) const
+	{
+		return "distance";
+	}
+
+protected:
+	ExprP<Ret>	doExpand	(ExpandContext&, const ArgExprs& args) const
+	{
+		return length<Size>(args.a - args.b);
+	}
+};
+
+// cross
+
+class Cross : public DerivedFunc<Signature<Vec3, Vec3, Vec3> >
+{
+public:
+	string			getName		(void) const
+	{
+		return "cross";
+	}
+
+protected:
+	ExprP<Vec3>		doExpand	(ExpandContext&, const ArgExprs& x) const
+	{
+		return vec3(x.a[1] * x.b[2] - x.b[1] * x.a[2],
+					x.a[2] * x.b[0] - x.b[2] * x.a[0],
+					x.a[0] * x.b[1] - x.b[0] * x.a[1]);
+	}
+};
+
+DEFINE_CONSTRUCTOR2(Cross, Vec3, cross, Vec3, Vec3)
+
+template<int Size>
+class Normalize : public DerivedFunc<
+	Signature<typename ContainerOf<float, Size>::Container,
+			  typename ContainerOf<float, Size>::Container> >
+{
+public:
+	typedef typename	Normalize::Ret		Ret;
+	typedef typename	Normalize::ArgExprs	ArgExprs;
+
+	string		getName		(void) const
+	{
+		return "normalize";
+	}
+
+protected:
+	ExprP<Ret>	doExpand	(ExpandContext&, const ArgExprs& args) const
+	{
+		return args.a / length<Size>(args.a);
+	}
+};
+
+template <int Size>
+class FaceForward : public DerivedFunc<
+	Signature<typename ContainerOf<float, Size>::Container,
+			  typename ContainerOf<float, Size>::Container,
+			  typename ContainerOf<float, Size>::Container,
+			  typename ContainerOf<float, Size>::Container> >
+{
+public:
+	typedef typename	FaceForward::Ret		Ret;
+	typedef typename	FaceForward::ArgExprs	ArgExprs;
+
+	string		getName		(void) const
+	{
+		return "faceforward";
+	}
+
+protected:
+
+	ExprP<Ret>	doExpand	(ExpandContext&, const ArgExprs& args) const
+	{
+		return cond(dot(args.c, args.b) < constant(0.0f), args.a, -args.a);
+	}
+};
+
+template <int Size>
+class Reflect : public DerivedFunc<
+	Signature<typename ContainerOf<float, Size>::Container,
+			  typename ContainerOf<float, Size>::Container,
+			  typename ContainerOf<float, Size>::Container> >
+{
+public:
+	typedef typename	Reflect::Ret		Ret;
+	typedef typename	Reflect::Arg0		Arg0;
+	typedef typename	Reflect::Arg1		Arg1;
+	typedef typename	Reflect::ArgExprs	ArgExprs;
+
+	string		getName		(void) const
+	{
+		return "reflect";
+	}
+
+protected:
+	ExprP<Ret>	doExpand	(ExpandContext& ctx, const ArgExprs& args) const
+	{
+		const ExprP<Arg0>&	i		= args.a;
+		const ExprP<Arg1>&	n		= args.b;
+		const ExprP<float>	dotNI	= bindExpression("dotNI", ctx, dot(n, i));
+
+		return i - alternatives((n * dotNI) * constant(2.0f),
+								n * (dotNI * constant(2.0f)));
+	}
+};
+
+template <int Size>
+class Refract : public DerivedFunc<
+	Signature<typename ContainerOf<float, Size>::Container,
+			  typename ContainerOf<float, Size>::Container,
+			  typename ContainerOf<float, Size>::Container,
+			  float> >
+{
+public:
+	typedef typename	Refract::Ret		Ret;
+	typedef typename	Refract::Arg0		Arg0;
+	typedef typename	Refract::Arg1		Arg1;
+	typedef typename	Refract::ArgExprs	ArgExprs;
+
+	string		getName		(void) const
+	{
+		return "refract";
+	}
+
+protected:
+	ExprP<Ret>	doExpand	(ExpandContext&	ctx, const ArgExprs& args) const
+	{
+		const ExprP<Arg0>&	i		= args.a;
+		const ExprP<Arg1>&	n		= args.b;
+		const ExprP<float>&	eta		= args.c;
+		const ExprP<float>	dotNI	= bindExpression("dotNI", ctx, dot(n, i));
+		const ExprP<float>	k		= bindExpression("k", ctx, constant(1.0f) - eta * eta *
+												 (constant(1.0f) - dotNI * dotNI));
+
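+		// As in the GLSL specification: k < 0 indicates total internal
+		// reflection, in which case the zero vector is returned.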
+		return cond(k < constant(0.0f),
+					genXType<float, Size>(constant(0.0f)),
+					i * eta - n * (eta * dotNI + sqrt(k)));
+	}
+};
+
+class PreciseFunc1 : public CFloatFunc1
+{
+public:
+			PreciseFunc1	(const string& name, DoubleFunc1& func) : CFloatFunc1(name, func) {}
+protected:
+	double	precision		(const EvalContext&, double, double) const	{ return 0.0; }
+};
+
+class Abs : public PreciseFunc1
+{
+public:
+	Abs (void) : PreciseFunc1("abs", deAbs) {}
+};
+
+class Sign : public PreciseFunc1
+{
+public:
+	Sign (void) : PreciseFunc1("sign", deSign) {}
+};
+
+class Floor : public PreciseFunc1
+{
+public:
+	Floor (void) : PreciseFunc1("floor", deFloor) {}
+};
+
+class Trunc : public PreciseFunc1
+{
+public:
+	Trunc (void) : PreciseFunc1("trunc", deTrunc) {}
+};
+
+class Round : public FloatFunc1
+{
+public:
+	string		getName		(void) const								{ return "round"; }
+
+protected:
+	Interval	applyPoint	(const EvalContext&, double x) const
+	{
+		double			truncated	= 0.0;
+		const double	fract		= deModf(x, &truncated);
+		Interval		ret;
+
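+		// GLSL round() may break ties in either direction, so when the
+		// fractional part is exactly 0.5 both neighbouring integers are
+		// accepted as results.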
+		if (fabs(fract) <= 0.5)
+			ret |= truncated;
+		if (fabs(fract) >= 0.5)
+			ret |= truncated + deSign(fract);
+
+		return ret;
+	}
+
+	double		precision	(const EvalContext&, double, double) const	{ return 0.0; }
+};
+
+class RoundEven : public PreciseFunc1
+{
+public:
+	RoundEven (void) : PreciseFunc1("roundEven", deRoundEven) {}
+};
+
+class Ceil : public PreciseFunc1
+{
+public:
+	Ceil (void) : PreciseFunc1("ceil", deCeil) {}
+};
+
+DEFINE_DERIVED_FLOAT1(Fract, fract, x, x - app<Floor>(x));
+
+class PreciseFunc2 : public CFloatFunc2
+{
+public:
+			PreciseFunc2	(const string& name, DoubleFunc2& func) : CFloatFunc2(name, func) {}
+protected:
+	double	precision		(const EvalContext&, double, double, double) const { return 0.0; }
+};
+
+DEFINE_DERIVED_FLOAT2(Mod, mod, x, y, x - y * app<Floor>(x / y));
+
+class Modf : public PrimitiveFunc<Signature<float, float, float> >
+{
+public:
+	string	getName				(void) const
+	{
+		return "modf";
+	}
+
+protected:
+	IRet	doApply				(const EvalContext&, const IArgs& iargs) const
+	{
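+		// The whole part is written through the second argument, which the
+		// framework treats as an out-parameter (see getOutParamIndex below);
+		// only the fractional part is returned as the function value.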
+		Interval	fracIV;
+		Interval&	wholeIV		= const_cast<Interval&>(iargs.b);
+		double		intPart		= 0;
+
+		TCU_INTERVAL_APPLY_MONOTONE1(fracIV, x, iargs.a, frac, frac = deModf(x, &intPart));
+		TCU_INTERVAL_APPLY_MONOTONE1(wholeIV, x, iargs.a, whole,
+									 deModf(x, &intPart); whole = intPart);
+
+		if (!iargs.a.isFinite())
+		{
+			// Behavior of modf(Inf) is not well-defined; allow anything as the
+			// fractional part. See Khronos bug 13907.
+			fracIV |= TCU_NAN;
+		}
+
+		return fracIV;
+	}
+
+	int		getOutParamIndex	(void) const
+	{
+		return 1;
+	}
+};
+
+class Min : public PreciseFunc2 { public: Min (void) : PreciseFunc2("min", deMin) {} };
+class Max : public PreciseFunc2 { public: Max (void) : PreciseFunc2("max", deMax) {} };
+
+class Clamp : public FloatFunc3
+{
+public:
+	string	getName		(void) const { return "clamp"; }
+
+	double	applyExact	(double x, double minVal, double maxVal) const
+	{
+		return de::min(de::max(x, minVal), maxVal);
+	}
+
+	double	precision	(const EvalContext&, double, double, double minVal, double maxVal) const
+	{
+		return minVal > maxVal ? TCU_NAN : 0.0;
+	}
+};
+
+ExprP<float> clamp(const ExprP<float>& x, const ExprP<float>& minVal, const ExprP<float>& maxVal)
+{
+	return app<Clamp>(x, minVal, maxVal);
+}
+
+DEFINE_DERIVED_FLOAT3(Mix, mix, x, y, a, alternatives((x * (constant(1.0f) - a)) + y * a,
+													  x + (y - x) * a));
+
+static double step (double edge, double x)
+{
+	return x < edge ? 0.0 : 1.0;
+}
+
+class Step : public PreciseFunc2 { public: Step (void) : PreciseFunc2("step", step) {} };
+
+class SmoothStep : public DerivedFunc<Signature<float, float, float, float> >
+{
+public:
+	string		getName		(void) const
+	{
+		return "smoothstep";
+	}
+
+protected:
+
+	ExprP<Ret>	doExpand	(ExpandContext& ctx, const ArgExprs& args) const
+	{
+		const ExprP<float>&		edge0	= args.a;
+		const ExprP<float>&		edge1	= args.b;
+		const ExprP<float>&		x		= args.c;
+		const ExprP<float>		tExpr	= clamp((x - edge0) / (edge1 - edge0),
+											constant(0.0f), constant(1.0f));
+		const ExprP<float>		t		= bindExpression("t", ctx, tExpr);
+
+		return (t * t * (constant(3.0f) - constant(2.0f) * t));
+	}
+};
+
+class FrExp : public PrimitiveFunc<Signature<float, float, int> >
+{
+public:
+	string	getName			(void) const
+	{
+		return "frexp";
+	}
+
+protected:
+	IRet	doApply			(const EvalContext&, const IArgs& iargs) const
+	{
+		IRet			ret;
+		const IArg0&	x			= iargs.a;
+		IArg1&			exponent	= const_cast<IArg1&>(iargs.b);
+
+		if (x.hasNaN() || x.contains(TCU_INFINITY) || x.contains(-TCU_INFINITY))
+		{
+			// GLSL (in contrast to IEEE) says that the result of applying frexp
+			// to infinity is undefined.
+			ret = Interval::unbounded() | TCU_NAN;
+			exponent = Interval(-deLdExp(1.0, 31), deLdExp(1.0, 31)-1);
+		}
+		else if (!x.empty())
+		{
+			int				loExp	= 0;
+			const double	loFrac	= deFrExp(x.lo(), &loExp);
+			int				hiExp	= 0;
+			const double	hiFrac	= deFrExp(x.hi(), &hiExp);
+
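+			// If the fractions at the interval endpoints have different signs,
+			// the input straddles zero: the fraction may then lie anywhere in
+			// (-1, 0] and/or [0, 1), and the exponent can be arbitrarily small.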
+			if (deSign(loFrac) != deSign(hiFrac))
+			{
+				exponent = Interval(-TCU_INFINITY, de::max(loExp, hiExp));
+				ret = Interval();
+				if (deSign(loFrac) < 0)
+					ret |= Interval(-1.0 + DBL_EPSILON*0.5, 0.0);
+				if (deSign(hiFrac) > 0)
+					ret |= Interval(0.0, 1.0 - DBL_EPSILON*0.5);
+			}
+			else
+			{
+				exponent = Interval(loExp, hiExp);
+				if (loExp == hiExp)
+					ret = Interval(loFrac, hiFrac);
+				else
+					ret = deSign(loFrac) * Interval(0.5, 1.0 - DBL_EPSILON*0.5);
+			}
+		}
+
+		return ret;
+	}
+
+	int	getOutParamIndex	(void) const
+	{
+		return 1;
+	}
+};
+
+class LdExp : public PrimitiveFunc<Signature<float, float, int> >
+{
+public:
+	string		getName			(void) const
+	{
+		return "ldexp";
+	}
+
+protected:
+	Interval	doApply			(const EvalContext& ctx, const IArgs& iargs) const
+	{
+		Interval	ret = call<Exp2>(ctx, iargs.b);
+		// Khronos bug 11180 consensus: if exp2(exponent) cannot be represented,
+		// the result is undefined.
+
+		if (ret.contains(TCU_INFINITY) || ret.contains(-TCU_INFINITY))
+			ret |= TCU_NAN;
+
+		return call<Mul>(ctx, iargs.a, ret);
+	}
+};
+
+template<int Rows, int Columns>
+class Transpose : public PrimitiveFunc<Signature<Matrix<float, Rows, Columns>,
+												 Matrix<float, Columns, Rows> > >
+{
+public:
+	typedef typename Transpose::IRet	IRet;
+	typedef typename Transpose::IArgs	IArgs;
+
+	string		getName		(void) const
+	{
+		return "transpose";
+	}
+
+protected:
+	IRet		doApply		(const EvalContext&, const IArgs& iargs) const
+	{
+		IRet ret;
+
+		for (int rowNdx = 0; rowNdx < Rows; ++rowNdx)
+		{
+			for (int colNdx = 0; colNdx < Columns; ++colNdx)
+				ret(rowNdx, colNdx) = iargs.a(colNdx, rowNdx);
+		}
+
+		return ret;
+	}
+};
+
+template<typename Ret, typename Arg0, typename Arg1>
+class MulFunc : public PrimitiveFunc<Signature<Ret, Arg0, Arg1> >
+{
+public:
+	string	getName	(void) const									{ return "mul"; }
+
+protected:
+	void	doPrint	(ostream& os, const BaseArgExprs& args) const
+	{
+		os << "(" << *args[0] << " * " << *args[1] << ")";
+	}
+};
+
+template<int LeftRows, int Middle, int RightCols>
+class MatMul : public MulFunc<Matrix<float, LeftRows, RightCols>,
+							  Matrix<float, LeftRows, Middle>,
+							  Matrix<float, Middle, RightCols> >
+{
+protected:
+	typedef typename MatMul::IRet	IRet;
+	typedef typename MatMul::IArgs	IArgs;
+	typedef typename MatMul::IArg0	IArg0;
+	typedef typename MatMul::IArg1	IArg1;
+
+	IRet	doApply	(const EvalContext&	ctx, const IArgs& iargs) const
+	{
+		const IArg0&	left	= iargs.a;
+		const IArg1&	right	= iargs.b;
+		IRet			ret;
+
+		for (int row = 0; row < LeftRows; ++row)
+		{
+			for (int col = 0; col < RightCols; ++col)
+			{
+				Interval	element	(0.0);
+
+				for (int ndx = 0; ndx < Middle; ++ndx)
+					element = call<Add>(ctx, element,
+										call<Mul>(ctx, left[ndx][row], right[col][ndx]));
+
+				ret[col][row] = element;
+			}
+		}
+
+		return ret;
+	}
+};
+
+template<int Rows, int Cols>
+class VecMatMul : public MulFunc<Vector<float, Cols>,
+								 Vector<float, Rows>,
+								 Matrix<float, Rows, Cols> >
+{
+public:
+	typedef typename VecMatMul::IRet	IRet;
+	typedef typename VecMatMul::IArgs	IArgs;
+	typedef typename VecMatMul::IArg0	IArg0;
+	typedef typename VecMatMul::IArg1	IArg1;
+
+protected:
+	IRet	doApply	(const EvalContext& ctx, const IArgs& iargs) const
+	{
+		const IArg0&	left	= iargs.a;
+		const IArg1&	right	= iargs.b;
+		IRet			ret;
+
+		for (int col = 0; col < Cols; ++col)
+		{
+			Interval	element	(0.0);
+
+			for (int row = 0; row < Rows; ++row)
+				element = call<Add>(ctx, element, call<Mul>(ctx, left[row], right[col][row]));
+
+			ret[col] = element;
+		}
+
+		return ret;
+	}
+};
+
+template<int Rows, int Cols>
+class MatVecMul : public MulFunc<Vector<float, Rows>,
+								 Matrix<float, Rows, Cols>,
+								 Vector<float, Cols> >
+{
+public:
+	typedef typename MatVecMul::IRet	IRet;
+	typedef typename MatVecMul::IArgs	IArgs;
+	typedef typename MatVecMul::IArg0	IArg0;
+	typedef typename MatVecMul::IArg1	IArg1;
+
+protected:
+	IRet	doApply	(const EvalContext& ctx, const IArgs& iargs) const
+	{
+		const IArg0&	left	= iargs.a;
+		const IArg1&	right	= iargs.b;
+
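+		// Evaluate M * v as v * transpose(M) so that the interval arithmetic
+		// is shared with VecMatMul.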
+		return call<VecMatMul<Cols, Rows> >(ctx, right,
+											call<Transpose<Rows, Cols> >(ctx, left));
+	}
+};
+
+template<int Rows, int Cols>
+class OuterProduct : public PrimitiveFunc<Signature<Matrix<float, Rows, Cols>,
+													Vector<float, Rows>,
+													Vector<float, Cols> > >
+{
+public:
+	typedef typename OuterProduct::IRet		IRet;
+	typedef typename OuterProduct::IArgs	IArgs;
+
+	string	getName	(void) const
+	{
+		return "outerProduct";
+	}
+
+protected:
+	IRet	doApply	(const EvalContext& ctx, const IArgs& iargs) const
+	{
+		IRet	ret;
+
+		for (int row = 0; row < Rows; ++row)
+		{
+			for (int col = 0; col < Cols; ++col)
+				ret[col][row] = call<Mul>(ctx, iargs.a[row], iargs.b[col]);
+		}
+
+		return ret;
+	}
+};
+
+template<int Rows, int Cols>
+ExprP<Matrix<float, Rows, Cols> > outerProduct (const ExprP<Vector<float, Rows> >& left,
+												const ExprP<Vector<float, Cols> >& right)
+{
+	return app<OuterProduct<Rows, Cols> >(left, right);
+}
+
+template<int Size>
+class DeterminantBase : public DerivedFunc<Signature<float, Matrix<float, Size, Size> > >
+{
+public:
+	string	getName	(void) const { return "determinant"; }
+};
+
+template<int Size>
+class Determinant;
+
+template<int Size>
+ExprP<float> determinant (ExprP<Matrix<float, Size, Size> > mat)
+{
+	return app<Determinant<Size> >(mat);
+}
+
+template<>
+class Determinant<2> : public DeterminantBase<2>
+{
+protected:
+	ExprP<Ret>	doExpand (ExpandContext&, const ArgExprs& args)	const
+	{
+		ExprP<Mat2>	mat	= args.a;
+
+		return mat[0][0] * mat[1][1] - mat[1][0] * mat[0][1];
+	}
+};
+
+template<>
+class Determinant<3> : public DeterminantBase<3>
+{
+protected:
+	ExprP<Ret> doExpand (ExpandContext&, const ArgExprs& args) const
+	{
+		ExprP<Mat3>	mat	= args.a;
+
+		return (mat[0][0] * (mat[1][1] * mat[2][2] - mat[1][2] * mat[2][1]) +
+				mat[0][1] * (mat[1][2] * mat[2][0] - mat[1][0] * mat[2][2]) +
+				mat[0][2] * (mat[1][0] * mat[2][1] - mat[1][1] * mat[2][0]));
+	}
+};
+
+template<>
+class Determinant<4> : public DeterminantBase<4>
+{
+protected:
+	 ExprP<Ret>	doExpand	(ExpandContext& ctx, const ArgExprs& args) const
+	{
+		ExprP<Mat4>	mat	= args.a;
+		ExprP<Mat3>	minors[4];
+
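+		// Laplace expansion along the first row: minors[ndx] drops column ndx
+		// and the first row, and the cofactor signs alternate (+ - + -).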
+		for (int ndx = 0; ndx < 4; ++ndx)
+		{
+			ExprP<Vec4>		minorColumns[3];
+			ExprP<Vec3>		columns[3];
+
+			for (int col = 0; col < 3; ++col)
+				minorColumns[col] = mat[col < ndx ? col : col + 1];
+
+			for (int col = 0; col < 3; ++col)
+				columns[col] = vec3(minorColumns[0][col+1],
+									minorColumns[1][col+1],
+									minorColumns[2][col+1]);
+
+			minors[ndx] = bindExpression("minor", ctx,
+										 mat3(columns[0], columns[1], columns[2]));
+		}
+
+		return (mat[0][0] * determinant(minors[0]) -
+				mat[1][0] * determinant(minors[1]) +
+				mat[2][0] * determinant(minors[2]) -
+				mat[3][0] * determinant(minors[3]));
+	}
+};
+
+template<int Size> class Inverse;
+
+template <int Size>
+ExprP<Matrix<float, Size, Size> > inverse (ExprP<Matrix<float, Size, Size> > mat)
+{
+	return app<Inverse<Size> >(mat);
+}
+
+template<>
+class Inverse<2> : public DerivedFunc<Signature<Mat2, Mat2> >
+{
+public:
+	string		getName	(void) const
+	{
+		return "inverse";
+	}
+
+protected:
+	ExprP<Ret>	doExpand (ExpandContext& ctx, const ArgExprs& args) const
+	{
+		ExprP<Mat2>		mat = args.a;
+		ExprP<float>	det	= bindExpression("det", ctx, determinant(mat));
+
+		return mat2(vec2(mat[1][1] / det, -mat[0][1] / det),
+					vec2(-mat[1][0] / det, mat[0][0] / det));
+	}
+};
+
+template<>
+class Inverse<3> : public DerivedFunc<Signature<Mat3, Mat3> >
+{
+public:
+	string		getName		(void) const
+	{
+		return "inverse";
+	}
+
+protected:
+	ExprP<Ret>	doExpand	(ExpandContext& ctx, const ArgExprs& args)			const
+	{
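+		// Invert via 2x2 block decomposition: view the matrix as [A b; c d]
+		// with A the top-left 2x2 block, and build the inverse from invA and
+		// the scalar Schur complement d - c * invA * b.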
+		ExprP<Mat3>		mat		= args.a;
+		ExprP<Mat2>		invA	= bindExpression("invA", ctx,
+												 inverse(mat2(vec2(mat[0][0], mat[0][1]),
+															  vec2(mat[1][0], mat[1][1]))));
+
+		ExprP<Vec2>		matB	= bindExpression("matB", ctx, vec2(mat[2][0], mat[2][1]));
+		ExprP<Vec2>		matC	= bindExpression("matC", ctx, vec2(mat[0][2], mat[1][2]));
+		ExprP<float>	matD	= bindExpression("matD", ctx, mat[2][2]);
+
+		ExprP<float>	schur	= bindExpression("schur", ctx,
+												 constant(1.0f) /
+												 (matD - dot(matC * invA, matB)));
+
+		ExprP<Vec2>		t1		= invA * matB;
+		ExprP<Vec2>		t2		= t1 * schur;
+		ExprP<Mat2>		t3		= outerProduct(t2, matC);
+		ExprP<Mat2>		t4		= t3 * invA;
+		ExprP<Mat2>		t5		= invA + t4;
+		ExprP<Mat2>		blockA	= bindExpression("blockA", ctx, t5);
+		ExprP<Vec2>		blockB	= bindExpression("blockB", ctx,
+												 (invA * matB) * -schur);
+		ExprP<Vec2>		blockC	= bindExpression("blockC", ctx,
+												 (matC * invA) * -schur);
+
+		return mat3(vec3(blockA[0][0], blockA[0][1], blockC[0]),
+					vec3(blockA[1][0], blockA[1][1], blockC[1]),
+					vec3(blockB[0], blockB[1], schur));
+	}
+};
+
+template<>
+class Inverse<4> : public DerivedFunc<Signature<Mat4, Mat4> >
+{
+public:
+	string		getName		(void) const { return "inverse"; }
+
+protected:
+	ExprP<Ret>			doExpand			(ExpandContext&		ctx,
+											 const ArgExprs&	args)			const
+	{
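+		// Same block decomposition as Inverse<3>, but all four blocks are 2x2
+		// matrices and the Schur complement D - C * invA * B is itself
+		// inverted with Inverse<2>.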
+		ExprP<Mat4>	mat		= args.a;
+		ExprP<Mat2>	invA	= bindExpression("invA", ctx,
+											 inverse(mat2(vec2(mat[0][0], mat[0][1]),
+														  vec2(mat[1][0], mat[1][1]))));
+		ExprP<Mat2>	matB	= bindExpression("matB", ctx,
+											 mat2(vec2(mat[2][0], mat[2][1]),
+												  vec2(mat[3][0], mat[3][1])));
+		ExprP<Mat2>	matC	= bindExpression("matC", ctx,
+											 mat2(vec2(mat[0][2], mat[0][3]),
+												  vec2(mat[1][2], mat[1][3])));
+		ExprP<Mat2>	matD	= bindExpression("matD", ctx,
+											 mat2(vec2(mat[2][2], mat[2][3]),
+												  vec2(mat[3][2], mat[3][3])));
+		ExprP<Mat2>	schur	= bindExpression("schur", ctx,
+											 inverse(matD + -(matC * invA * matB)));
+		ExprP<Mat2>	blockA	= bindExpression("blockA", ctx,
+											 invA + (invA * matB * schur * matC * invA));
+		ExprP<Mat2>	blockB	= bindExpression("blockB", ctx,
+											 (-invA) * matB * schur);
+		ExprP<Mat2>	blockC	= bindExpression("blockC", ctx,
+											 (-schur) * matC * invA);
+
+		return mat4(vec4(blockA[0][0], blockA[0][1], blockC[0][0], blockC[0][1]),
+					vec4(blockA[1][0], blockA[1][1], blockC[1][0], blockC[1][1]),
+					vec4(blockB[0][0], blockB[0][1], schur[0][0], schur[0][1]),
+					vec4(blockB[1][0], blockB[1][1], schur[1][0], schur[1][1]));
+	}
+};
+
+class Fma : public DerivedFunc<Signature<float, float, float, float> >
+{
+public:
+	string			getName					(void) const
+	{
+		return "fma";
+	}
+
+	string			getRequiredExtension	(void) const
+	{
+		return "GL_EXT_gpu_shader5";
+	}
+
+protected:
+	ExprP<float>	doExpand				(ExpandContext&, const ArgExprs& x) const
+	{
+		return x.a * x.b + x.c;
+	}
+};
+
+} // Functions
+
+using namespace Functions;
+
+template <typename T>
+ExprP<typename T::Element> ContainerExprPBase<T>::operator[] (int i) const
+{
+	return Functions::getComponent(exprP<T>(*this), i);
+}
+
+ExprP<float> operator+ (const ExprP<float>& arg0, const ExprP<float>& arg1)
+{
+	return app<Add>(arg0, arg1);
+}
+
+ExprP<float> operator- (const ExprP<float>& arg0, const ExprP<float>& arg1)
+{
+	return app<Sub>(arg0, arg1);
+}
+
+ExprP<float> operator- (const ExprP<float>& arg0)
+{
+	return app<Negate>(arg0);
+}
+
+ExprP<float> operator* (const ExprP<float>& arg0, const ExprP<float>& arg1)
+{
+	return app<Mul>(arg0, arg1);
+}
+
+ExprP<float> operator/ (const ExprP<float>& arg0, const ExprP<float>& arg1)
+{
+	return app<Div>(arg0, arg1);
+}
+
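+//! Applies a scalar function component-wise to vectors of size Size, so the
+//! scalar precision rules above carry over to the genType overloads.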
+template <typename Sig_, int Size>
+class GenFunc : public PrimitiveFunc<Signature<
+	typename ContainerOf<typename Sig_::Ret, Size>::Container,
+	typename ContainerOf<typename Sig_::Arg0, Size>::Container,
+	typename ContainerOf<typename Sig_::Arg1, Size>::Container,
+	typename ContainerOf<typename Sig_::Arg2, Size>::Container,
+	typename ContainerOf<typename Sig_::Arg3, Size>::Container> >
+{
+public:
+	typedef typename GenFunc::IArgs		IArgs;
+	typedef typename GenFunc::IRet		IRet;
+
+			GenFunc					(const Func<Sig_>&	scalarFunc) : m_func (scalarFunc) {}
+
+	string	getName					(void) const
+	{
+		return m_func.getName();
+	}
+
+	int		getOutParamIndex		(void) const
+	{
+		return m_func.getOutParamIndex();
+	}
+
+	string	getRequiredExtension	(void) const
+	{
+		return m_func.getRequiredExtension();
+	}
+
+protected:
+	void	doPrint					(ostream& os, const BaseArgExprs& args) const
+	{
+		m_func.print(os, args);
+	}
+
+	IRet	doApply					(const EvalContext& ctx, const IArgs& iargs) const
+	{
+		IRet ret;
+
+		for (int ndx = 0; ndx < Size; ++ndx)
+		{
+			ret[ndx] =
+				m_func.apply(ctx, iargs.a[ndx], iargs.b[ndx], iargs.c[ndx], iargs.d[ndx]);
+		}
+
+		return ret;
+	}
+
+	void	doGetUsedFuncs			(FuncSet& dst) const
+	{
+		m_func.getUsedFuncs(dst);
+	}
+
+	const Func<Sig_>&	m_func;
+};
+
+template <typename F, int Size>
+class VectorizedFunc : public GenFunc<typename F::Sig, Size>
+{
+public:
+	VectorizedFunc	(void) : GenFunc<typename F::Sig, Size>(instance<F>()) {}
+};
+
+
+
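+//! Like GenFunc, but the second argument stays scalar. Used for the mixed
+//! vector/scalar operators (vecN * float, vecN / float) defined below.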
+template <typename Sig_, int Size>
+class FixedGenFunc : public PrimitiveFunc <Signature<
+	typename ContainerOf<typename Sig_::Ret, Size>::Container,
+	typename ContainerOf<typename Sig_::Arg0, Size>::Container,
+	typename Sig_::Arg1,
+	typename ContainerOf<typename Sig_::Arg2, Size>::Container,
+	typename ContainerOf<typename Sig_::Arg3, Size>::Container> >
+{
+public:
+	typedef typename FixedGenFunc::IArgs		IArgs;
+	typedef typename FixedGenFunc::IRet			IRet;
+
+	string						getName			(void) const
+	{
+		return this->doGetScalarFunc().getName();
+	}
+
+protected:
+	void						doPrint			(ostream& os, const BaseArgExprs& args) const
+	{
+		this->doGetScalarFunc().print(os, args);
+	}
+
+	IRet						doApply			(const EvalContext& ctx,
+												 const IArgs&		iargs) const
+	{
+		IRet				ret;
+		const Func<Sig_>&	func	= this->doGetScalarFunc();
+
+		for (int ndx = 0; ndx < Size; ++ndx)
+			ret[ndx] = func.apply(ctx, iargs.a[ndx], iargs.b, iargs.c[ndx], iargs.d[ndx]);
+
+		return ret;
+	}
+
+	virtual const Func<Sig_>&	doGetScalarFunc	(void) const = 0;
+};
+
+template <typename F, int Size>
+class FixedVecFunc : public FixedGenFunc<typename F::Sig, Size>
+{
+protected:
+	const Func<typename F::Sig>& doGetScalarFunc	(void) const { return instance<F>(); }
+};
+
+template<typename Sig>
+struct GenFuncs
+{
+	GenFuncs (const Func<Sig>&			func_,
+			  const GenFunc<Sig, 2>&	func2_,
+			  const GenFunc<Sig, 3>&	func3_,
+			  const GenFunc<Sig, 4>&	func4_)
+		: func	(func_)
+		, func2	(func2_)
+		, func3	(func3_)
+		, func4	(func4_)
+	{}
+
+	const Func<Sig>&		func;
+	const GenFunc<Sig, 2>&	func2;
+	const GenFunc<Sig, 3>&	func3;
+	const GenFunc<Sig, 4>&	func4;
+};
+
+template<typename F>
+GenFuncs<typename F::Sig> makeVectorizedFuncs (void)
+{
+	return GenFuncs<typename F::Sig>(instance<F>(),
+									 instance<VectorizedFunc<F, 2> >(),
+									 instance<VectorizedFunc<F, 3> >(),
+									 instance<VectorizedFunc<F, 4> >());
+}
+
+template<int Size>
+ExprP<Vector<float, Size> > operator*(const ExprP<Vector<float, Size> >& arg0,
+									  const ExprP<Vector<float, Size> >& arg1)
+{
+	return app<VectorizedFunc<Mul, Size> >(arg0, arg1);
+}
+
+template<int Size>
+ExprP<Vector<float, Size> > operator*(const ExprP<Vector<float, Size> >&	arg0,
+									  const ExprP<float>&					arg1)
+{
+	return app<FixedVecFunc<Mul, Size> >(arg0, arg1);
+}
+
+template<int Size>
+ExprP<Vector<float, Size> > operator/(const ExprP<Vector<float, Size> >&	arg0,
+									  const ExprP<float>&					arg1)
+{
+	return app<FixedVecFunc<Div, Size> >(arg0, arg1);
+}
+
+template<int Size>
+ExprP<Vector<float, Size> > operator-(const ExprP<Vector<float, Size> >& arg0)
+{
+	return app<VectorizedFunc<Negate, Size> >(arg0);
+}
+
+template<int Size>
+ExprP<Vector<float, Size> > operator-(const ExprP<Vector<float, Size> >& arg0,
+									  const ExprP<Vector<float, Size> >& arg1)
+{
+	return app<VectorizedFunc<Sub, Size> >(arg0, arg1);
+}
+
+template<int LeftRows, int Middle, int RightCols>
+ExprP<Matrix<float, LeftRows, RightCols> >
+operator* (const ExprP<Matrix<float, LeftRows, Middle> >&	left,
+		   const ExprP<Matrix<float, Middle, RightCols> >&	right)
+{
+	return app<MatMul<LeftRows, Middle, RightCols> >(left, right);
+}
+
+template<int Rows, int Cols>
+ExprP<Vector<float, Rows> > operator* (const ExprP<Vector<float, Cols> >&		left,
+									   const ExprP<Matrix<float, Rows, Cols> >&	right)
+{
+	return app<VecMatMul<Rows, Cols> >(left, right);
+}
+
+template<int Rows, int Cols>
+ExprP<Vector<float, Cols> > operator* (const ExprP<Matrix<float, Rows, Cols> >&	left,
+									   const ExprP<Vector<float, Rows> >&		right)
+{
+	return app<MatVecMul<Rows, Cols> >(left, right);
+}
+
+template<int Rows, int Cols>
+ExprP<Matrix<float, Rows, Cols> > operator* (const ExprP<Matrix<float, Rows, Cols> >&	left,
+											 const ExprP<float>&						right)
+{
+	return app<ScalarMatFunc<Mul, Rows, Cols> >(left, right);
+}
+
+template<int Rows, int Cols>
+ExprP<Matrix<float, Rows, Cols> > operator+ (const ExprP<Matrix<float, Rows, Cols> >&	left,
+											 const ExprP<Matrix<float, Rows, Cols> >&	right)
+{
+	return app<CompMatFunc<Add, Rows, Cols> >(left, right);
+}
+
+template<int Rows, int Cols>
+ExprP<Matrix<float, Rows, Cols> > operator- (const ExprP<Matrix<float, Rows, Cols> >&	mat)
+{
+	return app<MatNeg<Rows, Cols> >(mat);
+}
+
+template <typename T>
+class Sampling
+{
+public:
+	virtual void	genFixeds	(const FloatFormat&, vector<T>&)			const {}
+	virtual T		genRandom	(const FloatFormat&, Precision, Random&)	const { return T(); }
+	virtual double	getWeight	(void)										const { return 0.0; }
+};
+
+template <>
+class DefaultSampling<Void> : public Sampling<Void>
+{
+public:
+	void	genFixeds	(const FloatFormat&, vector<Void>& dst) const { dst.push_back(Void()); }
+};
+
+template <>
+class DefaultSampling<bool> : public Sampling<bool>
+{
+public:
+	void	genFixeds	(const FloatFormat&, vector<bool>& dst) const
+	{
+		dst.push_back(true);
+		dst.push_back(false);
+	}
+};
+
+template <>
+class DefaultSampling<int> : public Sampling<int>
+{
+public:
+	int		genRandom	(const FloatFormat&, Precision prec, Random& rnd) const
+	{
+		const int	exp		= rnd.getInt(0, getNumBits(prec)-2);
+		const int	sign	= rnd.getBool() ? -1 : 1;
+
+		return sign * rnd.getInt(0, (deInt32)1 << exp);
+	}
+
+	void	genFixeds	(const FloatFormat&, vector<int>& dst) const
+	{
+		dst.push_back(0);
+		dst.push_back(-1);
+		dst.push_back(1);
+	}
+	double	getWeight	(void) const { return 1.0; }
+
+private:
+	static inline int getNumBits (Precision prec)
+	{
+		switch (prec)
+		{
+			case glu::PRECISION_LOWP:		return 8;
+			case glu::PRECISION_MEDIUMP:	return 16;
+			case glu::PRECISION_HIGHP:		return 32;
+			default:
+				DE_ASSERT(false);
+				return 0;
+		}
+	}
+};
+
+template <>
+class DefaultSampling<float> : public Sampling<float>
+{
+public:
+	float	genRandom	(const FloatFormat& format, Precision prec, Random& rnd) const;
+	void	genFixeds	(const FloatFormat& format, vector<float>& dst) const;
+	double	getWeight	(void) const { return 1.0; }
+};
+
+//! Generate a random float from a reasonable general-purpose distribution.
+float DefaultSampling<float>::genRandom (const FloatFormat& format,
+										 Precision,
+										 Random&			rnd) const
+{
+	const int		minExp			= format.getMinExp();
+	const int		maxExp			= format.getMaxExp();
+	const bool		haveSubnormal	= format.hasSubnormal() != tcu::NO;
+
+	// Choose exponent so that the cumulative distribution is cubic.
+	// This makes the probability distribution quadratic, with the peak centered on zero.
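+	// E.g. for a binary32-like format (minExp = -126, maxExp = 127) this makes
+	// exponents near zero much more likely than exponents near the ends of the
+	// range.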
+	const double	minRoot			= deCbrt(minExp - 0.5 - (haveSubnormal ? 1.0 : 0.0));
+	const double	maxRoot			= deCbrt(maxExp + 0.5);
+	const int		fractionBits	= format.getFractionBits();
+	const int		exp				= int(deRoundEven(dePow(rnd.getDouble(minRoot, maxRoot),
+															3.0)));
+	float			base			= 0.0f; // integral power of two
+	float			quantum			= 0.0f; // smallest representable difference in the binade
+	float			significand		= 0.0f; // Significand.
+
+	DE_ASSERT(fractionBits < std::numeric_limits<float>::digits);
+
+	// Generate some occasional special numbers
+	switch (rnd.getInt(0, 64))
+	{
+		case 0:		return 0;
+		case 1:		return TCU_INFINITY;
+		case 2:		return -TCU_INFINITY;
+		case 3:		return TCU_NAN;
+		default:	break;
+	}
+
+	if (exp >= minExp)
+	{
+		// Normal number
+		base = deFloatLdExp(1.0f, exp);
+		quantum = deFloatLdExp(1.0f, exp - fractionBits);
+	}
+	else
+	{
+		// Subnormal
+		base = 0.0f;
+		quantum = deFloatLdExp(1.0f, minExp - fractionBits);
+	}
+
+	switch (rnd.getInt(0, 16))
+	{
+		case 0: // The highest number in this binade, significand is all bits one.
+			significand = base - quantum;
+			break;
+		case 1: // Significand is one.
+			significand = quantum;
+			break;
+		case 2: // Significand is zero.
+			significand = 0.0;
+			break;
+		default: // Random (evenly distributed) significand.
+		{
+			deUint64 intFraction = rnd.getUint64() & ((1 << fractionBits) - 1);
+			significand = float(intFraction) * quantum;
+		}
+	}
+
+	// Produce positive numbers more often than negative.
+	return (rnd.getInt(0,3) == 0 ? -1.0f : 1.0f) * (base + significand);
+}
+
+//! Generate a standard set of floats that should always be tested.
+void DefaultSampling<float>::genFixeds (const FloatFormat& format, vector<float>& dst) const
+{
+	const int			minExp			= format.getMinExp();
+	const int			maxExp			= format.getMaxExp();
+	const int			fractionBits	= format.getFractionBits();
+	const float			minQuantum		= deFloatLdExp(1.0f, minExp - fractionBits);
+	const float			minNormalized	= deFloatLdExp(1.0f, minExp);
+	const float			maxQuantum		= deFloatLdExp(1.0f, maxExp - fractionBits);
+
+	// NaN
+	dst.push_back(TCU_NAN);
+	// Zero
+	dst.push_back(0.0f);
+
+	for (int sign = -1; sign <= 1; sign += 2)
+	{
+		// Smallest subnormal
+		dst.push_back((float)sign * minQuantum);
+
+		// Largest subnormal
+		dst.push_back((float)sign * (minNormalized - minQuantum));
+
+		// Smallest normalized
+		dst.push_back((float)sign * minNormalized);
+
+		// Next smallest normalized
+		dst.push_back((float)sign * (minNormalized + minQuantum));
+
+		dst.push_back((float)sign * 0.5f);
+		dst.push_back((float)sign * 1.0f);
+		dst.push_back((float)sign * 2.0f);
+
+		// Largest number
+		dst.push_back((float)sign * (deFloatLdExp(1.0f, maxExp) +
+									(deFloatLdExp(1.0f, maxExp) - maxQuantum)));
+
+		dst.push_back((float)sign * TCU_INFINITY);
+	}
+}
+
+template <typename T, int Size>
+class DefaultSampling<Vector<T, Size> > : public Sampling<Vector<T, Size> >
+{
+public:
+	typedef Vector<T, Size>		Value;
+
+	Value	genRandom	(const FloatFormat& fmt, Precision prec, Random& rnd) const
+	{
+		Value ret;
+
+		for (int ndx = 0; ndx < Size; ++ndx)
+			ret[ndx] = instance<DefaultSampling<T> >().genRandom(fmt, prec, rnd);
+
+		return ret;
+	}
+
+	void	genFixeds	(const FloatFormat& fmt, vector<Value>& dst) const
+	{
+		vector<T> scalars;
+
+		instance<DefaultSampling<T> >().genFixeds(fmt, scalars);
+
+		for (size_t scalarNdx = 0; scalarNdx < scalars.size(); ++scalarNdx)
+			dst.push_back(Value(scalars[scalarNdx]));
+	}
+
+	double	getWeight	(void) const
+	{
+		return dePow(instance<DefaultSampling<T> >().getWeight(), Size);
+	}
+};
+
+template <typename T, int Rows, int Columns>
+class DefaultSampling<Matrix<T, Rows, Columns> > : public Sampling<Matrix<T, Rows, Columns> >
+{
+public:
+	typedef Matrix<T, Rows, Columns>		Value;
+
+	Value	genRandom	(const FloatFormat& fmt, Precision prec, Random& rnd) const
+	{
+		Value ret;
+
+		for (int rowNdx = 0; rowNdx < Rows; ++rowNdx)
+			for (int colNdx = 0; colNdx < Columns; ++colNdx)
+				ret(rowNdx, colNdx) = instance<DefaultSampling<T> >().genRandom(fmt, prec, rnd);
+
+		return ret;
+	}
+
+	void	genFixeds	(const FloatFormat& fmt, vector<Value>& dst) const
+	{
+		vector<T> scalars;
+
+		instance<DefaultSampling<T> >().genFixeds(fmt, scalars);
+
+		for (size_t scalarNdx = 0; scalarNdx < scalars.size(); ++scalarNdx)
+			dst.push_back(Value(scalars[scalarNdx]));
+
+		if (Columns == Rows)
+		{
+			Value	mat	(0.0);
+			T		x	= T(1.0f);
+			mat[0][0] = x;
+			for (int ndx = 0; ndx < Columns; ++ndx)
+			{
+				mat[Columns-1-ndx][ndx] = x;
+				x *= T(2.0f);
+			}
+			dst.push_back(mat);
+		}
+	}
+
+	double	getWeight	(void) const
+	{
+		return dePow(instance<DefaultSampling<T> >().getWeight(), Rows * Columns);
+	}
+};
+
+struct CaseContext
+{
+					CaseContext		(const string&		name_,
+									 TestContext&		testContext_,
+									 const FloatFormat&	floatFormat_,
+									 const FloatFormat&	highpFormat_,
+									 Precision			precision_,
+									 ShaderType			shaderType_,
+									 size_t				numRandoms_)
+						: name				(name_)
+						, testContext		(testContext_)
+						, floatFormat		(floatFormat_)
+						, highpFormat		(highpFormat_)
+						, precision			(precision_)
+						, shaderType		(shaderType_)
+						, numRandoms		(numRandoms_) {}
+
+	string			name;
+	TestContext&	testContext;
+	FloatFormat		floatFormat;
+	FloatFormat		highpFormat;
+	Precision		precision;
+	ShaderType		shaderType;
+	size_t			numRandoms;
+};
+
+template<typename In0_ = Void, typename In1_ = Void, typename In2_ = Void, typename In3_ = Void>
+struct InTypes
+{
+	typedef	In0_	In0;
+	typedef	In1_	In1;
+	typedef	In2_	In2;
+	typedef	In3_	In3;
+};
+
+template <typename In>
+int numInputs (void)
+{
+	return (!isTypeValid<typename In::In0>() ? 0 :
+			!isTypeValid<typename In::In1>() ? 1 :
+			!isTypeValid<typename In::In2>() ? 2 :
+			!isTypeValid<typename In::In3>() ? 3 :
+			4);
+}
+
+template<typename Out0_, typename Out1_ = Void>
+struct OutTypes
+{
+	typedef	Out0_	Out0;
+	typedef	Out1_	Out1;
+};
+
+template <typename Out>
+int numOutputs (void)
+{
+	return (!isTypeValid<typename Out::Out0>() ? 0 :
+			!isTypeValid<typename Out::Out1>() ? 1 :
+			2);
+}
+
+template<typename In>
+struct Inputs
+{
+	vector<typename In::In0>	in0;
+	vector<typename In::In1>	in1;
+	vector<typename In::In2>	in2;
+	vector<typename In::In3>	in3;
+};
+
+template<typename Out>
+struct Outputs
+{
+	Outputs	(size_t size) : out0(size), out1(size) {}
+
+	vector<typename Out::Out0>	out0;
+	vector<typename Out::Out1>	out1;
+};
+
+template<typename In, typename Out>
+struct Variables
+{
+	VariableP<typename In::In0>		in0;
+	VariableP<typename In::In1>		in1;
+	VariableP<typename In::In2>		in2;
+	VariableP<typename In::In3>		in3;
+	VariableP<typename Out::Out0>	out0;
+	VariableP<typename Out::Out1>	out1;
+};
+
+template<typename In>
+struct Samplings
+{
+	Samplings	(const Sampling<typename In::In0>&	in0_,
+				 const Sampling<typename In::In1>&	in1_,
+				 const Sampling<typename In::In2>&	in2_,
+				 const Sampling<typename In::In3>&	in3_)
+		: in0 (in0_), in1 (in1_), in2 (in2_), in3 (in3_) {}
+
+	const Sampling<typename In::In0>&	in0;
+	const Sampling<typename In::In1>&	in1;
+	const Sampling<typename In::In2>&	in2;
+	const Sampling<typename In::In3>&	in3;
+};
+
+template<typename In>
+struct DefaultSamplings : Samplings<In>
+{
+	DefaultSamplings	(void)
+		: Samplings<In>(instance<DefaultSampling<typename In::In0> >(),
+						instance<DefaultSampling<typename In::In1> >(),
+						instance<DefaultSampling<typename In::In2> >(),
+						instance<DefaultSampling<typename In::In3> >()) {}
+};
+
+template <typename In, typename Out>
+class BuiltinPrecisionCaseTestInstance : public TestInstance
+{
+public:
+									BuiltinPrecisionCaseTestInstance	(Context&						context,
+																		 const  CaseContext				caseCtx,
+																		 ShaderExecutor&				executor,
+																		 const  Variables<In, Out>		variables,
+																		 const  Samplings<In>&			samplings,
+																		 const  StatementP				stmt)
+										: TestInstance	(context)
+										, m_caseCtx		(caseCtx)
+										, m_executor	(executor)
+										, m_variables	(variables)
+										, m_samplings	(samplings)
+										, m_stmt		(stmt)
+									{
+									}
+	virtual tcu::TestStatus			iterate								(void);
+
+protected:
+	CaseContext						m_caseCtx;
+	ShaderExecutor&					m_executor;
+	Variables<In, Out>				m_variables;
+	const Samplings<In>&			m_samplings;
+	StatementP						m_stmt;
+};
+
+template<class In, class Out>
+tcu::TestStatus BuiltinPrecisionCaseTestInstance<In, Out>::iterate (void)
+{
+	typedef typename	In::In0		In0;
+	typedef typename	In::In1		In1;
+	typedef typename	In::In2		In2;
+	typedef typename	In::In3		In3;
+	typedef typename	Out::Out0	Out0;
+	typedef typename	Out::Out1	Out1;
+
+	Inputs<In>			inputs		= generateInputs(m_samplings, m_caseCtx.floatFormat, m_caseCtx.precision, m_caseCtx.numRandoms, 0xdeadbeefu + m_caseCtx.testContext.getCommandLine().getBaseSeed());
+	const FloatFormat&	fmt			= m_caseCtx.floatFormat;
+	const int			inCount		= numInputs<In>();
+	const int			outCount	= numOutputs<Out>();
+	const size_t		numValues	= (inCount > 0) ? inputs.in0.size() : 1;
+	Outputs<Out>		outputs		(numValues);
+	const FloatFormat	highpFmt	= m_caseCtx.highpFormat;
+	const int			maxMsgs		= 100;
+	int					numErrors	= 0;
+	Environment			env;		// Hoisted out of the inner loop for optimization.
+	ResultCollector		status;
+	TestLog&			testLog		= m_context.getTestContext().getLog();
+
+	const void*			inputArr[]	=
+	{
+		&inputs.in0.front(), &inputs.in1.front(), &inputs.in2.front(), &inputs.in3.front(),
+	};
+	void*				outputArr[]	=
+	{
+		&outputs.out0.front(), &outputs.out1.front(),
+	};
+
+	// Print out the statement and its definitions
+	testLog << TestLog::Message << "Statement: " << m_stmt << TestLog::EndMessage;
+	{
+		ostringstream	oss;
+		FuncSet			funcs;
+
+		m_stmt->getUsedFuncs(funcs);
+		for (FuncSet::const_iterator it = funcs.begin(); it != funcs.end(); ++it)
+		{
+			(*it)->printDefinition(oss);
+		}
+		if (!funcs.empty())
+			testLog << TestLog::Message << "Reference definitions:\n" << oss.str()
+				  << TestLog::EndMessage;
+	}
+
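+	// Sanity check: every used input vector must contain one entry per test value
+	// (cases fall through intentionally).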
+	switch (inCount)
+	{
+		case 4: DE_ASSERT(inputs.in3.size() == numValues);
+		case 3: DE_ASSERT(inputs.in2.size() == numValues);
+		case 2: DE_ASSERT(inputs.in1.size() == numValues);
+		case 1: DE_ASSERT(inputs.in0.size() == numValues);
+		default: break;
+	}
+
+	m_executor.execute(m_context, int(numValues), inputArr, outputArr);
+
+	// Initialize the environment with dummy values so we don't need to bind in the inner loop.
+	{
+		const typename Traits<In0>::IVal		in0;
+		const typename Traits<In1>::IVal		in1;
+		const typename Traits<In2>::IVal		in2;
+		const typename Traits<In3>::IVal		in3;
+		const typename Traits<Out0>::IVal		reference0;
+		const typename Traits<Out1>::IVal		reference1;
+
+		env.bind(*m_variables.in0, in0);
+		env.bind(*m_variables.in1, in1);
+		env.bind(*m_variables.in2, in2);
+		env.bind(*m_variables.in3, in3);
+		env.bind(*m_variables.out0, reference0);
+		env.bind(*m_variables.out1, reference1);
+	}
+
+	// For each input tuple, compute output reference interval and compare
+	// shader output to the reference.
+	for (size_t valueNdx = 0; valueNdx < numValues; valueNdx++)
+	{
+		bool						result		= true;
+		typename Traits<Out0>::IVal	reference0;
+		typename Traits<Out1>::IVal	reference1;
+
+		env.lookup(*m_variables.in0) = convert<In0>(fmt, round(fmt, inputs.in0[valueNdx]));
+		env.lookup(*m_variables.in1) = convert<In1>(fmt, round(fmt, inputs.in1[valueNdx]));
+		env.lookup(*m_variables.in2) = convert<In2>(fmt, round(fmt, inputs.in2[valueNdx]));
+		env.lookup(*m_variables.in3) = convert<In3>(fmt, round(fmt, inputs.in3[valueNdx]));
+
+		{
+			EvalContext	ctx (fmt, m_caseCtx.precision, env);
+			m_stmt->execute(ctx);
+		}
+
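+		// Check each output against its reference interval (cases fall through intentionally).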
+		switch (outCount)
+		{
+			case 2:
+				reference1 = convert<Out1>(highpFmt, env.lookup(*m_variables.out1));
+				if (!status.check(contains(reference1, outputs.out1[valueNdx]),
+									"Shader output 1 is outside acceptable range"))
+					result = false;
+			case 1:
+				reference0 = convert<Out0>(highpFmt, env.lookup(*m_variables.out0));
+				if (!status.check(contains(reference0, outputs.out0[valueNdx]),
+									"Shader output 0 is outside acceptable range"))
+					result = false;
+			default: break;
+		}
+
+		if (!result)
+			++numErrors;
+
+		if ((!result && numErrors <= maxMsgs) || GLS_LOG_ALL_RESULTS)
+		{
+			MessageBuilder	builder	= testLog.message();
+
+			builder << (result ? "Passed" : "Failed") << " sample:\n";
+
+			if (inCount > 0)
+			{
+				builder << "\t" << m_variables.in0->getName() << " = "
+						<< valueToString(highpFmt, inputs.in0[valueNdx]) << "\n";
+			}
+
+			if (inCount > 1)
+			{
+				builder << "\t" << m_variables.in1->getName() << " = "
+						<< valueToString(highpFmt, inputs.in1[valueNdx]) << "\n";
+			}
+
+			if (inCount > 2)
+			{
+				builder << "\t" << m_variables.in2->getName() << " = "
+						<< valueToString(highpFmt, inputs.in2[valueNdx]) << "\n";
+			}
+
+			if (inCount > 3)
+			{
+				builder << "\t" << m_variables.in3->getName() << " = "
+						<< valueToString(highpFmt, inputs.in3[valueNdx]) << "\n";
+			}
+
+			if (outCount > 0)
+			{
+				builder << "\t" << m_variables.out0->getName() << " = "
+						<< valueToString(highpFmt, outputs.out0[valueNdx]) << "\n"
+						<< "\tExpected range: "
+						<< intervalToString<typename Out::Out0>(highpFmt, reference0) << "\n";
+			}
+
+			if (outCount > 1)
+			{
+				builder << "\t" << m_variables.out1->getName() << " = "
+						<< valueToString(highpFmt, outputs.out1[valueNdx]) << "\n"
+						<< "\tExpected range: "
+						<< intervalToString<typename Out::Out1>(highpFmt, reference1) << "\n";
+			}
+
+			builder << TestLog::EndMessage;
+		}
+	}
+
+	if (numErrors > maxMsgs)
+	{
+		testLog << TestLog::Message << "(Skipped " << (numErrors - maxMsgs) << " messages.)"
+			  << TestLog::EndMessage;
+	}
+
+	if (numErrors == 0)
+	{
+		testLog << TestLog::Message << "All " << numValues << " inputs passed."
+			  << TestLog::EndMessage;
+	}
+	else
+	{
+		testLog << TestLog::Message << numErrors << "/" << numValues << " inputs failed."
+			  << TestLog::EndMessage;
+	}
+
+	if (numErrors)
+		return tcu::TestStatus::fail(de::toString(numErrors) + string(" inputs failed. Check the log for details."));
+	else
+		return tcu::TestStatus::pass("Pass");
+}
+
+class PrecisionCase : public TestCase
+{
+protected:
+						PrecisionCase	(const CaseContext& context, const string& name, const string& extension = "")
+							: TestCase		(context.testContext, name.c_str(), name.c_str())
+							, m_ctx			(context)
+							, m_extension	(extension)
+							, m_executor	(DE_NULL)
+							{
+							}
+
+	virtual void		initPrograms	(vk::SourceCollections& programCollection) const
+	{
+		m_executor->setShaderSources(programCollection);
+	}
+
+	const FloatFormat&	getFormat		(void) const			{ return m_ctx.floatFormat; }
+
+	template <typename In, typename Out>
+	void				testStatement	(const Variables<In, Out>& variables, const Statement& stmt);
+
+	template<typename T>
+	Symbol				makeSymbol		(const Variable<T>& variable)
+	{
+		return Symbol(variable.getName(), getVarTypeOf<T>(m_ctx.precision));
+	}
+
+	CaseContext							m_ctx;
+	const string						m_extension;
+	ShaderSpec							m_spec;
+	de::MovePtr<ShaderExecutor>			m_executor;
+};
+
+template <typename In, typename Out>
+void PrecisionCase::testStatement (const Variables<In, Out>& variables, const Statement& stmt)
+{
+	const int		inCount		= numInputs<In>();
+	const int		outCount	= numOutputs<Out>();
+	Environment		env;		// Hoisted out of the inner loop for optimization.
+
+	// Initialize ShaderSpec from precision, variables and statement.
+	{
+		ostringstream os;
+		os << "precision " << glu::getPrecisionName(m_ctx.precision) << " float;\n";
+		m_spec.globalDeclarations = os.str();
+	}
+
+	if (!m_extension.empty())
+		m_spec.globalDeclarations = "#extension " + m_extension + " : require\n";
+
+	m_spec.inputs.resize(inCount);
+
+	switch (inCount)
+	{
+		case 4: m_spec.inputs[3] = makeSymbol(*variables.in3);
+		case 3:	m_spec.inputs[2] = makeSymbol(*variables.in2);
+		case 2:	m_spec.inputs[1] = makeSymbol(*variables.in1);
+		case 1:	m_spec.inputs[0] = makeSymbol(*variables.in0);
+		default: break;
+	}
+
+	m_spec.outputs.resize(outCount);
+
+	switch (outCount)
+	{
+		case 2:	m_spec.outputs[1] = makeSymbol(*variables.out1);
+		case 1:	m_spec.outputs[0] = makeSymbol(*variables.out0);
+		default: break;
+	}
+
+	m_spec.source = de::toString(stmt);
+
+	m_executor = de::MovePtr<ShaderExecutor>(createExecutor(m_ctx.shaderType, m_spec));
+}
+
+template <typename T>
+struct InputLess
+{
+	bool operator() (const T& val1, const T& val2) const
+	{
+		return val1 < val2;
+	}
+};
+
+template <typename T>
+bool inputLess (const T& val1, const T& val2)
+{
+	return InputLess<T>()(val1, val2);
+}
+
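+// Floats are ordered with NaNs last (and all NaNs mutually equivalent) so that the
+// comparison is a valid strict weak ordering for the de-duplication set in generateInputs().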
+template <>
+struct InputLess<float>
+{
+	bool operator() (const float& val1, const float& val2) const
+	{
+		if (deIsNaN(val1))
+			return false;
+		if (deIsNaN(val2))
+			return true;
+		return val1 < val2;
+	}
+};
+
+template <typename T, int Size>
+struct InputLess<Vector<T, Size> >
+{
+	bool operator() (const Vector<T, Size>& vec1, const Vector<T, Size>& vec2) const
+	{
+		for (int ndx = 0; ndx < Size; ++ndx)
+		{
+			if (inputLess(vec1[ndx], vec2[ndx]))
+				return true;
+			if (inputLess(vec2[ndx], vec1[ndx]))
+				return false;
+		}
+
+		return false;
+	}
+};
+
+template <typename T, int Rows, int Cols>
+struct InputLess<Matrix<T, Rows, Cols> >
+{
+	bool operator() (const Matrix<T, Rows, Cols>& mat1,
+					 const Matrix<T, Rows, Cols>& mat2) const
+	{
+		for (int col = 0; col < Cols; ++col)
+		{
+			if (inputLess(mat1[col], mat2[col]))
+				return true;
+			if (inputLess(mat2[col], mat1[col]))
+				return false;
+		}
+
+		return false;
+	}
+};
+
+template <typename In>
+struct InTuple :
+	public Tuple4<typename In::In0, typename In::In1, typename In::In2, typename In::In3>
+{
+	InTuple	(const typename In::In0& in0,
+			 const typename In::In1& in1,
+			 const typename In::In2& in2,
+			 const typename In::In3& in3)
+		: Tuple4<typename In::In0, typename In::In1, typename In::In2, typename In::In3>
+		  (in0, in1, in2, in3) {}
+};
+
+template <typename In>
+struct InputLess<InTuple<In> >
+{
+	bool operator() (const InTuple<In>& in1, const InTuple<In>& in2) const
+	{
+		if (inputLess(in1.a, in2.a))
+			return true;
+		if (inputLess(in2.a, in1.a))
+			return false;
+		if (inputLess(in1.b, in2.b))
+			return true;
+		if (inputLess(in2.b, in1.b))
+			return false;
+		if (inputLess(in1.c, in2.c))
+			return true;
+		if (inputLess(in2.c, in1.c))
+			return false;
+		if (inputLess(in1.d, in2.d))
+			return true;
+		return false;
+	}
+};
+
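+// Build the list of input tuples: first the full Cartesian product of each sampling's
+// fixed values, then random samples, skipping random tuples that duplicate earlier ones.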
+template<typename In>
+Inputs<In> generateInputs (const Samplings<In>&	samplings,
+						   const FloatFormat&	floatFormat,
+						   Precision			intPrecision,
+						   size_t				numSamples,
+						   deUint32				seed)
+{
+	Random										rnd(seed);
+	Inputs<In>									ret;
+	Inputs<In>									fixedInputs;
+	set<InTuple<In>, InputLess<InTuple<In> > >	seenInputs;
+
+	samplings.in0.genFixeds(floatFormat, fixedInputs.in0);
+	samplings.in1.genFixeds(floatFormat, fixedInputs.in1);
+	samplings.in2.genFixeds(floatFormat, fixedInputs.in2);
+	samplings.in3.genFixeds(floatFormat, fixedInputs.in3);
+
+	for (size_t ndx0 = 0; ndx0 < fixedInputs.in0.size(); ++ndx0)
+	{
+		for (size_t ndx1 = 0; ndx1 < fixedInputs.in1.size(); ++ndx1)
+		{
+			for (size_t ndx2 = 0; ndx2 < fixedInputs.in2.size(); ++ndx2)
+			{
+				for (size_t ndx3 = 0; ndx3 < fixedInputs.in3.size(); ++ndx3)
+				{
+					const InTuple<In>	tuple	(fixedInputs.in0[ndx0],
+												 fixedInputs.in1[ndx1],
+												 fixedInputs.in2[ndx2],
+												 fixedInputs.in3[ndx3]);
+
+					seenInputs.insert(tuple);
+					ret.in0.push_back(tuple.a);
+					ret.in1.push_back(tuple.b);
+					ret.in2.push_back(tuple.c);
+					ret.in3.push_back(tuple.d);
+				}
+			}
+		}
+	}
+
+	for (size_t ndx = 0; ndx < numSamples; ++ndx)
+	{
+		const typename In::In0	in0		= samplings.in0.genRandom(floatFormat, intPrecision, rnd);
+		const typename In::In1	in1		= samplings.in1.genRandom(floatFormat, intPrecision, rnd);
+		const typename In::In2	in2		= samplings.in2.genRandom(floatFormat, intPrecision, rnd);
+		const typename In::In3	in3		= samplings.in3.genRandom(floatFormat, intPrecision, rnd);
+		const InTuple<In>		tuple	(in0, in1, in2, in3);
+
+		if (de::contains(seenInputs, tuple))
+			continue;
+
+		seenInputs.insert(tuple);
+		ret.in0.push_back(in0);
+		ret.in1.push_back(in1);
+		ret.in2.push_back(in2);
+		ret.in3.push_back(in3);
+	}
+
+	return ret;
+}
+
+class FuncCaseBase : public PrecisionCase
+{
+protected:
+				FuncCaseBase	(const CaseContext& context, const string& name, const FuncBase& func)
+									: PrecisionCase	(context, name, func.getRequiredExtension())
+								{
+								}
+
+	StatementP	m_stmt;
+};
+
+template <typename Sig>
+class FuncCase : public FuncCaseBase
+{
+public:
+	typedef Func<Sig>						CaseFunc;
+	typedef typename Sig::Ret				Ret;
+	typedef typename Sig::Arg0				Arg0;
+	typedef typename Sig::Arg1				Arg1;
+	typedef typename Sig::Arg2				Arg2;
+	typedef typename Sig::Arg3				Arg3;
+	typedef InTypes<Arg0, Arg1, Arg2, Arg3>	In;
+	typedef OutTypes<Ret>					Out;
+
+											FuncCase		(const CaseContext& context, const string& name, const CaseFunc& func)
+												: FuncCaseBase	(context, name, func)
+												, m_func		(func)
+												{
+													buildTest();
+												}
+
+	virtual	TestInstance*					createInstance	(Context& context) const
+	{
+		return new BuiltinPrecisionCaseTestInstance<In, Out>(context, m_ctx, *m_executor, m_variables, getSamplings(), m_stmt);
+	}
+
+protected:
+	void									buildTest		(void);
+	virtual const Samplings<In>&			getSamplings	(void) const
+	{
+		return instance<DefaultSamplings<In> >();
+	}
+
+private:
+	const CaseFunc&							m_func;
+	Variables<In, Out>						m_variables;
+};
+
+template <typename Sig>
+void FuncCase<Sig>::buildTest (void)
+{
+	m_variables.out0	= variable<Ret>("out0");
+	m_variables.out1	= variable<Void>("out1");
+	m_variables.in0		= variable<Arg0>("in0");
+	m_variables.in1		= variable<Arg1>("in1");
+	m_variables.in2		= variable<Arg2>("in2");
+	m_variables.in3		= variable<Arg3>("in3");
+
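+	// Build the statement 'out0 = func(in0, in1, in2, in3)'; the same statement is
+	// compiled into the shader and evaluated on intervals for the reference result.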
+	{
+		ExprP<Ret> expr	= applyVar(m_func, m_variables.in0, m_variables.in1, m_variables.in2, m_variables.in3);
+		m_stmt			= variableAssignment(m_variables.out0, expr);
+
+		this->testStatement(m_variables, *m_stmt);
+	}
+}
+
+template <typename Sig>
+class InOutFuncCase : public FuncCaseBase
+{
+public:
+	typedef Func<Sig>					CaseFunc;
+	typedef typename Sig::Ret			Ret;
+	typedef typename Sig::Arg0			Arg0;
+	typedef typename Sig::Arg1			Arg1;
+	typedef typename Sig::Arg2			Arg2;
+	typedef typename Sig::Arg3			Arg3;
+	typedef InTypes<Arg0, Arg2, Arg3>	In;
+	typedef OutTypes<Ret, Arg1>			Out;
+
+										InOutFuncCase	(const CaseContext& context, const string& name, const CaseFunc& func)
+											: FuncCaseBase	(context, name, func)
+											, m_func		(func)
+											{
+												buildTest();
+											}
+	virtual TestInstance*				createInstance	(Context& context) const
+	{
+		return new BuiltinPrecisionCaseTestInstance<In, Out>(context, m_ctx, *m_executor, m_variables, getSamplings(), m_stmt);
+	}
+
+protected:
+	void								buildTest		(void);
+	virtual const Samplings<In>&		getSamplings	(void) const
+	{
+		return instance<DefaultSamplings<In> >();
+	}
+
+private:
+	const CaseFunc&						m_func;
+	Variables<In, Out>					m_variables;
+};
+
+template <typename Sig>
+void InOutFuncCase<Sig>::buildTest (void)
+{
+
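+	// Arg1 is the function's out-parameter: it becomes shader output 'out1', and the
+	// remaining input arguments shift down by one slot.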
+	m_variables.out0	= variable<Ret>("out0");
+	m_variables.out1	= variable<Arg1>("out1");
+	m_variables.in0		= variable<Arg0>("in0");
+	m_variables.in1		= variable<Arg2>("in1");
+	m_variables.in2		= variable<Arg3>("in2");
+	m_variables.in3		= variable<Void>("in3");
+
+	{
+		ExprP<Ret> expr	= applyVar(m_func, m_variables.in0, m_variables.out1, m_variables.in1, m_variables.in2);
+		m_stmt			= variableAssignment(m_variables.out0, expr);
+
+		this->testStatement(m_variables, *m_stmt);
+	}
+}
+
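+// Select the case class from the function's out-parameter index: -1 means the function
+// only returns a value (FuncCase), 1 means the second argument is an out-parameter (InOutFuncCase).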
+template <typename Sig>
+PrecisionCase* createFuncCase (const CaseContext& context, const string& name, const Func<Sig>&	func)
+{
+	switch (func.getOutParamIndex())
+	{
+		case -1:
+			return new FuncCase<Sig>(context, name, func);
+		case 1:
+			return new InOutFuncCase<Sig>(context, name, func);
+		default:
+			DE_FATAL("Impossible");
+	}
+	return DE_NULL;
+}
+
+class CaseFactory
+{
+public:
+	virtual						~CaseFactory	(void) {}
+	virtual MovePtr<TestNode>	createCase		(const CaseContext& ctx) const = 0;
+	virtual string				getName			(void) const = 0;
+	virtual string				getDesc			(void) const = 0;
+};
+
+class FuncCaseFactory : public CaseFactory
+{
+public:
+	virtual const FuncBase&		getFunc			(void) const = 0;
+	string						getName			(void) const { return de::toLower(getFunc().getName()); }
+	string						getDesc			(void) const { return "Function '" + getFunc().getName() + "'";	}
+};
+
+template <typename Sig>
+class GenFuncCaseFactory : public CaseFactory
+{
+public:
+						GenFuncCaseFactory	(const GenFuncs<Sig>& funcs, const string& name)
+							: m_funcs			(funcs)
+							, m_name			(de::toLower(name))
+							{
+							}
+
+	MovePtr<TestNode>	createCase			(const CaseContext& ctx) const
+	{
+		TestCaseGroup* group = new TestCaseGroup(ctx.testContext, ctx.name.c_str(), ctx.name.c_str());
+
+		group->addChild(createFuncCase(ctx, "scalar", m_funcs.func));
+		group->addChild(createFuncCase(ctx, "vec2", m_funcs.func2));
+		group->addChild(createFuncCase(ctx, "vec3", m_funcs.func3));
+		group->addChild(createFuncCase(ctx, "vec4", m_funcs.func4));
+		return MovePtr<TestNode>(group);
+	}
+
+	string				getName				(void) const { return m_name; }
+	string				getDesc				(void) const { return "Function '" + m_funcs.func.getName() + "'"; }
+
+private:
+	const GenFuncs<Sig>	m_funcs;
+	string				m_name;
+};
+
+template <template <int> class GenF>
+class TemplateFuncCaseFactory : public FuncCaseFactory
+{
+public:
+	MovePtr<TestNode>	createCase		(const CaseContext& ctx) const
+	{
+		TestCaseGroup*	group = new TestCaseGroup(ctx.testContext, ctx.name.c_str(), ctx.name.c_str());
+
+		group->addChild(createFuncCase(ctx, "scalar", instance<GenF<1> >()));
+		group->addChild(createFuncCase(ctx, "vec2", instance<GenF<2> >()));
+		group->addChild(createFuncCase(ctx, "vec3", instance<GenF<3> >()));
+		group->addChild(createFuncCase(ctx, "vec4", instance<GenF<4> >()));
+
+		return MovePtr<TestNode>(group);
+	}
+
+	const FuncBase&		getFunc			(void) const { return instance<GenF<1> >(); }
+};
+
+template <template <int> class GenF>
+class SquareMatrixFuncCaseFactory : public FuncCaseFactory
+{
+public:
+	MovePtr<TestNode>	createCase		(const CaseContext& ctx) const
+	{
+		TestCaseGroup* group = new TestCaseGroup(ctx.testContext, ctx.name.c_str(), ctx.name.c_str());
+
+		group->addChild(createFuncCase(ctx, "mat2", instance<GenF<2> >()));
+#if 0
+		// disabled until we get reasonable results
+		group->addChild(createFuncCase(ctx, "mat3", instance<GenF<3> >()));
+		group->addChild(createFuncCase(ctx, "mat4", instance<GenF<4> >()));
+#endif
+
+		return MovePtr<TestNode>(group);
+	}
+
+	const FuncBase&		getFunc			(void) const { return instance<GenF<2> >(); }
+};
+
+template <template <int, int> class GenF>
+class MatrixFuncCaseFactory : public FuncCaseFactory
+{
+public:
+	MovePtr<TestNode>	createCase		(const CaseContext& ctx) const
+	{
+		TestCaseGroup*	const group = new TestCaseGroup(ctx.testContext, ctx.name.c_str(), ctx.name.c_str());
+
+		this->addCase<2, 2>(ctx, group);
+		this->addCase<3, 2>(ctx, group);
+		this->addCase<4, 2>(ctx, group);
+		this->addCase<2, 3>(ctx, group);
+		this->addCase<3, 3>(ctx, group);
+		this->addCase<4, 3>(ctx, group);
+		this->addCase<2, 4>(ctx, group);
+		this->addCase<3, 4>(ctx, group);
+		this->addCase<4, 4>(ctx, group);
+
+		return MovePtr<TestNode>(group);
+	}
+
+	const FuncBase&		getFunc			(void) const { return instance<GenF<2,2> >(); }
+
+private:
+	template <int Rows, int Cols>
+	void				addCase			(const CaseContext& ctx, TestCaseGroup* group) const
+	{
+		const char*	const name = dataTypeNameOf<Matrix<float, Rows, Cols> >();
+		group->addChild(createFuncCase(ctx, name, instance<GenF<Rows, Cols> >()));
+	}
+};
+
+template <typename Sig>
+class SimpleFuncCaseFactory : public CaseFactory
+{
+public:
+						SimpleFuncCaseFactory	(const Func<Sig>& func) : m_func(func) {}
+
+	MovePtr<TestNode>	createCase				(const CaseContext& ctx) const	{ return MovePtr<TestNode>(createFuncCase(ctx, ctx.name.c_str(), m_func)); }
+	string				getName					(void) const					{ return de::toLower(m_func.getName()); }
+	string				getDesc					(void) const					{ return "Function '" + getName() + "'"; }
+
+private:
+	const Func<Sig>&	m_func;
+};
+
+template <typename F>
+SharedPtr<SimpleFuncCaseFactory<typename F::Sig> > createSimpleFuncCaseFactory (void)
+{
+	return SharedPtr<SimpleFuncCaseFactory<typename F::Sig> >(new SimpleFuncCaseFactory<typename F::Sig>(instance<F>()));
+}
+
+class CaseFactories
+{
+public:
+	virtual											~CaseFactories	(void) {}
+	virtual const std::vector<const CaseFactory*>	getFactories	(void) const = 0;
+};
+
+class BuiltinFuncs : public CaseFactories
+{
+public:
+	const vector<const CaseFactory*>		getFactories	(void) const
+	{
+		vector<const CaseFactory*> ret;
+
+		for (size_t ndx = 0; ndx < m_factories.size(); ++ndx)
+			ret.push_back(m_factories[ndx].get());
+
+		return ret;
+	}
+
+	void									addFactory		(SharedPtr<const CaseFactory> fact) { m_factories.push_back(fact); }
+
+private:
+	vector<SharedPtr<const CaseFactory> >	m_factories;
+};
+
+template <typename F>
+void addScalarFactory(BuiltinFuncs& funcs, string name = "")
+{
+	if (name.empty())
+		name = instance<F>().getName();
+
+	funcs.addFactory(SharedPtr<const CaseFactory>(new GenFuncCaseFactory<typename F::Sig>(makeVectorizedFuncs<F>(), name)));
+}
+
+MovePtr<const CaseFactories> createComputeOnlyBuiltinCases (void)
+{
+	MovePtr<BuiltinFuncs>	funcs	(new BuiltinFuncs());
+
+	// Tests for GLSL ES 3.0 built-ins
+
+	addScalarFactory<Add>(*funcs);
+	addScalarFactory<Sub>(*funcs);
+	addScalarFactory<Mul>(*funcs);
+	addScalarFactory<Div>(*funcs);
+
+	addScalarFactory<Radians>(*funcs);
+	addScalarFactory<Degrees>(*funcs);
+	addScalarFactory<Sin>(*funcs);
+	addScalarFactory<Cos>(*funcs);
+	addScalarFactory<Tan>(*funcs);
+	addScalarFactory<ASin>(*funcs);
+	addScalarFactory<ACos>(*funcs);
+	addScalarFactory<ATan2>(*funcs, "atan2");
+	addScalarFactory<ATan>(*funcs);
+	addScalarFactory<Sinh>(*funcs);
+	addScalarFactory<Cosh>(*funcs);
+	addScalarFactory<Tanh>(*funcs);
+	addScalarFactory<ASinh>(*funcs);
+	addScalarFactory<ACosh>(*funcs);
+	addScalarFactory<ATanh>(*funcs);
+
+	addScalarFactory<Pow>(*funcs);
+	addScalarFactory<Exp>(*funcs);
+	addScalarFactory<Log>(*funcs);
+	addScalarFactory<Exp2>(*funcs);
+	addScalarFactory<Log2>(*funcs);
+	addScalarFactory<Sqrt>(*funcs);
+	addScalarFactory<InverseSqrt>(*funcs);
+
+	addScalarFactory<Abs>(*funcs);
+	addScalarFactory<Sign>(*funcs);
+	addScalarFactory<Floor>(*funcs);
+	addScalarFactory<Trunc>(*funcs);
+	addScalarFactory<Round>(*funcs);
+	addScalarFactory<RoundEven>(*funcs);
+	addScalarFactory<Ceil>(*funcs);
+	addScalarFactory<Fract>(*funcs);
+	addScalarFactory<Mod>(*funcs);
+	funcs->addFactory(createSimpleFuncCaseFactory<Modf>());
+	addScalarFactory<Min>(*funcs);
+	addScalarFactory<Max>(*funcs);
+	addScalarFactory<Clamp>(*funcs);
+	addScalarFactory<Mix>(*funcs);
+	addScalarFactory<Step>(*funcs);
+	addScalarFactory<SmoothStep>(*funcs);
+
+	funcs->addFactory(SharedPtr<const CaseFactory>(new TemplateFuncCaseFactory<Length>()));
+	funcs->addFactory(SharedPtr<const CaseFactory>(new TemplateFuncCaseFactory<Distance>()));
+	funcs->addFactory(SharedPtr<const CaseFactory>(new TemplateFuncCaseFactory<Dot>()));
+	funcs->addFactory(createSimpleFuncCaseFactory<Cross>());
+	funcs->addFactory(SharedPtr<const CaseFactory>(new TemplateFuncCaseFactory<Normalize>()));
+	funcs->addFactory(SharedPtr<const CaseFactory>(new TemplateFuncCaseFactory<FaceForward>()));
+	funcs->addFactory(SharedPtr<const CaseFactory>(new TemplateFuncCaseFactory<Reflect>()));
+	funcs->addFactory(SharedPtr<const CaseFactory>(new TemplateFuncCaseFactory<Refract>()));
+
+	funcs->addFactory(SharedPtr<const CaseFactory>(new MatrixFuncCaseFactory<MatrixCompMult>()));
+	funcs->addFactory(SharedPtr<const CaseFactory>(new MatrixFuncCaseFactory<OuterProduct>()));
+	funcs->addFactory(SharedPtr<const CaseFactory>(new MatrixFuncCaseFactory<Transpose>()));
+	funcs->addFactory(SharedPtr<const CaseFactory>(new SquareMatrixFuncCaseFactory<Determinant>()));
+	funcs->addFactory(SharedPtr<const CaseFactory>(new SquareMatrixFuncCaseFactory<Inverse>()));
+
+	return MovePtr<const CaseFactories>(funcs.release());
+}
+
+MovePtr<const CaseFactories> createCompleteBuiltinCases (void)
+{
+	MovePtr<BuiltinFuncs>	funcs	(new BuiltinFuncs());
+
+	// Tests for GLSL ES 3.1 built-ins
+	addScalarFactory<FrExp>(*funcs);
+	addScalarFactory<LdExp>(*funcs);
+	addScalarFactory<Fma>(*funcs);
+
+	return MovePtr<const CaseFactories>(funcs.release());
+}
+
+struct PrecisionTestContext
+{
+							PrecisionTestContext	(TestContext&				testCtx_,
+													 const FloatFormat&			highp_,
+													 const FloatFormat&			mediump_,
+													 const FloatFormat&			lowp_,
+													 const vector<ShaderType>&	shaderTypes_,
+													 int						numRandoms_)
+								: testCtx				(testCtx_)
+								, shaderTypes			(shaderTypes_)
+								, numRandoms			(numRandoms_)
+								{
+									formats[glu::PRECISION_HIGHP]	= &highp_;
+									formats[glu::PRECISION_MEDIUMP]	= &mediump_;
+									formats[glu::PRECISION_LOWP]	= &lowp_;
+								}
+
+	TestContext&			testCtx;
+	const FloatFormat*		formats[glu::PRECISION_LAST];
+	vector<ShaderType>		shaderTypes;
+	int						numRandoms;
+};
+
+TestCaseGroup* createFuncGroup (const PrecisionTestContext&	ctx, const CaseFactory& factory)
+{
+	TestCaseGroup* const	group	= new TestCaseGroup(ctx.testCtx, factory.getName().c_str(), factory.getDesc().c_str());
+
+	for (int precNdx = glu::PRECISION_MEDIUMP; precNdx < glu::PRECISION_LAST; ++precNdx)
+	{
+		const Precision		precision	= Precision(precNdx);
+		const string		precName	(glu::getPrecisionName(precision));
+		const FloatFormat&	fmt			= *de::getSizedArrayElement<glu::PRECISION_LAST>(ctx.formats, precNdx);
+		const FloatFormat&	highpFmt	= *de::getSizedArrayElement<glu::PRECISION_LAST>(ctx.formats,
+																						 glu::PRECISION_HIGHP);
+
+		for (size_t shaderNdx = 0; shaderNdx < ctx.shaderTypes.size(); ++shaderNdx)
+		{
+			const ShaderType	shaderType	= ctx.shaderTypes[shaderNdx];
+			const string		shaderName	(glu::getShaderTypeName(shaderType));
+			const string		name		= precName + "_" + shaderName;
+			const CaseContext	caseCtx		(name, ctx.testCtx, fmt, highpFmt,
+											 precision, shaderType, ctx.numRandoms);
+
+			group->addChild(factory.createCase(caseCtx).release());
+		}
+	}
+
+	return group;
+}
+
+void addBuiltinPrecisionTests (TestContext&					testCtx,
+							   const CaseFactories&			cases,
+							   const vector<ShaderType>&	shaderTypes,
+							   TestCaseGroup&				dstGroup)
+{
+	const int						userRandoms	= testCtx.getCommandLine().getTestIterationCount();
+	const int						defRandoms	= 16384;
+	const int						numRandoms	= userRandoms > 0 ? userRandoms : defRandoms;
+	const FloatFormat				highp		(-126, 127, 23, true,
+												 tcu::MAYBE,	// subnormals
+												 tcu::YES,		// infinities
+												 tcu::MAYBE);	// NaN
+	// \todo [2014-04-01 lauri] Check these once Khronos bug 11840 is resolved.
+	const FloatFormat				mediump		(-13, 13, 9, false);
+	// A fixed-point format is just a floating point format with a fixed
+	// exponent and support for subnormals.
+	const FloatFormat				lowp		(0, 0, 7, false, tcu::YES);
+	const PrecisionTestContext		ctx			(testCtx, highp, mediump, lowp,
+												 shaderTypes, numRandoms);
+
+	for (size_t ndx = 0; ndx < cases.getFactories().size(); ++ndx)
+		dstGroup.addChild(createFuncGroup(ctx, *cases.getFactories()[ndx]));
+}
+
+BuiltinPrecisionTests::BuiltinPrecisionTests (tcu::TestContext& testCtx)
+	: tcu::TestCaseGroup(testCtx, "precision", "Builtin precision tests")
+{
+}
+
+BuiltinPrecisionTests::~BuiltinPrecisionTests (void)
+{
+}
+
+void BuiltinPrecisionTests::init (void)
+{
+	std::vector<glu::ShaderType>		shaderTypes;
+	de::MovePtr<const CaseFactories>	computeOnlyCases	= createComputeOnlyBuiltinCases();
+	de::MovePtr<const CaseFactories>	completeCases		= createCompleteBuiltinCases();
+
+	shaderTypes.push_back(glu::SHADERTYPE_COMPUTE);
+
+	addBuiltinPrecisionTests(m_testCtx,
+							 *computeOnlyCases,
+							 shaderTypes,
+							 *this);
+
+	shaderTypes.clear();
+	shaderTypes.push_back(glu::SHADERTYPE_VERTEX);
+	shaderTypes.push_back(glu::SHADERTYPE_FRAGMENT);
+	shaderTypes.push_back(glu::SHADERTYPE_COMPUTE);
+
+	addBuiltinPrecisionTests(m_testCtx,
+							 *completeCases,
+							 shaderTypes,
+							 *this);
+}
+
+} // shaderexecutor
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinPrecisionTests.hpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinPrecisionTests.hpp
new file mode 100644
index 0000000..006c0f2
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinPrecisionTests.hpp
@@ -0,0 +1,63 @@
+#ifndef _VKTSHADERBUILTINPRECISIONTESTS_HPP
+#define _VKTSHADERBUILTINPRECISIONTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Precision and range tests for builtins and types.
+ *
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+namespace shaderexecutor
+{
+
+class BuiltinPrecisionTests : public tcu::TestCaseGroup
+{
+public:
+									BuiltinPrecisionTests				(tcu::TestContext& testCtx);
+	virtual							~BuiltinPrecisionTests				(void);
+
+	virtual void					init								(void);
+
+private:
+									BuiltinPrecisionTests				(const BuiltinPrecisionTests&);		// not allowed!
+	BuiltinPrecisionTests&			operator=							(const BuiltinPrecisionTests&);		// not allowed!
+};
+
+} // shaderexecutor
+} // vkt
+
+#endif // _VKTSHADERBUILTINPRECISIONTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinTests.cpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinTests.cpp
new file mode 100644
index 0000000..3b5c427
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinTests.cpp
@@ -0,0 +1,66 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan shader render test cases
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderBuiltinTests.hpp"
+
+#include "deUniquePtr.hpp"
+
+#include "vktShaderBuiltinPrecisionTests.hpp"
+#include "vktShaderCommonFunctionTests.hpp"
+#include "vktShaderIntegerFunctionTests.hpp"
+#include "vktShaderPackingFunctionTests.hpp"
+
+namespace vkt
+{
+namespace shaderexecutor
+{
+
+tcu::TestCaseGroup* createBuiltinTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	builtinTests			(new tcu::TestCaseGroup(testCtx, "builtin",		"Built-in tests"));
+	de::MovePtr<tcu::TestCaseGroup>	builtinFunctionTests	(new tcu::TestCaseGroup(testCtx, "function",	"Built-in Function Tests"));
+
+	builtinFunctionTests->addChild(new ShaderCommonFunctionTests(testCtx));
+	builtinFunctionTests->addChild(new ShaderIntegerFunctionTests(testCtx));
+	builtinFunctionTests->addChild(new ShaderPackingFunctionTests(testCtx));
+
+	builtinTests->addChild(builtinFunctionTests.release());
+	builtinTests->addChild(new BuiltinPrecisionTests(testCtx));
+
+	return builtinTests.release();
+}
+
+} // shaderexecutor
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinTests.hpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinTests.hpp
new file mode 100644
index 0000000..8d0a054
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderBuiltinTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTSHADERBUILTINTESTS_HPP
+#define _VKTSHADERBUILTINTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief GLSL built-in tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace shaderexecutor
+{
+
+tcu::TestCaseGroup*		createBuiltinTests	(tcu::TestContext& testCtx);
+
+} // shaderexecutor
+} // vkt
+
+#endif // _VKTSHADERBUILTINTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderCommonFunctionTests.cpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderCommonFunctionTests.cpp
new file mode 100644
index 0000000..6c78083
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderCommonFunctionTests.cpp
@@ -0,0 +1,2512 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Common built-in function tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderCommonFunctionTests.hpp"
+#include "vktShaderExecutor.hpp"
+#include "gluContextInfo.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuFormatUtil.hpp"
+#include "tcuFloat.hpp"
+#include "tcuInterval.hpp"
+#include "tcuFloatFormat.hpp"
+#include "deRandom.hpp"
+#include "deMath.h"
+#include "deString.h"
+#include "deArrayUtil.hpp"
+#include "deSharedPtr.hpp"
+
+namespace vkt
+{
+
+namespace shaderexecutor
+{
+
+
+using std::vector;
+using std::string;
+using tcu::TestLog;
+
+using tcu::Vec2;
+using tcu::Vec3;
+using tcu::Vec4;
+using tcu::IVec2;
+using tcu::IVec3;
+using tcu::IVec4;
+
+namespace
+{
+
+// Utilities
+
+template<typename T, int Size>
+struct VecArrayAccess
+{
+public:
+									VecArrayAccess	(const void* ptr) : m_array((tcu::Vector<T, Size>*)ptr) {}
+									~VecArrayAccess	(void) {}
+
+	const tcu::Vector<T, Size>&		operator[]		(size_t offset) const	{ return m_array[offset];	}
+	tcu::Vector<T, Size>&			operator[]		(size_t offset)			{ return m_array[offset];	}
+
+private:
+	tcu::Vector<T, Size>*			m_array;
+};
+
+template<typename T>	T			randomScalar	(de::Random& rnd, T minValue, T maxValue);
+template<> inline		float		randomScalar	(de::Random& rnd, float minValue, float maxValue)		{ return rnd.getFloat(minValue, maxValue);	}
+template<> inline		deInt32		randomScalar	(de::Random& rnd, deInt32 minValue, deInt32 maxValue)	{ return rnd.getInt(minValue, maxValue);	}
+
+template<typename T, int Size>
+inline tcu::Vector<T, Size> randomVector (de::Random& rnd, const tcu::Vector<T, Size>& minValue, const tcu::Vector<T, Size>& maxValue)
+{
+	tcu::Vector<T, Size> res;
+	for (int ndx = 0; ndx < Size; ndx++)
+		res[ndx] = randomScalar<T>(rnd, minValue[ndx], maxValue[ndx]);
+	return res;
+}
+
+template<typename T, int Size>
+static void fillRandomVectors (de::Random& rnd, const tcu::Vector<T, Size>& minValue, const tcu::Vector<T, Size>& maxValue, void* dst, int numValues, int offset = 0)
+{
+	VecArrayAccess<T, Size> access(dst);
+	for (int ndx = 0; ndx < numValues; ndx++)
+		access[offset + ndx] = randomVector<T, Size>(rnd, minValue, maxValue);
+}
+
+template<typename T>
+static void fillRandomScalars (de::Random& rnd, T minValue, T maxValue, void* dst, int numValues, int offset = 0)
+{
+	T* typedPtr = (T*)dst;
+	for (int ndx = 0; ndx < numValues; ndx++)
+		typedPtr[offset + ndx] = randomScalar<T>(rnd, minValue, maxValue);
+}
+
+inline int numBitsLostInOp (float input, float output)
+{
+	const int	inExp		= tcu::Float32(input).exponent();
+	const int	outExp		= tcu::Float32(output).exponent();
+
+	return de::max(0, inExp-outExp); // Lost due to mantissa shift.
+}
+
+inline deUint32 getUlpDiff (float a, float b)
+{
+	const deUint32	aBits	= tcu::Float32(a).bits();
+	const deUint32	bBits	= tcu::Float32(b).bits();
+	return aBits > bBits ? aBits - bBits : bBits - aBits;
+}
+
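+// ULP difference where +0.0 and -0.0 are treated as equal: a zero operand is replaced
+// with a zero carrying the other operand's sign before the comparison.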
+inline deUint32 getUlpDiffIgnoreZeroSign (float a, float b)
+{
+	if (tcu::Float32(a).isZero())
+		return getUlpDiff(tcu::Float32::construct(tcu::Float32(b).sign(), 0, 0).asFloat(), b);
+	else if (tcu::Float32(b).isZero())
+		return getUlpDiff(a, tcu::Float32::construct(tcu::Float32(a).sign(), 0, 0).asFloat());
+	else
+		return getUlpDiff(a, b);
+}
+
+inline bool supportsSignedZero (glu::Precision precision)
+{
+	// \note GLSL ES 3.1 doesn't really require support for -0, but we require it for highp
+	//		 as it is very widely supported.
+	return precision == glu::PRECISION_HIGHP;
+}
+
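+// Absolute error corresponding to 'ulpDiff' ULPs at the magnitude of 'value':
+// the difference between (1 + ulpDiff ULPs) and 1, scaled to value's binary exponent.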
+inline float getEpsFromMaxUlpDiff (float value, deUint32 ulpDiff)
+{
+	const int exp = tcu::Float32(value).exponent();
+	return tcu::Float32::construct(+1, exp, (1u<<23) | ulpDiff).asFloat() - tcu::Float32::construct(+1, exp, 1u<<23).asFloat();
+}
+
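+// With 'numAccurateBits' accurate mantissa bits out of 23, any of the remaining low bits
+// may differ; the maximum ULP error is a mask of those low bits.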
+inline deUint32 getMaxUlpDiffFromBits (int numAccurateBits)
+{
+	const int		numGarbageBits	= 23-numAccurateBits;
+	const deUint32	mask			= (1u<<numGarbageBits)-1u;
+
+	return mask;
+}
+
+inline float getEpsFromBits (float value, int numAccurateBits)
+{
+	return getEpsFromMaxUlpDiff(value, getMaxUlpDiffFromBits(numAccurateBits));
+}
+
+static int getMinMantissaBits (glu::Precision precision)
+{
+	const int bits[] =
+	{
+		7,		// lowp
+		10,		// mediump
+		23		// highp
+	};
+	DE_STATIC_ASSERT(DE_LENGTH_OF_ARRAY(bits) == glu::PRECISION_LAST);
+	DE_ASSERT(de::inBounds<int>(precision, 0, DE_LENGTH_OF_ARRAY(bits)));
+	return bits[precision];
+}
+
+static int getMaxNormalizedValueExponent (glu::Precision precision)
+{
+	const int exponent[] =
+	{
+		0,		// lowp
+		13,		// mediump
+		127		// highp
+	};
+	DE_STATIC_ASSERT(DE_LENGTH_OF_ARRAY(exponent) == glu::PRECISION_LAST);
+	DE_ASSERT(de::inBounds<int>(precision, 0, DE_LENGTH_OF_ARRAY(exponent)));
+	return exponent[precision];
+}
+
+static int getMinNormalizedValueExponent (glu::Precision precision)
+{
+	const int exponent[] =
+	{
+		-7,		// lowp
+		-13,	// mediump
+		-126	// highp
+	};
+	DE_STATIC_ASSERT(DE_LENGTH_OF_ARRAY(exponent) == glu::PRECISION_LAST);
+	DE_ASSERT(de::inBounds<int>(precision, 0, DE_LENGTH_OF_ARRAY(exponent)));
+	return exponent[precision];
+}
+
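+// Round 'f' to a value exactly representable at the given precision: truncate the mantissa,
+// clamp values above the largest normal, and flush values below the smallest normal to zero
+// (or, for lowp, to the smallest positive normal, since lowp need not represent zero).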
+static float makeFloatRepresentable (float f, glu::Precision precision)
+{
+	if (precision == glu::PRECISION_HIGHP)
+	{
+		// \note: assuming f is not extended-precision
+		return f;
+	}
+	else
+	{
+		const int			numMantissaBits				= getMinMantissaBits(precision);
+		const int			maxNormalizedValueExponent	= getMaxNormalizedValueExponent(precision);
+		const int			minNormalizedValueExponent	= getMinNormalizedValueExponent(precision);
+		const deUint32		representableMantissaMask	= ((deUint32(1) << numMantissaBits) - 1) << (23 - (deUint32)numMantissaBits);
+		const float			largestRepresentableValue	= tcu::Float32::constructBits(+1, maxNormalizedValueExponent, ((1u << numMantissaBits) - 1u) << (23u - (deUint32)numMantissaBits)).asFloat();
+		const bool			zeroNotRepresentable		= (precision == glu::PRECISION_LOWP);
+
+		// if zero is not required to be representable, use smallest positive non-subnormal value
+		const float			zeroValue					= (zeroNotRepresentable) ? (tcu::Float32::constructBits(+1, minNormalizedValueExponent, 1).asFloat()) : (0.0f);
+
+		const tcu::Float32	float32Representation		(f);
+
+		if (float32Representation.exponent() < minNormalizedValueExponent)
+		{
+			// flush too small values to zero
+			return zeroValue;
+		}
+		else if (float32Representation.exponent() > maxNormalizedValueExponent)
+		{
+			// clamp too large values
+			return (float32Representation.sign() == +1) ? (largestRepresentableValue) : (-largestRepresentableValue);
+		}
+		else
+		{
+			// remove unrepresentable mantissa bits
+			const tcu::Float32 targetRepresentation(tcu::Float32::constructBits(float32Representation.sign(),
+													float32Representation.exponent(),
+													float32Representation.mantissaBits() & representableMantissaMask));
+
+			return targetRepresentation.asFloat();
+		}
+	}
+}
+
+static vector<int> getScalarSizes (const vector<Symbol>& symbols)
+{
+	vector<int> sizes(symbols.size());
+	for (int ndx = 0; ndx < (int)symbols.size(); ++ndx)
+		sizes[ndx] = symbols[ndx].varType.getScalarSize();
+	return sizes;
+}
+
+static int computeTotalScalarSize (const vector<Symbol>& symbols)
+{
+	int totalSize = 0;
+	for (vector<Symbol>::const_iterator sym = symbols.begin(); sym != symbols.end(); ++sym)
+		totalSize += sym->varType.getScalarSize();
+	return totalSize;
+}
+
+static vector<void*> getInputOutputPointers (const vector<Symbol>& symbols, vector<deUint32>& data, const int numValues)
+{
+	vector<void*>	pointers		(symbols.size());
+	int				curScalarOffset	= 0;
+
+	for (int varNdx = 0; varNdx < (int)symbols.size(); ++varNdx)
+	{
+		const Symbol&	var				= symbols[varNdx];
+		const int		scalarSize		= var.varType.getScalarSize();
+
+		// Uses planar layout as input/output specs do not support strides.
+		pointers[varNdx] = &data[curScalarOffset];
+		curScalarOffset += scalarSize*numValues;
+	}
+
+	DE_ASSERT(curScalarOffset == (int)data.size());
+
+	return pointers;
+}
+
+// \todo [2013-08-08 pyry] Make generic utility and move to glu?
+
+struct HexFloat
+{
+	const float value;
+	HexFloat (const float value_) : value(value_) {}
+};
+
+std::ostream& operator<< (std::ostream& str, const HexFloat& v)
+{
+	return str << v.value << " / " << tcu::toHex(tcu::Float32(v.value).bits());
+}
+
+struct HexBool
+{
+	const deUint32 value;
+	HexBool (const deUint32 value_) : value(value_) {}
+};
+
+std::ostream& operator<< (std::ostream& str, const HexBool& v)
+{
+	return str << (v.value ? "true" : "false") << " / " << tcu::toHex(v.value);
+}
+
+struct VarValue
+{
+	const glu::VarType&	type;
+	const void*			value;
+
+	VarValue (const glu::VarType& type_, const void* value_) : type(type_), value(value_) {}
+};
+
+std::ostream& operator<< (std::ostream& str, const VarValue& varValue)
+{
+	DE_ASSERT(varValue.type.isBasicType());
+
+	const glu::DataType		basicType		= varValue.type.getBasicType();
+	const glu::DataType		scalarType		= glu::getDataTypeScalarType(basicType);
+	const int				numComponents	= glu::getDataTypeScalarSize(basicType);
+
+	if (numComponents > 1)
+		str << glu::getDataTypeName(basicType) << "(";
+
+	for (int compNdx = 0; compNdx < numComponents; compNdx++)
+	{
+		if (compNdx != 0)
+			str << ", ";
+
+		switch (scalarType)
+		{
+			case glu::TYPE_FLOAT:	str << HexFloat(((const float*)varValue.value)[compNdx]);			break;
+			case glu::TYPE_INT:		str << ((const deInt32*)varValue.value)[compNdx];					break;
+			case glu::TYPE_UINT:	str << tcu::toHex(((const deUint32*)varValue.value)[compNdx]);		break;
+			case glu::TYPE_BOOL:	str << HexBool(((const deUint32*)varValue.value)[compNdx]);			break;
+
+			default:
+				DE_ASSERT(false);
+		}
+	}
+
+	if (numComponents > 1)
+		str << ")";
+
+	return str;
+}
+
+static const char* getPrecisionPostfix (glu::Precision precision)
+{
+	static const char* s_postfix[] =
+	{
+		"_lowp",
+		"_mediump",
+		"_highp"
+	};
+	DE_STATIC_ASSERT(DE_LENGTH_OF_ARRAY(s_postfix) == glu::PRECISION_LAST);
+	DE_ASSERT(de::inBounds<int>(precision, 0, DE_LENGTH_OF_ARRAY(s_postfix)));
+	return s_postfix[precision];
+}
+
+static const char* getShaderTypePostfix (glu::ShaderType shaderType)
+{
+	static const char* s_postfix[] =
+	{
+		"_vertex",
+		"_fragment",
+		"_geometry",
+		"_tess_control",
+		"_tess_eval",
+		"_compute"
+	};
+	DE_ASSERT(de::inBounds<int>(shaderType, 0, DE_LENGTH_OF_ARRAY(s_postfix)));
+	return s_postfix[shaderType];
+}
+
+static std::string getCommonFuncCaseName (glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+{
+	return string(glu::getDataTypeName(baseType)) + getPrecisionPostfix(precision) + getShaderTypePostfix(shaderType);
+}
+
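+// Local frexp()/ldexp() helpers built on tcu::Float32; they preserve the sign of zero
+// and operate directly on the mantissa and exponent fields.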
+static inline void frexp (float in, float* significand, int* exponent)
+{
+	const tcu::Float32 fpValue(in);
+
+	if (!fpValue.isZero())
+	{
+		// Construct a float with the same mantissa and an exponent of -1, i.e. a significand in [0.5, 1).
+		*significand	= tcu::Float32::construct(fpValue.sign(), -1, fpValue.mantissa()).asFloat();
+		*exponent		= fpValue.exponent()+1;
+	}
+	else
+	{
+		*significand	= fpValue.sign() < 0 ? -0.0f : 0.0f;
+		*exponent		= 0;
+	}
+}
+
+static inline float ldexp (float significand, int exponent)
+{
+	const tcu::Float32 mant(significand);
+
+	if (exponent == 0 && mant.isZero())
+	{
+		return mant.sign() < 0 ? -0.0f : 0.0f;
+	}
+	else
+	{
+		return tcu::Float32::construct(mant.sign(), exponent+mant.exponent(), mant.mantissa()).asFloat();
+	}
+}
+
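+// Instantiate 'TestClass' cases for every requested scalar type, vector size 1..4,
+// precision (mediump and highp) and shader stage selected by the 'shaderBits' mask.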
+template<class TestClass>
+static void addFunctionCases (tcu::TestCaseGroup* parent, const char* functionName, bool floatTypes, bool intTypes, bool uintTypes, deUint32 shaderBits)
+{
+	tcu::TestCaseGroup* group = new tcu::TestCaseGroup(parent->getTestContext(), functionName, functionName);
+	parent->addChild(group);
+
+	const glu::DataType scalarTypes[] =
+	{
+		glu::TYPE_FLOAT,
+		glu::TYPE_INT,
+		glu::TYPE_UINT
+	};
+
+	for (int scalarTypeNdx = 0; scalarTypeNdx < DE_LENGTH_OF_ARRAY(scalarTypes); scalarTypeNdx++)
+	{
+		const glu::DataType scalarType = scalarTypes[scalarTypeNdx];
+
+		if ((!floatTypes && scalarType == glu::TYPE_FLOAT)	||
+			(!intTypes && scalarType == glu::TYPE_INT)		||
+			(!uintTypes && scalarType == glu::TYPE_UINT))
+			continue;
+
+		for (int vecSize = 1; vecSize <= 4; vecSize++)
+		{
+			for (int prec = glu::PRECISION_MEDIUMP; prec <= glu::PRECISION_HIGHP; prec++)
+			{
+				for (int shaderTypeNdx = 0; shaderTypeNdx < glu::SHADERTYPE_LAST; shaderTypeNdx++)
+				{
+					if (shaderBits & (1<<shaderTypeNdx))
+						group->addChild(new TestClass(parent->getTestContext(), glu::DataType(scalarType + vecSize - 1), glu::Precision(prec), glu::ShaderType(shaderTypeNdx)));
+				}
+			}
+		}
+	}
+}
+
+// CommonFunctionCase
+
+class CommonFunctionCase : public TestCase
+{
+public:
+										CommonFunctionCase			(tcu::TestContext& testCtx, const char* name, const char* description, glu::ShaderType shaderType);
+										~CommonFunctionCase			(void);
+	virtual	void						initPrograms				(vk::SourceCollections& programCollection) const
+										{
+											m_executor->setShaderSources(programCollection);
+										}
+
+	virtual TestInstance*				createInstance				(Context& context) const = 0;
+
+	void								init						(void);
+
+protected:
+										CommonFunctionCase			(const CommonFunctionCase& other);
+	CommonFunctionCase&					operator=					(const CommonFunctionCase& other);
+
+	const glu::ShaderType				m_shaderType;
+	ShaderSpec							m_spec;
+	const int							m_numValues;
+	de::MovePtr<ShaderExecutor>			m_executor;
+};
+
+CommonFunctionCase::CommonFunctionCase (tcu::TestContext& testCtx, const char* name, const char* description, glu::ShaderType shaderType)
+	: TestCase		(testCtx, name, description)
+	, m_shaderType	(shaderType)
+	, m_numValues	(100)
+	, m_executor	(DE_NULL)
+{
+}
+
+CommonFunctionCase::~CommonFunctionCase (void)
+{
+}
+
+void CommonFunctionCase::init (void)
+{
+	DE_ASSERT(!m_executor);
+
+	m_executor = de::MovePtr<ShaderExecutor>(createExecutor(m_shaderType, m_spec));
+	m_testCtx.getLog() << *m_executor;
+}
+
+// CommonFunctionTestInstance
+
+class CommonFunctionTestInstance : public TestInstance
+{
+public:
+										CommonFunctionTestInstance	(Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+											: TestInstance	(context)
+											, m_shaderType	(shaderType)
+											, m_spec		(spec)
+											, m_numValues	(numValues)
+											, m_name		(name)
+											, m_executor	(executor)
+										{
+										}
+	virtual tcu::TestStatus				iterate						(void);
+
+protected:
+	virtual void						getInputValues				(int numValues, void* const* values) const = 0;
+	virtual bool						compare						(const void* const* inputs, const void* const* outputs) = 0;
+
+	const glu::ShaderType				m_shaderType;
+	ShaderSpec							m_spec;
+	const int							m_numValues;
+
+	const char*							m_name;
+
+	std::ostringstream					m_failMsg;					//!< Comparison failure help message.
+
+	ShaderExecutor&						m_executor;
+};
+
+tcu::TestStatus CommonFunctionTestInstance::iterate (void)
+{
+	const int				numInputScalars			= computeTotalScalarSize(m_spec.inputs);
+	const int				numOutputScalars		= computeTotalScalarSize(m_spec.outputs);
+	vector<deUint32>		inputData				(numInputScalars * m_numValues);
+	vector<deUint32>		outputData				(numOutputScalars * m_numValues);
+	const vector<void*>		inputPointers			= getInputOutputPointers(m_spec.inputs, inputData, m_numValues);
+	const vector<void*>		outputPointers			= getInputOutputPointers(m_spec.outputs, outputData, m_numValues);
+
+	// Initialize input data.
+	getInputValues(m_numValues, &inputPointers[0]);
+
+	// Execute shader.
+	m_executor.execute(m_context, m_numValues, &inputPointers[0], &outputPointers[0]);
+
+	// Compare results.
+	{
+		const vector<int>		inScalarSizes		= getScalarSizes(m_spec.inputs);
+		const vector<int>		outScalarSizes		= getScalarSizes(m_spec.outputs);
+		vector<void*>			curInputPtr			(inputPointers.size());
+		vector<void*>			curOutputPtr		(outputPointers.size());
+		int						numFailed			= 0;
+		tcu::TestContext&		testCtx				= m_context.getTestContext();
+
+		for (int valNdx = 0; valNdx < m_numValues; valNdx++)
+		{
+			// Set up pointers for comparison.
+			for (int inNdx = 0; inNdx < (int)curInputPtr.size(); ++inNdx)
+				curInputPtr[inNdx] = (deUint32*)inputPointers[inNdx] + inScalarSizes[inNdx]*valNdx;
+
+			for (int outNdx = 0; outNdx < (int)curOutputPtr.size(); ++outNdx)
+				curOutputPtr[outNdx] = (deUint32*)outputPointers[outNdx] + outScalarSizes[outNdx]*valNdx;
+
+			if (!compare(&curInputPtr[0], &curOutputPtr[0]))
+			{
+				// \todo [2013-08-08 pyry] We probably want to log reference value as well?
+
+				testCtx.getLog() << TestLog::Message << "ERROR: comparison failed for value " << valNdx << ":\n  " << m_failMsg.str() << TestLog::EndMessage;
+
+				testCtx.getLog() << TestLog::Message << "  inputs:" << TestLog::EndMessage;
+				for (int inNdx = 0; inNdx < (int)curInputPtr.size(); inNdx++)
+					testCtx.getLog() << TestLog::Message << "    " << m_spec.inputs[inNdx].name << " = "
+														   << VarValue(m_spec.inputs[inNdx].varType, curInputPtr[inNdx])
+									   << TestLog::EndMessage;
+
+				testCtx.getLog() << TestLog::Message << "  outputs:" << TestLog::EndMessage;
+				for (int outNdx = 0; outNdx < (int)curOutputPtr.size(); outNdx++)
+					testCtx.getLog() << TestLog::Message << "    " << m_spec.outputs[outNdx].name << " = "
+														   << VarValue(m_spec.outputs[outNdx].varType, curOutputPtr[outNdx])
+									   << TestLog::EndMessage;
+
+				m_failMsg.str("");
+				m_failMsg.clear();
+				numFailed += 1;
+			}
+		}
+
+		testCtx.getLog() << TestLog::Message << (m_numValues - numFailed) << " / " << m_numValues << " values passed" << TestLog::EndMessage;
+
+		if (numFailed == 0)
+			return tcu::TestStatus::pass("Pass");
+		else
+			return tcu::TestStatus::fail("Result comparison failed");
+	}
+}
+
+// Test cases
+
+class AbsCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	AbsCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		const Vec2 floatRanges[] =
+		{
+			Vec2(-2.0f,		2.0f),	// lowp
+			Vec2(-1e3f,		1e3f),	// mediump
+			Vec2(-1e7f,		1e7f)	// highp
+		};
+		const IVec2 intRanges[] =
+		{
+			IVec2(-(1<<7)+1,	(1<<7)-1),
+			IVec2(-(1<<15)+1,	(1<<15)-1),
+			IVec2(0x80000001,	0x7fffffff)
+		};
+
+		de::Random				rnd			(deStringHash(m_name) ^ 0x235facu);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize	= glu::getDataTypeScalarSize(type);
+
+		if (glu::isDataTypeFloatOrVec(type))
+			fillRandomScalars(rnd, floatRanges[precision].x(), floatRanges[precision].y(), values[0], numValues*scalarSize);
+		else
+			fillRandomScalars(rnd, intRanges[precision].x(), intRanges[precision].y(), values[0], numValues*scalarSize);
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		if (glu::isDataTypeFloatOrVec(type))
+		{
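+			// With N guaranteed mantissa bits the low 23-N bits of the 32-bit result may differ,
+			// i.e. up to (1<<(23-N))-1 ULPs are tolerated.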
+			const int		mantissaBits	= getMinMantissaBits(precision);
+			const deUint32	maxUlpDiff		= (1u<<(23-mantissaBits))-1u;
+
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+				const float		ref0		= de::abs(in0);
+				const deUint32	ulpDiff0	= getUlpDiff(out0, ref0);
+
+				if (ulpDiff0 > maxUlpDiff)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = " << HexFloat(ref0) << " with ULP threshold " << maxUlpDiff << ", got ULP diff " << ulpDiff0;
+					return false;
+				}
+			}
+		}
+		else
+		{
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const int	in0		= ((const int*)inputs[0])[compNdx];
+				const int	out0	= ((const int*)outputs[0])[compNdx];
+				const int	ref0	= de::abs(in0);
+
+				if (out0 != ref0)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = " << ref0;
+					return false;
+				}
+			}
+		}
+
+		return true;
+	}
+};
+
+class AbsCase : public CommonFunctionCase
+{
+public:
+	AbsCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "abs", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(baseType, precision)));
+		m_spec.source = "out0 = abs(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new AbsCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class SignCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	SignCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		const Vec2 floatRanges[] =
+		{
+			Vec2(-2.0f,		2.0f),	// lowp
+			Vec2(-1e4f,		1e4f),	// mediump	- note: may end up as inf
+			Vec2(-1e8f,		1e8f)	// highp	- note: may end up as inf
+		};
+		const IVec2 intRanges[] =
+		{
+			IVec2(-(1<<7),		(1<<7)-1),
+			IVec2(-(1<<15),		(1<<15)-1),
+			IVec2(0x80000000,	0x7fffffff)
+		};
+
+		de::Random				rnd			(deStringHash(m_name) ^ 0x324u);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize	= glu::getDataTypeScalarSize(type);
+
+		if (glu::isDataTypeFloatOrVec(type))
+		{
+			// Special cases: the first three values are +1, -1 and 0; the rest are filled with random values.
+			std::fill((float*)values[0] + scalarSize*0, (float*)values[0] + scalarSize*1, +1.0f);
+			std::fill((float*)values[0] + scalarSize*1, (float*)values[0] + scalarSize*2, -1.0f);
+			std::fill((float*)values[0] + scalarSize*2, (float*)values[0] + scalarSize*3,  0.0f);
+			fillRandomScalars(rnd, floatRanges[precision].x(), floatRanges[precision].y(), (float*)values[0] + scalarSize*3, (numValues-3)*scalarSize);
+		}
+		else
+		{
+			std::fill((int*)values[0] + scalarSize*0, (int*)values[0] + scalarSize*1, +1);
+			std::fill((int*)values[0] + scalarSize*1, (int*)values[0] + scalarSize*2, -1);
+			std::fill((int*)values[0] + scalarSize*2, (int*)values[0] + scalarSize*3,  0);
+			fillRandomScalars(rnd, intRanges[precision].x(), intRanges[precision].y(), (int*)values[0] + scalarSize*3, (numValues-3)*scalarSize);
+		}
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		if (glu::isDataTypeFloatOrVec(type))
+		{
+			// Both highp and mediump should be able to represent -1, 0, and +1 exactly
+			const deUint32 maxUlpDiff = precision == glu::PRECISION_LOWP ? getMaxUlpDiffFromBits(getMinMantissaBits(precision)) : 0;
+
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+				const float		ref0		= in0 < 0.0f ? -1.0f :
+											  in0 > 0.0f ? +1.0f : 0.0f;
+				const deUint32	ulpDiff0	= getUlpDiff(out0, ref0);
+
+				if (ulpDiff0 > maxUlpDiff)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = " << HexFloat(ref0) << " with ULP threshold " << maxUlpDiff << ", got ULP diff " << ulpDiff0;
+					return false;
+				}
+			}
+		}
+		else
+		{
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const int	in0		= ((const int*)inputs[0])[compNdx];
+				const int	out0	= ((const int*)outputs[0])[compNdx];
+				const int	ref0	= in0 < 0 ? -1 :
+									  in0 > 0 ? +1 : 0;
+
+				if (out0 != ref0)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = " << ref0;
+					return false;
+				}
+			}
+		}
+
+		return true;
+	}
+};
+
+class SignCase : public CommonFunctionCase
+{
+public:
+	SignCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "sign", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(baseType, precision)));
+		m_spec.source = "out0 = sign(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new SignCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
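+// Reference implementation of GLSL roundEven(): round to the nearest integer, with halfway
+// cases (fraction exactly 0.5) rounded towards the nearest even integer.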
+static float roundEven (float v)
+{
+	const float		q			= deFloatFrac(v);
+	const int		truncated	= int(v-q);
+	const int		rounded		= (q > 0.5f)							? (truncated + 1) :	// Rounded up
+									(q == 0.5f && (truncated % 2 != 0))	? (truncated + 1) :	// Round to nearest even at 0.5
+									truncated;												// Rounded down
+
+	return float(rounded);
+}
+
+class RoundEvenCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	RoundEvenCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		const Vec2 ranges[] =
+		{
+			Vec2(-2.0f,		2.0f),	// lowp
+			Vec2(-1e3f,		1e3f),	// mediump
+			Vec2(-1e7f,		1e7f)	// highp
+		};
+
+		de::Random				rnd				(deStringHash(m_name) ^ 0xac23fu);
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		int						numSpecialCases	= 0;
+
+		// Special cases.
+		if (precision != glu::PRECISION_LOWP)
+		{
+			DE_ASSERT(numValues >= 20);
+			for (int ndx = 0; ndx < 20; ndx++)
+			{
+				const float v = de::clamp(float(ndx) - 10.5f, ranges[precision].x(), ranges[precision].y());
+				std::fill((float*)values[0] + ndx*scalarSize, (float*)values[0] + (ndx+1)*scalarSize, v);
+				numSpecialCases += 1;
+			}
+		}
+
+		// Random cases.
+		fillRandomScalars(rnd, ranges[precision].x(), ranges[precision].y(), (float*)values[0] + numSpecialCases*scalarSize, (numValues-numSpecialCases)*scalarSize);
+
+		// If precision is mediump, make sure values can be represented in fp16 exactly
+		if (precision == glu::PRECISION_MEDIUMP)
+		{
+			for (int ndx = 0; ndx < numValues*scalarSize; ndx++)
+				((float*)values[0])[ndx] = tcu::Float16(((float*)values[0])[ndx]).asFloat();
+		}
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const bool				hasSignedZero	= supportsSignedZero(precision);
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		if (precision == glu::PRECISION_HIGHP || precision == glu::PRECISION_MEDIUMP)
+		{
+			// Require exact rounding result.
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+				const float		ref			= roundEven(in0);
+
+				const deUint32	ulpDiff		= hasSignedZero ? getUlpDiff(out0, ref) : getUlpDiffIgnoreZeroSign(out0, ref);
+
+				if (ulpDiff > 0)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = " << HexFloat(ref) << ", got ULP diff " << tcu::toHex(ulpDiff);
+					return false;
+				}
+			}
+		}
+		else
+		{
+			const int		mantissaBits	= getMinMantissaBits(precision);
+			const deUint32	maxUlpDiff		= getMaxUlpDiffFromBits(mantissaBits);	// ULP diff for rounded integer value.
+			const float		eps				= getEpsFromBits(1.0f, mantissaBits);	// epsilon for rounding bounds
+
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+				const int		minRes		= int(roundEven(in0-eps));
+				const int		maxRes		= int(roundEven(in0+eps));
+				bool			anyOk		= false;
+
+				for (int roundedVal = minRes; roundedVal <= maxRes; roundedVal++)
+				{
+					const deUint32 ulpDiff = getUlpDiffIgnoreZeroSign(out0, float(roundedVal));
+
+					if (ulpDiff <= maxUlpDiff)
+					{
+						anyOk = true;
+						break;
+					}
+				}
+
+				if (!anyOk)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = [" << minRes << ", " << maxRes << "] with ULP threshold " << tcu::toHex(maxUlpDiff);
+					return false;
+				}
+			}
+		}
+
+		return true;
+	}
+};
+
+class RoundEvenCase : public CommonFunctionCase
+{
+public:
+	RoundEvenCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "roundEven", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(baseType, precision)));
+		m_spec.source = "out0 = roundEven(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new RoundEvenCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class ModfCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	ModfCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		const Vec2 ranges[] =
+		{
+			Vec2(-2.0f,		2.0f),	// lowp
+			Vec2(-1e3f,		1e3f),	// mediump
+			Vec2(-1e7f,		1e7f)	// highp
+		};
+
+		de::Random				rnd			(deStringHash(m_name) ^ 0xac23fu);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize	= glu::getDataTypeScalarSize(type);
+
+		fillRandomScalars(rnd, ranges[precision].x(), ranges[precision].y(), values[0], numValues*scalarSize);
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const bool				hasZeroSign		= supportsSignedZero(precision);
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		const int				mantissaBits	= getMinMantissaBits(precision);
+
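+		// Rather than checking the whole and fractional parts separately, verify that their sum
+		// reconstructs the input within an ULP budget that accounts for the bits lost in the subtraction.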
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const float		in0			= ((const float*)inputs[0])[compNdx];
+			const float		out0		= ((const float*)outputs[0])[compNdx];
+			const float		out1		= ((const float*)outputs[1])[compNdx];
+
+			const float		refOut1		= float(int(in0));
+			const float		refOut0		= in0 - refOut1;
+
+			const int		bitsLost	= precision != glu::PRECISION_HIGHP ? numBitsLostInOp(in0, refOut0) : 0;
+			const deUint32	maxUlpDiff	= getMaxUlpDiffFromBits(de::max(mantissaBits - bitsLost, 0));
+
+			const float		resSum		= out0 + out1;
+
+			const deUint32	ulpDiff		= hasZeroSign ? getUlpDiff(resSum, in0) : getUlpDiffIgnoreZeroSign(resSum, in0);
+
+			if (ulpDiff > maxUlpDiff)
+			{
+				m_failMsg << "Expected [" << compNdx << "] = (" << HexFloat(refOut0) << ") + (" << HexFloat(refOut1) << ") = " << HexFloat(in0) << " with ULP threshold "
+							<< tcu::toHex(maxUlpDiff) << ", got ULP diff " << tcu::toHex(ulpDiff);
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
+
+class ModfCase : public CommonFunctionCase
+{
+public:
+	ModfCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "modf", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out1", glu::VarType(baseType, precision)));
+		m_spec.source = "out0 = modf(in0, out1);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new ModfCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class IsnanCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	IsnanCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		de::Random				rnd				(deStringHash(m_name) ^ 0xc2a39fu);
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const int				mantissaBits	= getMinMantissaBits(precision);
+		const deUint32			mantissaMask	= ~getMaxUlpDiffFromBits(mantissaBits) & ((1u<<23)-1u);
+
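+		// Build IEEE 754 single-precision bit patterns directly: exponent 0xff with a non-zero
+		// mantissa yields NaN, exponent 0xff with a zero mantissa yields Inf, anything else is finite.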
+		for (int valNdx = 0; valNdx < numValues*scalarSize; valNdx++)
+		{
+			const bool		isNan		= rnd.getFloat() > 0.3f;
+			const bool		isInf		= !isNan && rnd.getFloat() > 0.4f;
+			const deUint32	mantissa	= !isInf ? ((1u<<22) | (rnd.getUint32() & mantissaMask)) : 0;
+			const deUint32	exp			= !isNan && !isInf ? (rnd.getUint32() & 0x7fu) : 0xffu;
+			const deUint32	sign		= rnd.getUint32() & 0x1u;
+			const deUint32	value		= (sign << 31) | (exp << 23) | mantissa;
+
+			DE_ASSERT(tcu::Float32(value).isInf() == isInf && tcu::Float32(value).isNaN() == isNan);
+
+			((deUint32*)values[0])[valNdx] = value;
+		}
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		if (precision == glu::PRECISION_HIGHP)
+		{
+			// Only highp is required to support inf/nan
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0		= ((const float*)inputs[0])[compNdx];
+				const bool		out0	= ((const deUint32*)outputs[0])[compNdx] != 0;
+				const bool		ref		= tcu::Float32(in0).isNaN();
+
+				if (out0 != ref)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = " << (ref ? "true" : "false");
+					return false;
+				}
+			}
+		}
+		else if (precision == glu::PRECISION_MEDIUMP || precision == glu::PRECISION_LOWP)
+		{
+			// NaN support is optional; check that inputs that are not NaN do not produce true.
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0		= ((const float*)inputs[0])[compNdx];
+				const bool		out0	= ((const deUint32*)outputs[0])[compNdx] != 0;
+				const bool		ref		= tcu::Float32(in0).isNaN();
+
+				if (!ref && out0)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = " << (ref ? "true" : "false");
+					return false;
+				}
+			}
+		}
+
+		return true;
+	}
+};
+
+class IsnanCase : public CommonFunctionCase
+{
+public:
+	IsnanCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "isnan", shaderType)
+	{
+		DE_ASSERT(glu::isDataTypeFloatOrVec(baseType));
+
+		const int			vecSize		= glu::getDataTypeScalarSize(baseType);
+		const glu::DataType	boolType	= vecSize > 1 ? glu::getDataTypeBoolVec(vecSize) : glu::TYPE_BOOL;
+
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(boolType, glu::PRECISION_LAST)));
+		m_spec.source = "out0 = isnan(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new IsnanCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class IsinfCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	IsinfCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		de::Random				rnd				(deStringHash(m_name) ^ 0xc2a39fu);
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const int				mantissaBits	= getMinMantissaBits(precision);
+		const deUint32			mantissaMask	= ~getMaxUlpDiffFromBits(mantissaBits) & ((1u<<23)-1u);
+
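+		// Same bit-pattern construction as in the isnan case, but biased towards producing Inf inputs.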
+		for (int valNdx = 0; valNdx < numValues*scalarSize; valNdx++)
+		{
+			const bool		isInf		= rnd.getFloat() > 0.3f;
+			const bool		isNan		= !isInf && rnd.getFloat() > 0.4f;
+			const deUint32	mantissa	= !isInf ? ((1u<<22) | (rnd.getUint32() & mantissaMask)) : 0;
+			const deUint32	exp			= !isNan && !isInf ? (rnd.getUint32() & 0x7fu) : 0xffu;
+			const deUint32	sign		= rnd.getUint32() & 0x1u;
+			const deUint32	value		= (sign << 31) | (exp << 23) | mantissa;
+
+			DE_ASSERT(tcu::Float32(value).isInf() == isInf && tcu::Float32(value).isNaN() == isNan);
+
+			((deUint32*)values[0])[valNdx] = value;
+		}
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		if (precision == glu::PRECISION_HIGHP)
+		{
+			// Only highp is required to support inf/nan
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0		= ((const float*)inputs[0])[compNdx];
+				const bool		out0	= ((const deUint32*)outputs[0])[compNdx] != 0;
+				const bool		ref		= tcu::Float32(in0).isInf();
+
+				if (out0 != ref)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = " << HexBool(ref);
+					return false;
+				}
+			}
+		}
+		else if (precision == glu::PRECISION_MEDIUMP)
+		{
+			// Inf support is optional; check that inputs that are not Inf when quantized to mediump do not produce true.
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0		= ((const float*)inputs[0])[compNdx];
+				const bool		out0	= ((const deUint32*)outputs[0])[compNdx] != 0;
+				const bool		ref		= tcu::Float16(in0).isInf();
+
+				if (!ref && out0)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = " << (ref ? "true" : "false");
+					return false;
+				}
+			}
+		}
+		// else: no verification can be performed
+
+		return true;
+	}
+};
+
+class IsinfCase : public CommonFunctionCase
+{
+public:
+	IsinfCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "isinf", shaderType)
+	{
+		DE_ASSERT(glu::isDataTypeFloatOrVec(baseType));
+
+		const int			vecSize		= glu::getDataTypeScalarSize(baseType);
+		const glu::DataType	boolType	= vecSize > 1 ? glu::getDataTypeBoolVec(vecSize) : glu::TYPE_BOOL;
+
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(boolType, glu::PRECISION_LAST)));
+		m_spec.source = "out0 = isinf(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new IsinfCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class FloatBitsToUintIntCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	FloatBitsToUintIntCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		const Vec2 ranges[] =
+		{
+			Vec2(-2.0f,		2.0f),	// lowp
+			Vec2(-1e3f,		1e3f),	// mediump
+			Vec2(-1e7f,		1e7f)	// highp
+		};
+
+		de::Random				rnd			(deStringHash(m_name) ^ 0x2790au);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize	= glu::getDataTypeScalarSize(type);
+
+		fillRandomScalars(rnd, ranges[precision].x(), ranges[precision].y(), values[0], numValues*scalarSize);
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		const int				mantissaBits	= getMinMantissaBits(precision);
+		const int				maxUlpDiff		= getMaxUlpDiffFromBits(mantissaBits);
+
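+		// Compare the returned bit pattern against the reference bits; lower precisions may drop
+		// mantissa bits, so a difference of up to maxUlpDiff in the integer representation is allowed.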
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const float		in0			= ((const float*)inputs[0])[compNdx];
+			const deUint32	out0		= ((const deUint32*)outputs[0])[compNdx];
+			const deUint32	refOut0		= tcu::Float32(in0).bits();
+			const int		ulpDiff		= de::abs((int)out0 - (int)refOut0);
+
+			if (ulpDiff > maxUlpDiff)
+			{
+				m_failMsg << "Expected [" << compNdx << "] = " << tcu::toHex(refOut0) << " with threshold "
+							<< tcu::toHex(maxUlpDiff) << ", got diff " << tcu::toHex(ulpDiff);
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
+
+class FloatBitsToUintIntCase : public CommonFunctionCase
+{
+public:
+	FloatBitsToUintIntCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType, bool outIsSigned)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), outIsSigned ? "floatBitsToInt" : "floatBitsToUint", shaderType)
+	{
+		const int			vecSize		= glu::getDataTypeScalarSize(baseType);
+		const glu::DataType	intType		= outIsSigned ? (vecSize > 1 ? glu::getDataTypeIntVec(vecSize) : glu::TYPE_INT)
+													  : (vecSize > 1 ? glu::getDataTypeUintVec(vecSize) : glu::TYPE_UINT);
+
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(intType, glu::PRECISION_HIGHP)));
+		m_spec.source = outIsSigned ? "out0 = floatBitsToInt(in0);" : "out0 = floatBitsToUint(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new FloatBitsToUintIntCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class FloatBitsToIntCaseInstance : public FloatBitsToUintIntCaseInstance
+{
+public:
+	FloatBitsToIntCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: FloatBitsToUintIntCaseInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+};
+
+class FloatBitsToIntCase : public FloatBitsToUintIntCase
+{
+public:
+	FloatBitsToIntCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: FloatBitsToUintIntCase	(testCtx, baseType, precision, shaderType, true)
+	{
+	}
+
+};
+
+class FloatBitsToUintCaseInstance : public FloatBitsToUintIntCaseInstance
+{
+public:
+	FloatBitsToUintCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: FloatBitsToUintIntCaseInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+};
+
+class FloatBitsToUintCase : public FloatBitsToUintIntCase
+{
+public:
+	FloatBitsToUintCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: FloatBitsToUintIntCase	(testCtx, baseType, precision, shaderType, false)
+	{
+	}
+};
+
+class BitsToFloatCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	BitsToFloatCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		de::Random				rnd			(deStringHash(m_name) ^ 0xbbb225u);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const int				scalarSize	= glu::getDataTypeScalarSize(type);
+		const Vec2				range		(-1e8f, +1e8f);
+
+		// \note Filled as floats.
+		fillRandomScalars(rnd, range.x(), range.y(), values[0], numValues*scalarSize);
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const deUint32			maxUlpDiff		= 0;
+
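+		// intBitsToFloat()/uintBitsToFloat() reinterpret the bits exactly, so the result must
+		// match the input bit pattern with zero ULP tolerance.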
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const float		in0			= ((const float*)inputs[0])[compNdx];
+			const float		out0		= ((const float*)outputs[0])[compNdx];
+			const deUint32	ulpDiff		= getUlpDiff(in0, out0);
+
+			if (ulpDiff > maxUlpDiff)
+			{
+				m_failMsg << "Expected [" << compNdx << "] = " << tcu::toHex(tcu::Float32(in0).bits()) << " with ULP threshold "
+							<< tcu::toHex(maxUlpDiff) << ", got ULP diff " << tcu::toHex(ulpDiff);
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
+
+class BitsToFloatCase : public CommonFunctionCase
+{
+public:
+	BitsToFloatCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, glu::PRECISION_HIGHP, shaderType).c_str(), glu::isDataTypeIntOrIVec(baseType) ? "intBitsToFloat" : "uintBitsToFloat", shaderType)
+	{
+		const bool			inIsSigned	= glu::isDataTypeIntOrIVec(baseType);
+		const int			vecSize		= glu::getDataTypeScalarSize(baseType);
+		const glu::DataType	floatType	= vecSize > 1 ? glu::getDataTypeFloatVec(vecSize) : glu::TYPE_FLOAT;
+
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, glu::PRECISION_HIGHP)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(floatType, glu::PRECISION_HIGHP)));
+		m_spec.source = inIsSigned ? "out0 = intBitsToFloat(in0);" : "out0 = uintBitsToFloat(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new BitsToFloatCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class FloorCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	FloorCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		const Vec2 ranges[] =
+		{
+			Vec2(-2.0f,		2.0f),	// lowp
+			Vec2(-1e3f,		1e3f),	// mediump
+			Vec2(-1e7f,		1e7f)	// highp
+		};
+
+		de::Random				rnd			(deStringHash(m_name) ^ 0xac23fu);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize	= glu::getDataTypeScalarSize(type);
+
+		// Random cases.
+		fillRandomScalars(rnd, ranges[precision].x(), ranges[precision].y(), (float*)values[0], numValues*scalarSize);
+
+		// If precision is mediump, make sure values can be represented in fp16 exactly
+		if (precision == glu::PRECISION_MEDIUMP)
+		{
+			for (int ndx = 0; ndx < numValues*scalarSize; ndx++)
+				((float*)values[0])[ndx] = tcu::Float16(((float*)values[0])[ndx]).asFloat();
+		}
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		if (precision == glu::PRECISION_HIGHP || precision == glu::PRECISION_MEDIUMP)
+		{
+			// Require exact result.
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+				const float		ref			= deFloatFloor(in0);
+
+				const deUint32	ulpDiff		= getUlpDiff(out0, ref);
+
+				if (ulpDiff > 0)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = " << HexFloat(ref) << ", got ULP diff " << tcu::toHex(ulpDiff);
+					return false;
+				}
+			}
+		}
+		else
+		{
+			const int		mantissaBits	= getMinMantissaBits(precision);
+			const deUint32	maxUlpDiff		= getMaxUlpDiffFromBits(mantissaBits);	// ULP diff for rounded integer value.
+			const float		eps				= getEpsFromBits(1.0f, mantissaBits);	// epsilon for rounding bounds
+
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+				const int		minRes		= int(deFloatFloor(in0-eps));
+				const int		maxRes		= int(deFloatFloor(in0+eps));
+				bool			anyOk		= false;
+
+				for (int roundedVal = minRes; roundedVal <= maxRes; roundedVal++)
+				{
+					const deUint32 ulpDiff = getUlpDiff(out0, float(roundedVal));
+
+					if (ulpDiff <= maxUlpDiff)
+					{
+						anyOk = true;
+						break;
+					}
+				}
+
+				if (!anyOk)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = [" << minRes << ", " << maxRes << "] with ULP threshold " << tcu::toHex(maxUlpDiff);
+					return false;
+				}
+			}
+		}
+
+		return true;
+	}
+};
+
+class FloorCase : public CommonFunctionCase
+{
+public:
+	FloorCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "floor", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(baseType, precision)));
+		m_spec.source = "out0 = floor(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new FloorCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class TruncCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	TruncCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		const Vec2 ranges[] =
+		{
+			Vec2(-2.0f,		2.0f),	// lowp
+			Vec2(-1e3f,		1e3f),	// mediump
+			Vec2(-1e7f,		1e7f)	// highp
+		};
+
+		de::Random				rnd				(deStringHash(m_name) ^ 0xac23fu);
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const float				specialCases[]	= { 0.0f, -0.0f, -0.9f, 0.9f, 1.0f, -1.0f };
+		const int				numSpecialCases	= DE_LENGTH_OF_ARRAY(specialCases);
+
+		// Special cases
+		for (int caseNdx = 0; caseNdx < numSpecialCases; caseNdx++)
+		{
+			for (int scalarNdx = 0; scalarNdx < scalarSize; scalarNdx++)
+				((float*)values[0])[caseNdx*scalarSize + scalarNdx] = specialCases[caseNdx];
+		}
+
+		// Random cases.
+		fillRandomScalars(rnd, ranges[precision].x(), ranges[precision].y(), (float*)values[0] + scalarSize*numSpecialCases, (numValues-numSpecialCases)*scalarSize);
+
+		// If precision is mediump, make sure values can be represented in fp16 exactly
+		if (precision == glu::PRECISION_MEDIUMP)
+		{
+			for (int ndx = 0; ndx < numValues*scalarSize; ndx++)
+				((float*)values[0])[ndx] = tcu::Float16(((float*)values[0])[ndx]).asFloat();
+		}
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		if (precision == glu::PRECISION_HIGHP || precision == glu::PRECISION_MEDIUMP)
+		{
+			// Require exact result.
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+				const bool		isNeg		= tcu::Float32(in0).sign() < 0;
+				const float		ref			= isNeg ? (-float(int(-in0))) : float(int(in0));
+
+				// \note The trunc() definition is lenient about negative zero, so the sign of a zero result is ignored.
+				const deUint32	ulpDiff		= getUlpDiffIgnoreZeroSign(out0, ref);
+
+				if (ulpDiff > 0)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = " << HexFloat(ref) << ", got ULP diff " << tcu::toHex(ulpDiff);
+					return false;
+				}
+			}
+		}
+		else
+		{
+			const int		mantissaBits	= getMinMantissaBits(precision);
+			const deUint32	maxUlpDiff		= getMaxUlpDiffFromBits(mantissaBits);	// ULP diff for rounded integer value.
+			const float		eps				= getEpsFromBits(1.0f, mantissaBits);	// epsilon for rounding bounds
+
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+				const int		minRes		= int(in0-eps);
+				const int		maxRes		= int(in0+eps);
+				bool			anyOk		= false;
+
+				for (int roundedVal = minRes; roundedVal <= maxRes; roundedVal++)
+				{
+					const deUint32 ulpDiff = getUlpDiffIgnoreZeroSign(out0, float(roundedVal));
+
+					if (ulpDiff <= maxUlpDiff)
+					{
+						anyOk = true;
+						break;
+					}
+				}
+
+				if (!anyOk)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = [" << minRes << ", " << maxRes << "] with ULP threshold " << tcu::toHex(maxUlpDiff);
+					return false;
+				}
+			}
+		}
+
+		return true;
+	}
+};
+
+class TruncCase : public CommonFunctionCase
+{
+public:
+	TruncCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "trunc", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(baseType, precision)));
+		m_spec.source = "out0 = trunc(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new TruncCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class RoundCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	RoundCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		const Vec2 ranges[] =
+		{
+			Vec2(-2.0f,		2.0f),	// lowp
+			Vec2(-1e3f,		1e3f),	// mediump
+			Vec2(-1e7f,		1e7f)	// highp
+		};
+
+		de::Random				rnd				(deStringHash(m_name) ^ 0xac23fu);
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		int						numSpecialCases	= 0;
+
+		// Special cases.
+		if (precision != glu::PRECISION_LOWP)
+		{
+			DE_ASSERT(numValues >= 10);
+			for (int ndx = 0; ndx < 10; ndx++)
+			{
+				const float v = de::clamp(float(ndx) - 5.5f, ranges[precision].x(), ranges[precision].y());
+				std::fill((float*)values[0] + ndx*scalarSize, (float*)values[0] + (ndx+1)*scalarSize, v);
+				numSpecialCases += 1;
+			}
+		}
+
+		// Random cases.
+		fillRandomScalars(rnd, ranges[precision].x(), ranges[precision].y(), (float*)values[0] + numSpecialCases*scalarSize, (numValues-numSpecialCases)*scalarSize);
+
+		// If precision is mediump, make sure values can be represented in fp16 exactly
+		if (precision == glu::PRECISION_MEDIUMP)
+		{
+			for (int ndx = 0; ndx < numValues*scalarSize; ndx++)
+				((float*)values[0])[ndx] = tcu::Float16(((float*)values[0])[ndx]).asFloat();
+		}
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const bool				hasZeroSign		= supportsSignedZero(precision);
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		if (precision == glu::PRECISION_HIGHP || precision == glu::PRECISION_MEDIUMP)
+		{
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+
+				if (deFloatFrac(in0) == 0.5f)
+				{
+					// Allow both ceil(in) and floor(in)
+					const float		ref0		= deFloatFloor(in0);
+					const float		ref1		= deFloatCeil(in0);
+					const deUint32	ulpDiff0	= hasZeroSign ? getUlpDiff(out0, ref0) : getUlpDiffIgnoreZeroSign(out0, ref0);
+					const deUint32	ulpDiff1	= hasZeroSign ? getUlpDiff(out0, ref1) : getUlpDiffIgnoreZeroSign(out0, ref1);
+
+					if (ulpDiff0 > 0 && ulpDiff1 > 0)
+					{
+						m_failMsg << "Expected [" << compNdx << "] = " << HexFloat(ref0) << " or " << HexFloat(ref1) << ", got ULP diff " << tcu::toHex(de::min(ulpDiff0, ulpDiff1));
+						return false;
+					}
+				}
+				else
+				{
+					// Require exact result
+					const float		ref		= roundEven(in0);
+					const deUint32	ulpDiff	= hasZeroSign ? getUlpDiff(out0, ref) : getUlpDiffIgnoreZeroSign(out0, ref);
+
+					if (ulpDiff > 0)
+					{
+						m_failMsg << "Expected [" << compNdx << "] = " << HexFloat(ref) << ", got ULP diff " << tcu::toHex(ulpDiff);
+						return false;
+					}
+				}
+			}
+		}
+		else
+		{
+			const int		mantissaBits	= getMinMantissaBits(precision);
+			const deUint32	maxUlpDiff		= getMaxUlpDiffFromBits(mantissaBits);	// ULP diff for rounded integer value.
+			const float		eps				= getEpsFromBits(1.0f, mantissaBits);	// epsilon for rounding bounds
+
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+				const int		minRes		= int(roundEven(in0-eps));
+				const int		maxRes		= int(roundEven(in0+eps));
+				bool			anyOk		= false;
+
+				for (int roundedVal = minRes; roundedVal <= maxRes; roundedVal++)
+				{
+					const deUint32 ulpDiff = getUlpDiffIgnoreZeroSign(out0, float(roundedVal));
+
+					if (ulpDiff <= maxUlpDiff)
+					{
+						anyOk = true;
+						break;
+					}
+				}
+
+				if (!anyOk)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = [" << minRes << ", " << maxRes << "] with ULP threshold " << tcu::toHex(maxUlpDiff);
+					return false;
+				}
+			}
+		}
+
+		return true;
+	}
+};
+
+class RoundCase : public CommonFunctionCase
+{
+public:
+	RoundCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "round", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(baseType, precision)));
+		m_spec.source = "out0 = round(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new RoundCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class CeilCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	CeilCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		const Vec2 ranges[] =
+		{
+			Vec2(-2.0f,		2.0f),	// lowp
+			Vec2(-1e3f,		1e3f),	// mediump
+			Vec2(-1e7f,		1e7f)	// highp
+		};
+
+		de::Random				rnd			(deStringHash(m_name) ^ 0xac23fu);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize	= glu::getDataTypeScalarSize(type);
+
+		// Random cases.
+		fillRandomScalars(rnd, ranges[precision].x(), ranges[precision].y(), (float*)values[0], numValues*scalarSize);
+
+		// If precision is mediump, make sure values can be represented in fp16 exactly
+		if (precision == glu::PRECISION_MEDIUMP)
+		{
+			for (int ndx = 0; ndx < numValues*scalarSize; ndx++)
+				((float*)values[0])[ndx] = tcu::Float16(((float*)values[0])[ndx]).asFloat();
+		}
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const bool				hasZeroSign		= supportsSignedZero(precision);
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		if (precision == glu::PRECISION_HIGHP || precision == glu::PRECISION_MEDIUMP)
+		{
+			// Require exact result.
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+				const float		ref			= deFloatCeil(in0);
+
+				const deUint32	ulpDiff		= hasZeroSign ? getUlpDiff(out0, ref) : getUlpDiffIgnoreZeroSign(out0, ref);
+
+				if (ulpDiff > 0)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = " << HexFloat(ref) << ", got ULP diff " << tcu::toHex(ulpDiff);
+					return false;
+				}
+			}
+		}
+		else
+		{
+			const int		mantissaBits	= getMinMantissaBits(precision);
+			const deUint32	maxUlpDiff		= getMaxUlpDiffFromBits(mantissaBits);	// ULP diff for rounded integer value.
+			const float		eps				= getEpsFromBits(1.0f, mantissaBits);	// epsilon for rounding bounds
+
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+				const int		minRes		= int(deFloatCeil(in0-eps));
+				const int		maxRes		= int(deFloatCeil(in0+eps));
+				bool			anyOk		= false;
+
+				for (int roundedVal = minRes; roundedVal <= maxRes; roundedVal++)
+				{
+					const deUint32 ulpDiff = getUlpDiffIgnoreZeroSign(out0, float(roundedVal));
+
+					if (ulpDiff <= maxUlpDiff)
+					{
+						anyOk = true;
+						break;
+					}
+				}
+
+				if (!anyOk && de::inRange(0, minRes, maxRes))
+				{
+					// Allow -0 as well.
+					const int ulpDiff = de::abs((int)tcu::Float32(out0).bits() - (int)0x80000000u);
+					anyOk = ((deUint32)ulpDiff <= maxUlpDiff);
+				}
+
+				if (!anyOk)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = [" << minRes << ", " << maxRes << "] with ULP threshold " << tcu::toHex(maxUlpDiff);
+					return false;
+				}
+			}
+		}
+
+		return true;
+	}
+};
+
+class CeilCase : public CommonFunctionCase
+{
+public:
+	CeilCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "ceil", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(baseType, precision)));
+		m_spec.source = "out0 = ceil(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new CeilCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class FractCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	FractCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		const Vec2 ranges[] =
+		{
+			Vec2(-2.0f,		2.0f),	// lowp
+			Vec2(-1e3f,		1e3f),	// mediump
+			Vec2(-1e7f,		1e7f)	// highp
+		};
+
+		de::Random				rnd				(deStringHash(m_name) ^ 0xac23fu);
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		int						numSpecialCases	= 0;
+
+		// Special cases.
+		if (precision != glu::PRECISION_LOWP)
+		{
+			DE_ASSERT(numValues >= 10);
+			for (int ndx = 0; ndx < 10; ndx++)
+			{
+				const float v = de::clamp(float(ndx) - 5.5f, ranges[precision].x(), ranges[precision].y());
+				std::fill((float*)values[0] + ndx*scalarSize, (float*)values[0] + (ndx+1)*scalarSize, v);
+				numSpecialCases += 1;
+			}
+		}
+
+		// Random cases.
+		fillRandomScalars(rnd, ranges[precision].x(), ranges[precision].y(), (float*)values[0] + numSpecialCases*scalarSize, (numValues-numSpecialCases)*scalarSize);
+
+		// If precision is mediump, make sure values can be represented in fp16 exactly
+		if (precision == glu::PRECISION_MEDIUMP)
+		{
+			for (int ndx = 0; ndx < numValues*scalarSize; ndx++)
+				((float*)values[0])[ndx] = tcu::Float16(((float*)values[0])[ndx]).asFloat();
+		}
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const bool				hasZeroSign		= supportsSignedZero(precision);
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		if (precision == glu::PRECISION_HIGHP || precision == glu::PRECISION_MEDIUMP)
+		{
+			// Require exact result.
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+				const float		ref			= deFloatFrac(in0);
+
+				const deUint32	ulpDiff		= hasZeroSign ? getUlpDiff(out0, ref) : getUlpDiffIgnoreZeroSign(out0, ref);
+
+				if (ulpDiff > 0)
+				{
+					m_failMsg << "Expected [" << compNdx << "] = " << HexFloat(ref) << ", got ULP diff " << tcu::toHex(ulpDiff);
+					return false;
+				}
+			}
+		}
+		else
+		{
+			const int		mantissaBits	= getMinMantissaBits(precision);
+			const float		eps				= getEpsFromBits(1.0f, mantissaBits);	// epsilon for rounding bounds
+
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const float		in0			= ((const float*)inputs[0])[compNdx];
+				const float		out0		= ((const float*)outputs[0])[compNdx];
+
+				if (int(deFloatFloor(in0-eps)) == int(deFloatFloor(in0+eps)))
+				{
+					const float		ref			= deFloatFrac(in0);
+					const int		bitsLost	= numBitsLostInOp(in0, ref);
+					const deUint32	maxUlpDiff	= getMaxUlpDiffFromBits(de::max(0, mantissaBits-bitsLost));	// ULP diff for rounded integer value.
+					const deUint32	ulpDiff		= getUlpDiffIgnoreZeroSign(out0, ref);
+
+					if (ulpDiff > maxUlpDiff)
+					{
+						m_failMsg << "Expected [" << compNdx << "] = " << HexFloat(ref) << " with ULP threshold " << tcu::toHex(maxUlpDiff) << ", got diff " << tcu::toHex(ulpDiff);
+						return false;
+					}
+				}
+				else
+				{
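+					// The reference floor() is ambiguous within the rounding tolerance, so only
+					// require the result to be a valid fraction (strictly less than 1.0).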
+					if (out0 >= 1.0f)
+					{
+						m_failMsg << "Expected [" << compNdx << "] < 1.0";
+						return false;
+					}
+				}
+			}
+		}
+
+		return true;
+	}
+};
+
+class FractCase : public CommonFunctionCase
+{
+public:
+	FractCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "fract", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(baseType, precision)));
+		m_spec.source = "out0 = fract(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new FractCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class FrexpCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	FrexpCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		const Vec2 ranges[] =
+		{
+			Vec2(-2.0f,		2.0f),	// lowp
+			Vec2(-1e3f,		1e3f),	// mediump
+			Vec2(-1e7f,		1e7f)	// highp
+		};
+
+		de::Random				rnd			(deStringHash(m_name) ^ 0x2790au);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize	= glu::getDataTypeScalarSize(type);
+
+		// Special cases
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			((float*)values[0])[scalarSize*0 + compNdx] = 0.0f;
+			((float*)values[0])[scalarSize*1 + compNdx] = -0.0f;
+			((float*)values[0])[scalarSize*2 + compNdx] = 0.5f;
+			((float*)values[0])[scalarSize*3 + compNdx] = -0.5f;
+			((float*)values[0])[scalarSize*4 + compNdx] = 1.0f;
+			((float*)values[0])[scalarSize*5 + compNdx] = -1.0f;
+			((float*)values[0])[scalarSize*6 + compNdx] = 2.0f;
+			((float*)values[0])[scalarSize*7 + compNdx] = -2.0f;
+		}
+
+		fillRandomScalars(rnd, ranges[precision].x(), ranges[precision].y(), (float*)values[0] + 8*scalarSize, (numValues-8)*scalarSize);
+
+		// Make sure the values are representable in the target format
+		for (int caseNdx = 0; caseNdx < numValues; ++caseNdx)
+		{
+			for (int scalarNdx = 0; scalarNdx < scalarSize; scalarNdx++)
+			{
+				float* const valuePtr = &((float*)values[0])[caseNdx * scalarSize + scalarNdx];
+
+				*valuePtr = makeFloatRepresentable(*valuePtr, precision);
+			}
+		}
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type						= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision					= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize					= glu::getDataTypeScalarSize(type);
+		const bool				transitSupportsSignedZero	= (m_shaderType != glu::SHADERTYPE_FRAGMENT); // the executor cannot reliably pass a negative zero through to the fragment stage
+		const bool				signedZero					= supportsSignedZero(precision) && transitSupportsSignedZero;
+
+		const int				mantissaBits				= getMinMantissaBits(precision);
+		const deUint32			maxUlpDiff					= getMaxUlpDiffFromBits(mantissaBits);
+
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const float		in0			= ((const float*)inputs[0])[compNdx];
+			const float		out0		= ((const float*)outputs[0])[compNdx];
+			const int		out1		= ((const int*)outputs[1])[compNdx];
+
+			float			refOut0;
+			int				refOut1;
+
+			frexp(in0, &refOut0, &refOut1);
+
+			const deUint32	ulpDiff0	= signedZero ? getUlpDiff(out0, refOut0) : getUlpDiffIgnoreZeroSign(out0, refOut0);
+
+			if (ulpDiff0 > maxUlpDiff || out1 != refOut1)
+			{
+				m_failMsg << "Expected [" << compNdx << "] = " << HexFloat(refOut0) << ", " << refOut1 << " with ULP threshold "
+						  << tcu::toHex(maxUlpDiff) << ", got ULP diff " << tcu::toHex(ulpDiff0);
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
+
+class FrexpCase : public CommonFunctionCase
+{
+public:
+	FrexpCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "frexp", shaderType)
+	{
+		const int			vecSize		= glu::getDataTypeScalarSize(baseType);
+		const glu::DataType	intType		= vecSize > 1 ? glu::getDataTypeIntVec(vecSize) : glu::TYPE_INT;
+
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(baseType, glu::PRECISION_HIGHP)));
+		m_spec.outputs.push_back(Symbol("out1", glu::VarType(intType, glu::PRECISION_HIGHP)));
+		m_spec.source = "out0 = frexp(in0, out1);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new FrexpCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class LdexpCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	LdexpCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		const Vec2 ranges[] =
+		{
+			Vec2(-2.0f,		2.0f),	// lowp
+			Vec2(-1e3f,		1e3f),	// mediump
+			Vec2(-1e7f,		1e7f)	// highp
+		};
+
+		de::Random				rnd					(deStringHash(m_name) ^ 0x2790au);
+		const glu::DataType		type				= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision			= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize			= glu::getDataTypeScalarSize(type);
+		int						valueNdx			= 0;
+
+		{
+			const float easySpecialCases[] = { 0.0f, -0.0f, 0.5f, -0.5f, 1.0f, -1.0f, 2.0f, -2.0f };
+
+			DE_ASSERT(valueNdx + DE_LENGTH_OF_ARRAY(easySpecialCases) <= numValues);
+			for (int caseNdx = 0; caseNdx < DE_LENGTH_OF_ARRAY(easySpecialCases); caseNdx++)
+			{
+				float	in0;
+				int		in1;
+
+				frexp(easySpecialCases[caseNdx], &in0, &in1);
+
+				for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+				{
+					((float*)values[0])[valueNdx*scalarSize + compNdx] = in0;
+					((int*)values[1])[valueNdx*scalarSize + compNdx] = in1;
+				}
+
+				valueNdx += 1;
+			}
+		}
+
+		{
+			// \note lowp and mediump cannot necessarily represent the values in the hard cases, so only easy ones are used for them.
+			const int numEasyRandomCases = precision == glu::PRECISION_HIGHP ? 50 : (numValues-valueNdx);
+
+			DE_ASSERT(valueNdx + numEasyRandomCases <= numValues);
+			for (int caseNdx = 0; caseNdx < numEasyRandomCases; caseNdx++)
+			{
+				for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+				{
+					const float	in	= rnd.getFloat(ranges[precision].x(), ranges[precision].y());
+					float		in0;
+					int			in1;
+
+					frexp(in, &in0, &in1);
+
+					((float*)values[0])[valueNdx*scalarSize + compNdx] = in0;
+					((int*)values[1])[valueNdx*scalarSize + compNdx] = in1;
+				}
+
+				valueNdx += 1;
+			}
+		}
+
+		{
+			const int numHardRandomCases = numValues-valueNdx;
+			DE_ASSERT(numHardRandomCases >= 0 && valueNdx + numHardRandomCases <= numValues);
+
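+			// Construct a normal-range in0 and choose in1 so that both in0 and ldexp(in0, in1) stay within the normal float exponent range [-126, 127].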
+			for (int caseNdx = 0; caseNdx < numHardRandomCases; caseNdx++)
+			{
+				for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+				{
+					const int		fpExp		= rnd.getInt(-126, 127);
+					const int		sign		= rnd.getBool() ? -1 : +1;
+					const deUint32	mantissa	= (1u<<23) | (rnd.getUint32() & ((1u<<23)-1));
+					const int		in1			= rnd.getInt(de::max(-126, -126-fpExp), de::min(127, 127-fpExp));
+					const float		in0			= tcu::Float32::construct(sign, fpExp, mantissa).asFloat();
+
+					DE_ASSERT(de::inRange(in1, -126, 127)); // See Khronos bug 11180
+					DE_ASSERT(de::inRange(in1+fpExp, -126, 127));
+
+					const float		out			= ldexp(in0, in1);
+
+					DE_ASSERT(!tcu::Float32(out).isInf() && !tcu::Float32(out).isDenorm());
+					DE_UNREF(out);
+
+					((float*)values[0])[valueNdx*scalarSize + compNdx] = in0;
+					((int*)values[1])[valueNdx*scalarSize + compNdx] = in1;
+				}
+
+				valueNdx += 1;
+			}
+		}
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		const int				mantissaBits	= getMinMantissaBits(precision);
+		const deUint32			maxUlpDiff		= getMaxUlpDiffFromBits(mantissaBits);
+
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const float		in0			= ((const float*)inputs[0])[compNdx];
+			const int		in1			= ((const int*)inputs[1])[compNdx];
+			const float		out0		= ((const float*)outputs[0])[compNdx];
+			const float		refOut0		= ldexp(in0, in1);
+			const deUint32	ulpDiff		= getUlpDiffIgnoreZeroSign(out0, refOut0);
+
+			const int		inExp		= tcu::Float32(in0).exponent();
+
+			if (ulpDiff > maxUlpDiff)
+			{
+				m_failMsg << "Expected [" << compNdx << "] = " << HexFloat(refOut0) << ", (exp = " << inExp << ") with ULP threshold "
+						  << tcu::toHex(maxUlpDiff) << ", got ULP diff " << tcu::toHex(ulpDiff);
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
+
+class LdexpCase : public CommonFunctionCase
+{
+public:
+	LdexpCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "ldexp", shaderType)
+	{
+		const int			vecSize		= glu::getDataTypeScalarSize(baseType);
+		const glu::DataType	intType		= vecSize > 1 ? glu::getDataTypeIntVec(vecSize) : glu::TYPE_INT;
+
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(baseType, precision)));
+		m_spec.inputs.push_back(Symbol("in1", glu::VarType(intType, glu::PRECISION_HIGHP)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(baseType, glu::PRECISION_HIGHP)));
+		m_spec.source = "out0 = ldexp(in0, in1);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new LdexpCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class FmaCaseInstance : public CommonFunctionTestInstance
+{
+public:
+	FmaCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: CommonFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		const Vec2 ranges[] =
+		{
+			Vec2(-2.0f,		2.0f),	// lowp
+			Vec2(-127.f,	127.f),	// mediump
+			Vec2(-1e7f,		1e7f)	// highp
+		};
+
+		de::Random				rnd							(deStringHash(m_name) ^ 0xac23fu);
+		const glu::DataType		type						= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision					= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize					= glu::getDataTypeScalarSize(type);
+		const float				specialCases[][3]			=
+		{
+			// a		b		c
+			{ 0.0f,		0.0f,	0.0f },
+			{ 0.0f,		1.0f,	0.0f },
+			{ 0.0f,		0.0f,	-1.0f },
+			{ 1.0f,		1.0f,	0.0f },
+			{ 1.0f,		1.0f,	1.0f },
+			{ -1.0f,	1.0f,	0.0f },
+			{ 1.0f,		-1.0f,	0.0f },
+			{ -1.0f,	-1.0f,	0.0f },
+			{ -0.0f,	1.0f,	0.0f },
+			{ 1.0f,		-0.0f,	0.0f }
+		};
+		const int				numSpecialCases				= DE_LENGTH_OF_ARRAY(specialCases);
+
+		// Special cases
+		for (int caseNdx = 0; caseNdx < numSpecialCases; caseNdx++)
+		{
+			for (int inputNdx = 0; inputNdx < 3; inputNdx++)
+			{
+				for (int scalarNdx = 0; scalarNdx < scalarSize; scalarNdx++)
+					((float*)values[inputNdx])[caseNdx*scalarSize + scalarNdx] = specialCases[caseNdx][inputNdx];
+			}
+		}
+
+		// Random cases.
+		{
+			const int	numScalars	= (numValues-numSpecialCases)*scalarSize;
+			const int	offs		= scalarSize*numSpecialCases;
+
+			for (int inputNdx = 0; inputNdx < 3; inputNdx++)
+				fillRandomScalars(rnd, ranges[precision].x(), ranges[precision].y(), (float*)values[inputNdx] + offs, numScalars);
+		}
+
+		// Make sure the values are representable in the target format
+		for (int inputNdx = 0; inputNdx < 3; inputNdx++)
+		{
+			for (int caseNdx = 0; caseNdx < numValues; ++caseNdx)
+			{
+				for (int scalarNdx = 0; scalarNdx < scalarSize; scalarNdx++)
+				{
+					float* const valuePtr = &((float*)values[inputNdx])[caseNdx * scalarSize + scalarNdx];
+
+					*valuePtr = makeFloatRepresentable(*valuePtr, precision);
+				}
+			}
+		}
+	}
+
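+	// Reference computation for fma(): interval arithmetic over the precision's float format. The product a*b is evaluated
+	// for all combinations of the rounded operand bounds and rounded outward, then c is added and the sum is rounded outward again.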
+	static tcu::Interval fma (glu::Precision precision, float a, float b, float c)
+	{
+		const tcu::FloatFormat formats[] =
+		{
+			//				 minExp		maxExp		mantissa	exact		subnormals	infinities	NaN
+			tcu::FloatFormat(0,			0,			7,			false,		tcu::YES,	tcu::MAYBE,	tcu::MAYBE),
+			tcu::FloatFormat(-13,		13,			9,			false,		tcu::MAYBE,	tcu::MAYBE,	tcu::MAYBE),
+			tcu::FloatFormat(-126,		127,		23,			true,		tcu::MAYBE, tcu::YES,	tcu::MAYBE)
+		};
+		const tcu::FloatFormat&	format	= de::getSizedArrayElement<glu::PRECISION_LAST>(formats, precision);
+		const tcu::Interval		ia		= format.convert(a);
+		const tcu::Interval		ib		= format.convert(b);
+		const tcu::Interval		ic		= format.convert(c);
+		tcu::Interval			prod0;
+		tcu::Interval			prod1;
+		tcu::Interval			prod2;
+		tcu::Interval			prod3;
+		tcu::Interval			prod;
+		tcu::Interval			res;
+
+		TCU_SET_INTERVAL(prod0, tmp, tmp = ia.lo() * ib.lo());
+		TCU_SET_INTERVAL(prod1, tmp, tmp = ia.lo() * ib.hi());
+		TCU_SET_INTERVAL(prod2, tmp, tmp = ia.hi() * ib.lo());
+		TCU_SET_INTERVAL(prod3, tmp, tmp = ia.hi() * ib.hi());
+
+		prod = format.convert(format.roundOut(prod0 | prod1 | prod2 | prod3, ia.isFinite() && ib.isFinite()));
+
+		TCU_SET_INTERVAL_BOUNDS(res, tmp,
+								tmp = prod.lo() + ic.lo(),
+								tmp = prod.hi() + ic.hi());
+
+		return format.convert(format.roundOut(res, prod.isFinite() && ic.isFinite()));
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const float			a			= ((const float*)inputs[0])[compNdx];
+			const float			b			= ((const float*)inputs[1])[compNdx];
+			const float			c			= ((const float*)inputs[2])[compNdx];
+			const float			res			= ((const float*)outputs[0])[compNdx];
+			const tcu::Interval	ref			= fma(precision, a, b, c);
+
+			if (!ref.contains(res))
+			{
+				m_failMsg << "Expected [" << compNdx << "] = " << ref;
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
+
+class FmaCase : public CommonFunctionCase
+{
+public:
+	void init (void)
+	{
+		CommonFunctionCase::init();
+	}
+
+	FmaCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: CommonFunctionCase	(testCtx, getCommonFuncCaseName(baseType, precision, shaderType).c_str(), "fma", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("a", glu::VarType(baseType, precision)));
+		m_spec.inputs.push_back(Symbol("b", glu::VarType(baseType, precision)));
+		m_spec.inputs.push_back(Symbol("c", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("res", glu::VarType(baseType, precision)));
+		m_spec.source = "res = fma(a, b, c);";
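+		// fma() is not part of core GLSL ES 3.1; it is provided by GL_EXT_gpu_shader5.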
+		m_spec.globalDeclarations = "#extension GL_EXT_gpu_shader5 : require\n";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new FmaCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+} // anonymous
+
+ShaderCommonFunctionTests::ShaderCommonFunctionTests (tcu::TestContext& testCtx)
+	: tcu::TestCaseGroup	(testCtx, "common", "Common function tests")
+{
+}
+
+ShaderCommonFunctionTests::~ShaderCommonFunctionTests (void)
+{
+}
+
+void ShaderCommonFunctionTests::init (void)
+{
+	enum
+	{
+		VS = (1<<glu::SHADERTYPE_VERTEX),
+		TC = (1<<glu::SHADERTYPE_TESSELLATION_CONTROL),
+		TE = (1<<glu::SHADERTYPE_TESSELLATION_EVALUATION),
+		GS = (1<<glu::SHADERTYPE_GEOMETRY),
+		FS = (1<<glu::SHADERTYPE_FRAGMENT),
+		CS = (1<<glu::SHADERTYPE_COMPUTE),
+
+		ALL_SHADERS = VS|TC|TE|GS|FS|CS,
+		NEW_SHADERS = TC|TE|GS|CS,
+	};
+
+	//																	Float?	Int?	Uint?	Shaders
+	addFunctionCases<AbsCase>				(this,	"abs",				true,	true,	false,	ALL_SHADERS);
+	addFunctionCases<SignCase>				(this,	"sign",				true,	true,	false,	ALL_SHADERS);
+	addFunctionCases<FloorCase>				(this,	"floor",			true,	false,	false,	ALL_SHADERS);
+	addFunctionCases<TruncCase>				(this,	"trunc",			true,	false,	false,	ALL_SHADERS);
+	addFunctionCases<RoundCase>				(this,	"round",			true,	false,	false,	ALL_SHADERS);
+	addFunctionCases<RoundEvenCase>			(this,	"roundeven",		true,	false,	false,	ALL_SHADERS);
+	addFunctionCases<CeilCase>				(this,	"ceil",				true,	false,	false,	ALL_SHADERS);
+	addFunctionCases<FractCase>				(this,	"fract",			true,	false,	false,	ALL_SHADERS);
+	// mod
+	addFunctionCases<ModfCase>				(this,	"modf",				true,	false,	false,	ALL_SHADERS);
+	// min
+	// max
+	// clamp
+	// mix
+	// step
+	// smoothstep
+	addFunctionCases<IsnanCase>				(this,	"isnan",			true,	false,	false,	ALL_SHADERS);
+	addFunctionCases<IsinfCase>				(this,	"isinf",			true,	false,	false,	ALL_SHADERS);
+	addFunctionCases<FloatBitsToIntCase>	(this,	"floatbitstoint",	true,	false,	false,	ALL_SHADERS);
+	addFunctionCases<FloatBitsToUintCase>	(this,	"floatbitstouint",	true,	false,	false,	ALL_SHADERS);
+
+	addFunctionCases<FrexpCase>				(this,	"frexp",			true,	false,	false,	ALL_SHADERS);
+	addFunctionCases<LdexpCase>				(this,	"ldexp",			true,	false,	false,	ALL_SHADERS);
+	addFunctionCases<FmaCase>				(this,	"fma",				true,	false,	false,	ALL_SHADERS);
+
+	// (u)intBitsToFloat()
+	{
+		const deUint32		shaderBits	= NEW_SHADERS;
+		tcu::TestCaseGroup* intGroup	= new tcu::TestCaseGroup(m_testCtx, "intbitstofloat",	"intBitsToFloat() Tests");
+		tcu::TestCaseGroup* uintGroup	= new tcu::TestCaseGroup(m_testCtx, "uintbitstofloat",	"uintBitsToFloat() Tests");
+
+		addChild(intGroup);
+		addChild(uintGroup);
+
+		for (int vecSize = 1; vecSize < 4; vecSize++)
+		{
+			const glu::DataType		intType		= vecSize > 1 ? glu::getDataTypeIntVec(vecSize) : glu::TYPE_INT;
+			const glu::DataType		uintType	= vecSize > 1 ? glu::getDataTypeUintVec(vecSize) : glu::TYPE_UINT;
+
+			for (int shaderType = 0; shaderType < glu::SHADERTYPE_LAST; shaderType++)
+			{
+				if (shaderBits & (1<<shaderType))
+				{
+					intGroup->addChild(new BitsToFloatCase(getTestContext(), intType, glu::ShaderType(shaderType)));
+					uintGroup->addChild(new BitsToFloatCase(getTestContext(), uintType, glu::ShaderType(shaderType)));
+				}
+			}
+		}
+	}
+}
+
+} // shaderexecutor
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderCommonFunctionTests.hpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderCommonFunctionTests.hpp
new file mode 100644
index 0000000..b63cd3e
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderCommonFunctionTests.hpp
@@ -0,0 +1,63 @@
+#ifndef _VKTSHADERCOMMONFUNCTIONTESTS_HPP
+#define _VKTSHADERCOMMONFUNCTIONTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Common built-in function tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace shaderexecutor
+{
+
+// ShaderCommonFunctionTests
+
+class ShaderCommonFunctionTests : public tcu::TestCaseGroup
+{
+public:
+										ShaderCommonFunctionTests	(tcu::TestContext& testCtx);
+	virtual								~ShaderCommonFunctionTests	(void);
+
+	virtual void						init						(void);
+
+private:
+										ShaderCommonFunctionTests	(const ShaderCommonFunctionTests&);		// not allowed!
+	ShaderCommonFunctionTests&			operator=					(const ShaderCommonFunctionTests&);		// not allowed!
+};
+
+} // shaderexecutor
+} // vkt
+
+#endif // _VKTSHADERCOMMONFUNCTIONTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderExecutor.cpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderExecutor.cpp
new file mode 100644
index 0000000..ea952d0
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderExecutor.cpp
@@ -0,0 +1,3450 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan ShaderExecutor
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderExecutor.hpp"
+#include <map>
+#include <sstream>
+#include <iostream>
+
+#include "tcuVector.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuFormatUtil.hpp"
+#include "tcuTextureUtil.hpp"
+#include "deUniquePtr.hpp"
+#include "deStringUtil.hpp"
+#include "deSharedPtr.hpp"
+
+#include "vkMemUtil.hpp"
+#include "vkRef.hpp"
+#include "vkPlatform.hpp"
+#include "vkPrograms.hpp"
+#include "vkStrUtil.hpp"
+#include "vkRefUtil.hpp"
+#include "vkTypeUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkDeviceUtil.hpp"
+#include "vkImageUtil.hpp"
+
+#include "gluShaderUtil.hpp"
+
+using std::vector;
+using namespace vk;
+
+namespace vkt
+{
+namespace shaderexecutor
+{
+namespace
+{
+
+enum
+{
+	DEFAULT_RENDER_WIDTH	= 100,
+	DEFAULT_RENDER_HEIGHT	= 100,
+};
+
+// Shader utilities
+
+static VkClearValue	getDefaultClearColor (void)
+{
+	return makeClearValueColorF32(0.125f, 0.25f, 0.5f, 1.0f);
+}
+
+static void checkSupported (const Context& ctx, glu::ShaderType shaderType)
+{
+	const VkPhysicalDeviceFeatures& features = ctx.getDeviceFeatures();
+
+	if (shaderType == glu::SHADERTYPE_GEOMETRY && !features.geometryShader)
+		TCU_THROW(NotSupportedError, "Geometry shader type not supported by device");
+	else if (shaderType == glu::SHADERTYPE_TESSELLATION_CONTROL && !features.tessellationShader)
+		TCU_THROW(NotSupportedError, "Tessellation shader type not supported by device");
+	else if (shaderType == glu::SHADERTYPE_TESSELLATION_EVALUATION && !features.tessellationShader)
+		TCU_THROW(NotSupportedError, "Tessellation shader type not supported by device");
+}
+
+static std::string generateEmptyFragmentSource ()
+{
+	std::ostringstream src;
+
+	src <<	"#version 310 es\n"
+			"layout(location=0) out highp vec4 o_color;\n";
+
+	src << "void main (void)\n{\n";
+	src << "	o_color = vec4(0.0);\n";
+	src << "}\n";
+
+	return src.str();
+}
+
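+// Vertex shader used when the operation runs in a later stage: passes the test inputs through as flat outputs.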
+static std::string generatePassthroughVertexShader (const std::vector<Symbol>& inputs, const char* inputPrefix, const char* outputPrefix)
+{
+
+	std::ostringstream	src;
+	int					location	= 0;
+
+	src <<	"#version 310 es\n"
+			"layout(location = " << location << ") in highp vec4 a_position;\n";
+
+	for (vector<Symbol>::const_iterator input = inputs.begin(); input != inputs.end(); ++input)
+	{
+		location++;
+		src << "layout(location = "<< location << ") in " << glu::declare(input->varType, inputPrefix + input->name) << ";\n"
+			<< "layout(location = " << location - 1 << ") flat out " << glu::declare(input->varType, outputPrefix + input->name) << ";\n";
+	}
+
+	src << "\nvoid main (void)\n{\n"
+		<< "	gl_Position = a_position;\n"
+		<< "	gl_PointSize = 1.0;\n";
+
+	for (vector<Symbol>::const_iterator input = inputs.begin(); input != inputs.end(); ++input)
+		src << "\t" << outputPrefix << input->name << " = " << inputPrefix << input->name << ";\n";
+
+	src << "}\n";
+
+	return src.str();
+}
+
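+// Vertex shader that executes the operation itself; bool outputs are converted to ints since booleans cannot be used as shader interface variables.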
+static std::string generateVertexShader (const ShaderSpec& shaderSpec, const std::string& inputPrefix, const std::string& outputPrefix)
+{
+	DE_ASSERT(!inputPrefix.empty() && !outputPrefix.empty());
+
+	std::ostringstream	src;
+
+	src <<	"#version 310 es\n";
+
+	if (!shaderSpec.globalDeclarations.empty())
+		src << shaderSpec.globalDeclarations << "\n";
+
+	src << "layout(location = 0) in highp vec4 a_position;\n";
+
+	int locationNumber = 1;
+	for (vector<Symbol>::const_iterator input = shaderSpec.inputs.begin(); input != shaderSpec.inputs.end(); ++input, ++locationNumber)
+		src <<  "layout(location = " << locationNumber << ") in " << glu::declare(input->varType, inputPrefix + input->name) << ";\n";
+
+	locationNumber = 0;
+	for (vector<Symbol>::const_iterator output = shaderSpec.outputs.begin(); output != shaderSpec.outputs.end(); ++output, ++locationNumber)
+	{
+		DE_ASSERT(output->varType.isBasicType());
+
+		if (glu::isDataTypeBoolOrBVec(output->varType.getBasicType()))
+		{
+			const int				vecSize		= glu::getDataTypeScalarSize(output->varType.getBasicType());
+			const glu::DataType		intBaseType	= vecSize > 1 ? glu::getDataTypeIntVec(vecSize) : glu::TYPE_INT;
+			const glu::VarType		intType		(intBaseType, glu::PRECISION_HIGHP);
+
+			src << "layout(location = " << locationNumber << ") flat out " << glu::declare(intType, outputPrefix + output->name) << ";\n";
+		}
+		else
+			src << "layout(location = " << locationNumber << ") flat out " << glu::declare(output->varType, outputPrefix + output->name) << ";\n";
+	}
+
+	src << "\n"
+		<< "void main (void)\n"
+		<< "{\n"
+		<< "	gl_Position = a_position;\n"
+		<< "	gl_PointSize = 1.0;\n";
+
+	// Declare & fetch local input variables
+	for (vector<Symbol>::const_iterator input = shaderSpec.inputs.begin(); input != shaderSpec.inputs.end(); ++input)
+		src << "\t" << glu::declare(input->varType, input->name) << " = " << inputPrefix << input->name << ";\n";
+
+	// Declare local output variables
+	for (vector<Symbol>::const_iterator output = shaderSpec.outputs.begin(); output != shaderSpec.outputs.end(); ++output)
+		src << "\t" << glu::declare(output->varType, output->name) << ";\n";
+
+	// Operation - indented to correct level.
+	{
+		std::istringstream	opSrc	(shaderSpec.source);
+		std::string			line;
+
+		while (std::getline(opSrc, line))
+			src << "\t" << line << "\n";
+	}
+
+	// Assignments to outputs.
+	for (vector<Symbol>::const_iterator output = shaderSpec.outputs.begin(); output != shaderSpec.outputs.end(); ++output)
+	{
+		if (glu::isDataTypeBoolOrBVec(output->varType.getBasicType()))
+		{
+			const int				vecSize		= glu::getDataTypeScalarSize(output->varType.getBasicType());
+			const glu::DataType		intBaseType	= vecSize > 1 ? glu::getDataTypeIntVec(vecSize) : glu::TYPE_INT;
+
+			src << "\t" << outputPrefix << output->name << " = " << glu::getDataTypeName(intBaseType) << "(" << output->name << ");\n";
+		}
+		else
+			src << "\t" << outputPrefix << output->name << " = " << output->name << ";\n";
+	}
+
+	src << "}\n";
+
+	return src.str();
+}
+
+struct FragmentOutputLayout
+{
+	std::vector<const Symbol*>		locationSymbols;		//! Symbols by location
+	std::map<std::string, int>		locationMap;			//! Map from symbol name to start location
+};
+
+static void generateFragShaderOutputDecl (std::ostream& src, const ShaderSpec& shaderSpec, bool useIntOutputs, const std::map<std::string, int>& outLocationMap, const std::string& outputPrefix)
+{
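+	// Declare one fragment output per symbol at its assigned location. Float outputs are declared as uints when useIntOutputs
+	// is set (values are written as bit patterns), bools are declared as ints, and matrices are split into one vector output per column.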
+	for (int outNdx = 0; outNdx < (int)shaderSpec.outputs.size(); ++outNdx)
+	{
+		const Symbol&				output		= shaderSpec.outputs[outNdx];
+		const int					location	= de::lookup(outLocationMap, output.name);
+		const std::string			outVarName	= outputPrefix + output.name;
+		glu::VariableDeclaration	decl		(output.varType, outVarName, glu::STORAGE_OUT, glu::INTERPOLATION_LAST, glu::Layout(location));
+
+		TCU_CHECK_INTERNAL(output.varType.isBasicType());
+
+		if (useIntOutputs && glu::isDataTypeFloatOrVec(output.varType.getBasicType()))
+		{
+			const int			vecSize			= glu::getDataTypeScalarSize(output.varType.getBasicType());
+			const glu::DataType	uintBasicType	= vecSize > 1 ? glu::getDataTypeUintVec(vecSize) : glu::TYPE_UINT;
+			const glu::VarType	uintType		(uintBasicType, glu::PRECISION_HIGHP);
+
+			decl.varType = uintType;
+			src << decl << ";\n";
+		}
+		else if (glu::isDataTypeBoolOrBVec(output.varType.getBasicType()))
+		{
+			const int			vecSize			= glu::getDataTypeScalarSize(output.varType.getBasicType());
+			const glu::DataType	intBasicType	= vecSize > 1 ? glu::getDataTypeIntVec(vecSize) : glu::TYPE_INT;
+			const glu::VarType	intType			(intBasicType, glu::PRECISION_HIGHP);
+
+			decl.varType = intType;
+			src << decl << ";\n";
+		}
+		else if (glu::isDataTypeMatrix(output.varType.getBasicType()))
+		{
+			const int			vecSize			= glu::getDataTypeMatrixNumRows(output.varType.getBasicType());
+			const int			numVecs			= glu::getDataTypeMatrixNumColumns(output.varType.getBasicType());
+			const glu::DataType	uintBasicType	= glu::getDataTypeUintVec(vecSize);
+			const glu::VarType	uintType		(uintBasicType, glu::PRECISION_HIGHP);
+
+			decl.varType = uintType;
+			for (int vecNdx = 0; vecNdx < numVecs; ++vecNdx)
+			{
+				decl.name				= outVarName + "_" + de::toString(vecNdx);
+				decl.layout.location	= location + vecNdx;
+				src << decl << ";\n";
+			}
+		}
+		else
+			src << decl << ";\n";
+	}
+}
+
+static void generateFragShaderOutAssign (std::ostream& src, const ShaderSpec& shaderSpec, bool useIntOutputs, const std::string& valuePrefix, const std::string& outputPrefix)
+{
+	for (vector<Symbol>::const_iterator output = shaderSpec.outputs.begin(); output != shaderSpec.outputs.end(); ++output)
+	{
+		if (useIntOutputs && glu::isDataTypeFloatOrVec(output->varType.getBasicType()))
+			src << "	o_" << output->name << " = floatBitsToUint(" << valuePrefix << output->name << ");\n";
+		else if (glu::isDataTypeMatrix(output->varType.getBasicType()))
+		{
+			const int	numVecs		= glu::getDataTypeMatrixNumColumns(output->varType.getBasicType());
+
+			for (int vecNdx = 0; vecNdx < numVecs; ++vecNdx)
+				if (useIntOutputs)
+					src << "\t" << outputPrefix << output->name << "_" << vecNdx << " = floatBitsToUint(" << valuePrefix << output->name << "[" << vecNdx << "]);\n";
+				else
+					src << "\t" << outputPrefix << output->name << "_" << vecNdx << " = " << valuePrefix << output->name << "[" << vecNdx << "];\n";
+		}
+		else if (glu::isDataTypeBoolOrBVec(output->varType.getBasicType()))
+		{
+			const int				vecSize		= glu::getDataTypeScalarSize(output->varType.getBasicType());
+			const glu::DataType		intBaseType	= vecSize > 1 ? glu::getDataTypeIntVec(vecSize) : glu::TYPE_INT;
+
+			src << "\t" << outputPrefix << output->name << " = " << glu::getDataTypeName(intBaseType) << "(" << valuePrefix << output->name << ");\n";
+		}
+		else
+			src << "\t" << outputPrefix << output->name << " = " << valuePrefix << output->name << ";\n";
+	}
+}
+
+static std::string generatePassthroughFragmentShader (const ShaderSpec& shaderSpec, bool useIntOutputs, const std::map<std::string, int>& outLocationMap, const std::string& inputPrefix, const std::string& outputPrefix)
+{
+	std::ostringstream	src;
+
+	src <<	"#version 310 es\n";
+
+	if (!shaderSpec.globalDeclarations.empty())
+		src << shaderSpec.globalDeclarations << "\n";
+
+	int locationNumber = 0;
+	for (vector<Symbol>::const_iterator output = shaderSpec.outputs.begin(); output != shaderSpec.outputs.end(); ++output, ++locationNumber)
+	{
+		if (glu::isDataTypeBoolOrBVec(output->varType.getBasicType()))
+		{
+			const int				vecSize		= glu::getDataTypeScalarSize(output->varType.getBasicType());
+			const glu::DataType		intBaseType	= vecSize > 1 ? glu::getDataTypeIntVec(vecSize) : glu::TYPE_INT;
+			const glu::VarType		intType		(intBaseType, glu::PRECISION_HIGHP);
+
+			src << "layout(location = " << locationNumber << ") flat in " << glu::declare(intType, inputPrefix + output->name) << ";\n";
+		}
+		else
+			src << "layout(location = " << locationNumber << ") flat in " << glu::declare(output->varType, inputPrefix + output->name) << ";\n";
+	}
+
+	generateFragShaderOutputDecl(src, shaderSpec, useIntOutputs, outLocationMap, outputPrefix);
+
+	src << "\nvoid main (void)\n{\n";
+
+	generateFragShaderOutAssign(src, shaderSpec, useIntOutputs, inputPrefix, outputPrefix);
+
+	src << "}\n";
+
+	return src.str();
+}
+
+static std::string generateGeometryShader (const ShaderSpec& shaderSpec, const std::string& inputPrefix, const std::string& outputPrefix)
+{
+	DE_ASSERT(!inputPrefix.empty() && !outputPrefix.empty());
+
+	std::ostringstream	src;
+
+	src <<	"#version 310 es\n"
+		"#extension GL_EXT_geometry_shader : require\n";
+
+	if (!shaderSpec.globalDeclarations.empty())
+		src << shaderSpec.globalDeclarations << "\n";
+
+	src << "layout(points) in;\n"
+		<< "layout(points, max_vertices = 1) out;\n";
+
+	int locationNumber = 0;
+	for (vector<Symbol>::const_iterator input = shaderSpec.inputs.begin(); input != shaderSpec.inputs.end(); ++input, ++locationNumber)
+		src << "layout(location = " << locationNumber << ") flat in " << glu::declare(input->varType, inputPrefix + input->name) << "[];\n";
+
+	locationNumber = 0;
+	for (vector<Symbol>::const_iterator output = shaderSpec.outputs.begin(); output != shaderSpec.outputs.end(); ++output, ++locationNumber)
+	{
+		DE_ASSERT(output->varType.isBasicType());
+
+		if (glu::isDataTypeBoolOrBVec(output->varType.getBasicType()))
+		{
+			const int				vecSize		= glu::getDataTypeScalarSize(output->varType.getBasicType());
+			const glu::DataType		intBaseType	= vecSize > 1 ? glu::getDataTypeIntVec(vecSize) : glu::TYPE_INT;
+			const glu::VarType		intType		(intBaseType, glu::PRECISION_HIGHP);
+
+			src << "layout(location = " << locationNumber << ") flat out " << glu::declare(intType, outputPrefix + output->name) << ";\n";
+		}
+		else
+			src << "layout(location = " << locationNumber << ") flat out " << glu::declare(output->varType, outputPrefix + output->name) << ";\n";
+	}
+
+	src << "\n"
+		<< "void main (void)\n"
+		<< "{\n"
+		<< "	gl_Position = gl_in[0].gl_Position;\n\n";
+
+	// Fetch input variables
+	for (vector<Symbol>::const_iterator input = shaderSpec.inputs.begin(); input != shaderSpec.inputs.end(); ++input)
+		src << "\t" << glu::declare(input->varType, input->name) << " = " << inputPrefix << input->name << "[0];\n";
+
+	// Declare local output variables.
+	for (vector<Symbol>::const_iterator output = shaderSpec.outputs.begin(); output != shaderSpec.outputs.end(); ++output)
+		src << "\t" << glu::declare(output->varType, output->name) << ";\n";
+
+	src << "\n";
+
+	// Operation - indented to correct level.
+	{
+		std::istringstream	opSrc	(shaderSpec.source);
+		std::string			line;
+
+		while (std::getline(opSrc, line))
+			src << "\t" << line << "\n";
+	}
+
+	// Assignments to outputs.
+	for (vector<Symbol>::const_iterator output = shaderSpec.outputs.begin(); output != shaderSpec.outputs.end(); ++output)
+	{
+		if (glu::isDataTypeBoolOrBVec(output->varType.getBasicType()))
+		{
+			const int				vecSize		= glu::getDataTypeScalarSize(output->varType.getBasicType());
+			const glu::DataType		intBaseType	= vecSize > 1 ? glu::getDataTypeIntVec(vecSize) : glu::TYPE_INT;
+
+			src << "\t" << outputPrefix << output->name << " = " << glu::getDataTypeName(intBaseType) << "(" << output->name << ");\n";
+		}
+		else
+			src << "\t" << outputPrefix << output->name << " = " << output->name << ";\n";
+	}
+
+	src << "	EmitVertex();\n"
+		<< "	EndPrimitive();\n"
+		<< "}\n";
+
+	return src.str();
+}
+
+static std::string generateFragmentShader (const ShaderSpec& shaderSpec, bool useIntOutputs, const std::map<std::string, int>& outLocationMap, const std::string& inputPrefix, const std::string& outputPrefix)
+{
+	std::ostringstream src;
+	src <<  "#version 310 es\n";
+	if (!shaderSpec.globalDeclarations.empty())
+		src << shaderSpec.globalDeclarations << "\n";
+
+	int locationNumber = 0;
+	for (vector<Symbol>::const_iterator input = shaderSpec.inputs.begin(); input != shaderSpec.inputs.end(); ++input, ++locationNumber)
+		src << "layout(location = " << locationNumber << ") flat in " << glu::declare(input->varType, inputPrefix + input->name) << ";\n";
+
+	generateFragShaderOutputDecl(src, shaderSpec, useIntOutputs, outLocationMap, outputPrefix);
+
+	src << "\nvoid main (void)\n{\n";
+
+	// Declare & fetch local input variables
+	for (vector<Symbol>::const_iterator input = shaderSpec.inputs.begin(); input != shaderSpec.inputs.end(); ++input)
+		src << "\t" << glu::declare(input->varType, input->name) << " = " << inputPrefix << input->name << ";\n";
+
+	// Declare output variables
+	for (vector<Symbol>::const_iterator output = shaderSpec.outputs.begin(); output != shaderSpec.outputs.end(); ++output)
+		src << "\t" << glu::declare(output->varType, output->name) << ";\n";
+
+	// Operation - indented to correct level.
+	{
+		std::istringstream	opSrc	(shaderSpec.source);
+		std::string			line;
+
+		while (std::getline(opSrc, line))
+			src << "\t" << line << "\n";
+	}
+
+	generateFragShaderOutAssign(src, shaderSpec, useIntOutputs, "", outputPrefix);
+
+	src << "}\n";
+
+	return src.str();
+}
+
+// FragmentOutExecutor
+
+class FragmentOutExecutor : public ShaderExecutor
+{
+public:
+														FragmentOutExecutor		(const ShaderSpec& shaderSpec, glu::ShaderType shaderType);
+	virtual												~FragmentOutExecutor	(void);
+
+	virtual void										execute					(const Context&			ctx,
+																				 int					numValues,
+																				 const void* const*		inputs,
+																				 void* const*			outputs);
+
+protected:
+	const FragmentOutputLayout							m_outputLayout;
+private:
+	void												bindAttributes			(const Context&			ctx,
+																				 Allocator&				memAlloc,
+																				 int					numValues,
+																				 const void* const*		inputs);
+
+	void												addAttribute			(const Context&			ctx,
+																				 Allocator&				memAlloc,
+																				 deUint32				bindingLocation,
+																				 VkFormat				format,
+																				 deUint32				sizePerElement,
+																				 deUint32				count,
+																				 const void*			dataPtr);
+	// reinit render data members
+	virtual void										clearRenderData			(void);
+
+	typedef de::SharedPtr<Unique<VkImage> >				VkImageSp;
+	typedef de::SharedPtr<Unique<VkImageView> >			VkImageViewSp;
+	typedef de::SharedPtr<Unique<VkBuffer> >			VkBufferSp;
+	typedef de::SharedPtr<de::UniquePtr<Allocation> >	AllocationSp;
+
+	std::vector<VkVertexInputBindingDescription>		m_vertexBindingDescriptions;
+	std::vector<VkVertexInputAttributeDescription>		m_vertexAttributeDescriptions;
+	std::vector<VkBufferSp>								m_vertexBuffers;
+	std::vector<AllocationSp>							m_vertexBufferAllocs;
+};
+
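+// Assign consecutive fragment output locations; a symbol occupying several locations (e.g. a matrix) reserves one location per column.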
+static FragmentOutputLayout computeFragmentOutputLayout (const std::vector<Symbol>& symbols)
+{
+	FragmentOutputLayout	ret;
+	int						location	= 0;
+
+	for (std::vector<Symbol>::const_iterator it = symbols.begin(); it != symbols.end(); ++it)
+	{
+		const int	numLocations	= glu::getDataTypeNumLocations(it->varType.getBasicType());
+
+		TCU_CHECK_INTERNAL(!de::contains(ret.locationMap, it->name));
+		de::insert(ret.locationMap, it->name, location);
+		location += numLocations;
+
+		for (int ndx = 0; ndx < numLocations; ++ndx)
+			ret.locationSymbols.push_back(&*it);
+	}
+
+	return ret;
+}
+
+FragmentOutExecutor::FragmentOutExecutor (const ShaderSpec& shaderSpec, glu::ShaderType shaderType)
+	: ShaderExecutor	(shaderSpec, shaderType)
+	, m_outputLayout	(computeFragmentOutputLayout(m_shaderSpec.outputs))
+{
+}
+
+FragmentOutExecutor::~FragmentOutExecutor (void)
+{
+}
+
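+// Map each value index to a unique pixel center in normalized device coordinates so that exactly one fragment is shaded per value.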
+static std::vector<tcu::Vec2> computeVertexPositions (int numValues, const tcu::IVec2& renderSize)
+{
+	std::vector<tcu::Vec2> positions(numValues);
+	for (int valNdx = 0; valNdx < numValues; valNdx++)
+	{
+		const int		ix		= valNdx % renderSize.x();
+		const int		iy		= valNdx / renderSize.x();
+		const float		fx		= -1.0f + 2.0f*((float(ix) + 0.5f) / float(renderSize.x()));
+		const float		fy		= -1.0f + 2.0f*((float(iy) + 0.5f) / float(renderSize.y()));
+
+		positions[valNdx] = tcu::Vec2(fx, fy);
+	}
+
+	return positions;
+}
+
+static tcu::TextureFormat getRenderbufferFormatForOutput (const glu::VarType& outputType, bool useIntOutputs)
+{
+	const tcu::TextureFormat::ChannelOrder channelOrderMap[] =
+	{
+		tcu::TextureFormat::R,
+		tcu::TextureFormat::RG,
+		tcu::TextureFormat::RGBA,	// No RGB variants available.
+		tcu::TextureFormat::RGBA
+	};
+
+	const glu::DataType					basicType		= outputType.getBasicType();
+	const int							numComps		= glu::getDataTypeNumComponents(basicType);
+	tcu::TextureFormat::ChannelType		channelType;
+
+	switch (glu::getDataTypeScalarType(basicType))
+	{
+		case glu::TYPE_UINT:	channelType = tcu::TextureFormat::UNSIGNED_INT32;												break;
+		case glu::TYPE_INT:		channelType = tcu::TextureFormat::SIGNED_INT32;													break;
+		case glu::TYPE_BOOL:	channelType = tcu::TextureFormat::SIGNED_INT32;													break;
+		case glu::TYPE_FLOAT:	channelType = useIntOutputs ? tcu::TextureFormat::UNSIGNED_INT32 : tcu::TextureFormat::FLOAT;	break;
+		default:
+			throw tcu::InternalError("Invalid output type");
+	}
+
+	DE_ASSERT(de::inRange<int>(numComps, 1, DE_LENGTH_OF_ARRAY(channelOrderMap)));
+
+	return tcu::TextureFormat(channelOrderMap[numComps-1], channelType);
+}
+
+static VkFormat getAttributeFormat (const glu::DataType dataType)
+{
+	switch (dataType)
+	{
+		case glu::TYPE_FLOAT:			return VK_FORMAT_R32_SFLOAT;
+		case glu::TYPE_FLOAT_VEC2:		return VK_FORMAT_R32G32_SFLOAT;
+		case glu::TYPE_FLOAT_VEC3:		return VK_FORMAT_R32G32B32_SFLOAT;
+		case glu::TYPE_FLOAT_VEC4:		return VK_FORMAT_R32G32B32A32_SFLOAT;
+
+		case glu::TYPE_INT:				return VK_FORMAT_R32_SINT;
+		case glu::TYPE_INT_VEC2:		return VK_FORMAT_R32G32_SINT;
+		case glu::TYPE_INT_VEC3:		return VK_FORMAT_R32G32B32_SINT;
+		case glu::TYPE_INT_VEC4:		return VK_FORMAT_R32G32B32A32_SINT;
+
+		case glu::TYPE_UINT:			return VK_FORMAT_R32_UINT;
+		case glu::TYPE_UINT_VEC2:		return VK_FORMAT_R32G32_UINT;
+		case glu::TYPE_UINT_VEC3:		return VK_FORMAT_R32G32B32_UINT;
+		case glu::TYPE_UINT_VEC4:		return VK_FORMAT_R32G32B32A32_UINT;
+
+		case glu::TYPE_FLOAT_MAT2:		return VK_FORMAT_R32G32_SFLOAT;
+		case glu::TYPE_FLOAT_MAT2X3:	return VK_FORMAT_R32G32B32_SFLOAT;
+		case glu::TYPE_FLOAT_MAT2X4:	return VK_FORMAT_R32G32B32A32_SFLOAT;
+		case glu::TYPE_FLOAT_MAT3X2:	return VK_FORMAT_R32G32_SFLOAT;
+		case glu::TYPE_FLOAT_MAT3:		return VK_FORMAT_R32G32B32_SFLOAT;
+		case glu::TYPE_FLOAT_MAT3X4:	return VK_FORMAT_R32G32B32A32_SFLOAT;
+		case glu::TYPE_FLOAT_MAT4X2:	return VK_FORMAT_R32G32_SFLOAT;
+		case glu::TYPE_FLOAT_MAT4X3:	return VK_FORMAT_R32G32B32_SFLOAT;
+		case glu::TYPE_FLOAT_MAT4:		return VK_FORMAT_R32G32B32A32_SFLOAT;
+		default:
+			DE_ASSERT(false);
+			return VK_FORMAT_UNDEFINED;
+	}
+}
+
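+// Adds a vertex attribute backed by its own binding and a dedicated host-visible vertex buffer filled from dataPtr.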
+void FragmentOutExecutor::addAttribute (const Context& ctx, Allocator& memAlloc, deUint32 bindingLocation, VkFormat format, deUint32 sizePerElement, deUint32 count, const void* dataPtr)
+{
+	// Add binding specification
+	const deUint32 binding = (deUint32)m_vertexBindingDescriptions.size();
+	const VkVertexInputBindingDescription bindingDescription =
+	{
+		binding,
+		sizePerElement,
+		VK_VERTEX_INPUT_RATE_VERTEX
+	};
+
+	m_vertexBindingDescriptions.push_back(bindingDescription);
+
+	// Add location and format specification
+	const VkVertexInputAttributeDescription attributeDescription =
+	{
+		bindingLocation,			// deUint32	location;
+		binding,					// deUint32	binding;
+		format,						// VkFormat	format;
+		0u,							// deUint32	offsetInBytes;
+	};
+
+	m_vertexAttributeDescriptions.push_back(attributeDescription);
+
+	// Upload data to buffer
+	const VkDevice				vkDevice			= ctx.getDevice();
+	const DeviceInterface&		vk					= ctx.getDeviceInterface();
+	const deUint32				queueFamilyIndex	= ctx.getUniversalQueueFamilyIndex();
+
+	const VkDeviceSize inputSize = sizePerElement * count;
+	const VkBufferCreateInfo vertexBufferParams =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+		DE_NULL,									// const void*			pNext;
+		0u,											// VkBufferCreateFlags	flags;
+		inputSize,									// VkDeviceSize			size;
+		VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+		VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+		1u,											// deUint32				queueFamilyCount;
+		&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+	};
+
+	Move<VkBuffer> buffer = createBuffer(vk, vkDevice, &vertexBufferParams);
+	de::MovePtr<Allocation> alloc = memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *buffer), MemoryRequirement::HostVisible);
+	VK_CHECK(vk.bindBufferMemory(vkDevice, *buffer, alloc->getMemory(), alloc->getOffset()));
+
+	deMemcpy(alloc->getHostPtr(), dataPtr, (size_t)inputSize);
+	flushMappedMemoryRange(vk, vkDevice, alloc->getMemory(), alloc->getOffset(), inputSize);
+
+	m_vertexBuffers.push_back(de::SharedPtr<Unique<VkBuffer> >(new Unique<VkBuffer>(buffer)));
+	m_vertexBufferAllocs.push_back(de::SharedPtr<de::UniquePtr<Allocation> >(new de::UniquePtr<Allocation>(alloc)));
+}
+
+void FragmentOutExecutor::bindAttributes (const Context& ctx, Allocator& memAlloc, int numValues, const void* const* inputs)
+{
+	// Input attributes
+	for (int inputNdx = 0; inputNdx < (int)m_shaderSpec.inputs.size(); inputNdx++)
+	{
+		const Symbol&		symbol			= m_shaderSpec.inputs[inputNdx];
+		const void*			ptr				= inputs[inputNdx];
+		const glu::DataType	basicType		= symbol.varType.getBasicType();
+		const int			vecSize			= glu::getDataTypeScalarSize(basicType);
+		const VkFormat		format			= getAttributeFormat(basicType);
+		int					elementSize		= 0;
+		int					numAttrsToAdd	= 1;
+
+		if (glu::isDataTypeFloatOrVec(basicType))
+			elementSize = sizeof(float);
+		else if (glu::isDataTypeIntOrIVec(basicType))
+			elementSize = sizeof(int);
+		else if (glu::isDataTypeUintOrUVec(basicType))
+			elementSize = sizeof(deUint32);
+		else if (glu::isDataTypeMatrix(basicType))
+		{
+			int		numRows	= glu::getDataTypeMatrixNumRows(basicType);
+			int		numCols	= glu::getDataTypeMatrixNumColumns(basicType);
+
+			elementSize = numRows * numCols * (int)sizeof(float);
+			numAttrsToAdd = numCols;
+		}
+		else
+			DE_ASSERT(false);
+
+		// Add attributes; for matrix inputs every column is bound as a separate attribute
+		for (int attrNdx = 0; attrNdx < numAttrsToAdd; attrNdx++)
+		{
+			addAttribute(ctx, memAlloc, (deUint32)m_vertexBindingDescriptions.size(), format, elementSize * vecSize, numValues, ptr);
+		}
+	}
+}
+
+void FragmentOutExecutor::clearRenderData (void)
+{
+	m_vertexBindingDescriptions.clear();
+	m_vertexAttributeDescriptions.clear();
+	m_vertexBuffers.clear();
+	m_vertexBufferAllocs.clear();
+}
+
+void FragmentOutExecutor::execute (const Context& ctx, int numValues, const void* const* inputs, void* const* outputs)
+{
+	checkSupported(ctx, m_shaderType);
+
+	const VkDevice										vkDevice				= ctx.getDevice();
+	const DeviceInterface&								vk						= ctx.getDeviceInterface();
+	const VkQueue										queue					= ctx.getUniversalQueue();
+	const deUint32										queueFamilyIndex		= ctx.getUniversalQueueFamilyIndex();
+	Allocator&											memAlloc				= ctx.getDefaultAllocator();
+
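+	// Lay values out one per pixel: at most DEFAULT_RENDER_WIDTH values per row and as many rows as needed to cover numValues.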
+	const deUint32										renderSizeX				= de::min(static_cast<deUint32>(DEFAULT_RENDER_WIDTH), (deUint32)numValues);
+	const deUint32										renderSizeY				= ((deUint32)numValues / renderSizeX) + (((deUint32)numValues % renderSizeX != 0) ? 1u : 0u);
+	const tcu::UVec2									renderSize				(renderSizeX, renderSizeY);
+	std::vector<tcu::Vec2>								positions;
+
+	const bool											useGeometryShader		= m_shaderType == glu::SHADERTYPE_GEOMETRY;
+
+	std::vector<VkImageSp>								colorImages;
+	std::vector<VkImageMemoryBarrier>					colorImagePreRenderBarriers;
+	std::vector<VkImageMemoryBarrier>					colorImagePostRenderBarriers;
+	std::vector<AllocationSp>							colorImageAllocs;
+	std::vector<VkAttachmentDescription>				attachments;
+	std::vector<VkClearValue>							attachmentClearValues;
+	std::vector<VkImageViewSp>							colorImageViews;
+
+	std::vector<VkPipelineColorBlendAttachmentState>	colorBlendAttachmentStates;
+	std::vector<VkAttachmentReference>					colorAttachmentReferences;
+
+	Move<VkRenderPass>									renderPass;
+	Move<VkFramebuffer>									framebuffer;
+	Move<VkPipelineLayout>								pipelineLayout;
+	Move<VkPipeline>									graphicsPipeline;
+
+	Move<VkShaderModule>								vertexShaderModule;
+	Move<VkShaderModule>								geometryShaderModule;
+	Move<VkShaderModule>								fragmentShaderModule;
+
+	Move<VkCommandPool>									cmdPool;
+	Move<VkCommandBuffer>								cmdBuffer;
+
+	Move<VkFence>										fence;
+
+	Move<VkDescriptorPool>								descriptorPool;
+	Move<VkDescriptorSetLayout>							descriptorSetLayout;
+	Move<VkDescriptorSet>								descriptorSet;
+
+	clearRenderData();
+
+	// Compute positions - 1px points are used to drive fragment shading.
+	positions = computeVertexPositions(numValues, renderSize.cast<int>());
+
+	// Bind attributes
+	addAttribute(ctx, memAlloc, 0u, VK_FORMAT_R32G32_SFLOAT, sizeof(tcu::Vec2), (deUint32)positions.size(), &positions[0]);
+	bindAttributes(ctx, memAlloc, numValues, inputs);
+
+	// Create color images
+	{
+		const VkPipelineColorBlendAttachmentState colorBlendAttachmentState =
+		{
+			VK_FALSE,																	// VkBool32						blendEnable;
+			VK_BLEND_FACTOR_ONE,														// VkBlendFactor				srcColorBlendFactor;
+			VK_BLEND_FACTOR_ZERO,														// VkBlendFactor				dstColorBlendFactor;
+			VK_BLEND_OP_ADD,															// VkBlendOp					colorBlendOp;
+			VK_BLEND_FACTOR_ONE,														// VkBlendFactor				srcAlphaBlendFactor;
+			VK_BLEND_FACTOR_ZERO,														// VkBlendFactor				dstAlphaBlendFactor;
+			VK_BLEND_OP_ADD,															// VkBlendOp					alphaBlendOp;
+			(VK_COLOR_COMPONENT_R_BIT |
+			 VK_COLOR_COMPONENT_G_BIT |
+			 VK_COLOR_COMPONENT_B_BIT |
+			 VK_COLOR_COMPONENT_A_BIT)													// VkColorComponentFlags		colorWriteMask;
+		};
+
+		for (int outNdx = 0; outNdx < (int)m_outputLayout.locationSymbols.size(); ++outNdx)
+		{
+			bool												isFloat = glu::isDataTypeFloatOrVec(m_shaderSpec.outputs[outNdx].varType.getBasicType());
+			bool												isSigned = glu::isDataTypeIntOrIVec(m_shaderSpec.outputs[outNdx].varType.getBasicType());
+			VkFormat											colorFormat = isFloat ? VK_FORMAT_R32G32B32A32_SFLOAT : (isSigned ? VK_FORMAT_R32G32B32A32_SINT : VK_FORMAT_R32G32B32A32_UINT);
+
+			const VkImageCreateInfo	 colorImageParams =
+			{
+				VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,										// VkStructureType				sType;
+				DE_NULL,																	// const void*					pNext;
+				0u,																			// VkImageCreateFlags			flags;
+				VK_IMAGE_TYPE_2D,															// VkImageType					imageType;
+				colorFormat,																// VkFormat						format;
+				{ renderSize.x(), renderSize.y(), 1u },										// VkExtent3D					extent;
+				1u,																			// deUint32						mipLevels;
+				1u,																			// deUint32						arraySize;
+				VK_SAMPLE_COUNT_1_BIT,														// VkSampleCountFlagBits		samples;
+				VK_IMAGE_TILING_OPTIMAL,													// VkImageTiling				tiling;
+				VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,		// VkImageUsageFlags			usage;
+				VK_SHARING_MODE_EXCLUSIVE,													// VkSharingMode				sharingMode;
+				1u,																			// deUint32						queueFamilyCount;
+				&queueFamilyIndex,															// const deUint32*				pQueueFamilyIndices;
+				VK_IMAGE_LAYOUT_UNDEFINED,													// VkImageLayout				initialLayout;
+			};
+
+			const VkAttachmentDescription colorAttachmentDescription =
+			{
+				0u,																			// VkAttachmentDescriptionFlags	flags;
+				colorFormat,																// VkFormat						format;
+				VK_SAMPLE_COUNT_1_BIT,														// VkSampleCountFlagBits		samples;
+				VK_ATTACHMENT_LOAD_OP_CLEAR,												// VkAttachmentLoadOp			loadOp;
+				VK_ATTACHMENT_STORE_OP_STORE,												// VkAttachmentStoreOp			storeOp;
+				VK_ATTACHMENT_LOAD_OP_DONT_CARE,											// VkAttachmentLoadOp			stencilLoadOp;
+				VK_ATTACHMENT_STORE_OP_DONT_CARE,											// VkAttachmentStoreOp			stencilStoreOp;
+				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,									// VkImageLayout				initialLayout;
+				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,									// VkImageLayout				finalLayout;
+			};
+
+			Move<VkImage> colorImage = createImage(vk, vkDevice, &colorImageParams);
+			colorImages.push_back(de::SharedPtr<Unique<VkImage> >(new Unique<VkImage>(colorImage)));
+			attachmentClearValues.push_back(getDefaultClearColor());
+
+			// Allocate and bind color image memory
+			{
+				de::MovePtr<Allocation> colorImageAlloc = memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, colorImages.back().get()->get()), MemoryRequirement::Any);
+				VK_CHECK(vk.bindImageMemory(vkDevice, colorImages.back().get()->get(), colorImageAlloc->getMemory(), colorImageAlloc->getOffset()));
+				colorImageAllocs.push_back(de::SharedPtr<de::UniquePtr<Allocation> >(new de::UniquePtr<Allocation>(colorImageAlloc)));
+
+				attachments.push_back(colorAttachmentDescription);
+				colorBlendAttachmentStates.push_back(colorBlendAttachmentState);
+
+				const VkAttachmentReference colorAttachmentReference = {
+					(deUint32) (colorImages.size() - 1),			//	deUint32		attachment;
+					VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL		//	VkImageLayout	layout;
+				};
+
+				colorAttachmentReferences.push_back(colorAttachmentReference);
+			}
+
+			// Create color attachment view
+			{
+				const VkImageViewCreateInfo colorImageViewParams =
+				{
+					VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,			// VkStructureType			sType;
+					DE_NULL,											// const void*				pNext;
+					0u,													// VkImageViewCreateFlags	flags;
+					colorImages.back().get()->get(),					// VkImage					image;
+					VK_IMAGE_VIEW_TYPE_2D,								// VkImageViewType			viewType;
+					colorFormat,										// VkFormat					format;
+					{
+						VK_COMPONENT_SWIZZLE_R,							// VkComponentSwizzle		r;
+						VK_COMPONENT_SWIZZLE_G,							// VkComponentSwizzle		g;
+						VK_COMPONENT_SWIZZLE_B,							// VkComponentSwizzle		b;
+						VK_COMPONENT_SWIZZLE_A							// VkComponentSwizzle		a;
+					},													// VkComponentMapping		components;
+					{
+						VK_IMAGE_ASPECT_COLOR_BIT,						// VkImageAspectFlags		aspectMask;
+						0u,												// deUint32					baseMipLevel;
+						1u,												// deUint32					mipLevels;
+						0u,												// deUint32					baseArraySlice;
+						1u												// deUint32					arraySize;
+					}													// VkImageSubresourceRange	subresourceRange;
+				};
+
+				Move<VkImageView> colorImageView = createImageView(vk, vkDevice, &colorImageViewParams);
+				colorImageViews.push_back(de::SharedPtr<Unique<VkImageView> >(new Unique<VkImageView>(colorImageView)));
+
+				const VkImageMemoryBarrier	colorImagePreRenderBarrier =
+				{
+					VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,					// sType
+					DE_NULL,												// pNext
+					0u,														// srcAccessMask
+					(VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
+					VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT),					// dstAccessMask
+					VK_IMAGE_LAYOUT_UNDEFINED,								// oldLayout
+					VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,				// newLayout
+					VK_QUEUE_FAMILY_IGNORED,								// srcQueueFamilyIndex
+					VK_QUEUE_FAMILY_IGNORED,								// dstQueueFamilyIndex
+					colorImages.back().get()->get(),						// image
+					{
+						VK_IMAGE_ASPECT_COLOR_BIT,								// aspectMask
+						0u,														// baseMipLevel
+						1u,														// levelCount
+						0u,														// baseArrayLayer
+						1u,														// layerCount
+					}														// subresourceRange
+				};
+				colorImagePreRenderBarriers.push_back(colorImagePreRenderBarrier);
+
+				const VkImageMemoryBarrier	colorImagePostRenderBarrier =
+				{
+					VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,					// sType
+					DE_NULL,												// pNext
+					(VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |
+					VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT),					// srcAccessMask
+					VK_ACCESS_TRANSFER_READ_BIT,							// dstAccessMask
+					VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,				// oldLayout
+					VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,					// newLayout
+					VK_QUEUE_FAMILY_IGNORED,								// srcQueueFamilyIndex
+					VK_QUEUE_FAMILY_IGNORED,								// dstQueueFamilyIndex
+					colorImages.back().get()->get(),						// image
+					{
+						VK_IMAGE_ASPECT_COLOR_BIT,								// aspectMask
+						0u,														// baseMipLevel
+						1u,														// levelCount
+						0u,														// baseArrayLayer
+						1u,														// layerCount
+					}														// subresourceRange
+				};
+				colorImagePostRenderBarriers.push_back(colorImagePostRenderBarrier);
+			}
+		}
+	}
+
+	// Create render pass
+	{
+		const VkSubpassDescription subpassDescription =
+		{
+			0u,													// VkSubpassDescriptionFlags	flags;
+			VK_PIPELINE_BIND_POINT_GRAPHICS,					// VkPipelineBindPoint			pipelineBindPoint;
+			0u,													// deUint32						inputAttachmentCount;
+			DE_NULL,											// const VkAttachmentReference*	pInputAttachments;
+			(deUint32)colorImages.size(),						// deUint32						colorAttachmentCount;
+			&colorAttachmentReferences[0],						// const VkAttachmentReference*	pColorAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pResolveAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pDepthStencilAttachment;
+			0u,													// deUint32						preserveAttachmentCount;
+			DE_NULL												// const VkAttachmentReference*	pPreserveAttachments;
+		};
+
+		const VkRenderPassCreateInfo renderPassParams =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,			// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			(VkRenderPassCreateFlags)0,							// VkRenderPassCreateFlags			flags;
+			(deUint32)attachments.size(),						// deUint32							attachmentCount;
+			&attachments[0],									// const VkAttachmentDescription*	pAttachments;
+			1u,													// deUint32							subpassCount;
+			&subpassDescription,								// const VkSubpassDescription*		pSubpasses;
+			0u,													// deUint32							dependencyCount;
+			DE_NULL												// const VkSubpassDependency*		pDependencies;
+		};
+
+		renderPass = createRenderPass(vk, vkDevice, &renderPassParams);
+	}
+
+	// Create framebuffer
+	{
+		std::vector<VkImageView> views(colorImageViews.size());
+		for (size_t i = 0; i < colorImageViews.size(); i++)
+		{
+			views[i] = colorImageViews[i].get()->get();
+		}
+
+		const VkFramebufferCreateInfo framebufferParams =
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,			// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			0u,													// VkFramebufferCreateFlags		flags;
+			*renderPass,										// VkRenderPass					renderPass;
+			(deUint32)views.size(),								// deUint32						attachmentCount;
+			&views[0],											// const VkImageView*			pAttachments;
+			(deUint32)renderSize.x(),							// deUint32						width;
+			(deUint32)renderSize.y(),							// deUint32						height;
+			1u													// deUint32						layers;
+		};
+
+		framebuffer = createFramebuffer(vk, vkDevice, &framebufferParams);
+	}
+
+	// Create descriptors
+	{
+		addUniforms(vkDevice, vk, queue, queueFamilyIndex, memAlloc);
+
+		descriptorSetLayout = m_descriptorSetLayoutBuilder.build(vk, vkDevice);
+		if (!m_uniformInfos.empty())
+			descriptorPool = m_descriptorPoolBuilder.build(vk, vkDevice, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+		else
+		{
+			const VkDescriptorPoolSize 			poolSizeCount 	= { vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1 };
+			const VkDescriptorPoolCreateInfo	createInfo 		=
+			{
+				VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+				DE_NULL,
+				VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
+				1u,
+				1u,
+				&poolSizeCount,
+			};
+
+			descriptorPool = createDescriptorPool(vk, vkDevice, &createInfo);
+		}
+
+		const VkDescriptorSetAllocateInfo allocInfo =
+		{
+			VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+			DE_NULL,
+			*descriptorPool,
+			1u,
+			&*descriptorSetLayout
+		};
+
+		descriptorSet = allocateDescriptorSet(vk, vkDevice, &allocInfo);
+
+		// Update descriptors
+		{
+			vk::DescriptorSetUpdateBuilder descriptorSetUpdateBuilder;
+
+			uploadUniforms(descriptorSetUpdateBuilder, *descriptorSet);
+
+			descriptorSetUpdateBuilder.update(vk, vkDevice);
+		}
+	}
+
+	// Create pipeline layout
+	{
+		const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			(VkPipelineLayoutCreateFlags)0,						// VkPipelineLayoutCreateFlags	flags;
+			1,													// deUint32						descriptorSetCount;
+			&*descriptorSetLayout,								// const VkDescriptorSetLayout*	pSetLayouts;
+			0u,													// deUint32						pushConstantRangeCount;
+			DE_NULL												// const VkPushConstantRange*	pPushConstantRanges;
+		};
+
+		pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	// Create shaders
+	{
+		vertexShaderModule		= createShaderModule(vk, vkDevice, ctx.getBinaryCollection().get("vert"), 0);
+		fragmentShaderModule	= createShaderModule(vk, vkDevice, ctx.getBinaryCollection().get("frag"), 0);
+
+		if (useGeometryShader)
+		{
+			geometryShaderModule = createShaderModule(vk, vkDevice, ctx.getBinaryCollection().get("geom"), 0);
+		}
+	}
+
+	// Create pipeline
+	{
+		std::vector<VkPipelineShaderStageCreateInfo> shaderStageParams;
+
+		const VkPipelineShaderStageCreateInfo vertexShaderStageParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+			DE_NULL,													// const void*							pNext;
+			(VkPipelineShaderStageCreateFlags)0,						// VkPipelineShaderStageCreateFlags		flags;
+			VK_SHADER_STAGE_VERTEX_BIT,									// VkShaderStageFlagBits				stage;
+			*vertexShaderModule,										// VkShaderModule						module;
+			"main",														// const char*							pName;
+			DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+		};
+
+		const VkPipelineShaderStageCreateInfo fragmentShaderStageParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+			DE_NULL,													// const void*							pNext;
+			(VkPipelineShaderStageCreateFlags)0,						// VkPipelineShaderStageCreateFlags		flags;
+			VK_SHADER_STAGE_FRAGMENT_BIT,								// VkShaderStageFlagBits				stage;
+			*fragmentShaderModule,										// VkShaderModule						module;
+			"main",														// const char*							pName;
+			DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+		};
+
+		shaderStageParams.push_back(vertexShaderStageParams);
+		shaderStageParams.push_back(fragmentShaderStageParams);
+
+		if (useGeometryShader)
+		{
+			const VkPipelineShaderStageCreateInfo geometryShaderStageParams =
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				(VkPipelineShaderStageCreateFlags)0,						// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_GEOMETRY_BIT,								// VkShaderStageFlagBits				stage;
+				*geometryShaderModule,										// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			};
+
+			shaderStageParams.push_back(geometryShaderStageParams);
+		}
+
+		const VkPipelineVertexInputStateCreateInfo vertexInputStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,	// VkStructureType								sType;
+			DE_NULL,													// const void*									pNext;
+			(VkPipelineVertexInputStateCreateFlags)0,					// VkPipelineVertexInputStateCreateFlags		flags;
+			(deUint32)m_vertexBindingDescriptions.size(),				// deUint32										bindingCount;
+			&m_vertexBindingDescriptions[0],							// const VkVertexInputBindingDescription*		pVertexBindingDescriptions;
+			(deUint32)m_vertexAttributeDescriptions.size(),				// deUint32										attributeCount;
+			&m_vertexAttributeDescriptions[0],							// const VkVertexInputAttributeDescription*		pVertexAttributeDescriptions;
+		};
+
+		const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			(VkPipelineInputAssemblyStateCreateFlags)0,						// VkPipelineInputAssemblyStateCreateFlags	flags;
+			VK_PRIMITIVE_TOPOLOGY_POINT_LIST,								// VkPrimitiveTopology						topology;
+			DE_FALSE														// VkBool32									primitiveRestartEnable;
+		};
+
+		const VkViewport viewport =
+		{
+			0.0f,						// float	originX;
+			0.0f,						// float	originY;
+			(float)renderSize.x(),		// float	width;
+			(float)renderSize.y(),		// float	height;
+			0.0f,						// float	minDepth;
+			1.0f						// float	maxDepth;
+		};
+
+		const VkRect2D scissor =
+		{
+			{
+				0u,						// deUint32	x;
+				0u,						// deUint32	y;
+			},							// VkOffset2D	offset;
+			{
+				renderSize.x(),			// deUint32	width;
+				renderSize.y(),			// deUint32	height;
+			},							// VkExtent2D	extent;
+		};
+
+		const VkPipelineViewportStateCreateInfo viewportStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,	// VkStructureType										sType;
+			DE_NULL,												// const void*											pNext;
+			0u,														// VkPipelineViewportStateCreateFlags					flags;
+			1u,														// deUint32												viewportCount;
+			&viewport,												// const VkViewport*									pViewports;
+			1u,														// deUint32												scissorCount;
+			&scissor												// const VkRect2D*										pScissors;
+		};
+
+		const VkPipelineRasterizationStateCreateInfo rasterStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// VkStructureType								sType;
+			DE_NULL,														// const void*									pNext;
+			(VkPipelineRasterizationStateCreateFlags)0u,					// VkPipelineRasterizationStateCreateFlags		flags;
+			VK_FALSE,														// VkBool32										depthClipEnable;
+			VK_FALSE,														// VkBool32										rasterizerDiscardEnable;
+			VK_POLYGON_MODE_FILL,											// VkPolygonMode								polygonMode;
+			VK_CULL_MODE_NONE,												// VkCullModeFlags								cullMode;
+			VK_FRONT_FACE_COUNTER_CLOCKWISE,								// VkFrontFace									frontFace;
+			VK_FALSE,														// VkBool32										depthBiasEnable;
+			0.0f,															// float										depthBias;
+			0.0f,															// float										depthBiasClamp;
+			0.0f,															// float										slopeScaledDepthBias;
+			1.0f															// float										lineWidth;
+		};
+
+		const VkPipelineMultisampleStateCreateInfo multisampleStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineMultisampleStateCreateFlags	flags;
+			VK_SAMPLE_COUNT_1_BIT,											// VkSampleCountFlagBits					rasterizationSamples;
+			VK_FALSE,														// VkBool32									sampleShadingEnable;
+			0.0f,															// float									minSampleShading;
+			DE_NULL,														// const VkSampleMask*						pSampleMask;
+			VK_FALSE,														// VkBool32									alphaToCoverageEnable;
+			VK_FALSE														// VkBool32									alphaToOneEnable;
+		};
+
+		const VkPipelineColorBlendStateCreateInfo colorBlendStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,		// VkStructureType								sType;
+			DE_NULL,														// const void*									pNext;
+			(VkPipelineColorBlendStateCreateFlags)0,						// VkPipelineColorBlendStateCreateFlags			flags;
+			VK_FALSE,														// VkBool32										logicOpEnable;
+			VK_LOGIC_OP_COPY,												// VkLogicOp									logicOp;
+			(deUint32)colorBlendAttachmentStates.size(),					// deUint32										attachmentCount;
+			&colorBlendAttachmentStates[0],									// const VkPipelineColorBlendAttachmentState*	pAttachments;
+			{ 0.0f, 0.0f, 0.0f, 0.0f }										// float										blendConstants[4];
+		};
+
+		const VkPipelineDynamicStateCreateInfo dynamicStateInfo =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,		// VkStructureType									sType;
+			DE_NULL,													// const void*										pNext;
+			(VkPipelineDynamicStateCreateFlags)0,						// VkPipelineDynamicStateCreateFlags				flags;
+			0u,															// deUint32											dynamicStateCount;
+			DE_NULL														// const VkDynamicState*							pDynamicStates;
+		};
+
+		const VkGraphicsPipelineCreateInfo graphicsPipelineParams =
+		{
+			VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,	// VkStructureType									sType;
+			DE_NULL,											// const void*										pNext;
+			(VkPipelineCreateFlags)0,							// VkPipelineCreateFlags							flags;
+			(deUint32)shaderStageParams.size(),					// deUint32											stageCount;
+			&shaderStageParams[0],								// const VkPipelineShaderStageCreateInfo*			pStages;
+			&vertexInputStateParams,							// const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+			&inputAssemblyStateParams,							// const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+			DE_NULL,											// const VkPipelineTessellationStateCreateInfo*		pTessellationState;
+			&viewportStateParams,								// const VkPipelineViewportStateCreateInfo*			pViewportState;
+			&rasterStateParams,									// const VkPipelineRasterizationStateCreateInfo*	pRasterizationState;
+			&multisampleStateParams,							// const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+			DE_NULL,											// const VkPipelineDepthStencilStateCreateInfo*		pDepthStencilState;
+			&colorBlendStateParams,								// const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+			&dynamicStateInfo,									// const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+			*pipelineLayout,									// VkPipelineLayout									layout;
+			*renderPass,										// VkRenderPass										renderPass;
+			0u,													// deUint32											subpass;
+			0u,													// VkPipeline										basePipelineHandle;
+			0u													// deInt32											basePipelineIndex;
+		};
+
+		graphicsPipeline = createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,			// VkStructureType		sType;
+			DE_NULL,											// const void*			pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,				// VkCmdPoolCreateFlags	flags;
+			queueFamilyIndex,									// deUint32				queueFamilyIndex;
+		};
+
+		cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*cmdPool,										// VkCmdPool				cmdPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCmdBufferLevel			level;
+			1												// deUint32					bufferCount;
+		};
+
+		const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType				sType;
+			DE_NULL,										// const void*					pNext;
+			0u,												// VkCmdBufferOptimizeFlags		flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,
+		};
+
+		const VkRenderPassBeginInfo renderPassBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,				// VkStructureType		sType;
+			DE_NULL,												// const void*			pNext;
+			*renderPass,											// VkRenderPass			renderPass;
+			*framebuffer,											// VkFramebuffer		framebuffer;
+			{ { 0, 0 }, { renderSize.x(), renderSize.y() } },		// VkRect2D				renderArea;
+			(deUint32)attachmentClearValues.size(),					// deUint32				attachmentCount;
+			&attachmentClearValues[0]								// const VkClearValue*	pAttachmentClearValues;
+		};
+
+		cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferParams);
+
+		VK_CHECK(vk.beginCommandBuffer(*cmdBuffer, &cmdBufferBeginInfo));
+
+		vk.cmdPipelineBarrier(*cmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0,
+							  0, (const VkMemoryBarrier*)DE_NULL,
+							  0, (const VkBufferMemoryBarrier*)DE_NULL,
+							  (deUint32)colorImagePreRenderBarriers.size(), colorImagePreRenderBarriers.empty() ? DE_NULL : &colorImagePreRenderBarriers[0]);
+		vk.cmdBeginRenderPass(*cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+		vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *graphicsPipeline);
+		vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &*descriptorSet, 0u, DE_NULL);
+
+		const deUint32 numberOfVertexAttributes = (deUint32)m_vertexBuffers.size();
+
+		std::vector<VkDeviceSize> offsets(numberOfVertexAttributes, 0);
+
+		std::vector<VkBuffer> buffers(numberOfVertexAttributes);
+		for (size_t i = 0; i < numberOfVertexAttributes; i++)
+		{
+			buffers[i] = m_vertexBuffers[i].get()->get();
+		}
+
+		vk.cmdBindVertexBuffers(*cmdBuffer, 0, numberOfVertexAttributes, &buffers[0], &offsets[0]);
+		vk.cmdDraw(*cmdBuffer, (deUint32)positions.size(), 1u, 0u, 0u);
+
+		vk.cmdEndRenderPass(*cmdBuffer);
+		vk.cmdPipelineBarrier(*cmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0,
+							  0, (const VkMemoryBarrier*)DE_NULL,
+							  0, (const VkBufferMemoryBarrier*)DE_NULL,
+							  (deUint32)colorImagePostRenderBarriers.size(), colorImagePostRenderBarriers.empty() ? DE_NULL : &colorImagePostRenderBarriers[0]);
+
+		VK_CHECK(vk.endCommandBuffer(*cmdBuffer));
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+
+		fence = createFence(vk, vkDevice, &fenceParams);
+	}
+
+	// Execute Draw
+	{
+		const VkSubmitInfo submitInfo =
+		{
+			VK_STRUCTURE_TYPE_SUBMIT_INFO,			// sType
+			DE_NULL,								// pNext
+			0u,										// waitSemaphoreCount
+			DE_NULL,								// pWaitSemaphores
+			(const VkPipelineStageFlags*)DE_NULL,
+			1u,										// commandBufferCount
+			&cmdBuffer.get(),						// pCommandBuffers
+			0u,										// signalSemaphoreCount
+			DE_NULL									// pSignalSemaphores
+		};
+
+		VK_CHECK(vk.resetFences(vkDevice, 1, &fence.get()));
+		VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *fence));
+		VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), DE_TRUE, ~(0ull) /* infinity*/));
+	}
+
+	// Read back result and output
+	{
+		const VkDeviceSize imageSizeBytes = (VkDeviceSize)(4 * sizeof(deUint32) * renderSize.x() * renderSize.y());
+		const VkBufferCreateInfo readImageBufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			imageSizeBytes,								// VkDeviceSize			size;
+			VK_BUFFER_USAGE_TRANSFER_DST_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyCount;
+			&queueFamilyIndex,							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		// constants for image copy
+
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,		// VkCmdPoolCreateFlags	flags;
+			queueFamilyIndex							// deUint32				queueFamilyIndex;
+		};
+
+		Move<VkCommandPool>	copyCmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+
+		const VkCommandBufferAllocateInfo cmdBufferParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*copyCmdPool,									// VkCmdPool				cmdPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCmdBufferLevel			level;
+			1u												// deUint32					bufferCount;
+		};
+
+		const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+			DE_NULL,										// const void*						pNext;
+			0u,												// VkCmdBufferOptimizeFlags			flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,
+		};
+
+		const VkBufferImageCopy copyParams =
+		{
+			0u,											// VkDeviceSize			bufferOffset;
+			(deUint32)renderSize.x(),					// deUint32				bufferRowLength;
+			(deUint32)renderSize.y(),					// deUint32				bufferImageHeight;
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,				// VkImageAspect		aspect;
+				0u,										// deUint32				mipLevel;
+				0u,										// deUint32				arraySlice;
+				1u,										// deUint32				arraySize;
+			},											// VkImageSubresource	imageSubresource;
+			{ 0u, 0u, 0u },								// VkOffset3D			imageOffset;
+			{ renderSize.x(), renderSize.y(), 1u }		// VkExtent3D			imageExtent;
+		};
+
+		// Read back pixels.
+		for (int outNdx = 0; outNdx < (int)m_shaderSpec.outputs.size(); ++outNdx)
+		{
+			const Symbol&				output			= m_shaderSpec.outputs[outNdx];
+			const int					outSize			= output.varType.getScalarSize();
+			const int					outVecSize		= glu::getDataTypeNumComponents(output.varType.getBasicType());
+			const int					outNumLocs		= glu::getDataTypeNumLocations(output.varType.getBasicType());
+			deUint32*					dstPtrBase		= static_cast<deUint32*>(outputs[outNdx]);
+			const int					outLocation		= de::lookup(m_outputLayout.locationMap, output.name);
+
+			for (int locNdx = 0; locNdx < outNumLocs; ++locNdx)
+			{
+				tcu::TextureLevel			tmpBuf;
+				const tcu::TextureFormat	format = getRenderbufferFormatForOutput(output.varType, false);
+				const tcu::TextureFormat	readFormat (tcu::TextureFormat::RGBA, format.type);
+				const Unique<VkBuffer>		readImageBuffer(createBuffer(vk, vkDevice, &readImageBufferParams));
+				const de::UniquePtr<Allocation> readImageBufferMemory(memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *readImageBuffer), MemoryRequirement::HostVisible));
+
+				VK_CHECK(vk.bindBufferMemory(vkDevice, *readImageBuffer, readImageBufferMemory->getMemory(), readImageBufferMemory->getOffset()));
+
+				// Copy image to buffer
+				{
+					Move<VkCommandBuffer> copyCmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferParams);
+
+					const VkSubmitInfo submitInfo =
+					{
+						VK_STRUCTURE_TYPE_SUBMIT_INFO,
+						DE_NULL,
+						0u,
+						(const VkSemaphore*)DE_NULL,
+						(const VkPipelineStageFlags*)DE_NULL,
+						1u,
+						&copyCmdBuffer.get(),
+						0u,
+						(const VkSemaphore*)DE_NULL,
+					};
+
+					VK_CHECK(vk.beginCommandBuffer(*copyCmdBuffer, &cmdBufferBeginInfo));
+					vk.cmdCopyImageToBuffer(*copyCmdBuffer, colorImages[outLocation + locNdx].get()->get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *readImageBuffer, 1u, &copyParams);
+					VK_CHECK(vk.endCommandBuffer(*copyCmdBuffer));
+
+					VK_CHECK(vk.resetFences(vkDevice, 1, &fence.get()));
+					VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *fence));
+					VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), true, ~(0ull) /* infinity */));
+				}
+
+				const VkMappedMemoryRange range =
+				{
+					VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,	// VkStructureType	sType;
+					DE_NULL,								// const void*		pNext;
+					readImageBufferMemory->getMemory(),		// VkDeviceMemory	mem;
+					0,										// VkDeviceSize		offset;
+					imageSizeBytes,							// VkDeviceSize		size;
+				};
+
+				VK_CHECK(vk.invalidateMappedMemoryRanges(vkDevice, 1u, &range));
+
+				tmpBuf.setStorage(readFormat, renderSize.x(), renderSize.y());
+
+				const tcu::TextureFormat resultFormat(tcu::TextureFormat::RGBA, format.type);
+				const tcu::ConstPixelBufferAccess resultAccess(resultFormat, renderSize.x(), renderSize.y(), 1, readImageBufferMemory->getHostPtr());
+
+				tcu::copy(tmpBuf.getAccess(), resultAccess);
+
+				if (outSize == 4 && outNumLocs == 1)
+					deMemcpy(dstPtrBase, tmpBuf.getAccess().getDataPtr(), numValues * outVecSize * sizeof(deUint32));
+				else
+				{
+					for (int valNdx = 0; valNdx < numValues; valNdx++)
+					{
+						const deUint32* srcPtr = (const deUint32*)tmpBuf.getAccess().getDataPtr() + valNdx * 4;
+						deUint32*		dstPtr = &dstPtrBase[outSize * valNdx + outVecSize * locNdx];
+						deMemcpy(dstPtr, srcPtr, outVecSize * sizeof(deUint32));
+					}
+				}
+			}
+		}
+	}
+}
+
+// VertexShaderExecutor
+
+class VertexShaderExecutor : public FragmentOutExecutor
+{
+public:
+								VertexShaderExecutor	(const ShaderSpec& shaderSpec, glu::ShaderType shaderType);
+	virtual						~VertexShaderExecutor	(void);
+
+	virtual void				log						(tcu::TestLog& dst) const { /* TODO */ (void)dst;}
+
+	virtual void				setShaderSources		(SourceCollections& programCollection) const;
+
+};
+
+VertexShaderExecutor::VertexShaderExecutor (const ShaderSpec& shaderSpec, glu::ShaderType shaderType)
+	: FragmentOutExecutor		(shaderSpec, shaderType)
+{
+}
+
+VertexShaderExecutor::~VertexShaderExecutor (void)
+{
+}
+
+void VertexShaderExecutor::setShaderSources (SourceCollections& programCollection) const
+{
+	programCollection.glslSources.add("vert") << glu::VertexSource(generateVertexShader(m_shaderSpec, "a_", "vtx_out_"));
+	/* \todo [2015-09-11 hegedusd] set useIntOutputs parameter if needed. */
+	programCollection.glslSources.add("frag") << glu::FragmentSource(generatePassthroughFragmentShader(m_shaderSpec, false, m_outputLayout.locationMap, "vtx_out_", "o_"));
+}
+
+// GeometryShaderExecutor
+
+class GeometryShaderExecutor : public FragmentOutExecutor
+{
+public:
+								GeometryShaderExecutor	(const ShaderSpec& shaderSpec, glu::ShaderType shaderType);
+	virtual						~GeometryShaderExecutor	(void);
+
+	virtual void				log						(tcu::TestLog& dst) const	{ /* TODO */ (void)dst; }
+
+	virtual void				setShaderSources		(SourceCollections& programCollection) const;
+
+};
+
+GeometryShaderExecutor::GeometryShaderExecutor (const ShaderSpec& shaderSpec, glu::ShaderType shaderType)
+	: FragmentOutExecutor		(shaderSpec, shaderType)
+{
+}
+
+GeometryShaderExecutor::~GeometryShaderExecutor (void)
+{
+}
+
+void GeometryShaderExecutor::setShaderSources (SourceCollections& programCollection) const
+{
+	programCollection.glslSources.add("vert") << glu::VertexSource(generatePassthroughVertexShader(m_shaderSpec.inputs, "a_", "vtx_out_"));
+
+	programCollection.glslSources.add("geom") << glu::GeometrySource(generateGeometryShader(m_shaderSpec, "vtx_out_", "geom_out_"));
+
+	/* \todo [2015-09-18 rsipka] set useIntOutputs parameter if needed. */
+	programCollection.glslSources.add("frag") << glu::FragmentSource(generatePassthroughFragmentShader(m_shaderSpec, false, m_outputLayout.locationMap, "geom_out_", "o_"));
+}
+
+// FragmentShaderExecutor
+
+class FragmentShaderExecutor : public FragmentOutExecutor
+{
+public:
+								FragmentShaderExecutor	(const ShaderSpec& shaderSpec, glu::ShaderType shaderType);
+	virtual						~FragmentShaderExecutor (void);
+
+	virtual void				log						(tcu::TestLog& dst) const { /* TODO */ (void)dst; }
+
+	virtual void				setShaderSources		(SourceCollections& programCollection) const;
+
+};
+
+FragmentShaderExecutor::FragmentShaderExecutor (const ShaderSpec& shaderSpec, glu::ShaderType shaderType)
+	: FragmentOutExecutor		(shaderSpec, shaderType)
+{
+}
+
+FragmentShaderExecutor::~FragmentShaderExecutor (void)
+{
+}
+
+void FragmentShaderExecutor::setShaderSources (SourceCollections& programCollection) const
+{
+	programCollection.glslSources.add("vert") << glu::VertexSource(generatePassthroughVertexShader(m_shaderSpec.inputs, "a_", "vtx_out_"));
+	/* \todo [2015-09-11 hegedusd] set useIntOutputs parameter if needed. */
+	programCollection.glslSources.add("frag") << glu::FragmentSource(generateFragmentShader(m_shaderSpec, false, m_outputLayout.locationMap, "vtx_out_", "o_"));
+}
+
+// Shared utilities for compute and tess executors
+
+static deUint32 getVecStd430ByteAlignment (glu::DataType type)
+{
+	switch (glu::getDataTypeScalarSize(type))
+	{
+		case 1:		return 4u;
+		case 2:		return 8u;
+		case 3:		return 16u;
+		case 4:		return 16u;
+		default:
+			DE_ASSERT(false);
+			return 0u;
+	}
+}
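+
+// For illustration only: the std430 base alignments the helper above assumes
+// (a vec3 occupies a 16-byte slot, like a vec4):
+//
+//	getVecStd430ByteAlignment(glu::TYPE_FLOAT)      == 4u
+//	getVecStd430ByteAlignment(glu::TYPE_FLOAT_VEC2) == 8u
+//	getVecStd430ByteAlignment(glu::TYPE_FLOAT_VEC3) == 16u
+//	getVecStd430ByteAlignment(glu::TYPE_FLOAT_VEC4) == 16u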
+
+class BufferIoExecutor : public ShaderExecutor
+{
+public:
+							BufferIoExecutor	(const ShaderSpec& shaderSpec, glu::ShaderType shaderType);
+	virtual					~BufferIoExecutor	(void);
+
+	virtual void			log					(tcu::TestLog& dst) const	{ /* TODO */ (void)dst; }
+
+protected:
+	enum
+	{
+		INPUT_BUFFER_BINDING	= 0,
+		OUTPUT_BUFFER_BINDING	= 1,
+	};
+
+	void					initBuffers			(const Context& ctx, int numValues);
+	VkBuffer				getInputBuffer		(void) const		{ return *m_inputBuffer;					}
+	VkBuffer				getOutputBuffer		(void) const		{ return *m_outputBuffer;					}
+	deUint32				getInputStride		(void) const		{ return getLayoutStride(m_inputLayout);	}
+	deUint32				getOutputStride		(void) const		{ return getLayoutStride(m_outputLayout);	}
+
+	void					uploadInputBuffer	(const Context& ctx, const void* const* inputPtrs, int numValues);
+	void					readOutputBuffer	(const Context& ctx, void* const* outputPtrs, int numValues);
+
+	static void				declareBufferBlocks	(std::ostream& src, const ShaderSpec& spec);
+	static void				generateExecBufferIo(std::ostream& src, const ShaderSpec& spec, const char* invocationNdxName);
+
+protected:
+	Move<VkBuffer>			m_inputBuffer;
+	Move<VkBuffer>			m_outputBuffer;
+
+private:
+	struct VarLayout
+	{
+		deUint32		offset;
+		deUint32		stride;
+		deUint32		matrixStride;
+
+		VarLayout (void) : offset(0), stride(0), matrixStride(0) {}
+	};
+
+	static void				computeVarLayout	(const std::vector<Symbol>& symbols, std::vector<VarLayout>* layout);
+	static deUint32			getLayoutStride		(const vector<VarLayout>& layout);
+
+	static void				copyToBuffer		(const glu::VarType& varType, const VarLayout& layout, int numValues, const void* srcBasePtr, void* dstBasePtr);
+	static void				copyFromBuffer		(const glu::VarType& varType, const VarLayout& layout, int numValues, const void* srcBasePtr, void* dstBasePtr);
+
+	de::MovePtr<Allocation>	m_inputAlloc;
+	de::MovePtr<Allocation>	m_outputAlloc;
+
+	vector<VarLayout>		m_inputLayout;
+	vector<VarLayout>		m_outputLayout;
+};
+
+BufferIoExecutor::BufferIoExecutor (const ShaderSpec& shaderSpec, glu::ShaderType shaderType)
+	: ShaderExecutor (shaderSpec, shaderType)
+{
+	computeVarLayout(m_shaderSpec.inputs, &m_inputLayout);
+	computeVarLayout(m_shaderSpec.outputs, &m_outputLayout);
+}
+
+BufferIoExecutor::~BufferIoExecutor (void)
+{
+}
+
+inline deUint32 BufferIoExecutor::getLayoutStride (const vector<VarLayout>& layout)
+{
+	return layout.empty() ? 0 : layout[0].stride;
+}
+
+void BufferIoExecutor::computeVarLayout (const std::vector<Symbol>& symbols, std::vector<VarLayout>* layout)
+{
+	deUint32	maxAlignment	= 0;
+	deUint32	curOffset		= 0;
+
+	DE_ASSERT(layout != DE_NULL);
+	DE_ASSERT(layout->empty());
+	layout->resize(symbols.size());
+
+	for (size_t varNdx = 0; varNdx < symbols.size(); varNdx++)
+	{
+		const Symbol&		symbol		= symbols[varNdx];
+		const glu::DataType	basicType	= symbol.varType.getBasicType();
+		VarLayout&			layoutEntry	= (*layout)[varNdx];
+
+		if (glu::isDataTypeScalarOrVector(basicType))
+		{
+			const deUint32	alignment	= getVecStd430ByteAlignment(basicType);
+			const deUint32	size		= (deUint32)glu::getDataTypeScalarSize(basicType) * (int)sizeof(deUint32);
+
+			curOffset		= (deUint32)deAlign32((int)curOffset, (int)alignment);
+			maxAlignment	= de::max(maxAlignment, alignment);
+
+			layoutEntry.offset			= curOffset;
+			layoutEntry.matrixStride	= 0;
+
+			curOffset += size;
+		}
+		else if (glu::isDataTypeMatrix(basicType))
+		{
+			const int				numVecs			= glu::getDataTypeMatrixNumColumns(basicType);
+			const glu::DataType		vecType			= glu::getDataTypeFloatVec(glu::getDataTypeMatrixNumRows(basicType));
+			const deUint32			vecAlignment	= getVecStd430ByteAlignment(vecType);
+
+			curOffset		= (deUint32)deAlign32((int)curOffset, (int)vecAlignment);
+			maxAlignment	= de::max(maxAlignment, vecAlignment);
+
+			layoutEntry.offset			= curOffset;
+			layoutEntry.matrixStride	= vecAlignment;
+
+			curOffset += vecAlignment*numVecs;
+		}
+		else
+			DE_ASSERT(false);
+	}
+
+	{
+		const deUint32	totalSize	= (deUint32)deAlign32(curOffset, maxAlignment);
+
+		for (vector<VarLayout>::iterator varIter = layout->begin(); varIter != layout->end(); ++varIter)
+			varIter->stride = totalSize;
+	}
+}
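+
+// Worked example (hypothetical symbols, not taken from any test): for
+// { float a; vec3 b; } the loop above yields
+//	a: offset 0,  matrixStride 0
+//	b: offset 16, matrixStride 0
+// with maxAlignment == 16, so the common stride written to every entry is
+// deAlign32(16 + 12, 16) == 32 bytes per value.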
+
+void BufferIoExecutor::declareBufferBlocks (std::ostream& src, const ShaderSpec& spec)
+{
+	// Input struct
+	if (!spec.inputs.empty())
+	{
+		glu::StructType inputStruct("Inputs");
+		for (vector<Symbol>::const_iterator symIter = spec.inputs.begin(); symIter != spec.inputs.end(); ++symIter)
+			inputStruct.addMember(symIter->name.c_str(), symIter->varType);
+		src << glu::declare(&inputStruct) << ";\n";
+	}
+
+	// Output struct
+	{
+		glu::StructType outputStruct("Outputs");
+		for (vector<Symbol>::const_iterator symIter = spec.outputs.begin(); symIter != spec.outputs.end(); ++symIter)
+			outputStruct.addMember(symIter->name.c_str(), symIter->varType);
+		src << glu::declare(&outputStruct) << ";\n";
+	}
+
+	src << "\n";
+
+	if (!spec.inputs.empty())
+	{
+		src	<< "layout(set = 0, binding = " << int(INPUT_BUFFER_BINDING) << ", std430) buffer InBuffer\n"
+			<< "{\n"
+			<< "	Inputs inputs[];\n"
+			<< "};\n";
+	}
+
+	src	<< "layout(set = 0, binding = " << int(OUTPUT_BUFFER_BINDING) << ", std430) buffer OutBuffer\n"
+		<< "{\n"
+		<< "	Outputs outputs[];\n"
+		<< "};\n"
+		<< "\n";
+}
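+
+// Sketch of the GLSL emitted by declareBufferBlocks() for a hypothetical spec
+// with inputs { float in0; } and outputs { float out0; } (precision qualifiers
+// and exact line breaks omitted):
+//
+//	struct Inputs { float in0; };
+//	struct Outputs { float out0; };
+//
+//	layout(set = 0, binding = 0, std430) buffer InBuffer { Inputs inputs[]; };
+//	layout(set = 0, binding = 1, std430) buffer OutBuffer { Outputs outputs[]; };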
+
+void BufferIoExecutor::generateExecBufferIo (std::ostream& src, const ShaderSpec& spec, const char* invocationNdxName)
+{
+	for (vector<Symbol>::const_iterator symIter = spec.inputs.begin(); symIter != spec.inputs.end(); ++symIter)
+		src << "\t" << glu::declare(symIter->varType, symIter->name) << " = inputs[" << invocationNdxName << "]." << symIter->name << ";\n";
+
+	for (vector<Symbol>::const_iterator symIter = spec.outputs.begin(); symIter != spec.outputs.end(); ++symIter)
+		src << "\t" << glu::declare(symIter->varType, symIter->name) << ";\n";
+
+	src << "\n";
+
+	{
+		std::istringstream	opSrc	(spec.source);
+		std::string			line;
+
+		while (std::getline(opSrc, line))
+			src << "\t" << line << "\n";
+	}
+
+	src << "\n";
+	for (vector<Symbol>::const_iterator symIter = spec.outputs.begin(); symIter != spec.outputs.end(); ++symIter)
+		src << "\toutputs[" << invocationNdxName << "]." << symIter->name << " = " << symIter->name << ";\n";
+}
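+
+// For the same hypothetical spec with source "out0 = in0 * 2.0;", the body
+// generated by generateExecBufferIo(src, spec, "invocationNdx") is roughly:
+//
+//	float in0 = inputs[invocationNdx].in0;
+//	float out0;
+//
+//	out0 = in0 * 2.0;
+//
+//	outputs[invocationNdx].out0 = out0;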
+
+void BufferIoExecutor::copyToBuffer (const glu::VarType& varType, const VarLayout& layout, int numValues, const void* srcBasePtr, void* dstBasePtr)
+{
+	if (varType.isBasicType())
+	{
+		const glu::DataType		basicType		= varType.getBasicType();
+		const bool				isMatrix		= glu::isDataTypeMatrix(basicType);
+		const int				scalarSize		= glu::getDataTypeScalarSize(basicType);
+		const int				numVecs			= isMatrix ? glu::getDataTypeMatrixNumColumns(basicType) : 1;
+		const int				numComps		= scalarSize / numVecs;
+
+		for (int elemNdx = 0; elemNdx < numValues; elemNdx++)
+		{
+			for (int vecNdx = 0; vecNdx < numVecs; vecNdx++)
+			{
+				const int		srcOffset		= (int)sizeof(deUint32) * (elemNdx * scalarSize + vecNdx * numComps);
+				const int		dstOffset		= layout.offset + layout.stride * elemNdx + (isMatrix ? layout.matrixStride * vecNdx : 0);
+				const deUint8*	srcPtr			= (const deUint8*)srcBasePtr + srcOffset;
+				deUint8*		dstPtr			= (deUint8*)dstBasePtr + dstOffset;
+
+				deMemcpy(dstPtr, srcPtr, sizeof(deUint32) * numComps);
+			}
+		}
+	}
+	else
+		throw tcu::InternalError("Unsupported type");
+}
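+
+// Illustration with assumed values: copying a vec3 input with numValues == 2
+// and a layout of { offset 0, stride 16 } moves 12 bytes per value and leaves
+// the std430 padding untouched:
+//	src (packed):  [x0 y0 z0][x1 y1 z1]
+//	dst (std430):  [x0 y0 z0 ..][x1 y1 z1 ..]    (".." = 4 bytes of padding)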
+
+void BufferIoExecutor::copyFromBuffer (const glu::VarType& varType, const VarLayout& layout, int numValues, const void* srcBasePtr, void* dstBasePtr)
+{
+	if (varType.isBasicType())
+	{
+		const glu::DataType		basicType		= varType.getBasicType();
+		const bool				isMatrix		= glu::isDataTypeMatrix(basicType);
+		const int				scalarSize		= glu::getDataTypeScalarSize(basicType);
+		const int				numVecs			= isMatrix ? glu::getDataTypeMatrixNumColumns(basicType) : 1;
+		const int				numComps		= scalarSize / numVecs;
+
+		for (int elemNdx = 0; elemNdx < numValues; elemNdx++)
+		{
+			for (int vecNdx = 0; vecNdx < numVecs; vecNdx++)
+			{
+				const int		srcOffset		= layout.offset + layout.stride * elemNdx + (isMatrix ? layout.matrixStride * vecNdx : 0);
+				const int		dstOffset		= (int)sizeof(deUint32) * (elemNdx * scalarSize + vecNdx * numComps);
+				const deUint8*	srcPtr			= (const deUint8*)srcBasePtr + srcOffset;
+				deUint8*		dstPtr			= (deUint8*)dstBasePtr + dstOffset;
+
+				deMemcpy(dstPtr, srcPtr, sizeof(deUint32) * numComps);
+			}
+		}
+	}
+	else
+		throw tcu::InternalError("Unsupported type");
+}
+
+void BufferIoExecutor::uploadInputBuffer (const Context& ctx, const void* const* inputPtrs, int numValues)
+{
+	const VkDevice			vkDevice			= ctx.getDevice();
+	const DeviceInterface&	vk					= ctx.getDeviceInterface();
+
+	const deUint32			inputStride			= getLayoutStride(m_inputLayout);
+	const int				inputBufferSize		= inputStride * numValues;
+
+	if (inputBufferSize == 0)
+		return; // No inputs
+
+	DE_ASSERT(m_shaderSpec.inputs.size() == m_inputLayout.size());
+	for (size_t inputNdx = 0; inputNdx < m_shaderSpec.inputs.size(); ++inputNdx)
+	{
+		const glu::VarType&		varType		= m_shaderSpec.inputs[inputNdx].varType;
+		const VarLayout&		layout		= m_inputLayout[inputNdx];
+
+		copyToBuffer(varType, layout, numValues, inputPtrs[inputNdx], m_inputAlloc->getHostPtr());
+	}
+
+	flushMappedMemoryRange(vk, vkDevice, m_inputAlloc->getMemory(), m_inputAlloc->getOffset(), inputBufferSize);
+}
+
+void BufferIoExecutor::readOutputBuffer (const Context& ctx, void* const* outputPtrs, int numValues)
+{
+	const VkDevice			vkDevice			= ctx.getDevice();
+	const DeviceInterface&	vk					= ctx.getDeviceInterface();
+
+	const deUint32			outputStride		= getLayoutStride(m_outputLayout);
+	const int				outputBufferSize	= numValues * outputStride;
+
+	DE_ASSERT(outputBufferSize > 0); // At least some outputs are required.
+
+	invalidateMappedMemoryRange(vk, vkDevice, m_outputAlloc->getMemory(), m_outputAlloc->getOffset(), outputBufferSize);
+
+	DE_ASSERT(m_shaderSpec.outputs.size() == m_outputLayout.size());
+	for (size_t outputNdx = 0; outputNdx < m_shaderSpec.outputs.size(); ++outputNdx)
+	{
+		const glu::VarType&		varType		= m_shaderSpec.outputs[outputNdx].varType;
+		const VarLayout&		layout		= m_outputLayout[outputNdx];
+
+		copyFromBuffer(varType, layout, numValues, m_outputAlloc->getHostPtr(), outputPtrs[outputNdx]);
+	}
+}
+
+void BufferIoExecutor::initBuffers (const Context& ctx, int numValues)
+{
+	const deUint32				inputStride			= getLayoutStride(m_inputLayout);
+	const deUint32				outputStride		= getLayoutStride(m_outputLayout);
+	// Avoid creating zero-sized buffer/memory
+	const size_t				inputBufferSize		= numValues * inputStride ? (numValues * inputStride) : 1;
+	const size_t				outputBufferSize	= numValues * outputStride;
+
+	// Upload data to buffer
+	const VkDevice				vkDevice			= ctx.getDevice();
+	const DeviceInterface&		vk					= ctx.getDeviceInterface();
+	const deUint32				queueFamilyIndex	= ctx.getUniversalQueueFamilyIndex();
+	Allocator&					memAlloc			= ctx.getDefaultAllocator();
+
+	const VkBufferCreateInfo inputBufferParams =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+		DE_NULL,									// const void*			pNext;
+		0u,											// VkBufferCreateFlags	flags;
+		inputBufferSize,							// VkDeviceSize			size;
+		VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+		VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+		1u,											// deUint32				queueFamilyCount;
+		&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+	};
+
+	m_inputBuffer = createBuffer(vk, vkDevice, &inputBufferParams);
+	m_inputAlloc = memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_inputBuffer), MemoryRequirement::HostVisible);
+
+	VK_CHECK(vk.bindBufferMemory(vkDevice, *m_inputBuffer, m_inputAlloc->getMemory(), m_inputAlloc->getOffset()));
+
+	const VkBufferCreateInfo outputBufferParams =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+		DE_NULL,									// const void*			pNext;
+		0u,											// VkBufferCreateFlags	flags;
+		outputBufferSize,							// VkDeviceSize			size;
+		VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+		VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+		1u,											// deUint32				queueFamilyCount;
+		&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+	};
+
+	m_outputBuffer = createBuffer(vk, vkDevice, &outputBufferParams);
+	m_outputAlloc = memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_outputBuffer), MemoryRequirement::HostVisible);
+
+	VK_CHECK(vk.bindBufferMemory(vkDevice, *m_outputBuffer, m_outputAlloc->getMemory(), m_outputAlloc->getOffset()));
+}
+
+// ComputeShaderExecutor
+
+class ComputeShaderExecutor : public BufferIoExecutor
+{
+public:
+						ComputeShaderExecutor	(const ShaderSpec& shaderSpec, glu::ShaderType shaderType);
+	virtual				~ComputeShaderExecutor	(void);
+
+	virtual void		setShaderSources		(SourceCollections& programCollection) const;
+
+	virtual void		execute					(const Context& ctx, int numValues, const void* const* inputs, void* const* outputs);
+
+protected:
+	static std::string	generateComputeShader	(const ShaderSpec& spec);
+};
+
+ComputeShaderExecutor::ComputeShaderExecutor	(const ShaderSpec& shaderSpec, glu::ShaderType shaderType)
+	: BufferIoExecutor	(shaderSpec, shaderType)
+{
+}
+
+ComputeShaderExecutor::~ComputeShaderExecutor	(void)
+{
+}
+
+std::string ComputeShaderExecutor::generateComputeShader (const ShaderSpec& spec)
+{
+	std::ostringstream src;
+	src <<  "#version 310 es\n";
+
+	if (!spec.globalDeclarations.empty())
+		src << spec.globalDeclarations << "\n";
+
+	src << "layout(local_size_x = 1) in;\n"
+		<< "\n";
+
+	declareBufferBlocks(src, spec);
+
+	src << "void main (void)\n"
+		<< "{\n"
+		<< "	uint invocationNdx = gl_NumWorkGroups.x*gl_NumWorkGroups.y*gl_WorkGroupID.z\n"
+		<< "	                   + gl_NumWorkGroups.x*gl_WorkGroupID.y + gl_WorkGroupID.x;\n";
+
+	generateExecBufferIo(src, spec, "invocationNdx");
+
+	src << "}\n";
+
+	return src.str();
+}
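+
+// Note: since local_size_x is 1 and execute() below dispatches
+// vk.cmdDispatch(*cmdBuffer, numToExec, 1, 1), the y/z terms above are zero
+// and invocationNdx reduces to gl_WorkGroupID.x, i.e. one workgroup handles
+// exactly one input/output element.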
+
+void ComputeShaderExecutor::setShaderSources (SourceCollections& programCollection) const
+{
+	programCollection.glslSources.add("compute") << glu::ComputeSource(generateComputeShader(m_shaderSpec));
+}
+
+void ComputeShaderExecutor::execute (const Context& ctx, int numValues, const void* const* inputs, void* const* outputs)
+{
+	checkSupported(ctx, m_shaderType);
+
+	const VkDevice					vkDevice				= ctx.getDevice();
+	const DeviceInterface&			vk						= ctx.getDeviceInterface();
+	const VkQueue					queue					= ctx.getUniversalQueue();
+	const deUint32					queueFamilyIndex		= ctx.getUniversalQueueFamilyIndex();
+	Allocator&						memAlloc				= ctx.getDefaultAllocator();
+
+	Move<VkShaderModule>			computeShaderModule;
+	Move<VkPipeline>				computePipeline;
+	Move<VkPipelineLayout>			pipelineLayout;
+	Move<VkCommandPool>				cmdPool;
+	Move<VkDescriptorPool>			descriptorPool;
+	Move<VkDescriptorSetLayout>		descriptorSetLayout;
+	Move<VkDescriptorSet>			descriptorSet;
+	Move<VkFence>					fence;
+
+	initBuffers(ctx, numValues);
+
+	// Setup input buffer & copy data
+	uploadInputBuffer(ctx, inputs, numValues);
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,										// const void*			pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCmdPoolCreateFlags	flags;
+			queueFamilyIndex								// deUint32				queueFamilyIndex;
+		};
+
+		cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	const VkCommandBufferAllocateInfo cmdBufferParams =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+		DE_NULL,										// const void*				pNext;
+		*cmdPool,										// VkCmdPool				cmdPool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCmdBufferLevel			level;
+		1u												// deUint32					bufferCount;
+	};
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+		DE_NULL,										// const void*						pNext;
+		0u,												// VkCmdBufferOptimizeFlags			flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
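+	// Two storage buffer bindings are declared below: one for the inputs
+	// (INPUT_BUFFER_BINDING) and one for the outputs (OUTPUT_BUFFER_BINDING).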
+	m_descriptorSetLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT);
+	m_descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
+	m_descriptorSetLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT);
+	m_descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
+
+	addUniforms(vkDevice, vk, queue, queueFamilyIndex, memAlloc);
+
+	descriptorSetLayout = m_descriptorSetLayoutBuilder.build(vk, vkDevice);
+	descriptorPool = m_descriptorPoolBuilder.build(vk, vkDevice, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+
+	const VkDescriptorSetAllocateInfo allocInfo =
+	{
+		VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+		DE_NULL,
+		*descriptorPool,
+		1u,
+		&*descriptorSetLayout
+	};
+
+	descriptorSet = allocateDescriptorSet(vk, vkDevice, &allocInfo);
+
+	// Create pipeline layout
+	{
+		const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			(VkPipelineLayoutCreateFlags)0,						// VkPipelineLayoutCreateFlags	flags;
+			1u,													// deUint32						descriptorSetCount;
+			&*descriptorSetLayout,								// const VkDescriptorSetLayout*	pSetLayouts;
+			0u,													// deUint32						pushConstantRangeCount;
+			DE_NULL												// const VkPushConstantRange*	pPushConstantRanges;
+		};
+
+		pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	// Create shaders
+	{
+		computeShaderModule		= createShaderModule(vk, vkDevice, ctx.getBinaryCollection().get("compute"), 0);
+	}
+
+	// Create pipeline
+	{
+		const VkPipelineShaderStageCreateInfo shaderStageParams[1] =
+		{
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				(VkPipelineShaderStageCreateFlags)0u,						// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_COMPUTE_BIT,								// VkShaderStageFlagBits				stage;
+				*computeShaderModule,										// VkShaderModule						shader;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			}
+		};
+
+		const VkComputePipelineCreateInfo computePipelineParams =
+		{
+			VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,		// VkStructureType									sType;
+			DE_NULL,											// const void*										pNext;
+			(VkPipelineCreateFlags)0,							// VkPipelineCreateFlags							flags;
+			*shaderStageParams,									// VkPipelineShaderStageCreateInfo					cs;
+			*pipelineLayout,									// VkPipelineLayout									layout;
+			0u,													// VkPipeline										basePipelineHandle;
+			0u,													// int32_t											basePipelineIndex;
+		};
+
+		computePipeline = createComputePipeline(vk, vkDevice, DE_NULL, &computePipelineParams);
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+		fence = createFence(vk, vkDevice, &fenceParams);
+	}
+
+	const int maxValuesPerInvocation	= ctx.getDeviceProperties().limits.maxComputeWorkGroupSize[0];
+	int					curOffset		= 0;
+	const deUint32		inputStride		= getInputStride();
+	const deUint32		outputStride	= getOutputStride();
+
+	while (curOffset < numValues)
+	{
+		Move<VkCommandBuffer>		cmdBuffer;
+		const int numToExec = de::min(maxValuesPerInvocation, numValues-curOffset);
+
+		// Update descriptors
+		{
+			DescriptorSetUpdateBuilder descriptorSetUpdateBuilder;
+
+			const VkDescriptorBufferInfo outputDescriptorBufferInfo =
+			{
+				*m_outputBuffer,				// VkBuffer			buffer;
+				curOffset * outputStride,		// VkDeviceSize		offset;
+				numToExec * outputStride		// VkDeviceSize		range;
+			};
+
+			descriptorSetUpdateBuilder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding((deUint32)OUTPUT_BUFFER_BINDING), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outputDescriptorBufferInfo);
+
+			if (inputStride)
+			{
+				const VkDescriptorBufferInfo inputDescriptorBufferInfo =
+				{
+					*m_inputBuffer,					// VkBuffer			buffer;
+					curOffset * inputStride,		// VkDeviceSize		offset;
+					numToExec * inputStride			// VkDeviceSize		range;
+				};
+
+				descriptorSetUpdateBuilder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding((deUint32)INPUT_BUFFER_BINDING), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &inputDescriptorBufferInfo);
+			}
+
+			uploadUniforms(descriptorSetUpdateBuilder, *descriptorSet);
+
+			descriptorSetUpdateBuilder.update(vk, vkDevice);
+		}
+
+		cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferParams);
+		VK_CHECK(vk.beginCommandBuffer(*cmdBuffer, &cmdBufferBeginInfo));
+		vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *computePipeline);
+
+		vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &*descriptorSet, 0u, DE_NULL);
+
+		vk.cmdDispatch(*cmdBuffer, numToExec, 1, 1);
+
+		VK_CHECK(vk.endCommandBuffer(*cmdBuffer));
+
+		curOffset += numToExec;
+
+		// Execute
+		{
+			VK_CHECK(vk.resetFences(vkDevice, 1, &fence.get()));
+
+			const VkSubmitInfo submitInfo =
+			{
+				VK_STRUCTURE_TYPE_SUBMIT_INFO,
+				DE_NULL,
+				0u,
+				(const VkSemaphore*)DE_NULL,
+				(const VkPipelineStageFlags*)DE_NULL,
+				1u,
+				&cmdBuffer.get(),
+				0u,
+				(const VkSemaphore*)DE_NULL,
+			};
+
+			VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *fence));
+			VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), true, ~(0ull) /* infinity*/));
+		}
+	}
+
+	// Read back data
+	readOutputBuffer(ctx, outputs, numValues);
+}
+
+// Tessellation utils
+
+static std::string generateVertexShaderForTess (void)
+{
+	std::ostringstream	src;
+	src <<  "#version 310 es\n"
+		<< "void main (void)\n{\n"
+		<< "	gl_Position = vec4(gl_VertexIndex/2, gl_VertexIndex%2, 0.0, 1.0);\n"
+		<< "}\n";
+
+	return src.str();
+}
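+
+// For reference, the positions the shader above produces for the first four
+// vertex indices (integer division and modulo, then implicit int -> float):
+//	gl_VertexIndex 0 -> (0, 0),  1 -> (0, 1),  2 -> (1, 0),  3 -> (1, 1)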
+
+class TessellationExecutor : public BufferIoExecutor
+{
+public:
+						TessellationExecutor		(const ShaderSpec& shaderSpec, glu::ShaderType shaderType);
+	virtual				~TessellationExecutor		(void);
+
+	void				renderTess					(const Context& ctx, deUint32 vertexCount);
+};
+
+TessellationExecutor::TessellationExecutor (const ShaderSpec& shaderSpec, glu::ShaderType shaderType)
+	: BufferIoExecutor	(shaderSpec, shaderType)
+{
+}
+
+TessellationExecutor::~TessellationExecutor (void)
+{
+}
+
+void TessellationExecutor::renderTess (const Context& ctx, deUint32 vertexCount)
+{
+	const size_t						inputBufferSize				= (vertexCount/2) * getInputStride();
+	const VkDevice						vkDevice					= ctx.getDevice();
+	const DeviceInterface&				vk							= ctx.getDeviceInterface();
+	const VkQueue						queue						= ctx.getUniversalQueue();
+	const deUint32						queueFamilyIndex			= ctx.getUniversalQueueFamilyIndex();
+	Allocator&							memAlloc					= ctx.getDefaultAllocator();
+
+	const tcu::UVec2					renderSize					(DEFAULT_RENDER_WIDTH, DEFAULT_RENDER_HEIGHT);
+
+	Move<VkImage>						colorImage;
+	de::MovePtr<Allocation>				colorImageAlloc;
+	VkFormat							colorFormat					= VK_FORMAT_R8G8B8A8_UNORM;
+	Move<VkImageView>					colorImageView;
+
+	Move<VkRenderPass>					renderPass;
+	Move<VkFramebuffer>					framebuffer;
+	Move<VkPipelineLayout>				pipelineLayout;
+	Move<VkPipeline>					graphicsPipeline;
+
+	Move<VkShaderModule>				vertexShaderModule;
+	Move<VkShaderModule>				tessControlShaderModule;
+	Move<VkShaderModule>				tessEvalShaderModule;
+	Move<VkShaderModule>				fragmentShaderModule;
+
+	Move<VkCommandPool>					cmdPool;
+	Move<VkCommandBuffer>				cmdBuffer;
+
+	Move<VkFence>						fence;
+
+	Move<VkDescriptorPool>				descriptorPool;
+	Move<VkDescriptorSetLayout>			descriptorSetLayout;
+	Move<VkDescriptorSet>				descriptorSet;
+
+	// Create color image
+	{
+		const VkImageCreateInfo colorImageParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,										// VkStructureType			sType;
+			DE_NULL,																	// const void*				pNext;
+			0u,																			// VkImageCreateFlags		flags;
+			VK_IMAGE_TYPE_2D,															// VkImageType				imageType;
+			colorFormat,																// VkFormat					format;
+			{ renderSize.x(), renderSize.y(), 1u },										// VkExtent3D				extent;
+			1u,																			// deUint32					mipLevels;
+			1u,																			// deUint32					arraySize;
+			VK_SAMPLE_COUNT_1_BIT,														// VkSampleCountFlagBits	samples;
+			VK_IMAGE_TILING_OPTIMAL,													// VkImageTiling			tiling;
+			VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,		// VkImageUsageFlags		usage;
+			VK_SHARING_MODE_EXCLUSIVE,													// VkSharingMode			sharingMode;
+			1u,																			// deUint32					queueFamilyCount;
+			&queueFamilyIndex,															// const deUint32*			pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED													// VkImageLayout			initialLayout;
+		};
+
+		colorImage = createImage(vk, vkDevice, &colorImageParams);
+
+		// Allocate and bind color image memory
+		colorImageAlloc = memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *colorImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *colorImage, colorImageAlloc->getMemory(), colorImageAlloc->getOffset()));
+	}
+
+	// Create color attachment view
+	{
+		const VkImageViewCreateInfo colorImageViewParams =
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			0u,													// VkImageViewCreateFlags	flags;
+			*colorImage,										// VkImage					image;
+			VK_IMAGE_VIEW_TYPE_2D,								// VkImageViewType			viewType;
+			colorFormat,										// VkFormat					format;
+			{
+				VK_COMPONENT_SWIZZLE_R,							// VkComponentSwizzle		r;
+				VK_COMPONENT_SWIZZLE_G,							// VkComponentSwizzle		g;
+				VK_COMPONENT_SWIZZLE_B,							// VkComponentSwizzle		b;
+				VK_COMPONENT_SWIZZLE_A							// VkComponentSwizzle		a;
+			},													// VkComponentsMapping		components;
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,						// VkImageAspectFlags		aspectMask;
+				0u,												// deUint32					baseMipLevel;
+				1u,												// deUint32					levelCount;
+				0u,												// deUint32					baseArrayLayer;
+				1u												// deUint32					layerCount;
+			}													// VkImageSubresourceRange	subresourceRange;
+		};
+
+		colorImageView = createImageView(vk, vkDevice, &colorImageViewParams);
+	}
+
+	// Create render pass
+	{
+		const VkAttachmentDescription colorAttachmentDescription =
+		{
+			0u,													// VkAttachmentDescriptionFlags	flags;
+			colorFormat,										// VkFormat						format;
+			VK_SAMPLE_COUNT_1_BIT,								// VkSampleCountFlagBits		samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp			loadOp;
+			VK_ATTACHMENT_STORE_OP_STORE,						// VkAttachmentStoreOp			storeOp;
+			VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// VkAttachmentLoadOp			stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp			stencilStoreOp;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout				initialLayout;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout				finalLayout
+		};
+
+		const VkAttachmentDescription attachments[1] =
+		{
+			colorAttachmentDescription
+		};
+
+		const VkAttachmentReference colorAttachmentReference =
+		{
+			0u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout	layout;
+		};
+
+		const VkSubpassDescription subpassDescription =
+		{
+			0u,													// VkSubpassDescriptionFlags	flags;
+			VK_PIPELINE_BIND_POINT_GRAPHICS,					// VkPipelineBindPoint			pipelineBindPoint;
+			0u,													// deUint32						inputCount;
+			DE_NULL,											// const VkAttachmentReference*	pInputAttachments;
+			1u,													// deUint32						colorCount;
+			&colorAttachmentReference,							// const VkAttachmentReference*	pColorAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pResolveAttachments;
+			DE_NULL,											// VkAttachmentReference		depthStencilAttachment;
+			0u,													// deUint32						preserveCount;
+			DE_NULL												// const VkAttachmentReference* pPreserveAttachments;
+		};
+
+		const VkRenderPassCreateInfo renderPassParams =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,			// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			0u,													// VkRenderPassCreateFlags			flags;
+			1u,													// deUint32							attachmentCount;
+			attachments,										// const VkAttachmentDescription*	pAttachments;
+			1u,													// deUint32							subpassCount;
+			&subpassDescription,								// const VkSubpassDescription*		pSubpasses;
+			0u,													// deUint32							dependencyCount;
+			DE_NULL												// const VkSubpassDependency*		pDependencies;
+		};
+
+		renderPass = createRenderPass(vk, vkDevice, &renderPassParams);
+	}
+
+	// Create framebuffer
+	{
+		const VkFramebufferCreateInfo framebufferParams =
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,			// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			0u,													// VkFramebufferCreateFlags		flags;
+			*renderPass,										// VkRenderPass					renderPass;
+			1u,													// deUint32						attachmentCount;
+			&*colorImageView,									// const VkImageView*			pAttachments;
+			(deUint32)renderSize.x(),							// deUint32						width;
+			(deUint32)renderSize.y(),							// deUint32						height;
+			1u													// deUint32						layers;
+		};
+
+		framebuffer = createFramebuffer(vk, vkDevice, &framebufferParams);
+	}
+
+	// Create descriptors
+	{
+		m_descriptorSetLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_ALL);
+		m_descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
+		m_descriptorSetLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_ALL);
+		m_descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
+
+		addUniforms(vkDevice, vk, queue, queueFamilyIndex, memAlloc);
+
+		descriptorSetLayout = m_descriptorSetLayoutBuilder.build(vk, vkDevice);
+		descriptorPool = m_descriptorPoolBuilder.build(vk, vkDevice, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+
+		const VkDescriptorSetAllocateInfo allocInfo =
+		{
+			VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+			DE_NULL,
+			*descriptorPool,
+			1u,
+			&*descriptorSetLayout
+		};
+
+		descriptorSet = allocateDescriptorSet(vk, vkDevice, &allocInfo);
+		// Update descriptors
+		{
+			DescriptorSetUpdateBuilder descriptorSetUpdateBuilder;
+			const VkDescriptorBufferInfo outputDescriptorBufferInfo =
+			{
+				*m_outputBuffer,				// VkBuffer			buffer;
+				0u,								// VkDeviceSize		offset;
+				VK_WHOLE_SIZE					// VkDeviceSize		range;
+			};
+
+			descriptorSetUpdateBuilder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding((deUint32)OUTPUT_BUFFER_BINDING), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outputDescriptorBufferInfo);
+
+			VkDescriptorBufferInfo inputDescriptorBufferInfo =
+			{
+				0,							// VkBuffer			buffer;
+				0u,							// VkDeviceSize		offset;
+				VK_WHOLE_SIZE				// VkDeviceSize		range;
+			};
+			if (inputBufferSize)
+			{
+				inputDescriptorBufferInfo.buffer = *m_inputBuffer;
+
+				descriptorSetUpdateBuilder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding((deUint32)INPUT_BUFFER_BINDING), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &inputDescriptorBufferInfo);
+			}
+
+			uploadUniforms(descriptorSetUpdateBuilder, *descriptorSet);
+
+			descriptorSetUpdateBuilder.update(vk, vkDevice);
+		}
+	}
+
+	// Create pipeline layout
+	{
+		const VkPipelineLayoutCreateInfo pipelineLayoutParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			(VkPipelineLayoutCreateFlags)0,						// VkPipelineLayoutCreateFlags	flags;
+			1u,													// deUint32						setLayoutCount;
+			&*descriptorSetLayout,								// const VkDescriptorSetLayout*	pSetLayouts;
+			0u,													// deUint32						pushConstantRangeCount;
+			DE_NULL												// const VkPushConstantRange*	pPushConstantRanges;
+		};
+
+		pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	// Create shader modules
+	{
+		vertexShaderModule		= createShaderModule(vk, vkDevice, ctx.getBinaryCollection().get("vert"), 0);
+		tessControlShaderModule	= createShaderModule(vk, vkDevice, ctx.getBinaryCollection().get("tess_control"), 0);
+		tessEvalShaderModule	= createShaderModule(vk, vkDevice, ctx.getBinaryCollection().get("tess_eval"), 0);
+		fragmentShaderModule	= createShaderModule(vk, vkDevice, ctx.getBinaryCollection().get("frag"), 0);
+	}
+
+	// Create pipeline
+	{
+		const VkPipelineShaderStageCreateInfo shaderStageParams[4] =
+		{
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				(VkPipelineShaderStageCreateFlags)0,						// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_VERTEX_BIT,									// VkShaderStageFlagBits				stage;
+				*vertexShaderModule,										// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			},
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				(VkPipelineShaderStageCreateFlags)0,						// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,					// VkShaderStageFlagBits				stage;
+				*tessControlShaderModule,									// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			},
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				(VkPipelineShaderStageCreateFlags)0,						// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,				// VkShaderStageFlagBits				stage;
+				*tessEvalShaderModule,										// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			},
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType						sType;
+				DE_NULL,													// const void*							pNext;
+				(VkPipelineShaderStageCreateFlags)0,						// VkPipelineShaderStageCreateFlags		flags;
+				VK_SHADER_STAGE_FRAGMENT_BIT,								// VkShaderStageFlagBits				stage;
+				*fragmentShaderModule,										// VkShaderModule						module;
+				"main",														// const char*							pName;
+				DE_NULL														// const VkSpecializationInfo*			pSpecializationInfo;
+			}
+		};
+
+		const VkPipelineVertexInputStateCreateInfo vertexInputStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			(VkPipelineVertexInputStateCreateFlags)0,						// VkPipelineVertexInputStateCreateFlags	flags;
+			0u,																// deUint32									vertexBindingDescriptionCount;
+			DE_NULL,														// const VkVertexInputBindingDescription*	pVertexBindingDescriptions;
+			0u,																// deUint32									vertexAttributeDescriptionCount;
+			DE_NULL,														// const VkVertexInputAttributeDescription*	pVertexAttributeDescriptions;
+		};
+
+		const VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// VkStructureType						sType;
+			DE_NULL,														// const void*							pNext;
+			(VkPipelineInputAssemblyStateCreateFlags)0,						// VkPipelineInputAssemblyStateCreateFlags	flags;
+			VK_PRIMITIVE_TOPOLOGY_PATCH_LIST,								// VkPrimitiveTopology					topology;
+			DE_FALSE														// VkBool32								primitiveRestartEnable;
+		};
+
+		const VkPipelineTessellationStateCreateInfo tessellationStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			(VkPipelineTessellationStateCreateFlags)0,						// VkPipelineTessellationStateCreateFlags	flags;
+			1																// uint32_t									patchControlPoints;
+		};
+
+		const VkViewport viewport =
+		{
+			0.0f,						// float	x;
+			0.0f,						// float	y;
+			(float)renderSize.x(),		// float	width;
+			(float)renderSize.y(),		// float	height;
+			0.0f,						// float	minDepth;
+			1.0f						// float	maxDepth;
+		};
+
+		const VkRect2D scissor =
+		{
+			{
+				0u,						// deUint32	x;
+				0u,						// deUint32	y;
+			},							// VkOffset2D	offset;
+			{
+				renderSize.x(),			// deUint32	width;
+				renderSize.y(),			// deUint32	height;
+			},							// VkExtent2D	extent;
+		};
+
+		const VkPipelineViewportStateCreateInfo viewportStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,	// VkStructureType						sType;
+			DE_NULL,												// const void*							pNext;
+			(VkPipelineViewportStateCreateFlags)0,					// VkPipelineViewportStateCreateFlags	flags;
+			1u,														// deUint32								viewportCount;
+			&viewport,												// const VkViewport*					pViewports;
+			1u,														// deUint32								scissorCount;
+			&scissor												// const VkRect2D*						pScissors;
+		};
+
+		const VkPipelineRasterizationStateCreateInfo rasterStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			(VkPipelineRasterizationStateCreateFlags)0,						// VkPipelineRasterizationStateCreateFlags	flags;
+			VK_FALSE,														// VkBool32									depthClampEnable;
+			VK_FALSE,														// VkBool32									rasterizerDiscardEnable;
+			VK_POLYGON_MODE_FILL,											// VkPolygonMode							polygonMode;
+			VK_CULL_MODE_NONE,												// VkCullModeFlags							cullMode;
+			VK_FRONT_FACE_COUNTER_CLOCKWISE,								// VkFrontFace								frontFace;
+			VK_FALSE,														// VkBool32									depthBiasEnable;
+			0.0f,															// float									depthBiasConstantFactor;
+			0.0f,															// float									depthBiasClamp;
+			0.0f,															// float									depthBiasSlopeFactor;
+			1.0f															// float									lineWidth;
+		};
+
+		const VkPipelineMultisampleStateCreateInfo multisampleStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineMultisampleStateCreateFlags	flags;
+			VK_SAMPLE_COUNT_1_BIT,											// VkSampleCountFlagBits					rasterizationSamples;
+			VK_FALSE,														// VkBool32									sampleShadingEnable;
+			0.0f,															// float									minSampleShading;
+			DE_NULL,														// const VkSampleMask*						pSampleMask;
+			VK_FALSE,														// VkBool32									alphaToCoverageEnable;
+			VK_FALSE														// VkBool32									alphaToOneEnable;
+		};
+
+		const VkPipelineColorBlendAttachmentState colorBlendAttachmentState =
+		{
+			VK_FALSE,						// VkBool32					blendEnable;
+			VK_BLEND_FACTOR_ONE,			// VkBlendFactor			srcColorBlendFactor;
+			VK_BLEND_FACTOR_ZERO,			// VkBlendFactor			dstColorBlendFactor;
+			VK_BLEND_OP_ADD,				// VkBlendOp				colorBlendOp;
+			VK_BLEND_FACTOR_ONE,			// VkBlendFactor			srcAlphaBlendFactor;
+			VK_BLEND_FACTOR_ZERO,			// VkBlendFactor			dstAlphaBlendFactor;
+			VK_BLEND_OP_ADD,				// VkBlendOp				alphaBlendOp;
+			(VK_COLOR_COMPONENT_R_BIT |
+			 VK_COLOR_COMPONENT_G_BIT |
+			 VK_COLOR_COMPONENT_B_BIT |
+			 VK_COLOR_COMPONENT_A_BIT)		// VkColorComponentFlags	colorWriteMask;
+		};
+
+		const VkPipelineColorBlendStateCreateInfo colorBlendStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	// VkStructureType								sType;
+			DE_NULL,													// const void*									pNext;
+			(VkPipelineColorBlendStateCreateFlags)0,					// VkPipelineColorBlendStateCreateFlags			flags;
+			VK_FALSE,													// VkBool32										logicOpEnable;
+			VK_LOGIC_OP_COPY,											// VkLogicOp									logicOp;
+			1u,															// deUint32										attachmentCount;
+			&colorBlendAttachmentState,									// const VkPipelineColorBlendAttachmentState*	pAttachments;
+			{ 0.0f, 0.0f, 0.0f, 0.0f }									// float										blendConstants[4];
+		};
+
+		const VkPipelineDynamicStateCreateInfo dynamicStateInfo =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,		// VkStructureType						sType;
+			DE_NULL,													// const void*							pNext;
+			(VkPipelineDynamicStateCreateFlags)0,						// VkPipelineDynamicStateCreateFlags	flags;
+			0u,															// deUint32								dynamicStateCount;
+			DE_NULL														// const VkDynamicState*				pDynamicStates;
+		};
+
+		const VkGraphicsPipelineCreateInfo graphicsPipelineParams =
+		{
+			VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,	// VkStructureType									sType;
+			DE_NULL,											// const void*										pNext;
+			0u,													// VkPipelineCreateFlags							flags;
+			4u,													// deUint32											stageCount;
+			shaderStageParams,									// const VkPipelineShaderStageCreateInfo*			pStages;
+			&vertexInputStateParams,							// const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+			&inputAssemblyStateParams,							// const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+			&tessellationStateParams,							// const VkPipelineTessellationStateCreateInfo*		pTessellationState;
+			&viewportStateParams,								// const VkPipelineViewportStateCreateInfo*			pViewportState;
+			&rasterStateParams,									// const VkPipelineRasterizationStateCreateInfo*	pRasterizationState;
+			&multisampleStateParams,							// const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+			DE_NULL,											// const VkPipelineDepthStencilStateCreateInfo*		pDepthStencilState;
+			&colorBlendStateParams,								// const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+			&dynamicStateInfo,									// const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+			*pipelineLayout,									// VkPipelineLayout									layout;
+			*renderPass,										// VkRenderPass										renderPass;
+			0u,													// deUint32											subpass;
+			0u,													// VkPipeline										basePipelineHandle;
+			0u													// deInt32											basePipelineIndex;
+		};
+
+		graphicsPipeline = createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,										// const void*			pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCommandPoolCreateFlags	flags;
+			queueFamilyIndex,								// deUint32				queueFamilyIndex;
+		};
+
+		cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo cmdBufferParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u												// deUint32					commandBufferCount;
+		};
+
+		const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+			DE_NULL,										// const void*						pNext;
+			0u,												// VkCommandBufferUsageFlags		flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,	// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+		};
+
+		const VkClearValue clearValues[1] =
+		{
+			getDefaultClearColor()
+		};
+
+		const VkRenderPassBeginInfo renderPassBeginInfo =
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,				// VkStructureType		sType;
+			DE_NULL,												// const void*			pNext;
+			*renderPass,											// VkRenderPass			renderPass;
+			*framebuffer,											// VkFramebuffer		framebuffer;
+			{ { 0, 0 }, { renderSize.x(), renderSize.y() } },		// VkRect2D				renderArea;
+			1u,														// deUint32				clearValueCount;
+			clearValues												// const VkClearValue*	pClearValues;
+		};
+
+		cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferParams);
+
+		VK_CHECK(vk.beginCommandBuffer(*cmdBuffer, &cmdBufferBeginInfo));
+
+		vk.cmdBeginRenderPass(*cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+		vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *graphicsPipeline);
+
+		vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &*descriptorSet, 0u, DE_NULL);
+
+		vk.cmdDraw(*cmdBuffer, vertexCount, 1, 0, 0);
+
+		vk.cmdEndRenderPass(*cmdBuffer);
+		VK_CHECK(vk.endCommandBuffer(*cmdBuffer));
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+		fence = createFence(vk, vkDevice, &fenceParams);
+	}
+
+	// Execute Draw
+	{
+		VK_CHECK(vk.resetFences(vkDevice, 1, &fence.get()));
+		const VkSubmitInfo submitInfo =
+		{
+			VK_STRUCTURE_TYPE_SUBMIT_INFO,				// VkStructureType				sType;
+			DE_NULL,									// const void*					pNext;
+			0u,											// deUint32						waitSemaphoreCount;
+			(const VkSemaphore*)0,						// const VkSemaphore*			pWaitSemaphores;
+			(const VkPipelineStageFlags*)DE_NULL,		// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1u,											// deUint32						commandBufferCount;
+			&cmdBuffer.get(),							// const VkCommandBuffer*		pCommandBuffers;
+			0u,											// deUint32						signalSemaphoreCount;
+			(const VkSemaphore*)0,						// const VkSemaphore*			pSignalSemaphores;
+		};
+		VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *fence));
+		VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), true, ~(0ull) /* infinity*/));
+	}
+}
+
+// TessControlExecutor
+
+class TessControlExecutor : public TessellationExecutor
+{
+public:
+						TessControlExecutor			(const ShaderSpec& shaderSpec, glu::ShaderType shaderType);
+	virtual				~TessControlExecutor		(void);
+
+	virtual void		setShaderSources			(SourceCollections& programCollection) const;
+
+	virtual void		execute						(const Context& ctx, int numValues, const void* const* inputs, void* const* outputs);
+
+protected:
+	static std::string	generateTessControlShader	(const ShaderSpec& shaderSpec);
+};
+
+TessControlExecutor::TessControlExecutor (const ShaderSpec& shaderSpec, glu::ShaderType shaderType)
+	: TessellationExecutor (shaderSpec, shaderType)
+{
+}
+
+TessControlExecutor::~TessControlExecutor (void)
+{
+}
+
+std::string TessControlExecutor::generateTessControlShader (const ShaderSpec& shaderSpec)
+{
+	std::ostringstream src;
+	src <<  "#version 310 es\n"
+			"#extension GL_EXT_tessellation_shader : require\n\n";
+
+	if (!shaderSpec.globalDeclarations.empty())
+		src << shaderSpec.globalDeclarations << "\n";
+
+	src << "\nlayout(vertices = 1) out;\n\n";
+
+	declareBufferBlocks(src, shaderSpec);
+
+	src << "void main (void)\n{\n";
+
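+	// All tessellation levels are fixed at 1.0; the generated shader indexes its output buffer by gl_PrimitiveID below.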
+	for (int ndx = 0; ndx < 2; ndx++)
+		src << "\tgl_TessLevelInner[" << ndx << "] = 1.0;\n";
+
+	for (int ndx = 0; ndx < 4; ndx++)
+		src << "\tgl_TessLevelOuter[" << ndx << "] = 1.0;\n";
+
+	src << "\n"
+		<< "\thighp uint invocationId = uint(gl_PrimitiveID);\n";
+
+	generateExecBufferIo(src, shaderSpec, "invocationId");
+
+	src << "}\n";
+
+	return src.str();
+}
+
+static std::string generateEmptyTessEvalShader ()
+{
+	std::ostringstream src;
+
+	src <<  "#version 310 es\n"
+			"#extension GL_EXT_tessellation_shader : require\n\n";
+
+	src << "layout(triangles, ccw) in;\n";
+
+	src << "\nvoid main (void)\n{\n"
+		<< "\tgl_Position = vec4(gl_TessCoord.xy, 0.0, 1.0);\n"
+		<< "}\n";
+
+	return src.str();
+}
+
+void TessControlExecutor::setShaderSources (SourceCollections& programCollection) const
+{
+	programCollection.glslSources.add("vert") << glu::VertexSource(generateVertexShaderForTess());
+	programCollection.glslSources.add("tess_control") << glu::TessellationControlSource(generateTessControlShader(m_shaderSpec));
+	programCollection.glslSources.add("tess_eval") << glu::TessellationEvaluationSource(generateEmptyTessEvalShader());
+	programCollection.glslSources.add("frag") << glu::FragmentSource(generateEmptyFragmentSource());
+}
+
+void TessControlExecutor::execute (const Context& ctx, int numValues, const void* const* inputs, void* const* outputs)
+{
+	checkSupported(ctx, m_shaderType);
+
+	initBuffers(ctx, numValues);
+
+	// Setup input buffer & copy data
+	uploadInputBuffer(ctx, inputs, numValues);
+
+	renderTess(ctx, 3 * numValues);
+
+	// Read back data
+	readOutputBuffer(ctx, outputs, numValues);
+}
+
+// TessEvaluationExecutor
+
+class TessEvaluationExecutor : public TessellationExecutor
+{
+public:
+						TessEvaluationExecutor	(const ShaderSpec& shaderSpec, glu::ShaderType shaderType);
+	virtual				~TessEvaluationExecutor	(void);
+
+	virtual void		setShaderSources		(SourceCollections& programCollection) const;
+
+	virtual void		execute					(const Context& ctx, int numValues, const void* const* inputs, void* const* outputs);
+
+protected:
+	static std::string	generateTessEvalShader	(const ShaderSpec& shaderSpec);
+};
+
+TessEvaluationExecutor::TessEvaluationExecutor (const ShaderSpec& shaderSpec, glu::ShaderType shaderType)
+	: TessellationExecutor (shaderSpec, shaderType)
+{
+}
+
+TessEvaluationExecutor::~TessEvaluationExecutor (void)
+{
+}
+
+static std::string generatePassthroughTessControlShader (void)
+{
+	std::ostringstream src;
+
+	src <<  "#version 310 es\n"
+			"#extension GL_EXT_tessellation_shader : require\n\n";
+
+	src << "layout(vertices = 1) out;\n\n";
+
+	src << "void main (void)\n{\n";
+
+	for (int ndx = 0; ndx < 2; ndx++)
+		src << "\tgl_TessLevelInner[" << ndx << "] = 1.0;\n";
+
+	for (int ndx = 0; ndx < 4; ndx++)
+		src << "\tgl_TessLevelOuter[" << ndx << "] = 1.0;\n";
+
+	src << "}\n";
+
+	return src.str();
+}
+
+std::string TessEvaluationExecutor::generateTessEvalShader (const ShaderSpec& shaderSpec)
+{
+	std::ostringstream src;
+
+	src <<  "#version 310 es\n"
+			"#extension GL_EXT_tessellation_shader : require\n\n";
+
+	if (!shaderSpec.globalDeclarations.empty())
+		src << shaderSpec.globalDeclarations << "\n";
+
+	src << "\n";
+
+	src << "layout(isolines, equal_spacing) in;\n\n";
+
+	declareBufferBlocks(src, shaderSpec);
+
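+	// With the passthrough control shader all tessellation levels are 1.0, so each isoline patch evaluates exactly
+	// two vertices (gl_TessCoord.x of 0.0 and 1.0); the output index is derived from the patch ID and the line end.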
+	src << "void main (void)\n{\n"
+		<< "\tgl_Position = vec4(gl_TessCoord.x, 0.0, 0.0, 1.0);\n"
+		<< "\thighp uint invocationId = uint(gl_PrimitiveID) + (gl_TessCoord.x > 0.5 ? 1u : 0u);\n";
+
+	generateExecBufferIo(src, shaderSpec, "invocationId");
+
+	src	<< "}\n";
+
+	return src.str();
+}
+
+void TessEvaluationExecutor::setShaderSources (SourceCollections& programCollection) const
+{
+	programCollection.glslSources.add("vert") << glu::VertexSource(generateVertexShaderForTess());
+	programCollection.glslSources.add("tess_control") << glu::TessellationControlSource(generatePassthroughTessControlShader());
+	programCollection.glslSources.add("tess_eval") << glu::TessellationEvaluationSource(generateTessEvalShader(m_shaderSpec));
+	programCollection.glslSources.add("frag") << glu::FragmentSource(generateEmptyFragmentSource());
+}
+
+void TessEvaluationExecutor::execute (const Context& ctx, int numValues, const void* const* inputs, void* const* outputs)
+{
+	checkSupported(ctx, m_shaderType);
+
+	const int	alignedValues	= deAlign32(numValues, 2);
+
+	// Initialize buffers with aligned value count to make room for padding
+	initBuffers(ctx, alignedValues);
+
+	// Setup input buffer & copy data
+	uploadInputBuffer(ctx, inputs, numValues);
+
+	renderTess(ctx, 2 * numValues);
+
+	// Read back data
+	readOutputBuffer(ctx, outputs, numValues);
+}
+
+} // anonymous
+
+// ShaderExecutor
+
+ShaderExecutor::ShaderExecutor (const ShaderSpec& shaderSpec, glu::ShaderType shaderType)
+	: m_shaderSpec	(shaderSpec)
+	, m_shaderType	(shaderType)
+{
+}
+
+ShaderExecutor::~ShaderExecutor (void)
+{
+}
+
+// Utilities
+
+ShaderExecutor* createExecutor (glu::ShaderType shaderType, const ShaderSpec& shaderSpec)
+{
+	switch (shaderType)
+	{
+		case glu::SHADERTYPE_VERTEX:					return new VertexShaderExecutor		(shaderSpec, shaderType);
+		case glu::SHADERTYPE_TESSELLATION_CONTROL:		return new TessControlExecutor		(shaderSpec, shaderType);
+		case glu::SHADERTYPE_TESSELLATION_EVALUATION:	return new TessEvaluationExecutor	(shaderSpec, shaderType);
+		case glu::SHADERTYPE_GEOMETRY:					return new GeometryShaderExecutor	(shaderSpec, shaderType);
+		case glu::SHADERTYPE_FRAGMENT:					return new FragmentShaderExecutor	(shaderSpec, shaderType);
+		case glu::SHADERTYPE_COMPUTE:					return new ComputeShaderExecutor	(shaderSpec, shaderType);
+		default:
+			throw tcu::InternalError("Unsupported shader type");
+	}
+}
+
+void ShaderExecutor::setupUniformData (const VkDevice&				vkDevice,
+									   const DeviceInterface&		vk,
+									   const VkQueue				/*queue*/,
+									   const deUint32				queueFamilyIndex,
+									   Allocator&					memAlloc,
+									   deUint32						bindingLocation,
+									   VkDescriptorType				descriptorType,
+									   deUint32						size,
+									   const void*					dataPtr)
+{
+	DE_ASSERT(descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER || descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
+
+	const VkBufferUsageFlags usage = descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
+
+	const VkBufferCreateInfo uniformBufferParams =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+		DE_NULL,									// const void*			pNext;
+		0u,											// VkBufferCreateFlags	flags;
+		size,										// VkDeviceSize			size;
+		usage,										// VkBufferUsageFlags	usage;
+		VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+		1u,											// deUint32				queueFamilyIndexCount;
+		&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+	};
+
+	Move<VkBuffer>					buffer				= createBuffer(vk, vkDevice, &uniformBufferParams);
+	de::MovePtr<Allocation>			alloc				= memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *buffer), MemoryRequirement::HostVisible);
+	VK_CHECK(vk.bindBufferMemory(vkDevice, *buffer, alloc->getMemory(), alloc->getOffset()));
+
+	deMemcpy(alloc->getHostPtr(), dataPtr, size);
+	flushMappedMemoryRange(vk, vkDevice, alloc->getMemory(), alloc->getOffset(), size);
+
+	de::MovePtr<BufferUniform> uniformInfo(new BufferUniform());
+	uniformInfo->type = descriptorType;
+	uniformInfo->descriptor = makeDescriptorBufferInfo(*buffer, 0u, size);
+	uniformInfo->location = bindingLocation;
+	uniformInfo->buffer = VkBufferSp(new Unique<VkBuffer>(buffer));
+	uniformInfo->alloc = AllocationSp(alloc.release());
+
+	m_descriptorSetLayoutBuilder.addSingleBinding(descriptorType, VK_SHADER_STAGE_ALL);
+	m_descriptorPoolBuilder.addType(descriptorType);
+
+	m_uniformInfos.push_back(UniformInfoSp(new de::UniquePtr<UniformInfo>(uniformInfo)));
+}
+
+void ShaderExecutor::setupSamplerData (const VkDevice&				vkDevice,
+									   const DeviceInterface&		vk,
+									   const VkQueue				queue,
+									   const deUint32				queueFamilyIndex,
+									   Allocator&					memAlloc,
+									   deUint32						bindingLocation,
+									   deUint32						numSamplers,
+									   const tcu::Sampler&			refSampler,
+									   const tcu::TextureFormat&	texFormat,
+									   const tcu::IVec3&			texSize,
+									   VkImageType					imageType,
+									   VkImageViewType				imageViewType,
+									   const void*					data)
+{
+	DE_ASSERT(numSamplers > 0);
+
+	de::MovePtr<SamplerArrayUniform>	samplers		(new SamplerArrayUniform());
+
+	samplers->type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+	samplers->location = bindingLocation;
+
+	for (deUint32 ndx = 0; ndx < numSamplers; ++ndx)
+	{
+		const int						offset			= ndx * texSize.x() * texSize.y() * texSize.z() * texFormat.getPixelSize();
+		const void*						samplerData		= ((deUint8*)data) + offset;
+		de::MovePtr<SamplerUniform>		uniform			= createSamplerUniform(vkDevice, vk, queue, queueFamilyIndex, memAlloc, bindingLocation, refSampler, texFormat, texSize, imageType, imageViewType, samplerData);
+
+		samplers->uniforms.push_back(SamplerUniformSp(new de::UniquePtr<SamplerUniform>(uniform)));
+	}
+
+	m_descriptorSetLayoutBuilder.addArraySamplerBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, numSamplers, VK_SHADER_STAGE_ALL, DE_NULL);
+	m_descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, numSamplers);
+
+	m_uniformInfos.push_back(UniformInfoSp(new de::UniquePtr<UniformInfo>(samplers)));
+}
+
+const void*	ShaderExecutor::getBufferPtr (const deUint32 bindingLocation) const
+{
+	std::vector<UniformInfoSp>::const_iterator it = m_uniformInfos.begin();
+	for (; it != m_uniformInfos.end(); it++)
+	{
+		const UniformInfo* uniformInfo = it->get()->get();
+		if (uniformInfo->isBufferUniform() && uniformInfo->location == bindingLocation)
+		{
+			const BufferUniform* bufferUniform = static_cast<const BufferUniform*>(uniformInfo);
+			return bufferUniform->alloc->getHostPtr();
+		}
+	}
+
+	return DE_NULL;
+}
+
+void ShaderExecutor::addUniforms (const VkDevice& vkDevice, const DeviceInterface& vk, const VkQueue queue, const deUint32 queueFamilyIndex, Allocator& memAlloc)
+{
+	if (!m_uniformSetup)
+		return;
+
+	for (std::vector<UniformDataSp>::const_iterator it = m_uniformSetup->uniforms().begin(); it != m_uniformSetup->uniforms().end(); ++it)
+	{
+		const UniformDataBase* uniformData = it->get()->get();
+		uniformData->setup(*this, vkDevice, vk, queue, queueFamilyIndex, memAlloc);
+	}
+}
+
+void ShaderExecutor::uploadUniforms (DescriptorSetUpdateBuilder& descriptorSetUpdateBuilder, VkDescriptorSet descriptorSet)
+{
+	for (std::vector<UniformInfoSp>::const_iterator it = m_uniformInfos.begin(); it != m_uniformInfos.end(); ++it)
+	{
+		const UniformInfo* uniformInfo = it->get()->get();
+
+		if (uniformInfo->isSamplerArray())
+		{
+			const SamplerArrayUniform*			arrayInfo		= static_cast<const SamplerArrayUniform*>(uniformInfo);
+			std::vector<VkDescriptorImageInfo>	descriptors;
+
+			for (std::vector<SamplerUniformSp>::const_iterator ait = arrayInfo->uniforms.begin(); ait != arrayInfo->uniforms.end(); ++ait)
+			{
+				descriptors.push_back(ait->get()->get()->descriptor);
+			}
+
+			descriptorSetUpdateBuilder.writeArray(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(uniformInfo->location), uniformInfo->type, (deUint32)descriptors.size(), &descriptors[0]);
+		}
+		else if (uniformInfo->isBufferUniform())
+		{
+			const BufferUniform* bufferUniform = static_cast<const BufferUniform*>(uniformInfo);
+			descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bufferUniform->location), bufferUniform->type, &bufferUniform->descriptor);
+		}
+		else if (uniformInfo->isSamplerUniform())
+		{
+			const SamplerUniform* samplerUniform = static_cast<const SamplerUniform*>(uniformInfo);
+			descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(samplerUniform->location), samplerUniform->type, &samplerUniform->descriptor);
+		}
+	}
+}
+
+void ShaderExecutor::uploadImage (const VkDevice&				vkDevice,
+								  const DeviceInterface&		vk,
+								  const VkQueue					queue,
+								  const deUint32				queueFamilyIndex,
+								  Allocator&					memAlloc,
+								  const tcu::TextureFormat&		texFormat,
+								  const tcu::IVec3&				texSize,
+								  const void*					data,
+								  const deUint32				arraySize,
+								  const VkImageAspectFlags		aspectMask,
+								  VkImage						destImage)
+{
+	deUint32						textureSize			= texSize.x() * texSize.y() * texSize.z() * texFormat.getPixelSize();
+	deUint32						bufferSize;
+	Move<VkBuffer>					buffer;
+	de::MovePtr<Allocation>			bufferAlloc;
+	Move<VkCommandPool>				cmdPool;
+	Move<VkCommandBuffer>			cmdBuffer;
+	Move<VkFence>					fence;
+	std::vector<deUint32>			levelDataSizes;
+
+	// Calculate buffer size
+	bufferSize = arraySize * textureSize;
+
+	// Create source buffer
+	{
+		const VkBufferCreateInfo bufferParams =
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			bufferSize,									// VkDeviceSize			size;
+			VK_BUFFER_USAGE_TRANSFER_SRC_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			0u,											// deUint32				queueFamilyIndexCount;
+			DE_NULL,									// const deUint32*		pQueueFamilyIndices;
+		};
+
+		buffer		= createBuffer(vk, vkDevice, &bufferParams);
+		bufferAlloc = memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *buffer), MemoryRequirement::HostVisible);
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *buffer, bufferAlloc->getMemory(), bufferAlloc->getOffset()));
+	}
+
+	// Create command pool and buffer
+	{
+		const VkCommandPoolCreateInfo cmdPoolParams =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCommandPoolCreateFlags	flags;
+			queueFamilyIndex,								// deUint32					queueFamilyIndex;
+		};
+
+		cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+
+		const VkCommandBufferAllocateInfo cmdBufferAllocateInfo =
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u,												// deUint32					commandBufferCount;
+		};
+
+		cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferAllocateInfo);
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo fenceParams =
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u											// VkFenceCreateFlags	flags;
+		};
+
+		fence = createFence(vk, vkDevice, &fenceParams);
+	}
+
+	// Barriers for copying buffer to image
+	const VkBufferMemoryBarrier preBufferBarrier =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,	// VkStructureType	sType;
+		DE_NULL,									// const void*		pNext;
+		VK_ACCESS_HOST_WRITE_BIT,					// VkAccessFlags	srcAccessMask;
+		VK_ACCESS_TRANSFER_READ_BIT,				// VkAccessFlags	dstAccessMask;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32			dstQueueFamilyIndex;
+		*buffer,									// VkBuffer			buffer;
+		0u,											// VkDeviceSize		offset;
+		bufferSize									// VkDeviceSize		size;
+	};
+
+	const VkImageMemoryBarrier preImageBarrier =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,			// VkStructureType			sType;
+		DE_NULL,										// const void*				pNext;
+		0u,												// VkAccessFlags			srcAccessMask;
+		0u,												// VkAccessFlags			dstAccessMask;
+		VK_IMAGE_LAYOUT_UNDEFINED,						// VkImageLayout			oldLayout;
+		VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,			// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					dstQueueFamilyIndex;
+		destImage,										// VkImage					image;
+		{												// VkImageSubresourceRange	subresourceRange;
+			aspectMask,								// VkImageAspect	aspect;
+			0u,										// deUint32			baseMipLevel;
+			1u,										// deUint32			mipLevels;
+			0u,										// deUint32			baseArraySlice;
+			arraySize								// deUint32			arraySize;
+		}
+	};
+
+	const VkImageMemoryBarrier postImageBarrier =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,			// VkStructureType			sType;
+		DE_NULL,										// const void*				pNext;
+		VK_ACCESS_TRANSFER_WRITE_BIT,					// VkAccessFlags			srcAccessMask;
+		VK_ACCESS_SHADER_READ_BIT,						// VkAccessFlags			dstAccessMask;
+		VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,			// VkImageLayout			oldLayout;
+		VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,		// VkImageLayout			newLayout;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,						// deUint32					dstQueueFamilyIndex;
+		destImage,										// VkImage					image;
+		{												// VkImageSubresourceRange	subresourceRange;
+			aspectMask,								// VkImageAspect	aspect;
+			0u,										// deUint32			baseMipLevel;
+			1u,										// deUint32			mipLevels;
+			0u,										// deUint32			baseArraySlice;
+			arraySize								// deUint32			arraySize;
+		}
+	};
+
+	const VkCommandBufferBeginInfo cmdBufferBeginInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+		DE_NULL,										// const void*						pNext;
+		VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,	// VkCommandBufferUsageFlags		flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	std::vector<VkBufferImageCopy>		copyRegions;
+
+	{
+		deUint32 layerDataOffset = 0;
+
+		for (deUint32 layerNdx = 0; layerNdx < arraySize; ++layerNdx)
+		{
+			const VkBufferImageCopy layerRegion =
+			{
+				layerDataOffset,						// VkDeviceSize				bufferOffset;
+				(deUint32)texSize.x(),					// deUint32					bufferRowLength;
+				(deUint32)texSize.y(),					// deUint32					bufferImageHeight;
+				{										// VkImageSubresourceLayers	imageSubresource;
+					aspectMask,
+					0u,
+					(deUint32)layerNdx,
+					1u
+				},
+				{ 0u, 0u, 0u },							// VkOffset3D			imageOffset;
+				{										// VkExtent3D			imageExtent;
+					(deUint32)texSize.x(),
+					(deUint32)texSize.y(),
+					(deUint32)texSize.z()
+				}
+			};
+
+			copyRegions.push_back(layerRegion);
+			layerDataOffset += textureSize;
+		}
+	}
+
+	// Write buffer data
+	{
+		deUint8*	destPtr				= (deUint8*)bufferAlloc->getHostPtr();
+		deUint32	levelOffset			= 0;
+
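+		// Note that every array layer is filled from the same source data (e.g. all six cube faces receive identical contents).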
+		for (deUint32 layerNdx = 0; layerNdx < arraySize; ++layerNdx)
+		{
+			tcu::ConstPixelBufferAccess		access		(texFormat, texSize, data);
+			tcu::PixelBufferAccess			destAccess	(texFormat, texSize, destPtr + levelOffset);
+
+			tcu::copy(destAccess, access);
+			levelOffset += textureSize;
+		}
+	}
+
+	flushMappedMemoryRange(vk, vkDevice, bufferAlloc->getMemory(), bufferAlloc->getOffset(), bufferSize);
+
+	// Copy buffer to image
+	VK_CHECK(vk.beginCommandBuffer(*cmdBuffer, &cmdBufferBeginInfo));
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &preBufferBarrier, 1, &preImageBarrier);
+	vk.cmdCopyBufferToImage(*cmdBuffer, *buffer, destImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, (deUint32)copyRegions.size(), copyRegions.data());
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
+	VK_CHECK(vk.endCommandBuffer(*cmdBuffer));
+
+	const VkSubmitInfo submitInfo =
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+		DE_NULL,						// const void*				pNext;
+		0u,								// deUint32					waitSemaphoreCount;
+		DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+		DE_NULL,						// const VkPipelineStageFlags*	pWaitDstStageMask;
+		1u,								// deUint32					commandBufferCount;
+		&cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+		0u,								// deUint32					signalSemaphoreCount;
+		DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+	};
+
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), true, ~(0ull) /* infinity */));
+}
+
+de::MovePtr<ShaderExecutor::SamplerUniform> ShaderExecutor::createSamplerUniform (const VkDevice&				vkDevice,
+																				  const DeviceInterface&		vk,
+																				  const VkQueue					queue,
+																				  const deUint32				queueFamilyIndex,
+																				  Allocator&					memAlloc,
+																				  deUint32						bindingLocation,
+																				  const tcu::Sampler&			refSampler,
+																				  const tcu::TextureFormat&		texFormat,
+																				  const tcu::IVec3&				texSize,
+																				  VkImageType					imageType,
+																				  VkImageViewType				imageViewType,
+																				  const void*					data)
+{
+	const VkFormat					format			= mapTextureFormat(texFormat);
+	const bool						isCube			= imageViewType == VK_IMAGE_VIEW_TYPE_CUBE;
+	const bool						isShadowSampler	= texFormat == tcu::TextureFormat(tcu::TextureFormat::D, tcu::TextureFormat::UNORM_INT16);
+	const VkImageCreateFlags		imageFlags		= isCube ? (VkImageCreateFlags)VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT : (VkImageCreateFlags)0;
+	const deUint32					arraySize		= isCube ? 6u : 1u;
+	const VkImageAspectFlags		aspectMask		= isShadowSampler ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT;
+	VkImageUsageFlags				imageUsage		= VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
+	Move<VkImage>					vkTexture;
+	de::MovePtr<Allocation>			allocation;
+
+	if (isShadowSampler)
+		imageUsage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+
+	// Create image
+	const VkImageCreateInfo	imageParams =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,							// VkStructureType			sType;
+		DE_NULL,														// const void*				pNext;
+		imageFlags,														// VkImageCreateFlags		flags;
+		imageType,														// VkImageType				imageType;
+		format,															// VkFormat					format;
+		{																// VkExtent3D				extent;
+			(deUint32)texSize.x(),
+			(deUint32)texSize.y(),
+			(deUint32)texSize.z()
+		},
+		1u,																// deUint32					mipLevels;
+		arraySize,														// deUint32					arrayLayers;
+		VK_SAMPLE_COUNT_1_BIT,											// VkSampleCountFlagBits	samples;
+		VK_IMAGE_TILING_OPTIMAL,										// VkImageTiling			tiling;
+		imageUsage,														// VkImageUsageFlags		usage;
+		VK_SHARING_MODE_EXCLUSIVE,										// VkSharingMode			sharingMode;
+		1u,																// deUint32					queueFamilyIndexCount;
+		&queueFamilyIndex,												// const deUint32*			pQueueFamilyIndices;
+		VK_IMAGE_LAYOUT_UNDEFINED										// VkImageLayout			initialLayout;
+	};
+
+	vkTexture		= createImage(vk, vkDevice, &imageParams);
+	allocation		= memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *vkTexture), MemoryRequirement::Any);
+	VK_CHECK(vk.bindImageMemory(vkDevice, *vkTexture, allocation->getMemory(), allocation->getOffset()));
+
+	// Upload texture data
+	uploadImage(vkDevice, vk, queue, queueFamilyIndex, memAlloc, texFormat, texSize, data, arraySize, aspectMask, *vkTexture);
+
+	// Create sampler
+	const VkSamplerCreateInfo		samplerParams	= mapSampler(refSampler, texFormat);
+	Move<VkSampler>					sampler			= createSampler(vk, vkDevice, &samplerParams);
+
+	const VkImageViewCreateInfo		viewParams		=
+	{
+		VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,	// VkStructureType			sType;
+		DE_NULL,									// const void*				pNext;
+		0u,											// VkImageViewCreateFlags	flags;
+		*vkTexture,									// VkImage					image;
+		imageViewType,								// VkImageViewType			viewType;
+		format,										// VkFormat					format;
+		{
+			VK_COMPONENT_SWIZZLE_R,						// VkComponentSwizzle		r;
+			VK_COMPONENT_SWIZZLE_G,						// VkComponentSwizzle		g;
+			VK_COMPONENT_SWIZZLE_B,						// VkComponentSwizzle		b;
+			VK_COMPONENT_SWIZZLE_A						// VkComponentSwizzle		a;
+		},											// VkComponentMapping			components;
+		{
+			aspectMask,									// VkImageAspectFlags	aspectMask;
+			0,											// deUint32				baseMipLevel;
+			1,											// deUint32				mipLevels;
+			0,											// deUint32				baseArraySlice;
+			arraySize									// deUint32				arraySize;
+		}											// VkImageSubresourceRange	subresourceRange;
+	};
+
+	Move<VkImageView>				imageView		= createImageView(vk, vkDevice, &viewParams);
+
+	const VkDescriptorImageInfo descriptor			=
+	{
+		sampler.get(),								// VkSampler				sampler;
+		imageView.get(),							// VkImageView				imageView;
+		VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL	// VkImageLayout			imageLayout;
+	};
+
+	de::MovePtr<SamplerUniform> uniform(new SamplerUniform());
+	uniform->type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+	uniform->descriptor = descriptor;
+	uniform->location = bindingLocation;
+	uniform->image = VkImageSp(new Unique<VkImage>(vkTexture));
+	uniform->imageView = VkImageViewSp(new Unique<VkImageView>(imageView));
+	uniform->sampler = VkSamplerSp(new Unique<VkSampler>(sampler));
+	uniform->alloc = AllocationSp(allocation.release());
+
+	return uniform;
+}
+
+SamplerUniformData::SamplerUniformData (deUint32						bindingLocation,
+										deUint32						numSamplers,
+										const tcu::Sampler&				refSampler,
+										const tcu::TextureFormat&		texFormat,
+										const tcu::IVec3&				texSize,
+										VkImageType						imageType,
+										VkImageViewType					imageViewType,
+										const void*						data)
+	: UniformDataBase		(bindingLocation)
+	, m_numSamplers			(numSamplers)
+	, m_refSampler			(refSampler)
+	, m_texFormat			(texFormat)
+	, m_texSize				(texSize)
+	, m_imageType			(imageType)
+	, m_imageViewType		(imageViewType)
+	, m_data				(data)
+{
+}
+
+SamplerUniformData::~SamplerUniformData (void)
+{
+}
+
+void SamplerUniformData::setup (ShaderExecutor& executor, const VkDevice& vkDevice, const DeviceInterface& vk, const VkQueue queue, const deUint32 queueFamilyIndex, Allocator& memAlloc) const
+{
+	executor.setupSamplerData(vkDevice, vk, queue, queueFamilyIndex, memAlloc, m_bindingLocation, m_numSamplers, m_refSampler, m_texFormat, m_texSize, m_imageType, m_imageViewType, m_data);
+}
+
+} // shaderexecutor
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderExecutor.hpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderExecutor.hpp
new file mode 100644
index 0000000..e464626
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderExecutor.hpp
@@ -0,0 +1,333 @@
+#ifndef _VKTSHADEREXECUTOR_HPP
+#define _VKTSHADEREXECUTOR_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan ShaderExecutor
+ *//*--------------------------------------------------------------------*/
+
+#include "deSharedPtr.hpp"
+
+#include "vktTestCase.hpp"
+#include "vkMemUtil.hpp"
+#include "vkBuilderUtil.hpp"
+
+#include "gluVarType.hpp"
+
+#include "tcuTexture.hpp"
+
+#include <vector>
+
+namespace vkt
+{
+namespace shaderexecutor
+{
+
+using namespace vk;
+
+//! Shader input / output variable declaration.
+struct Symbol
+{
+	std::string				name;		//!< Symbol name.
+	glu::VarType			varType;	//!< Symbol type.
+
+	Symbol (void) {}
+	Symbol (const std::string& name_, const glu::VarType& varType_) : name(name_), varType(varType_) {}
+};
+
+//! Complete shader specification.
+struct ShaderSpec
+{
+	std::vector<Symbol>		inputs;
+	std::vector<Symbol>		outputs;
+	std::string				globalDeclarations;	//!< These are placed into global scope. Can contain uniform declarations for example.
+	std::string				source;				//!< Source snippet to be executed.
+
+	ShaderSpec (void) {}
+};
+
+// UniformSetup
+
+class UniformDataBase;
+class ShaderExecutor;
+
+typedef de::SharedPtr<de::UniquePtr<UniformDataBase> > UniformDataSp;
+
+class UniformSetup
+{
+public:
+										UniformSetup		(void) {}
+	virtual								~UniformSetup		(void) {}
+
+	void								addData				(UniformDataBase* uniformData)
+										{
+											m_uniforms.push_back(UniformDataSp(new de::UniquePtr<UniformDataBase>(uniformData)));
+										}
+
+	const std::vector<UniformDataSp>&	uniforms			(void) const
+										{
+											return m_uniforms;
+										}
+
+private:
+										UniformSetup		(const UniformSetup&);	// not allowed!
+	UniformSetup&						operator=			(const UniformSetup&);	// not allowed!
+
+	std::vector<UniformDataSp>			m_uniforms;
+};
+
+//! Base class for shader executor.
+class ShaderExecutor
+{
+public:
+	virtual					~ShaderExecutor		(void);
+
+	//! Log executor details (program etc.).
+	virtual void			log					(tcu::TestLog& log) const = 0;
+
+	//! Execute
+	virtual void			execute				(const Context& ctx, int numValues, const void* const* inputs, void* const* outputs) = 0;
+
+	virtual void			setShaderSources	(SourceCollections& programCollection) const = 0;
+
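+	//! Takes ownership of the given UniformSetup instance.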
+	void					setUniforms			(const UniformSetup* uniformSetup)
+												{
+													m_uniformSetup = de::MovePtr<const UniformSetup>(uniformSetup);
+												}
+
+	void					setupUniformData	(const VkDevice&			vkDevice,
+												 const DeviceInterface&		vk,
+												 const VkQueue				queue,
+												 const deUint32				queueFamilyIndex,
+												 Allocator&					memAlloc,
+												 deUint32					bindingLocation,
+												 VkDescriptorType			descriptorType,
+												 deUint32					size,
+												 const void*				dataPtr);
+
+	void					setupSamplerData	(const VkDevice&			vkDevice,
+												 const DeviceInterface&		vk,
+												 const VkQueue				queue,
+												 const deUint32				queueFamilyIndex,
+												 Allocator&					memAlloc,
+												 deUint32					bindingLocation,
+												 deUint32					numSamplers,
+												 const tcu::Sampler&		refSampler,
+												 const tcu::TextureFormat&	texFormat,
+												 const tcu::IVec3&			texSize,
+												 VkImageType				imageType,
+												 VkImageViewType			imageViewType,
+												 const void*				data);
+
+	const void*				getBufferPtr		(const deUint32 bindingLocation) const;
+
+protected:
+							ShaderExecutor		(const ShaderSpec& shaderSpec, glu::ShaderType shaderType);
+
+	void					addUniforms			(const VkDevice& vkDevice, const DeviceInterface& vk, const VkQueue queue, const deUint32 queueFamilyIndex, Allocator& memAlloc);
+
+	void					uploadUniforms		(DescriptorSetUpdateBuilder& descriptorSetUpdateBuilder, VkDescriptorSet descriptorSet);
+
+	class UniformInfo;
+	typedef de::SharedPtr<de::UniquePtr<UniformInfo> >			UniformInfoSp;
+
+	class SamplerUniform;
+	typedef de::SharedPtr<de::UniquePtr<SamplerUniform> >		SamplerUniformSp;
+
+	typedef de::SharedPtr<Unique<VkBuffer> >			VkBufferSp;
+	typedef de::SharedPtr<Unique<VkImage> >				VkImageSp;
+	typedef de::SharedPtr<Unique<VkImageView> >			VkImageViewSp;
+	typedef de::SharedPtr<Unique<VkSampler> >			VkSamplerSp;
+	typedef de::SharedPtr<Allocation>					AllocationSp;
+
+	class UniformInfo
+	{
+	public:
+									UniformInfo			(void) {}
+		virtual						~UniformInfo		(void) {}
+		virtual bool				isSamplerArray		(void) const { return false; }
+		virtual bool				isBufferUniform		(void) const { return false; }
+		virtual bool				isSamplerUniform	(void) const { return false; }
+
+		VkDescriptorType			type;
+		deUint32					location;
+	};
+
+	class BufferUniform : public UniformInfo
+	{
+	public:
+									BufferUniform		(void) {}
+		virtual						~BufferUniform		(void) {}
+		virtual bool				isBufferUniform		(void) const { return true; }
+
+		VkBufferSp					buffer;
+		AllocationSp				alloc;
+		VkDescriptorBufferInfo		descriptor;
+	};
+
+	class SamplerUniform : public UniformInfo
+	{
+	public:
+									SamplerUniform		(void) {}
+		virtual						~SamplerUniform		(void) {}
+		virtual bool				isSamplerUniform	(void) const { return true; }
+		VkImageSp					image;
+		VkImageViewSp				imageView;
+		VkSamplerSp					sampler;
+		AllocationSp				alloc;
+		VkDescriptorImageInfo		descriptor;
+	};
+
+	class SamplerArrayUniform : public UniformInfo
+	{
+	public:
+											SamplerArrayUniform		(void) {}
+		virtual								~SamplerArrayUniform	(void) {}
+		virtual bool						isSamplerArray			(void) const { return true; }
+
+		std::vector<SamplerUniformSp>		uniforms;
+	};
+
+	void									uploadImage					(const VkDevice&				vkDevice,
+																		 const DeviceInterface&			vk,
+																		 const VkQueue					queue,
+																		 const deUint32					queueFamilyIndex,
+																		 Allocator&						memAlloc,
+																		 const tcu::TextureFormat&		texFormat,
+																		 const tcu::IVec3&				texSize,
+																		 const void*					data,
+																		 const deUint32					arraySize,
+																		 const VkImageAspectFlags		aspectMask,
+																		 VkImage						destImage);
+
+	de::MovePtr<SamplerUniform>				createSamplerUniform		(const VkDevice&				vkDevice,
+																		 const DeviceInterface&			vk,
+																		 const VkQueue					queue,
+																		 const deUint32					queueFamilyIndex,
+																		 Allocator&						memAlloc,
+																		 deUint32						bindingLocation,
+																		 const tcu::Sampler&			refSampler,
+																		 const tcu::TextureFormat&		texFormat,
+																		 const tcu::IVec3&				texSize,
+																		 VkImageType					imageType,
+																		 VkImageViewType				imageViewType,
+																		 const void*					data);
+
+	const ShaderSpec									m_shaderSpec;
+	const glu::ShaderType								m_shaderType;
+
+	std::vector<UniformInfoSp>							m_uniformInfos;
+	de::MovePtr<const UniformSetup>						m_uniformSetup;
+	DescriptorSetLayoutBuilder							m_descriptorSetLayoutBuilder;
+	DescriptorPoolBuilder								m_descriptorPoolBuilder;
+
+};
+
+inline tcu::TestLog& operator<< (tcu::TestLog& log, const ShaderExecutor* executor) { executor->log(log); return log; }
+inline tcu::TestLog& operator<< (tcu::TestLog& log, const ShaderExecutor& executor) { executor.log(log); return log; }
+
+ShaderExecutor* createExecutor(glu::ShaderType shaderType, const ShaderSpec& shaderSpec);
+
+class UniformDataBase
+{
+public:
+							UniformDataBase		(deUint32 bindingLocation)
+													: m_bindingLocation		(bindingLocation)
+												{
+												}
+	virtual					~UniformDataBase	(void) {}
+	virtual void			setup				(ShaderExecutor&, const VkDevice&, const DeviceInterface&, const VkQueue, const deUint32, Allocator&) const = 0;
+
+protected:
+	const deUint32			m_bindingLocation;
+};
+
+template<typename T>
+class UniformData : public UniformDataBase
+{
+public:
+							UniformData			(deUint32 bindingLocation, VkDescriptorType descriptorType, const T data);
+	virtual					~UniformData		(void);
+	virtual void			setup				(ShaderExecutor& executor, const VkDevice& vkDevice, const DeviceInterface& vk, const VkQueue queue, const deUint32 queueFamilyIndex, Allocator& memAlloc) const;
+
+private:
+	VkDescriptorType		m_descriptorType;
+	T						m_data;
+};
+
+template<typename T>
+UniformData<T>::UniformData (deUint32 bindingLocation, VkDescriptorType descriptorType, const T data)
+	: UniformDataBase		(bindingLocation)
+	, m_descriptorType		(descriptorType)
+	, m_data				(data)
+{
+}
+
+template<typename T>
+UniformData<T>::~UniformData (void)
+{
+}
+
+template<typename T>
+void UniformData<T>::setup (ShaderExecutor& executor, const VkDevice& vkDevice, const DeviceInterface& vk, const VkQueue queue, const deUint32 queueFamilyIndex, Allocator& memAlloc) const
+{
+	executor.setupUniformData(vkDevice, vk, queue, queueFamilyIndex, memAlloc, m_bindingLocation, m_descriptorType, sizeof(T), &m_data);
+}
+
+class SamplerUniformData : public UniformDataBase
+{
+public:
+							SamplerUniformData	(deUint32						bindingLocation,
+												 deUint32						numSamplers,
+												 const tcu::Sampler&			refSampler,
+												 const tcu::TextureFormat&		texFormat,
+												 const tcu::IVec3&				texSize,
+												 VkImageType					imageType,
+												 VkImageViewType				imageViewType,
+												 const void*					data);
+	virtual					~SamplerUniformData	(void);
+	virtual void			setup				(ShaderExecutor& executor, const VkDevice& vkDevice, const DeviceInterface& vk, const VkQueue queue, const deUint32 queueFamilyIndex, Allocator& memAlloc) const;
+
+private:
+	deUint32					m_numSamplers;
+	const tcu::Sampler			m_refSampler;
+	const tcu::TextureFormat	m_texFormat;
+	const tcu::IVec3			m_texSize;
+	VkImageType					m_imageType;
+	VkImageViewType				m_imageViewType;
+	const void*					m_data;
+};
+
+} // shaderexecutor
+} // vkt
+
+#endif // _VKTSHADEREXECUTOR_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderIntegerFunctionTests.cpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderIntegerFunctionTests.cpp
new file mode 100644
index 0000000..67b7631
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderIntegerFunctionTests.cpp
@@ -0,0 +1,1358 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Integer built-in function tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderIntegerFunctionTests.hpp"
+#include "vktShaderExecutor.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuFormatUtil.hpp"
+#include "tcuFloat.hpp"
+#include "deRandom.hpp"
+#include "deMath.h"
+#include "deString.h"
+#include "deInt32.h"
+#include "deSharedPtr.hpp"
+
+#include <iostream>
+
+namespace vkt
+{
+namespace shaderexecutor
+{
+
+using std::vector;
+using std::string;
+using tcu::TestLog;
+
+using tcu::IVec2;
+using tcu::IVec3;
+using tcu::IVec4;
+using tcu::UVec2;
+using tcu::UVec3;
+using tcu::UVec4;
+
+// Utilities
+
+namespace
+{
+
+struct HexFloat
+{
+	const float value;
+	HexFloat (const float value_) : value(value_) {}
+};
+
+std::ostream& operator<< (std::ostream& str, const HexFloat& v)
+{
+	return str << v.value << " / " << tcu::toHex(tcu::Float32(v.value).bits());
+}
+
+struct VarValue
+{
+	const glu::VarType&	type;
+	const void*			value;
+
+	VarValue (const glu::VarType& type_, const void* value_) : type(type_), value(value_) {}
+};
+
+std::ostream& operator<< (std::ostream& str, const VarValue& varValue)
+{
+	DE_ASSERT(varValue.type.isBasicType());
+
+	const glu::DataType		basicType		= varValue.type.getBasicType();
+	const glu::DataType		scalarType		= glu::getDataTypeScalarType(basicType);
+	const int				numComponents	= glu::getDataTypeScalarSize(basicType);
+
+	if (numComponents > 1)
+		str << glu::getDataTypeName(basicType) << "(";
+
+	for (int compNdx = 0; compNdx < numComponents; compNdx++)
+	{
+		if (compNdx != 0)
+			str << ", ";
+
+		switch (scalarType)
+		{
+			case glu::TYPE_FLOAT:	str << HexFloat(((const float*)varValue.value)[compNdx]);						break;
+			case glu::TYPE_INT:		str << ((const deInt32*)varValue.value)[compNdx];								break;
+			case glu::TYPE_UINT:	str << tcu::toHex(((const deUint32*)varValue.value)[compNdx]);					break;
+			case glu::TYPE_BOOL:	str << (((const deUint32*)varValue.value)[compNdx] != 0 ? "true" : "false");	break;
+
+			default:
+				DE_ASSERT(false);
+		}
+	}
+
+	if (numComponents > 1)
+		str << ")";
+
+	return str;
+}
+
+inline int getShaderUintBitCount (glu::ShaderType shaderType, glu::Precision precision)
+{
+	// \todo [2013-10-31 pyry] Query from GL for vertex and fragment shaders.
+	DE_UNREF(shaderType);
+	const int bitCounts[] = { 9, 16, 32 };
+	DE_STATIC_ASSERT(DE_LENGTH_OF_ARRAY(bitCounts) == glu::PRECISION_LAST);
+	return bitCounts[precision];
+}
+
+static inline deUint32 extendSignTo32 (deUint32 integer, deUint32 integerLength)
+{
+	DE_ASSERT(integerLength > 0 && integerLength <= 32);
+
+	return deUint32(0 - deInt32((integer & (1 << (integerLength - 1))) << 1)) | integer;
+}
+
+static inline deUint32 getLowBitMask (int integerLength)
+{
+	DE_ASSERT(integerLength >= 0 && integerLength <= 32);
+
+	// \note: shifting by 32 or more bits is undefined behavior. Avoid it by shifting in two parts: ((1 << (num-1)) << 1)
+	if (integerLength == 0u)
+		return 0u;
+	return ((1u << ((deUint32)integerLength - 1u)) << 1u) - 1u;
+}
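For reference, a standalone sketch of how these two helpers combine for a mediump-style 16-bit value; plain <cstdint> types stand in for deUint32/deInt32 and the function names and values are illustrative only, not CTS symbols.

#include <cassert>
#include <cstdint>

// Mirrors extendSignTo32: replicate bit (len-1) into the high bits.
static uint32_t extendSign (uint32_t v, uint32_t len)
{
	return uint32_t(0 - int32_t((v & (1u << (len - 1u))) << 1u)) | v;
}

// Mirrors getLowBitMask: mask of the len lowest bits, built in two shifts so len == 32 is safe.
static uint32_t lowMask (int len)
{
	return len == 0 ? 0u : ((1u << (uint32_t(len) - 1u)) << 1u) - 1u;
}

int main (void)
{
	assert(lowMask(16) == 0xffffu && lowMask(32) == 0xffffffffu);
	assert(extendSign(0x8001u, 16) == 0xffff8001u);	// 16-bit -32767 sign-extended to 32 bits
	assert(extendSign(0x1234u, 16) == 0x00001234u);	// positive values pass through unchanged
	return 0;
}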
+
+static void generateRandomInputData (de::Random& rnd, glu::ShaderType shaderType, glu::DataType dataType, glu::Precision precision, deUint32* dst, int numValues)
+{
+	const int				scalarSize		= glu::getDataTypeScalarSize(dataType);
+	const deUint32			integerLength	= (deUint32)getShaderUintBitCount(shaderType, precision);
+	const deUint32			integerMask		= getLowBitMask(integerLength);
+	const bool				isUnsigned		= glu::isDataTypeUintOrUVec(dataType);
+
+	if (isUnsigned)
+	{
+		for (int valueNdx = 0; valueNdx < numValues; ++valueNdx)
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+				dst[valueNdx*scalarSize + compNdx] = rnd.getUint32() & integerMask;
+	}
+	else
+	{
+		for (int valueNdx = 0; valueNdx < numValues; ++valueNdx)
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+				dst[valueNdx*scalarSize + compNdx] = extendSignTo32(rnd.getUint32() & integerMask, integerLength);
+	}
+}
+
+static vector<int> getScalarSizes (const vector<Symbol>& symbols)
+{
+	vector<int> sizes(symbols.size());
+	for (int ndx = 0; ndx < (int)symbols.size(); ++ndx)
+		sizes[ndx] = symbols[ndx].varType.getScalarSize();
+	return sizes;
+}
+
+static int computeTotalScalarSize (const vector<Symbol>& symbols)
+{
+	int totalSize = 0;
+	for (vector<Symbol>::const_iterator sym = symbols.begin(); sym != symbols.end(); ++sym)
+		totalSize += sym->varType.getScalarSize();
+	return totalSize;
+}
+
+static vector<void*> getInputOutputPointers (const vector<Symbol>& symbols, vector<deUint32>& data, const int numValues)
+{
+	vector<void*>	pointers		(symbols.size());
+	int				curScalarOffset	= 0;
+
+	for (int varNdx = 0; varNdx < (int)symbols.size(); ++varNdx)
+	{
+		const Symbol&	var				= symbols[varNdx];
+		const int		scalarSize		= var.varType.getScalarSize();
+
+		// Uses planar layout as input/output specs do not support strides.
+		pointers[varNdx] = &data[curScalarOffset];
+		curScalarOffset += scalarSize*numValues;
+	}
+
+	DE_ASSERT(curScalarOffset == (int)data.size());
+
+	return pointers;
+}
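The planar layout keeps all invocations of one symbol contiguous, one symbol after another, so per-value pointers are later stepped by the symbol's scalar size rather than by a per-invocation stride. A small standalone illustration with made-up symbol sizes:

#include <cstdio>
#include <cstdint>
#include <vector>

int main (void)
{
	// Hypothetical spec: symbol A has 1 scalar, symbol B has 2 scalars, 3 values each.
	// Planar storage: [ A0 A1 A2 | B0.x B0.y B1.x B1.y B2.x B2.y ]
	const int				numValues = 3;
	std::vector<uint32_t>	data(1*numValues + 2*numValues);
	uint32_t*				ptrA = &data[0];				// A starts at scalar offset 0
	uint32_t*				ptrB = &data[1*numValues];		// B starts after all of A's values
	uint32_t*				b2   = ptrB + 2*2;				// value #2 of B: step by scalarSize*valNdx
	std::printf("B[2] lives at flat offset %d\n", int(b2 - &data[0]));	// prints 7
	(void)ptrA;
	return 0;
}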
+
+static const char* getPrecisionPostfix (glu::Precision precision)
+{
+	static const char* s_postfix[] =
+	{
+		"_lowp",
+		"_mediump",
+		"_highp"
+	};
+	DE_STATIC_ASSERT(DE_LENGTH_OF_ARRAY(s_postfix) == glu::PRECISION_LAST);
+	DE_ASSERT(de::inBounds<int>(precision, 0, DE_LENGTH_OF_ARRAY(s_postfix)));
+	return s_postfix[precision];
+}
+
+static const char* getShaderTypePostfix (glu::ShaderType shaderType)
+{
+	static const char* s_postfix[] =
+	{
+		"_vertex",
+		"_fragment",
+		"_geometry",
+		"_tess_control",
+		"_tess_eval",
+		"_compute"
+	};
+	DE_ASSERT(de::inBounds<int>(shaderType, 0, DE_LENGTH_OF_ARRAY(s_postfix)));
+	return s_postfix[shaderType];
+}
+
+static std::string getIntegerFuncCaseName (glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+{
+	return string(glu::getDataTypeName(baseType)) + getPrecisionPostfix(precision) + getShaderTypePostfix(shaderType);
+}
+
+static inline deUint32 reverseBits (deUint32 v)
+{
+	v = (((v & 0xaaaaaaaa) >> 1) | ((v & 0x55555555) << 1));
+	v = (((v & 0xcccccccc) >> 2) | ((v & 0x33333333) << 2));
+	v = (((v & 0xf0f0f0f0) >> 4) | ((v & 0x0f0f0f0f) << 4));
+	v = (((v & 0xff00ff00) >> 8) | ((v & 0x00ff00ff) << 8));
+	return (v >> 16) | (v << 16);
+}
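The swap ladder reverses adjacent bits, then 2-bit pairs, nibbles, bytes and finally the two 16-bit halves. A standalone cross-check against a naive loop (nothing beyond <cstdint> assumed):

#include <cassert>
#include <cstdint>

static uint32_t reverseFast (uint32_t v)
{
	v = ((v & 0xaaaaaaaau) >> 1) | ((v & 0x55555555u) << 1);
	v = ((v & 0xccccccccu) >> 2) | ((v & 0x33333333u) << 2);
	v = ((v & 0xf0f0f0f0u) >> 4) | ((v & 0x0f0f0f0fu) << 4);
	v = ((v & 0xff00ff00u) >> 8) | ((v & 0x00ff00ffu) << 8);
	return (v >> 16) | (v << 16);
}

static uint32_t reverseNaive (uint32_t v)
{
	uint32_t r = 0u;
	for (int i = 0; i < 32; i++)
		r |= ((v >> i) & 1u) << (31 - i);
	return r;
}

int main (void)
{
	const uint32_t samples[] = { 0x00000001u, 0x80000000u, 0x12345678u, 0xdeadbeefu };
	for (uint32_t v : samples)
		assert(reverseFast(v) == reverseNaive(v));
	return 0;
}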
+
+static int findLSB (deUint32 value)
+{
+	for (int i = 0; i < 32; i++)
+	{
+		if (value & (1u<<i))
+			return i;
+	}
+	return -1;
+}
+
+static int findMSB (deInt32 value)
+{
+	if (value > 0)
+		return 31 - deClz32((deUint32)value);
+	else if (value < 0)
+		return 31 - deClz32(~(deUint32)value);
+	else
+		return -1;
+}
+
+static int findMSB (deUint32 value)
+{
+	if (value > 0)
+		return 31 - deClz32(value);
+	else
+		return -1;
+}
+
+static deUint32 toPrecision (deUint32 value, int numIntegerBits)
+{
+	return value & getLowBitMask(numIntegerBits);
+}
+
+static deInt32 toPrecision (deInt32 value, int numIntegerBits)
+{
+	return (deInt32)extendSignTo32((deUint32)value & getLowBitMask(numIntegerBits), numIntegerBits);
+}
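These references follow the GLSL conventions: findLSB/findMSB return -1 when no qualifying bit exists, and for negative signed inputs findMSB reports the highest bit that differs from the sign bit. A quick standalone check, with a portable loop standing in for deClz32:

#include <cassert>
#include <cstdint>

static int clz32 (uint32_t v)		// stand-in for deClz32; returns 32 for zero
{
	int n = 32;
	while (v) { v >>= 1; n--; }
	return n;
}

static int refFindMSB (int32_t value)
{
	if (value > 0)	return 31 - clz32((uint32_t)value);
	if (value < 0)	return 31 - clz32(~(uint32_t)value);
	return -1;
}

int main (void)
{
	assert(refFindMSB(0)	== -1);
	assert(refFindMSB(1)	==  0);
	assert(refFindMSB(256)	==  8);
	assert(refFindMSB(-1)	== -1);	// every bit equals the sign bit
	assert(refFindMSB(-2)	==  0);	// ~(-2) == 1, highest differing bit is bit 0
	return 0;
}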
+
+template<class TestClass>
+static void addFunctionCases (tcu::TestCaseGroup* parent, const char* functionName, bool intTypes, bool uintTypes, bool allPrec, deUint32 shaderBits)
+{
+	tcu::TestCaseGroup* group = new tcu::TestCaseGroup(parent->getTestContext(), functionName, functionName);
+
+	parent->addChild(group);
+	const glu::DataType scalarTypes[] =
+	{
+		glu::TYPE_INT,
+		glu::TYPE_UINT
+	};
+
+	for (int scalarTypeNdx = 0; scalarTypeNdx < DE_LENGTH_OF_ARRAY(scalarTypes); scalarTypeNdx++)
+	{
+		const glu::DataType scalarType = scalarTypes[scalarTypeNdx];
+
+		if ((!intTypes && scalarType == glu::TYPE_INT) || (!uintTypes && scalarType == glu::TYPE_UINT))
+			continue;
+
+		for (int vecSize = 1; vecSize <= 4; vecSize++)
+		{
+			for (int prec = glu::PRECISION_MEDIUMP; prec <= glu::PRECISION_HIGHP; prec++)
+			{
+				if (prec != glu::PRECISION_HIGHP && !allPrec)
+					continue;
+
+				for (int shaderTypeNdx = 0; shaderTypeNdx < glu::SHADERTYPE_LAST; shaderTypeNdx++)
+				{
+					if (shaderBits & (1<<shaderTypeNdx))
+						group->addChild(new TestClass(parent->getTestContext(), glu::DataType(scalarType + vecSize - 1), glu::Precision(prec), glu::ShaderType(shaderTypeNdx)));
+				}
+			}
+		}
+	}
+}
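Each call expands one built-in into a matrix of (scalar type x vector size x precision x shader stage) children. Assuming all six stages are enabled, a fully enabled entry with both integer types and allPrec set yields 2 * 4 * 2 * 6 = 96 cases, since the precision loop only visits mediump and highp:

#include <cstdio>

int main (void)
{
	const int scalarTypes	= 2;	// int, uint
	const int vecSizes		= 4;	// scalar .. 4-component vector
	const int precisions	= 2;	// mediump, highp (lowp is never generated)
	const int stages		= 6;	// vertex .. compute
	std::printf("cases per fully enabled function: %d\n", scalarTypes*vecSizes*precisions*stages);
	return 0;
}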
+
+} // anonymous
+
+// IntegerFunctionCase
+
+class IntegerFunctionCase : public TestCase
+{
+public:
+										IntegerFunctionCase		(tcu::TestContext& testCtx, const char* name, const char* description, glu::ShaderType shaderType);
+										~IntegerFunctionCase	(void);
+
+	virtual	void						initPrograms			(vk::SourceCollections& programCollection) const
+										{
+											m_executor->setShaderSources(programCollection);
+										}
+
+	virtual TestInstance*				createInstance			(Context& context) const = 0;
+	virtual void						init					(void);
+
+protected:
+										IntegerFunctionCase		(const IntegerFunctionCase& other);
+	IntegerFunctionCase&				operator=				(const IntegerFunctionCase& other);
+
+	const glu::ShaderType				m_shaderType;
+
+	ShaderSpec							m_spec;
+
+	de::MovePtr<ShaderExecutor>			m_executor;
+
+	const int							m_numValues;
+};
+
+IntegerFunctionCase::IntegerFunctionCase (tcu::TestContext& testCtx, const char* name, const char* description, glu::ShaderType shaderType)
+	: TestCase		(testCtx, name, description)
+	, m_shaderType	(shaderType)
+	, m_executor	(DE_NULL)
+	, m_numValues	(100)
+{
+}
+
+IntegerFunctionCase::~IntegerFunctionCase (void)
+{
+}
+
+void IntegerFunctionCase::init (void)
+{
+	DE_ASSERT(!m_executor);
+
+	m_executor = de::MovePtr<ShaderExecutor>(createExecutor(m_shaderType, m_spec));
+	m_testCtx.getLog() << *m_executor;
+}
+
+// IntegerFunctionTestInstance
+
+class IntegerFunctionTestInstance : public TestInstance
+{
+public:
+								IntegerFunctionTestInstance		(Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+									: TestInstance	(context)
+									, m_shaderType	(shaderType)
+									, m_spec		(spec)
+									, m_numValues	(numValues)
+									, m_name		(name)
+									, m_executor	(executor)
+								{
+								}
+	virtual tcu::TestStatus		iterate							(void);
+protected:
+	virtual bool						compare					(const void* const* inputs, const void* const* outputs) = 0;
+
+	virtual void						getInputValues			(int numValues, void* const* values) const = 0;
+
+	const glu::ShaderType				m_shaderType;
+
+	ShaderSpec							m_spec;
+
+	const int							m_numValues;
+
+	const char*							m_name;
+
+	std::ostringstream					m_failMsg;				//!< Comparison failure help message.
+
+	ShaderExecutor&						m_executor;
+};
+
+tcu::TestStatus IntegerFunctionTestInstance::iterate (void)
+{
+	const int				numInputScalars			= computeTotalScalarSize(m_spec.inputs);
+	const int				numOutputScalars		= computeTotalScalarSize(m_spec.outputs);
+	vector<deUint32>		inputData				(numInputScalars * m_numValues);
+	vector<deUint32>		outputData				(numOutputScalars * m_numValues);
+	const vector<void*>		inputPointers			= getInputOutputPointers(m_spec.inputs, inputData, m_numValues);
+	const vector<void*>		outputPointers			= getInputOutputPointers(m_spec.outputs, outputData, m_numValues);
+
+	// Initialize input data.
+	getInputValues(m_numValues, &inputPointers[0]);
+
+	// Execute shader.
+	m_executor.execute(m_context, m_numValues, &inputPointers[0], &outputPointers[0]);
+
+	// Compare results.
+	{
+		const vector<int>		inScalarSizes		= getScalarSizes(m_spec.inputs);
+		const vector<int>		outScalarSizes		= getScalarSizes(m_spec.outputs);
+		vector<void*>			curInputPtr			(inputPointers.size());
+		vector<void*>			curOutputPtr		(outputPointers.size());
+		int						numFailed			= 0;
+		tcu::TestContext&		testCtx				= m_context.getTestContext();
+		for (int valNdx = 0; valNdx < m_numValues; valNdx++)
+		{
+			// Set up pointers for comparison.
+			for (int inNdx = 0; inNdx < (int)curInputPtr.size(); ++inNdx)
+				curInputPtr[inNdx] = (deUint32*)inputPointers[inNdx] + inScalarSizes[inNdx]*valNdx;
+
+			for (int outNdx = 0; outNdx < (int)curOutputPtr.size(); ++outNdx)
+				curOutputPtr[outNdx] = (deUint32*)outputPointers[outNdx] + outScalarSizes[outNdx]*valNdx;
+
+			if (!compare(&curInputPtr[0], &curOutputPtr[0]))
+			{
+				// \todo [2013-08-08 pyry] We probably want to log reference value as well?
+
+				testCtx.getLog() << TestLog::Message << "ERROR: comparison failed for value " << valNdx << ":\n  " << m_failMsg.str() << TestLog::EndMessage;
+
+				testCtx.getLog() << TestLog::Message << "  inputs:" << TestLog::EndMessage;
+				for (int inNdx = 0; inNdx < (int)curInputPtr.size(); inNdx++)
+					testCtx.getLog() << TestLog::Message << "    " << m_spec.inputs[inNdx].name << " = "
+														   << VarValue(m_spec.inputs[inNdx].varType, curInputPtr[inNdx])
+									   << TestLog::EndMessage;
+
+				testCtx.getLog() << TestLog::Message << "  outputs:" << TestLog::EndMessage;
+				for (int outNdx = 0; outNdx < (int)curOutputPtr.size(); outNdx++)
+					testCtx.getLog() << TestLog::Message << "    " << m_spec.outputs[outNdx].name << " = "
+														   << VarValue(m_spec.outputs[outNdx].varType, curOutputPtr[outNdx])
+									   << TestLog::EndMessage;
+
+				m_failMsg.str("");
+				m_failMsg.clear();
+				numFailed += 1;
+			}
+		}
+
+		testCtx.getLog() << TestLog::Message << (m_numValues - numFailed) << " / " << m_numValues << " values passed" << TestLog::EndMessage;
+
+		if (numFailed == 0)
+			return tcu::TestStatus::pass("Pass");
+		else
+			return tcu::TestStatus::fail("Result comparison failed");
+	}
+}
+
+// Test cases
+
+class UaddCarryCaseInstance : public IntegerFunctionTestInstance
+{
+public:
+	UaddCarryCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: IntegerFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		de::Random				rnd				(deStringHash(m_name) ^ 0x235facu);
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const int				integerLength	= getShaderUintBitCount(m_shaderType, precision);
+		const deUint32			integerMask		= getLowBitMask(integerLength);
+		const bool				isSigned		= glu::isDataTypeIntOrIVec(type);
+		deUint32*				in0				= (deUint32*)values[0];
+		deUint32*				in1				= (deUint32*)values[1];
+
+		const struct
+		{
+			deUint32	x;
+			deUint32	y;
+		} easyCases[] =
+		{
+			{ 0x00000000u,	0x00000000u },
+			{ 0xfffffffeu,	0x00000001u },
+			{ 0x00000001u,	0xfffffffeu },
+			{ 0xffffffffu,	0x00000001u },
+			{ 0x00000001u,	0xffffffffu },
+			{ 0xfffffffeu,	0x00000002u },
+			{ 0x00000002u,	0xfffffffeu },
+			{ 0xffffffffu,	0xffffffffu }
+		};
+
+		// generate integers with proper bit count
+		for (int easyCaseNdx = 0; easyCaseNdx < DE_LENGTH_OF_ARRAY(easyCases); easyCaseNdx++)
+		{
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				in0[easyCaseNdx*scalarSize + compNdx] = easyCases[easyCaseNdx].x & integerMask;
+				in1[easyCaseNdx*scalarSize + compNdx] = easyCases[easyCaseNdx].y & integerMask;
+			}
+		}
+
+		// convert to signed
+		if (isSigned)
+		{
+			for (int easyCaseNdx = 0; easyCaseNdx < DE_LENGTH_OF_ARRAY(easyCases); easyCaseNdx++)
+			{
+				for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+				{
+					in0[easyCaseNdx*scalarSize + compNdx] = extendSignTo32(in0[easyCaseNdx*scalarSize + compNdx], integerLength);
+					in1[easyCaseNdx*scalarSize + compNdx] = extendSignTo32(in1[easyCaseNdx*scalarSize + compNdx], integerLength);
+				}
+			}
+		}
+
+		generateRandomInputData(rnd, m_shaderType, type, precision, in0, numValues - DE_LENGTH_OF_ARRAY(easyCases));
+		generateRandomInputData(rnd, m_shaderType, type, precision, in1, numValues - DE_LENGTH_OF_ARRAY(easyCases));
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const int				integerLength	= getShaderUintBitCount(m_shaderType, precision);
+		const deUint32			mask0			= getLowBitMask(integerLength);
+
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const deUint32	in0		= ((const deUint32*)inputs[0])[compNdx];
+			const deUint32	in1		= ((const deUint32*)inputs[1])[compNdx];
+			const deUint32	out0	= ((const deUint32*)outputs[0])[compNdx];
+			const deUint32	out1	= ((const deUint32*)outputs[1])[compNdx];
+			const deUint32	ref0	= in0+in1;
+			const deUint32	ref1	= (deUint64(in0)+deUint64(in1)) > 0xffffffffu ? 1u : 0u;
+
+			if (((out0&mask0) != (ref0&mask0)) || out1 != ref1)
+			{
+				m_failMsg << "Expected [" << compNdx << "] = " << tcu::toHex(ref0) << ", " << tcu::toHex(ref1);
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
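The reference pair is derived from the exact 64-bit sum: its low word is the expected wrapped result and the carry flag is set whenever the sum exceeds 0xffffffff. Restated standalone with plain fixed-width types (the names here are illustrative, not CTS symbols):

#include <cassert>
#include <cstdint>

static void refUaddCarry (uint32_t x, uint32_t y, uint32_t& sum, uint32_t& carry)
{
	const uint64_t wide = uint64_t(x) + uint64_t(y);
	sum   = uint32_t(wide & 0xffffffffu);
	carry = (wide > 0xffffffffu) ? 1u : 0u;
}

int main (void)
{
	uint32_t s, c;
	refUaddCarry(0xffffffffu, 0x00000001u, s, c);
	assert(s == 0u && c == 1u);				// wraps around, carry set
	refUaddCarry(0xfffffffeu, 0x00000001u, s, c);
	assert(s == 0xffffffffu && c == 0u);	// just below the wrap, no carry
	return 0;
}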
+
+class UaddCarryCase : public IntegerFunctionCase
+{
+public:
+	UaddCarryCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: IntegerFunctionCase	(testCtx, getIntegerFuncCaseName(baseType, precision, shaderType).c_str(), "uaddCarry", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("x", glu::VarType(baseType, precision)));
+		m_spec.inputs.push_back(Symbol("y", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("sum", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("carry", glu::VarType(baseType, glu::PRECISION_LOWP)));
+		m_spec.source = "sum = uaddCarry(x, y, carry);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new UaddCarryCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class UsubBorrowCaseInstance : public IntegerFunctionTestInstance
+{
+public:
+	UsubBorrowCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: IntegerFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		de::Random				rnd				(deStringHash(m_name) ^ 0x235facu);
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const int				integerLength	= getShaderUintBitCount(m_shaderType, precision);
+		const deUint32			integerMask		= getLowBitMask(integerLength);
+		const bool				isSigned		= glu::isDataTypeIntOrIVec(type);
+		deUint32*				in0				= (deUint32*)values[0];
+		deUint32*				in1				= (deUint32*)values[1];
+
+		const struct
+		{
+			deUint32	x;
+			deUint32	y;
+		} easyCases[] =
+		{
+			{ 0x00000000u,	0x00000000u },
+			{ 0x00000001u,	0x00000001u },
+			{ 0x00000001u,	0x00000002u },
+			{ 0x00000001u,	0xffffffffu },
+			{ 0xfffffffeu,	0xffffffffu },
+			{ 0xffffffffu,	0xffffffffu },
+		};
+
+		// generate integers with proper bit count
+		for (int easyCaseNdx = 0; easyCaseNdx < DE_LENGTH_OF_ARRAY(easyCases); easyCaseNdx++)
+		{
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				in0[easyCaseNdx*scalarSize + compNdx] = easyCases[easyCaseNdx].x & integerMask;
+				in1[easyCaseNdx*scalarSize + compNdx] = easyCases[easyCaseNdx].y & integerMask;
+			}
+		}
+
+		// convert to signed
+		if (isSigned)
+		{
+			for (int easyCaseNdx = 0; easyCaseNdx < DE_LENGTH_OF_ARRAY(easyCases); easyCaseNdx++)
+			{
+				for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+				{
+					in0[easyCaseNdx*scalarSize + compNdx] = extendSignTo32(in0[easyCaseNdx*scalarSize + compNdx], integerLength);
+					in1[easyCaseNdx*scalarSize + compNdx] = extendSignTo32(in1[easyCaseNdx*scalarSize + compNdx], integerLength);
+				}
+			}
+		}
+
+		generateRandomInputData(rnd, m_shaderType, type, precision, in0, numValues - DE_LENGTH_OF_ARRAY(easyCases));
+		generateRandomInputData(rnd, m_shaderType, type, precision, in1, numValues - DE_LENGTH_OF_ARRAY(easyCases));
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const int				integerLength	= getShaderUintBitCount(m_shaderType, precision);
+		const deUint32			mask0			= getLowBitMask(integerLength);
+
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const deUint32	in0		= ((const deUint32*)inputs[0])[compNdx];
+			const deUint32	in1		= ((const deUint32*)inputs[1])[compNdx];
+			const deUint32	out0	= ((const deUint32*)outputs[0])[compNdx];
+			const deUint32	out1	= ((const deUint32*)outputs[1])[compNdx];
+			const deUint32	ref0	= in0-in1;
+			const deUint32	ref1	= in0 >= in1 ? 0u : 1u;
+
+			if (((out0&mask0) != (ref0&mask0)) || out1 != ref1)
+			{
+				m_failMsg << "Expected [" << compNdx << "] = " << tcu::toHex(ref0) << ", " << tcu::toHex(ref1);
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
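usubBorrow is the mirror image: the difference wraps modulo 2^32 and the borrow flag is set exactly when the subtrahend is larger than the minuend. A standalone restatement of the reference:

#include <cassert>
#include <cstdint>

static void refUsubBorrow (uint32_t x, uint32_t y, uint32_t& diff, uint32_t& borrow)
{
	diff   = x - y;					// wraps modulo 2^32
	borrow = (x >= y) ? 0u : 1u;
}

int main (void)
{
	uint32_t d, b;
	refUsubBorrow(0x00000001u, 0x00000002u, d, b);
	assert(d == 0xffffffffu && b == 1u);
	refUsubBorrow(0xffffffffu, 0xffffffffu, d, b);
	assert(d == 0u && b == 0u);
	return 0;
}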
+
+class UsubBorrowCase : public IntegerFunctionCase
+{
+public:
+	UsubBorrowCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: IntegerFunctionCase	(testCtx, getIntegerFuncCaseName(baseType, precision, shaderType).c_str(), "usubBorrow", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("x", glu::VarType(baseType, precision)));
+		m_spec.inputs.push_back(Symbol("y", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("diff", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("carry", glu::VarType(baseType, glu::PRECISION_LOWP)));
+		m_spec.source = "diff = usubBorrow(x, y, carry);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new UsubBorrowCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class UmulExtendedCaseInstance : public IntegerFunctionTestInstance
+{
+public:
+	UmulExtendedCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: IntegerFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		de::Random				rnd			(deStringHash(m_name) ^ 0x235facu);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+//		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize	= glu::getDataTypeScalarSize(type);
+		deUint32*				in0			= (deUint32*)values[0];
+		deUint32*				in1			= (deUint32*)values[1];
+		int						valueNdx	= 0;
+
+		const struct
+		{
+			deUint32	x;
+			deUint32	y;
+		} easyCases[] =
+		{
+			{ 0x00000000u,	0x00000000u },
+			{ 0xffffffffu,	0x00000001u },
+			{ 0xffffffffu,	0x00000002u },
+			{ 0x00000001u,	0xffffffffu },
+			{ 0x00000002u,	0xffffffffu },
+			{ 0xffffffffu,	0xffffffffu },
+		};
+
+		for (int easyCaseNdx = 0; easyCaseNdx < DE_LENGTH_OF_ARRAY(easyCases); easyCaseNdx++)
+		{
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				in0[valueNdx*scalarSize + compNdx] = easyCases[easyCaseNdx].x;
+				in1[valueNdx*scalarSize + compNdx] = easyCases[easyCaseNdx].y;
+			}
+
+			valueNdx += 1;
+		}
+
+		while (valueNdx < numValues)
+		{
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const deUint32	base0	= rnd.getUint32();
+				const deUint32	base1	= rnd.getUint32();
+				const int		adj0	= rnd.getInt(0, 20);
+				const int		adj1	= rnd.getInt(0, 20);
+				in0[valueNdx*scalarSize + compNdx] = base0 >> adj0;
+				in1[valueNdx*scalarSize + compNdx] = base1 >> adj1;
+			}
+
+			valueNdx += 1;
+		}
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const deUint32	in0		= ((const deUint32*)inputs[0])[compNdx];
+			const deUint32	in1		= ((const deUint32*)inputs[1])[compNdx];
+			const deUint32	out0	= ((const deUint32*)outputs[0])[compNdx];
+			const deUint32	out1	= ((const deUint32*)outputs[1])[compNdx];
+			const deUint64	mul64	= deUint64(in0)*deUint64(in1);
+			const deUint32	ref0	= deUint32(mul64 >> 32);
+			const deUint32	ref1	= deUint32(mul64 & 0xffffffffu);
+
+			if (out0 != ref0 || out1 != ref1)
+			{
+				m_failMsg << "Expected [" << compNdx << "] = " << tcu::toHex(ref0) << ", " << tcu::toHex(ref1);
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
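The msb/lsb outputs are simply the two halves of the exact 64-bit product. Standalone restatement with plain types:

#include <cassert>
#include <cstdint>

static void refUmulExtended (uint32_t x, uint32_t y, uint32_t& msb, uint32_t& lsb)
{
	const uint64_t prod = uint64_t(x) * uint64_t(y);
	msb = uint32_t(prod >> 32);
	lsb = uint32_t(prod & 0xffffffffu);
}

int main (void)
{
	uint32_t hi, lo;
	refUmulExtended(0xffffffffu, 0xffffffffu, hi, lo);
	assert(hi == 0xfffffffeu && lo == 0x00000001u);	// (2^32-1)^2 = 2^64 - 2^33 + 1
	return 0;
}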
+
+class UmulExtendedCase : public IntegerFunctionCase
+{
+public:
+	UmulExtendedCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: IntegerFunctionCase	(testCtx, getIntegerFuncCaseName(baseType, precision, shaderType).c_str(), "umulExtended", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("x", glu::VarType(baseType, precision)));
+		m_spec.inputs.push_back(Symbol("y", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("msb", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("lsb", glu::VarType(baseType, precision)));
+		m_spec.source = "umulExtended(x, y, msb, lsb);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new UmulExtendedCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class ImulExtendedCaseInstance : public IntegerFunctionTestInstance
+{
+public:
+	ImulExtendedCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: IntegerFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		de::Random				rnd			(deStringHash(m_name) ^ 0x224fa1u);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+//		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize	= glu::getDataTypeScalarSize(type);
+		deUint32*				in0			= (deUint32*)values[0];
+		deUint32*				in1			= (deUint32*)values[1];
+		int						valueNdx	= 0;
+
+		const struct
+		{
+			deUint32	x;
+			deUint32	y;
+		} easyCases[] =
+		{
+			{ 0x00000000u,	0x00000000u },
+			{ 0xffffffffu,	0x00000002u },
+			{ 0x7fffffffu,	0x00000001u },
+			{ 0x7fffffffu,	0x00000002u },
+			{ 0x7fffffffu,	0x7fffffffu },
+			{ 0xffffffffu,	0xffffffffu },
+			{ 0x7fffffffu,	0xfffffffeu },
+		};
+
+		for (int easyCaseNdx = 0; easyCaseNdx < DE_LENGTH_OF_ARRAY(easyCases); easyCaseNdx++)
+		{
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				in0[valueNdx*scalarSize + compNdx] = (deInt32)easyCases[easyCaseNdx].x;
+				in1[valueNdx*scalarSize + compNdx] = (deInt32)easyCases[easyCaseNdx].y;
+			}
+
+			valueNdx += 1;
+		}
+
+		while (valueNdx < numValues)
+		{
+			for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+			{
+				const deInt32	base0	= (deInt32)rnd.getUint32();
+				const deInt32	base1	= (deInt32)rnd.getUint32();
+				const int		adj0	= rnd.getInt(0, 20);
+				const int		adj1	= rnd.getInt(0, 20);
+				in0[valueNdx*scalarSize + compNdx] = base0 >> adj0;
+				in1[valueNdx*scalarSize + compNdx] = base1 >> adj1;
+			}
+
+			valueNdx += 1;
+		}
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const deInt32	in0		= ((const deInt32*)inputs[0])[compNdx];
+			const deInt32	in1		= ((const deInt32*)inputs[1])[compNdx];
+			const deInt32	out0	= ((const deInt32*)outputs[0])[compNdx];
+			const deInt32	out1	= ((const deInt32*)outputs[1])[compNdx];
+			const deInt64	mul64	= deInt64(in0)*deInt64(in1);
+			const deInt32	ref0	= deInt32(mul64 >> 32);
+			const deInt32	ref1	= deInt32(mul64 & 0xffffffffu);
+
+			if (out0 != ref0 || out1 != ref1)
+			{
+				m_failMsg << "Expected [" << compNdx << "] = " << tcu::toHex(ref0) << ", " << tcu::toHex(ref1);
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
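The signed variant splits the exact 64-bit product the same way; the high word keeps the sign. A minimal standalone check for -1 * 2 (plain fixed-width types, arithmetic right shift assumed as in the reference above):

#include <cassert>
#include <cstdint>

int main (void)
{
	const int64_t prod = int64_t(-1) * int64_t(2);					// -2
	const int32_t msb  = int32_t(prod >> 32);						// -1 (sign bits)
	const int32_t lsb  = int32_t(uint64_t(prod) & 0xffffffffu);	// -2 (low word)
	assert(msb == -1 && lsb == -2);
	return 0;
}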
+
+class ImulExtendedCase : public IntegerFunctionCase
+{
+public:
+	ImulExtendedCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: IntegerFunctionCase	(testCtx, getIntegerFuncCaseName(baseType, precision, shaderType).c_str(), "imulExtended", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("x", glu::VarType(baseType, precision)));
+		m_spec.inputs.push_back(Symbol("y", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("msb", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("lsb", glu::VarType(baseType, precision)));
+		m_spec.source = "imulExtended(x, y, msb, lsb);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new ImulExtendedCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class BitfieldExtractCaseInstance : public IntegerFunctionTestInstance
+{
+public:
+	BitfieldExtractCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: IntegerFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		de::Random				rnd			(deStringHash(m_name) ^ 0xa113fca2u);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		const bool				ignoreSign	= precision != glu::PRECISION_HIGHP && glu::isDataTypeIntOrIVec(type);
+		const int				numBits		= getShaderUintBitCount(m_shaderType, precision) - (ignoreSign ? 1 : 0);
+		deUint32*				inValue		= (deUint32*)values[0];
+		int*					inOffset	= (int*)values[1];
+		int*					inBits		= (int*)values[2];
+
+		for (int valueNdx = 0; valueNdx < numValues; ++valueNdx)
+		{
+			const int		bits	= rnd.getInt(0, numBits);
+			const int		offset	= rnd.getInt(0, numBits-bits);
+
+			inOffset[valueNdx]	= offset;
+			inBits[valueNdx]	= bits;
+		}
+
+		generateRandomInputData(rnd, m_shaderType, type, precision, inValue, numValues);
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const bool				isSigned		= glu::isDataTypeIntOrIVec(type);
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const int				offset			= *((const int*)inputs[1]);
+		const int				bits			= *((const int*)inputs[2]);
+
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const deUint32	value	= ((const deUint32*)inputs[0])[compNdx];
+			const deUint32	out		= ((const deUint32*)outputs[0])[compNdx];
+			const deUint32	valMask	= (bits == 32 ? ~0u : ((1u<<bits)-1u));
+			const deUint32	baseVal	= (offset == 32) ? (0) : ((value >> offset) & valMask);
+			const deUint32	ref		= baseVal | ((isSigned && (baseVal & (1<<(bits-1)))) ? ~valMask : 0u);
+
+			if (out != ref)
+			{
+				m_failMsg << "Expected [" << compNdx << "] = " << tcu::toHex(ref);
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
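The reference mirrors GLSL bitfieldExtract: take bits bits starting at offset, then sign-extend the extracted field for signed types. Restated standalone under the assumption 0 <= offset + bits <= 32, which the input generator above guarantees (illustrative names only):

#include <cassert>
#include <cstdint>

static uint32_t refBitfieldExtract (uint32_t value, int offset, int bits, bool isSigned)
{
	const uint32_t valMask	= (bits == 32) ? ~0u : ((1u << bits) - 1u);
	const uint32_t base		= (offset == 32) ? 0u : ((value >> offset) & valMask);
	// Sign-extend when the top bit of the extracted field is set.
	return base | ((isSigned && bits > 0 && (base & (1u << (bits - 1)))) ? ~valMask : 0u);
}

int main (void)
{
	assert(refBitfieldExtract(0x000000f0u, 4, 4, false) == 0x0000000fu);
	assert(refBitfieldExtract(0x000000f0u, 4, 4, true)  == 0xffffffffu);	// all-ones field reads as -1
	assert(refBitfieldExtract(0x12345678u, 8, 8, false) == 0x00000056u);
	return 0;
}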
+
+class BitfieldExtractCase : public IntegerFunctionCase
+{
+public:
+	BitfieldExtractCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: IntegerFunctionCase	(testCtx, getIntegerFuncCaseName(baseType, precision, shaderType).c_str(), "bitfieldExtract", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("value", glu::VarType(baseType, precision)));
+		m_spec.inputs.push_back(Symbol("offset", glu::VarType(glu::TYPE_INT, precision)));
+		m_spec.inputs.push_back(Symbol("bits", glu::VarType(glu::TYPE_INT, precision)));
+		m_spec.outputs.push_back(Symbol("extracted", glu::VarType(baseType, precision)));
+		m_spec.source = "extracted = bitfieldExtract(value, offset, bits);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new BitfieldExtractCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class BitfieldInsertCaseInstance : public IntegerFunctionTestInstance
+{
+public:
+	BitfieldInsertCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: IntegerFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		de::Random				rnd			(deStringHash(m_name) ^ 0x12c2acff);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		const int				numBits		= getShaderUintBitCount(m_shaderType, precision);
+		deUint32*				inBase		= (deUint32*)values[0];
+		deUint32*				inInsert	= (deUint32*)values[1];
+		int*					inOffset	= (int*)values[2];
+		int*					inBits		= (int*)values[3];
+
+		for (int valueNdx = 0; valueNdx < numValues; ++valueNdx)
+		{
+			const int bits		= rnd.getInt(0, numBits);
+			const int offset	= rnd.getInt(0, numBits-bits);
+
+			inOffset[valueNdx]	= offset;
+			inBits[valueNdx]	= bits;
+		}
+
+		generateRandomInputData(rnd, m_shaderType, type, precision, inBase, numValues);
+		generateRandomInputData(rnd, m_shaderType, type, precision, inInsert, numValues);
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const int				integerLength	= getShaderUintBitCount(m_shaderType, precision);
+		const deUint32			cmpMask			= getLowBitMask(integerLength);
+		const int				offset			= *((const int*)inputs[2]);
+		const int				bits			= *((const int*)inputs[3]);
+
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const deUint32	base	= ((const deUint32*)inputs[0])[compNdx];
+			const deUint32	insert	= ((const deUint32*)inputs[1])[compNdx];
+			const deInt32	out		= ((const deUint32*)outputs[0])[compNdx];
+
+			const deUint32	mask	= bits == 32 ? ~0u : (1u<<bits)-1;
+			const deUint32	ref		= (base & ~(mask<<offset)) | ((insert & mask)<<offset);
+
+			if ((out&cmpMask) != (ref&cmpMask))
+			{
+				m_failMsg << "Expected [" << compNdx << "] = " << tcu::toHex(ref);
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
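bitfieldInsert replaces bits bits of base starting at offset with the low bits of insert; the reference is a two-mask blend. Standalone restatement (illustrative names):

#include <cassert>
#include <cstdint>

static uint32_t refBitfieldInsert (uint32_t base, uint32_t insert, int offset, int bits)
{
	const uint32_t mask = (bits == 32) ? ~0u : ((1u << bits) - 1u);
	return (base & ~(mask << offset)) | ((insert & mask) << offset);
}

int main (void)
{
	assert(refBitfieldInsert(0xffffffffu, 0x00u, 8, 8) == 0xffff00ffu);	// clear one byte
	assert(refBitfieldInsert(0x00000000u, 0xabu, 4, 8) == 0x00000ab0u);	// place a byte at bit 4
	return 0;
}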
+
+class BitfieldInsertCase : public IntegerFunctionCase
+{
+public:
+	BitfieldInsertCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: IntegerFunctionCase	(testCtx, getIntegerFuncCaseName(baseType, precision, shaderType).c_str(), "bitfieldInsert", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("base", glu::VarType(baseType, precision)));
+		m_spec.inputs.push_back(Symbol("insert", glu::VarType(baseType, precision)));
+		m_spec.inputs.push_back(Symbol("offset", glu::VarType(glu::TYPE_INT, precision)));
+		m_spec.inputs.push_back(Symbol("bits", glu::VarType(glu::TYPE_INT, precision)));
+		m_spec.outputs.push_back(Symbol("result", glu::VarType(baseType, precision)));
+		m_spec.source = "result = bitfieldInsert(base, insert, offset, bits);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new BitfieldInsertCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class BitfieldReverseCaseInstance : public IntegerFunctionTestInstance
+{
+public:
+	BitfieldReverseCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: IntegerFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		de::Random				rnd			(deStringHash(m_name) ^ 0xff23a4);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		deUint32*				inValue		= (deUint32*)values[0];
+
+		generateRandomInputData(rnd, m_shaderType, type, precision, inValue, numValues);
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				integerLength	= getShaderUintBitCount(m_shaderType, precision);
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const deUint32			cmpMask			= reverseBits(getLowBitMask(integerLength));
+
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const deUint32	value	= ((const deUint32*)inputs[0])[compNdx];
+			const deInt32	out		= ((const deUint32*)outputs[0])[compNdx];
+			const deUint32	ref		= reverseBits(value);
+
+			if ((out&cmpMask) != (ref&cmpMask))
+			{
+				m_failMsg << "Expected [" << compNdx << "] = " << tcu::toHex(ref);
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
+
+class BitfieldReverseCase : public IntegerFunctionCase
+{
+public:
+	BitfieldReverseCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: IntegerFunctionCase	(testCtx, getIntegerFuncCaseName(baseType, precision, shaderType).c_str(), "bitfieldReverse", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("value", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("result", glu::VarType(baseType, glu::PRECISION_HIGHP)));
+		m_spec.source = "result = bitfieldReverse(value);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new BitfieldReverseCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class BitCountCaseInstance : public IntegerFunctionTestInstance
+{
+public:
+	BitCountCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: IntegerFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		de::Random				rnd			(deStringHash(m_name) ^ 0xab2cca4);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		deUint32*				inValue		= (deUint32*)values[0];
+
+		generateRandomInputData(rnd, m_shaderType, type, precision, inValue, numValues);
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				integerLength	= getShaderUintBitCount(m_shaderType, precision);
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const deUint32			countMask		= getLowBitMask(integerLength);
+
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const deUint32	value	= ((const deUint32*)inputs[0])[compNdx];
+			const int		out		= ((const int*)outputs[0])[compNdx];
+			const int		minRef	= dePop32(value&countMask);
+			const int		maxRef	= dePop32(value);
+
+			if (!de::inRange(out, minRef, maxRef))
+			{
+				m_failMsg << "Expected [" << compNdx << "] in range [" << minRef << ", " << maxRef << "]";
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
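Because an implementation running at lower precision may only see the low bits of the input, the test accepts any count between the popcount of the masked value and the popcount of the full value. An illustration with std::bitset as a portable stand-in for dePop32:

#include <bitset>
#include <cassert>
#include <cstdint>

int main (void)
{
	const uint32_t	value	= 0x80010003u;		// bits 31, 16, 1 and 0 set
	const uint32_t	mask	= 0x0000ffffu;		// 16-bit (mediump-style) low-bit mask
	const int		minRef	= (int)std::bitset<32>(value & mask).count();	// 2
	const int		maxRef	= (int)std::bitset<32>(value).count();			// 4
	assert(minRef == 2 && maxRef == 4);
	// Any shader result in [2, 4] is accepted for this input.
	return 0;
}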
+
+class BitCountCase : public IntegerFunctionCase
+{
+public:
+	BitCountCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: IntegerFunctionCase	(testCtx, getIntegerFuncCaseName(baseType, precision, shaderType).c_str(), "bitCount", shaderType)
+	{
+		const int			vecSize		= glu::getDataTypeScalarSize(baseType);
+		const glu::DataType	intType		= vecSize == 1 ? glu::TYPE_INT : glu::getDataTypeIntVec(vecSize);
+
+		m_spec.inputs.push_back(Symbol("value", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("count", glu::VarType(intType, glu::PRECISION_MEDIUMP)));
+		m_spec.source = "count = bitCount(value);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new BitCountCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class FindLSBCaseInstance : public IntegerFunctionTestInstance
+{
+public:
+	FindLSBCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: IntegerFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		de::Random				rnd			(deStringHash(m_name) ^ 0x9923c2af);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		deUint32*				inValue		= (deUint32*)values[0];
+
+		generateRandomInputData(rnd, m_shaderType, type, precision, inValue, numValues);
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const int				integerLength	= getShaderUintBitCount(m_shaderType, precision);
+		const deUint32			mask			= getLowBitMask(integerLength);
+
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const deUint32	value	= ((const deUint32*)inputs[0])[compNdx];
+			const int		out		= ((const int*)outputs[0])[compNdx];
+			const int		minRef	= findLSB(value&mask);
+			const int		maxRef	= findLSB(value);
+
+			if (!de::inRange(out, minRef, maxRef))
+			{
+				m_failMsg << "Expected [" << compNdx << "] in range [" << minRef << ", " << maxRef << "]";
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
+
+class FindLSBCase : public IntegerFunctionCase
+{
+public:
+	FindLSBCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: IntegerFunctionCase	(testCtx, getIntegerFuncCaseName(baseType, precision, shaderType).c_str(), "findLSB", shaderType)
+	{
+		const int			vecSize		= glu::getDataTypeScalarSize(baseType);
+		const glu::DataType	intType		= vecSize == 1 ? glu::TYPE_INT : glu::getDataTypeIntVec(vecSize);
+
+		m_spec.inputs.push_back(Symbol("value", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("lsb", glu::VarType(intType, glu::PRECISION_LOWP)));
+		m_spec.source = "lsb = findLSB(value);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new FindLSBCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+class FindMSBCaseInstance : public IntegerFunctionTestInstance
+{
+public:
+	FindMSBCaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, ShaderExecutor& executor, int numValues, const char* name)
+		: IntegerFunctionTestInstance	(context, shaderType, spec, executor, numValues, name)
+	{
+	}
+
+	void getInputValues (int numValues, void* const* values) const
+	{
+		de::Random				rnd			(deStringHash(m_name) ^ 0x742ac4e);
+		const glu::DataType		type		= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision	= m_spec.inputs[0].varType.getPrecision();
+		deUint32*				inValue		= (deUint32*)values[0];
+
+		generateRandomInputData(rnd, m_shaderType, type, precision, inValue, numValues);
+	}
+
+	bool compare (const void* const* inputs, const void* const* outputs)
+	{
+		const glu::DataType		type			= m_spec.inputs[0].varType.getBasicType();
+		const glu::Precision	precision		= m_spec.inputs[0].varType.getPrecision();
+		const bool				isSigned		= glu::isDataTypeIntOrIVec(type);
+		const int				scalarSize		= glu::getDataTypeScalarSize(type);
+		const int				integerLength	= getShaderUintBitCount(m_shaderType, precision);
+
+		for (int compNdx = 0; compNdx < scalarSize; compNdx++)
+		{
+			const deUint32	value	= ((const deUint32*)inputs[0])[compNdx];
+			const int		out		= ((const deInt32*)outputs[0])[compNdx];
+			const int		minRef	= isSigned ? findMSB(toPrecision(deInt32(value), integerLength))	: findMSB(toPrecision(value, integerLength));
+			const int		maxRef	= isSigned ? findMSB(deInt32(value))								: findMSB(value);
+
+			if (!de::inRange(out, minRef, maxRef))
+			{
+				m_failMsg << "Expected [" << compNdx << "] in range [" << minRef << ", " << maxRef << "]";
+				return false;
+			}
+		}
+
+		return true;
+	}
+};
+
+class FindMSBCase : public IntegerFunctionCase
+{
+public:
+	FindMSBCase (tcu::TestContext& testCtx, glu::DataType baseType, glu::Precision precision, glu::ShaderType shaderType)
+		: IntegerFunctionCase	(testCtx, getIntegerFuncCaseName(baseType, precision, shaderType).c_str(), "findMSB", shaderType)
+	{
+		const int			vecSize		= glu::getDataTypeScalarSize(baseType);
+		const glu::DataType	intType		= vecSize == 1 ? glu::TYPE_INT : glu::getDataTypeIntVec(vecSize);
+
+		m_spec.inputs.push_back(Symbol("value", glu::VarType(baseType, precision)));
+		m_spec.outputs.push_back(Symbol("msb", glu::VarType(intType, glu::PRECISION_LOWP)));
+		m_spec.source = "msb = findMSB(value);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new FindMSBCaseInstance(ctx, m_shaderType, m_spec, *m_executor, m_numValues, getName());
+	}
+};
+
+ShaderIntegerFunctionTests::ShaderIntegerFunctionTests (tcu::TestContext& testCtx)
+	: tcu::TestCaseGroup	(testCtx, "integer", "Integer function tests")
+{
+}
+
+ShaderIntegerFunctionTests::~ShaderIntegerFunctionTests (void)
+{
+}
+
+void ShaderIntegerFunctionTests::init (void)
+{
+	enum
+	{
+		VS = (1<<glu::SHADERTYPE_VERTEX),
+		FS = (1<<glu::SHADERTYPE_FRAGMENT),
+		CS = (1<<glu::SHADERTYPE_COMPUTE),
+		GS = (1<<glu::SHADERTYPE_GEOMETRY),
+		TC = (1<<glu::SHADERTYPE_TESSELLATION_CONTROL),
+		TE = (1<<glu::SHADERTYPE_TESSELLATION_EVALUATION),
+
+		ALL_SHADERS = VS|TC|TE|GS|FS|CS
+	};
+
+	//																		Int?	Uint?	AllPrec?	Shaders
+	addFunctionCases<UaddCarryCase>				(this,	"uaddcarry",		false,	true,	true,		ALL_SHADERS);
+	addFunctionCases<UsubBorrowCase>			(this,	"usubborrow",		false,	true,	true,		ALL_SHADERS);
+	addFunctionCases<UmulExtendedCase>			(this,	"umulextended",		false,	true,	false,		ALL_SHADERS);
+	addFunctionCases<ImulExtendedCase>			(this,	"imulextended",		true,	false,	false,		ALL_SHADERS);
+	addFunctionCases<BitfieldExtractCase>		(this,	"bitfieldextract",	true,	true,	true,		ALL_SHADERS);
+	addFunctionCases<BitfieldInsertCase>		(this,	"bitfieldinsert",	true,	true,	true,		ALL_SHADERS);
+	addFunctionCases<BitfieldReverseCase>		(this,	"bitfieldreverse",	true,	true,	true,		ALL_SHADERS);
+	addFunctionCases<BitCountCase>				(this,	"bitcount",			true,	true,	true,		ALL_SHADERS);
+	addFunctionCases<FindLSBCase>				(this,	"findlsb",			true,	true,	true,		ALL_SHADERS);
+	addFunctionCases<FindMSBCase>				(this,	"findmsb",			true,	true,	true,		ALL_SHADERS);
+}
+
+} // shaderexecutor
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderIntegerFunctionTests.hpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderIntegerFunctionTests.hpp
new file mode 100644
index 0000000..b59fa6d
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderIntegerFunctionTests.hpp
@@ -0,0 +1,63 @@
+#ifndef _VKTSHADERINTEGERFUNCTIONTESTS_HPP
+#define _VKTSHADERINTEGERFUNCTIONTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Integer built-in function tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace shaderexecutor
+{
+
+// ShaderIntegerFunctionTests
+
+class ShaderIntegerFunctionTests : public tcu::TestCaseGroup
+{
+public:
+										ShaderIntegerFunctionTests		(tcu::TestContext& testCtx);
+	virtual								~ShaderIntegerFunctionTests		(void);
+
+	virtual void						init							(void);
+
+private:
+										ShaderIntegerFunctionTests		(const ShaderIntegerFunctionTests&);		// not allowed!
+	ShaderIntegerFunctionTests&			operator=						(const ShaderIntegerFunctionTests&);		// not allowed!
+};
+
+} // shaderexecutor
+} // vkt
+
+#endif // _VKTSHADERINTEGERFUNCTIONTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderPackingFunctionTests.cpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderPackingFunctionTests.cpp
new file mode 100644
index 0000000..e35bad7
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderPackingFunctionTests.cpp
@@ -0,0 +1,1452 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Floating-point packing and unpacking function tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderPackingFunctionTests.hpp"
+#include "vktShaderExecutor.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuFormatUtil.hpp"
+#include "tcuFloat.hpp"
+#include "deRandom.hpp"
+#include "deMath.h"
+#include "deString.h"
+#include "deSharedPtr.hpp"
+
+namespace vkt
+{
+namespace shaderexecutor
+{
+
+using namespace shaderexecutor;
+
+using std::string;
+using tcu::TestLog;
+
+namespace
+{
+
+inline deUint32 getUlpDiff (float a, float b)
+{
+	const deUint32	aBits	= tcu::Float32(a).bits();
+	const deUint32	bBits	= tcu::Float32(b).bits();
+	return aBits > bBits ? aBits - bBits : bBits - aBits;
+}
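For two finite floats of the same sign, the difference of their IEEE-754 bit patterns equals the number of representable values between them, which is exactly what this helper measures. A standalone check, with memcpy type-punning standing in for tcu::Float32::bits():

#include <cassert>
#include <cmath>
#include <cstdint>
#include <cstring>

static uint32_t bitsOf (float f)
{
	uint32_t u;
	std::memcpy(&u, &f, sizeof(u));
	return u;
}

static uint32_t refUlpDiff (float a, float b)
{
	const uint32_t aBits = bitsOf(a);
	const uint32_t bBits = bitsOf(b);
	return aBits > bBits ? aBits - bBits : bBits - aBits;
}

int main (void)
{
	assert(refUlpDiff(1.0f, 1.0f) == 0u);
	assert(refUlpDiff(1.0f, std::nextafterf(1.0f, 2.0f)) == 1u);	// one representable step apart
	return 0;
}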
+
+struct HexFloat
+{
+	const float value;
+	HexFloat (const float value_) : value(value_) {}
+};
+
+std::ostream& operator<< (std::ostream& str, const HexFloat& v)
+{
+	return str << v.value << " / " << tcu::toHex(tcu::Float32(v.value).bits());
+}
+
+static const char* getPrecisionPostfix (glu::Precision precision)
+{
+	static const char* s_postfix[] =
+	{
+		"_lowp",
+		"_mediump",
+		"_highp"
+	};
+	DE_STATIC_ASSERT(DE_LENGTH_OF_ARRAY(s_postfix) == glu::PRECISION_LAST);
+	DE_ASSERT(de::inBounds<int>(precision, 0, DE_LENGTH_OF_ARRAY(s_postfix)));
+	return s_postfix[precision];
+}
+
+static const char* getShaderTypePostfix (glu::ShaderType shaderType)
+{
+	static const char* s_postfix[] =
+	{
+		"_vertex",
+		"_fragment",
+		"_geometry",
+		"_tess_control",
+		"_tess_eval",
+		"_compute"
+	};
+	DE_ASSERT(de::inBounds<int>(shaderType, 0, DE_LENGTH_OF_ARRAY(s_postfix)));
+	return s_postfix[shaderType];
+}
+
+} // anonymous
+
+// ShaderPackingFunctionCase
+
+class ShaderPackingFunctionCase : public TestCase
+{
+public:
+										ShaderPackingFunctionCase			(tcu::TestContext& testCtx, const char* name, const char* description, glu::ShaderType shaderType);
+										~ShaderPackingFunctionCase			(void);
+
+	virtual	void						initPrograms						(vk::SourceCollections& programCollection) const
+										{
+											m_executor->setShaderSources(programCollection);
+										}
+	virtual TestInstance*				createInstance						(Context& context) const = 0;
+	void								init								(void);
+
+protected:
+	const glu::ShaderType				m_shaderType;
+	ShaderSpec							m_spec;
+	de::SharedPtr<ShaderExecutor>		m_executor;
+
+private:
+										ShaderPackingFunctionCase			(const ShaderPackingFunctionCase& other);
+	ShaderPackingFunctionCase&			operator=							(const ShaderPackingFunctionCase& other);
+};
+
+ShaderPackingFunctionCase::ShaderPackingFunctionCase (tcu::TestContext& testCtx, const char* name, const char* description, glu::ShaderType shaderType)
+	: TestCase		(testCtx, name, description)
+	, m_shaderType	(shaderType)
+	, m_executor	(DE_NULL)
+{
+}
+
+ShaderPackingFunctionCase::~ShaderPackingFunctionCase (void)
+{
+}
+
+void ShaderPackingFunctionCase::init (void)
+{
+	DE_ASSERT(!m_executor);
+
+	m_executor = de::SharedPtr<ShaderExecutor>(createExecutor(m_shaderType, m_spec));
+	m_testCtx.getLog() << *m_executor;
+}
+
+// ShaderPackingFunctionTestInstance
+
+class ShaderPackingFunctionTestInstance : public TestInstance
+{
+public:
+										ShaderPackingFunctionTestInstance	(Context& context, glu::ShaderType shaderType, ShaderSpec spec, de::SharedPtr<ShaderExecutor> executor, const char* name)
+											: TestInstance	(context)
+											, m_testCtx		(context.getTestContext())
+											, m_shaderType	(shaderType)
+											, m_spec		(spec)
+											, m_name		(name)
+											, m_executor	(executor)
+										{
+										}
+	virtual tcu::TestStatus				iterate								(void) = 0;
+protected:
+	tcu::TestContext&					m_testCtx;
+	const glu::ShaderType				m_shaderType;
+	ShaderSpec							m_spec;
+	const char*							m_name;
+	de::SharedPtr<ShaderExecutor>		m_executor;
+};
+
+// Test cases
+
+class PackSnorm2x16CaseInstance: public ShaderPackingFunctionTestInstance
+{
+public:
+	PackSnorm2x16CaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, glu::Precision precision, de::SharedPtr<ShaderExecutor> executor, const char* name)
+		: ShaderPackingFunctionTestInstance	(context, shaderType, spec, executor, name)
+		, m_precision						(precision)
+	{
+	}
+
+	tcu::TestStatus iterate (void)
+	{
+		de::Random					rnd			(deStringHash(m_name) ^ 0x776002);
+		std::vector<tcu::Vec2>		inputs;
+		std::vector<deUint32>		outputs;
+		const int					maxDiff		= m_precision == glu::PRECISION_HIGHP	? 1		:		// Rounding only.
+												  m_precision == glu::PRECISION_MEDIUMP	? 33	:		// (2^-10) * (2^15) + 1
+												  m_precision == glu::PRECISION_LOWP	? 129	: 0;	// (2^-8) * (2^15) + 1
+
+		// Special values to check.
+		inputs.push_back(tcu::Vec2(0.0f, 0.0f));
+		inputs.push_back(tcu::Vec2(-1.0f, 1.0f));
+		inputs.push_back(tcu::Vec2(0.5f, -0.5f));
+		inputs.push_back(tcu::Vec2(-1.5f, 1.5f));
+		inputs.push_back(tcu::Vec2(0.25f, -0.75f));
+
+		// Random values, mostly in range.
+		for (int ndx = 0; ndx < 15; ndx++)
+		{
+			const float x = rnd.getFloat()*2.5f - 1.25f;
+			const float y = rnd.getFloat()*2.5f - 1.25f;
+			inputs.push_back(tcu::Vec2(x, y));
+		}
+
+		// Large random values.
+		for (int ndx = 0; ndx < 80; ndx++)
+		{
+			const float x = rnd.getFloat()*1e6f - 0.5e6f;
+			const float y = rnd.getFloat()*1e6f - 0.5e6f;
+			inputs.push_back(tcu::Vec2(x, y));
+		}
+
+		outputs.resize(inputs.size());
+
+		m_testCtx.getLog() << TestLog::Message << "Executing shader for " << inputs.size() << " input values" << tcu::TestLog::EndMessage;
+
+		{
+			const void*	in	= &inputs[0];
+			void*		out	= &outputs[0];
+
+			m_executor->execute(m_context, (int)inputs.size(), &in, &out);
+		}
+
+		// Verify
+		{
+			const int	numValues	= (int)inputs.size();
+			const int	maxPrints	= 10;
+			int			numFailed	= 0;
+
+			for (int valNdx = 0; valNdx < numValues; valNdx++)
+			{
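+				// Reference: round(clamp(x, -1, 1) * 32767), clamped to the signed 16-bit range; x goes in the low half, y in the high half.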
+				const deUint16	ref0	= (deUint16)de::clamp(deRoundFloatToInt32(de::clamp(inputs[valNdx].x(), -1.0f, 1.0f) * 32767.0f), -(1<<15), (1<<15)-1);
+				const deUint16	ref1	= (deUint16)de::clamp(deRoundFloatToInt32(de::clamp(inputs[valNdx].y(), -1.0f, 1.0f) * 32767.0f), -(1<<15), (1<<15)-1);
+				const deUint32	ref		= (ref1 << 16) | ref0;
+				const deUint32	res		= outputs[valNdx];
+				const deUint16	res0	= (deUint16)(res & 0xffff);
+				const deUint16	res1	= (deUint16)(res >> 16);
+				const int		diff0	= de::abs((int)ref0 - (int)res0);
+				const int		diff1	= de::abs((int)ref1 - (int)res1);
+
+				if (diff0 > maxDiff || diff1 > maxDiff)
+				{
+					if (numFailed < maxPrints)
+					{
+						m_testCtx.getLog() << TestLog::Message << "ERROR: Mismatch in value " << valNdx
+															   << ", expected packSnorm2x16(" << inputs[valNdx] << ") = " << tcu::toHex(ref)
+															   << ", got " << tcu::toHex(res)
+															   << "\n  diffs = (" << diff0 << ", " << diff1 << "), max diff = " << maxDiff
+										   << TestLog::EndMessage;
+					}
+					else if (numFailed == maxPrints)
+						m_testCtx.getLog() << TestLog::Message << "..." << TestLog::EndMessage;
+
+					numFailed += 1;
+				}
+			}
+
+			m_testCtx.getLog() << TestLog::Message << (numValues - numFailed) << " / " << numValues << " values passed" << TestLog::EndMessage;
+
+			if (numFailed == 0)
+				return tcu::TestStatus::pass("Pass");
+			else
+				return tcu::TestStatus::fail("Result comparison failed");
+
+		}
+	}
+
+private:
+	const glu::Precision m_precision;
+};
+
+class PackSnorm2x16Case : public ShaderPackingFunctionCase
+{
+public:
+	PackSnorm2x16Case (tcu::TestContext& testCtx, glu::ShaderType shaderType, glu::Precision precision)
+		: ShaderPackingFunctionCase	(testCtx, (string("packsnorm2x16") + getPrecisionPostfix(precision) + getShaderTypePostfix(shaderType)).c_str(), "packSnorm2x16", shaderType)
+		, m_precision				(precision)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(glu::TYPE_FLOAT_VEC2, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP)));
+
+		m_spec.source = "out0 = packSnorm2x16(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new PackSnorm2x16CaseInstance(ctx, m_shaderType, m_spec, m_precision, m_executor, getName());
+	}
+
+private:
+	const glu::Precision m_precision;
+};
+
+class UnpackSnorm2x16CaseInstance : public ShaderPackingFunctionTestInstance
+{
+public:
+	UnpackSnorm2x16CaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, de::SharedPtr<ShaderExecutor> executor, const char* name)
+	: ShaderPackingFunctionTestInstance (context, shaderType, spec, executor, name)
+	{
+	}
+
+	tcu::TestStatus iterate (void)
+	{
+		const deUint32				maxDiff		= 1; // Rounding error.
+		de::Random					rnd			(deStringHash(m_name) ^ 0x776002);
+		std::vector<deUint32>		inputs;
+		std::vector<tcu::Vec2>		outputs;
+
+		inputs.push_back(0x00000000u);
+		inputs.push_back(0x7fff8000u);
+		inputs.push_back(0x80007fffu);
+		inputs.push_back(0xffffffffu);
+		inputs.push_back(0x0001fffeu);
+
+		// Random values.
+		for (int ndx = 0; ndx < 95; ndx++)
+			inputs.push_back(rnd.getUint32());
+
+		outputs.resize(inputs.size());
+
+		m_testCtx.getLog() << TestLog::Message << "Executing shader for " << inputs.size() << " input values" << tcu::TestLog::EndMessage;
+
+		{
+			const void*	in	= &inputs[0];
+			void*		out	= &outputs[0];
+
+			m_executor->execute(m_context, (int)inputs.size(), &in, &out);
+		}
+
+		// Verify
+		{
+			const int	numValues	= (int)inputs.size();
+			const int	maxPrints	= 10;
+			int			numFailed	= 0;
+
+			for (int valNdx = 0; valNdx < (int)inputs.size(); valNdx++)
+			{
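+				// Reference: each 16-bit half is read as a signed integer and mapped to [-1, 1] as clamp(v / 32767, -1, 1).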
+				const deInt16	in0			= (deInt16)(deUint16)(inputs[valNdx] & 0xffff);
+				const deInt16	in1			= (deInt16)(deUint16)(inputs[valNdx] >> 16);
+				const float		ref0		= de::clamp(float(in0) / 32767.f, -1.0f, 1.0f);
+				const float		ref1		= de::clamp(float(in1) / 32767.f, -1.0f, 1.0f);
+				const float		res0		= outputs[valNdx].x();
+				const float		res1		= outputs[valNdx].y();
+
+				const deUint32	diff0	= getUlpDiff(ref0, res0);
+				const deUint32	diff1	= getUlpDiff(ref1, res1);
+
+				if (diff0 > maxDiff || diff1 > maxDiff)
+				{
+					if (numFailed < maxPrints)
+					{
+						m_testCtx.getLog() << TestLog::Message << "ERROR: Mismatch in value " << valNdx << ",\n"
+															   << "  expected unpackSnorm2x16(" << tcu::toHex(inputs[valNdx]) << ") = "
+															   << "vec2(" << HexFloat(ref0) << ", " << HexFloat(ref1) << ")"
+															   << ", got vec2(" << HexFloat(res0) << ", " << HexFloat(res1) << ")"
+															   << "\n  ULP diffs = (" << diff0 << ", " << diff1 << "), max diff = " << maxDiff
+										   << TestLog::EndMessage;
+					}
+					else if (numFailed == maxPrints)
+						m_testCtx.getLog() << TestLog::Message << "..." << TestLog::EndMessage;
+
+					numFailed += 1;
+				}
+			}
+
+			m_testCtx.getLog() << TestLog::Message << (numValues - numFailed) << " / " << numValues << " values passed" << TestLog::EndMessage;
+
+			if (numFailed == 0)
+				return tcu::TestStatus::pass("Pass");
+			else
+				return tcu::TestStatus::fail("Result comparison failed");
+
+		}
+	}
+};
+
+class UnpackSnorm2x16Case : public ShaderPackingFunctionCase
+{
+public:
+	UnpackSnorm2x16Case (tcu::TestContext& testCtx, glu::ShaderType shaderType)
+		: ShaderPackingFunctionCase	(testCtx, (string("unpacksnorm2x16") + getShaderTypePostfix(shaderType)).c_str(), "unpackSnorm2x16", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(glu::TYPE_FLOAT_VEC2, glu::PRECISION_HIGHP)));
+
+		m_spec.source = "out0 = unpackSnorm2x16(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new UnpackSnorm2x16CaseInstance(ctx, m_shaderType, m_spec, m_executor, getName());
+	}
+};
+
+class PackUnorm2x16CaseInstance : public ShaderPackingFunctionTestInstance
+{
+public:
+	PackUnorm2x16CaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, glu::Precision precision, de::SharedPtr<ShaderExecutor> executor, const char* name)
+	: ShaderPackingFunctionTestInstance	(context, shaderType, spec, executor, name)
+	, m_precision						(precision)
+	{
+	}
+
+	tcu::TestStatus iterate (void)
+	{
+		de::Random					rnd			(deStringHash(m_name) ^ 0x776002);
+		std::vector<tcu::Vec2>		inputs;
+		std::vector<deUint32>		outputs;
+		const int					maxDiff		= m_precision == glu::PRECISION_HIGHP	? 1		:		// Rounding only.
+												  m_precision == glu::PRECISION_MEDIUMP	? 65	:		// (2^-10) * (2^16) + 1
+												  m_precision == glu::PRECISION_LOWP	? 257	: 0;	// (2^-8) * (2^16) + 1
+
+		// Special values to check.
+		inputs.push_back(tcu::Vec2(0.0f, 0.0f));
+		inputs.push_back(tcu::Vec2(0.5f, 1.0f));
+		inputs.push_back(tcu::Vec2(1.0f, 0.5f));
+		inputs.push_back(tcu::Vec2(-0.5f, 1.5f));
+		inputs.push_back(tcu::Vec2(0.25f, 0.75f));
+
+		// Random values, mostly in range.
+		for (int ndx = 0; ndx < 15; ndx++)
+		{
+			const float x = rnd.getFloat()*1.25f;
+			const float y = rnd.getFloat()*1.25f;
+			inputs.push_back(tcu::Vec2(x, y));
+		}
+
+		// Large random values.
+		for (int ndx = 0; ndx < 80; ndx++)
+		{
+			const float x = rnd.getFloat()*1e6f - 1e5f;
+			const float y = rnd.getFloat()*1e6f - 1e5f;
+			inputs.push_back(tcu::Vec2(x, y));
+		}
+
+		outputs.resize(inputs.size());
+
+		m_testCtx.getLog() << TestLog::Message << "Executing shader for " << inputs.size() << " input values" << tcu::TestLog::EndMessage;
+
+		{
+			const void*	in	= &inputs[0];
+			void*		out	= &outputs[0];
+
+			m_executor->execute(m_context, (int)inputs.size(), &in, &out);
+		}
+
+		// Verify
+		{
+			const int	numValues	= (int)inputs.size();
+			const int	maxPrints	= 10;
+			int			numFailed	= 0;
+
+			for (int valNdx = 0; valNdx < (int)inputs.size(); valNdx++)
+			{
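+				// Reference: round(clamp(x, 0, 1) * 65535); x is packed into the low half, y into the high half.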
+				const deUint16	ref0	= (deUint16)de::clamp(deRoundFloatToInt32(de::clamp(inputs[valNdx].x(), 0.0f, 1.0f) * 65535.0f), 0, (1<<16)-1);
+				const deUint16	ref1	= (deUint16)de::clamp(deRoundFloatToInt32(de::clamp(inputs[valNdx].y(), 0.0f, 1.0f) * 65535.0f), 0, (1<<16)-1);
+				const deUint32	ref		= (ref1 << 16) | ref0;
+				const deUint32	res		= outputs[valNdx];
+				const deUint16	res0	= (deUint16)(res & 0xffff);
+				const deUint16	res1	= (deUint16)(res >> 16);
+				const int		diff0	= de::abs((int)ref0 - (int)res0);
+				const int		diff1	= de::abs((int)ref1 - (int)res1);
+
+				if (diff0 > maxDiff || diff1 > maxDiff)
+				{
+					if (numFailed < maxPrints)
+					{
+						m_testCtx.getLog() << TestLog::Message << "ERROR: Mismatch in value " << valNdx
+															   << ", expected packUnorm2x16(" << inputs[valNdx] << ") = " << tcu::toHex(ref)
+															   << ", got " << tcu::toHex(res)
+															   << "\n  diffs = (" << diff0 << ", " << diff1 << "), max diff = " << maxDiff
+										   << TestLog::EndMessage;
+					}
+					else if (numFailed == maxPrints)
+						m_testCtx.getLog() << TestLog::Message << "..." << TestLog::EndMessage;
+
+					numFailed += 1;
+				}
+			}
+
+			m_testCtx.getLog() << TestLog::Message << (numValues - numFailed) << " / " << numValues << " values passed" << TestLog::EndMessage;
+
+			if (numFailed == 0)
+				return tcu::TestStatus::pass("Pass");
+			else
+				return tcu::TestStatus::fail("Result comparison failed");
+
+		}
+	}
+
+private:
+	const glu::Precision m_precision;
+};
+
+class PackUnorm2x16Case : public ShaderPackingFunctionCase
+{
+public:
+	PackUnorm2x16Case (tcu::TestContext& testCtx, glu::ShaderType shaderType, glu::Precision precision)
+		: ShaderPackingFunctionCase	(testCtx, (string("packunorm2x16") + getPrecisionPostfix(precision) + getShaderTypePostfix(shaderType)).c_str(), "packUnorm2x16", shaderType)
+		, m_precision				(precision)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(glu::TYPE_FLOAT_VEC2, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP)));
+
+		m_spec.source = "out0 = packUnorm2x16(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new PackUnorm2x16CaseInstance(ctx, m_shaderType, m_spec, m_precision, m_executor, getName());
+	}
+
+private:
+	const glu::Precision m_precision;
+};
+
+class UnpackUnorm2x16CaseInstance : public ShaderPackingFunctionTestInstance
+{
+public:
+	UnpackUnorm2x16CaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, de::SharedPtr<ShaderExecutor> executor, const char* name)
+		: ShaderPackingFunctionTestInstance (context, shaderType, spec, executor, name)
+	{
+	}
+
+	tcu::TestStatus iterate (void)
+	{
+		const deUint32				maxDiff		= 1; // Rounding error.
+		de::Random					rnd			(deStringHash(m_name) ^ 0x776002);
+		std::vector<deUint32>		inputs;
+		std::vector<tcu::Vec2>		outputs;
+
+		inputs.push_back(0x00000000u);
+		inputs.push_back(0x7fff8000u);
+		inputs.push_back(0x80007fffu);
+		inputs.push_back(0xffffffffu);
+		inputs.push_back(0x0001fffeu);
+
+		// Random values.
+		for (int ndx = 0; ndx < 95; ndx++)
+			inputs.push_back(rnd.getUint32());
+
+		outputs.resize(inputs.size());
+
+		m_testCtx.getLog() << TestLog::Message << "Executing shader for " << inputs.size() << " input values" << tcu::TestLog::EndMessage;
+
+		{
+			const void*	in	= &inputs[0];
+			void*		out	= &outputs[0];
+
+			m_executor->execute(m_context, (int)inputs.size(), &in, &out);
+		}
+
+		// Verify
+		{
+			const int	numValues	= (int)inputs.size();
+			const int	maxPrints	= 10;
+			int			numFailed	= 0;
+
+			for (int valNdx = 0; valNdx < (int)inputs.size(); valNdx++)
+			{
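+				// Reference: each 16-bit half is mapped to [0, 1] as v / 65535.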
+				const deUint16	in0			= (deUint16)(inputs[valNdx] & 0xffff);
+				const deUint16	in1			= (deUint16)(inputs[valNdx] >> 16);
+				const float		ref0		= float(in0) / 65535.0f;
+				const float		ref1		= float(in1) / 65535.0f;
+				const float		res0		= outputs[valNdx].x();
+				const float		res1		= outputs[valNdx].y();
+
+				const deUint32	diff0		= getUlpDiff(ref0, res0);
+				const deUint32	diff1		= getUlpDiff(ref1, res1);
+
+				if (diff0 > maxDiff || diff1 > maxDiff)
+				{
+					if (numFailed < maxPrints)
+					{
+						m_testCtx.getLog() << TestLog::Message << "ERROR: Mismatch in value " << valNdx << ",\n"
+															   << "  expected unpackUnorm2x16(" << tcu::toHex(inputs[valNdx]) << ") = "
+															   << "vec2(" << HexFloat(ref0) << ", " << HexFloat(ref1) << ")"
+															   << ", got vec2(" << HexFloat(res0) << ", " << HexFloat(res1) << ")"
+															   << "\n  ULP diffs = (" << diff0 << ", " << diff1 << "), max diff = " << maxDiff
+										   << TestLog::EndMessage;
+					}
+					else if (numFailed == maxPrints)
+						m_testCtx.getLog() << TestLog::Message << "..." << TestLog::EndMessage;
+
+					numFailed += 1;
+				}
+			}
+
+			m_testCtx.getLog() << TestLog::Message << (numValues - numFailed) << " / " << numValues << " values passed" << TestLog::EndMessage;
+
+			if (numFailed == 0)
+				return tcu::TestStatus::pass("Pass");
+			else
+				return tcu::TestStatus::fail("Result comparison failed");
+
+		}
+	}
+};
+
+class UnpackUnorm2x16Case : public ShaderPackingFunctionCase
+{
+public:
+	UnpackUnorm2x16Case (tcu::TestContext& testCtx, glu::ShaderType shaderType)
+		: ShaderPackingFunctionCase(testCtx, (string("unpackunorm2x16") + getShaderTypePostfix(shaderType)).c_str(), "unpackUnorm2x16", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(glu::TYPE_FLOAT_VEC2, glu::PRECISION_HIGHP)));
+
+		m_spec.source = "out0 = unpackUnorm2x16(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new UnpackUnorm2x16CaseInstance(ctx, m_shaderType, m_spec, m_executor, getName());
+	}
+
+};
+
+class PackHalf2x16CaseInstance : public ShaderPackingFunctionTestInstance
+{
+public:
+	PackHalf2x16CaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, de::SharedPtr<ShaderExecutor> executor, const char* name)
+	: ShaderPackingFunctionTestInstance (context, shaderType, spec, executor, name)
+	{
+	}
+
+	tcu::TestStatus iterate (void)
+	{
+		const int					maxDiff		= 0; // Values can be represented exactly in mediump.
+		de::Random					rnd			(deStringHash(m_name) ^ 0x776002);
+		std::vector<tcu::Vec2>		inputs;
+		std::vector<deUint32>		outputs;
+
+		// Special values to check.
+		inputs.push_back(tcu::Vec2(0.0f, 0.0f));
+		inputs.push_back(tcu::Vec2(0.5f, 1.0f));
+		inputs.push_back(tcu::Vec2(1.0f, 0.5f));
+		inputs.push_back(tcu::Vec2(-0.5f, 1.5f));
+		inputs.push_back(tcu::Vec2(0.25f, 0.75f));
+
+		// Random values.
+		{
+			const int	minExp	= -14;
+			const int	maxExp	= 15;
+
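+			// Build normal-range values with exponents in the fp16 range; they are quantized to fp16 below so packHalf2x16 can be exact.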
+			for (int ndx = 0; ndx < 95; ndx++)
+			{
+				tcu::Vec2 v;
+				for (int c = 0; c < 2; c++)
+				{
+					const int		s			= rnd.getBool() ? 1 : -1;
+					const int		exp			= rnd.getInt(minExp, maxExp);
+					const deUint32	mantissa	= rnd.getUint32() & ((1<<23)-1);
+
+					v[c] = tcu::Float32::construct(s, exp ? exp : 1 /* avoid denormals */, (1u<<23) | mantissa).asFloat();
+				}
+				inputs.push_back(v);
+			}
+		}
+
+		// Convert input values to fp16 and back to make sure they can be represented exactly in mediump.
+		for (std::vector<tcu::Vec2>::iterator inVal = inputs.begin(); inVal != inputs.end(); ++inVal)
+			*inVal = tcu::Vec2(tcu::Float16(inVal->x()).asFloat(), tcu::Float16(inVal->y()).asFloat());
+
+		outputs.resize(inputs.size());
+
+		m_testCtx.getLog() << TestLog::Message << "Executing shader for " << inputs.size() << " input values" << tcu::TestLog::EndMessage;
+
+		{
+			const void*	in	= &inputs[0];
+			void*		out	= &outputs[0];
+
+			m_executor->execute(m_context, (int)inputs.size(), &in, &out);
+		}
+
+		// Verify
+		{
+			const int	numValues	= (int)inputs.size();
+			const int	maxPrints	= 10;
+			int			numFailed	= 0;
+
+			for (int valNdx = 0; valNdx < (int)inputs.size(); valNdx++)
+			{
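+				// Reference: fp16 bit patterns of x (low half) and y (high half); inputs were pre-quantized to fp16, so the conversion is exact.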
+				const deUint16	ref0	= (deUint16)tcu::Float16(inputs[valNdx].x()).bits();
+				const deUint16	ref1	= (deUint16)tcu::Float16(inputs[valNdx].y()).bits();
+				const deUint32	ref		= (ref1 << 16) | ref0;
+				const deUint32	res		= outputs[valNdx];
+				const deUint16	res0	= (deUint16)(res & 0xffff);
+				const deUint16	res1	= (deUint16)(res >> 16);
+				const int		diff0	= de::abs((int)ref0 - (int)res0);
+				const int		diff1	= de::abs((int)ref1 - (int)res1);
+
+				if (diff0 > maxDiff || diff1 > maxDiff)
+				{
+					if (numFailed < maxPrints)
+					{
+						m_testCtx.getLog() << TestLog::Message << "ERROR: Mismatch in value " << valNdx
+															   << ", expected packHalf2x16(" << inputs[valNdx] << ") = " << tcu::toHex(ref)
+															   << ", got " << tcu::toHex(res)
+															   << "\n  diffs = (" << diff0 << ", " << diff1 << "), max diff = " << maxDiff
+										   << TestLog::EndMessage;
+					}
+					else if (numFailed == maxPrints)
+						m_testCtx.getLog() << TestLog::Message << "..." << TestLog::EndMessage;
+
+					numFailed += 1;
+				}
+			}
+
+			m_testCtx.getLog() << TestLog::Message << (numValues - numFailed) << " / " << numValues << " values passed" << TestLog::EndMessage;
+
+			if (numFailed == 0)
+				return tcu::TestStatus::pass("Pass");
+			else
+				return tcu::TestStatus::fail("Result comparison failed");
+
+		}
+	}
+};
+
+class PackHalf2x16Case : public ShaderPackingFunctionCase
+{
+public:
+	PackHalf2x16Case (tcu::TestContext& testCtx, glu::ShaderType shaderType)
+		: ShaderPackingFunctionCase	(testCtx, (string("packhalf2x16") + getShaderTypePostfix(shaderType)).c_str(), "packHalf2x16", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(glu::TYPE_FLOAT_VEC2, glu::PRECISION_HIGHP)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP)));
+
+		m_spec.source = "out0 = packHalf2x16(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new PackHalf2x16CaseInstance(ctx, m_shaderType, m_spec, m_executor, getName());
+	}
+
+};
+
+class UnpackHalf2x16CaseInstance : public ShaderPackingFunctionTestInstance
+{
+public:
+	UnpackHalf2x16CaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, de::SharedPtr<ShaderExecutor> executor, const char* name)
+	: ShaderPackingFunctionTestInstance (context, shaderType, spec, executor, name)
+	{
+	}
+
+	tcu::TestStatus iterate (void)
+	{
+		const int					maxDiff		= 0; // All bits must be accurate.
+		de::Random					rnd			(deStringHash(m_name) ^ 0x776002);
+		std::vector<deUint32>		inputs;
+		std::vector<tcu::Vec2>		outputs;
+
+		// Special values.
+		inputs.push_back((tcu::Float16( 0.0f).bits() << 16) | tcu::Float16( 1.0f).bits());
+		inputs.push_back((tcu::Float16( 1.0f).bits() << 16) | tcu::Float16( 0.0f).bits());
+		inputs.push_back((tcu::Float16(-1.0f).bits() << 16) | tcu::Float16( 0.5f).bits());
+		inputs.push_back((tcu::Float16( 0.5f).bits() << 16) | tcu::Float16(-0.5f).bits());
+
+		// Construct random values.
+		{
+			const int	minExp		= -14;
+			const int	maxExp		= 15;
+			const int	mantBits	= 10;
+
+			for (int ndx = 0; ndx < 96; ndx++)
+			{
+				deUint32 inVal = 0;
+				for (int c = 0; c < 2; c++)
+				{
+					const int		s			= rnd.getBool() ? 1 : -1;
+					const int		exp			= rnd.getInt(minExp, maxExp);
+					const deUint32	mantissa	= rnd.getUint32() & ((1<<mantBits)-1);
+					const deUint16	value		= tcu::Float16::construct(s, exp ? exp : 1 /* avoid denorm */, (deUint16)((1u<<10) | mantissa)).bits();
+
+					inVal |= value << (16*c);
+				}
+				inputs.push_back(inVal);
+			}
+		}
+
+		outputs.resize(inputs.size());
+
+		m_testCtx.getLog() << TestLog::Message << "Executing shader for " << inputs.size() << " input values" << tcu::TestLog::EndMessage;
+
+		{
+			const void*	in	= &inputs[0];
+			void*		out	= &outputs[0];
+
+			m_executor->execute(m_context, (int)inputs.size(), &in, &out);
+		}
+
+		// Verify
+		{
+			const int	numValues	= (int)inputs.size();
+			const int	maxPrints	= 10;
+			int			numFailed	= 0;
+
+			for (int valNdx = 0; valNdx < (int)inputs.size(); valNdx++)
+			{
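+				// Reference: exact fp16-to-fp32 conversion of each half; the fp32 bit patterns must match exactly (maxDiff = 0).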
+				const deUint16	in0			= (deUint16)(inputs[valNdx] & 0xffff);
+				const deUint16	in1			= (deUint16)(inputs[valNdx] >> 16);
+				const float		ref0		= tcu::Float16(in0).asFloat();
+				const float		ref1		= tcu::Float16(in1).asFloat();
+				const float		res0		= outputs[valNdx].x();
+				const float		res1		= outputs[valNdx].y();
+
+				const deUint32	refBits0	= tcu::Float32(ref0).bits();
+				const deUint32	refBits1	= tcu::Float32(ref1).bits();
+				const deUint32	resBits0	= tcu::Float32(res0).bits();
+				const deUint32	resBits1	= tcu::Float32(res1).bits();
+
+				const int		diff0	= de::abs((int)refBits0 - (int)resBits0);
+				const int		diff1	= de::abs((int)refBits1 - (int)resBits1);
+
+				if (diff0 > maxDiff || diff1 > maxDiff)
+				{
+					if (numFailed < maxPrints)
+					{
+						m_testCtx.getLog() << TestLog::Message << "ERROR: Mismatch in value " << valNdx << ",\n"
+															   << "  expected unpackHalf2x16(" << tcu::toHex(inputs[valNdx]) << ") = "
+															   << "vec2(" << ref0 << " / " << tcu::toHex(refBits0) << ", " << ref1 << " / " << tcu::toHex(refBits1) << ")"
+															   << ", got vec2(" << res0 << " / " << tcu::toHex(resBits0) << ", " << res1 << " / " << tcu::toHex(resBits1) << ")"
+															   << "\n  ULP diffs = (" << diff0 << ", " << diff1 << "), max diff = " << maxDiff
+										   << TestLog::EndMessage;
+					}
+					else if (numFailed == maxPrints)
+						m_testCtx.getLog() << TestLog::Message << "..." << TestLog::EndMessage;
+
+					numFailed += 1;
+				}
+			}
+
+			m_testCtx.getLog() << TestLog::Message << (numValues - numFailed) << " / " << numValues << " values passed" << TestLog::EndMessage;
+
+			if (numFailed == 0)
+				return tcu::TestStatus::pass("Pass");
+			else
+				return tcu::TestStatus::fail("Result comparison failed");
+
+		}
+	}
+};
+
+class UnpackHalf2x16Case : public ShaderPackingFunctionCase
+{
+public:
+	UnpackHalf2x16Case (tcu::TestContext& testCtx, glu::ShaderType shaderType)
+		: ShaderPackingFunctionCase	(testCtx, (string("unpackhalf2x16") + getShaderTypePostfix(shaderType)).c_str(), "unpackHalf2x16", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(glu::TYPE_FLOAT_VEC2, glu::PRECISION_MEDIUMP)));
+
+		m_spec.source = "out0 = unpackHalf2x16(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new UnpackHalf2x16CaseInstance(ctx, m_shaderType, m_spec, m_executor, getName());
+	}
+
+};
+
+class PackSnorm4x8CaseInstance : public ShaderPackingFunctionTestInstance
+{
+public:
+	PackSnorm4x8CaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, glu::Precision precision, de::SharedPtr<ShaderExecutor> executor, const char* name)
+		: ShaderPackingFunctionTestInstance	(context, shaderType, spec, executor, name)
+		, m_precision						(precision)
+	{
+	}
+
+	tcu::TestStatus iterate (void)
+	{
+		de::Random					rnd			(deStringHash(m_name) ^ 0x42f2c0);
+		std::vector<tcu::Vec4>		inputs;
+		std::vector<deUint32>		outputs;
+		const int					maxDiff		= m_precision == glu::PRECISION_HIGHP	? 1	:		// Rounding only.
+												  m_precision == glu::PRECISION_MEDIUMP	? 1	:		// (2^-10) * (2^7) + 1
+												  m_precision == glu::PRECISION_LOWP	? 2	: 0;	// (2^-8) * (2^7) + 1
+
+		// Special values to check.
+		inputs.push_back(tcu::Vec4(0.0f, 0.0f, 0.0f, 0.0f));
+		inputs.push_back(tcu::Vec4(-1.0f, 1.0f, -1.0f, 1.0f));
+		inputs.push_back(tcu::Vec4(0.5f, -0.5f, -0.5f, 0.5f));
+		inputs.push_back(tcu::Vec4(-1.5f, 1.5f, -1.5f, 1.5f));
+		inputs.push_back(tcu::Vec4(0.25f, -0.75f, -0.25f, 0.75f));
+
+		// Random values, mostly in range.
+		for (int ndx = 0; ndx < 15; ndx++)
+		{
+			const float x = rnd.getFloat()*2.5f - 1.25f;
+			const float y = rnd.getFloat()*2.5f - 1.25f;
+			const float z = rnd.getFloat()*2.5f - 1.25f;
+			const float w = rnd.getFloat()*2.5f - 1.25f;
+			inputs.push_back(tcu::Vec4(x, y, z, w));
+		}
+
+		// Large random values.
+		for (int ndx = 0; ndx < 80; ndx++)
+		{
+			const float x = rnd.getFloat()*1e6f - 0.5e6f;
+			const float y = rnd.getFloat()*1e6f - 0.5e6f;
+			const float z = rnd.getFloat()*1e6f - 0.5e6f;
+			const float w = rnd.getFloat()*1e6f - 0.5e6f;
+			inputs.push_back(tcu::Vec4(x, y, z, w));
+		}
+
+		outputs.resize(inputs.size());
+
+		m_testCtx.getLog() << TestLog::Message << "Executing shader for " << inputs.size() << " input values" << tcu::TestLog::EndMessage;
+
+		{
+			const void*	in	= &inputs[0];
+			void*		out	= &outputs[0];
+
+			m_executor->execute(m_context, (int)inputs.size(), &in, &out);
+		}
+
+		// Verify
+		{
+			const int	numValues	= (int)inputs.size();
+			const int	maxPrints	= 10;
+			int			numFailed	= 0;
+
+			for (int valNdx = 0; valNdx < numValues; valNdx++)
+			{
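+				// Reference: round(clamp(c, -1, 1) * 127) per component, clamped to the signed 8-bit range and packed with x in the lowest byte.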
+				const deUint16	ref0	= (deUint8)de::clamp(deRoundFloatToInt32(de::clamp(inputs[valNdx].x(), -1.0f, 1.0f) * 127.0f), -(1<<7), (1<<7)-1);
+				const deUint16	ref1	= (deUint8)de::clamp(deRoundFloatToInt32(de::clamp(inputs[valNdx].y(), -1.0f, 1.0f) * 127.0f), -(1<<7), (1<<7)-1);
+				const deUint16	ref2	= (deUint8)de::clamp(deRoundFloatToInt32(de::clamp(inputs[valNdx].z(), -1.0f, 1.0f) * 127.0f), -(1<<7), (1<<7)-1);
+				const deUint16	ref3	= (deUint8)de::clamp(deRoundFloatToInt32(de::clamp(inputs[valNdx].w(), -1.0f, 1.0f) * 127.0f), -(1<<7), (1<<7)-1);
+				const deUint32	ref		= (deUint32(ref3) << 24) | (deUint32(ref2) << 16) | (deUint32(ref1) << 8) | deUint32(ref0);
+				const deUint32	res		= outputs[valNdx];
+				const deUint16	res0	= (deUint8)(res & 0xff);
+				const deUint16	res1	= (deUint8)((res >> 8) & 0xff);
+				const deUint16	res2	= (deUint8)((res >> 16) & 0xff);
+				const deUint16	res3	= (deUint8)((res >> 24) & 0xff);
+				const int		diff0	= de::abs((int)ref0 - (int)res0);
+				const int		diff1	= de::abs((int)ref1 - (int)res1);
+				const int		diff2	= de::abs((int)ref2 - (int)res2);
+				const int		diff3	= de::abs((int)ref3 - (int)res3);
+
+				if (diff0 > maxDiff || diff1 > maxDiff || diff2 > maxDiff || diff3 > maxDiff)
+				{
+					if (numFailed < maxPrints)
+					{
+						m_testCtx.getLog() << TestLog::Message << "ERROR: Mismatch in value " << valNdx
+															   << ", expected packSnorm4x8(" << inputs[valNdx] << ") = " << tcu::toHex(ref)
+															   << ", got " << tcu::toHex(res)
+															   << "\n  diffs = " << tcu::IVec4(diff0, diff1, diff2, diff3) << ", max diff = " << maxDiff
+										   << TestLog::EndMessage;
+					}
+					else if (numFailed == maxPrints)
+						m_testCtx.getLog() << TestLog::Message << "..." << TestLog::EndMessage;
+
+					numFailed += 1;
+				}
+			}
+
+			m_testCtx.getLog() << TestLog::Message << (numValues - numFailed) << " / " << numValues << " values passed" << TestLog::EndMessage;
+
+			if (numFailed == 0)
+				return tcu::TestStatus::pass("Pass");
+			else
+				return tcu::TestStatus::fail("Result comparison failed");
+
+		}
+	}
+
+private:
+	const glu::Precision m_precision;
+};
+
+class PackSnorm4x8Case : public ShaderPackingFunctionCase
+{
+public:
+	PackSnorm4x8Case (tcu::TestContext& testCtx, glu::ShaderType shaderType, glu::Precision precision)
+		: ShaderPackingFunctionCase	(testCtx, (string("packsnorm4x8") + getPrecisionPostfix(precision) + getShaderTypePostfix(shaderType)).c_str(), "packSnorm4x8", shaderType)
+		, m_precision				(precision)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(glu::TYPE_FLOAT_VEC4, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP)));
+
+		m_spec.source = "out0 = packSnorm4x8(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new PackSnorm4x8CaseInstance(ctx, m_shaderType, m_spec, m_precision, m_executor, getName());
+	}
+
+private:
+	const glu::Precision m_precision;
+};
+
+class UnpackSnorm4x8CaseInstance : public ShaderPackingFunctionTestInstance
+{
+public:
+	UnpackSnorm4x8CaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, de::SharedPtr<ShaderExecutor> executor, const char* name)
+		: ShaderPackingFunctionTestInstance	(context, shaderType, spec, executor, name)
+	{
+	}
+
+	tcu::TestStatus iterate (void)
+	{
+		const deUint32				maxDiff		= 1; // Rounding error.
+		de::Random					rnd			(deStringHash(m_name) ^ 0x776002);
+		std::vector<deUint32>		inputs;
+		std::vector<tcu::Vec4>		outputs;
+
+		inputs.push_back(0x00000000u);
+		inputs.push_back(0x7fff8000u);
+		inputs.push_back(0x80007fffu);
+		inputs.push_back(0xffffffffu);
+		inputs.push_back(0x0001fffeu);
+
+		// Random values.
+		for (int ndx = 0; ndx < 95; ndx++)
+			inputs.push_back(rnd.getUint32());
+
+		outputs.resize(inputs.size());
+
+		m_testCtx.getLog() << TestLog::Message << "Executing shader for " << inputs.size() << " input values" << tcu::TestLog::EndMessage;
+
+		{
+			const void*	in	= &inputs[0];
+			void*		out	= &outputs[0];
+
+			m_executor->execute(m_context, (int)inputs.size(), &in, &out);
+		}
+
+		// Verify
+		{
+			const int	numValues	= (int)inputs.size();
+			const int	maxPrints	= 10;
+			int			numFailed	= 0;
+
+			for (int valNdx = 0; valNdx < (int)inputs.size(); valNdx++)
+			{
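+				// Reference: each byte is read as a signed integer and mapped to [-1, 1] as clamp(v / 127, -1, 1).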
+				const deInt8	in0		= (deInt8)(deUint8)(inputs[valNdx] & 0xff);
+				const deInt8	in1		= (deInt8)(deUint8)((inputs[valNdx] >> 8) & 0xff);
+				const deInt8	in2		= (deInt8)(deUint8)((inputs[valNdx] >> 16) & 0xff);
+				const deInt8	in3		= (deInt8)(deUint8)(inputs[valNdx] >> 24);
+				const float		ref0	= de::clamp(float(in0) / 127.f, -1.0f, 1.0f);
+				const float		ref1	= de::clamp(float(in1) / 127.f, -1.0f, 1.0f);
+				const float		ref2	= de::clamp(float(in2) / 127.f, -1.0f, 1.0f);
+				const float		ref3	= de::clamp(float(in3) / 127.f, -1.0f, 1.0f);
+				const float		res0	= outputs[valNdx].x();
+				const float		res1	= outputs[valNdx].y();
+				const float		res2	= outputs[valNdx].z();
+				const float		res3	= outputs[valNdx].w();
+
+				const deUint32	diff0	= getUlpDiff(ref0, res0);
+				const deUint32	diff1	= getUlpDiff(ref1, res1);
+				const deUint32	diff2	= getUlpDiff(ref2, res2);
+				const deUint32	diff3	= getUlpDiff(ref3, res3);
+
+				if (diff0 > maxDiff || diff1 > maxDiff || diff2 > maxDiff || diff3 > maxDiff)
+				{
+					if (numFailed < maxPrints)
+					{
+						m_testCtx.getLog() << TestLog::Message << "ERROR: Mismatch in value " << valNdx << ",\n"
+															   << "  expected unpackSnorm4x8(" << tcu::toHex(inputs[valNdx]) << ") = "
+															   << "vec4(" << HexFloat(ref0) << ", " << HexFloat(ref1) << ", " << HexFloat(ref2) << ", " << HexFloat(ref3) << ")"
+															   << ", got vec4(" << HexFloat(res0) << ", " << HexFloat(res1) << ", " << HexFloat(res2) << ", " << HexFloat(res3) << ")"
+															   << "\n  ULP diffs = (" << diff0 << ", " << diff1 << ", " << diff2 << ", " << diff3 << "), max diff = " << maxDiff
+										   << TestLog::EndMessage;
+					}
+					else if (numFailed == maxPrints)
+						m_testCtx.getLog() << TestLog::Message << "..." << TestLog::EndMessage;
+
+					numFailed += 1;
+				}
+			}
+
+			m_testCtx.getLog() << TestLog::Message << (numValues - numFailed) << " / " << numValues << " values passed" << TestLog::EndMessage;
+
+			if (numFailed == 0)
+				return tcu::TestStatus::pass("Pass");
+			else
+				return tcu::TestStatus::fail("Result comparison failed");
+
+		}
+	}
+};
+
+class UnpackSnorm4x8Case : public ShaderPackingFunctionCase
+{
+public:
+	UnpackSnorm4x8Case (tcu::TestContext& testCtx, glu::ShaderType shaderType)
+		: ShaderPackingFunctionCase	(testCtx, (string("unpacksnorm4x8") + getShaderTypePostfix(shaderType)).c_str(), "unpackSnorm4x8", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(glu::TYPE_FLOAT_VEC4, glu::PRECISION_HIGHP)));
+
+		m_spec.source = "out0 = unpackSnorm4x8(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new UnpackSnorm4x8CaseInstance(ctx, m_shaderType, m_spec, m_executor, getName());
+	}
+
+};
+
+class PackUnorm4x8CaseInstance : public ShaderPackingFunctionTestInstance
+{
+public:
+	PackUnorm4x8CaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, glu::Precision precision, de::SharedPtr<ShaderExecutor> executor, const char* name)
+		: ShaderPackingFunctionTestInstance	(context, shaderType, spec, executor, name)
+		, m_precision						(precision)
+	{
+	}
+
+	tcu::TestStatus iterate (void)
+	{
+		de::Random					rnd			(deStringHash(m_name) ^ 0x776002);
+		std::vector<tcu::Vec4>		inputs;
+		std::vector<deUint32>		outputs;
+		const int					maxDiff		= m_precision == glu::PRECISION_HIGHP	? 1	:		// Rounding only.
+												  m_precision == glu::PRECISION_MEDIUMP	? 1	:		// (2^-10) * (2^8) + 1
+												  m_precision == glu::PRECISION_LOWP	? 2	: 0;	// (2^-8) * (2^8) + 1
+
+		// Special values to check.
+		inputs.push_back(tcu::Vec4(0.0f, 0.0f, 0.0f, 0.0f));
+		inputs.push_back(tcu::Vec4(-1.0f, 1.0f, -1.0f, 1.0f));
+		inputs.push_back(tcu::Vec4(0.5f, -0.5f, -0.5f, 0.5f));
+		inputs.push_back(tcu::Vec4(-1.5f, 1.5f, -1.5f, 1.5f));
+		inputs.push_back(tcu::Vec4(0.25f, -0.75f, -0.25f, 0.75f));
+
+		// Random values, mostly in range.
+		for (int ndx = 0; ndx < 15; ndx++)
+		{
+			const float x = rnd.getFloat()*1.25f - 0.125f;
+			const float y = rnd.getFloat()*1.25f - 0.125f;
+			const float z = rnd.getFloat()*1.25f - 0.125f;
+			const float w = rnd.getFloat()*1.25f - 0.125f;
+			inputs.push_back(tcu::Vec4(x, y, z, w));
+		}
+
+		// Large random values.
+		for (int ndx = 0; ndx < 80; ndx++)
+		{
+			const float x = rnd.getFloat()*1e6f - 1e5f;
+			const float y = rnd.getFloat()*1e6f - 1e5f;
+			const float z = rnd.getFloat()*1e6f - 1e5f;
+			const float w = rnd.getFloat()*1e6f - 1e5f;
+			inputs.push_back(tcu::Vec4(x, y, z, w));
+		}
+
+		outputs.resize(inputs.size());
+
+		m_testCtx.getLog() << TestLog::Message << "Executing shader for " << inputs.size() << " input values" << tcu::TestLog::EndMessage;
+
+		{
+			const void*	in	= &inputs[0];
+			void*		out	= &outputs[0];
+
+			m_executor->execute(m_context, (int)inputs.size(), &in, &out);
+		}
+
+		// Verify
+		{
+			const int	numValues	= (int)inputs.size();
+			const int	maxPrints	= 10;
+			int			numFailed	= 0;
+
+			for (int valNdx = 0; valNdx < (int)inputs.size(); valNdx++)
+			{
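+				// Reference: round(clamp(c, 0, 1) * 255) per component, packed with x in the lowest byte.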
+				const deUint16	ref0	= (deUint8)de::clamp(deRoundFloatToInt32(de::clamp(inputs[valNdx].x(), 0.0f, 1.0f) * 255.0f), 0, (1<<8)-1);
+				const deUint16	ref1	= (deUint8)de::clamp(deRoundFloatToInt32(de::clamp(inputs[valNdx].y(), 0.0f, 1.0f) * 255.0f), 0, (1<<8)-1);
+				const deUint16	ref2	= (deUint8)de::clamp(deRoundFloatToInt32(de::clamp(inputs[valNdx].z(), 0.0f, 1.0f) * 255.0f), 0, (1<<8)-1);
+				const deUint16	ref3	= (deUint8)de::clamp(deRoundFloatToInt32(de::clamp(inputs[valNdx].w(), 0.0f, 1.0f) * 255.0f), 0, (1<<8)-1);
+				const deUint32	ref		= (deUint32(ref3) << 24) | (deUint32(ref2) << 16) | (deUint32(ref1) << 8) | deUint32(ref0);
+				const deUint32	res		= outputs[valNdx];
+				const deUint16	res0	= (deUint8)(res & 0xff);
+				const deUint16	res1	= (deUint8)((res >> 8) & 0xff);
+				const deUint16	res2	= (deUint8)((res >> 16) & 0xff);
+				const deUint16	res3	= (deUint8)((res >> 24) & 0xff);
+				const int		diff0	= de::abs((int)ref0 - (int)res0);
+				const int		diff1	= de::abs((int)ref1 - (int)res1);
+				const int		diff2	= de::abs((int)ref2 - (int)res2);
+				const int		diff3	= de::abs((int)ref3 - (int)res3);
+
+				if (diff0 > maxDiff || diff1 > maxDiff || diff2 > maxDiff || diff3 > maxDiff)
+				{
+					if (numFailed < maxPrints)
+					{
+						m_testCtx.getLog() << TestLog::Message << "ERROR: Mismatch in value " << valNdx
+															   << ", expected packUnorm4x8(" << inputs[valNdx] << ") = " << tcu::toHex(ref)
+															   << ", got " << tcu::toHex(res)
+															   << "\n  diffs = " << tcu::IVec4(diff0, diff1, diff2, diff3) << ", max diff = " << maxDiff
+										   << TestLog::EndMessage;
+					}
+					else if (numFailed == maxPrints)
+						m_testCtx.getLog() << TestLog::Message << "..." << TestLog::EndMessage;
+
+					numFailed += 1;
+				}
+			}
+
+			m_testCtx.getLog() << TestLog::Message << (numValues - numFailed) << " / " << numValues << " values passed" << TestLog::EndMessage;
+
+			if (numFailed == 0)
+				return tcu::TestStatus::pass("Pass");
+			else
+				return tcu::TestStatus::fail("Result comparison failed");
+
+		}
+	}
+
+private:
+	const glu::Precision m_precision;
+};
+
+class PackUnorm4x8Case : public ShaderPackingFunctionCase
+{
+public:
+	PackUnorm4x8Case (tcu::TestContext& testCtx, glu::ShaderType shaderType, glu::Precision precision)
+		: ShaderPackingFunctionCase	(testCtx, (string("packunorm4x8") + getPrecisionPostfix(precision) + getShaderTypePostfix(shaderType)).c_str(), "packUnorm4x8", shaderType)
+		, m_precision				(precision)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(glu::TYPE_FLOAT_VEC4, precision)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP)));
+
+		m_spec.source = "out0 = packUnorm4x8(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new PackUnorm4x8CaseInstance(ctx, m_shaderType, m_spec, m_precision, m_executor, getName());
+	}
+
+private:
+	const glu::Precision m_precision;
+};
+
+class UnpackUnorm4x8CaseInstance : public ShaderPackingFunctionTestInstance
+{
+public:
+	UnpackUnorm4x8CaseInstance (Context& context, glu::ShaderType shaderType, ShaderSpec spec, de::SharedPtr<ShaderExecutor> executor, const char* name)
+		: ShaderPackingFunctionTestInstance	(context, shaderType, spec, executor, name)
+	{
+	}
+
+	tcu::TestStatus iterate (void)
+	{
+		const deUint32				maxDiff		= 1; // Rounding error.
+		de::Random					rnd			(deStringHash(m_name) ^ 0x776002);
+		std::vector<deUint32>		inputs;
+		std::vector<tcu::Vec4>		outputs;
+
+		inputs.push_back(0x00000000u);
+		inputs.push_back(0x7fff8000u);
+		inputs.push_back(0x80007fffu);
+		inputs.push_back(0xffffffffu);
+		inputs.push_back(0x0001fffeu);
+
+		// Random values.
+		for (int ndx = 0; ndx < 95; ndx++)
+			inputs.push_back(rnd.getUint32());
+
+		outputs.resize(inputs.size());
+
+		m_testCtx.getLog() << TestLog::Message << "Executing shader for " << inputs.size() << " input values" << tcu::TestLog::EndMessage;
+
+		{
+			const void*	in	= &inputs[0];
+			void*		out	= &outputs[0];
+
+			m_executor->execute(m_context, (int)inputs.size(), &in, &out);
+		}
+
+		// Verify
+		{
+			const int	numValues	= (int)inputs.size();
+			const int	maxPrints	= 10;
+			int			numFailed	= 0;
+
+			for (int valNdx = 0; valNdx < (int)inputs.size(); valNdx++)
+			{
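+				// Reference: each byte is mapped to [0, 1] as clamp(v / 255, 0, 1).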
+				const deUint8	in0		= (deUint8)(inputs[valNdx] & 0xff);
+				const deUint8	in1		= (deUint8)((inputs[valNdx] >> 8) & 0xff);
+				const deUint8	in2		= (deUint8)((inputs[valNdx] >> 16) & 0xff);
+				const deUint8	in3		= (deUint8)(inputs[valNdx] >> 24);
+				const float		ref0	= de::clamp(float(in0) / 255.f, 0.0f, 1.0f);
+				const float		ref1	= de::clamp(float(in1) / 255.f, 0.0f, 1.0f);
+				const float		ref2	= de::clamp(float(in2) / 255.f, 0.0f, 1.0f);
+				const float		ref3	= de::clamp(float(in3) / 255.f, 0.0f, 1.0f);
+				const float		res0	= outputs[valNdx].x();
+				const float		res1	= outputs[valNdx].y();
+				const float		res2	= outputs[valNdx].z();
+				const float		res3	= outputs[valNdx].w();
+
+				const deUint32	diff0	= getUlpDiff(ref0, res0);
+				const deUint32	diff1	= getUlpDiff(ref1, res1);
+				const deUint32	diff2	= getUlpDiff(ref2, res2);
+				const deUint32	diff3	= getUlpDiff(ref3, res3);
+
+				if (diff0 > maxDiff || diff1 > maxDiff || diff2 > maxDiff || diff3 > maxDiff)
+				{
+					if (numFailed < maxPrints)
+					{
+						m_testCtx.getLog() << TestLog::Message << "ERROR: Mismatch in value " << valNdx << ",\n"
+															   << "  expected unpackUnorm4x8(" << tcu::toHex(inputs[valNdx]) << ") = "
+															   << "vec4(" << HexFloat(ref0) << ", " << HexFloat(ref1) << ", " << HexFloat(ref2) << ", " << HexFloat(ref3) << ")"
+															   << ", got vec4(" << HexFloat(res0) << ", " << HexFloat(res1) << ", " << HexFloat(res2) << ", " << HexFloat(res3) << ")"
+															   << "\n  ULP diffs = (" << diff0 << ", " << diff1 << ", " << diff2 << ", " << diff3 << "), max diff = " << maxDiff
+										   << TestLog::EndMessage;
+					}
+					else if (numFailed == maxPrints)
+						m_testCtx.getLog() << TestLog::Message << "..." << TestLog::EndMessage;
+
+					numFailed += 1;
+				}
+			}
+
+			m_testCtx.getLog() << TestLog::Message << (numValues - numFailed) << " / " << numValues << " values passed" << TestLog::EndMessage;
+
+			if (numFailed == 0)
+				return tcu::TestStatus::pass("Pass");
+			else
+				return tcu::TestStatus::fail("Result comparison failed");
+
+		}
+	}
+};
+
+class UnpackUnorm4x8Case : public ShaderPackingFunctionCase
+{
+public:
+	UnpackUnorm4x8Case (tcu::TestContext& testCtx, glu::ShaderType shaderType)
+		: ShaderPackingFunctionCase	(testCtx, (string("unpackunorm4x8") + getShaderTypePostfix(shaderType)).c_str(), "unpackUnorm4x8", shaderType)
+	{
+		m_spec.inputs.push_back(Symbol("in0", glu::VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP)));
+		m_spec.outputs.push_back(Symbol("out0", glu::VarType(glu::TYPE_FLOAT_VEC4, glu::PRECISION_HIGHP)));
+
+		m_spec.source = "out0 = unpackUnorm4x8(in0);";
+		init();
+	}
+
+	TestInstance* createInstance (Context& ctx) const
+	{
+		return new UnpackUnorm4x8CaseInstance(ctx, m_shaderType, m_spec, m_executor, getName());
+	}
+
+};
+
+ShaderPackingFunctionTests::ShaderPackingFunctionTests (tcu::TestContext& testCtx)
+	: tcu::TestCaseGroup	(testCtx, "pack_unpack", "Floating-point pack and unpack function tests")
+{
+}
+
+ShaderPackingFunctionTests::~ShaderPackingFunctionTests (void)
+{
+}
+
+void ShaderPackingFunctionTests::init (void)
+{
+	// Built-in functions new in GLES 3.1: packSnorm4x8, unpackSnorm4x8, packUnorm4x8 and unpackUnorm4x8.
+	{
+		const glu::ShaderType allShaderTypes[] =
+		{
+			glu::SHADERTYPE_VERTEX,
+			glu::SHADERTYPE_TESSELLATION_CONTROL,
+			glu::SHADERTYPE_TESSELLATION_EVALUATION,
+			glu::SHADERTYPE_GEOMETRY,
+			glu::SHADERTYPE_FRAGMENT,
+			glu::SHADERTYPE_COMPUTE
+		};
+
+		// packSnorm4x8
+		for (int prec = glu::PRECISION_MEDIUMP; prec < glu::PRECISION_LAST; prec++)
+		{
+			for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(allShaderTypes); shaderTypeNdx++)
+				addChild(new PackSnorm4x8Case(m_testCtx, allShaderTypes[shaderTypeNdx], glu::Precision(prec)));
+		}
+
+		// unpackSnorm4x8
+		for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(allShaderTypes); shaderTypeNdx++)
+			addChild(new UnpackSnorm4x8Case(m_testCtx, allShaderTypes[shaderTypeNdx]));
+
+		// packUnorm4x8
+		for (int prec = glu::PRECISION_MEDIUMP; prec < glu::PRECISION_LAST; prec++)
+		{
+			for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(allShaderTypes); shaderTypeNdx++)
+				addChild(new PackUnorm4x8Case(m_testCtx, allShaderTypes[shaderTypeNdx], glu::Precision(prec)));
+		}
+
+		// unpackUnorm4x8
+		for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(allShaderTypes); shaderTypeNdx++)
+			addChild(new UnpackUnorm4x8Case(m_testCtx, allShaderTypes[shaderTypeNdx]));
+	}
+
+	// GLES 3.0 functions, exercised in the shader stages added after GLES 3.0 (geometry and compute).
+	{
+		const glu::ShaderType newShaderTypes[] =
+		{
+			glu::SHADERTYPE_GEOMETRY,
+			glu::SHADERTYPE_COMPUTE
+		};
+
+		// packSnorm2x16
+		for (int prec = glu::PRECISION_MEDIUMP; prec < glu::PRECISION_LAST; prec++)
+		{
+			for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(newShaderTypes); shaderTypeNdx++)
+				addChild(new PackSnorm2x16Case(m_testCtx, newShaderTypes[shaderTypeNdx], glu::Precision(prec)));
+		}
+
+		// unpackSnorm2x16
+		for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(newShaderTypes); shaderTypeNdx++)
+			addChild(new UnpackSnorm2x16Case(m_testCtx, newShaderTypes[shaderTypeNdx]));
+
+		// packUnorm2x16
+		for (int prec = glu::PRECISION_MEDIUMP; prec < glu::PRECISION_LAST; prec++)
+		{
+			for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(newShaderTypes); shaderTypeNdx++)
+				addChild(new PackUnorm2x16Case(m_testCtx, newShaderTypes[shaderTypeNdx], glu::Precision(prec)));
+		}
+
+		// unpackUnorm2x16
+		for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(newShaderTypes); shaderTypeNdx++)
+			addChild(new UnpackUnorm2x16Case(m_testCtx, newShaderTypes[shaderTypeNdx]));
+
+		// packHalf2x16
+		for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(newShaderTypes); shaderTypeNdx++)
+			addChild(new PackHalf2x16Case(m_testCtx, newShaderTypes[shaderTypeNdx]));
+
+		// unpackHalf2x16
+		for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(newShaderTypes); shaderTypeNdx++)
+			addChild(new UnpackHalf2x16Case(m_testCtx, newShaderTypes[shaderTypeNdx]));
+	}
+}
+
+} // shaderexecutor
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderPackingFunctionTests.hpp b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderPackingFunctionTests.hpp
new file mode 100644
index 0000000..61afedb
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderexecutor/vktShaderPackingFunctionTests.hpp
@@ -0,0 +1,61 @@
+#ifndef _VKTSHADERPACKINGFUNCTIONTESTS_HPP
+#define _VKTSHADERPACKINGFUNCTIONTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Floating-point packing and unpacking function tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace shaderexecutor
+{
+
+class ShaderPackingFunctionTests : public tcu::TestCaseGroup
+{
+public:
+									ShaderPackingFunctionTests				(tcu::TestContext& testCtx);
+	virtual							~ShaderPackingFunctionTests				(void);
+
+	virtual void					init									(void);
+
+private:
+									ShaderPackingFunctionTests				(const ShaderPackingFunctionTests&);		// not allowed!
+	ShaderPackingFunctionTests&		operator=								(const ShaderPackingFunctionTests&);		// not allowed!
+};
+
+} // shaderexecutor
+} // vkt
+
+#endif // _VKTSHADERPACKINGFUNCTIONTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderrender/CMakeLists.txt b/external/vulkancts/modules/vulkan/shaderrender/CMakeLists.txt
new file mode 100644
index 0000000..2a697a1
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/CMakeLists.txt
@@ -0,0 +1,33 @@
+include_directories(
+	..
+	)
+
+set(DEQP_VK_SHADERRENDER_SRCS
+	vktShaderRender.cpp
+	vktShaderRender.hpp
+	vktShaderRenderDiscardTests.cpp
+	vktShaderRenderDiscardTests.hpp
+	vktShaderRenderIndexingTests.cpp
+	vktShaderRenderIndexingTests.hpp
+	vktShaderRenderLoopTests.cpp
+	vktShaderRenderLoopTests.hpp
+	vktShaderRenderMatrixTests.cpp
+	vktShaderRenderMatrixTests.hpp
+	vktShaderRenderOperatorTests.cpp
+	vktShaderRenderOperatorTests.hpp
+	vktShaderRenderReturnTests.cpp
+	vktShaderRenderReturnTests.hpp
+	vktShaderRenderStructTests.cpp
+	vktShaderRenderStructTests.hpp
+	vktShaderRenderSwitchTests.cpp
+	vktShaderRenderSwitchTests.hpp
+	)
+
+set(DEQP_VK_SHADERRENDER_LIBS
+	deqp-vk-common
+	tcutil
+	vkutil
+	)
+
+add_library(deqp-vk-shaderrender STATIC ${DEQP_VK_SHADERRENDER_SRCS})
+target_link_libraries(deqp-vk-shaderrender ${DEQP_VK_SHADERRENDER_LIBS})
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRender.cpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRender.cpp
new file mode 100644
index 0000000..6a23b5f
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRender.cpp
@@ -0,0 +1,1843 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan ShaderRenderCase
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderRender.hpp"
+
+#include "tcuImageCompare.hpp"
+#include "tcuImageIO.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuSurface.hpp"
+#include "tcuVector.hpp"
+
+#include "deFilePath.hpp"
+#include "deMath.h"
+#include "deUniquePtr.hpp"
+
+#include "vkDeviceUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkStrUtil.hpp"
+#include "vkTypeUtil.hpp"
+
+#include <vector>
+#include <string>
+
+namespace vkt
+{
+namespace sr
+{
+
+using namespace vk;
+
+namespace
+{
+
+static const int		GRID_SIZE			= 2;
+static const deUint32	MAX_RENDER_WIDTH	= 128;
+static const deUint32	MAX_RENDER_HEIGHT	= 128;
+static const tcu::Vec4	DEFAULT_CLEAR_COLOR	= tcu::Vec4(0.125f, 0.25f, 0.5f, 1.0f);
+
+static bool isSupportedLinearTilingFormat (const InstanceInterface& instanceInterface, VkPhysicalDevice device, VkFormat format)
+{
+	VkFormatProperties formatProps;
+
+	instanceInterface.getPhysicalDeviceFormatProperties(device, format, &formatProps);
+
+	return (formatProps.linearTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) != 0u;
+}
+
+static bool isSupportedOptimalTilingFormat (const InstanceInterface& instanceInterface, VkPhysicalDevice device, VkFormat format)
+{
+	VkFormatProperties formatProps;
+
+	instanceInterface.getPhysicalDeviceFormatProperties(device, format, &formatProps);
+
+	return (formatProps.optimalTilingFeatures & VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) != 0u;
+}
+
+static VkImageMemoryBarrier createImageMemoryBarrier (const VkImage&	image,
+													  VkAccessFlags		srcAccessMask,
+													  VkAccessFlags		dstAccessMask,
+													  VkImageLayout		oldLayout,
+													  VkImageLayout		newLayout)
+{
+	VkImageMemoryBarrier imageMemoryBarrier	=
+	{
+		VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// VkStructureType				sType;
+		DE_NULL,									// const void*					pNext;
+		srcAccessMask,								// VkAccessFlags				srcAccessMask;
+		dstAccessMask,								// VkAccessFlags				dstAccessMask;
+		oldLayout,									// VkImageLayout				oldLayout;
+		newLayout,									// VkImageLayout				newLayout;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32						srcQueueFamilyIndex;
+		VK_QUEUE_FAMILY_IGNORED,					// deUint32						dstQueueFamilyIndex;
+		image,										// VkImage						image;
+		{
+			VK_IMAGE_ASPECT_COLOR_BIT,	// VkImageAspectFlags	aspectMask;
+			0,							// deUint32				baseMipLevel;
+			1,							// deUint32				mipLevels;
+			0,							// deUint32				baseArrayLayer;
+			1							// deUint32				arraySize;
+		}											// VkImageSubresourceRange		subresourceRange;
+	};
+	return imageMemoryBarrier;
+}
+
+} // anonymous
+
+// QuadGrid.
+
+class QuadGrid
+{
+public:
+											QuadGrid				(int									gridSize,
+																	 int									screenWidth,
+																	 int									screenHeight,
+																	 const tcu::Vec4&						constCoords,
+																	 const std::vector<tcu::Mat4>&			userAttribTransforms,
+																	 const std::vector<TextureBindingSp>&	textures);
+											~QuadGrid				(void);
+
+	int										getGridSize				(void) const { return m_gridSize; }
+	int										getNumVertices			(void) const { return m_numVertices; }
+	int										getNumTriangles			(void) const { return m_numTriangles; }
+	const tcu::Vec4&						getConstCoords			(void) const { return m_constCoords; }
+	const std::vector<tcu::Mat4>			getUserAttribTransforms	(void) const { return m_userAttribTransforms; }
+	const std::vector<TextureBindingSp>&	getTextures				(void) const { return m_textures; }
+
+	const tcu::Vec4*						getPositions			(void) const { return &m_positions[0]; }
+	const float*							getAttribOne			(void) const { return &m_attribOne[0]; }
+	const tcu::Vec4*						getCoords				(void) const { return &m_coords[0]; }
+	const tcu::Vec4*						getUnitCoords			(void) const { return &m_unitCoords[0]; }
+
+	const tcu::Vec4*						getUserAttrib			(int attribNdx) const { return &m_userAttribs[attribNdx][0]; }
+	const deUint16*							getIndices				(void) const { return &m_indices[0]; }
+
+	tcu::Vec4								getCoords				(float sx, float sy) const;
+	tcu::Vec4								getUnitCoords			(float sx, float sy) const;
+
+	int										getNumUserAttribs		(void) const { return (int)m_userAttribTransforms.size(); }
+	tcu::Vec4								getUserAttrib			(int attribNdx, float sx, float sy) const;
+
+private:
+	const int								m_gridSize;
+	const int								m_numVertices;
+	const int								m_numTriangles;
+	const tcu::Vec4							m_constCoords;
+	const std::vector<tcu::Mat4>			m_userAttribTransforms;
+
+	const std::vector<TextureBindingSp>&	m_textures;
+
+	std::vector<tcu::Vec4>					m_screenPos;
+	std::vector<tcu::Vec4>					m_positions;
+	std::vector<tcu::Vec4>					m_coords;		//!< Near-unit coordinates, roughly [-2.0 .. 2.0].
+	std::vector<tcu::Vec4>					m_unitCoords;	//!< Positive-only coordinates [0.0 .. 1.5].
+	std::vector<float>						m_attribOne;
+	std::vector<tcu::Vec4>					m_userAttribs[ShaderEvalContext::MAX_TEXTURES];
+	std::vector<deUint16>					m_indices;
+};
+
+QuadGrid::QuadGrid (int										gridSize,
+					int										width,
+					int										height,
+					const tcu::Vec4&						constCoords,
+					const std::vector<tcu::Mat4>&			userAttribTransforms,
+					const std::vector<TextureBindingSp>&	textures)
+	: m_gridSize				(gridSize)
+	, m_numVertices				((gridSize + 1) * (gridSize + 1))
+	, m_numTriangles			(gridSize * gridSize * 2)
+	, m_constCoords				(constCoords)
+	, m_userAttribTransforms	(userAttribTransforms)
+	, m_textures				(textures)
+{
+	const tcu::Vec4 viewportScale	((float)width, (float)height, 0.0f, 0.0f);
+
+	// Compute vertices.
+	m_screenPos.resize(m_numVertices);
+	m_positions.resize(m_numVertices);
+	m_coords.resize(m_numVertices);
+	m_unitCoords.resize(m_numVertices);
+	m_attribOne.resize(m_numVertices);
+
+	// User attributes.
+	for (int attrNdx = 0; attrNdx < DE_LENGTH_OF_ARRAY(m_userAttribs); attrNdx++)
+		m_userAttribs[attrNdx].resize(m_numVertices);
+
+	for (int y = 0; y < gridSize+1; y++)
+	for (int x = 0; x < gridSize+1; x++)
+	{
+		float		sx			= (float)x / (float)gridSize;
+		float		sy			= (float)y / (float)gridSize;
+		float		fx			= 2.0f * sx - 1.0f;
+		float		fy			= 2.0f * sy - 1.0f;
+		int			vtxNdx		= ((y * (gridSize+1)) + x);
+
+		m_positions[vtxNdx]		= tcu::Vec4(fx, fy, 0.0f, 1.0f);
+		m_coords[vtxNdx]		= getCoords(sx, sy);
+		m_unitCoords[vtxNdx]	= getUnitCoords(sx, sy);
+		m_attribOne[vtxNdx]		= 1.0f;
+
+		m_screenPos[vtxNdx]		= tcu::Vec4(sx, sy, 0.0f, 1.0f) * viewportScale;
+
+		for (int attribNdx = 0; attribNdx < getNumUserAttribs(); attribNdx++)
+			m_userAttribs[attribNdx][vtxNdx] = getUserAttrib(attribNdx, sx, sy);
+	}
+
+	// Compute indices.
+	m_indices.resize(3 * m_numTriangles);
+	for (int y = 0; y < gridSize; y++)
+	for (int x = 0; x < gridSize; x++)
+	{
+		int stride				= gridSize + 1;
+		int v00					= (y * stride) + x;
+		int v01					= (y * stride) + x + 1;
+		int v10					= ((y+1) * stride) + x;
+		int v11					= ((y+1) * stride) + x + 1;
+
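+		// Each cell is split into two triangles along the v01-v10 diagonal.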
+		int baseNdx				= ((y * gridSize) + x) * 6;
+		m_indices[baseNdx + 0]	= (deUint16)v10;
+		m_indices[baseNdx + 1]	= (deUint16)v00;
+		m_indices[baseNdx + 2]	= (deUint16)v01;
+
+		m_indices[baseNdx + 3]	= (deUint16)v10;
+		m_indices[baseNdx + 4]	= (deUint16)v01;
+		m_indices[baseNdx + 5]	= (deUint16)v11;
+	}
+}
+
+QuadGrid::~QuadGrid (void)
+{
+}
+
+inline tcu::Vec4 QuadGrid::getCoords (float sx, float sy) const
+{
+	const float fx = 2.0f * sx - 1.0f;
+	const float fy = 2.0f * sy - 1.0f;
+	return tcu::Vec4(fx, fy, -fx + 0.33f*fy, -0.275f*fx - fy);
+}
+
+inline tcu::Vec4 QuadGrid::getUnitCoords (float sx, float sy) const
+{
+	return tcu::Vec4(sx, sy, 0.33f*sx + 0.5f*sy, 0.5f*sx + 0.25f*sy);
+}
+
+inline tcu::Vec4 QuadGrid::getUserAttrib (int attribNdx, float sx, float sy) const
+{
+	// homogeneous normalized screen-space coordinates
+	return m_userAttribTransforms[attribNdx] * tcu::Vec4(sx, sy, 0.0f, 1.0f);
+}
+
+// TextureBinding
+
+TextureBinding::TextureBinding (const tcu::Archive&	archive,
+								const char*			filename,
+								const Type			type,
+								const tcu::Sampler&	sampler)
+	: m_type	(type)
+	, m_sampler	(sampler)
+{
+	switch(m_type)
+	{
+		case TYPE_2D: m_binding.tex2D = loadTexture2D(archive, filename).release(); break;
+		default:
+			DE_FATAL("Unsupported texture type");
+	}
+}
+
+TextureBinding::~TextureBinding (void)
+{
+	switch(m_type)
+	{
+		case TYPE_2D: delete m_binding.tex2D; break;
+		default: break;
+	}
+}
+
+de::MovePtr<tcu::Texture2D> TextureBinding::loadTexture2D (const tcu::Archive& archive, const char* filename)
+{
+	tcu::TextureLevel level;
+	tcu::ImageIO::loadImage(level, archive, filename);
+
+	TCU_CHECK_INTERNAL(level.getFormat() == tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8) ||
+					   level.getFormat() == tcu::TextureFormat(tcu::TextureFormat::RGB, tcu::TextureFormat::UNORM_INT8));
+
+	// \todo [2015-10-08 elecro] for some reason we get better results when using an RGBA texture even in the RGB case; this needs to be investigated
+	de::MovePtr<tcu::Texture2D> texture(new tcu::Texture2D(tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8), level.getWidth(), level.getHeight()));
+
+	// Fill level 0.
+	texture->allocLevel(0);
+	tcu::copy(texture->getLevel(0), level.getAccess());
+
+	return texture;
+}
+
+// ShaderEvalContext.
+
+ShaderEvalContext::ShaderEvalContext (const QuadGrid& quadGrid)
+	: constCoords	(quadGrid.getConstCoords())
+	, isDiscarded	(false)
+	, m_quadGrid	(quadGrid)
+{
+	const std::vector<TextureBindingSp>& bindings = m_quadGrid.getTextures();
+	DE_ASSERT((int)bindings.size() <= MAX_TEXTURES);
+
+	// Fill in texture array.
+	for (int ndx = 0; ndx < (int)bindings.size(); ndx++)
+	{
+		const TextureBinding& binding = *bindings[ndx];
+
+		if (binding.getType() == TextureBinding::TYPE_NONE)
+			continue;
+
+		textures[ndx].sampler = binding.getSampler();
+
+		switch (binding.getType())
+		{
+			case TextureBinding::TYPE_2D:		textures[ndx].tex2D			= &binding.get2D();		break;
+			// \todo [2015-09-07 elecro] Add support for the other binding types
+			/*
+			case TextureBinding::TYPE_CUBE_MAP:	textures[ndx].texCube		= binding.getCube();	break;
+			case TextureBinding::TYPE_2D_ARRAY:	textures[ndx].tex2DArray	= binding.get2DArray();	break;
+			case TextureBinding::TYPE_3D:		textures[ndx].tex3D			= binding.get3D();		break;
+			*/
+			default:
+				TCU_THROW(InternalError, "Handling of texture binding type not implemented");
+		}
+	}
+}
+
+ShaderEvalContext::~ShaderEvalContext (void)
+{
+}
+
+void ShaderEvalContext::reset (float sx, float sy)
+{
+	// Clear old values
+	color		= tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
+	isDiscarded	= false;
+
+	// Compute coords
+	coords		= m_quadGrid.getCoords(sx, sy);
+	unitCoords	= m_quadGrid.getUnitCoords(sx, sy);
+
+	// Compute user attributes.
+	const int numAttribs = m_quadGrid.getNumUserAttribs();
+	DE_ASSERT(numAttribs <= MAX_USER_ATTRIBS);
+	for (int attribNdx = 0; attribNdx < numAttribs; attribNdx++)
+		in[attribNdx] = m_quadGrid.getUserAttrib(attribNdx, sx, sy);
+}
+
+tcu::Vec4 ShaderEvalContext::texture2D (int unitNdx, const tcu::Vec2& texCoords)
+{
+	if (textures[unitNdx].tex2D)
+		return textures[unitNdx].tex2D->sample(textures[unitNdx].sampler, texCoords.x(), texCoords.y(), 0.0f);
+	else
+		return tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
+}
+
+// ShaderEvaluator.
+
+ShaderEvaluator::ShaderEvaluator (void)
+	: m_evalFunc(DE_NULL)
+{
+}
+
+ShaderEvaluator::ShaderEvaluator (ShaderEvalFunc evalFunc)
+	: m_evalFunc(evalFunc)
+{
+}
+
+ShaderEvaluator::~ShaderEvaluator (void)
+{
+}
+
+void ShaderEvaluator::evaluate (ShaderEvalContext& ctx) const
+{
+	DE_ASSERT(m_evalFunc);
+	m_evalFunc(ctx);
+}
+
+// UniformSetup.
+
+UniformSetup::UniformSetup (void)
+	: m_setupFunc(DE_NULL)
+{
+}
+
+UniformSetup::UniformSetup (UniformSetupFunc setupFunc)
+	: m_setupFunc(setupFunc)
+{
+}
+
+UniformSetup::~UniformSetup (void)
+{
+}
+
+void UniformSetup::setup (ShaderRenderCaseInstance& instance, const tcu::Vec4& constCoords) const
+{
+	if (m_setupFunc)
+		m_setupFunc(instance, constCoords);
+}
+
+// ShaderRenderCase.
+
+ShaderRenderCase::ShaderRenderCase (tcu::TestContext&			testCtx,
+									const std::string&			name,
+									const std::string&			description,
+									const bool					isVertexCase,
+									const ShaderEvalFunc		evalFunc,
+									const UniformSetup*			uniformSetup,
+									const AttributeSetupFunc	attribFunc)
+	: vkt::TestCase		(testCtx, name, description)
+	, m_isVertexCase	(isVertexCase)
+	, m_evaluator		(new ShaderEvaluator(evalFunc))
+	, m_uniformSetup	(uniformSetup ? uniformSetup : new UniformSetup())
+	, m_attribFunc		(attribFunc)
+{}
+
+ShaderRenderCase::ShaderRenderCase (tcu::TestContext&			testCtx,
+									const std::string&			name,
+									const std::string&			description,
+									const bool					isVertexCase,
+									const ShaderEvaluator*		evaluator,
+									const UniformSetup*			uniformSetup,
+									const AttributeSetupFunc	attribFunc)
+	: vkt::TestCase		(testCtx, name, description)
+	, m_isVertexCase	(isVertexCase)
+	, m_evaluator		(evaluator)
+	, m_uniformSetup	(uniformSetup ? uniformSetup : new UniformSetup())
+	, m_attribFunc		(attribFunc)
+{}
+
+ShaderRenderCase::~ShaderRenderCase (void)
+{
+}
+
+void ShaderRenderCase::initPrograms (vk::SourceCollections& programCollection) const
+{
+	programCollection.glslSources.add("vert") << glu::VertexSource(m_vertShaderSource);
+	programCollection.glslSources.add("frag") << glu::FragmentSource(m_fragShaderSource);
+}
+
+TestInstance* ShaderRenderCase::createInstance (Context& context) const
+{
+	DE_ASSERT(m_evaluator != DE_NULL);
+	DE_ASSERT(m_uniformSetup != DE_NULL);
+	return new ShaderRenderCaseInstance(context, m_isVertexCase, *m_evaluator, *m_uniformSetup, m_attribFunc);
+}
+
+// ShaderRenderCaseInstance.
+
+ShaderRenderCaseInstance::ShaderRenderCaseInstance (Context&					context,
+													const bool					isVertexCase,
+													const ShaderEvaluator&		evaluator,
+													const UniformSetup&			uniformSetup,
+													const AttributeSetupFunc	attribFunc)
+	: vkt::TestInstance	(context)
+	, m_clearColor		(DEFAULT_CLEAR_COLOR)
+	, m_memAlloc		(context.getDefaultAllocator())
+	, m_isVertexCase	(isVertexCase)
+	, m_evaluator		(evaluator)
+	, m_uniformSetup	(uniformSetup)
+	, m_attribFunc		(attribFunc)
+	, m_renderSize		(128, 128)
+	, m_colorFormat		(VK_FORMAT_R8G8B8A8_UNORM)
+{
+}
+
+ShaderRenderCaseInstance::~ShaderRenderCaseInstance (void)
+{
+}
+
+tcu::TestStatus ShaderRenderCaseInstance::iterate (void)
+{
+	setup();
+
+	// Create quad grid.
+	const tcu::UVec2	viewportSize	= getViewportSize();
+	const int			width			= viewportSize.x();
+	const int			height			= viewportSize.y();
+
+	QuadGrid			quadGrid		(m_isVertexCase ? GRID_SIZE : 4, width, height, tcu::Vec4(0.125f, 0.25f, 0.5f, 1.0f), m_userAttribTransforms, m_textures);
+
+	// Render result.
+	tcu::Surface		resImage		(width, height);
+	render(resImage, quadGrid);
+
+	// Compute reference.
+	tcu::Surface		refImage		(width, height);
+	if (m_isVertexCase)
+		computeVertexReference(refImage, quadGrid);
+	else
+		computeFragmentReference(refImage, quadGrid);
+
+	// Compare.
+	const bool			compareOk		= compareImages(resImage, refImage, 0.05f);
+
+	if (compareOk)
+		return tcu::TestStatus::pass("Result image matches reference");
+	else
+		return tcu::TestStatus::fail("Image mismatch");
+}
+
+void ShaderRenderCaseInstance::setupUniformData (deUint32 bindingLocation, size_t size, const void* dataPtr)
+{
+	const VkDevice					vkDevice			= m_context.getDevice();
+	const DeviceInterface&			vk					= m_context.getDeviceInterface();
+	const deUint32					queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+
+	const VkBufferCreateInfo		uniformBufferParams	=
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+		DE_NULL,									// const void*			pNext;
+		0u,											// VkBufferCreateFlags	flags;
+		size,										// VkDeviceSize			size;
+		VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+		VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+		1u,											// deUint32				queueFamilyCount;
+		&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+	};
+
+	Move<VkBuffer>					buffer				= createBuffer(vk, vkDevice, &uniformBufferParams);
+	de::MovePtr<Allocation>			alloc				= m_memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *buffer), MemoryRequirement::HostVisible);
+	VK_CHECK(vk.bindBufferMemory(vkDevice, *buffer, alloc->getMemory(), alloc->getOffset()));
+
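+	// Copy the uniform data into the host-visible allocation and flush the mapped range so the device sees the update.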
+	deMemcpy(alloc->getHostPtr(), dataPtr, size);
+	flushMappedMemoryRange(vk, vkDevice, alloc->getMemory(), alloc->getOffset(), size);
+
+	de::MovePtr<BufferUniform> uniformInfo(new BufferUniform());
+	uniformInfo->type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+	uniformInfo->descriptor = makeDescriptorBufferInfo(*buffer, 0u, size);
+	uniformInfo->location = bindingLocation;
+	uniformInfo->buffer = VkBufferSp(new vk::Unique<VkBuffer>(buffer));
+	uniformInfo->alloc = AllocationSp(alloc.release());
+
+	m_uniformInfos.push_back(UniformInfoSp(new de::UniquePtr<UniformInfo>(uniformInfo)));
+}
+
+void ShaderRenderCaseInstance::addUniform (deUint32 bindingLocation, vk::VkDescriptorType descriptorType, size_t dataSize, const void* data)
+{
+	m_descriptorSetLayoutBuilder.addSingleBinding(descriptorType, vk::VK_SHADER_STAGE_ALL);
+	m_descriptorPoolBuilder.addType(descriptorType);
+
+	setupUniformData(bindingLocation, dataSize, data);
+}
+
+void ShaderRenderCaseInstance::addAttribute (deUint32		bindingLocation,
+											 vk::VkFormat	format,
+											 deUint32		sizePerElement,
+											 deUint32		count,
+											 const void*	dataPtr)
+{
+	// Add binding specification
+	const deUint32							binding					= (deUint32)m_vertexBindingDescription.size();
+	const VkVertexInputBindingDescription	bindingDescription		=
+	{
+		binding,							// deUint32				binding;
+		sizePerElement,						// deUint32				stride;
+		VK_VERTEX_INPUT_RATE_VERTEX			// VkVertexInputRate	stepRate;
+	};
+
+	m_vertexBindingDescription.push_back(bindingDescription);
+
+	// Add location and format specification
+	const VkVertexInputAttributeDescription	attributeDescription	=
+	{
+		bindingLocation,			// deUint32	location;
+		binding,					// deUint32	binding;
+		format,						// VkFormat	format;
+		0u,							// deUint32	offset;
+	};
+
+	m_vertexattributeDescription.push_back(attributeDescription);
+
+	// Upload data to buffer
+	const VkDevice							vkDevice				= m_context.getDevice();
+	const DeviceInterface&					vk						= m_context.getDeviceInterface();
+	const deUint32							queueFamilyIndex		= m_context.getUniversalQueueFamilyIndex();
+
+	const VkDeviceSize						inputSize				= sizePerElement * count;
+	const VkBufferCreateInfo				vertexBufferParams		=
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+		DE_NULL,									// const void*			pNext;
+		0u,											// VkBufferCreateFlags	flags;
+		inputSize,									// VkDeviceSize			size;
+		VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+		VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+		1u,											// deUint32				queueFamilyCount;
+		&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+	};
+
+	Move<VkBuffer>							buffer					= createBuffer(vk, vkDevice, &vertexBufferParams);
+	de::MovePtr<vk::Allocation>				alloc					= m_memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *buffer), MemoryRequirement::HostVisible);
+	VK_CHECK(vk.bindBufferMemory(vkDevice, *buffer, alloc->getMemory(), alloc->getOffset()));
+
+	deMemcpy(alloc->getHostPtr(), dataPtr, (size_t)inputSize);
+	flushMappedMemoryRange(vk, vkDevice, alloc->getMemory(), alloc->getOffset(), inputSize);
+
+	m_vertexBuffers.push_back(VkBufferSp(new vk::Unique<VkBuffer>(buffer)));
+	m_vertexBufferAllocs.push_back(AllocationSp(alloc.release()));
+}
+
+void ShaderRenderCaseInstance::useAttribute (deUint32 bindingLocation, BaseAttributeType type)
+{
+	const EnabledBaseAttribute attribute =
+	{
+		bindingLocation,	// deUint32				location;
+		type				// BaseAttributeType	type;
+	};
+	m_enabledBaseAttributes.push_back(attribute);
+}
+
+void ShaderRenderCaseInstance::setup (void)
+{
+}
+
+void ShaderRenderCaseInstance::setupUniforms (const tcu::Vec4& constCoords)
+{
+	m_uniformSetup.setup(*this, constCoords);
+}
+
+void ShaderRenderCaseInstance::useUniform (deUint32 bindingLocation, BaseUniformType type)
+{
+	#define UNIFORM_CASE(type, value) case type: addUniform(bindingLocation, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, value); break
+
+	switch(type)
+	{
+		// Bool
+		UNIFORM_CASE(UB_FALSE,	0);
+		UNIFORM_CASE(UB_TRUE,	1);
+
+		// BVec4
+		UNIFORM_CASE(UB4_FALSE,	tcu::Vec4(0));
+		UNIFORM_CASE(UB4_TRUE,	tcu::Vec4(1));
+
+		// Integer
+		UNIFORM_CASE(UI_ZERO,	0);
+		UNIFORM_CASE(UI_ONE,	1);
+		UNIFORM_CASE(UI_TWO,	2);
+		UNIFORM_CASE(UI_THREE,	3);
+		UNIFORM_CASE(UI_FOUR,	4);
+		UNIFORM_CASE(UI_FIVE,	5);
+		UNIFORM_CASE(UI_SIX,	6);
+		UNIFORM_CASE(UI_SEVEN,	7);
+		UNIFORM_CASE(UI_EIGHT,	8);
+		UNIFORM_CASE(UI_ONEHUNDREDONE, 101);
+
+		// IVec2
+		UNIFORM_CASE(UI2_MINUS_ONE,	tcu::IVec2(-1));
+		UNIFORM_CASE(UI2_ZERO,		tcu::IVec2(0));
+		UNIFORM_CASE(UI2_ONE,		tcu::IVec2(1));
+		UNIFORM_CASE(UI2_TWO,		tcu::IVec2(2));
+		UNIFORM_CASE(UI2_THREE,		tcu::IVec2(3));
+		UNIFORM_CASE(UI2_FOUR,		tcu::IVec2(4));
+		UNIFORM_CASE(UI2_FIVE,		tcu::IVec2(5));
+
+		// IVec3
+		UNIFORM_CASE(UI3_MINUS_ONE,	tcu::IVec3(-1));
+		UNIFORM_CASE(UI3_ZERO,		tcu::IVec3(0));
+		UNIFORM_CASE(UI3_ONE,		tcu::IVec3(1));
+		UNIFORM_CASE(UI3_TWO,		tcu::IVec3(2));
+		UNIFORM_CASE(UI3_THREE,		tcu::IVec3(3));
+		UNIFORM_CASE(UI3_FOUR,		tcu::IVec3(4));
+		UNIFORM_CASE(UI3_FIVE,		tcu::IVec3(5));
+
+		// IVec4
+		UNIFORM_CASE(UI4_MINUS_ONE, tcu::IVec4(-1));
+		UNIFORM_CASE(UI4_ZERO,		tcu::IVec4(0));
+		UNIFORM_CASE(UI4_ONE,		tcu::IVec4(1));
+		UNIFORM_CASE(UI4_TWO,		tcu::IVec4(2));
+		UNIFORM_CASE(UI4_THREE,		tcu::IVec4(3));
+		UNIFORM_CASE(UI4_FOUR,		tcu::IVec4(4));
+		UNIFORM_CASE(UI4_FIVE,		tcu::IVec4(5));
+
+		// Float
+		UNIFORM_CASE(UF_ZERO,		0.0f);
+		UNIFORM_CASE(UF_ONE,		1.0f);
+		UNIFORM_CASE(UF_TWO,		2.0f);
+		UNIFORM_CASE(UF_THREE,		3.0f);
+		UNIFORM_CASE(UF_FOUR,		4.0f);
+		UNIFORM_CASE(UF_FIVE,		5.0f);
+		UNIFORM_CASE(UF_SIX,		6.0f);
+		UNIFORM_CASE(UF_SEVEN,		7.0f);
+		UNIFORM_CASE(UF_EIGHT,		8.0f);
+
+		UNIFORM_CASE(UF_HALF,		1.0f / 2.0f);
+		UNIFORM_CASE(UF_THIRD,		1.0f / 3.0f);
+		UNIFORM_CASE(UF_FOURTH,		1.0f / 4.0f);
+		UNIFORM_CASE(UF_FIFTH,		1.0f / 5.0f);
+		UNIFORM_CASE(UF_SIXTH,		1.0f / 6.0f);
+		UNIFORM_CASE(UF_SEVENTH,	1.0f / 7.0f);
+		UNIFORM_CASE(UF_EIGHTH,		1.0f / 8.0f);
+
+		// Vec2
+		UNIFORM_CASE(UV2_MINUS_ONE,	tcu::Vec2(-1.0f));
+		UNIFORM_CASE(UV2_ZERO,		tcu::Vec2(0.0f));
+		UNIFORM_CASE(UV2_ONE,		tcu::Vec2(1.0f));
+		UNIFORM_CASE(UV2_TWO,		tcu::Vec2(2.0f));
+		UNIFORM_CASE(UV2_THREE,		tcu::Vec2(3.0f));
+
+		UNIFORM_CASE(UV2_HALF,		tcu::Vec2(1.0f / 2.0f));
+
+		// Vec3
+		UNIFORM_CASE(UV3_MINUS_ONE,	tcu::Vec3(-1.0f));
+		UNIFORM_CASE(UV3_ZERO,		tcu::Vec3(0.0f));
+		UNIFORM_CASE(UV3_ONE,		tcu::Vec3(1.0f));
+		UNIFORM_CASE(UV3_TWO,		tcu::Vec3(2.0f));
+		UNIFORM_CASE(UV3_THREE,		tcu::Vec3(3.0f));
+
+		UNIFORM_CASE(UV3_HALF,		tcu::Vec3(1.0f / 2.0f));
+
+		// Vec4
+		UNIFORM_CASE(UV4_MINUS_ONE,	tcu::Vec4(-1.0f));
+		UNIFORM_CASE(UV4_ZERO,		tcu::Vec4(0.0f));
+		UNIFORM_CASE(UV4_ONE,		tcu::Vec4(1.0f));
+		UNIFORM_CASE(UV4_TWO,		tcu::Vec4(2.0f));
+		UNIFORM_CASE(UV4_THREE,		tcu::Vec4(3.0f));
+
+		UNIFORM_CASE(UV4_HALF,		tcu::Vec4(1.0f / 2.0f));
+
+		UNIFORM_CASE(UV4_BLACK,		tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
+		UNIFORM_CASE(UV4_GRAY,		tcu::Vec4(0.5f, 0.5f, 0.5f, 1.0f));
+		UNIFORM_CASE(UV4_WHITE,		tcu::Vec4(1.0f, 1.0f, 1.0f, 1.0f));
+
+		default:
+			m_context.getTestContext().getLog() << tcu::TestLog::Message << "Unknown Uniform type: " << type << tcu::TestLog::EndMessage;
+			break;
+	}
+
+	#undef UNIFORM_CASE
+}
+
+const tcu::UVec2 ShaderRenderCaseInstance::getViewportSize (void) const
+{
+	return tcu::UVec2(de::min(m_renderSize.x(), MAX_RENDER_WIDTH),
+					  de::min(m_renderSize.y(), MAX_RENDER_HEIGHT));
+}
+
+Move<VkImage> ShaderRenderCaseInstance::createImage2D (const tcu::Texture2D&	texture,
+													   const VkFormat			format,
+													   const VkImageUsageFlags	usage,
+													   const VkImageTiling		tiling)
+{
+	const VkDevice			vkDevice			= m_context.getDevice();
+	const DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const deUint32			queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+
+	const VkImageCreateInfo	imageCreateInfo		=
+	{
+		VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,						// VkStructureType			sType;
+		DE_NULL,													// const void*				pNext;
+		0,															// VkImageCreateFlags		flags;
+		VK_IMAGE_TYPE_2D,											// VkImageType				imageType;
+		format,														// VkFormat					format;
+		{
+			(deUint32)texture.getWidth(),
+			(deUint32)texture.getHeight(),
+			1u
+		},															// VkExtent3D				extent;
+		1u,															// deUint32					mipLevels;
+		1u,															// deUint32					arraySize;
+		VK_SAMPLE_COUNT_1_BIT,										// deUint32					samples;
+		tiling,														// VkImageTiling			tiling;
+		usage,														// VkImageUsageFlags		usage;
+		VK_SHARING_MODE_EXCLUSIVE,									// VkSharingMode			sharingMode;
+		1,															// deUint32					queueFamilyCount;
+		&queueFamilyIndex,											// const deUint32*			pQueueFamilyIndices;
+		VK_IMAGE_LAYOUT_UNDEFINED,									// VkImageLayout			initialLayout;
+	};
+
+	Move<VkImage>			vkTexture			= createImage(vk, vkDevice, &imageCreateInfo);
+	return vkTexture;
+}
+
+de::MovePtr<Allocation> ShaderRenderCaseInstance::uploadImage2D (const tcu::Texture2D&	refTexture,
+																 const VkImage&			vkTexture)
+{
+	const VkDevice				vkDevice	= m_context.getDevice();
+	const DeviceInterface&		vk			= m_context.getDeviceInterface();
+
+	de::MovePtr<Allocation>		allocation	= m_memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, vkTexture), MemoryRequirement::HostVisible);
+	VK_CHECK(vk.bindImageMemory(vkDevice, vkTexture, allocation->getMemory(), allocation->getOffset()));
+
+	const VkImageSubresource	subres				=
+	{
+		VK_IMAGE_ASPECT_COLOR_BIT,	// VkImageAspectFlags	aspectMask;
+		0u,							// deUint32				mipLevel;
+		0u							// deUint32				arraySlice;
+	};
+
+	VkSubresourceLayout layout;
+	vk.getImageSubresourceLayout(vkDevice, vkTexture, &subres, &layout);
+
+	tcu::ConstPixelBufferAccess	access		= refTexture.getLevel(0);
+	tcu::PixelBufferAccess		destAccess	(refTexture.getFormat(), refTexture.getWidth(), refTexture.getHeight(), 1, allocation->getHostPtr());
+
+	tcu::copy(destAccess, access);
+
+	flushMappedMemoryRange(vk, vkDevice, allocation->getMemory(), allocation->getOffset(), layout.size);
+
+	return allocation;
+}
+
+void ShaderRenderCaseInstance::copyTilingImageToOptimal	(const vk::VkImage&	srcImage,
+														 const vk::VkImage&	dstImage,
+														 deUint32			width,
+														 deUint32			height)
+{
+	const VkDevice						vkDevice			= m_context.getDevice();
+	const DeviceInterface&				vk					= m_context.getDeviceInterface();
+	const VkQueue						queue				= m_context.getUniversalQueue();
+	const deUint32						queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+
+	// Create command pool
+	const VkCommandPoolCreateInfo		cmdPoolParams		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,	// VkStructureType		sType;
+		DE_NULL,									// const void*			pNext;
+		VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,		// VkCmdPoolCreateFlags	flags;
+		queueFamilyIndex,							// deUint32				queueFamilyIndex;
+	};
+
+	Move<VkCommandPool>					cmdPool				= createCommandPool(vk, vkDevice, &cmdPoolParams);
+
+	// Create command buffer
+	const VkCommandBufferAllocateInfo	cmdBufferParams		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+		DE_NULL,										// const void*				pNext;
+		*cmdPool,										// VkCommandPool			commandPool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+		1u												// deUint32					bufferCount;
+	};
+
+	const VkCommandBufferUsageFlags		usageFlags			= VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+	const VkCommandBufferBeginInfo		cmdBufferBeginInfo	=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType				sType;
+		DE_NULL,										// const void*					pNext;
+		usageFlags,										// VkCommandBufferUsageFlags	flags;
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	Move<VkCommandBuffer>				cmdBuffer			= allocateCommandBuffer(vk, vkDevice, &cmdBufferParams);
+
+	VK_CHECK(vk.beginCommandBuffer(*cmdBuffer, &cmdBufferBeginInfo));
+
+	// Add image barriers
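+	// Transition the staging (linear) image to TRANSFER_SRC and the destination (optimal) image to TRANSFER_DST before copying.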
+	const VkImageMemoryBarrier			layoutBarriers[2]	=
+	{
+		createImageMemoryBarrier(srcImage, (VkAccessFlags)0u, (VkAccessFlags)0u, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL),
+		createImageMemoryBarrier(dstImage, (VkAccessFlags)0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
+	};
+
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0,
+						  0, (const VkMemoryBarrier*)DE_NULL,
+						  0, (const VkBufferMemoryBarrier*)DE_NULL,
+						  DE_LENGTH_OF_ARRAY(layoutBarriers), layoutBarriers);
+
+	// Add image copy
+	const VkImageCopy				imageCopy			=
+	{
+		{
+			VK_IMAGE_ASPECT_COLOR_BIT,		// VkImageAspect	aspect;
+			0u,								// deUint32			mipLevel;
+			0u,								// deUint32			arrayLayer;
+			1u								// deUint32			arraySize;
+		},											// VkImageSubresourceLayers	srcSubresource;
+		{
+			0,								// int32			x;
+			0,								// int32			y;
+			0								// int32			z;
+		},											// VkOffset3D				srcOffset;
+		{
+			VK_IMAGE_ASPECT_COLOR_BIT,		// VkImageAspect	aspect;
+			0u,								// deUint32			mipLevel;
+			0u,								// deUint32			arrayLayer;
+			1u								// deUint32			arraySize;
+		},											// VkImageSubresourceLayers	dstSubresource;
+		{
+			0,								// int32			x;
+			0,								// int32			y;
+			0								// int32			z;
+		},											// VkOffset3D				dstOffset;
+		{
+			width,							// int32			width;
+			height,							// int32			height;
+			1,								// int32			depth
+		}	// VkExtent3D					extent;
+	};
+
+	vk.cmdCopyImage(*cmdBuffer, srcImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, dstImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &imageCopy);
+
+	// Add destination barrier
+	const VkImageMemoryBarrier		dstBarrier			=
+			createImageMemoryBarrier(dstImage, VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_TRANSFER_WRITE_BIT, 0u, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
+
+	vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0,
+						  0, (const VkMemoryBarrier*)DE_NULL,
+						  0, (const VkBufferMemoryBarrier*)DE_NULL,
+						  1, &dstBarrier);
+
+	VK_CHECK(vk.endCommandBuffer(*cmdBuffer));
+
+	const VkFenceCreateInfo			fenceParams			=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+		DE_NULL,								// const void*			pNext;
+		0u										// VkFenceCreateFlags	flags;
+	};
+	const Unique<VkFence>			fence				(createFence(vk, vkDevice, &fenceParams));
+	const VkSubmitInfo				submitInfo			=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,
+		DE_NULL,
+		0u,
+		(const VkSemaphore*)DE_NULL,
+		(const VkPipelineStageFlags*)DE_NULL,
+		1u,
+		&cmdBuffer.get(),
+		0u,
+		(const VkSemaphore*)DE_NULL,
+	};
+
+	// Execute copy
+	VK_CHECK(vk.resetFences(vkDevice, 1, &fence.get()));
+	VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *fence));
+	VK_CHECK(vk.waitForFences(vkDevice, 1, &fence.get(), true, ~(0ull) /* infinity*/));
+}
+
+void ShaderRenderCaseInstance::useSampler2D (deUint32 bindingLocation, deUint32 textureID)
+{
+	DE_ASSERT(textureID < m_textures.size());
+
+	const VkDevice					vkDevice		= m_context.getDevice();
+	const DeviceInterface&			vk				= m_context.getDeviceInterface();
+	const TextureBinding&			textureBinding	= *m_textures[textureID];
+	const tcu::Texture2D&			refTexture		= textureBinding.get2D();
+	const tcu::Sampler&				refSampler		= textureBinding.getSampler();
+	const VkFormat					format			= refTexture.getFormat() == tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8)
+														? VK_FORMAT_R8G8B8A8_UNORM
+														: VK_FORMAT_R8G8B8_UNORM;
+
+	// Create & alloc the image
+	Move<VkImage>					vkTexture;
+	de::MovePtr<Allocation>			allocation;
+
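+	// Prefer a host-mappable linear-tiling image; otherwise stage through a linear image and copy into an optimal-tiling image.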
+	if (isSupportedLinearTilingFormat(m_context.getInstanceInterface(), m_context.getPhysicalDevice(), format))
+	{
+		vkTexture = createImage2D(refTexture, format, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_LINEAR);
+		allocation = uploadImage2D(refTexture, *vkTexture);
+	}
+	else if (isSupportedOptimalTilingFormat(m_context.getInstanceInterface(), m_context.getPhysicalDevice(), format))
+	{
+		Move<VkImage>				stagingTexture	(createImage2D(refTexture, format, VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_TILING_LINEAR));
+		de::MovePtr<Allocation>		stagingAlloc	(uploadImage2D(refTexture, *stagingTexture));
+
+		const VkImageUsageFlags		dstUsageFlags	= VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
+		vkTexture = createImage2D(refTexture, format, dstUsageFlags, VK_IMAGE_TILING_OPTIMAL);
+		allocation = m_memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *vkTexture), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *vkTexture, allocation->getMemory(), allocation->getOffset()));
+
+		copyTilingImageToOptimal(*stagingTexture, *vkTexture, refTexture.getWidth(), refTexture.getHeight());
+	}
+	else
+	{
+		TCU_THROW(InternalError, "Unable to create 2D image");
+	}
+
+	// Create sampler
+	const VkSamplerCreateInfo		samplerParams	= mapSampler(refSampler, refTexture.getFormat());
+	Move<VkSampler>					sampler			= createSampler(vk, vkDevice, &samplerParams);
+
+	const VkImageViewCreateInfo		viewParams		=
+	{
+		VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,	// VkStructureType			sType;
+		DE_NULL,									// const void*				pNext;
+		0u,											// VkImageViewCreateFlags	flags;
+		*vkTexture,									// VkImage					image;
+		VK_IMAGE_VIEW_TYPE_2D,						// VkImageViewType			viewType;
+		format,										// VkFormat					format;
+		{
+			VK_COMPONENT_SWIZZLE_R,			// VkChannelSwizzle		r;
+			VK_COMPONENT_SWIZZLE_G,			// VkChannelSwizzle		g;
+			VK_COMPONENT_SWIZZLE_B,			// VkChannelSwizzle		b;
+			VK_COMPONENT_SWIZZLE_A			// VkChannelSwizzle		a;
+		},											// VkChannelMapping			channels;
+		{
+			VK_IMAGE_ASPECT_COLOR_BIT,		// VkImageAspectFlags	aspectMask;
+			0,								// deUint32				baseMipLevel;
+			1,								// deUint32				mipLevels;
+			0,								// deUint32				baseArraySlice;
+			1								// deUint32				arraySize;
+		},											// VkImageSubresourceRange	subresourceRange;
+	};
+
+	Move<VkImageView>				imageView		= createImageView(vk, vkDevice, &viewParams);
+
+	const vk::VkDescriptorImageInfo	descriptor		=
+	{
+		sampler.get(),								// VkSampler				sampler;
+		imageView.get(),							// VkImageView				imageView;
+		VK_IMAGE_LAYOUT_GENERAL,					// VkImageLayout			imageLayout;
+	};
+
+	de::MovePtr<SamplerUniform> uniform(new SamplerUniform());
+	uniform->type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+	uniform->descriptor = descriptor;
+	uniform->location = bindingLocation;
+	uniform->image = VkImageSp(new vk::Unique<VkImage>(vkTexture));
+	uniform->imageView = VkImageViewSp(new vk::Unique<VkImageView>(imageView));
+	uniform->sampler = VkSamplerSp(new vk::Unique<VkSampler>(sampler));
+	uniform->alloc = AllocationSp(allocation.release());
+
+	m_descriptorSetLayoutBuilder.addSingleSamplerBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, vk::VK_SHADER_STAGE_ALL, &uniform->descriptor.sampler);
+	m_descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
+
+	m_uniformInfos.push_back(UniformInfoSp(new de::UniquePtr<UniformInfo>(uniform)));
+}
+
+void ShaderRenderCaseInstance::setupDefaultInputs (const QuadGrid& quadGrid)
+{
+	/* Configuration of the vertex input attributes:
+		a_position   is at location 0
+		a_coords     is at location 1
+		a_unitCoords is at location 2
+		a_one        is at location 3
+
+	  User attributes start at location 4.
+	*/
+	addAttribute(0u, VK_FORMAT_R32G32B32A32_SFLOAT, sizeof(tcu::Vec4), quadGrid.getNumVertices(), quadGrid.getPositions());
+	addAttribute(1u, VK_FORMAT_R32G32B32A32_SFLOAT, sizeof(tcu::Vec4), quadGrid.getNumVertices(), quadGrid.getCoords());
+	addAttribute(2u, VK_FORMAT_R32G32B32A32_SFLOAT, sizeof(tcu::Vec4), quadGrid.getNumVertices(), quadGrid.getUnitCoords());
+	addAttribute(3u, VK_FORMAT_R32_SFLOAT, sizeof(float), quadGrid.getNumVertices(), quadGrid.getAttribOne());
+
+	static const struct
+	{
+		BaseAttributeType	type;
+		int					userNdx;
+	} userAttributes[] =
+	{
+		{ A_IN0, 0 },
+		{ A_IN1, 1 },
+		{ A_IN2, 2 },
+		{ A_IN3, 3 }
+	};
+
+	static const struct
+	{
+		BaseAttributeType	matrixType;
+		int					numCols;
+		int					numRows;
+	} matrices[] =
+	{
+		{ MAT2,		2, 2 },
+		{ MAT2x3,	2, 3 },
+		{ MAT2x4,	2, 4 },
+		{ MAT3x2,	3, 2 },
+		{ MAT3,		3, 3 },
+		{ MAT3x4,	3, 4 },
+		{ MAT4x2,	4, 2 },
+		{ MAT4x3,	4, 3 },
+		{ MAT4,		4, 4 }
+	};
+
+	for (size_t attrNdx = 0; attrNdx < m_enabledBaseAttributes.size(); attrNdx++)
+	{
+		for (int userNdx = 0; userNdx < DE_LENGTH_OF_ARRAY(userAttributes); userNdx++)
+		{
+			if (userAttributes[userNdx].type != m_enabledBaseAttributes[attrNdx].type)
+				continue;
+
+			addAttribute(m_enabledBaseAttributes[attrNdx].location, VK_FORMAT_R32G32B32A32_SFLOAT, sizeof(tcu::Vec4), quadGrid.getNumVertices(), quadGrid.getUserAttrib(userNdx));
+		}
+
+		for (int matNdx = 0; matNdx < DE_LENGTH_OF_ARRAY(matrices); matNdx++)
+		{
+			if (matrices[matNdx].matrixType != m_enabledBaseAttributes[attrNdx].type)
+				continue;
+
+			const int numCols = matrices[matNdx].numCols;
+
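+			// A matrix attribute takes one vec4 input location per column, so each column is uploaded as a separate attribute.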
+			for (int colNdx = 0; colNdx < numCols; colNdx++)
+			{
+				addAttribute(m_enabledBaseAttributes[attrNdx].location + colNdx, VK_FORMAT_R32G32B32A32_SFLOAT, (deUint32)(4 * sizeof(float)), quadGrid.getNumVertices(), quadGrid.getUserAttrib(colNdx));
+			}
+		}
+	}
+}
+
+void ShaderRenderCaseInstance::render (tcu::Surface& result, const QuadGrid& quadGrid)
+{
+	const VkDevice										vkDevice					= m_context.getDevice();
+	const DeviceInterface&								vk							= m_context.getDeviceInterface();
+	const VkQueue										queue						= m_context.getUniversalQueue();
+	const deUint32										queueFamilyIndex			= m_context.getUniversalQueueFamilyIndex();
+
+	// Create color image
+	{
+		const VkImageCreateInfo							colorImageParams			=
+		{
+			VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,										// VkStructureType		sType;
+			DE_NULL,																	// const void*			pNext;
+			0u,																			// VkImageCreateFlags	flags;
+			VK_IMAGE_TYPE_2D,															// VkImageType			imageType;
+			m_colorFormat,																// VkFormat				format;
+			{ m_renderSize.x(), m_renderSize.y(), 1u },									// VkExtent3D			extent;
+			1u,																			// deUint32				mipLevels;
+			1u,																			// deUint32				arraySize;
+			VK_SAMPLE_COUNT_1_BIT,														// deUint32				samples;
+			VK_IMAGE_TILING_OPTIMAL,													// VkImageTiling		tiling;
+			VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT,		// VkImageUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,													// VkSharingMode		sharingMode;
+			1u,																			// deUint32				queueFamilyCount;
+			&queueFamilyIndex,															// const deUint32*		pQueueFamilyIndices;
+			VK_IMAGE_LAYOUT_UNDEFINED,													// VkImageLayout		initialLayout;
+		};
+
+		m_colorImage = createImage(vk, vkDevice, &colorImageParams);
+
+		// Allocate and bind color image memory
+		m_colorImageAlloc = m_memAlloc.allocate(getImageMemoryRequirements(vk, vkDevice, *m_colorImage), MemoryRequirement::Any);
+		VK_CHECK(vk.bindImageMemory(vkDevice, *m_colorImage, m_colorImageAlloc->getMemory(), m_colorImageAlloc->getOffset()));
+	}
+
+	// Create color attachment view
+	{
+		const VkImageViewCreateInfo						colorImageViewParams		=
+		{
+			VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,			// VkStructureType			sType;
+			DE_NULL,											// const void*				pNext;
+			0u,													// VkImageViewCreateFlags	flags;
+			*m_colorImage,										// VkImage					image;
+			VK_IMAGE_VIEW_TYPE_2D,								// VkImageViewType			viewType;
+			m_colorFormat,										// VkFormat					format;
+			{
+				VK_COMPONENT_SWIZZLE_R,			// VkChannelSwizzle		r;
+				VK_COMPONENT_SWIZZLE_G,			// VkChannelSwizzle		g;
+				VK_COMPONENT_SWIZZLE_B,			// VkChannelSwizzle		b;
+				VK_COMPONENT_SWIZZLE_A			// VkChannelSwizzle		a;
+			},													// VkChannelMapping			channels;
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,		// VkImageAspectFlags	aspectMask;
+				0,								// deUint32				baseMipLevel;
+				1,								// deUint32				mipLevels;
+				0,								// deUint32				baseArraySlice;
+				1								// deUint32				arraySize;
+			},													// VkImageSubresourceRange	subresourceRange;
+		};
+
+		m_colorImageView = createImageView(vk, vkDevice, &colorImageViewParams);
+	}
+
+	// Create render pass
+	{
+		const VkAttachmentDescription					attachmentDescription		=
+		{
+			(VkAttachmentDescriptionFlags)0,
+			m_colorFormat,										// VkFormat						format;
+			VK_SAMPLE_COUNT_1_BIT,								// deUint32						samples;
+			VK_ATTACHMENT_LOAD_OP_CLEAR,						// VkAttachmentLoadOp			loadOp;
+			VK_ATTACHMENT_STORE_OP_STORE,						// VkAttachmentStoreOp			storeOp;
+			VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// VkAttachmentLoadOp			stencilLoadOp;
+			VK_ATTACHMENT_STORE_OP_DONT_CARE,					// VkAttachmentStoreOp			stencilStoreOp;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout				initialLayout;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// VkImageLayout				finalLayout;
+		};
+
+		const VkAttachmentReference						attachmentReference			=
+		{
+			0u,													// deUint32			attachment;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL			// VkImageLayout	layout;
+		};
+
+		const VkSubpassDescription						subpassDescription			=
+		{
+			0u,													// VkSubpassDescriptionFlags	flags;
+			VK_PIPELINE_BIND_POINT_GRAPHICS,					// VkPipelineBindPoint			pipelineBindPoint;
+			0u,													// deUint32						inputCount;
+			DE_NULL,											// const VkAttachmentReference*	pInputAttachments;
+			1u,													// deUint32						colorCount;
+			&attachmentReference,								// const VkAttachmentReference*	pColorAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pResolveAttachments;
+			DE_NULL,											// const VkAttachmentReference*	pDepthStencilAttachment;
+			0u,													// deUint32						preserveCount;
+			DE_NULL												// const deUint32*				pPreserveAttachments;
+		};
+
+		const VkRenderPassCreateInfo					renderPassParams			=
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,			// VkStructureType					sType;
+			DE_NULL,											// const void*						pNext;
+			(VkRenderPassCreateFlags)0,
+			1u,													// deUint32							attachmentCount;
+			&attachmentDescription,								// const VkAttachmentDescription*	pAttachments;
+			1u,													// deUint32							subpassCount;
+			&subpassDescription,								// const VkSubpassDescription*		pSubpasses;
+			0u,													// deUint32							dependencyCount;
+			DE_NULL												// const VkSubpassDependency*		pDependencies;
+		};
+
+		m_renderPass = createRenderPass(vk, vkDevice, &renderPassParams);
+	}
+
+	// Create framebuffer
+	{
+		const VkFramebufferCreateInfo					framebufferParams			=
+		{
+			VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,			// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			(VkFramebufferCreateFlags)0,
+			*m_renderPass,										// VkRenderPass					renderPass;
+			1u,													// deUint32						attachmentCount;
+			&*m_colorImageView,									// const VkImageView*			pAttachments;
+			(deUint32)m_renderSize.x(),							// deUint32						width;
+			(deUint32)m_renderSize.y(),							// deUint32						height;
+			1u													// deUint32						layers;
+		};
+
+		m_framebuffer = createFramebuffer(vk, vkDevice, &framebufferParams);
+	}
+
+	// Create descriptors
+	{
+		setupUniforms(quadGrid.getConstCoords());
+
+		m_descriptorSetLayout = m_descriptorSetLayoutBuilder.build(vk, vkDevice);
+		if (!m_uniformInfos.empty())
+		{
+			m_descriptorPool 								= m_descriptorPoolBuilder.build(vk, vkDevice, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+			const VkDescriptorSetAllocateInfo	allocInfo	=
+			{
+				VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+				DE_NULL,
+				*m_descriptorPool,
+				1u,
+				&m_descriptorSetLayout.get(),
+			};
+
+			m_descriptorSet = allocateDescriptorSet(vk, vkDevice, &allocInfo);
+		}
+
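+		// Write each uniform's descriptor into the descriptor set at its binding location.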
+		for (deUint32 i = 0; i < m_uniformInfos.size(); i++)
+		{
+			const UniformInfo* uniformInfo = m_uniformInfos[i].get()->get();
+			deUint32 location = uniformInfo->location;
+
+			if (uniformInfo->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER)
+			{
+				const BufferUniform*	bufferInfo	= dynamic_cast<const BufferUniform*>(uniformInfo);
+
+				m_descriptorSetUpdateBuilder.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(location), uniformInfo->type, &bufferInfo->descriptor);
+			}
+			else if (uniformInfo->type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
+			{
+				const SamplerUniform*	samplerInfo	= dynamic_cast<const SamplerUniform*>(uniformInfo);
+
+				m_descriptorSetUpdateBuilder.writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(location), uniformInfo->type, &samplerInfo->descriptor);
+			}
+			else
+				DE_FATAL("Impossible");
+		}
+
+		m_descriptorSetUpdateBuilder.update(vk, vkDevice);
+	}
+
+	// Create pipeline layout
+	{
+		const VkPipelineLayoutCreateInfo				pipelineLayoutParams		=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,											// const void*					pNext;
+			(VkPipelineLayoutCreateFlags)0,
+			1u,													// deUint32						descriptorSetCount;
+			&*m_descriptorSetLayout,							// const VkDescriptorSetLayout*	pSetLayouts;
+			0u,													// deUint32						pushConstantRangeCount;
+			DE_NULL												// const VkPushConstantRange*	pPushConstantRanges;
+		};
+
+		m_pipelineLayout = createPipelineLayout(vk, vkDevice, &pipelineLayoutParams);
+	}
+
+	// Create shaders
+	{
+		m_vertexShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("vert"), 0);
+		m_fragmentShaderModule	= createShaderModule(vk, vkDevice, m_context.getBinaryCollection().get("frag"), 0);
+	}
+
+	// Create pipeline
+	{
+		const VkPipelineShaderStageCreateInfo			shaderStageParams[2]		=
+		{
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType				sType;
+				DE_NULL,													// const void*					pNext;
+				(VkPipelineShaderStageCreateFlags)0,
+				VK_SHADER_STAGE_VERTEX_BIT,									// VkShaderStageFlagBits		stage;
+				*m_vertexShaderModule,										// VkShaderModule				module;
+				"main",														// const char*					pName;
+				DE_NULL														// const VkSpecializationInfo*	pSpecializationInfo;
+			},
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,		// VkStructureType				sType;
+				DE_NULL,													// const void*					pNext;
+				(VkPipelineShaderStageCreateFlags)0,
+				VK_SHADER_STAGE_FRAGMENT_BIT,								// VkShaderStageFlagBits		stage;
+				*m_fragmentShaderModule,									// VkShaderModule				module;
+				"main",														// const char*					pName;
+				DE_NULL														// const VkSpecializationInfo*	pSpecializationInfo;
+			}
+		};
+
+		// Add test case specific attributes
+		if (m_attribFunc)
+			m_attribFunc(*this, quadGrid.getNumVertices());
+
+		// Add base attributes
+		setupDefaultInputs(quadGrid);
+
+		const VkPipelineVertexInputStateCreateInfo		vertexInputStateParams		=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			(VkPipelineVertexInputStateCreateFlags)0,
+			(deUint32)m_vertexBindingDescription.size(),					// deUint32									bindingCount;
+			&m_vertexBindingDescription[0],									// const VkVertexInputBindingDescription*	pVertexBindingDescriptions;
+			(deUint32)m_vertexattributeDescription.size(),					// deUint32									attributeCount;
+			&m_vertexattributeDescription[0],								// const VkVertexInputAttributeDescription*	pVertexAttributeDescriptions;
+		};
+
+		const VkPipelineInputAssemblyStateCreateInfo	inputAssemblyStateParams	=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,														// const void*			pNext;
+			(VkPipelineInputAssemblyStateCreateFlags)0,
+			VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,							// VkPrimitiveTopology	topology;
+			false															// VkBool32				primitiveRestartEnable;
+		};
+
+		const VkViewport								viewport					=
+		{
+			0.0f,						// float	originX;
+			0.0f,						// float	originY;
+			(float)m_renderSize.x(),	// float	width;
+			(float)m_renderSize.y(),	// float	height;
+			0.0f,						// float	minDepth;
+			1.0f						// float	maxDepth;
+		};
+
+		const VkRect2D									scissor						=
+		{
+			{
+				0u,					// deUint32	x;
+				0u,					// deUint32	y;
+			},							// VkOffset2D	offset;
+			{
+				m_renderSize.x(),	// deUint32	width;
+				m_renderSize.y(),	// deUint32	height;
+			},							// VkExtent2D	extent;
+		};
+
+		const VkPipelineViewportStateCreateInfo			viewportStateParams			=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,			// VkStructureType		sType;
+			DE_NULL,														// const void*			pNext;
+			(VkPipelineViewportStateCreateFlags)0,
+			1u,																// deUint32				viewportCount;
+			&viewport,														// const VkViewport*	pViewports;
+			1u,																// deUint32				scissorsCount;
+			&scissor,														// const VkRect2D*		pScissors;
+		};
+
+		const VkPipelineRasterizationStateCreateInfo	rasterStateParams			=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// VkStructureType	sType;
+			DE_NULL,														// const void*		pNext;
+			(VkPipelineRasterizationStateCreateFlags)0,
+			false,															// VkBool32			depthClipEnable;
+			false,															// VkBool32			rasterizerDiscardEnable;
+			VK_POLYGON_MODE_FILL,											// VkFillMode		fillMode;
+			VK_CULL_MODE_NONE,												// VkCullMode		cullMode;
+			VK_FRONT_FACE_COUNTER_CLOCKWISE,								// VkFrontFace		frontFace;
+			false,															// VkBool32			depthBiasEnable;
+			0.0f,															// float			depthBias;
+			0.0f,															// float			depthBiasClamp;
+			0.0f,															// float			slopeScaledDepthBias;
+			1.0f,															// float			lineWidth;
+		};
+
+		const VkPipelineMultisampleStateCreateInfo		multisampleStateParams =
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,		// VkStructureType							sType;
+			DE_NULL,														// const void*								pNext;
+			0u,																// VkPipelineMultisampleStateCreateFlags	flags;
+			VK_SAMPLE_COUNT_1_BIT,											// VkSampleCountFlagBits					rasterizationSamples;
+			VK_FALSE,														// VkBool32									sampleShadingEnable;
+			0.0f,															// float									minSampleShading;
+			DE_NULL,														// const VkSampleMask*						pSampleMask;
+			VK_FALSE,														// VkBool32									alphaToCoverageEnable;
+			VK_FALSE														// VkBool32									alphaToOneEnable;
+		};
+
+		const VkPipelineColorBlendAttachmentState		colorBlendAttachmentState	=
+		{
+			false,															// VkBool32			blendEnable;
+			VK_BLEND_FACTOR_ONE,											// VkBlend			srcBlendColor;
+			VK_BLEND_FACTOR_ZERO,											// VkBlend			destBlendColor;
+			VK_BLEND_OP_ADD,												// VkBlendOp		blendOpColor;
+			VK_BLEND_FACTOR_ONE,											// VkBlend			srcBlendAlpha;
+			VK_BLEND_FACTOR_ZERO,											// VkBlend			destBlendAlpha;
+			VK_BLEND_OP_ADD,												// VkBlendOp		blendOpAlpha;
+			(VK_COLOR_COMPONENT_R_BIT |
+			 VK_COLOR_COMPONENT_G_BIT |
+			 VK_COLOR_COMPONENT_B_BIT |
+			 VK_COLOR_COMPONENT_A_BIT),										// VkChannelFlags	channelWriteMask;
+		};
+
+		const VkPipelineColorBlendStateCreateInfo		colorBlendStateParams		=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	// VkStructureType								sType;
+			DE_NULL,													// const void*									pNext;
+			(VkPipelineColorBlendStateCreateFlags)0,
+			false,														// VkBool32										logicOpEnable;
+			VK_LOGIC_OP_COPY,											// VkLogicOp									logicOp;
+			1u,															// deUint32										attachmentCount;
+			&colorBlendAttachmentState,									// const VkPipelineColorBlendAttachmentState*	pAttachments;
+			{ 0.0f, 0.0f, 0.0f, 0.0f },									// float										blendConst[4];
+		};
+
+		const VkPipelineDynamicStateCreateInfo			dynamicStateInfo			=
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,		// VkStructureType				sType;
+			DE_NULL,													// const void*					pNext;
+			(VkPipelineDynamicStateCreateFlags)0,
+			0u,															// deUint32						dynamicStateCount;
+			DE_NULL														// const VkDynamicState*		pDynamicStates;
+		};
+
+		const VkGraphicsPipelineCreateInfo				graphicsPipelineParams		=
+		{
+			VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,	// VkStructureType									sType;
+			DE_NULL,											// const void*										pNext;
+			0u,													// VkPipelineCreateFlags							flags;
+			2u,													// deUint32											stageCount;
+			shaderStageParams,									// const VkPipelineShaderStageCreateInfo*			pStages;
+			&vertexInputStateParams,							// const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+			&inputAssemblyStateParams,							// const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+			DE_NULL,											// const VkPipelineTessellationStateCreateInfo*		pTessellationState;
+			&viewportStateParams,								// const VkPipelineViewportStateCreateInfo*			pViewportState;
+			&rasterStateParams,									// const VkPipelineRasterizationStateCreateInfo*	pRasterizationState;
+			&multisampleStateParams,							// const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+			DE_NULL,											// const VkPipelineDepthStencilStateCreateInfo*		pDepthStencilState;
+			&colorBlendStateParams,								// const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+			&dynamicStateInfo,									// const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+			*m_pipelineLayout,									// VkPipelineLayout									layout;
+			*m_renderPass,										// VkRenderPass										renderPass;
+			0u,													// deUint32											subpass;
+			0u,													// VkPipeline										basePipelineHandle;
+			0u													// deInt32											basePipelineIndex;
+		};
+
+		m_graphicsPipeline = createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+	}
+
+	// Create vertex indices buffer
+	{
+		const VkDeviceSize								indiceBufferSize			= quadGrid.getNumTriangles() * 3 * sizeof(deUint16);
+		const VkBufferCreateInfo						indiceBufferParams			=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkBufferCreateFlags	flags;
+			indiceBufferSize,							// VkDeviceSize			size;
+			VK_BUFFER_USAGE_INDEX_BUFFER_BIT,			// VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					// VkSharingMode		sharingMode;
+			1u,											// deUint32				queueFamilyCount;
+			&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+		};
+
+		m_indiceBuffer		= createBuffer(vk, vkDevice, &indiceBufferParams);
+		m_indiceBufferAlloc	= m_memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *m_indiceBuffer), MemoryRequirement::HostVisible);
+
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *m_indiceBuffer, m_indiceBufferAlloc->getMemory(), m_indiceBufferAlloc->getOffset()));
+
+		// Load vertex indices into the buffer
+		deMemcpy(m_indiceBufferAlloc->getHostPtr(), quadGrid.getIndices(), (size_t)indiceBufferSize);
+		flushMappedMemoryRange(vk, vkDevice, m_indiceBufferAlloc->getMemory(), m_indiceBufferAlloc->getOffset(), indiceBufferSize);
+	}
+
+	// Create command pool
+	{
+		const VkCommandPoolCreateInfo					cmdPoolParams				=
+		{
+			VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// VkStructureType		sType;
+			DE_NULL,										// const void*			pNext;
+			VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,			// VkCmdPoolCreateFlags	flags;
+			queueFamilyIndex,								// deUint32				queueFamilyIndex;
+		};
+
+		m_cmdPool = createCommandPool(vk, vkDevice, &cmdPoolParams);
+	}
+
+	// Create command buffer
+	{
+		const VkCommandBufferAllocateInfo				cmdBufferParams				=
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u												// deUint32					bufferCount;
+		};
+
+		const VkCommandBufferBeginInfo					cmdBufferBeginInfo			=
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			0u,												// VkCommandBufferUsageFlags	flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,
+		};
+
+		const VkClearValue								clearValues					= makeClearValueColorF32(m_clearColor.x(),
+																											 m_clearColor.y(),
+																											 m_clearColor.z(),
+																											 m_clearColor.w());
+
+		const VkRenderPassBeginInfo						renderPassBeginInfo			=
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,				// VkStructureType		sType;
+			DE_NULL,												// const void*			pNext;
+			*m_renderPass,											// VkRenderPass			renderPass;
+			*m_framebuffer,											// VkFramebuffer		framebuffer;
+			{ { 0, 0 },  {m_renderSize.x(), m_renderSize.y() } },	// VkRect2D				renderArea;
+			1,														// deUint32				clearValueCount;
+			&clearValues,											// const VkClearValue*	pClearValues;
+		};
+
+		m_cmdBuffer = allocateCommandBuffer(vk, vkDevice, &cmdBufferParams);
+
+		VK_CHECK(vk.beginCommandBuffer(*m_cmdBuffer, &cmdBufferBeginInfo));
+
+		// Add texture barriers
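+		// Transition each sampled texture image to SHADER_READ_ONLY_OPTIMAL before the render pass begins.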
+		std::vector<VkImageMemoryBarrier> barriers;
+
+		for(deUint32 i = 0; i < m_uniformInfos.size(); i++)
+		{
+			const UniformInfo* uniformInfo = m_uniformInfos[i].get()->get();
+
+			if (uniformInfo->type != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
+			{
+				continue;
+			}
+
+			const SamplerUniform*		sampler			= static_cast<const SamplerUniform*>(uniformInfo);
+
+			const VkAccessFlags			outputMask		= VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_TRANSFER_WRITE_BIT;
+			const VkImageMemoryBarrier	textureBarrier	= createImageMemoryBarrier(sampler->image->get(), outputMask, 0u, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
+
+			barriers.push_back(textureBarrier);
+		}
+
+		vk.cmdPipelineBarrier(*m_cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, (VkDependencyFlags)0,
+							  0, (const VkMemoryBarrier*)DE_NULL,
+							  0, (const VkBufferMemoryBarrier*)DE_NULL,
+							  (deUint32)barriers.size(), (barriers.empty() ? (const VkImageMemoryBarrier*)DE_NULL : &barriers[0]));
+
+		vk.cmdBeginRenderPass(*m_cmdBuffer, &renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
+
+		vk.cmdBindPipeline(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_graphicsPipeline);
+		if (!m_uniformInfos.empty())
+			vk.cmdBindDescriptorSets(*m_cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1, &*m_descriptorSet, 0u, DE_NULL);
+		vk.cmdBindIndexBuffer(*m_cmdBuffer, *m_indiceBuffer, 0, VK_INDEX_TYPE_UINT16);
+
+		const deUint32 numberOfVertexAttributes = (deUint32)m_vertexBuffers.size();
+		const std::vector<VkDeviceSize> offsets(numberOfVertexAttributes, 0);
+
+		std::vector<VkBuffer> buffers(numberOfVertexAttributes);
+		for (size_t i = 0; i < numberOfVertexAttributes; i++)
+		{
+			buffers[i] = m_vertexBuffers[i].get()->get();
+		}
+
+		vk.cmdBindVertexBuffers(*m_cmdBuffer, 0, numberOfVertexAttributes, &buffers[0], &offsets[0]);
+		vk.cmdDrawIndexed(*m_cmdBuffer, quadGrid.getNumTriangles() * 3, 1, 0, 0, 0);
+
+		vk.cmdEndRenderPass(*m_cmdBuffer);
+		VK_CHECK(vk.endCommandBuffer(*m_cmdBuffer));
+	}
+
+	// Create fence
+	{
+		const VkFenceCreateInfo							fenceParams					=
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,								// const void*			pNext;
+			0u										// VkFenceCreateFlags	flags;
+		};
+		m_fence = createFence(vk, vkDevice, &fenceParams);
+	}
+
+	// Execute Draw
+	{
+		const VkSubmitInfo	submitInfo	=
+		{
+			VK_STRUCTURE_TYPE_SUBMIT_INFO,
+			DE_NULL,
+			0u,
+			(const VkSemaphore*)DE_NULL,
+			(const VkPipelineStageFlags*)DE_NULL,
+			1u,
+			&m_cmdBuffer.get(),
+			0u,
+			(const VkSemaphore*)DE_NULL,
+		};
+
+		VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+		VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+		VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity*/));
+	}
+
+	// Read back the result
+	{
+		const VkDeviceSize								imageSizeBytes				= (VkDeviceSize)(sizeof(deUint32) * m_renderSize.x() * m_renderSize.y());
+		const VkBufferCreateInfo						readImageBufferParams		=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		//  VkStructureType		sType;
+			DE_NULL,									//  const void*			pNext;
+			0u,											//  VkBufferCreateFlags	flags;
+			imageSizeBytes,								//  VkDeviceSize		size;
+			VK_BUFFER_USAGE_TRANSFER_DST_BIT,			//  VkBufferUsageFlags	usage;
+			VK_SHARING_MODE_EXCLUSIVE,					//  VkSharingMode		sharingMode;
+			1u,											//  deUint32			queueFamilyIndexCount;
+			&queueFamilyIndex,							//  const deUint32*		pQueueFamilyIndices;
+		};
+		const Unique<VkBuffer>							readImageBuffer				(createBuffer(vk, vkDevice, &readImageBufferParams));
+		const de::UniquePtr<Allocation>					readImageBufferMemory		(m_memAlloc.allocate(getBufferMemoryRequirements(vk, vkDevice, *readImageBuffer), MemoryRequirement::HostVisible));
+
+		VK_CHECK(vk.bindBufferMemory(vkDevice, *readImageBuffer, readImageBufferMemory->getMemory(), readImageBufferMemory->getOffset()));
+
+		// Copy image to buffer
+		const VkCommandBufferAllocateInfo				cmdBufferParams				=
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			*m_cmdPool,										// VkCommandPool			commandPool;
+			VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+			1u												// deUint32					commandBufferCount;
+		};
+
+		const VkCommandBufferBeginInfo					cmdBufferBeginInfo			=
+		{
+			VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext;
+			0u,												// VkCommandBufferUsageFlags				flags;
+			(const VkCommandBufferInheritanceInfo*)DE_NULL,	// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+		};
+
+		const Move<VkCommandBuffer>						cmdBuffer					= allocateCommandBuffer(vk, vkDevice, &cmdBufferParams);
+
+		const VkBufferImageCopy							copyParams					=
+		{
+			0u,											// VkDeviceSize			bufferOffset;
+			(deUint32)m_renderSize.x(),					// deUint32				bufferRowLength;
+			(deUint32)m_renderSize.y(),					// deUint32				bufferImageHeight;
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,			// VkImageAspectFlags	aspectMask;
+				0u,									// deUint32				mipLevel;
+				0u,									// deUint32				baseArrayLayer;
+				1u,									// deUint32				layerCount;
+			},											// VkImageSubresourceLayers	imageSubresource;
+			{ 0u, 0u, 0u },								// VkOffset3D			imageOffset;
+			{ m_renderSize.x(), m_renderSize.y(), 1u }	// VkExtent3D			imageExtent;
+		};
+		const VkSubmitInfo								submitInfo					=
+		{
+			VK_STRUCTURE_TYPE_SUBMIT_INFO,
+			DE_NULL,
+			0u,
+			(const VkSemaphore*)DE_NULL,
+			(const VkPipelineStageFlags*)DE_NULL,
+			1u,
+			&cmdBuffer.get(),
+			0u,
+			(const VkSemaphore*)DE_NULL,
+		};
+
+		VK_CHECK(vk.beginCommandBuffer(*cmdBuffer, &cmdBufferBeginInfo));
+		vk.cmdCopyImageToBuffer(*cmdBuffer, *m_colorImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *readImageBuffer, 1u, &copyParams);
+		VK_CHECK(vk.endCommandBuffer(*cmdBuffer));
+
+		VK_CHECK(vk.resetFences(vkDevice, 1, &m_fence.get()));
+		VK_CHECK(vk.queueSubmit(queue, 1, &submitInfo, *m_fence));
+		VK_CHECK(vk.waitForFences(vkDevice, 1, &m_fence.get(), true, ~(0ull) /* infinity */));
+
+		invalidateMappedMemoryRange(vk, vkDevice, readImageBufferMemory->getMemory(), readImageBufferMemory->getOffset(), imageSizeBytes);
+
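+		// The readback buffer is host-visible; after invalidating the mapped range its pointer can be wrapped directly as an RGBA8 pixel buffer access.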
+		const tcu::TextureFormat						resultFormat				(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8);
+		const tcu::ConstPixelBufferAccess				resultAccess				(resultFormat, m_renderSize.x(), m_renderSize.y(), 1, readImageBufferMemory->getHostPtr());
+
+		tcu::copy(result.getAccess(), resultAccess);
+	}
+}
+
+void ShaderRenderCaseInstance::computeVertexReference (tcu::Surface& result, const QuadGrid& quadGrid)
+{
+	// Buffer info.
+	const int				width		= result.getWidth();
+	const int				height		= result.getHeight();
+	const int				gridSize	= quadGrid.getGridSize();
+	const int				stride		= gridSize + 1;
+	const bool				hasAlpha	= true; // \todo [2015-09-07 elecro] add correct alpha check
+	ShaderEvalContext		evalCtx		(quadGrid);
+
+	// Evaluate color for each vertex.
+	std::vector<tcu::Vec4>	colors		((gridSize + 1) * (gridSize + 1));
+	for (int y = 0; y < gridSize+1; y++)
+	for (int x = 0; x < gridSize+1; x++)
+	{
+		const float	sx			= (float)x / (float)gridSize;
+		const float	sy			= (float)y / (float)gridSize;
+		const int	vtxNdx		= ((y * (gridSize+1)) + x);
+
+		evalCtx.reset(sx, sy);
+		m_evaluator.evaluate(evalCtx);
+		DE_ASSERT(!evalCtx.isDiscarded); // Discard is not available in vertex shader.
+		tcu::Vec4 color = evalCtx.color;
+
+		if (!hasAlpha)
+			color.w() = 1.0f;
+
+		colors[vtxNdx] = color;
+	}
+
+	// Render quads.
+	for (int y = 0; y < gridSize; y++)
+	for (int x = 0; x < gridSize; x++)
+	{
+		const float		x0		= (float)x       / (float)gridSize;
+		const float		x1		= (float)(x + 1) / (float)gridSize;
+		const float		y0		= (float)y       / (float)gridSize;
+		const float		y1		= (float)(y + 1) / (float)gridSize;
+
+		const float		sx0		= x0 * (float)width;
+		const float		sx1		= x1 * (float)width;
+		const float		sy0		= y0 * (float)height;
+		const float		sy1		= y1 * (float)height;
+		const float		oosx	= 1.0f / (sx1 - sx0);
+		const float		oosy	= 1.0f / (sy1 - sy0);
+
+		const int		ix0		= deCeilFloatToInt32(sx0 - 0.5f);
+		const int		ix1		= deCeilFloatToInt32(sx1 - 0.5f);
+		const int		iy0		= deCeilFloatToInt32(sy0 - 0.5f);
+		const int		iy1		= deCeilFloatToInt32(sy1 - 0.5f);
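+		// First/last covered pixel: ceil(edge - 0.5) selects the pixels whose centers lie inside the quad.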
+
+		const int		v00		= (y * stride) + x;
+		const int		v01		= (y * stride) + x + 1;
+		const int		v10		= ((y + 1) * stride) + x;
+		const int		v11		= ((y + 1) * stride) + x + 1;
+		const tcu::Vec4	c00		= colors[v00];
+		const tcu::Vec4	c01		= colors[v01];
+		const tcu::Vec4	c10		= colors[v10];
+		const tcu::Vec4	c11		= colors[v11];
+
+		//printf("(%d,%d) -> (%f..%f, %f..%f) (%d..%d, %d..%d)\n", x, y, sx0, sx1, sy0, sy1, ix0, ix1, iy0, iy1);
+
+		for (int iy = iy0; iy < iy1; iy++)
+		for (int ix = ix0; ix < ix1; ix++)
+		{
+			DE_ASSERT(deInBounds32(ix, 0, width));
+			DE_ASSERT(deInBounds32(iy, 0, height));
+
+			const float			sfx		= (float)ix + 0.5f;
+			const float			sfy		= (float)iy + 0.5f;
+			const float			fx1		= deFloatClamp((sfx - sx0) * oosx, 0.0f, 1.0f);
+			const float			fy1		= deFloatClamp((sfy - sy0) * oosy, 0.0f, 1.0f);
+
+			// Triangle quad interpolation.
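+			// The quad is split along its diagonal: the lower-left triangle (fx1 + fy1 <= 1) interpolates from c00, the upper-right one from c11.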
+			const bool			tri		= fx1 + fy1 <= 1.0f;
+			const float			tx		= tri ? fx1 : (1.0f-fx1);
+			const float			ty		= tri ? fy1 : (1.0f-fy1);
+			const tcu::Vec4&	t0		= tri ? c00 : c11;
+			const tcu::Vec4&	t1		= tri ? c01 : c10;
+			const tcu::Vec4&	t2		= tri ? c10 : c01;
+			const tcu::Vec4		color	= t0 + (t1-t0)*tx + (t2-t0)*ty;
+
+			result.setPixel(ix, iy, tcu::RGBA(color));
+		}
+	}
+}
+
+void ShaderRenderCaseInstance::computeFragmentReference (tcu::Surface& result, const QuadGrid& quadGrid)
+{
+	// Buffer info.
+	const int			width		= result.getWidth();
+	const int			height		= result.getHeight();
+	const bool			hasAlpha	= true;  // \todo [2015-09-07 elecro] add correct alpha check
+	ShaderEvalContext	evalCtx		(quadGrid);
+
+	// Render.
+	for (int y = 0; y < height; y++)
+	for (int x = 0; x < width; x++)
+	{
+		const float sx = ((float)x + 0.5f) / (float)width;
+		const float sy = ((float)y + 0.5f) / (float)height;
+
+		evalCtx.reset(sx, sy);
+		m_evaluator.evaluate(evalCtx);
+		// Select either clear color or computed color based on discarded bit.
+		tcu::Vec4 color = evalCtx.isDiscarded ? m_clearColor : evalCtx.color;
+
+		if (!hasAlpha)
+			color.w() = 1.0f;
+
+		result.setPixel(x, y, tcu::RGBA(color));
+	}
+}
+
+bool ShaderRenderCaseInstance::compareImages (const tcu::Surface& resImage, const tcu::Surface& refImage, float errorThreshold)
+{
+	return tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ComparisonResult", "Image comparison result", refImage, resImage, errorThreshold, tcu::COMPARE_LOG_RESULT);
+}
+
+} // sr
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRender.hpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRender.hpp
new file mode 100644
index 0000000..3466066
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRender.hpp
@@ -0,0 +1,548 @@
+#ifndef _VKTSHADERRENDER_HPP
+#define _VKTSHADERRENDER_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan ShaderRenderCase
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuTexture.hpp"
+#include "tcuSurface.hpp"
+
+#include "deMemory.h"
+#include "deSharedPtr.hpp"
+#include "deUniquePtr.hpp"
+
+#include "vkDefs.hpp"
+#include "vkPrograms.hpp"
+#include "vkRef.hpp"
+#include "vkMemUtil.hpp"
+#include "vkBuilderUtil.hpp"
+
+#include "vktTestCaseUtil.hpp"
+
+namespace vkt
+{
+namespace sr
+{
+
+class LineStream
+{
+public:
+						LineStream		(int indent = 0)	{ m_indent = indent; }
+						~LineStream		(void)				{}
+
+	const char*			str				(void) const		{ m_string = m_stream.str(); return m_string.c_str(); }
+	LineStream&			operator<<		(const char* line)	{ for (int i = 0; i < m_indent; i++) { m_stream << "\t"; } m_stream << line << "\n"; return *this; }
+
+private:
+	int					m_indent;
+	std::ostringstream	m_stream;
+	mutable std::string	m_string;
+};
+
+class QuadGrid;
+class ShaderRenderCaseInstance;
+
+class TextureBinding
+{
+public:
+	enum Type
+	{
+		TYPE_NONE = 0,
+		TYPE_2D,
+		TYPE_CUBE_MAP,
+		TYPE_2D_ARRAY,
+		TYPE_3D,
+
+		TYPE_LAST
+	};
+
+										TextureBinding		(const tcu::Archive&	archive,
+															const char*				filename,
+															const Type				type,
+															const tcu::Sampler&		sampler);
+										~TextureBinding		(void);
+	Type								getType				(void) const { return m_type;		}
+	const tcu::Sampler&					getSampler			(void) const { return m_sampler;	}
+	const tcu::Texture2D&				get2D				(void) const { DE_ASSERT(getType() == TYPE_2D && m_binding.tex2D != NULL); return *m_binding.tex2D; }
+
+private:
+										TextureBinding		(const TextureBinding&);	// not allowed!
+	TextureBinding&						operator=			(const TextureBinding&);	// not allowed!
+
+	static de::MovePtr<tcu::Texture2D>	loadTexture2D		(const tcu::Archive& archive, const char* filename);
+
+	Type								m_type;
+	tcu::Sampler						m_sampler;
+
+	union
+	{
+		const tcu::Texture2D*	tex2D;
+	} m_binding;
+};
+
+typedef de::SharedPtr<TextureBinding> TextureBindingSp;
+
+// ShaderEvalContext.
+
+class ShaderEvalContext
+{
+public:
+	// Limits.
+	enum
+	{
+		MAX_USER_ATTRIBS	= 4,
+		MAX_TEXTURES		= 4
+	};
+
+	struct ShaderSampler
+	{
+		tcu::Sampler				sampler;
+		const tcu::Texture2D*		tex2D;
+		const tcu::TextureCube*		texCube;
+		const tcu::Texture2DArray*	tex2DArray;
+		const tcu::Texture3D*		tex3D;
+
+		inline ShaderSampler (void)
+			: tex2D		(DE_NULL)
+			, texCube	(DE_NULL)
+			, tex2DArray(DE_NULL)
+			, tex3D		(DE_NULL)
+		{
+		}
+	};
+
+							ShaderEvalContext		(const QuadGrid& quadGrid);
+							~ShaderEvalContext		(void);
+
+	void					reset					(float sx, float sy);
+
+	// Inputs.
+	tcu::Vec4				coords;
+	tcu::Vec4				unitCoords;
+	tcu::Vec4				constCoords;
+
+	tcu::Vec4				in[MAX_USER_ATTRIBS];
+	ShaderSampler			textures[MAX_TEXTURES];
+
+	// Output.
+	tcu::Vec4				color;
+	bool					isDiscarded;
+
+	// Functions.
+	inline void				discard					(void)  { isDiscarded = true; }
+	tcu::Vec4				texture2D				(int unitNdx, const tcu::Vec2& coords);
+
+private:
+	const QuadGrid&			m_quadGrid;
+};
+
+typedef void (*ShaderEvalFunc) (ShaderEvalContext& c);
+
+inline void evalCoordsPassthroughX		(ShaderEvalContext& c) { c.color.x() = c.coords.x(); }
+inline void evalCoordsPassthroughXY		(ShaderEvalContext& c) { c.color.xy() = c.coords.swizzle(0,1); }
+inline void evalCoordsPassthroughXYZ	(ShaderEvalContext& c) { c.color.xyz() = c.coords.swizzle(0,1,2); }
+inline void evalCoordsPassthrough		(ShaderEvalContext& c) { c.color = c.coords; }
+inline void evalCoordsSwizzleWZYX		(ShaderEvalContext& c) { c.color = c.coords.swizzle(3,2,1,0); }
+
+// ShaderEvaluator
+// Either inherit a class with overridden evaluate() or just pass in an evalFunc.
+
+class ShaderEvaluator
+{
+public:
+							ShaderEvaluator			(void);
+							ShaderEvaluator			(const ShaderEvalFunc evalFunc);
+	virtual					~ShaderEvaluator		(void);
+
+	virtual void			evaluate				(ShaderEvalContext& ctx) const;
+
+private:
+							ShaderEvaluator			(const ShaderEvaluator&);   // not allowed!
+	ShaderEvaluator&		operator=				(const ShaderEvaluator&);   // not allowed!
+
+	const ShaderEvalFunc	m_evalFunc;
+};
+
+// UniformSetup
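+// Either inherit a class with overridden setup() or just pass in a setup function.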
+
+typedef void (*UniformSetupFunc) (ShaderRenderCaseInstance& instance, const tcu::Vec4& constCoords);
+
+class UniformSetup
+{
+public:
+							UniformSetup			(void);
+							UniformSetup			(const UniformSetupFunc setup);
+	virtual					~UniformSetup			(void);
+	virtual void			setup					(ShaderRenderCaseInstance& instance, const tcu::Vec4& constCoords) const;
+
+private:
+							UniformSetup			(const UniformSetup&);	// not allowed!
+	UniformSetup&			operator=				(const UniformSetup&);	// not allowed!
+
+	const UniformSetupFunc	m_setupFunc;
+};
+
+typedef void (*AttributeSetupFunc) (ShaderRenderCaseInstance& instance, deUint32 numVertices);
+
+class ShaderRenderCase : public vkt::TestCase
+{
+public:
+													ShaderRenderCase	(tcu::TestContext&			testCtx,
+																		 const std::string&			name,
+																		 const std::string&			description,
+																		 const bool					isVertexCase,
+																		 const ShaderEvalFunc		evalFunc,
+																		 const UniformSetup*		uniformSetup,
+																		 const AttributeSetupFunc	attribFunc);
+
+													ShaderRenderCase	(tcu::TestContext&			testCtx,
+																		 const std::string&			name,
+																		 const std::string&			description,
+																		 const bool					isVertexCase,
+																		 const ShaderEvaluator*		evaluator,
+																		 const UniformSetup*		uniformSetup,
+																		 const AttributeSetupFunc	attribFunc);
+
+
+	virtual											~ShaderRenderCase	(void);
+	virtual	void									initPrograms		(vk::SourceCollections& programCollection) const;
+	virtual	TestInstance*							createInstance		(Context& context) const;
+
+protected:
+	std::string										m_vertShaderSource;
+	std::string										m_fragShaderSource;
+
+	const bool										m_isVertexCase;
+	const de::UniquePtr<const ShaderEvaluator>		m_evaluator;
+	const de::UniquePtr<const UniformSetup>			m_uniformSetup;
+	const AttributeSetupFunc						m_attribFunc;
+};
+
+
+enum BaseUniformType
+{
+// Bool
+	UB_FALSE,
+	UB_TRUE,
+
+// BVec4
+	UB4_FALSE,
+	UB4_TRUE,
+
+// Integers
+	UI_ZERO,
+	UI_ONE,
+	UI_TWO,
+	UI_THREE,
+	UI_FOUR,
+	UI_FIVE,
+	UI_SIX,
+	UI_SEVEN,
+	UI_EIGHT,
+	UI_ONEHUNDREDONE,
+
+// IVec2
+	UI2_MINUS_ONE,
+	UI2_ZERO,
+	UI2_ONE,
+	UI2_TWO,
+	UI2_THREE,
+	UI2_FOUR,
+	UI2_FIVE,
+
+// IVec3
+	UI3_MINUS_ONE,
+	UI3_ZERO,
+	UI3_ONE,
+	UI3_TWO,
+	UI3_THREE,
+	UI3_FOUR,
+	UI3_FIVE,
+
+// IVec4
+	UI4_MINUS_ONE,
+	UI4_ZERO,
+	UI4_ONE,
+	UI4_TWO,
+	UI4_THREE,
+	UI4_FOUR,
+	UI4_FIVE,
+
+// Float
+	UF_ZERO,
+	UF_ONE,
+	UF_TWO,
+	UF_THREE,
+	UF_FOUR,
+	UF_FIVE,
+	UF_SIX,
+	UF_SEVEN,
+	UF_EIGHT,
+
+	UF_HALF,
+	UF_THIRD,
+	UF_FOURTH,
+	UF_FIFTH,
+	UF_SIXTH,
+	UF_SEVENTH,
+	UF_EIGHTH,
+
+// Vec2
+	UV2_MINUS_ONE,
+	UV2_ZERO,
+	UV2_ONE,
+	UV2_TWO,
+	UV2_THREE,
+
+	UV2_HALF,
+
+// Vec3
+	UV3_MINUS_ONE,
+	UV3_ZERO,
+	UV3_ONE,
+	UV3_TWO,
+	UV3_THREE,
+
+	UV3_HALF,
+
+// Vec4
+	UV4_MINUS_ONE,
+	UV4_ZERO,
+	UV4_ONE,
+	UV4_TWO,
+	UV4_THREE,
+
+	UV4_HALF,
+
+	UV4_BLACK,
+	UV4_GRAY,
+	UV4_WHITE
+};
+
+enum BaseAttributeType
+{
+// User attributes
+	A_IN0,
+	A_IN1,
+	A_IN2,
+	A_IN3,
+
+// Matrices
+	MAT2,
+	MAT2x3,
+	MAT2x4,
+	MAT3x2,
+	MAT3,
+	MAT3x4,
+	MAT4x2,
+	MAT4x3,
+	MAT4
+};
+
+// ShaderRenderCaseInstance.
+
+class ShaderRenderCaseInstance : public vkt::TestInstance
+{
+public:
+														ShaderRenderCaseInstance	(Context&					context,
+																					const bool					isVertexCase,
+																					const ShaderEvaluator&		evaluator,
+																					const UniformSetup&			uniformSetup,
+																					const AttributeSetupFunc	attribFunc);
+
+	virtual												~ShaderRenderCaseInstance	(void);
+	virtual tcu::TestStatus								iterate						(void);
+
+	void												addAttribute				(deUint32			bindingLocation,
+																					vk::VkFormat		format,
+																					deUint32			sizePerElement,
+																					deUint32			count,
+																					const void*			data);
+	void												useAttribute				(deUint32			bindingLocation,
+																					BaseAttributeType	type);
+
+	template<typename T>
+	void												addUniform					(deUint32				bindingLocation,
+																					vk::VkDescriptorType	descriptorType,
+																					const T&				data);
+	void												addUniform					(deUint32				bindingLocation,
+																					vk::VkDescriptorType	descriptorType,
+																					size_t					dataSize,
+																					const void*				data);
+	void												useUniform					(deUint32				bindingLocation,
+																					BaseUniformType			type);
+	void												useSampler2D				(deUint32				bindingLocation,
+																					deUint32				textureId);
+
+protected:
+	virtual void										setup						(void);
+	virtual void										setupUniforms				(const tcu::Vec4& constCoords);
+
+	const tcu::UVec2									getViewportSize				(void) const;
+
+	std::vector<tcu::Mat4>								m_userAttribTransforms;
+	const tcu::Vec4										m_clearColor;
+	std::vector<TextureBindingSp>						m_textures;
+
+	vk::Allocator&										m_memAlloc;
+
+private:
+
+	void												setupTextures				(void);
+	de::MovePtr<vk::Allocation>							uploadImage2D				(const tcu::Texture2D&			refTexture,
+																					 const vk::VkImage&				vkTexture);
+	vk::Move<vk::VkImage>								createImage2D				(const tcu::Texture2D&			texture,
+																					 const vk::VkFormat				format,
+																					 const vk::VkImageUsageFlags	usage,
+																					 const vk::VkImageTiling		tiling);
+	void												copyTilingImageToOptimal	(const vk::VkImage&				srcImage,
+																					 const vk::VkImage&				dstImage,
+																					 deUint32						width,
+																					 deUint32						height);
+
+	void												setupUniformData			(deUint32 bindingLocation, size_t size, const void* dataPtr);
+	void												setupDefaultInputs			(const QuadGrid& quadGrid);
+
+	void												render						(tcu::Surface& result, const QuadGrid& quadGrid);
+	void												computeVertexReference		(tcu::Surface& result, const QuadGrid& quadGrid);
+	void												computeFragmentReference	(tcu::Surface& result, const QuadGrid& quadGrid);
+	bool												compareImages				(const tcu::Surface&	resImage,
+																					 const tcu::Surface&	refImage,
+																					 float					errorThreshold);
+
+	const bool											m_isVertexCase;
+	const ShaderEvaluator&								m_evaluator;
+	const UniformSetup&									m_uniformSetup;
+	const AttributeSetupFunc							m_attribFunc;
+
+	struct EnabledBaseAttribute
+	{
+		deUint32			location;
+		BaseAttributeType	type;
+	};
+	std::vector<EnabledBaseAttribute>					m_enabledBaseAttributes;
+
+	const tcu::UVec2									m_renderSize;
+	const vk::VkFormat									m_colorFormat;
+
+	vk::Move<vk::VkImage>								m_colorImage;
+	de::MovePtr<vk::Allocation>							m_colorImageAlloc;
+	vk::Move<vk::VkImageView>							m_colorImageView;
+
+	vk::Move<vk::VkRenderPass>							m_renderPass;
+	vk::Move<vk::VkFramebuffer>							m_framebuffer;
+	vk::Move<vk::VkPipelineLayout>						m_pipelineLayout;
+	vk::Move<vk::VkPipeline>							m_graphicsPipeline;
+
+	vk::Move<vk::VkShaderModule>						m_vertexShaderModule;
+	vk::Move<vk::VkShaderModule>						m_fragmentShaderModule;
+
+	vk::Move<vk::VkBuffer>								m_indiceBuffer;
+	de::MovePtr<vk::Allocation>							m_indiceBufferAlloc;
+
+	vk::Move<vk::VkDescriptorSetLayout>					m_descriptorSetLayout;
+
+	vk::Move<vk::VkDescriptorPool>						m_descriptorPool;
+	vk::Move<vk::VkDescriptorSet>						m_descriptorSet;
+
+	vk::Move<vk::VkCommandPool>							m_cmdPool;
+	vk::Move<vk::VkCommandBuffer>						m_cmdBuffer;
+
+	vk::Move<vk::VkFence>								m_fence;
+
+	vk::DescriptorSetLayoutBuilder						m_descriptorSetLayoutBuilder;
+	vk::DescriptorPoolBuilder							m_descriptorPoolBuilder;
+	vk::DescriptorSetUpdateBuilder						m_descriptorSetUpdateBuilder;
+
+	typedef de::SharedPtr<vk::Unique<vk::VkBuffer> >		VkBufferSp;
+
+	typedef de::SharedPtr<vk::Unique<vk::VkImage> >			VkImageSp;
+	typedef de::SharedPtr<vk::Unique<vk::VkImageView> >		VkImageViewSp;
+	typedef de::SharedPtr<vk::Unique<vk::VkSampler> >		VkSamplerSp;
+	typedef de::SharedPtr<vk::Allocation>					AllocationSp;
+
+	class UniformInfo
+	{
+	public:
+									UniformInfo		(void) {}
+		virtual						~UniformInfo	(void) {}
+
+		vk::VkDescriptorType		type;
+		deUint32					location;
+	};
+
+	class BufferUniform : public UniformInfo
+	{
+	public:
+									BufferUniform	(void) {}
+		virtual						~BufferUniform	(void) {}
+
+		VkBufferSp					buffer;
+		AllocationSp				alloc;
+		vk::VkDescriptorBufferInfo	descriptor;
+	};
+
+	class SamplerUniform : public UniformInfo
+	{
+	public:
+									SamplerUniform	(void) {}
+		virtual						~SamplerUniform	(void) {}
+
+		VkImageSp					image;
+		VkImageViewSp				imageView;
+		VkSamplerSp					sampler;
+		AllocationSp				alloc;
+		vk::VkDescriptorImageInfo	descriptor;
+	};
+
+	typedef de::SharedPtr<de::UniquePtr<UniformInfo> >	UniformInfoSp;
+	std::vector<UniformInfoSp>							m_uniformInfos;
+
+	std::vector<vk::VkVertexInputBindingDescription>	m_vertexBindingDescription;
+	std::vector<vk::VkVertexInputAttributeDescription>	m_vertexattributeDescription;
+
+	std::vector<VkBufferSp>								m_vertexBuffers;
+	std::vector<AllocationSp>							m_vertexBufferAllocs;
+};
+
+template<typename T>
+void ShaderRenderCaseInstance::addUniform (deUint32 bindingLocation, vk::VkDescriptorType descriptorType, const T& data)
+{
+	addUniform(bindingLocation, descriptorType, sizeof(T), &data);
+}
+
+} // sr
+} // vkt
+
+#endif // _VKTSHADERRENDER_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDiscardTests.cpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDiscardTests.cpp
new file mode 100644
index 0000000..cc606a5
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDiscardTests.cpp
@@ -0,0 +1,404 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader discard statement tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderRenderDiscardTests.hpp"
+#include "vktShaderRender.hpp"
+#include "tcuStringTemplate.hpp"
+#include "gluTexture.hpp"
+
+#include <string>
+
+using tcu::StringTemplate;
+
+namespace vkt
+{
+namespace sr
+{
+namespace
+{
+
+class SamplerUniformSetup : public UniformSetup
+{
+public:
+						SamplerUniformSetup			(bool useSampler)
+							: m_useSampler(useSampler)
+						{}
+
+	virtual void		setup						 (ShaderRenderCaseInstance& instance, const tcu::Vec4&) const
+						{
+							instance.useUniform(0u, UI_ONE);
+							instance.useUniform(1u, UI_TWO);
+							if (m_useSampler)
+								instance.useSampler2D(2u, 0u); // To the uniform binding location 2 bind the texture 0
+						}
+
+private:
+	const bool			m_useSampler;
+};
+
+
+class ShaderDiscardCaseInstance : public ShaderRenderCaseInstance
+{
+public:
+						ShaderDiscardCaseInstance	(Context&				context,
+													bool					isVertexCase,
+													const ShaderEvaluator&	evaluator,
+													const UniformSetup&		uniformSetup,
+													bool					usesTexture);
+	virtual				~ShaderDiscardCaseInstance	(void);
+};
+
+ShaderDiscardCaseInstance::ShaderDiscardCaseInstance (Context&					context,
+													 bool						isVertexCase,
+													 const ShaderEvaluator&		evaluator,
+													 const UniformSetup&		uniformSetup,
+													 bool						usesTexture)
+	: ShaderRenderCaseInstance	(context, isVertexCase, evaluator, uniformSetup, DE_NULL)
+{
+	if (usesTexture)
+	{
+		de::SharedPtr<TextureBinding> brickTexture(new TextureBinding(m_context.getTestContext().getArchive(),
+																	  "vulkan/data/brick.png",
+																	  TextureBinding::TYPE_2D,
+																	  tcu::Sampler(tcu::Sampler::CLAMP_TO_EDGE,
+																					tcu::Sampler::CLAMP_TO_EDGE,
+																					tcu::Sampler::CLAMP_TO_EDGE,
+																					tcu::Sampler::LINEAR,
+																					tcu::Sampler::LINEAR)));
+		m_textures.push_back(brickTexture);
+	}
+}
+
+ShaderDiscardCaseInstance::~ShaderDiscardCaseInstance (void)
+{
+}
+
+class ShaderDiscardCase : public ShaderRenderCase
+{
+public:
+							ShaderDiscardCase			(tcu::TestContext&		testCtx,
+														 const char*			name,
+														 const char*			description,
+														 const char*			shaderSource,
+														 const ShaderEvalFunc	evalFunc,
+														 bool					usesTexture);
+	virtual TestInstance*	createInstance				(Context& context) const
+							{
+								DE_ASSERT(m_evaluator != DE_NULL);
+								DE_ASSERT(m_uniformSetup != DE_NULL);
+								return new ShaderDiscardCaseInstance(context, m_isVertexCase, *m_evaluator, *m_uniformSetup, m_usesTexture);
+							}
+
+private:
+	const bool				m_usesTexture;
+};
+
+ShaderDiscardCase::ShaderDiscardCase (tcu::TestContext&		testCtx,
+									  const char*			name,
+									  const char*			description,
+									  const char*			shaderSource,
+									  const ShaderEvalFunc	evalFunc,
+									  bool					usesTexture)
+	: ShaderRenderCase	(testCtx, name, description, false, evalFunc, new SamplerUniformSetup(usesTexture), DE_NULL)
+	, m_usesTexture		(usesTexture)
+{
+	m_fragShaderSource	= shaderSource;
+	m_vertShaderSource	=
+		"#version 310 es\n"
+		"layout(location=0) in  highp   vec4 a_position;\n"
+		"layout(location=1) in  highp   vec4 a_coords;\n"
+		"layout(location=0) out mediump vec4 v_color;\n"
+		"layout(location=1) out mediump vec4 v_coords;\n\n"
+		"void main (void)\n"
+		"{\n"
+		"    gl_Position = a_position;\n"
+		"    v_color = vec4(a_coords.xyz, 1.0);\n"
+		"    v_coords = a_coords;\n"
+		"}\n";
+}
+
+
+enum DiscardMode
+{
+	DISCARDMODE_ALWAYS = 0,
+	DISCARDMODE_NEVER,
+	DISCARDMODE_UNIFORM,
+	DISCARDMODE_DYNAMIC,
+	DISCARDMODE_TEXTURE,
+
+	DISCARDMODE_LAST
+};
+
+enum DiscardTemplate
+{
+	DISCARDTEMPLATE_MAIN_BASIC = 0,
+	DISCARDTEMPLATE_FUNCTION_BASIC,
+	DISCARDTEMPLATE_MAIN_STATIC_LOOP,
+	DISCARDTEMPLATE_MAIN_DYNAMIC_LOOP,
+	DISCARDTEMPLATE_FUNCTION_STATIC_LOOP,
+
+	DISCARDTEMPLATE_LAST
+};
+
+// Evaluation functions
+inline void evalDiscardAlways	(ShaderEvalContext& c) { c.discard(); }
+inline void evalDiscardNever	(ShaderEvalContext& c) { c.color.xyz() = c.coords.swizzle(0,1,2); }
+inline void evalDiscardDynamic	(ShaderEvalContext& c) { c.color.xyz() = c.coords.swizzle(0,1,2); if (c.coords.x()+c.coords.y() > 0.0f) c.discard(); }
+
+inline void evalDiscardTexture (ShaderEvalContext& c)
+{
+	c.color.xyz() = c.coords.swizzle(0,1,2);
+	if (c.texture2D(0, c.coords.swizzle(0,1) * 0.25f + 0.5f).x() < 0.7f)
+		c.discard();
+}
+
+static ShaderEvalFunc getEvalFunc (DiscardMode mode)
+{
+	switch (mode)
+	{
+		case DISCARDMODE_ALWAYS:	return evalDiscardAlways;
+		case DISCARDMODE_NEVER:		return evalDiscardNever;
+		case DISCARDMODE_UNIFORM:	return evalDiscardAlways;
+		case DISCARDMODE_DYNAMIC:	return evalDiscardDynamic;
+		case DISCARDMODE_TEXTURE:	return evalDiscardTexture;
+		default:
+			DE_ASSERT(DE_FALSE);
+			return evalDiscardAlways;
+	}
+}
+
+static const char* getTemplate (DiscardTemplate variant)
+{
+	#define GLSL_SHADER_TEMPLATE_HEADER \
+				"#version 310 es\n"	\
+				"layout(location = 0) in mediump vec4 v_color;\n"	\
+				"layout(location = 1) in mediump vec4 v_coords;\n"	\
+				"layout(location = 0) out mediump vec4 o_color;\n"	\
+				"layout(set = 0, binding = 2) uniform sampler2D    ut_brick;\n"	\
+				"layout(set = 0, binding = 0) uniform block0 { mediump int  ui_one; };\n\n"
+
+	switch (variant)
+	{
+		case DISCARDTEMPLATE_MAIN_BASIC:
+			return GLSL_SHADER_TEMPLATE_HEADER
+				   "void main (void)\n"
+				   "{\n"
+				   "    o_color = v_color;\n"
+				   "    ${DISCARD};\n"
+				   "}\n";
+
+		case DISCARDTEMPLATE_FUNCTION_BASIC:
+			return GLSL_SHADER_TEMPLATE_HEADER
+				   "void myfunc (void)\n"
+				   "{\n"
+				   "    ${DISCARD};\n"
+				   "}\n\n"
+				   "void main (void)\n"
+				   "{\n"
+				   "    o_color = v_color;\n"
+				   "    myfunc();\n"
+				   "}\n";
+
+		case DISCARDTEMPLATE_MAIN_STATIC_LOOP:
+			return GLSL_SHADER_TEMPLATE_HEADER
+				   "void main (void)\n"
+				   "{\n"
+				   "    o_color = v_color;\n"
+				   "    for (int i = 0; i < 2; i++)\n"
+				   "    {\n"
+				   "        if (i > 0)\n"
+				   "            ${DISCARD};\n"
+				   "    }\n"
+				   "}\n";
+
+		case DISCARDTEMPLATE_MAIN_DYNAMIC_LOOP:
+			return GLSL_SHADER_TEMPLATE_HEADER
+				   "layout(set = 0, binding = 1) uniform block1 { mediump int  ui_two; };\n\n"
+				   "void main (void)\n"
+				   "{\n"
+				   "    o_color = v_color;\n"
+				   "    for (int i = 0; i < ui_two; i++)\n"
+				   "    {\n"
+				   "        if (i > 0)\n"
+				   "            ${DISCARD};\n"
+				   "    }\n"
+				   "}\n";
+
+		case DISCARDTEMPLATE_FUNCTION_STATIC_LOOP:
+			return GLSL_SHADER_TEMPLATE_HEADER
+				   "void myfunc (void)\n"
+				   "{\n"
+				   "    for (int i = 0; i < 2; i++)\n"
+				   "    {\n"
+				   "        if (i > 0)\n"
+				   "            ${DISCARD};\n"
+				   "    }\n"
+				   "}\n\n"
+				   "void main (void)\n"
+				   "{\n"
+				   "    o_color = v_color;\n"
+				   "    myfunc();\n"
+				   "}\n";
+
+		default:
+			DE_ASSERT(DE_FALSE);
+			return DE_NULL;
+	}
+
+	#undef GLSL_SHADER_TEMPLATE_HEADER
+}
+
+static const char* getTemplateName (DiscardTemplate variant)
+{
+	switch (variant)
+	{
+		case DISCARDTEMPLATE_MAIN_BASIC:			return "basic";
+		case DISCARDTEMPLATE_FUNCTION_BASIC:		return "function";
+		case DISCARDTEMPLATE_MAIN_STATIC_LOOP:		return "static_loop";
+		case DISCARDTEMPLATE_MAIN_DYNAMIC_LOOP:		return "dynamic_loop";
+		case DISCARDTEMPLATE_FUNCTION_STATIC_LOOP:	return "function_static_loop";
+		default:
+			DE_ASSERT(DE_FALSE);
+			return DE_NULL;
+	}
+}
+
+static const char* getModeName (DiscardMode mode)
+{
+	switch (mode)
+	{
+		case DISCARDMODE_ALWAYS:	return "always";
+		case DISCARDMODE_NEVER:		return "never";
+		case DISCARDMODE_UNIFORM:	return "uniform";
+		case DISCARDMODE_DYNAMIC:	return "dynamic";
+		case DISCARDMODE_TEXTURE:	return "texture";
+		default:
+			DE_ASSERT(DE_FALSE);
+			return DE_NULL;
+	}
+}
+
+static const char* getTemplateDesc (DiscardTemplate variant)
+{
+	switch (variant)
+	{
+		case DISCARDTEMPLATE_MAIN_BASIC:			return "main";
+		case DISCARDTEMPLATE_FUNCTION_BASIC:		return "function";
+		case DISCARDTEMPLATE_MAIN_STATIC_LOOP:		return "static loop";
+		case DISCARDTEMPLATE_MAIN_DYNAMIC_LOOP:		return "dynamic loop";
+		case DISCARDTEMPLATE_FUNCTION_STATIC_LOOP:	return "static loop in function";
+		default:
+			DE_ASSERT(DE_FALSE);
+			return DE_NULL;
+	}
+}
+
+static const char* getModeDesc (DiscardMode mode)
+{
+	switch (mode)
+	{
+		case DISCARDMODE_ALWAYS:	return "Always discard";
+		case DISCARDMODE_NEVER:		return "Never discard";
+		case DISCARDMODE_UNIFORM:	return "Discard based on uniform value";
+		case DISCARDMODE_DYNAMIC:	return "Discard based on varying values";
+		case DISCARDMODE_TEXTURE:	return "Discard based on texture value";
+		default:
+			DE_ASSERT(DE_FALSE);
+			return DE_NULL;
+	}
+}
+
+de::MovePtr<ShaderDiscardCase> makeDiscardCase (tcu::TestContext& testCtx, DiscardTemplate tmpl, DiscardMode mode)
+{
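+	// Build the fragment shader by specializing the ${DISCARD} placeholder in the selected template with the statement for the requested mode.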
+	StringTemplate shaderTemplate(getTemplate(tmpl));
+
+	std::map<std::string, std::string> params;
+
+	switch (mode)
+	{
+		case DISCARDMODE_ALWAYS:	params["DISCARD"] = "discard";										break;
+		case DISCARDMODE_NEVER:		params["DISCARD"] = "if (false) discard";							break;
+		case DISCARDMODE_UNIFORM:	params["DISCARD"] = "if (ui_one > 0) discard";						break;
+		case DISCARDMODE_DYNAMIC:	params["DISCARD"] = "if (v_coords.x+v_coords.y > 0.0) discard";		break;
+		case DISCARDMODE_TEXTURE:	params["DISCARD"] = "if (texture(ut_brick, v_coords.xy*0.25+0.5).x < 0.7) discard";	break;
+		default:
+			DE_ASSERT(DE_FALSE);
+			break;
+	}
+
+	std::string name		= std::string(getTemplateName(tmpl)) + "_" + getModeName(mode);
+	std::string description	= std::string(getModeDesc(mode)) + " in " + getTemplateDesc(tmpl);
+
+	return de::MovePtr<ShaderDiscardCase>(new ShaderDiscardCase(testCtx, name.c_str(), description.c_str(), shaderTemplate.specialize(params).c_str(), getEvalFunc(mode), mode == DISCARDMODE_TEXTURE));
+}
+
+class ShaderDiscardTests : public tcu::TestCaseGroup
+{
+public:
+							ShaderDiscardTests		(tcu::TestContext& testCtx);
+	virtual					~ShaderDiscardTests		(void);
+
+	virtual void			init					(void);
+
+private:
+							ShaderDiscardTests		(const ShaderDiscardTests&);		// not allowed!
+	ShaderDiscardTests&		operator=				(const ShaderDiscardTests&);		// not allowed!
+};
+
+ShaderDiscardTests::ShaderDiscardTests (tcu::TestContext& testCtx)
+	: TestCaseGroup(testCtx, "discard", "Discard statement tests")
+{
+}
+
+ShaderDiscardTests::~ShaderDiscardTests (void)
+{
+}
+
+void ShaderDiscardTests::init (void)
+{
+	for (int tmpl = 0; tmpl < DISCARDTEMPLATE_LAST; tmpl++)
+		for (int mode = 0; mode < DISCARDMODE_LAST; mode++)
+			addChild(makeDiscardCase(m_testCtx, (DiscardTemplate)tmpl, (DiscardMode)mode).release());
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createDiscardTests (tcu::TestContext& testCtx)
+{
+	return new ShaderDiscardTests(testCtx);
+}
+
+} // sr
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDiscardTests.hpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDiscardTests.hpp
new file mode 100644
index 0000000..1bc4c98
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderDiscardTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTSHADERRENDERDISCARDTESTS_HPP
+#define _VKTSHADERRENDERDISCARDTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader discard statement tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace sr
+{
+
+tcu::TestCaseGroup*	createDiscardTests	(tcu::TestContext& testCtx);
+
+} // sr
+} // vkt
+
+#endif // _VKTSHADERRENDERDISCARDTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderIndexingTests.cpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderIndexingTests.cpp
new file mode 100644
index 0000000..58a4e2e
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderIndexingTests.cpp
@@ -0,0 +1,1256 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader indexing (arrays, vector, matrices) tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderRenderIndexingTests.hpp"
+#include "vktShaderRender.hpp"
+#include "gluShaderUtil.hpp"
+#include "tcuStringTemplate.hpp"
+
+#include <map>
+
+using namespace std;
+using namespace tcu;
+using namespace glu;
+
+namespace vkt
+{
+namespace sr
+{
+
+namespace
+{
+
+enum IndexAccessType
+{
+	INDEXACCESS_STATIC = 0,
+	INDEXACCESS_DYNAMIC,
+	INDEXACCESS_STATIC_LOOP,
+	INDEXACCESS_DYNAMIC_LOOP,
+
+	INDEXACCESS_LAST
+};
+
+static const char* getIndexAccessTypeName (IndexAccessType accessType)
+{
+	static const char* s_names[INDEXACCESS_LAST] =
+	{
+		"static",
+		"dynamic",
+		"static_loop",
+		"dynamic_loop"
+	};
+
+	DE_ASSERT(deInBounds32((int)accessType, 0, INDEXACCESS_LAST));
+	return s_names[(int)accessType];
+}
+
+enum VectorAccessType
+{
+	DIRECT = 0,
+	COMPONENT,
+	SUBSCRIPT_STATIC,
+	SUBSCRIPT_DYNAMIC,
+	SUBSCRIPT_STATIC_LOOP,
+	SUBSCRIPT_DYNAMIC_LOOP,
+
+	VECTORACCESS_LAST
+};
+
+static const char* getVectorAccessTypeName (VectorAccessType accessType)
+{
+	static const char* s_names[VECTORACCESS_LAST] =
+	{
+		"direct",
+		"component",
+		"static_subscript",
+		"dynamic_subscript",
+		"static_loop_subscript",
+		"dynamic_loop_subscript"
+	};
+
+	DE_ASSERT(deInBounds32((int)accessType, 0, VECTORACCESS_LAST));
+	return s_names[(int)accessType];
+}
+
+void evalArrayCoordsFloat		(ShaderEvalContext& c) { c.color.x()	= 1.875f * c.coords.x(); }
+void evalArrayCoordsVec2		(ShaderEvalContext& c) { c.color.xy()	= 1.875f * c.coords.swizzle(0,1); }
+void evalArrayCoordsVec3		(ShaderEvalContext& c) { c.color.xyz()	= 1.875f * c.coords.swizzle(0,1,2); }
+void evalArrayCoordsVec4		(ShaderEvalContext& c) { c.color		= 1.875f * c.coords; }
+
+static ShaderEvalFunc getArrayCoordsEvalFunc (DataType dataType)
+{
+	if (dataType == TYPE_FLOAT)				return evalArrayCoordsFloat;
+	else if (dataType == TYPE_FLOAT_VEC2)	return evalArrayCoordsVec2;
+	else if (dataType == TYPE_FLOAT_VEC3)	return evalArrayCoordsVec3;
+	else if (dataType == TYPE_FLOAT_VEC4)	return evalArrayCoordsVec4;
+
+	DE_FATAL("Invalid data type.");
+	return NULL;
+}
+
+void evalArrayUniformFloat		(ShaderEvalContext& c) { c.color.x()	= 1.875f * c.constCoords.x(); }
+void evalArrayUniformVec2		(ShaderEvalContext& c) { c.color.xy()	= 1.875f * c.constCoords.swizzle(0,1); }
+void evalArrayUniformVec3		(ShaderEvalContext& c) { c.color.xyz()	= 1.875f * c.constCoords.swizzle(0,1,2); }
+void evalArrayUniformVec4		(ShaderEvalContext& c) { c.color		= 1.875f * c.constCoords; }
+
+static ShaderEvalFunc getArrayUniformEvalFunc (DataType dataType)
+{
+	if (dataType == TYPE_FLOAT)				return evalArrayUniformFloat;
+	else if (dataType == TYPE_FLOAT_VEC2)	return evalArrayUniformVec2;
+	else if (dataType == TYPE_FLOAT_VEC3)	return evalArrayUniformVec3;
+	else if (dataType == TYPE_FLOAT_VEC4)	return evalArrayUniformVec4;
+
+	DE_FATAL("Invalid data type.");
+	return NULL;
+}
+
+static const char* getIntUniformName (int number)
+{
+	switch (number)
+	{
+		case 0:		return "ui_zero";
+		case 1:		return "ui_one";
+		case 2:		return "ui_two";
+		case 3:		return "ui_three";
+		case 4:		return "ui_four";
+		case 5:		return "ui_five";
+		case 6:		return "ui_six";
+		case 7:		return "ui_seven";
+		case 8:		return "ui_eight";
+		case 101:	return "ui_oneHundredOne";
+		default:
+			DE_ASSERT(false);
+			return "";
+	}
+}
+
+class IndexingTestUniformSetup : public UniformSetup
+{
+public:
+							IndexingTestUniformSetup	(const DataType varType, bool usesArray)
+								: m_varType(varType)
+								, m_usesArray(usesArray)
+							{}
+	virtual					~IndexingTestUniformSetup	(void)
+							{}
+
+	virtual void			setup						(ShaderRenderCaseInstance& instance, const tcu::Vec4& constCoords) const;
+
+private:
+	const DataType			m_varType;
+	const bool				m_usesArray;
+};
+
+void IndexingTestUniformSetup::setup (ShaderRenderCaseInstance& instance, const tcu::Vec4& constCoords) const
+{
+	instance.useUniform(0u, UI_ZERO);
+	instance.useUniform(1u, UI_ONE);
+	instance.useUniform(2u, UI_TWO);
+	instance.useUniform(3u, UI_THREE);
+	instance.useUniform(4u, UI_FOUR);
+
+	if (m_usesArray)
+	{
+		Vec4 arr[4];
+		if (m_varType == TYPE_FLOAT)
+		{
+			arr[0] = Vec4(constCoords.x());
+			arr[1] = Vec4(constCoords.x() * 0.5f);
+			arr[2] = Vec4(constCoords.x() * 0.25f);
+			arr[3] = Vec4(constCoords.x() * 0.125f);
+		}
+		else if (m_varType == TYPE_FLOAT_VEC2)
+		{
+			arr[0] = constCoords.swizzle(0, 1).toWidth<4>();
+			arr[1] = (constCoords.swizzle(0, 1) * 0.5f).toWidth<4>();
+			arr[2] = (constCoords.swizzle(0, 1) * 0.25f).toWidth<4>();
+			arr[3] = (constCoords.swizzle(0, 1) * 0.125f).toWidth<4>();
+		}
+		else if (m_varType == TYPE_FLOAT_VEC3)
+		{
+			arr[0] = constCoords.swizzle(0, 1, 2).toWidth<4>();
+			arr[1] = (constCoords.swizzle(0, 1, 2) * 0.5f).toWidth<4>();
+			arr[2] = (constCoords.swizzle(0, 1, 2) * 0.25f).toWidth<4>();
+			arr[3] = (constCoords.swizzle(0, 1, 2) * 0.125f).toWidth<4>();
+		}
+		else if (m_varType == TYPE_FLOAT_VEC4)
+		{
+			arr[0] = constCoords;
+			arr[1] = constCoords * 0.5f;
+			arr[2] = constCoords * 0.25f;
+			arr[3] = constCoords * 0.125f;
+		}
+		else
+			throw tcu::TestError("invalid data type for u_arr");
+
+		instance.addUniform(5u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, sizeof(Vec4) * 4, arr[0].getPtr());
+	}
+}
+
+// ShaderIndexingCase
+
+class ShaderIndexingCase : public ShaderRenderCase
+{
+public:
+								ShaderIndexingCase		(tcu::TestContext&			testCtx,
+														const std::string&			name,
+														const std::string&			description,
+														bool						isVertexCase,
+														const ShaderEvalFunc		evalFunc,
+														const std::string&			vertShaderSource,
+														const std::string&			fragShaderSource,
+														const DataType				varType,
+														const bool					usesArray);
+	virtual						~ShaderIndexingCase		(void);
+
+private:
+								ShaderIndexingCase		(const ShaderIndexingCase&);	// not allowed!
+	ShaderIndexingCase&			operator=				(const ShaderIndexingCase&);	// not allowed!
+};
+
+ShaderIndexingCase::ShaderIndexingCase (tcu::TestContext&			testCtx,
+										const std::string&			name,
+										const std::string&			description,
+										const bool					isVertexCase,
+										const ShaderEvalFunc		evalFunc,
+										const std::string&			vertShaderSource,
+										const std::string&			fragShaderSource,
+										const DataType				varType,
+										const bool					usesArray)
+	: ShaderRenderCase(testCtx, name, description, isVertexCase, evalFunc, new IndexingTestUniformSetup(varType, usesArray), DE_NULL)
+{
+	m_vertShaderSource	= vertShaderSource;
+	m_fragShaderSource	= fragShaderSource;
+}
+
+ShaderIndexingCase::~ShaderIndexingCase (void)
+{
+}
+
+// Test case builders.
+
+static de::MovePtr<ShaderIndexingCase> createVaryingArrayCase (tcu::TestContext&	context,
+															const std::string&		caseName,
+															const std::string&		description,
+															DataType				varType,
+															IndexAccessType			vertAccess,
+															IndexAccessType			fragAccess)
+{
+	std::ostringstream vtx;
+	vtx << "#version 310 es\n";
+	vtx << "layout(location = 0) in highp vec4 a_position;\n";
+	vtx << "layout(location = 1) in highp vec4 a_coords;\n";
+	if (vertAccess == INDEXACCESS_DYNAMIC)
+	{
+		vtx << "layout(std140, binding = 0) uniform something0 { mediump int ui_zero; };\n";
+		vtx << "layout(std140, binding = 1) uniform something1 { mediump int ui_one; };\n";
+		vtx << "layout(std140, binding = 2) uniform something2 { mediump int ui_two; };\n";
+		vtx << "layout(std140, binding = 3) uniform something3 { mediump int ui_three; };\n";
+	}
+	else if (vertAccess == INDEXACCESS_DYNAMIC_LOOP)
+		vtx << "layout(std140, binding = 4) uniform something { mediump int ui_four; };\n";
+	vtx << "layout(location = 0) out ${PRECISION} ${VAR_TYPE} var[${ARRAY_LEN}];\n";
+	vtx << "\n";
+	vtx << "void main()\n";
+	vtx << "{\n";
+	vtx << "	gl_Position = a_position;\n";
+	if (vertAccess == INDEXACCESS_STATIC)
+	{
+		vtx << "	var[0] = ${VAR_TYPE}(a_coords);\n";
+		vtx << "	var[1] = ${VAR_TYPE}(a_coords) * 0.5;\n";
+		vtx << "	var[2] = ${VAR_TYPE}(a_coords) * 0.25;\n";
+		vtx << "	var[3] = ${VAR_TYPE}(a_coords) * 0.125;\n";
+	}
+	else if (vertAccess == INDEXACCESS_DYNAMIC)
+	{
+		vtx << "	var[ui_zero]  = ${VAR_TYPE}(a_coords);\n";
+		vtx << "	var[ui_one]   = ${VAR_TYPE}(a_coords) * 0.5;\n";
+		vtx << "	var[ui_two]   = ${VAR_TYPE}(a_coords) * 0.25;\n";
+		vtx << "	var[ui_three] = ${VAR_TYPE}(a_coords) * 0.125;\n";
+	}
+	else if (vertAccess == INDEXACCESS_STATIC_LOOP)
+	{
+		vtx << "	${PRECISION} ${VAR_TYPE} coords = ${VAR_TYPE}(a_coords);\n";
+		vtx << "	for (int i = 0; i < 4; i++)\n";
+		vtx << "	{\n";
+		vtx << "		var[i] = ${VAR_TYPE}(coords);\n";
+		vtx << "		coords = coords * 0.5;\n";
+		vtx << "	}\n";
+	}
+	else
+	{
+		DE_ASSERT(vertAccess == INDEXACCESS_DYNAMIC_LOOP);
+		vtx << "	${PRECISION} ${VAR_TYPE} coords = ${VAR_TYPE}(a_coords);\n";
+		vtx << "	for (int i = 0; i < ui_four; i++)\n";
+		vtx << "	{\n";
+		vtx << "		var[i] = ${VAR_TYPE}(coords);\n";
+		vtx << "		coords = coords * 0.5;\n";
+		vtx << "	}\n";
+	}
+	vtx << "}\n";
+
+	std::ostringstream frag;
+	frag << "#version 310 es\n";
+	frag << "precision mediump int;\n";
+	frag << "layout(location = 0) out mediump vec4 o_color;\n";
+	if (fragAccess == INDEXACCESS_DYNAMIC)
+	{
+		frag << "layout(std140, binding = 0) uniform something0 { mediump int ui_zero; };\n";
+		frag << "layout(std140, binding = 1) uniform something1 { mediump int ui_one; };\n";
+		frag << "layout(std140, binding = 2) uniform something2 { mediump int ui_two; };\n";
+		frag << "layout(std140, binding = 3) uniform something3 { mediump int ui_three; };\n";
+	}
+	else if (fragAccess == INDEXACCESS_DYNAMIC_LOOP)
+		frag << "layout(std140, binding = 4) uniform something4 { mediump int ui_four; };\n";
+	frag << "layout(location = 0) in ${PRECISION} ${VAR_TYPE} var[${ARRAY_LEN}];\n";
+	frag << "\n";
+	frag << "void main()\n";
+	frag << "{\n";
+	frag << "	${PRECISION} ${VAR_TYPE} res = ${VAR_TYPE}(0.0);\n";
+	if (fragAccess == INDEXACCESS_STATIC)
+	{
+		frag << "	res += var[0];\n";
+		frag << "	res += var[1];\n";
+		frag << "	res += var[2];\n";
+		frag << "	res += var[3];\n";
+	}
+	else if (fragAccess == INDEXACCESS_DYNAMIC)
+	{
+		frag << "	res += var[ui_zero];\n";
+		frag << "	res += var[ui_one];\n";
+		frag << "	res += var[ui_two];\n";
+		frag << "	res += var[ui_three];\n";
+	}
+	else if (fragAccess == INDEXACCESS_STATIC_LOOP)
+	{
+		frag << "	for (int i = 0; i < 4; i++)\n";
+		frag << "		res += var[i];\n";
+	}
+	else
+	{
+		DE_ASSERT(fragAccess == INDEXACCESS_DYNAMIC_LOOP);
+		frag << "	for (int i = 0; i < ui_four; i++)\n";
+		frag << "		res += var[i];\n";
+	}
+	frag << "	o_color = vec4(res${PADDING});\n";
+	frag << "}\n";
+
+	// Fill in shader templates.
+	map<string, string> params;
+	params.insert(pair<string, string>("VAR_TYPE", getDataTypeName(varType)));
+	params.insert(pair<string, string>("ARRAY_LEN", "4"));
+	params.insert(pair<string, string>("PRECISION", "mediump"));
+
+	if (varType == TYPE_FLOAT)
+		params.insert(pair<string, string>("PADDING", ", 0.0, 0.0, 1.0"));
+	else if (varType == TYPE_FLOAT_VEC2)
+		params.insert(pair<string, string>("PADDING", ", 0.0, 1.0"));
+	else if (varType == TYPE_FLOAT_VEC3)
+		params.insert(pair<string, string>("PADDING", ", 1.0"));
+	else
+		params.insert(pair<string, string>("PADDING", ""));
+
+	StringTemplate vertTemplate(vtx.str());
+	StringTemplate fragTemplate(frag.str());
+	string vertexShaderSource = vertTemplate.specialize(params);
+	string fragmentShaderSource = fragTemplate.specialize(params);
+
+	ShaderEvalFunc evalFunc = getArrayCoordsEvalFunc(varType);
+	return de::MovePtr<ShaderIndexingCase>(new ShaderIndexingCase(context, caseName, description, true, evalFunc, vertexShaderSource, fragmentShaderSource, varType, false));
+}
+
+static de::MovePtr<ShaderIndexingCase> createUniformArrayCase (tcu::TestContext&	context,
+															const std::string&		caseName,
+															const std::string&		description,
+															bool					isVertexCase,
+															DataType				varType,
+															IndexAccessType			readAccess)
+{
+	std::ostringstream vtx;
+	std::ostringstream frag;
+	std::ostringstream& op = isVertexCase ? vtx : frag;
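+	// 'op' writes the uniform declarations and array reads into whichever shader stage is under test.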
+
+	vtx << "#version 310 es\n";
+	frag << "#version 310 es\n";
+
+	vtx << "layout(location = 0) in highp vec4 a_position;\n";
+	vtx << "layout(location = 1) in highp vec4 a_coords;\n";
+	frag << "layout(location = 0) out mediump vec4 o_color;\n";
+
+	if (isVertexCase)
+	{
+		vtx << "layout(location = 0) out mediump vec4 v_color;\n";
+		frag << "layout(location = 0) in mediump vec4 v_color;\n";
+	}
+	else
+	{
+		vtx << "layout(location = 0) out mediump vec4 v_coords;\n";
+		frag << "layout(location = 0) in mediump vec4 v_coords;\n";
+	}
+
+	if (readAccess == INDEXACCESS_DYNAMIC)
+	{
+		op << "layout(std140, binding = 0) uniform something0 { mediump int ui_zero; };\n";
+		op << "layout(std140, binding = 1) uniform something1 { mediump int ui_one; };\n";
+		op << "layout(std140, binding = 2) uniform something2 { mediump int ui_two; };\n";
+		op << "layout(std140, binding = 3) uniform something3 { mediump int ui_three; };\n";
+	}
+	else if (readAccess == INDEXACCESS_DYNAMIC_LOOP)
+		op << "layout(std140, binding = 4) uniform something4 { mediump int ui_four; };\n";
+
+	op << "layout(std140, binding = 5) uniform something5 { ${PRECISION} ${VAR_TYPE} u_arr[${ARRAY_LEN}]; };\n";
+
+	vtx << "\n";
+	vtx << "void main()\n";
+	vtx << "{\n";
+	vtx << "	gl_Position = a_position;\n";
+
+	frag << "\n";
+	frag << "void main()\n";
+	frag << "{\n";
+
+	// Read array.
+	op << "	${PRECISION} ${VAR_TYPE} res = ${VAR_TYPE}(0.0);\n";
+	if (readAccess == INDEXACCESS_STATIC)
+	{
+		op << "	res += u_arr[0];\n";
+		op << "	res += u_arr[1];\n";
+		op << "	res += u_arr[2];\n";
+		op << "	res += u_arr[3];\n";
+	}
+	else if (readAccess == INDEXACCESS_DYNAMIC)
+	{
+		op << "	res += u_arr[ui_zero];\n";
+		op << "	res += u_arr[ui_one];\n";
+		op << "	res += u_arr[ui_two];\n";
+		op << "	res += u_arr[ui_three];\n";
+	}
+	else if (readAccess == INDEXACCESS_STATIC_LOOP)
+	{
+		op << "	for (int i = 0; i < 4; i++)\n";
+		op << "		res += u_arr[i];\n";
+	}
+	else
+	{
+		DE_ASSERT(readAccess == INDEXACCESS_DYNAMIC_LOOP);
+		op << "	for (int i = 0; i < ui_four; i++)\n";
+		op << "		res += u_arr[i];\n";
+	}
+
+	if (isVertexCase)
+	{
+		vtx << "	v_color = vec4(res${PADDING});\n";
+		frag << "	o_color = v_color;\n";
+	}
+	else
+	{
+		vtx << "	v_coords = a_coords;\n";
+		frag << "	o_color = vec4(res${PADDING});\n";
+	}
+
+	vtx << "}\n";
+	frag << "}\n";
+
+	// Fill in shader templates.
+	map<string, string> params;
+	params.insert(pair<string, string>("VAR_TYPE", getDataTypeName(varType)));
+	params.insert(pair<string, string>("ARRAY_LEN", "4"));
+	params.insert(pair<string, string>("PRECISION", "mediump"));
+
+	if (varType == TYPE_FLOAT)
+		params.insert(pair<string, string>("PADDING", ", 0.0, 0.0, 1.0"));
+	else if (varType == TYPE_FLOAT_VEC2)
+		params.insert(pair<string, string>("PADDING", ", 0.0, 1.0"));
+	else if (varType == TYPE_FLOAT_VEC3)
+		params.insert(pair<string, string>("PADDING", ", 1.0"));
+	else
+		params.insert(pair<string, string>("PADDING", ""));
+
+	StringTemplate vertTemplate(vtx.str());
+	StringTemplate fragTemplate(frag.str());
+	string vertexShaderSource = vertTemplate.specialize(params);
+	string fragmentShaderSource = fragTemplate.specialize(params);
+
+	ShaderEvalFunc evalFunc = getArrayUniformEvalFunc(varType);
+	return de::MovePtr<ShaderIndexingCase>(new ShaderIndexingCase(context, caseName, description, isVertexCase, evalFunc, vertexShaderSource, fragmentShaderSource, varType, true));
+}
+
+static de::MovePtr<ShaderIndexingCase> createTmpArrayCase (tcu::TestContext&	context,
+														const std::string&		caseName,
+														const std::string&		description,
+														bool					isVertexCase,
+														DataType				varType,
+														IndexAccessType			writeAccess,
+														IndexAccessType			readAccess)
+{
+	std::ostringstream vtx;
+	std::ostringstream frag;
+	std::ostringstream& op = isVertexCase ? vtx : frag;
+
+	vtx << "#version 310 es\n";
+	frag << "#version 310 es\n";
+
+	vtx << "layout(location = 0) in highp vec4 a_position;\n";
+	vtx << "layout(location = 1) in highp vec4 a_coords;\n";
+	frag << "layout(location = 0) out mediump vec4 o_color;\n";
+
+	if (isVertexCase)
+	{
+		vtx << "layout(location = 0) out mediump vec4 v_color;\n";
+		frag << "layout(location = 0) in mediump vec4 v_color;\n";
+	}
+	else
+	{
+		vtx << "layout(location = 0) out mediump vec4 v_coords;\n";
+		frag << "layout(location = 0) in mediump vec4 v_coords;\n";
+	}
+
+	if (writeAccess == INDEXACCESS_DYNAMIC || readAccess == INDEXACCESS_DYNAMIC)
+	{
+		op << "layout(std140, binding = 0) uniform something0 { mediump int ui_zero; };\n";
+		op << "layout(std140, binding = 1) uniform something1 { mediump int ui_one; };\n";
+		op << "layout(std140, binding = 2) uniform something2 { mediump int ui_two; };\n";
+		op << "layout(std140, binding = 3) uniform something3 { mediump int ui_three; };\n";
+	}
+
+	if (writeAccess == INDEXACCESS_DYNAMIC_LOOP || readAccess == INDEXACCESS_DYNAMIC_LOOP)
+		op << "layout(std140, binding = 4) uniform something4 { mediump int ui_four; };\n";
+
+	vtx << "\n";
+	vtx << "void main()\n";
+	vtx << "{\n";
+	vtx << "	gl_Position = a_position;\n";
+
+	frag << "\n";
+	frag << "void main()\n";
+	frag << "{\n";
+
+	// Write array.
+	if (isVertexCase)
+		op << "	${PRECISION} ${VAR_TYPE} coords = ${VAR_TYPE}(a_coords);\n";
+	else
+		op << "	${PRECISION} ${VAR_TYPE} coords = ${VAR_TYPE}(v_coords);\n";
+
+	op << "	${PRECISION} ${VAR_TYPE} arr[${ARRAY_LEN}];\n";
+	if (writeAccess == INDEXACCESS_STATIC)
+	{
+		op << "	arr[0] = ${VAR_TYPE}(coords);\n";
+		op << "	arr[1] = ${VAR_TYPE}(coords) * 0.5;\n";
+		op << "	arr[2] = ${VAR_TYPE}(coords) * 0.25;\n";
+		op << "	arr[3] = ${VAR_TYPE}(coords) * 0.125;\n";
+	}
+	else if (writeAccess == INDEXACCESS_DYNAMIC)
+	{
+		op << "	arr[ui_zero]  = ${VAR_TYPE}(coords);\n";
+		op << "	arr[ui_one]   = ${VAR_TYPE}(coords) * 0.5;\n";
+		op << "	arr[ui_two]   = ${VAR_TYPE}(coords) * 0.25;\n";
+		op << "	arr[ui_three] = ${VAR_TYPE}(coords) * 0.125;\n";
+	}
+	else if (writeAccess == INDEXACCESS_STATIC_LOOP)
+	{
+		op << "	for (int i = 0; i < 4; i++)\n";
+		op << "	{\n";
+		op << "		arr[i] = ${VAR_TYPE}(coords);\n";
+		op << "		coords = coords * 0.5;\n";
+		op << "	}\n";
+	}
+	else
+	{
+		DE_ASSERT(writeAccess == INDEXACCESS_DYNAMIC_LOOP);
+		op << "	for (int i = 0; i < ui_four; i++)\n";
+		op << "	{\n";
+		op << "		arr[i] = ${VAR_TYPE}(coords);\n";
+		op << "		coords = coords * 0.5;\n";
+		op << "	}\n";
+	}
+
+	// Read array.
+	op << "	${PRECISION} ${VAR_TYPE} res = ${VAR_TYPE}(0.0);\n";
+	if (readAccess == INDEXACCESS_STATIC)
+	{
+		op << "	res += arr[0];\n";
+		op << "	res += arr[1];\n";
+		op << "	res += arr[2];\n";
+		op << "	res += arr[3];\n";
+	}
+	else if (readAccess == INDEXACCESS_DYNAMIC)
+	{
+		op << "	res += arr[ui_zero];\n";
+		op << "	res += arr[ui_one];\n";
+		op << "	res += arr[ui_two];\n";
+		op << "	res += arr[ui_three];\n";
+	}
+	else if (readAccess == INDEXACCESS_STATIC_LOOP)
+	{
+		op << "	for (int i = 0; i < 4; i++)\n";
+		op << "		res += arr[i];\n";
+	}
+	else
+	{
+		DE_ASSERT(readAccess == INDEXACCESS_DYNAMIC_LOOP);
+		op << "	for (int i = 0; i < ui_four; i++)\n";
+		op << "		res += arr[i];\n";
+	}
+
+	if (isVertexCase)
+	{
+		vtx << "	v_color = vec4(res${PADDING});\n";
+		frag << "	o_color = v_color;\n";
+	}
+	else
+	{
+		vtx << "	v_coords = a_coords;\n";
+		frag << "	o_color = vec4(res${PADDING});\n";
+	}
+
+	vtx << "}\n";
+	frag << "}\n";
+
+	// Fill in shader templates.
+	map<string, string> params;
+	params.insert(pair<string, string>("VAR_TYPE", getDataTypeName(varType)));
+	params.insert(pair<string, string>("ARRAY_LEN", "4"));
+	params.insert(pair<string, string>("PRECISION", "mediump"));
+
+	if (varType == TYPE_FLOAT)
+		params.insert(pair<string, string>("PADDING", ", 0.0, 0.0, 1.0"));
+	else if (varType == TYPE_FLOAT_VEC2)
+		params.insert(pair<string, string>("PADDING", ", 0.0, 1.0"));
+	else if (varType == TYPE_FLOAT_VEC3)
+		params.insert(pair<string, string>("PADDING", ", 1.0"));
+	else
+		params.insert(pair<string, string>("PADDING", ""));
+
+	StringTemplate vertTemplate(vtx.str());
+	StringTemplate fragTemplate(frag.str());
+	string vertexShaderSource = vertTemplate.specialize(params);
+	string fragmentShaderSource = fragTemplate.specialize(params);
+
+	ShaderEvalFunc evalFunc = getArrayCoordsEvalFunc(varType);
+	return de::MovePtr<ShaderIndexingCase>(new ShaderIndexingCase(context, caseName, description, isVertexCase, evalFunc, vertexShaderSource, fragmentShaderSource, varType, false));
+}
+
+// VECTOR SUBSCRIPT.
+
+void evalSubscriptVec2 (ShaderEvalContext& c) { c.color.xyz() = Vec3(c.coords.x() + 0.5f*c.coords.y()); }
+void evalSubscriptVec3 (ShaderEvalContext& c) { c.color.xyz() = Vec3(c.coords.x() + 0.5f*c.coords.y() + 0.25f*c.coords.z()); }
+void evalSubscriptVec4 (ShaderEvalContext& c) { c.color.xyz() = Vec3(c.coords.x() + 0.5f*c.coords.y() + 0.25f*c.coords.z() + 0.125f*c.coords.w()); }
+
+static ShaderEvalFunc getVectorSubscriptEvalFunc (DataType dataType)
+{
+	if (dataType == TYPE_FLOAT_VEC2)		return evalSubscriptVec2;
+	else if (dataType == TYPE_FLOAT_VEC3)	return evalSubscriptVec3;
+	else if (dataType == TYPE_FLOAT_VEC4)	return evalSubscriptVec4;
+
+	DE_FATAL("Invalid data type.");
+	return DE_NULL;
+}
+
+static de::MovePtr<ShaderIndexingCase> createVectorSubscriptCase (tcu::TestContext&		context,
+																const std::string&		caseName,
+																const std::string&		description,
+																bool					isVertexCase,
+																DataType				varType,
+																VectorAccessType		writeAccess,
+																VectorAccessType		readAccess)
+{
+	std::ostringstream vtx;
+	std::ostringstream frag;
+	std::ostringstream& op = isVertexCase ? vtx : frag;
+
+	int			vecLen		= getDataTypeScalarSize(varType);
+	const char*	vecLenName	= getIntUniformName(vecLen);
+
+	vtx << "#version 310 es\n";
+	frag << "#version 310 es\n";
+
+	vtx << "layout(location = 0) in highp vec4 a_position;\n";
+	vtx << "layout(location = 1) in highp vec4 a_coords;\n";
+	frag << "layout(location = 0) out mediump vec4 o_color;\n";
+
+	if (isVertexCase)
+	{
+		vtx << "layout(location = 0) out mediump vec3 v_color;\n";
+		frag << "layout(location = 0) in mediump vec3 v_color;\n";
+	}
+	else
+	{
+		vtx << "layout(location = 0) out mediump vec4 v_coords;\n";
+		frag << "layout(location = 0) in mediump vec4 v_coords;\n";
+	}
+
+	if (writeAccess == SUBSCRIPT_DYNAMIC || readAccess == SUBSCRIPT_DYNAMIC)
+	{
+		op << "layout(std140, binding = 0) uniform something0 { mediump int ui_zero; };\n";
+		if (vecLen >= 2) op << "layout(std140, binding = 1) uniform something1 { mediump int ui_one; };\n";
+		if (vecLen >= 3) op << "layout(std140, binding = 2) uniform something2 { mediump int ui_two; };\n";
+		if (vecLen >= 4) op << "layout(std140, binding = 3) uniform something3 { mediump int ui_three; };\n";
+	}
+
+	if (writeAccess == SUBSCRIPT_DYNAMIC_LOOP || readAccess == SUBSCRIPT_DYNAMIC_LOOP)
+		op << "layout(std140, binding = " << vecLen << ") uniform something" << vecLen << " { mediump int " << vecLenName << "; };\n";
+
+	vtx << "\n";
+	vtx << "void main()\n";
+	vtx << "{\n";
+	vtx << "	gl_Position = a_position;\n";
+
+	frag << "\n";
+	frag << "void main()\n";
+	frag << "{\n";
+
+	// Write vector.
+	if (isVertexCase)
+		op << "	${PRECISION} ${VAR_TYPE} coords = ${VAR_TYPE}(a_coords);\n";
+	else
+		op << "	${PRECISION} ${VAR_TYPE} coords = ${VAR_TYPE}(v_coords);\n";
+
+	op << "	${PRECISION} ${VAR_TYPE} tmp;\n";
+	if (writeAccess == DIRECT)
+		op << "	tmp = coords.${SWIZZLE} * vec4(1.0, 0.5, 0.25, 0.125).${SWIZZLE};\n";
+	else if (writeAccess == COMPONENT)
+	{
+		op << "	tmp.x = coords.x;\n";
+		if (vecLen >= 2) op << "	tmp.y = coords.y * 0.5;\n";
+		if (vecLen >= 3) op << "	tmp.z = coords.z * 0.25;\n";
+		if (vecLen >= 4) op << "	tmp.w = coords.w * 0.125;\n";
+	}
+	else if (writeAccess == SUBSCRIPT_STATIC)
+	{
+		op << "	tmp[0] = coords.x;\n";
+		if (vecLen >= 2) op << "	tmp[1] = coords.y * 0.5;\n";
+		if (vecLen >= 3) op << "	tmp[2] = coords.z * 0.25;\n";
+		if (vecLen >= 4) op << "	tmp[3] = coords.w * 0.125;\n";
+	}
+	else if (writeAccess == SUBSCRIPT_DYNAMIC)
+	{
+		op << "	tmp[ui_zero]  = coords.x;\n";
+		if (vecLen >= 2) op << "	tmp[ui_one]   = coords.y * 0.5;\n";
+		if (vecLen >= 3) op << "	tmp[ui_two]   = coords.z * 0.25;\n";
+		if (vecLen >= 4) op << "	tmp[ui_three] = coords.w * 0.125;\n";
+	}
+	else if (writeAccess == SUBSCRIPT_STATIC_LOOP)
+	{
+		op << "	for (int i = 0; i < " << vecLen << "; i++)\n";
+		op << "	{\n";
+		op << "		tmp[i] = coords.x;\n";
+		op << "		coords = coords.${ROT_SWIZZLE} * 0.5;\n";
+		op << "	}\n";
+	}
+	else
+	{
+		DE_ASSERT(writeAccess == SUBSCRIPT_DYNAMIC_LOOP);
+		op << "	for (int i = 0; i < " << vecLenName << "; i++)\n";
+		op << "	{\n";
+		op << "		tmp[i] = coords.x;\n";
+		op << "		coords = coords.${ROT_SWIZZLE} * 0.5;\n";
+		op << "	}\n";
+	}
+
+	// Read vector.
+	op << "	${PRECISION} float res = 0.0;\n";
+	if (readAccess == DIRECT)
+		op << "	res = dot(tmp, ${VAR_TYPE}(1.0));\n";
+	else if (readAccess == COMPONENT)
+	{
+		op << "	res += tmp.x;\n";
+		if (vecLen >= 2) op << "	res += tmp.y;\n";
+		if (vecLen >= 3) op << "	res += tmp.z;\n";
+		if (vecLen >= 4) op << "	res += tmp.w;\n";
+	}
+	else if (readAccess == SUBSCRIPT_STATIC)
+	{
+		op << "	res += tmp[0];\n";
+		if (vecLen >= 2) op << "	res += tmp[1];\n";
+		if (vecLen >= 3) op << "	res += tmp[2];\n";
+		if (vecLen >= 4) op << "	res += tmp[3];\n";
+	}
+	else if (readAccess == SUBSCRIPT_DYNAMIC)
+	{
+		op << "	res += tmp[ui_zero];\n";
+		if (vecLen >= 2) op << "	res += tmp[ui_one];\n";
+		if (vecLen >= 3) op << "	res += tmp[ui_two];\n";
+		if (vecLen >= 4) op << "	res += tmp[ui_three];\n";
+	}
+	else if (readAccess == SUBSCRIPT_STATIC_LOOP)
+	{
+		op << "	for (int i = 0; i < " << vecLen << "; i++)\n";
+		op << "		res += tmp[i];\n";
+	}
+	else
+	{
+		DE_ASSERT(readAccess == SUBSCRIPT_DYNAMIC_LOOP);
+		op << "	for (int i = 0; i < " << vecLenName << "; i++)\n";
+		op << "		res += tmp[i];\n";
+	}
+
+	if (isVertexCase)
+	{
+		vtx << "	v_color = vec3(res);\n";
+		frag << "	o_color = vec4(v_color.rgb, 1.0);\n";
+	}
+	else
+	{
+		vtx << "	v_coords = a_coords;\n";
+		frag << "	o_color = vec4(vec3(res), 1.0);\n";
+	}
+
+	vtx << "}\n";
+	frag << "}\n";
+
+	// Fill in shader templates.
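+	// ${SWIZZLE} selects the first vecLen components of a vec4; ${ROT_SWIZZLE} additionally
+	// rotates them left by one so each loop iteration brings the next component into .x.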
+	static const char* s_swizzles[5]	= { "", "x", "xy", "xyz", "xyzw" };
+	static const char* s_rotSwizzles[5]	= { "", "x", "yx", "yzx", "yzwx" };
+
+	map<string, string> params;
+	params.insert(pair<string, string>("VAR_TYPE", getDataTypeName(varType)));
+	params.insert(pair<string, string>("PRECISION", "mediump"));
+	params.insert(pair<string, string>("SWIZZLE", s_swizzles[vecLen]));
+	params.insert(pair<string, string>("ROT_SWIZZLE", s_rotSwizzles[vecLen]));
+
+	StringTemplate vertTemplate(vtx.str());
+	StringTemplate fragTemplate(frag.str());
+	string vertexShaderSource = vertTemplate.specialize(params);
+	string fragmentShaderSource = fragTemplate.specialize(params);
+
+	ShaderEvalFunc evalFunc = getVectorSubscriptEvalFunc(varType);
+	return de::MovePtr<ShaderIndexingCase>(new ShaderIndexingCase(context, caseName, description, isVertexCase, evalFunc, vertexShaderSource, fragmentShaderSource, varType, false));
+}
+
+// MATRIX SUBSCRIPT.
+
+void evalSubscriptMat2		(ShaderEvalContext& c) { c.color.xy()	= c.coords.swizzle(0,1) + 0.5f*c.coords.swizzle(1,2); }
+void evalSubscriptMat2x3	(ShaderEvalContext& c) { c.color.xyz()	= c.coords.swizzle(0,1,2) + 0.5f*c.coords.swizzle(1,2,3); }
+void evalSubscriptMat2x4	(ShaderEvalContext& c) { c.color		= c.coords.swizzle(0,1,2,3) + 0.5f*c.coords.swizzle(1,2,3,0); }
+
+void evalSubscriptMat3x2	(ShaderEvalContext& c) { c.color.xy()	= c.coords.swizzle(0,1) + 0.5f*c.coords.swizzle(1,2) + 0.25f*c.coords.swizzle(2,3); }
+void evalSubscriptMat3		(ShaderEvalContext& c) { c.color.xyz()	= c.coords.swizzle(0,1,2) + 0.5f*c.coords.swizzle(1,2,3) + 0.25f*c.coords.swizzle(2,3,0); }
+void evalSubscriptMat3x4	(ShaderEvalContext& c) { c.color		= c.coords.swizzle(0,1,2,3) + 0.5f*c.coords.swizzle(1,2,3,0) + 0.25f*c.coords.swizzle(2,3,0,1); }
+
+void evalSubscriptMat4x2	(ShaderEvalContext& c) { c.color.xy()	= c.coords.swizzle(0,1) + 0.5f*c.coords.swizzle(1,2) + 0.25f*c.coords.swizzle(2,3) + 0.125f*c.coords.swizzle(3,0); }
+void evalSubscriptMat4x3	(ShaderEvalContext& c) { c.color.xyz()	= c.coords.swizzle(0,1,2) + 0.5f*c.coords.swizzle(1,2,3) + 0.25f*c.coords.swizzle(2,3,0) + 0.125f*c.coords.swizzle(3,0,1); }
+void evalSubscriptMat4		(ShaderEvalContext& c) { c.color		= c.coords + 0.5f*c.coords.swizzle(1,2,3,0) + 0.25f*c.coords.swizzle(2,3,0,1) + 0.125f*c.coords.swizzle(3,0,1,2); }
+
+static ShaderEvalFunc getMatrixSubscriptEvalFunc (DataType dataType)
+{
+	switch (dataType)
+	{
+		case TYPE_FLOAT_MAT2:		return evalSubscriptMat2;
+		case TYPE_FLOAT_MAT2X3:		return evalSubscriptMat2x3;
+		case TYPE_FLOAT_MAT2X4:		return evalSubscriptMat2x4;
+		case TYPE_FLOAT_MAT3X2:		return evalSubscriptMat3x2;
+		case TYPE_FLOAT_MAT3:		return evalSubscriptMat3;
+		case TYPE_FLOAT_MAT3X4:		return evalSubscriptMat3x4;
+		case TYPE_FLOAT_MAT4X2:		return evalSubscriptMat4x2;
+		case TYPE_FLOAT_MAT4X3:		return evalSubscriptMat4x3;
+		case TYPE_FLOAT_MAT4:		return evalSubscriptMat4;
+
+		default:
+			DE_FATAL("Invalid data type.");
+			return DE_NULL;
+	}
+}
+
+static de::MovePtr<ShaderIndexingCase> createMatrixSubscriptCase (tcu::TestContext&		context,
+																const std::string&		caseName,
+																const std::string&		description,
+																bool					isVertexCase,
+																DataType				varType,
+																IndexAccessType			writeAccess,
+																IndexAccessType			readAccess)
+{
+	std::ostringstream vtx;
+	std::ostringstream frag;
+	std::ostringstream& op = isVertexCase ? vtx : frag;
+
+	int			numCols		= getDataTypeMatrixNumColumns(varType);
+	int			numRows		= getDataTypeMatrixNumRows(varType);
+	const char*	matSizeName	= getIntUniformName(numCols);
+	DataType	vecType		= getDataTypeFloatVec(numRows);
+
+	vtx << "#version 310 es\n";
+	frag << "#version 310 es\n";
+
+	vtx << "layout(location = 0) in highp vec4 a_position;\n";
+	vtx << "layout(location = 1) in highp vec4 a_coords;\n";
+	frag << "layout(location = 0) out mediump vec4 o_color;\n";
+
+	if (isVertexCase)
+	{
+		vtx << "layout(location = 0) out mediump vec4 v_color;\n";
+		frag << "layout(location = 0) in mediump vec4 v_color;\n";
+	}
+	else
+	{
+		vtx << "layout(location = 0) out mediump vec4 v_coords;\n";
+		frag << "layout(location = 0) in mediump vec4 v_coords;\n";
+	}
+
+	if (writeAccess == INDEXACCESS_DYNAMIC || readAccess == INDEXACCESS_DYNAMIC)
+	{
+		op << "layout(std140, binding = 0) uniform something0 { mediump int ui_zero; };\n";
+		if (numCols >= 2) op << "layout(std140, binding = 1) uniform something1 { mediump int ui_one; };\n";
+		if (numCols >= 3) op << "layout(std140, binding = 2) uniform something2 { mediump int ui_two; };\n";
+		if (numCols >= 4) op << "layout(std140, binding = 3) uniform something3 { mediump int ui_three; };\n";
+	}
+
+	if (writeAccess == INDEXACCESS_DYNAMIC_LOOP || readAccess == INDEXACCESS_DYNAMIC_LOOP)
+		op << "layout(std140, binding = " << numCols << ") uniform something" << numCols << " { mediump int " << matSizeName << "; };\n";
+
+	vtx << "\n";
+	vtx << "void main()\n";
+	vtx << "{\n";
+	vtx << "	gl_Position = a_position;\n";
+
+	frag << "\n";
+	frag << "void main()\n";
+	frag << "{\n";
+
+	// Write matrix.
+	if (isVertexCase)
+		op << "	${PRECISION} vec4 coords = a_coords;\n";
+	else
+		op << "	${PRECISION} vec4 coords = v_coords;\n";
+
+	op << "	${PRECISION} ${MAT_TYPE} tmp;\n";
+	if (writeAccess == INDEXACCESS_STATIC)
+	{
+		op << "	tmp[0] = ${VEC_TYPE}(coords);\n";
+		if (numCols >= 2) op << "	tmp[1] = ${VEC_TYPE}(coords.yzwx) * 0.5;\n";
+		if (numCols >= 3) op << "	tmp[2] = ${VEC_TYPE}(coords.zwxy) * 0.25;\n";
+		if (numCols >= 4) op << "	tmp[3] = ${VEC_TYPE}(coords.wxyz) * 0.125;\n";
+	}
+	else if (writeAccess == INDEXACCESS_DYNAMIC)
+	{
+		op << "	tmp[ui_zero]  = ${VEC_TYPE}(coords);\n";
+		if (numCols >= 2) op << "	tmp[ui_one]   = ${VEC_TYPE}(coords.yzwx) * 0.5;\n";
+		if (numCols >= 3) op << "	tmp[ui_two]   = ${VEC_TYPE}(coords.zwxy) * 0.25;\n";
+		if (numCols >= 4) op << "	tmp[ui_three] = ${VEC_TYPE}(coords.wxyz) * 0.125;\n";
+	}
+	else if (writeAccess == INDEXACCESS_STATIC_LOOP)
+	{
+		op << "	for (int i = 0; i < " << numCols << "; i++)\n";
+		op << "	{\n";
+		op << "		tmp[i] = ${VEC_TYPE}(coords);\n";
+		op << "		coords = coords.yzwx * 0.5;\n";
+		op << "	}\n";
+	}
+	else
+	{
+		DE_ASSERT(writeAccess == INDEXACCESS_DYNAMIC_LOOP);
+		op << "	for (int i = 0; i < " << matSizeName << "; i++)\n";
+		op << "	{\n";
+		op << "		tmp[i] = ${VEC_TYPE}(coords);\n";
+		op << "		coords = coords.yzwx * 0.5;\n";
+		op << "	}\n";
+	}
+
+	// Read matrix.
+	op << "	${PRECISION} ${VEC_TYPE} res = ${VEC_TYPE}(0.0);\n";
+	if (readAccess == INDEXACCESS_STATIC)
+	{
+		op << "	res += tmp[0];\n";
+		if (numCols >= 2) op << "	res += tmp[1];\n";
+		if (numCols >= 3) op << "	res += tmp[2];\n";
+		if (numCols >= 4) op << "	res += tmp[3];\n";
+	}
+	else if (readAccess == INDEXACCESS_DYNAMIC)
+	{
+		op << "	res += tmp[ui_zero];\n";
+		if (numCols >= 2) op << "	res += tmp[ui_one];\n";
+		if (numCols >= 3) op << "	res += tmp[ui_two];\n";
+		if (numCols >= 4) op << "	res += tmp[ui_three];\n";
+	}
+	else if (readAccess == INDEXACCESS_STATIC_LOOP)
+	{
+		op << "	for (int i = 0; i < " << numCols << "; i++)\n";
+		op << "		res += tmp[i];\n";
+	}
+	else
+	{
+		DE_ASSERT(readAccess == INDEXACCESS_DYNAMIC_LOOP);
+		op << "	for (int i = 0; i < " << matSizeName << "; i++)\n";
+		op << "		res += tmp[i];\n";
+	}
+
+	if (isVertexCase)
+	{
+		vtx << "	v_color = vec4(res${PADDING});\n";
+		frag << "	o_color = v_color;\n";
+	}
+	else
+	{
+		vtx << "	v_coords = a_coords;\n";
+		frag << "	o_color = vec4(res${PADDING});\n";
+	}
+
+	vtx << "}\n";
+	frag << "}\n";
+
+	// Fill in shader templates.
+	map<string, string> params;
+	params.insert(pair<string, string>("MAT_TYPE", getDataTypeName(varType)));
+	params.insert(pair<string, string>("VEC_TYPE", getDataTypeName(vecType)));
+	params.insert(pair<string, string>("PRECISION", "mediump"));
+
+	if (numRows == 2)
+		params.insert(pair<string, string>("PADDING", ", 0.0, 1.0"));
+	else if (numRows == 3)
+		params.insert(pair<string, string>("PADDING", ", 1.0"));
+	else
+		params.insert(pair<string, string>("PADDING", ""));
+
+	StringTemplate vertTemplate(vtx.str());
+	StringTemplate fragTemplate(frag.str());
+	string vertexShaderSource = vertTemplate.specialize(params);
+	string fragmentShaderSource = fragTemplate.specialize(params);
+
+	ShaderEvalFunc evalFunc = getMatrixSubscriptEvalFunc(varType);
+	return de::MovePtr<ShaderIndexingCase>(new ShaderIndexingCase(context, caseName, description, isVertexCase, evalFunc, vertexShaderSource, fragmentShaderSource, varType, false));
+}
+
+// ShaderIndexingTests.
+
+class ShaderIndexingTests : public tcu::TestCaseGroup
+{
+public:
+							ShaderIndexingTests		(tcu::TestContext& context);
+	virtual					~ShaderIndexingTests	(void);
+
+	virtual void			init					(void);
+
+private:
+							ShaderIndexingTests		(const ShaderIndexingTests&);		// not allowed!
+	ShaderIndexingTests&	operator=				(const ShaderIndexingTests&);		// not allowed!
+};
+
+ShaderIndexingTests::ShaderIndexingTests(tcu::TestContext& context)
+	: TestCaseGroup(context, "indexing", "Indexing Tests")
+{
+}
+
+ShaderIndexingTests::~ShaderIndexingTests (void)
+{
+}
+
+void ShaderIndexingTests::init (void)
+{
+	static const ShaderType s_shaderTypes[] =
+	{
+		SHADERTYPE_VERTEX,
+		SHADERTYPE_FRAGMENT
+	};
+
+	static const DataType s_floatAndVecTypes[] =
+	{
+		TYPE_FLOAT,
+		TYPE_FLOAT_VEC2,
+		TYPE_FLOAT_VEC3,
+		TYPE_FLOAT_VEC4
+	};
+
+	// Varying array access cases.
+	{
+		de::MovePtr<TestCaseGroup> varyingGroup(new TestCaseGroup(m_testCtx, "varying_array", "Varying array access tests."));
+
+		for (int typeNdx = 0; typeNdx < DE_LENGTH_OF_ARRAY(s_floatAndVecTypes); typeNdx++)
+		{
+			DataType varType = s_floatAndVecTypes[typeNdx];
+			for (int vertAccess = 0; vertAccess < INDEXACCESS_LAST; vertAccess++)
+			{
+				for (int fragAccess = 0; fragAccess < INDEXACCESS_LAST; fragAccess++)
+				{
+					const char* vertAccessName = getIndexAccessTypeName((IndexAccessType)vertAccess);
+					const char* fragAccessName = getIndexAccessTypeName((IndexAccessType)fragAccess);
+					string name = string(getDataTypeName(varType)) + "_" + vertAccessName + "_write_" + fragAccessName + "_read";
+					string desc = string("Varying array with ") + vertAccessName + " write in vertex shader and " + fragAccessName + " read in fragment shader.";
+					de::MovePtr<ShaderIndexingCase> testCase(createVaryingArrayCase(m_testCtx, name, desc, varType, (IndexAccessType)vertAccess, (IndexAccessType)fragAccess));
+					varyingGroup->addChild(testCase.release());
+				}
+			}
+		}
+
+		addChild(varyingGroup.release());
+	}
+
+	// Uniform array access cases.
+	{
+		de::MovePtr<TestCaseGroup> uniformGroup(new TestCaseGroup(m_testCtx, "uniform_array", "Uniform array access tests."));
+
+		for (int typeNdx = 0; typeNdx < DE_LENGTH_OF_ARRAY(s_floatAndVecTypes); typeNdx++)
+		{
+			DataType varType = s_floatAndVecTypes[typeNdx];
+			for (int readAccess = 0; readAccess < INDEXACCESS_LAST; readAccess++)
+			{
+				const char* readAccessName = getIndexAccessTypeName((IndexAccessType)readAccess);
+				for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(s_shaderTypes); shaderTypeNdx++)
+				{
+					ShaderType	shaderType		= s_shaderTypes[shaderTypeNdx];
+					const char*	shaderTypeName	= getShaderTypeName(shaderType);
+					string		name			= string(getDataTypeName(varType)) + "_" + readAccessName + "_read_" + shaderTypeName;
+					string		desc			= string("Uniform array with ") + readAccessName + " read in " + shaderTypeName + " shader.";
+					bool		isVertexCase	= ((ShaderType)shaderType == SHADERTYPE_VERTEX);
+					de::MovePtr<ShaderIndexingCase> testCase(createUniformArrayCase(m_testCtx, name, desc, isVertexCase, varType, (IndexAccessType)readAccess));
+					uniformGroup->addChild(testCase.release());
+				}
+			}
+		}
+
+		addChild(uniformGroup.release());
+	}
+
+	// Temporary array access cases.
+	{
+		de::MovePtr<TestCaseGroup> tmpGroup(new TestCaseGroup(m_testCtx, "tmp_array", "Temporary array access tests."));
+
+		for (int typeNdx = 0; typeNdx < DE_LENGTH_OF_ARRAY(s_floatAndVecTypes); typeNdx++)
+		{
+			DataType varType = s_floatAndVecTypes[typeNdx];
+			for (int writeAccess = 0; writeAccess < INDEXACCESS_LAST; writeAccess++)
+			{
+				for (int readAccess = 0; readAccess < INDEXACCESS_LAST; readAccess++)
+				{
+					const char* writeAccessName = getIndexAccessTypeName((IndexAccessType)writeAccess);
+					const char* readAccessName = getIndexAccessTypeName((IndexAccessType)readAccess);
+
+					for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(s_shaderTypes); shaderTypeNdx++)
+					{
+						ShaderType	shaderType		= s_shaderTypes[shaderTypeNdx];
+						const char* shaderTypeName	= getShaderTypeName(shaderType);
+						string		name			= string(getDataTypeName(varType)) + "_" + writeAccessName + "_write_" + readAccessName + "_read_" + shaderTypeName;
+						string		desc			= string("Temporary array with ") + writeAccessName + " write and " + readAccessName + " read in " + shaderTypeName + " shader.";
+						bool		isVertexCase	= ((ShaderType)shaderType == SHADERTYPE_VERTEX);
+						de::MovePtr<ShaderIndexingCase> testCase(createTmpArrayCase(m_testCtx, name, desc, isVertexCase, varType, (IndexAccessType)writeAccess, (IndexAccessType)readAccess));
+						tmpGroup->addChild(testCase.release());
+					}
+				}
+			}
+		}
+
+		addChild(tmpGroup.release());
+	}
+
+	// Vector indexing with subscripts.
+	{
+		de::MovePtr<TestCaseGroup> vecGroup(new TestCaseGroup(m_testCtx, "vector_subscript", "Vector subscript indexing."));
+
+		static const DataType s_vectorTypes[] =
+		{
+			TYPE_FLOAT_VEC2,
+			TYPE_FLOAT_VEC3,
+			TYPE_FLOAT_VEC4
+		};
+
+		for (int typeNdx = 0; typeNdx < DE_LENGTH_OF_ARRAY(s_vectorTypes); typeNdx++)
+		{
+			DataType varType = s_vectorTypes[typeNdx];
+			for (int writeAccess = 0; writeAccess < VECTORACCESS_LAST; writeAccess++)
+			{
+				for (int readAccess = 0; readAccess < VECTORACCESS_LAST; readAccess++)
+				{
+					const char* writeAccessName = getVectorAccessTypeName((VectorAccessType)writeAccess);
+					const char* readAccessName = getVectorAccessTypeName((VectorAccessType)readAccess);
+
+					for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(s_shaderTypes); shaderTypeNdx++)
+					{
+						ShaderType	shaderType		= s_shaderTypes[shaderTypeNdx];
+						const char* shaderTypeName	= getShaderTypeName(shaderType);
+						string		name			= string(getDataTypeName(varType)) + "_" + writeAccessName + "_write_" + readAccessName + "_read_" + shaderTypeName;
+						string		desc			= string("Vector subscript access with ") + writeAccessName + " write and " + readAccessName + " read in " + shaderTypeName + " shader.";
+						bool		isVertexCase	= ((ShaderType)shaderType == SHADERTYPE_VERTEX);
+						de::MovePtr<ShaderIndexingCase> testCase(createVectorSubscriptCase(m_testCtx, name.c_str(), desc.c_str(), isVertexCase, varType, (VectorAccessType)writeAccess, (VectorAccessType)readAccess));
+						vecGroup->addChild(testCase.release());
+					}
+				}
+			}
+		}
+
+		addChild(vecGroup.release());
+	}
+
+	// Matrix indexing with subscripts.
+	{
+		de::MovePtr<TestCaseGroup> matGroup(new TestCaseGroup(m_testCtx, "matrix_subscript", "Matrix subscript indexing."));
+
+		static const DataType s_matrixTypes[] =
+		{
+			TYPE_FLOAT_MAT2,
+			TYPE_FLOAT_MAT2X3,
+			TYPE_FLOAT_MAT2X4,
+			TYPE_FLOAT_MAT3X2,
+			TYPE_FLOAT_MAT3,
+			TYPE_FLOAT_MAT3X4,
+			TYPE_FLOAT_MAT4X2,
+			TYPE_FLOAT_MAT4X3,
+			TYPE_FLOAT_MAT4
+		};
+
+		for (int typeNdx = 0; typeNdx < DE_LENGTH_OF_ARRAY(s_matrixTypes); typeNdx++)
+		{
+			DataType varType = s_matrixTypes[typeNdx];
+			for (int writeAccess = 0; writeAccess < INDEXACCESS_LAST; writeAccess++)
+			{
+				for (int readAccess = 0; readAccess < INDEXACCESS_LAST; readAccess++)
+				{
+					const char* writeAccessName = getIndexAccessTypeName((IndexAccessType)writeAccess);
+					const char* readAccessName = getIndexAccessTypeName((IndexAccessType)readAccess);
+
+					for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(s_shaderTypes); shaderTypeNdx++)
+					{
+						ShaderType	shaderType		= s_shaderTypes[shaderTypeNdx];
+						const char* shaderTypeName	= getShaderTypeName(shaderType);
+						string		name			= string(getDataTypeName(varType)) + "_" + writeAccessName + "_write_" + readAccessName + "_read_" + shaderTypeName;
+						string		desc			= string("Matrix subscript access with ") + writeAccessName + " write and " + readAccessName + " read in " + shaderTypeName + " shader.";
+						bool		isVertexCase	= ((ShaderType)shaderType == SHADERTYPE_VERTEX);
+						de::MovePtr<ShaderIndexingCase> testCase(createMatrixSubscriptCase(m_testCtx, name.c_str(), desc.c_str(), isVertexCase, varType, (IndexAccessType)writeAccess, (IndexAccessType)readAccess));
+						matGroup->addChild(testCase.release());
+					}
+				}
+			}
+		}
+
+		addChild(matGroup.release());
+	}
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createIndexingTests (tcu::TestContext& testCtx)
+{
+	return new ShaderIndexingTests(testCtx);
+}
+
+} // sr
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderIndexingTests.hpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderIndexingTests.hpp
new file mode 100644
index 0000000..04c6319
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderIndexingTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTSHADERRENDERINDEXINGTESTS_HPP
+#define _VKTSHADERRENDERINDEXINGTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader indexing (arrays, vector, matrices) tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace sr
+{
+
+tcu::TestCaseGroup*	createIndexingTests	(tcu::TestContext& testCtx);
+
+} // sr
+} // vkt
+
+#endif // _VKTSHADERRENDERINDEXINGTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderLoopTests.cpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderLoopTests.cpp
new file mode 100644
index 0000000..fdb52e0
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderLoopTests.cpp
@@ -0,0 +1,1380 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader loop tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderRenderLoopTests.hpp"
+
+#include "vktShaderRender.hpp"
+#include "tcuStringTemplate.hpp"
+#include "gluShaderUtil.hpp"
+#include "deStringUtil.hpp"
+
+#include <map>
+
+namespace vkt
+{
+namespace sr
+{
+namespace
+{
+
+static const char* getIntUniformName (int number)
+{
+	switch (number)
+	{
+		case 0:		return "ui_zero";
+		case 1:		return "ui_one";
+		case 2:		return "ui_two";
+		case 3:		return "ui_three";
+		case 4:		return "ui_four";
+		case 5:		return "ui_five";
+		case 6:		return "ui_six";
+		case 7:		return "ui_seven";
+		case 8:		return "ui_eight";
+		case 101:	return "ui_oneHundredOne";
+		default:
+			DE_ASSERT(false);
+			return "";
+	}
+}
+
+static BaseUniformType getIntUniformType(int number)
+{
+	switch (number)
+	{
+		case 1:		return UI_ONE;
+		case 2:		return UI_TWO;
+		case 3:		return UI_THREE;
+		case 4:		return UI_FOUR;
+		case 5:		return UI_FIVE;
+		case 6:		return UI_SIX;
+		case 7:		return UI_SEVEN;
+		case 8:		return UI_EIGHT;
+		default:
+			DE_ASSERT(false);
+			return UB_FALSE;
+	}
+}
+
+static const char* getFloatFractionUniformName (int number)
+{
+	switch (number)
+	{
+		case 1: return "uf_one";
+		case 2: return "uf_half";
+		case 3: return "uf_third";
+		case 4: return "uf_fourth";
+		case 5: return "uf_fifth";
+		case 6: return "uf_sixth";
+		case 7: return "uf_seventh";
+		case 8: return "uf_eighth";
+		default:
+			DE_ASSERT(false);
+			return "";
+	}
+}
+
+static BaseUniformType getFloatFractionUniformType(int number)
+{
+	switch (number)
+	{
+		case 1:		return UF_ONE;
+		case 2:		return UF_HALF;
+		case 3:		return UF_THIRD;
+		case 4:		return UF_FOURTH;
+		case 5:		return UF_FIFTH;
+		case 6:		return UF_SIXTH;
+		case 7:		return UF_SEVENTH;
+		case 8:		return UF_EIGHTH;
+		default:
+			DE_ASSERT(false);
+			return UB_FALSE;
+	}
+}
+
+enum LoopType
+{
+	LOOPTYPE_FOR = 0,
+	LOOPTYPE_WHILE,
+	LOOPTYPE_DO_WHILE,
+	LOOPTYPE_LAST
+};
+
+static const char* getLoopTypeName (LoopType loopType)
+{
+	static const char* s_names[] =
+	{
+		"for",
+		"while",
+		"do_while"
+	};
+
+	DE_STATIC_ASSERT(DE_LENGTH_OF_ARRAY(s_names) == LOOPTYPE_LAST);
+	DE_ASSERT(deInBounds32((int)loopType, 0, LOOPTYPE_LAST));
+	return s_names[(int)loopType];
+}
+
+enum LoopCountType
+{
+	LOOPCOUNT_CONSTANT = 0,
+	LOOPCOUNT_UNIFORM,
+	LOOPCOUNT_DYNAMIC,
+
+	LOOPCOUNT_LAST
+};
+
+// Repeated with for, while, do-while. Examples given as 'for' loops.
+// Repeated for const, uniform, dynamic loops.
+enum LoopCase
+{
+		LOOPCASE_EMPTY_BODY = 0,							// for (...) { }
+		LOOPCASE_INFINITE_WITH_UNCONDITIONAL_BREAK_FIRST,	// for (...) { break; <body>; }
+		LOOPCASE_INFINITE_WITH_UNCONDITIONAL_BREAK_LAST,	// for (...) { <body>; break; }
+		LOOPCASE_INFINITE_WITH_CONDITIONAL_BREAK,			// for (...) { <body>; if (cond) break; }
+		LOOPCASE_SINGLE_STATEMENT,							// for (...) statement;
+		LOOPCASE_COMPOUND_STATEMENT,						// for (...) { statement; statement; }
+		LOOPCASE_SEQUENCE_STATEMENT,						// for (...) statement, statement;
+		LOOPCASE_NO_ITERATIONS,								// for (i=0; i<0; i++) ...
+		LOOPCASE_SINGLE_ITERATION,							// for (i=0; i<1; i++) ...
+		LOOPCASE_SELECT_ITERATION_COUNT,					// for (i=0; i<a?b:c; i++) ...
+		LOOPCASE_CONDITIONAL_CONTINUE,						// for (...) { if (cond) continue; }
+		LOOPCASE_UNCONDITIONAL_CONTINUE,					// for (...) { <body>; continue; }
+		LOOPCASE_ONLY_CONTINUE,								// for (...) { continue; }
+		LOOPCASE_DOUBLE_CONTINUE,							// for (...) { if (cond) continue; <body>; continue; }
+		LOOPCASE_CONDITIONAL_BREAK,							// for (...) { if (cond) break; }
+		LOOPCASE_UNCONDITIONAL_BREAK,						// for (...) { <body>; break; }
+		LOOPCASE_PRE_INCREMENT,								// for (...; ++i) { <body>; }
+		LOOPCASE_POST_INCREMENT,							// for (...; i++) { <body>; }
+		LOOPCASE_MIXED_BREAK_CONTINUE,
+		LOOPCASE_VECTOR_COUNTER,							// for (ivec3 ndx = ...; ndx.x < ndx.y; ndx.x += ndx.z) { ... }
+		LOOPCASE_101_ITERATIONS,							// loop for 101 iterations
+		LOOPCASE_SEQUENCE,									// two loops in sequence
+		LOOPCASE_NESTED,									// two nested loops
+		LOOPCASE_NESTED_SEQUENCE,							// two loops in sequence nested inside a third
+		LOOPCASE_NESTED_TRICKY_DATAFLOW_1,					// nested loops with tricky data flow
+		LOOPCASE_NESTED_TRICKY_DATAFLOW_2,					// nested loops with tricky data flow
+
+		//LOOPCASE_MULTI_DECLARATION,						// for (int i,j,k; ...) ...  -- illegal?
+
+		LOOPCASE_LAST
+};
+
+static const char* getLoopCaseName (LoopCase loopCase)
+{
+		static const char* s_names[] =
+		{
+				"empty_body",
+				"infinite_with_unconditional_break_first",
+				"infinite_with_unconditional_break_last",
+				"infinite_with_conditional_break",
+				"single_statement",
+				"compound_statement",
+				"sequence_statement",
+				"no_iterations",
+				"single_iteration",
+				"select_iteration_count",
+				"conditional_continue",
+				"unconditional_continue",
+				"only_continue",
+				"double_continue",
+				"conditional_break",
+				"unconditional_break",
+				"pre_increment",
+				"post_increment",
+				"mixed_break_continue",
+				"vector_counter",
+				"101_iterations",
+				"sequence",
+				"nested",
+				"nested_sequence",
+				"nested_tricky_dataflow_1",
+				"nested_tricky_dataflow_2"
+				//"multi_declaration",
+		};
+
+		DE_STATIC_ASSERT(DE_LENGTH_OF_ARRAY(s_names) == LOOPCASE_LAST);
+		DE_ASSERT(deInBounds32((int)loopCase, 0, LOOPCASE_LAST));
+		return s_names[(int)loopCase];
+}
+
+static const char* getLoopCountTypeName (LoopCountType countType)
+{
+	static const char* s_names[] =
+	{
+		"constant",
+		"uniform",
+		"dynamic"
+	};
+
+	DE_STATIC_ASSERT(DE_LENGTH_OF_ARRAY(s_names) == LOOPCOUNT_LAST);
+	DE_ASSERT(deInBounds32((int)countType, 0, LOOPCOUNT_LAST));
+	return s_names[(int)countType];
+}
+
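+// Each loop iteration rotates the coordinate vector (res = res.yzwx), so the expected
+// color depends only on the iteration count modulo 4.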
+static void evalLoop0Iters	(ShaderEvalContext& c) { c.color.xyz()	= c.coords.swizzle(0,1,2); }
+static void evalLoop1Iters	(ShaderEvalContext& c) { c.color.xyz()	= c.coords.swizzle(1,2,3); }
+static void evalLoop2Iters	(ShaderEvalContext& c) { c.color.xyz()	= c.coords.swizzle(2,3,0); }
+static void evalLoop3Iters	(ShaderEvalContext& c) { c.color.xyz()	= c.coords.swizzle(3,0,1); }
+
+static ShaderEvalFunc getLoopEvalFunc (int numIters)
+{
+	switch (numIters % 4)
+	{
+		case 0: return evalLoop0Iters;
+		case 1:	return evalLoop1Iters;
+		case 2:	return evalLoop2Iters;
+		case 3:	return evalLoop3Iters;
+	}
+
+	DE_FATAL("Invalid loop iteration count.");
+	return DE_NULL;
+}
+
+// ShaderLoop case
+
+class ShaderLoopCase : public ShaderRenderCase
+{
+public:
+	ShaderLoopCase	(tcu::TestContext&	testCtx,
+					 const std::string&	name,
+					 const std::string&	description,
+					 bool				isVertexCase,
+					 ShaderEvalFunc		evalFunc,
+					 UniformSetup*		uniformSetup,
+					 const std::string&	vertexShaderSource,
+					 const std::string&	fragmentShaderSource)
+		: ShaderRenderCase		(testCtx, name, description, isVertexCase, evalFunc, uniformSetup, DE_NULL)
+	{
+		m_vertShaderSource = vertexShaderSource;
+		m_fragShaderSource = fragmentShaderSource;
+	}
+};
+
+// Uniform setup tools
+
+class LoopUniformSetup : public UniformSetup
+{
+public:
+									LoopUniformSetup	(std::vector<BaseUniformType>& types)
+										: m_uniformInformations(types)
+									{}
+
+	virtual void					setup				(ShaderRenderCaseInstance& instance, const tcu::Vec4& constCoords) const;
+
+private:
+	std::vector<BaseUniformType>	m_uniformInformations;
+};
+
+void LoopUniformSetup::setup (ShaderRenderCaseInstance& instance, const tcu::Vec4&) const
+{
+	for (size_t i = 0; i < m_uniformInformations.size(); i++)
+	{
+		instance.useUniform((deUint32)i, m_uniformInformations[i]);
+	}
+}
+
+// Testcase builders
+
+static de::MovePtr<ShaderLoopCase> createGenericLoopCase (tcu::TestContext&	testCtx,
+														const std::string&	caseName,
+														const std::string&	description,
+														bool				isVertexCase,
+														LoopType			loopType,
+														LoopCountType		loopCountType,
+														glu::Precision		loopCountPrecision,
+														glu::DataType		loopCountDataType)
+{
+	std::ostringstream vtx;
+	std::ostringstream frag;
+	std::ostringstream& op = isVertexCase ? vtx : frag;
+
+	vtx << "#version 310 es\n";
+	frag << "#version 310 es\n";
+
+	vtx << "layout(location=0) in highp vec4 a_position;\n";
+	vtx << "layout(location=1) in highp vec4 a_coords;\n";
+	frag << "layout(location=0) out mediump vec4 o_color;\n";
+
+	if (loopCountType == LOOPCOUNT_DYNAMIC)
+		vtx << "layout(location=3) in mediump float a_one;\n";
+
+	if (isVertexCase)
+	{
+		vtx << "layout(location=0) out mediump vec3 v_color;\n";
+		frag << "layout(location=0) in mediump vec3 v_color;\n";
+	}
+	else
+	{
+		vtx << "layout(location=0) out mediump vec4 v_coords;\n";
+		frag << "layout(location=0) in mediump vec4 v_coords;\n";
+
+		if (loopCountType == LOOPCOUNT_DYNAMIC)
+		{
+			vtx << "layout(location=1) out mediump float v_one;\n";
+			frag << "layout(location=1) in mediump float v_one;\n";
+		}
+	}
+
+	const int	numLoopIters = 3;
+	const bool	isIntCounter = isDataTypeIntOrIVec(loopCountDataType);
+	deUint32	locationCounter = 0;
+	std::vector<BaseUniformType> uniformInformations;
+
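+	// Loop-bound uniforms are emitted as single-member std140 blocks; LoopUniformSetup later
+	// binds values in the same order they are pushed to uniformInformations.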
+	if (isIntCounter)
+	{
+		if (loopCountType == LOOPCOUNT_UNIFORM || loopCountType == LOOPCOUNT_DYNAMIC)
+		{
+			op << "layout(std140, set=0, binding=" << locationCounter << ") uniform buff" << locationCounter << " {\n";
+			op << "	${COUNTER_PRECISION} int " << getIntUniformName(numLoopIters) << ";\n";
+			op << "};\n";
+			uniformInformations.push_back(getIntUniformType(numLoopIters));
+			locationCounter++;
+		}
+	}
+	else
+	{
+		if (loopCountType == LOOPCOUNT_UNIFORM || loopCountType == LOOPCOUNT_DYNAMIC)
+		{
+			op << "layout(std140, set=0, binding=" << locationCounter << ") uniform buff" << locationCounter << " {\n";
+			op << "	${COUNTER_PRECISION} float " << getFloatFractionUniformName(numLoopIters) << ";\n";
+			op << "};\n";
+			uniformInformations.push_back(getFloatFractionUniformType(numLoopIters));
+			locationCounter++;
+		}
+
+		if (numLoopIters != 1)
+		{
+			op << "layout(std140, set=0, binding=" << locationCounter << ") uniform buff" << locationCounter << " {\n";
+			op << "	${COUNTER_PRECISION} float uf_one;\n";
+			op << "};\n";
+			uniformInformations.push_back(UF_ONE);
+			locationCounter++;
+		}
+	}
+
+	vtx << "\n";
+	vtx << "void main()\n";
+	vtx << "{\n";
+	vtx << "	gl_Position = a_position;\n";
+
+	frag << "\n";
+	frag << "void main()\n";
+	frag << "{\n";
+
+	if (isVertexCase)
+		vtx << "	${PRECISION} vec4 coords = a_coords;\n";
+	else
+		frag << "	${PRECISION} vec4 coords = v_coords;\n";
+
+	if (loopCountType == LOOPCOUNT_DYNAMIC)
+	{
+		if (isIntCounter)
+		{
+			if (isVertexCase)
+				vtx << "	${COUNTER_PRECISION} int one = int(a_one + 0.5);\n";
+			else
+				frag << "	${COUNTER_PRECISION} int one = int(v_one + 0.5);\n";
+		}
+		else
+		{
+			if (isVertexCase)
+				vtx << "	${COUNTER_PRECISION} float one = a_one;\n";
+			else
+				frag << "	${COUNTER_PRECISION} float one = v_one;\n";
+		}
+	}
+
+	// Initialize the result from the input coordinates.
+	op << "	${PRECISION} vec4 res = coords;\n";
+
+	// Loop iteration count.
+	std::string	iterMaxStr;
+
+	if (isIntCounter)
+	{
+		if (loopCountType == LOOPCOUNT_CONSTANT)
+			iterMaxStr = de::toString(numLoopIters);
+		else if (loopCountType == LOOPCOUNT_UNIFORM)
+			iterMaxStr = getIntUniformName(numLoopIters);
+		else if (loopCountType == LOOPCOUNT_DYNAMIC)
+			iterMaxStr = std::string(getIntUniformName(numLoopIters)) + "*one";
+		else
+			DE_ASSERT(false);
+	}
+	else
+	{
+		if (loopCountType == LOOPCOUNT_CONSTANT)
+			iterMaxStr = "1.0";
+		else if (loopCountType == LOOPCOUNT_UNIFORM)
+			iterMaxStr = "uf_one";
+		else if (loopCountType == LOOPCOUNT_DYNAMIC)
+			iterMaxStr = "uf_one*one";
+		else
+			DE_ASSERT(false);
+	}
+
+	// Loop operations.
+	std::string initValue			= isIntCounter ? "0" : "0.05";
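+	// Float counters start slightly above zero (0.05) and advance by 1/numLoopIters, which
+	// keeps the loop comparison away from the exact bound.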
+	std::string loopCountDeclStr	= "${COUNTER_PRECISION} ${LOOP_VAR_TYPE} ndx = " + initValue;
+	std::string loopCmpStr			= ("ndx < " + iterMaxStr);
+	std::string incrementStr;
+	if (isIntCounter)
+		incrementStr = "ndx++";
+	else
+	{
+		if (loopCountType == LOOPCOUNT_CONSTANT)
+			incrementStr = std::string("ndx += ") + de::toString(1.0f / (float)numLoopIters);
+		else if (loopCountType == LOOPCOUNT_UNIFORM)
+			incrementStr = std::string("ndx += ") + getFloatFractionUniformName(numLoopIters);
+		else if (loopCountType == LOOPCOUNT_DYNAMIC)
+			incrementStr = std::string("ndx += ") + getFloatFractionUniformName(numLoopIters) + "*one";
+		else
+			DE_ASSERT(false);
+	}
+
+	// Loop body.
+	std::string loopBody;
+
+	loopBody = "		res = res.yzwx;\n";
+
+	if (loopType == LOOPTYPE_FOR)
+	{
+		op << "	for (" + loopCountDeclStr + "; " + loopCmpStr + "; " + incrementStr + ")\n";
+		op << "	{\n";
+		op << loopBody;
+		op << "	}\n";
+	}
+	else if (loopType == LOOPTYPE_WHILE)
+	{
+		op << "\t" << loopCountDeclStr + ";\n";
+		op << "	while (" + loopCmpStr + ")\n";
+		op << "	{\n";
+		op << loopBody;
+		op << "\t\t" + incrementStr + ";\n";
+		op << "	}\n";
+	}
+	else if (loopType == LOOPTYPE_DO_WHILE)
+	{
+		op << "\t" << loopCountDeclStr + ";\n";
+		op << "	do\n";
+		op << "	{\n";
+		op << loopBody;
+		op << "\t\t" + incrementStr + ";\n";
+		op << "	} while (" + loopCmpStr + ");\n";
+	}
+	else
+		DE_ASSERT(false);
+
+	if (isVertexCase)
+	{
+		vtx << "	v_color = res.rgb;\n";
+		frag << "	o_color = vec4(v_color.rgb, 1.0);\n";
+	}
+	else
+	{
+		vtx << "	v_coords = a_coords;\n";
+		frag << "	o_color = vec4(res.rgb, 1.0);\n";
+
+		if (loopCountType == LOOPCOUNT_DYNAMIC)
+			vtx << "	v_one = a_one;\n";
+	}
+
+	vtx << "}\n";
+	frag << "}\n";
+
+	// Fill in shader templates.
+	std::map<std::string, std::string> params;
+	params.insert(std::pair<std::string, std::string>("LOOP_VAR_TYPE", getDataTypeName(loopCountDataType)));
+	params.insert(std::pair<std::string, std::string>("PRECISION", "mediump"));
+	params.insert(std::pair<std::string, std::string>("COUNTER_PRECISION", getPrecisionName(loopCountPrecision)));
+
+	tcu::StringTemplate vertTemplate(vtx.str());
+	tcu::StringTemplate fragTemplate(frag.str());
+	std::string vertexShaderSource = vertTemplate.specialize(params);
+	std::string fragmentShaderSource = fragTemplate.specialize(params);
+
+	// Create the case.
+	ShaderEvalFunc evalFunc = getLoopEvalFunc(numLoopIters);
+	UniformSetup* uniformSetup = new LoopUniformSetup(uniformInformations);
+	return de::MovePtr<ShaderLoopCase>(new ShaderLoopCase(testCtx, caseName, description, isVertexCase, evalFunc, uniformSetup, vertexShaderSource, fragmentShaderSource));
+}
+
+static de::MovePtr<ShaderLoopCase> createSpecialLoopCase (tcu::TestContext&	testCtx,
+														const std::string&	caseName,
+														const std::string&	description,
+														bool				isVertexCase,
+														LoopCase			loopCase,
+														LoopType			loopType,
+														LoopCountType		loopCountType)
+{
+	std::ostringstream vtx;
+	std::ostringstream frag;
+	std::ostringstream& op = isVertexCase ? vtx : frag;
+
+	std::vector<BaseUniformType>	uniformInformations;
+	deUint32						locationCounter = 0;
+
+	vtx << "#version 310 es\n";
+	frag << "#version 310 es\n";
+
+	vtx << "layout(location=0) in highp vec4 a_position;\n";
+	vtx << "layout(location=1) in highp vec4 a_coords;\n";
+	frag << "layout(location=0) out mediump vec4 o_color;\n";
+
+	if (loopCountType == LOOPCOUNT_DYNAMIC)
+		vtx << "layout(location=3) in mediump float a_one;\n";
+
+	if (isVertexCase)
+	{
+		vtx << "layout(location=0) out mediump vec3 v_color;\n";
+		frag << "layout(location=0) in mediump vec3 v_color;\n";
+	}
+	else
+	{
+		vtx << "layout(location=0) out mediump vec4 v_coords;\n";
+		frag << "layout(location=0) in mediump vec4 v_coords;\n";
+
+		if (loopCountType == LOOPCOUNT_DYNAMIC)
+		{
+			vtx << "layout(location=1) out mediump float v_one;\n";
+			frag << "layout(location=1) in mediump float v_one;\n";
+		}
+	}
+
+	if (loopCase == LOOPCASE_SELECT_ITERATION_COUNT)
+	{
+		op << "layout(std140, set=0, binding=" << locationCounter << ") uniform buff" << locationCounter << " {\n";
+		op << "  bool ub_true;\n";
+		op << "};\n";
+		uniformInformations.push_back(UB_TRUE);
+		locationCounter++;
+	}
+
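+	// Declare the small int uniforms up front; each loop case references only the subset it needs.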
+	struct
+	{
+		char const*		name;
+		BaseUniformType	type;
+	} uniforms[] =
+	{
+		{ "ui_zero",	UI_ZERO },
+		{ "ui_one",		UI_ONE },
+		{ "ui_two",		UI_TWO },
+		{ "ui_three",	UI_THREE },
+		{ "ui_four",	UI_FOUR },
+		{ "ui_five",	UI_FIVE },
+		{ "ui_six",		UI_SIX  },
+	};
+
+	for (int i = 0; i < DE_LENGTH_OF_ARRAY(uniforms); ++i)
+	{
+		op << "layout(std140, set=0, binding=" << locationCounter << ") uniform buff" << locationCounter << " {\n";
+		op << "  ${COUNTER_PRECISION} int " << uniforms[i].name << ";\n";
+		op << "};\n";
+		uniformInformations.push_back(uniforms[i].type);
+		locationCounter++;
+	}
+
+	if (loopCase == LOOPCASE_101_ITERATIONS)
+	{
+		op << "layout(std140, set=0, binding=" << locationCounter << ") uniform buff" << locationCounter << " {\n";
+		op << "  ${COUNTER_PRECISION} int ui_oneHundredOne;\n";
+		op << "};\n";
+		uniformInformations.push_back(UI_ONEHUNDREDONE);
+		locationCounter++;
+	}
+
+	int iterCount	= 3;	// value to use in loop
+	int numIters	= 3;	// actual number of iterations
+
+	vtx << "\n";
+	vtx << "void main()\n";
+	vtx << "{\n";
+	vtx << "	gl_Position = a_position;\n";
+
+	frag << "\n";
+	frag << "void main()\n";
+	frag << "{\n";
+
+	if (loopCountType == LOOPCOUNT_DYNAMIC)
+	{
+		if (isVertexCase)
+			vtx << "	${COUNTER_PRECISION} int one = int(a_one + 0.5);\n";
+		else
+			frag << "	${COUNTER_PRECISION} int one = int(v_one + 0.5);\n";
+	}
+
+	if (isVertexCase)
+		vtx << "	${PRECISION} vec4 coords = a_coords;\n";
+	else
+		frag << "	${PRECISION} vec4 coords = v_coords;\n";
+
+	// Initialize the result from the input coordinates.
+	op << "	${PRECISION} vec4 res = coords;\n";
+
+	// Handle all loop types.
+	std::string counterPrecisionStr = "mediump";
+	std::string forLoopStr;
+	std::string whileLoopStr;
+	std::string doWhileLoopPreStr;
+	std::string doWhileLoopPostStr;
+
+	if (loopType == LOOPTYPE_FOR)
+	{
+		switch (loopCase)
+		{
+			case LOOPCASE_EMPTY_BODY:
+				numIters = 0;
+				op << "	${FOR_LOOP} {}\n";
+				break;
+
+			case LOOPCASE_INFINITE_WITH_UNCONDITIONAL_BREAK_FIRST:
+				numIters = 0;
+				op << "	for (;;) { break; res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_INFINITE_WITH_UNCONDITIONAL_BREAK_LAST:
+				numIters = 1;
+				op << "	for (;;) { res = res.yzwx; break; }\n";
+				break;
+
+			case LOOPCASE_INFINITE_WITH_CONDITIONAL_BREAK:
+				numIters = 2;
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	for (;;) { res = res.yzwx; if (i == ${ONE}) break; i++; }\n";
+				break;
+
+			case LOOPCASE_SINGLE_STATEMENT:
+				op << "	${FOR_LOOP} res = res.yzwx;\n";
+				break;
+
+			case LOOPCASE_COMPOUND_STATEMENT:
+				iterCount	= 2;
+				numIters	= 2 * iterCount;
+				op << "	${FOR_LOOP} { res = res.yzwx; res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_SEQUENCE_STATEMENT:
+				iterCount	= 2;
+				numIters	= 2 * iterCount;
+				op << "	${FOR_LOOP} res = res.yzwx, res = res.yzwx;\n";
+				break;
+
+			case LOOPCASE_NO_ITERATIONS:
+				iterCount	= 0;
+				numIters	= 0;
+				op << "	${FOR_LOOP} res = res.yzwx;\n";
+				break;
+
+			case LOOPCASE_SINGLE_ITERATION:
+				iterCount	= 1;
+				numIters	= 1;
+				op << "	${FOR_LOOP} res = res.yzwx;\n";
+				break;
+
+			case LOOPCASE_SELECT_ITERATION_COUNT:
+				op << "	for (int i = 0; i < (ub_true ? ${ITER_COUNT} : 0); i++) res = res.yzwx;\n";
+				break;
+
+			case LOOPCASE_CONDITIONAL_CONTINUE:
+				numIters = iterCount - 1;
+				op << "	${FOR_LOOP} { if (i == ${TWO}) continue; res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_UNCONDITIONAL_CONTINUE:
+				op << "	${FOR_LOOP} { res = res.yzwx; continue; }\n";
+				break;
+
+			case LOOPCASE_ONLY_CONTINUE:
+				numIters = 0;
+				op << "	${FOR_LOOP} { continue; }\n";
+				break;
+
+			case LOOPCASE_DOUBLE_CONTINUE:
+				numIters = iterCount - 1;
+				op << "	${FOR_LOOP} { if (i == ${TWO}) continue; res = res.yzwx; continue; }\n";
+				break;
+
+			case LOOPCASE_CONDITIONAL_BREAK:
+				numIters = 2;
+				op << "	${FOR_LOOP} { if (i == ${TWO}) break; res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_UNCONDITIONAL_BREAK:
+				numIters = 1;
+				op << "	${FOR_LOOP} { res = res.yzwx; break; }\n";
+				break;
+
+			case LOOPCASE_PRE_INCREMENT:
+				op << "	for (int i = 0; i < ${ITER_COUNT}; ++i) { res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_POST_INCREMENT:
+				op << "	${FOR_LOOP} { res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_MIXED_BREAK_CONTINUE:
+				numIters	= 2;
+				iterCount	= 5;
+				op << "	${FOR_LOOP} { if (i == 0) continue; else if (i == 3) break; res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_VECTOR_COUNTER:
+				op << "	for (${COUNTER_PRECISION} ivec4 i = ivec4(0, 1, ${ITER_COUNT}, 0); i.x < i.z; i.x += i.y) { res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_101_ITERATIONS:
+				numIters = iterCount = 101;
+				op << "	${FOR_LOOP} res = res.yzwx;\n";
+				break;
+
+			case LOOPCASE_SEQUENCE:
+				iterCount	= 5;
+				numIters	= 5;
+				op << "	${COUNTER_PRECISION} int i;\n";
+				op << "	for (i = 0; i < ${TWO}; i++) { res = res.yzwx; }\n";
+				op << "	for (; i < ${ITER_COUNT}; i++) { res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_NESTED:
+				numIters = 2 * iterCount;
+				op << "	for (${COUNTER_PRECISION} int i = 0; i < ${TWO}; i++)\n";
+				op << "	{\n";
+				op << "		for (${COUNTER_PRECISION} int j = 0; j < ${ITER_COUNT}; j++)\n";
+				op << "			res = res.yzwx;\n";
+				op << "	}\n";
+				break;
+
+			case LOOPCASE_NESTED_SEQUENCE:
+				numIters = 3 * iterCount;
+				op << "	for (${COUNTER_PRECISION} int i = 0; i < ${ITER_COUNT}; i++)\n";
+				op << "	{\n";
+				op << "		for (${COUNTER_PRECISION} int j = 0; j < ${TWO}; j++)\n";
+				op << "			res = res.yzwx;\n";
+				op << "		for (${COUNTER_PRECISION} int j = 0; j < ${ONE}; j++)\n";
+				op << "			res = res.yzwx;\n";
+				op << "	}\n";
+				break;
+
+			case LOOPCASE_NESTED_TRICKY_DATAFLOW_1:
+				numIters = 2;
+				op << "	${FOR_LOOP}\n";
+				op << "	{\n";
+				op << "		res = coords; // ignore outer loop effect \n";
+				op << "		for (${COUNTER_PRECISION} int j = 0; j < ${TWO}; j++)\n";
+				op << "			res = res.yzwx;\n";
+				op << "	}\n";
+				break;
+
+			case LOOPCASE_NESTED_TRICKY_DATAFLOW_2:
+				numIters = iterCount;
+				op << "	${FOR_LOOP}\n";
+				op << "	{\n";
+				op << "		res = coords.wxyz;\n";
+				op << "		for (${COUNTER_PRECISION} int j = 0; j < ${TWO}; j++)\n";
+				op << "			res = res.yzwx;\n";
+				op << "		coords = res;\n";
+				op << "	}\n";
+				break;
+
+			default:
+				DE_ASSERT(false);
+		}
+
+		if (loopCountType == LOOPCOUNT_CONSTANT)
+			forLoopStr = std::string("for (") + counterPrecisionStr + " int i = 0; i < " + de::toString(iterCount) + "; i++)";
+		else if (loopCountType == LOOPCOUNT_UNIFORM)
+			forLoopStr = std::string("for (") + counterPrecisionStr + " int i = 0; i < " + getIntUniformName(iterCount) + "; i++)";
+		else if (loopCountType == LOOPCOUNT_DYNAMIC)
+			forLoopStr = std::string("for (") + counterPrecisionStr + " int i = 0; i < one*" + getIntUniformName(iterCount) + "; i++)";
+		else
+			DE_ASSERT(false);
+	}
+	else if (loopType == LOOPTYPE_WHILE)
+	{
+		switch (loopCase)
+		{
+			case LOOPCASE_EMPTY_BODY:
+				numIters = 0;
+				op << "	${WHILE_LOOP} {}\n";
+				break;
+
+			case LOOPCASE_INFINITE_WITH_UNCONDITIONAL_BREAK_FIRST:
+				numIters = 0;
+				op << "	while (true) { break; res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_INFINITE_WITH_UNCONDITIONAL_BREAK_LAST:
+				numIters = 1;
+				op << "	while (true) { res = res.yzwx; break; }\n";
+				break;
+
+			case LOOPCASE_INFINITE_WITH_CONDITIONAL_BREAK:
+				numIters = 2;
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	while (true) { res = res.yzwx; if (i == ${ONE}) break; i++; }\n";
+				break;
+
+			case LOOPCASE_SINGLE_STATEMENT:
+				op << "	${WHILE_LOOP} res = res.yzwx;\n";
+				break;
+
+			case LOOPCASE_COMPOUND_STATEMENT:
+				iterCount	= 2;
+				numIters	= 2 * iterCount;
+				op << "	${WHILE_LOOP} { res = res.yzwx; res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_SEQUENCE_STATEMENT:
+				iterCount	= 2;
+				numIters	= 2 * iterCount;
+				op << "	${WHILE_LOOP} res = res.yzwx, res = res.yzwx;\n";
+				break;
+
+			case LOOPCASE_NO_ITERATIONS:
+				iterCount	= 0;
+				numIters	= 0;
+				op << "	${WHILE_LOOP} res = res.yzwx;\n";
+				break;
+
+			case LOOPCASE_SINGLE_ITERATION:
+				iterCount	= 1;
+				numIters	= 1;
+				op << "	${WHILE_LOOP} res = res.yzwx;\n";
+				break;
+
+			case LOOPCASE_SELECT_ITERATION_COUNT:
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	while (i < (ub_true ? ${ITER_COUNT} : 0)) { res = res.yzwx; i++; }\n";
+				break;
+
+			case LOOPCASE_CONDITIONAL_CONTINUE:
+				numIters = iterCount - 1;
+				op << "	${WHILE_LOOP} { if (i == ${TWO}) continue; res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_UNCONDITIONAL_CONTINUE:
+				op << "	${WHILE_LOOP} { res = res.yzwx; continue; }\n";
+				break;
+
+			case LOOPCASE_ONLY_CONTINUE:
+				numIters = 0;
+				op << "	${WHILE_LOOP} { continue; }\n";
+				break;
+
+			case LOOPCASE_DOUBLE_CONTINUE:
+				numIters = iterCount - 1;
+				op << "	${WHILE_LOOP} { if (i == ${ONE}) continue; res = res.yzwx; continue; }\n";
+				break;
+
+			case LOOPCASE_CONDITIONAL_BREAK:
+				numIters = 2;
+				op << "	${WHILE_LOOP} { if (i == ${THREE}) break; res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_UNCONDITIONAL_BREAK:
+				numIters = 1;
+				op << "	${WHILE_LOOP} { res = res.yzwx; break; }\n";
+				break;
+
+			case LOOPCASE_PRE_INCREMENT:
+				numIters = iterCount - 1;
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	while (++i < ${ITER_COUNT}) { res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_POST_INCREMENT:
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	while (i++ < ${ITER_COUNT}) { res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_MIXED_BREAK_CONTINUE:
+				numIters	= 2;
+				iterCount	= 5;
+				op << "	${WHILE_LOOP} { if (i == 0) continue; else if (i == 3) break; res = res.yzwx; }\n";
+				break;
+
+			case LOOPCASE_VECTOR_COUNTER:
+				op << "	${COUNTER_PRECISION} ivec4 i = ivec4(0, 1, ${ITER_COUNT}, 0);\n";
+				op << "	while (i.x < i.z) { res = res.yzwx; i.x += i.y; }\n";
+				break;
+
+			case LOOPCASE_101_ITERATIONS:
+				numIters = iterCount = 101;
+				op << "	${WHILE_LOOP} res = res.yzwx;\n";
+				break;
+
+			case LOOPCASE_SEQUENCE:
+				iterCount	= 6;
+				numIters	= iterCount - 1;
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	while (i++ < ${TWO}) { res = res.yzwx; }\n";
+				op << "	while (i++ < ${ITER_COUNT}) { res = res.yzwx; }\n"; // \note skips one iteration
+				break;
+
+			case LOOPCASE_NESTED:
+				numIters = 2 * iterCount;
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	while (i++ < ${TWO})\n";
+				op << "	{\n";
+				op << "		${COUNTER_PRECISION} int j = 0;\n";
+				op << "		while (j++ < ${ITER_COUNT})\n";
+				op << "			res = res.yzwx;\n";
+				op << "	}\n";
+				break;
+
+			case LOOPCASE_NESTED_SEQUENCE:
+				numIters = 2 * iterCount;
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	while (i++ < ${ITER_COUNT})\n";
+				op << "	{\n";
+				op << "		${COUNTER_PRECISION} int j = 0;\n";
+				op << "		while (j++ < ${ONE})\n";
+				op << "			res = res.yzwx;\n";
+				op << "		while (j++ < ${THREE})\n"; // \note skips one iteration
+				op << "			res = res.yzwx;\n";
+				op << "	}\n";
+				break;
+
+			case LOOPCASE_NESTED_TRICKY_DATAFLOW_1:
+				numIters = 2;
+				op << "	${WHILE_LOOP}\n";
+				op << "	{\n";
+				op << "		res = coords; // ignore outer loop effect \n";
+				op << "		${COUNTER_PRECISION} int j = 0;\n";
+				op << "		while (j++ < ${TWO})\n";
+				op << "			res = res.yzwx;\n";
+				op << "	}\n";
+				break;
+
+			case LOOPCASE_NESTED_TRICKY_DATAFLOW_2:
+				numIters = iterCount;
+				op << "	${WHILE_LOOP}\n";
+				op << "	{\n";
+				op << "		res = coords.wxyz;\n";
+				op << "		${COUNTER_PRECISION} int j = 0;\n";
+				op << "		while (j++ < ${TWO})\n";
+				op << "			res = res.yzwx;\n";
+				op << "		coords = res;\n";
+				op << "	}\n";
+				break;
+
+			default:
+				DE_ASSERT(false);
+		}
+
+		if (loopCountType == LOOPCOUNT_CONSTANT)
+			whileLoopStr = std::string("\t") + counterPrecisionStr + " int i = 0;\n" + "	while(i++ < " + de::toString(iterCount) + ")";
+		else if (loopCountType == LOOPCOUNT_UNIFORM)
+			whileLoopStr = std::string("\t") + counterPrecisionStr + " int i = 0;\n" + "	while(i++ < " + getIntUniformName(iterCount) + ")";
+		else if (loopCountType == LOOPCOUNT_DYNAMIC)
+			whileLoopStr = std::string("\t") + counterPrecisionStr + " int i = 0;\n" + "	while(i++ < one*" + getIntUniformName(iterCount) + ")";
+		else
+			DE_ASSERT(false);
+	}
+	else
+	{
+		DE_ASSERT(loopType == LOOPTYPE_DO_WHILE);
+
+		switch (loopCase)
+		{
+			case LOOPCASE_EMPTY_BODY:
+				numIters = 0;
+				op << "	${DO_WHILE_PRE} {} ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_INFINITE_WITH_UNCONDITIONAL_BREAK_FIRST:
+				numIters = 0;
+				op << "	do { break; res = res.yzwx; } while (true);\n";
+				break;
+
+			case LOOPCASE_INFINITE_WITH_UNCONDITIONAL_BREAK_LAST:
+				numIters = 1;
+				op << "	do { res = res.yzwx; break; } while (true);\n";
+				break;
+
+			case LOOPCASE_INFINITE_WITH_CONDITIONAL_BREAK:
+				numIters = 2;
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	do { res = res.yzwx; if (i == ${ONE}) break; i++; } while (true);\n";
+				break;
+
+			case LOOPCASE_SINGLE_STATEMENT:
+				op << "	${DO_WHILE_PRE} res = res.yzwx; ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_COMPOUND_STATEMENT:
+				iterCount	= 2;
+				numIters	= 2 * iterCount;
+				op << "	${DO_WHILE_PRE} { res = res.yzwx; res = res.yzwx; } ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_SEQUENCE_STATEMENT:
+				iterCount	= 2;
+				numIters	= 2 * iterCount;
+				op << "	${DO_WHILE_PRE} res = res.yzwx, res = res.yzwx; ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_NO_ITERATIONS:
+				DE_ASSERT(false);
+				break;
+
+			case LOOPCASE_SINGLE_ITERATION:
+				iterCount	= 1;
+				numIters	= 1;
+				op << "	${DO_WHILE_PRE} res = res.yzwx; ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_SELECT_ITERATION_COUNT:
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	do { res = res.yzwx; } while (++i < (ub_true ? ${ITER_COUNT} : 0));\n";
+				break;
+
+			case LOOPCASE_CONDITIONAL_CONTINUE:
+				numIters = iterCount - 1;
+				op << "	${DO_WHILE_PRE} { if (i == ${TWO}) continue; res = res.yzwx; } ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_UNCONDITIONAL_CONTINUE:
+				op << "	${DO_WHILE_PRE} { res = res.yzwx; continue; } ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_ONLY_CONTINUE:
+				numIters = 0;
+				op << "	${DO_WHILE_PRE} { continue; } ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_DOUBLE_CONTINUE:
+				numIters = iterCount - 1;
+				op << "	${DO_WHILE_PRE} { if (i == ${TWO}) continue; res = res.yzwx; continue; } ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_CONDITIONAL_BREAK:
+				numIters = 2;
+				op << "	${DO_WHILE_PRE} { res = res.yzwx; if (i == ${ONE}) break; } ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_UNCONDITIONAL_BREAK:
+				numIters = 1;
+				op << "	${DO_WHILE_PRE} { res = res.yzwx; break; } ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_PRE_INCREMENT:
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	do { res = res.yzwx; } while (++i < ${ITER_COUNT});\n";
+				break;
+
+			case LOOPCASE_POST_INCREMENT:
+				numIters = iterCount + 1;
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	do { res = res.yzwx; } while (i++ < ${ITER_COUNT});\n";
+				break;
+
+			case LOOPCASE_MIXED_BREAK_CONTINUE:
+				numIters	= 2;
+				iterCount	= 5;
+				op << "	${DO_WHILE_PRE} { if (i == 0) continue; else if (i == 3) break; res = res.yzwx; } ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_VECTOR_COUNTER:
+				op << "	${COUNTER_PRECISION} ivec4 i = ivec4(0, 1, ${ITER_COUNT}, 0);\n";
+				op << "	do { res = res.yzwx; } while ((i.x += i.y) < i.z);\n";
+				break;
+
+			case LOOPCASE_101_ITERATIONS:
+				numIters = iterCount = 101;
+				op << "	${DO_WHILE_PRE} res = res.yzwx; ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_SEQUENCE:
+				iterCount	= 5;
+				numIters	= 5;
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	do { res = res.yzwx; } while (++i < ${TWO});\n";
+				op << "	do { res = res.yzwx; } while (++i < ${ITER_COUNT});\n";
+				break;
+
+			case LOOPCASE_NESTED:
+				numIters = 2 * iterCount;
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	do\n";
+				op << "	{\n";
+				op << "		${COUNTER_PRECISION} int j = 0;\n";
+				op << "		do\n";
+				op << "			res = res.yzwx;\n";
+				op << "		while (++j < ${ITER_COUNT});\n";
+				op << "	} while (++i < ${TWO});\n";
+				break;
+
+			case LOOPCASE_NESTED_SEQUENCE:
+				numIters = 3 * iterCount;
+				op << "	${COUNTER_PRECISION} int i = 0;\n";
+				op << "	do\n";
+				op << "	{\n";
+				op << "		${COUNTER_PRECISION} int j = 0;\n";
+				op << "		do\n";
+				op << "			res = res.yzwx;\n";
+				op << "		while (++j < ${TWO});\n";
+				op << "		do\n";
+				op << "			res = res.yzwx;\n";
+				op << "		while (++j < ${THREE});\n";
+				op << "	} while (++i < ${ITER_COUNT});\n";
+				break;
+
+			case LOOPCASE_NESTED_TRICKY_DATAFLOW_1:
+				numIters = 2;
+				op << "	${DO_WHILE_PRE}\n";
+				op << "	{\n";
+				op << "		res = coords; // ignore outer loop effect \n";
+				op << "		${COUNTER_PRECISION} int j = 0;\n";
+				op << "		do\n";
+				op << "			res = res.yzwx;\n";
+				op << "		while (++j < ${TWO});\n";
+				op << "	} ${DO_WHILE_POST}\n";
+				break;
+
+			case LOOPCASE_NESTED_TRICKY_DATAFLOW_2:
+				numIters = iterCount;
+				op << "	${DO_WHILE_PRE}\n";
+				op << "	{\n";
+				op << "		res = coords.wxyz;\n";
+				op << "		${COUNTER_PRECISION} int j = 0;\n";
+				op << "		while (j++ < ${TWO})\n";
+				op << "			res = res.yzwx;\n";
+				op << "		coords = res;\n";
+				op << "	} ${DO_WHILE_POST}\n";
+				break;
+
+			default:
+				DE_ASSERT(false);
+		}
+
+		doWhileLoopPreStr = std::string("\t") + counterPrecisionStr + " int i = 0;\n" + "\tdo ";
+		if (loopCountType == LOOPCOUNT_CONSTANT)
+			doWhileLoopPostStr = std::string(" while (++i < ") + de::toString(iterCount) + ");\n";
+		else if (loopCountType == LOOPCOUNT_UNIFORM)
+			doWhileLoopPostStr = std::string(" while (++i < ") + getIntUniformName(iterCount) + ");\n";
+		else if (loopCountType == LOOPCOUNT_DYNAMIC)
+			doWhileLoopPostStr = std::string(" while (++i < one*") + getIntUniformName(iterCount) + ");\n";
+		else
+			DE_ASSERT(false);
+	}
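+	// At this point 'op' holds the loop body with ${...} placeholders, and forLoopStr / whileLoopStr /
+	// doWhileLoopPreStr+doWhileLoopPostStr hold the matching loop header (and footer) text; both are
+	// substituted into the shader sources further below.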
+
+	// Shader footers.
+	if (isVertexCase)
+	{
+		vtx << "	v_color = res.rgb;\n";
+		frag << "	o_color = vec4(v_color.rgb, 1.0);\n";
+	}
+	else
+	{
+		vtx << "	v_coords = a_coords;\n";
+		frag << "	o_color = vec4(res.rgb, 1.0);\n";
+
+		if (loopCountType == LOOPCOUNT_DYNAMIC)
+			vtx << "	v_one = a_one;\n";
+	}
+
+	vtx << "}\n";
+	frag << "}\n";
+
+	// Constants.
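+	// ${ONE}/${TWO}/${THREE}/${ITER_COUNT} resolve to integer literals, the ui_* uniforms, or
+	// "one*ui_*" expressions, depending on the selected loop count type.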
+	std::string oneStr;
+	std::string twoStr;
+	std::string threeStr;
+	std::string iterCountStr;
+
+	if (loopCountType == LOOPCOUNT_CONSTANT)
+	{
+		oneStr			= "1";
+		twoStr			= "2";
+		threeStr		= "3";
+		iterCountStr	= de::toString(iterCount);
+	}
+	else if (loopCountType == LOOPCOUNT_UNIFORM)
+	{
+		oneStr			= "ui_one";
+		twoStr			= "ui_two";
+		threeStr		= "ui_three";
+		iterCountStr	= getIntUniformName(iterCount);
+	}
+	else if (loopCountType == LOOPCOUNT_DYNAMIC)
+	{
+		oneStr			= "one*ui_one";
+		twoStr			= "one*ui_two";
+		threeStr		= "one*ui_three";
+		iterCountStr	= std::string("one*") + getIntUniformName(iterCount);
+	}
+	else DE_ASSERT(false);
+
+	// Fill in shader templates.
+	std::map<std::string, std::string> params;
+	params.insert(std::pair<std::string, std::string>("PRECISION", "mediump"));
+	params.insert(std::pair<std::string, std::string>("ITER_COUNT", iterCountStr));
+	params.insert(std::pair<std::string, std::string>("COUNTER_PRECISION", counterPrecisionStr));
+	params.insert(std::pair<std::string, std::string>("FOR_LOOP", forLoopStr));
+	params.insert(std::pair<std::string, std::string>("WHILE_LOOP", whileLoopStr));
+	params.insert(std::pair<std::string, std::string>("DO_WHILE_PRE", doWhileLoopPreStr));
+	params.insert(std::pair<std::string, std::string>("DO_WHILE_POST", doWhileLoopPostStr));
+	params.insert(std::pair<std::string, std::string>("ONE", oneStr));
+	params.insert(std::pair<std::string, std::string>("TWO", twoStr));
+	params.insert(std::pair<std::string, std::string>("THREE", threeStr));
+
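+	// tcu::StringTemplate::specialize() replaces each ${NAME} placeholder in the sources with the
+	// matching entry from 'params', producing the final GLSL for this case.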
+	tcu::StringTemplate vertTemplate(vtx.str());
+	tcu::StringTemplate fragTemplate(frag.str());
+	std::string vertexShaderSource = vertTemplate.specialize(params);
+	std::string fragmentShaderSource = fragTemplate.specialize(params);
+
+	// Create the case.
+	UniformSetup* uniformSetup = new LoopUniformSetup(uniformInformations);
+	ShaderEvalFunc evalFunc = getLoopEvalFunc(numIters);
+	return de::MovePtr<ShaderLoopCase>(new ShaderLoopCase(testCtx, caseName, description, isVertexCase, evalFunc, uniformSetup, vertexShaderSource, fragmentShaderSource));
+}
+
+class ShaderLoopTests : public tcu::TestCaseGroup
+{
+public:
+							ShaderLoopTests			(tcu::TestContext& testCtx);
+	virtual					~ShaderLoopTests		(void);
+
+	virtual void			init					(void);
+
+private:
+							ShaderLoopTests			(const ShaderLoopTests&);		// not allowed!
+	ShaderLoopTests&		operator=				(const ShaderLoopTests&);		// not allowed!
+};
+
+ShaderLoopTests::ShaderLoopTests(tcu::TestContext& testCtx)
+		: TestCaseGroup(testCtx, "loops", "Loop Tests")
+{
+}
+
+ShaderLoopTests::~ShaderLoopTests (void)
+{
+}
+
+void ShaderLoopTests::init (void)
+{
+	// Loop cases.
+
+	static const glu::ShaderType s_shaderTypes[] =
+	{
+		glu::SHADERTYPE_VERTEX,
+		glu::SHADERTYPE_FRAGMENT
+	};
+
+	static const glu::DataType s_countDataType[] =
+	{
+		glu::TYPE_INT,
+		glu::TYPE_FLOAT
+	};
+
+	TestCaseGroup* genericGroup = new TestCaseGroup(m_testCtx, "generic", "Generic loop tests.");
+	TestCaseGroup* specialGroup = new TestCaseGroup(m_testCtx, "special", "Special loop tests.");
+	addChild(genericGroup);
+	addChild(specialGroup);
+
+	for (int loopType = 0; loopType < LOOPTYPE_LAST; loopType++)
+	{
+		const char* loopTypeName = getLoopTypeName((LoopType)loopType);
+
+		for (int loopCountType = 0; loopCountType < LOOPCOUNT_LAST; loopCountType++)
+		{
+			const char* loopCountName = getLoopCountTypeName((LoopCountType)loopCountType);
+
+			std::string groupName = std::string(loopTypeName) + "_" + std::string(loopCountName) + "_iterations";
+			std::string groupDesc = std::string("Loop tests with ") + loopCountName + " loop counter.";
+			TestCaseGroup* genericSubGroup = new TestCaseGroup(m_testCtx, groupName.c_str(), groupDesc.c_str());
+			TestCaseGroup* specialSubGroup = new TestCaseGroup(m_testCtx, groupName.c_str(), groupDesc.c_str());
+			genericGroup->addChild(genericSubGroup);
+			specialGroup->addChild(specialSubGroup);
+
+			// Generic cases.
+
+			for (int precision = glu::PRECISION_MEDIUMP; precision < glu::PRECISION_LAST; precision++)
+			{
+				const char* precisionName = getPrecisionName((glu::Precision)precision);
+
+				for (int dataTypeNdx = 0; dataTypeNdx < DE_LENGTH_OF_ARRAY(s_countDataType); dataTypeNdx++)
+				{
+					glu::DataType loopDataType = s_countDataType[dataTypeNdx];
+					const char* dataTypeName = getDataTypeName(loopDataType);
+
+					for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(s_shaderTypes); shaderTypeNdx++)
+					{
+						glu::ShaderType	shaderType		= s_shaderTypes[shaderTypeNdx];
+						const char*	shaderTypeName	= getShaderTypeName(shaderType);
+						bool		isVertexCase	= (shaderType == glu::SHADERTYPE_VERTEX);
+
+						std::string testName = std::string("basic_") + precisionName + "_" + dataTypeName + "_" + shaderTypeName;
+						std::string testDesc = std::string(loopTypeName) + " loop with " + precisionName + " " + dataTypeName + " " + loopCountName + " iteration count in " + shaderTypeName + " shader.";
+						de::MovePtr<ShaderLoopCase> testCase(createGenericLoopCase(m_testCtx, testName.c_str(), testDesc.c_str(), isVertexCase, (LoopType)loopType, (LoopCountType)loopCountType, (glu::Precision)precision, loopDataType));
+						genericSubGroup->addChild(testCase.release());
+					}
+				}
+			}
+
+			// Special cases.
+
+			for (int loopCase = 0; loopCase < LOOPCASE_LAST; loopCase++)
+			{
+				const char* loopCaseName = getLoopCaseName((LoopCase)loopCase);
+
+				// no-iterations not possible with do-while.
+				if ((loopCase == LOOPCASE_NO_ITERATIONS) && (loopType == LOOPTYPE_DO_WHILE))
+					continue;
+
+				for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(s_shaderTypes); shaderTypeNdx++)
+				{
+					glu::ShaderType	shaderType		= s_shaderTypes[shaderTypeNdx];
+					const char*	shaderTypeName	= getShaderTypeName(shaderType);
+					bool		isVertexCase	= (shaderType == glu::SHADERTYPE_VERTEX);
+
+					std::string name = std::string(loopCaseName) + "_" + shaderTypeName;
+					std::string desc = std::string(loopCaseName) + " loop with " + loopTypeName + " iteration count in " + shaderTypeName + " shader.";
+					de::MovePtr<ShaderLoopCase> testCase(createSpecialLoopCase(m_testCtx, name.c_str(), desc.c_str(), isVertexCase, (LoopCase)loopCase, (LoopType)loopType, (LoopCountType)loopCountType));
+					specialSubGroup->addChild(testCase.release());
+				}
+			}
+		}
+	}
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createLoopTests (tcu::TestContext& testCtx)
+{
+	return new ShaderLoopTests(testCtx);
+}
+
+
+} // sr
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderLoopTests.hpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderLoopTests.hpp
new file mode 100644
index 0000000..7fc08b6
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderLoopTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTSHADERRENDERLOOPTESTS_HPP
+#define _VKTSHADERRENDERLOOPTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader loop tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace sr
+{
+
+tcu::TestCaseGroup*	createLoopTests	(tcu::TestContext& testCtx);
+
+} // sr
+} // vkt
+
+#endif // _VKTSHADERRENDERLOOPTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderMatrixTests.cpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderMatrixTests.cpp
new file mode 100644
index 0000000..30d4ebf
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderMatrixTests.cpp
@@ -0,0 +1,2177 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader matrix arithmetic tests.
+ *
+ * Variables:
+ *  + operation
+ *    - mat OP mat
+ *    - mat OP vec
+ *    - vec OP mat
+ *    - mat OP scalar
+ *    - OP ( mat )
+ *    - vec OP vec
+ *    - OP mat
+ *  + matrix source
+ *    - constant (ctor)
+ *    - uniform
+ *    - vertex input
+ *    - fragment input
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderRenderMatrixTests.hpp"
+
+#include "vktShaderRender.hpp"
+#include "tcuVector.hpp"
+#include "tcuMatrix.hpp"
+#include "tcuMatrixUtil.hpp"
+#include "deStringUtil.hpp"
+
+namespace vkt
+{
+namespace sr
+{
+namespace
+{
+
+using std::string;
+using std::vector;
+using namespace glu;
+
+using tcu::Vec2;
+using tcu::Vec3;
+using tcu::Vec4;
+using tcu::Mat2;
+using tcu::Mat2x3;
+using tcu::Mat2x4;
+using tcu::Mat3x2;
+using tcu::Mat3;
+using tcu::Mat3x4;
+using tcu::Mat4x2;
+using tcu::Mat4x3;
+using tcu::Mat4;
+
+// Uniform / constant values for tests.
+// \note Input1 should not contain 0 components as it is used as divisor in div cases.
+static const float	s_constInFloat[2]	= { 0.5f, -0.2f };
+static const Vec2	s_constInVec2[2]	= { Vec2(1.2f, 0.5f), Vec2(0.5f, 1.0f) };
+static const Vec3	s_constInVec3[2]	= { Vec3(1.1f, 0.1f, 0.5f), Vec3(-0.2f, 0.5f, 0.8f) };
+static const Vec4	s_constInVec4[2]	= { Vec4(1.4f, 0.2f, -0.5f, 0.7f), Vec4(0.2f, -1.0f, 0.5f, 0.8f) };
+
+static const float s_constInMat2x2[2][4] =
+{
+	{
+		-0.1f,  1.0f,
+		-0.2f,  0.0f,
+	},
+	{
+		 0.8f,  0.1f,
+		 0.5f, -0.9f,
+	},
+};
+static const float s_constInMat3x2[2][6] =
+{
+	{
+		 0.8f, -0.3f,  0.3f,
+		 1.0f,  1.2f, -1.2f,
+	},
+	{
+		 1.2f, -1.0f,  0.5f,
+		-0.8f,  1.1f,  0.3f,
+	},
+};
+static const float s_constInMat4x2[2][8] =
+{
+	{
+		-0.2f,  0.5f, 0.0f, -1.0f,
+		 1.2f, -0.5f, 0.3f, -0.9f,
+	},
+	{
+		1.0f,  0.1f, -1.1f,  0.6f,
+		0.8f, -1.2f, -1.1f,  0.7f,
+	},
+};
+static const float s_constInMat2x3[2][6] =
+{
+	{
+		-0.6f, -0.1f,
+		-0.7f, -1.2f,
+		-0.2f,  0.0f,
+	},
+	{
+		 1.1f,  0.6f,
+		 0.8f,  1.0f,
+		 0.7f,  0.1f,
+	},
+};
+static const float s_constInMat3x3[2][9] =
+{
+	{
+		-0.2f,  1.1f, 1.2f,
+		-1.0f,  1.2f, 0.5f,
+		 0.7f, -0.2f, 1.0f,
+	},
+	{
+		-0.1f, -0.1f,  0.1f,
+		-0.1f, -0.2f,  1.0f,
+		-0.5f,  0.1f, -0.4f,
+	},
+};
+static const float s_constInMat4x3[2][12] =
+{
+	{
+		-0.9f,  0.0f,  0.6f,  0.2f,
+		 0.9f, -0.1f, -0.3f, -0.7f,
+		-0.1f,  0.1f,  1.0f,  0.0f,
+	},
+	{
+		 0.5f,  0.7f,  0.7f,  1.2f,
+		 1.1f,  0.1f,  1.0f, -1.0f,
+		-0.2f, -0.2f, -0.3f, -0.5f,
+	},
+};
+static const float s_constInMat2x4[2][8] =
+{
+	{
+		-0.6f, -1.1f,
+		-0.6f, -0.6f,
+		-0.2f, -0.6f,
+		-0.1f, -0.1f,
+	},
+	{
+		-1.2f, -1.0f,
+		 0.7f, -1.0f,
+		 0.7f,  0.7f,
+		-0.4f, -0.3f,
+	},
+};
+static const float s_constInMat3x4[2][12] =
+{
+	{
+		 0.6f, -0.4f,  1.2f,
+		 0.9f,  0.8f,  0.4f,
+		 1.1f,  0.3f,  0.5f,
+		-0.2f,  0.0f,  1.1f,
+	},
+	{
+		-0.8f,  1.2f, -0.2f,
+		-1.1f, -0.9f, -0.5f,
+		-1.2f,  1.0f,  1.2f,
+		 0.1f, -0.7f, -0.5f,
+	},
+};
+static const float s_constInMat4x4[2][16] =
+{
+	{
+		 0.3f,  0.9f, -0.2f,  1.0f,
+		-0.4f, -0.6f,  0.6f, -1.0f,
+		-0.9f, -0.1f,  0.3f, -0.2f,
+		-0.3f, -0.9f,  1.0f,  0.1f,
+	},
+	{
+		 0.4f, -0.7f, -0.8f,  0.7f,
+		-0.4f, -0.8f,  0.6f, -0.3f,
+		 0.7f, -1.0f,  0.1f, -0.3f,
+		 0.2f,  0.6f,  0.4f, -1.0f,
+	},
+};
+
+namespace MatrixCaseUtils
+{
+
+enum InputType
+{
+	INPUTTYPE_CONST = 0,
+	INPUTTYPE_UNIFORM,
+	INPUTTYPE_DYNAMIC,
+
+	INPUTTYPE_LAST
+};
+
+struct ShaderInput
+{
+	ShaderInput (InputType inputType_, DataType dataType_, Precision precision_)
+		: inputType	(inputType_)
+		, dataType	(dataType_)
+		, precision	(precision_)
+	{
+	}
+
+	InputType		inputType;
+	DataType		dataType;
+	Precision		precision;
+};
+
+enum MatrixOp
+{
+	OP_ADD = 0,
+	OP_SUB,
+	OP_MUL,
+	OP_DIV,
+	OP_COMP_MUL,
+	OP_OUTER_PRODUCT,
+	OP_TRANSPOSE,
+	OP_INVERSE,
+	OP_DETERMINANT,
+	OP_UNARY_PLUS,
+	OP_NEGATION,
+	OP_PRE_INCREMENT,
+	OP_PRE_DECREMENT,
+	OP_POST_INCREMENT,
+	OP_POST_DECREMENT,
+	OP_ADD_INTO,
+	OP_SUBTRACT_FROM,
+	OP_MULTIPLY_INTO,
+	OP_DIVIDE_INTO,
+	OP_LAST
+};
+
+// Type traits.
+
+template <int DataT>
+struct TypeTraits;
+
+#define DECLARE_TYPE_TRAIT(DATATYPE, TYPE)	\
+template<>									\
+struct TypeTraits<DATATYPE> {				\
+	typedef TYPE Type;						\
+}
+
+DECLARE_TYPE_TRAIT(TYPE_FLOAT,			float);
+DECLARE_TYPE_TRAIT(TYPE_FLOAT_VEC2,		tcu::Vec2);
+DECLARE_TYPE_TRAIT(TYPE_FLOAT_VEC3,		tcu::Vec3);
+DECLARE_TYPE_TRAIT(TYPE_FLOAT_VEC4,		tcu::Vec4);
+DECLARE_TYPE_TRAIT(TYPE_FLOAT_MAT2,		tcu::Mat2);
+DECLARE_TYPE_TRAIT(TYPE_FLOAT_MAT2X3,	tcu::Mat2x3);
+DECLARE_TYPE_TRAIT(TYPE_FLOAT_MAT2X4,	tcu::Mat2x4);
+DECLARE_TYPE_TRAIT(TYPE_FLOAT_MAT3X2,	tcu::Mat3x2);
+DECLARE_TYPE_TRAIT(TYPE_FLOAT_MAT3,		tcu::Mat3);
+DECLARE_TYPE_TRAIT(TYPE_FLOAT_MAT3X4,	tcu::Mat3x4);
+DECLARE_TYPE_TRAIT(TYPE_FLOAT_MAT4X2,	tcu::Mat4x2);
+DECLARE_TYPE_TRAIT(TYPE_FLOAT_MAT4X3,	tcu::Mat4x3);
+DECLARE_TYPE_TRAIT(TYPE_FLOAT_MAT4,		tcu::Mat4);
+
+// Operation info
+
+enum OperationType
+{
+	OPERATIONTYPE_BINARY_OPERATOR = 0,
+	OPERATIONTYPE_BINARY_FUNCTION,
+	OPERATIONTYPE_UNARY_PREFIX_OPERATOR,
+	OPERATIONTYPE_UNARY_POSTFIX_OPERATOR,
+	OPERATIONTYPE_UNARY_FUNCTION,
+	OPERATIONTYPE_ASSIGNMENT,
+
+	OPERATIONTYPE_LAST
+};
+
+static const char* getOperationName (MatrixOp op)
+{
+	switch (op)
+	{
+		case OP_ADD:			return "+";
+		case OP_SUB:			return "-";
+		case OP_MUL:			return "*";
+		case OP_DIV:			return "/";
+		case OP_COMP_MUL:		return "matrixCompMult";
+		case OP_OUTER_PRODUCT:	return "outerProduct";
+		case OP_TRANSPOSE:		return "transpose";
+		case OP_INVERSE:		return "inverse";
+		case OP_DETERMINANT:	return "determinant";
+		case OP_UNARY_PLUS:		return "+";
+		case OP_NEGATION:		return "-";
+		case OP_PRE_INCREMENT:	return "++";
+		case OP_PRE_DECREMENT:	return "--";
+		case OP_POST_INCREMENT:	return "++";
+		case OP_POST_DECREMENT:	return "--";
+		case OP_ADD_INTO:		return "+=";
+		case OP_SUBTRACT_FROM:	return "-=";
+		case OP_MULTIPLY_INTO:	return "*=";
+		case OP_DIVIDE_INTO:	return "/=";
+
+		default:
+			DE_ASSERT(DE_FALSE);
+			return "";
+	}
+}
+
+static OperationType getOperationType (MatrixOp op)
+{
+	switch (op)
+	{
+		case OP_ADD:			return OPERATIONTYPE_BINARY_OPERATOR;
+		case OP_SUB:			return OPERATIONTYPE_BINARY_OPERATOR;
+		case OP_MUL:			return OPERATIONTYPE_BINARY_OPERATOR;
+		case OP_DIV:			return OPERATIONTYPE_BINARY_OPERATOR;
+		case OP_COMP_MUL:		return OPERATIONTYPE_BINARY_FUNCTION;
+		case OP_OUTER_PRODUCT:	return OPERATIONTYPE_BINARY_FUNCTION;
+		case OP_TRANSPOSE:		return OPERATIONTYPE_UNARY_FUNCTION;
+		case OP_INVERSE:		return OPERATIONTYPE_UNARY_FUNCTION;
+		case OP_DETERMINANT:	return OPERATIONTYPE_UNARY_FUNCTION;
+		case OP_UNARY_PLUS:		return OPERATIONTYPE_UNARY_PREFIX_OPERATOR;
+		case OP_NEGATION:		return OPERATIONTYPE_UNARY_PREFIX_OPERATOR;
+		case OP_PRE_INCREMENT:	return OPERATIONTYPE_UNARY_PREFIX_OPERATOR;
+		case OP_PRE_DECREMENT:	return OPERATIONTYPE_UNARY_PREFIX_OPERATOR;
+		case OP_POST_INCREMENT:	return OPERATIONTYPE_UNARY_POSTFIX_OPERATOR;
+		case OP_POST_DECREMENT:	return OPERATIONTYPE_UNARY_POSTFIX_OPERATOR;
+		case OP_ADD_INTO:		return OPERATIONTYPE_ASSIGNMENT;
+		case OP_SUBTRACT_FROM:	return OPERATIONTYPE_ASSIGNMENT;
+		case OP_MULTIPLY_INTO:	return OPERATIONTYPE_ASSIGNMENT;
+		case OP_DIVIDE_INTO:	return OPERATIONTYPE_ASSIGNMENT;
+		default:
+			DE_ASSERT(DE_FALSE);
+			return OPERATIONTYPE_LAST;
+	}
+}
+
+enum TestMatrixType
+{
+	TESTMATRIXTYPE_DEFAULT = 0,
+	TESTMATRIXTYPE_NEGATED,
+	TESTMATRIXTYPE_INCREMENTED,
+	TESTMATRIXTYPE_DECREMENTED,
+	TESTMATRIXTYPE_NEGATED_INCREMENTED,
+	TESTMATRIXTYPE_INCREMENTED_LESS,
+
+	TESTMATRIXTYPE_LAST
+};
+
+static TestMatrixType getOperationTestMatrixType (MatrixOp op)
+{
+	switch(op)
+	{
+		case OP_ADD:			return TESTMATRIXTYPE_DEFAULT;
+		case OP_SUB:			return TESTMATRIXTYPE_DEFAULT;
+		case OP_MUL:			return TESTMATRIXTYPE_DEFAULT;
+		case OP_DIV:			return TESTMATRIXTYPE_DEFAULT;
+		case OP_COMP_MUL:		return TESTMATRIXTYPE_DEFAULT;
+		case OP_OUTER_PRODUCT:	return TESTMATRIXTYPE_DEFAULT;
+		case OP_TRANSPOSE:		return TESTMATRIXTYPE_DEFAULT;
+		case OP_INVERSE:		return TESTMATRIXTYPE_DEFAULT;
+		case OP_DETERMINANT:	return TESTMATRIXTYPE_DEFAULT;
+		case OP_UNARY_PLUS:		return TESTMATRIXTYPE_DECREMENTED;
+		case OP_NEGATION:		return TESTMATRIXTYPE_NEGATED_INCREMENTED;
+		case OP_PRE_INCREMENT:	return TESTMATRIXTYPE_NEGATED;
+		case OP_PRE_DECREMENT:	return TESTMATRIXTYPE_INCREMENTED;
+		case OP_POST_INCREMENT:	return TESTMATRIXTYPE_NEGATED;
+		case OP_POST_DECREMENT:	return TESTMATRIXTYPE_DEFAULT;
+		case OP_ADD_INTO:		return TESTMATRIXTYPE_DEFAULT;
+		case OP_SUBTRACT_FROM:	return TESTMATRIXTYPE_INCREMENTED_LESS;
+		case OP_MULTIPLY_INTO:	return TESTMATRIXTYPE_NEGATED;
+		case OP_DIVIDE_INTO:	return TESTMATRIXTYPE_DECREMENTED;
+
+		default:
+			DE_ASSERT(DE_FALSE);
+			return TESTMATRIXTYPE_LAST;
+	}
+}
+
+static bool isOperationBinary (MatrixOp op)
+{
+	return getOperationType(op) == OPERATIONTYPE_BINARY_OPERATOR ||
+	       getOperationType(op) == OPERATIONTYPE_BINARY_FUNCTION ||
+	       getOperationType(op) == OPERATIONTYPE_ASSIGNMENT;
+}
+
+static bool isOperationMatrixScalar (MatrixOp op)
+{
+	return op == OP_ADD || op == OP_SUB || op == OP_MUL || op == OP_DIV;
+}
+
+static bool isOperationMatrixVector (MatrixOp op)
+{
+	return op == OP_MUL;
+}
+
+static bool isOperationArithmeticMatrixMatrix (MatrixOp op)
+{
+	return op == OP_MUL;
+}
+
+static bool isOperationComponentwiseMatrixMatrix (MatrixOp op)
+{
+	return op == OP_ADD || op == OP_SUB || op == OP_MUL || op == OP_DIV || op == OP_COMP_MUL;
+}
+
+static bool isOperationVectorVector (MatrixOp op)
+{
+	return op == OP_OUTER_PRODUCT;
+}
+
+static bool isOperationUnaryAnyMatrix (MatrixOp op)
+{
+	return  op == OP_TRANSPOSE			 ||
+			op == OP_UNARY_PLUS			 ||
+			op == OP_NEGATION			 ||
+			op == OP_PRE_INCREMENT		 ||
+			op == OP_PRE_DECREMENT		 ||
+			op == OP_POST_INCREMENT		 ||
+			op == OP_POST_DECREMENT;
+}
+
+static bool isOperationUnarySymmetricMatrix (MatrixOp op)
+{
+	return op == OP_INVERSE || op == OP_DETERMINANT;
+}
+
+static bool isOperationValueModifying (MatrixOp op)
+{
+	return  op == OP_PRE_INCREMENT		 ||
+			op == OP_PRE_DECREMENT		 ||
+			op == OP_POST_INCREMENT		 ||
+			op == OP_POST_DECREMENT;
+}
+
+static bool isOperationAssignment (MatrixOp op)
+{
+	return  op == OP_ADD_INTO		 ||
+			op == OP_SUBTRACT_FROM	 ||
+			op == OP_MULTIPLY_INTO	 ||
+			op == OP_DIVIDE_INTO;
+}
+
+static bool isOperationAssignmentAnyMatrix (MatrixOp op)
+{
+	return  op == OP_ADD_INTO		 ||
+			op == OP_SUBTRACT_FROM	 ||
+			op == OP_DIVIDE_INTO;
+}
+
+static bool isOperationAssignmentSymmetricMatrix (MatrixOp op)
+{
+	return op == OP_MULTIPLY_INTO;
+}
+
+// Operation nature
+
+enum OperationNature
+{
+	OPERATIONNATURE_PURE = 0,
+	OPERATIONNATURE_MUTATING,
+	OPERATIONNATURE_ASSIGNMENT,
+
+	OPERATIONNATURE_LAST
+};
+
+static OperationNature getOperationNature (MatrixOp op)
+{
+	if (isOperationAssignment(op))
+		return OPERATIONNATURE_ASSIGNMENT;
+
+	if (isOperationValueModifying(op))
+		return OPERATIONNATURE_MUTATING;
+
+	return OPERATIONNATURE_PURE;
+}
+
+// Input value loader.
+
+template <int InputT, int DataT>
+typename TypeTraits<DataT>::Type getInputValue (const ShaderEvalContext& evalCtx, int inputNdx);
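+// INPUTTYPE_CONST specializations read from the s_constIn* tables above; INPUTTYPE_DYNAMIC
+// specializations rebuild the value from evalCtx.coords or the evalCtx.in[] inputs.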
+
+template <> inline float		getInputValue<INPUTTYPE_CONST,		TYPE_FLOAT>			(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(evalCtx); return s_constInFloat[inputNdx];	}
+template <> inline tcu::Vec2	getInputValue<INPUTTYPE_CONST,		TYPE_FLOAT_VEC2>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(evalCtx); return s_constInVec2[inputNdx];	}
+template <> inline tcu::Vec3	getInputValue<INPUTTYPE_CONST,		TYPE_FLOAT_VEC3>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(evalCtx); return s_constInVec3[inputNdx];	}
+template <> inline tcu::Vec4	getInputValue<INPUTTYPE_CONST,		TYPE_FLOAT_VEC4>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(evalCtx); return s_constInVec4[inputNdx];	}
+
+template <> inline tcu::Mat2	getInputValue<INPUTTYPE_CONST,		TYPE_FLOAT_MAT2>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(evalCtx); return tcu::Mat2(s_constInMat2x2[inputNdx]);		}
+template <> inline tcu::Mat2x3	getInputValue<INPUTTYPE_CONST,		TYPE_FLOAT_MAT2X3>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(evalCtx); return tcu::Mat2x3(s_constInMat2x3[inputNdx]);	}
+template <> inline tcu::Mat2x4	getInputValue<INPUTTYPE_CONST,		TYPE_FLOAT_MAT2X4>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(evalCtx); return tcu::Mat2x4(s_constInMat2x4[inputNdx]);	}
+template <> inline tcu::Mat3x2	getInputValue<INPUTTYPE_CONST,		TYPE_FLOAT_MAT3X2>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(evalCtx); return tcu::Mat3x2(s_constInMat3x2[inputNdx]);	}
+template <> inline tcu::Mat3	getInputValue<INPUTTYPE_CONST,		TYPE_FLOAT_MAT3>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(evalCtx); return tcu::Mat3(s_constInMat3x3[inputNdx]);		}
+template <> inline tcu::Mat3x4	getInputValue<INPUTTYPE_CONST,		TYPE_FLOAT_MAT3X4>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(evalCtx); return tcu::Mat3x4(s_constInMat3x4[inputNdx]);	}
+template <> inline tcu::Mat4x2	getInputValue<INPUTTYPE_CONST,		TYPE_FLOAT_MAT4X2>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(evalCtx); return tcu::Mat4x2(s_constInMat4x2[inputNdx]);	}
+template <> inline tcu::Mat4x3	getInputValue<INPUTTYPE_CONST,		TYPE_FLOAT_MAT4X3>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(evalCtx); return tcu::Mat4x3(s_constInMat4x3[inputNdx]);	}
+template <> inline tcu::Mat4	getInputValue<INPUTTYPE_CONST,		TYPE_FLOAT_MAT4>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(evalCtx); return tcu::Mat4(s_constInMat4x4[inputNdx]);		}
+
+template <> inline float		getInputValue<INPUTTYPE_DYNAMIC,	TYPE_FLOAT>			(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(inputNdx); return evalCtx.coords.x();					}
+template <> inline tcu::Vec2	getInputValue<INPUTTYPE_DYNAMIC,	TYPE_FLOAT_VEC2>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(inputNdx); return evalCtx.coords.swizzle(0, 1);			}
+template <> inline tcu::Vec3	getInputValue<INPUTTYPE_DYNAMIC,	TYPE_FLOAT_VEC3>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(inputNdx); return evalCtx.coords.swizzle(0, 1, 2);		}
+template <> inline tcu::Vec4	getInputValue<INPUTTYPE_DYNAMIC,	TYPE_FLOAT_VEC4>	(const ShaderEvalContext& evalCtx, int inputNdx) { DE_UNREF(inputNdx); return evalCtx.coords.swizzle(0, 1, 2, 3);	}
+
+template <> inline tcu::Mat2 getInputValue<INPUTTYPE_DYNAMIC, TYPE_FLOAT_MAT2> (const ShaderEvalContext& evalCtx, int inputNdx)
+{
+	DE_UNREF(inputNdx); // Not used.
+	tcu::Mat2 m;
+	m.setColumn(0, evalCtx.in[0].swizzle(0,1));
+	m.setColumn(1, evalCtx.in[1].swizzle(0,1));
+	return m;
+}
+
+template <> inline tcu::Mat2x3 getInputValue<INPUTTYPE_DYNAMIC, TYPE_FLOAT_MAT2X3> (const ShaderEvalContext& evalCtx, int inputNdx)
+{
+	DE_UNREF(inputNdx); // Not used.
+	tcu::Mat2x3 m;
+	m.setColumn(0, evalCtx.in[0].swizzle(0,1,2));
+	m.setColumn(1, evalCtx.in[1].swizzle(0,1,2));
+	return m;
+}
+
+template <> inline tcu::Mat2x4 getInputValue<INPUTTYPE_DYNAMIC, TYPE_FLOAT_MAT2X4> (const ShaderEvalContext& evalCtx, int inputNdx)
+{
+	DE_UNREF(inputNdx); // Not used.
+	tcu::Mat2x4 m;
+	m.setColumn(0, evalCtx.in[0]);
+	m.setColumn(1, evalCtx.in[1]);
+	return m;
+}
+
+template <> inline tcu::Mat3x2 getInputValue<INPUTTYPE_DYNAMIC, TYPE_FLOAT_MAT3X2> (const ShaderEvalContext& evalCtx, int inputNdx)
+{
+	DE_UNREF(inputNdx); // Not used.
+	tcu::Mat3x2 m;
+	m.setColumn(0, evalCtx.in[0].swizzle(0,1));
+	m.setColumn(1, evalCtx.in[1].swizzle(0,1));
+	m.setColumn(2, evalCtx.in[2].swizzle(0,1));
+	return m;
+}
+
+template <> inline tcu::Mat3 getInputValue<INPUTTYPE_DYNAMIC, TYPE_FLOAT_MAT3> (const ShaderEvalContext& evalCtx, int inputNdx)
+{
+	DE_UNREF(inputNdx); // Not used.
+	tcu::Mat3 m;
+	m.setColumn(0, evalCtx.in[0].swizzle(0,1,2));
+	m.setColumn(1, evalCtx.in[1].swizzle(0,1,2));
+	m.setColumn(2, evalCtx.in[2].swizzle(0,1,2));
+	return m;
+}
+
+template <> inline tcu::Mat3x4 getInputValue<INPUTTYPE_DYNAMIC, TYPE_FLOAT_MAT3X4> (const ShaderEvalContext& evalCtx, int inputNdx)
+{
+	DE_UNREF(inputNdx); // Not used.
+	tcu::Mat3x4 m;
+	m.setColumn(0, evalCtx.in[0]);
+	m.setColumn(1, evalCtx.in[1]);
+	m.setColumn(2, evalCtx.in[2]);
+	return m;
+}
+
+template <> inline tcu::Mat4x2 getInputValue<INPUTTYPE_DYNAMIC, TYPE_FLOAT_MAT4X2> (const ShaderEvalContext& evalCtx, int inputNdx)
+{
+	DE_UNREF(inputNdx); // Not used.
+	tcu::Mat4x2 m;
+	m.setColumn(0, evalCtx.in[0].swizzle(0,1));
+	m.setColumn(1, evalCtx.in[1].swizzle(0,1));
+	m.setColumn(2, evalCtx.in[2].swizzle(0,1));
+	m.setColumn(3, evalCtx.in[3].swizzle(0,1));
+	return m;
+}
+
+template <> inline tcu::Mat4x3 getInputValue<INPUTTYPE_DYNAMIC, TYPE_FLOAT_MAT4X3> (const ShaderEvalContext& evalCtx, int inputNdx)
+{
+	DE_UNREF(inputNdx); // Not used.
+	tcu::Mat4x3 m;
+	m.setColumn(0, evalCtx.in[0].swizzle(0,1,2));
+	m.setColumn(1, evalCtx.in[1].swizzle(0,1,2));
+	m.setColumn(2, evalCtx.in[2].swizzle(0,1,2));
+	m.setColumn(3, evalCtx.in[3].swizzle(0,1,2));
+	return m;
+}
+
+template <> inline tcu::Mat4 getInputValue<INPUTTYPE_DYNAMIC, TYPE_FLOAT_MAT4> (const ShaderEvalContext& evalCtx, int inputNdx)
+{
+	DE_UNREF(inputNdx); // Not used.
+	tcu::Mat4 m;
+	m.setColumn(0, evalCtx.in[0]);
+	m.setColumn(1, evalCtx.in[1]);
+	m.setColumn(2, evalCtx.in[2]);
+	m.setColumn(3, evalCtx.in[3]);
+	return m;
+}
+
+// Reduction from expression result to vec3.
+
+inline tcu::Vec3 reduceToVec3 (const tcu::Vec2& value)		{ return value.swizzle(0,1,0); }
+inline tcu::Vec3 reduceToVec3 (const tcu::Vec3& value)		{ return value; }
+inline tcu::Vec3 reduceToVec3 (const tcu::Vec4& value)		{ return tcu::Vec3(value.x(), value.y(), value.z()+value.w()); }
+inline tcu::Vec3 reduceToVec3 (const tcu::Mat2& value)		{ return tcu::Vec3(value(0, 0), value(0, 1), value(1, 0)+value(1, 1)); }
+inline tcu::Vec3 reduceToVec3 (const tcu::Mat2x3& value)	{ return value.getColumn(0) + value.getColumn(1); }
+inline tcu::Vec3 reduceToVec3 (const tcu::Mat2x4& value)	{ return value.getColumn(0).swizzle(0,1,2) + value.getColumn(1).swizzle(1,2,3); }
+inline tcu::Vec3 reduceToVec3 (const tcu::Mat3x2& value)	{ return tcu::Vec3(value(0,0)+value(1,0), value(0,1)+value(1,1), value(0,2)+value(1,2)); }
+inline tcu::Vec3 reduceToVec3 (const tcu::Mat3& value)		{ return value.getColumn(0) + value.getColumn(1) + value.getColumn(2); }
+inline tcu::Vec3 reduceToVec3 (const tcu::Mat3x4& value)	{ return value.getColumn(0).swizzle(0,1,2) + value.getColumn(1).swizzle(1,2,3) + value.getColumn(2).swizzle(2,3,0); }
+inline tcu::Vec3 reduceToVec3 (const tcu::Mat4x2& value)	{ return tcu::Vec3(value(0,0)+value(1,0)+value(0,3), value(0,1)+value(1,1)+value(1,3), value(0,2)+value(1,2)); }
+inline tcu::Vec3 reduceToVec3 (const tcu::Mat4x3& value)	{ return value.getColumn(0) + value.getColumn(1) + value.getColumn(2) + value.getColumn(3); }
+inline tcu::Vec3 reduceToVec3 (const tcu::Mat4& value)		{ return value.getColumn(0).swizzle(0,1,2) + value.getColumn(1).swizzle(1,2,3) + value.getColumn(2).swizzle(2,3,0) + value.getColumn(3).swizzle(3,0,1); }
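+// Each overload above folds the operation result into three floats; the evaluators below write this
+// reduction to evalCtx.color so it can be compared against the rendered result.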
+
+// matrixCompMult
+
+template <typename T, int Rows, int Cols>
+tcu::Matrix<T, Rows, Cols> matrixCompMult (const tcu::Matrix<T, Rows, Cols>& a, const tcu::Matrix<T, Rows, Cols>& b)
+{
+	tcu::Matrix<T, Rows, Cols> retVal;
+
+	for (int r = 0; r < Rows; ++r)
+		for (int c = 0; c < Cols; ++c)
+			retVal(r,c) = a(r,c) * b(r, c);
+
+	return retVal;
+}
+
+// transpose
+
+template <typename T, int Rows, int Cols>
+tcu::Matrix<T, Cols, Rows> transpose (const tcu::Matrix<T, Rows, Cols>& mat)
+{
+	tcu::Matrix<T, Cols, Rows> retVal;
+
+	for (int r = 0; r < Rows; ++r)
+		for (int c = 0; c < Cols; ++c)
+			retVal(c, r) = mat(r, c);
+
+	return retVal;
+}
+
+// outerProduct
+
+template <typename T, int Rows, int Cols>
+tcu::Matrix<T, Cols, Rows> outerProduct (const tcu::Vector<T, Cols>& a, const tcu::Vector<T, Rows>& b)
+{
+	tcu::Matrix<T, Rows, Cols> retVal;
+
+	for (int r = 0; r < Rows; ++r)
+		for (int c = 0; c < Cols; ++c)
+			retVal(r,c) = a[c] * b[r];
+
+	return transpose(retVal); // to gl-form (column-major)
+}
+
+// Determinant
+
+template <int Size>
+float determinant (const tcu::Matrix<float, Size, Size>& mat);
+
+template <>
+float determinant<2> (const tcu::Matrix<float, 2, 2>& mat)
+{
+	return mat(0,0) * mat(1,1) - mat(1,0) * mat(0,1);
+}
+
+template <>
+float determinant<3> (const tcu::Matrix<float, 3, 3>& mat)
+{
+	return	+ mat(0,0) * mat(1,1) * mat(2,2)
+			+ mat(0,1) * mat(1,2) * mat(2,0)
+			+ mat(0,2) * mat(1,0) * mat(2,1)
+			- mat(0,0) * mat(1,2) * mat(2,1)
+			- mat(0,1) * mat(1,0) * mat(2,2)
+			- mat(0,2) * mat(1,1) * mat(2,0);
+}
+
+template <>
+float determinant<4> (const tcu::Matrix<float, 4, 4>& mat)
+{
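+	// Cofactor expansion along the first row:
+	// det(mat) = mat(0,0)*det(minor0) - mat(0,1)*det(minor1) + mat(0,2)*det(minor2) - mat(0,3)*det(minor3),
+	// where minor_j is the 3x3 matrix obtained by removing row 0 and column j.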
+	const float minorMatrices[4][3*3] =
+	{
+		{
+			mat(1,1),	mat(2,1),	mat(3,1),
+			mat(1,2),	mat(2,2),	mat(3,2),
+			mat(1,3),	mat(2,3),	mat(3,3),
+		},
+		{
+			mat(1,0),	mat(2,0),	mat(3,0),
+			mat(1,2),	mat(2,2),	mat(3,2),
+			mat(1,3),	mat(2,3),	mat(3,3),
+		},
+		{
+			mat(1,0),	mat(2,0),	mat(3,0),
+			mat(1,1),	mat(2,1),	mat(3,1),
+			mat(1,3),	mat(2,3),	mat(3,3),
+		},
+		{
+			mat(1,0),	mat(2,0),	mat(3,0),
+			mat(1,1),	mat(2,1),	mat(3,1),
+			mat(1,2),	mat(2,2),	mat(3,2),
+		}
+	};
+
+	return	+ mat(0,0) * determinant(tcu::Mat3(minorMatrices[0]))
+			- mat(0,1) * determinant(tcu::Mat3(minorMatrices[1]))
+			+ mat(0,2) * determinant(tcu::Mat3(minorMatrices[2]))
+			- mat(0,3) * determinant(tcu::Mat3(minorMatrices[3]));
+}
+
+// Inverse
+
+template <int Size>
+tcu::Matrix<float, Size, Size> inverse (const tcu::Matrix<float, Size, Size>& mat);
+
+template <>
+tcu::Matrix<float, 2, 2> inverse<2> (const tcu::Matrix<float, 2, 2>& mat)
+{
+	const float					det		= determinant(mat);
+	tcu::Matrix<float, 2, 2>	retVal;
+
+	DE_ASSERT(det != 0.0f);
+
+	retVal(0, 0) =  mat(1, 1) / det;
+	retVal(0, 1) = -mat(0, 1) / det;
+	retVal(1, 0) = -mat(1, 0) / det;
+	retVal(1, 1) =  mat(0, 0) / det;
+
+	return retVal;
+}
+
+template <>
+tcu::Matrix<float, 3, 3> inverse<3> (const tcu::Matrix<float, 3, 3>& mat)
+{
+	// Blockwise inversion
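+	// Partition mat as [[A B], [C D]] with A 2x2, B 2x1, C 1x2 and D 1x1, and invert using the
+	// Schur complement S = D - C*A^-1*B:
+	//   inv(mat) = [[A^-1 + A^-1*B*S^-1*C*A^-1,  -A^-1*B*S^-1],
+	//               [       -S^-1*C*A^-1,             S^-1   ]]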
+
+	DE_ASSERT(determinant(mat) != 0.0f);
+
+	const float areaA[2*2] =
+	{
+		mat(0,0),	mat(0,1),
+		mat(1,0),	mat(1,1)
+	};
+	const float areaB[2] =
+	{
+		mat(0,2),
+		mat(1,2),
+	};
+	const float areaC[2] =
+	{
+		mat(2,0),	mat(2,1),
+	};
+	const float areaD[1] =
+	{
+		mat(2,2)
+	};
+	const float nullField[4] = { 0.0f };
+
+	const tcu::Matrix<float, 2, 2>	invA = inverse(tcu::Matrix<float, 2, 2>(areaA));
+	const tcu::Matrix<float, 2, 1>	matB =         tcu::Matrix<float, 2, 1>(areaB);
+	const tcu::Matrix<float, 1, 2>	matC =         tcu::Matrix<float, 1, 2>(areaC);
+	const tcu::Matrix<float, 1, 1>	matD =         tcu::Matrix<float, 1, 1>(areaD);
+
+	const float						schurComplement = 1.0f / (matD - matC*invA*matB)(0,0);
+	const tcu::Matrix<float, 2, 2>	zeroMat         = Mat2(nullField);
+
+	const tcu::Matrix<float, 2, 2>	blockA = invA + invA*matB*schurComplement*matC*invA;
+	const tcu::Matrix<float, 2, 1>	blockB = (zeroMat-invA)*matB*schurComplement;
+	const tcu::Matrix<float, 1, 2>	blockC = matC*invA*(-schurComplement);
+	const float						blockD = schurComplement;
+
+	const float result[3*3] =
+	{
+		blockA(0,0),	blockA(0,1),	blockB(0,0),
+		blockA(1,0),	blockA(1,1),	blockB(1,0),
+		blockC(0,0),	blockC(0,1),	blockD,
+	};
+
+	return Mat3(result);
+}
+
+template <>
+tcu::Matrix<float, 4, 4> inverse<4> (const tcu::Matrix<float, 4, 4>& mat)
+{
+	// Blockwise inversion
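+	// Same blockwise Schur-complement inversion as inverse<3>(), but with four 2x2 blocks; here the
+	// 'schurComplement' variable holds S^-1 = (D - C*A^-1*B)^-1.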
+
+	DE_ASSERT(determinant(mat) != 0.0f);
+
+	const float areaA[2*2] =
+	{
+		mat(0,0),	mat(0,1),
+		mat(1,0),	mat(1,1)
+	};
+	const float areaB[2*2] =
+	{
+		mat(0,2),	mat(0,3),
+		mat(1,2),	mat(1,3)
+	};
+	const float areaC[2*2] =
+	{
+		mat(2,0),	mat(2,1),
+		mat(3,0),	mat(3,1)
+	};
+	const float areaD[2*2] =
+	{
+		mat(2,2),	mat(2,3),
+		mat(3,2),	mat(3,3)
+	};
+	const float nullField[4] = { 0.0f };
+
+	const tcu::Matrix<float, 2, 2> invA = inverse(Mat2(areaA));
+	const tcu::Matrix<float, 2, 2> matB =         Mat2(areaB);
+	const tcu::Matrix<float, 2, 2> matC =         Mat2(areaC);
+	const tcu::Matrix<float, 2, 2> matD =         Mat2(areaD);
+
+	const tcu::Matrix<float, 2, 2> schurComplement = inverse(matD - matC*invA*matB);
+	const tcu::Matrix<float, 2, 2> zeroMat         = Mat2(nullField);
+
+	const tcu::Matrix<float, 2, 2> blockA = invA + invA*matB*schurComplement*matC*invA;
+	const tcu::Matrix<float, 2, 2> blockB = (zeroMat-invA)*matB*schurComplement;
+	const tcu::Matrix<float, 2, 2> blockC = (zeroMat-schurComplement)*matC*invA;
+	const tcu::Matrix<float, 2, 2> blockD = schurComplement;
+
+	const float result[4*4] =
+	{
+		blockA(0,0),	blockA(0,1),	blockB(0,0),	blockB(0,1),
+		blockA(1,0),	blockA(1,1),	blockB(1,0),	blockB(1,1),
+		blockC(0,0),	blockC(0,1),	blockD(0,0),	blockD(0,1),
+		blockC(1,0),	blockC(1,1),	blockD(1,0),	blockD(1,1),
+	};
+
+	return Mat4(result);
+}
+
+// negate
+
+template <typename T, int Rows, int Cols>
+tcu::Matrix<T, Rows, Cols> negate (const tcu::Matrix<T, Rows, Cols>& mat)
+{
+	tcu::Matrix<T, Rows, Cols> retVal;
+
+	for (int r = 0; r < Rows; ++r)
+		for (int c = 0; c < Cols; ++c)
+			retVal(r,c) = -mat(r, c);
+
+	return retVal;
+}
+
+// increment/decrement
+
+template <typename T, int Rows, int Cols>
+tcu::Matrix<T, Rows, Cols> increment (const tcu::Matrix<T, Rows, Cols>& mat)
+{
+	tcu::Matrix<T, Rows, Cols> retVal;
+
+	for (int r = 0; r < Rows; ++r)
+		for (int c = 0; c < Cols; ++c)
+			retVal(r,c) = mat(r, c) + 1.0f;
+
+	return retVal;
+}
+
+template <typename T, int Rows, int Cols>
+tcu::Matrix<T, Rows, Cols> decrement (const tcu::Matrix<T, Rows, Cols>& mat)
+{
+	tcu::Matrix<T, Rows, Cols> retVal;
+
+	for (int r = 0; r < Rows; ++r)
+		for (int c = 0; c < Cols; ++c)
+			retVal(r,c) = mat(r, c) - 1.0f;
+
+	return retVal;
+}
+
+// Evaluator template.
+
+typedef void (*MatrixShaderEvalFunc) (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type);
+
+template <int Op, int In0DataType, int In1DataType>
+struct Evaluator;
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_ADD, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		typename TypeTraits<In1DataType>::Type	in1	= (in1Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In1DataType>(evalCtx, 1)
+																				     : getInputValue<INPUTTYPE_CONST,	In1DataType>(evalCtx, 1);
+		evalCtx.color.xyz() = reduceToVec3(in0 + in1);
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_SUB, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		typename TypeTraits<In1DataType>::Type	in1	= (in1Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In1DataType>(evalCtx, 1)
+																				     : getInputValue<INPUTTYPE_CONST,	In1DataType>(evalCtx, 1);
+		evalCtx.color.xyz() = reduceToVec3(in0 - in1);
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_MUL, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		typename TypeTraits<In1DataType>::Type	in1	= (in1Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In1DataType>(evalCtx, 1)
+																				     : getInputValue<INPUTTYPE_CONST,	In1DataType>(evalCtx, 1);
+		evalCtx.color.xyz() = reduceToVec3(in0 * in1);
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_DIV, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		typename TypeTraits<In1DataType>::Type	in1	= (in1Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In1DataType>(evalCtx, 1)
+																				     : getInputValue<INPUTTYPE_CONST,	In1DataType>(evalCtx, 1);
+		evalCtx.color.xyz() = reduceToVec3(in0 / in1);
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_COMP_MUL, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		typename TypeTraits<In1DataType>::Type	in1	= (in1Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In1DataType>(evalCtx, 1)
+																				     : getInputValue<INPUTTYPE_CONST,	In1DataType>(evalCtx, 1);
+		evalCtx.color.xyz() = reduceToVec3(matrixCompMult(in0, in1));
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_OUTER_PRODUCT, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		typename TypeTraits<In1DataType>::Type	in1	= (in1Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In1DataType>(evalCtx, 1)
+																				     : getInputValue<INPUTTYPE_CONST,	In1DataType>(evalCtx, 1);
+		evalCtx.color.xyz() = reduceToVec3(outerProduct(in0, in1));
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_TRANSPOSE, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		DE_UNREF(in1Type);
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		evalCtx.color.xyz() = reduceToVec3(transpose(in0));
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_INVERSE, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		DE_UNREF(in1Type);
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		evalCtx.color.xyz() = reduceToVec3(inverse(in0));
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_DETERMINANT, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		DE_UNREF(in1Type);
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		evalCtx.color.xyz() = Vec3(determinant(in0));
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_UNARY_PLUS, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		DE_UNREF(in1Type);
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		evalCtx.color.xyz() = reduceToVec3(in0);
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_NEGATION, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		DE_UNREF(in1Type);
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		evalCtx.color.xyz() = reduceToVec3(negate(in0));
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_PRE_INCREMENT, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		DE_UNREF(in1Type);
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+
+		// modifying reduction: both the expression value and the operand after ++ equal increment(in0), so sum both
+		evalCtx.color.xyz() = reduceToVec3(increment(in0)) + reduceToVec3(increment(in0));
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_PRE_DECREMENT, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		DE_UNREF(in1Type);
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+
+		// modifying reduction: both the expression value and the operand after -- equal decrement(in0), so sum both
+		evalCtx.color.xyz() = reduceToVec3(decrement(in0)) + reduceToVec3(decrement(in0));
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_POST_INCREMENT, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		DE_UNREF(in1Type);
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+
+		// modifying reduction: the expression value is the original in0 and the operand afterwards is increment(in0); sum both
+		evalCtx.color.xyz() = reduceToVec3(in0) + reduceToVec3(increment(in0));
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_POST_DECREMENT, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		DE_UNREF(in1Type);
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+
+		// modifying reduction: sum modified value too
+		evalCtx.color.xyz() = reduceToVec3(in0) + reduceToVec3(decrement(in0));
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_ADD_INTO, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		typename TypeTraits<In1DataType>::Type	in1	= (in1Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In1DataType>(evalCtx, 1)
+																				     : getInputValue<INPUTTYPE_CONST,	In1DataType>(evalCtx, 1);
+		evalCtx.color.xyz() = reduceToVec3(in0 + in1);
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_SUBTRACT_FROM, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		typename TypeTraits<In1DataType>::Type	in1	= (in1Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In1DataType>(evalCtx, 1)
+																				     : getInputValue<INPUTTYPE_CONST,	In1DataType>(evalCtx, 1);
+		evalCtx.color.xyz() = reduceToVec3(in0 - in1);
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_MULTIPLY_INTO, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		typename TypeTraits<In1DataType>::Type	in1	= (in1Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In1DataType>(evalCtx, 1)
+																				     : getInputValue<INPUTTYPE_CONST,	In1DataType>(evalCtx, 1);
+		evalCtx.color.xyz() = reduceToVec3(in0 * in1);
+	}
+};
+
+template <int In0DataType, int In1DataType>
+struct Evaluator<OP_DIVIDE_INTO, In0DataType, In1DataType>
+{
+	static void evaluate (ShaderEvalContext& evalCtx, InputType in0Type, InputType in1Type)
+	{
+		typename TypeTraits<In0DataType>::Type	in0	= (in0Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In0DataType>(evalCtx, 0)
+																				     : getInputValue<INPUTTYPE_CONST,	In0DataType>(evalCtx, 0);
+		typename TypeTraits<In1DataType>::Type	in1	= (in1Type == INPUTTYPE_DYNAMIC) ? getInputValue<INPUTTYPE_DYNAMIC, In1DataType>(evalCtx, 1)
+																				     : getInputValue<INPUTTYPE_CONST,	In1DataType>(evalCtx, 1);
+		evalCtx.color.xyz() = reduceToVec3(in0 / in1);
+	}
+};
+
+MatrixShaderEvalFunc getEvalFunc (const ShaderInput& in0, const ShaderInput& in1, MatrixOp op)
+{
+	// Evaluator is selected based on op and input data types.
+	// For efficient lookup the types and op enums are packed together to form a 19-bit key:
+	// [18..14 OP] [13..7 TYPE0] [6..0 TYPE1]
+
+	DE_STATIC_ASSERT(TYPE_LAST	<= (1<<7));
+	DE_STATIC_ASSERT(OP_LAST	<= (1<<5));
+
+#define PACK_EVAL_CASE(OP, IN0DATATYPE, IN1DATATYPE)	(((OP) << 14) | ((IN0DATATYPE) << 7) | (IN1DATATYPE))
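+// For example, PACK_EVAL_CASE(OP_MUL, TYPE_FLOAT_MAT4, TYPE_FLOAT_VEC4) produces a unique case label that selects the mat4 * vec4 evaluator below.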
+
+#define MAKE_EVAL_CASE(OP, IN0DATATYPE, IN1DATATYPE)	\
+	case PACK_EVAL_CASE(OP, IN0DATATYPE, IN1DATATYPE):	\
+		return Evaluator<OP, IN0DATATYPE, IN1DATATYPE>::evaluate
+
+#define MAKE_SCALAR_OPS(IN0DATATYPE, IN1DATATYPE)		\
+	MAKE_EVAL_CASE(OP_ADD, IN0DATATYPE, IN1DATATYPE);	\
+	MAKE_EVAL_CASE(OP_SUB, IN0DATATYPE, IN1DATATYPE);	\
+	MAKE_EVAL_CASE(OP_MUL, IN0DATATYPE, IN1DATATYPE);	\
+	MAKE_EVAL_CASE(OP_DIV, IN0DATATYPE, IN1DATATYPE)
+
+#define MAKE_CWISE_OPS(IN0DATATYPE, IN1DATATYPE)			\
+	MAKE_EVAL_CASE(OP_ADD,		IN0DATATYPE, IN1DATATYPE);	\
+	MAKE_EVAL_CASE(OP_SUB,		IN0DATATYPE, IN1DATATYPE);	\
+	MAKE_EVAL_CASE(OP_DIV,		IN0DATATYPE, IN1DATATYPE);	\
+	MAKE_EVAL_CASE(OP_COMP_MUL,	IN0DATATYPE, IN1DATATYPE)
+
+#define MAKE_MUL_OP(IN0DATATYPE, IN1DATATYPE)			\
+	MAKE_EVAL_CASE(OP_MUL, IN0DATATYPE, IN1DATATYPE)
+
+#define MAKE_VECVEC_OP(IN0DATATYPE, IN1DATATYPE)			\
+	MAKE_EVAL_CASE(OP_OUTER_PRODUCT, IN0DATATYPE, IN1DATATYPE)
+
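+// Unary operators take only one input; TYPE_LAST acts as a placeholder for the unused second operand when forming the lookup key.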
+#define MAKE_UNARY_OP(IN0DATATYPE)								\
+	MAKE_EVAL_CASE(OP_TRANSPOSE,		IN0DATATYPE, TYPE_LAST);	\
+	MAKE_EVAL_CASE(OP_UNARY_PLUS,		IN0DATATYPE, TYPE_LAST);	\
+	MAKE_EVAL_CASE(OP_NEGATION,			IN0DATATYPE, TYPE_LAST);	\
+	MAKE_EVAL_CASE(OP_PRE_INCREMENT,	IN0DATATYPE, TYPE_LAST);	\
+	MAKE_EVAL_CASE(OP_PRE_DECREMENT,	IN0DATATYPE, TYPE_LAST);	\
+	MAKE_EVAL_CASE(OP_POST_INCREMENT,	IN0DATATYPE, TYPE_LAST);	\
+	MAKE_EVAL_CASE(OP_POST_DECREMENT,	IN0DATATYPE, TYPE_LAST)
+
+#define MAKE_UNARY_SYMMETRIC_OP(IN0DATATYPE)					\
+	MAKE_UNARY_OP(IN0DATATYPE);									\
+	MAKE_EVAL_CASE(OP_DETERMINANT,	IN0DATATYPE, TYPE_LAST);	\
+	MAKE_EVAL_CASE(OP_INVERSE,		IN0DATATYPE, TYPE_LAST)
+
+#define MAKE_ASSIGNMENT_OP(IN0DATATYPE)								\
+	MAKE_EVAL_CASE(OP_ADD_INTO,			IN0DATATYPE, IN0DATATYPE);	\
+	MAKE_EVAL_CASE(OP_SUBTRACT_FROM,	IN0DATATYPE, IN0DATATYPE);	\
+	MAKE_EVAL_CASE(OP_DIVIDE_INTO,		IN0DATATYPE, IN0DATATYPE)
+
+#define MAKE_ASSIGNMENT_SYMMETRIC_OP(IN0DATATYPE)					\
+	MAKE_ASSIGNMENT_OP(IN0DATATYPE);								\
+	MAKE_EVAL_CASE(OP_MULTIPLY_INTO,	IN0DATATYPE, IN0DATATYPE)
+
+	switch (PACK_EVAL_CASE(op, in0.dataType, in1.dataType))
+	{
+		// Matrix-scalar.
+		MAKE_SCALAR_OPS(TYPE_FLOAT_MAT2,	TYPE_FLOAT);
+		MAKE_SCALAR_OPS(TYPE_FLOAT_MAT2X3,	TYPE_FLOAT);
+		MAKE_SCALAR_OPS(TYPE_FLOAT_MAT2X4,	TYPE_FLOAT);
+		MAKE_SCALAR_OPS(TYPE_FLOAT_MAT3X2,	TYPE_FLOAT);
+		MAKE_SCALAR_OPS(TYPE_FLOAT_MAT3,	TYPE_FLOAT);
+		MAKE_SCALAR_OPS(TYPE_FLOAT_MAT3X4,	TYPE_FLOAT);
+		MAKE_SCALAR_OPS(TYPE_FLOAT_MAT4X2,	TYPE_FLOAT);
+		MAKE_SCALAR_OPS(TYPE_FLOAT_MAT4X3,	TYPE_FLOAT);
+		MAKE_SCALAR_OPS(TYPE_FLOAT_MAT4,	TYPE_FLOAT);
+
+		// Matrix-vector.
+		MAKE_MUL_OP(TYPE_FLOAT_MAT2,	TYPE_FLOAT_VEC2);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT2X3,	TYPE_FLOAT_VEC2);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT2X4,	TYPE_FLOAT_VEC2);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT3X2,	TYPE_FLOAT_VEC3);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT3,	TYPE_FLOAT_VEC3);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT3X4,	TYPE_FLOAT_VEC3);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT4X2,	TYPE_FLOAT_VEC4);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT4X3,	TYPE_FLOAT_VEC4);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT4,	TYPE_FLOAT_VEC4);
+
+		// Vector-matrix.
+		MAKE_MUL_OP(TYPE_FLOAT_VEC2, TYPE_FLOAT_MAT2);
+		MAKE_MUL_OP(TYPE_FLOAT_VEC3, TYPE_FLOAT_MAT2X3);
+		MAKE_MUL_OP(TYPE_FLOAT_VEC4, TYPE_FLOAT_MAT2X4);
+		MAKE_MUL_OP(TYPE_FLOAT_VEC2, TYPE_FLOAT_MAT3X2);
+		MAKE_MUL_OP(TYPE_FLOAT_VEC3, TYPE_FLOAT_MAT3);
+		MAKE_MUL_OP(TYPE_FLOAT_VEC4, TYPE_FLOAT_MAT3X4);
+		MAKE_MUL_OP(TYPE_FLOAT_VEC2, TYPE_FLOAT_MAT4X2);
+		MAKE_MUL_OP(TYPE_FLOAT_VEC3, TYPE_FLOAT_MAT4X3);
+		MAKE_MUL_OP(TYPE_FLOAT_VEC4, TYPE_FLOAT_MAT4);
+
+		// Matrix-matrix.
+		MAKE_CWISE_OPS(TYPE_FLOAT_MAT2,		TYPE_FLOAT_MAT2);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT2,		TYPE_FLOAT_MAT2);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT2,		TYPE_FLOAT_MAT3X2);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT2,		TYPE_FLOAT_MAT4X2);
+
+		MAKE_CWISE_OPS(TYPE_FLOAT_MAT2X3,	TYPE_FLOAT_MAT2X3);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT2X3,		TYPE_FLOAT_MAT2);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT2X3,		TYPE_FLOAT_MAT3X2);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT2X3,		TYPE_FLOAT_MAT4X2);
+
+		MAKE_CWISE_OPS(TYPE_FLOAT_MAT2X4,	TYPE_FLOAT_MAT2X4);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT2X4,		TYPE_FLOAT_MAT2);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT2X4,		TYPE_FLOAT_MAT3X2);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT2X4,		TYPE_FLOAT_MAT4X2);
+
+		MAKE_CWISE_OPS(TYPE_FLOAT_MAT3X2,	TYPE_FLOAT_MAT3X2);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT3X2,		TYPE_FLOAT_MAT2X3);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT3X2,		TYPE_FLOAT_MAT3);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT3X2,		TYPE_FLOAT_MAT4X3);
+
+		MAKE_CWISE_OPS(TYPE_FLOAT_MAT3,		TYPE_FLOAT_MAT3);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT3,		TYPE_FLOAT_MAT2X3);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT3,		TYPE_FLOAT_MAT3);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT3,		TYPE_FLOAT_MAT4X3);
+
+		MAKE_CWISE_OPS(TYPE_FLOAT_MAT3X4,	TYPE_FLOAT_MAT3X4);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT3X4,		TYPE_FLOAT_MAT2X3);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT3X4,		TYPE_FLOAT_MAT3);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT3X4,		TYPE_FLOAT_MAT4X3);
+
+		MAKE_CWISE_OPS(TYPE_FLOAT_MAT4X2,	TYPE_FLOAT_MAT4X2);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT4X2,		TYPE_FLOAT_MAT2X4);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT4X2,		TYPE_FLOAT_MAT3X4);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT4X2,		TYPE_FLOAT_MAT4);
+
+		MAKE_CWISE_OPS(TYPE_FLOAT_MAT4X3,	TYPE_FLOAT_MAT4X3);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT4X3,		TYPE_FLOAT_MAT2X4);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT4X3,		TYPE_FLOAT_MAT3X4);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT4X3,		TYPE_FLOAT_MAT4);
+
+		MAKE_CWISE_OPS(TYPE_FLOAT_MAT4,		TYPE_FLOAT_MAT4);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT4,		TYPE_FLOAT_MAT2X4);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT4,		TYPE_FLOAT_MAT3X4);
+		MAKE_MUL_OP(TYPE_FLOAT_MAT4,		TYPE_FLOAT_MAT4);
+
+		// Vector-vector.
+		MAKE_VECVEC_OP(TYPE_FLOAT_VEC2,		TYPE_FLOAT_VEC2);
+		MAKE_VECVEC_OP(TYPE_FLOAT_VEC2,		TYPE_FLOAT_VEC3);
+		MAKE_VECVEC_OP(TYPE_FLOAT_VEC2,		TYPE_FLOAT_VEC4);
+		MAKE_VECVEC_OP(TYPE_FLOAT_VEC3,		TYPE_FLOAT_VEC2);
+		MAKE_VECVEC_OP(TYPE_FLOAT_VEC3,		TYPE_FLOAT_VEC3);
+		MAKE_VECVEC_OP(TYPE_FLOAT_VEC3,		TYPE_FLOAT_VEC4);
+		MAKE_VECVEC_OP(TYPE_FLOAT_VEC4,		TYPE_FLOAT_VEC2);
+		MAKE_VECVEC_OP(TYPE_FLOAT_VEC4,		TYPE_FLOAT_VEC3);
+		MAKE_VECVEC_OP(TYPE_FLOAT_VEC4,		TYPE_FLOAT_VEC4);
+
+		// Unary Matrix.
+		MAKE_UNARY_SYMMETRIC_OP(TYPE_FLOAT_MAT2);
+		MAKE_UNARY_OP(TYPE_FLOAT_MAT2X3);
+		MAKE_UNARY_OP(TYPE_FLOAT_MAT2X4);
+		MAKE_UNARY_OP(TYPE_FLOAT_MAT3X2);
+		MAKE_UNARY_SYMMETRIC_OP(TYPE_FLOAT_MAT3);
+		MAKE_UNARY_OP(TYPE_FLOAT_MAT3X4);
+		MAKE_UNARY_OP(TYPE_FLOAT_MAT4X2);
+		MAKE_UNARY_OP(TYPE_FLOAT_MAT4X3);
+		MAKE_UNARY_SYMMETRIC_OP(TYPE_FLOAT_MAT4);
+
+		// Assignments
+		MAKE_ASSIGNMENT_SYMMETRIC_OP(TYPE_FLOAT_MAT2);
+		MAKE_ASSIGNMENT_OP(TYPE_FLOAT_MAT2X3);
+		MAKE_ASSIGNMENT_OP(TYPE_FLOAT_MAT2X4);
+		MAKE_ASSIGNMENT_OP(TYPE_FLOAT_MAT3X2);
+		MAKE_ASSIGNMENT_SYMMETRIC_OP(TYPE_FLOAT_MAT3);
+		MAKE_ASSIGNMENT_OP(TYPE_FLOAT_MAT3X4);
+		MAKE_ASSIGNMENT_OP(TYPE_FLOAT_MAT4X2);
+		MAKE_ASSIGNMENT_OP(TYPE_FLOAT_MAT4X3);
+		MAKE_ASSIGNMENT_SYMMETRIC_OP(TYPE_FLOAT_MAT4);
+
+		default:
+			DE_ASSERT(DE_FALSE);
+			return DE_NULL;
+	}
+
+#undef PACK_EVAL_CASE
+#undef MAKE_EVAL_CASE
+#undef MAKE_SCALAR_OPS
+#undef MAKE_CWISE_OPS
+#undef MAKE_MUL_OP
+#undef MAKE_VECVEC_OP
+#undef MAKE_UNARY_OP
+#undef MAKE_UNARY_SYMMETRIC_OP
+#undef MAKE_ASSIGNMENT_OP
+#undef MAKE_ASSIGNMENT_SYMMETRIC_OP
+}
+
+// Shader source format utilities.
+
+template <int Size>
+void writeVectorConstructor (std::ostream& str, const tcu::Vector<float, Size>& v)
+{
+	str << "vec" << Size << "(";
+	for (int ndx = 0; ndx < Size; ndx++)
+	{
+		if (ndx != 0)
+			str << ", ";
+		str << de::floatToString(v[ndx], 1);
+	}
+	str << ")";
+}
+
+template <int Cols, int Rows>
+void writeMatrixConstructor (std::ostream& str, const tcu::Matrix<float, Rows, Cols>& m)
+{
+	if (Rows == Cols)
+		str << "mat" << Cols;
+	else
+		str << "mat" << Cols << "x" << Rows;
+
+	str << "(";
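+	// GLSL matCxR constructors take their components in column-major order, so emit one column at a time.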
+	for (int colNdx = 0; colNdx < Cols; colNdx++)
+	{
+		for (int rowNdx = 0; rowNdx < Rows; rowNdx++)
+		{
+			if (rowNdx > 0 || colNdx > 0)
+				str << ", ";
+			str << de::floatToString(m(rowNdx, colNdx), 1);
+		}
+	}
+	str << ")";
+}
+
+} // MatrixCaseUtils
+
+using namespace MatrixCaseUtils;
+
+class MatrixShaderEvaluator : public ShaderEvaluator
+{
+public:
+							MatrixShaderEvaluator	(MatrixShaderEvalFunc evalFunc, InputType inType0, InputType inType1);
+
+	virtual void			evaluate				(ShaderEvalContext& evalCtx) const;
+
+private:
+	MatrixShaderEvalFunc	m_matEvalFunc;
+	InputType				m_inType0;
+	InputType				m_inType1;
+};
+
+MatrixShaderEvaluator::MatrixShaderEvaluator (MatrixShaderEvalFunc evalFunc, InputType inType0, InputType inType1)
+	: m_matEvalFunc	(evalFunc)
+	, m_inType0		(inType0)
+	, m_inType1		(inType1)
+{
+}
+
+void MatrixShaderEvaluator::evaluate (ShaderEvalContext& evalCtx) const
+{
+	m_matEvalFunc(evalCtx, m_inType0, m_inType1);
+}
+
+
+BaseAttributeType getAttributeType(const glu::DataType dataType)
+{
+	switch(dataType)
+	{
+	case TYPE_FLOAT_MAT2:		return MAT2;
+	case TYPE_FLOAT_MAT2X3:		return MAT2x3;
+	case TYPE_FLOAT_MAT2X4:		return MAT2x4;
+	case TYPE_FLOAT_MAT3X2:		return MAT3x2;
+	case TYPE_FLOAT_MAT3:		return MAT3;
+	case TYPE_FLOAT_MAT3X4:		return MAT3x4;
+	case TYPE_FLOAT_MAT4X2:		return MAT4x2;
+	case TYPE_FLOAT_MAT4X3:		return MAT4x3;
+	case TYPE_FLOAT_MAT4:		return MAT4;
+	default:
+		TCU_THROW(InternalError, "Not supported");
+		break;
+	}
+}
+
+// ShaderMatrixInstance
+
+class ShaderMatrixInstance : public ShaderRenderCaseInstance
+{
+public:
+							ShaderMatrixInstance		(Context&				context,
+														 bool					isVertex,
+														 const ShaderEvaluator&	evaluator,
+														 const ShaderInput		in0,
+														 const ShaderInput		in1,
+														 const MatrixOp			op);
+	virtual					~ShaderMatrixInstance		(void);
+
+protected:
+	virtual void			setupUniforms				(const tcu::Vec4&);
+
+private:
+	void					addMatrixUniform			(deUint32 bindingLocation, DataType dataType, const float* dataPtr);
+
+	const ShaderInput		m_in0;
+	const ShaderInput		m_in1;
+	const MatrixOp			m_op;
+};
+
+ShaderMatrixInstance::ShaderMatrixInstance (Context&				context,
+											bool					isVertex,
+											const ShaderEvaluator&	evaluator,
+											const ShaderInput		in0,
+											const ShaderInput		in1,
+											const MatrixOp			op)
+	: ShaderRenderCaseInstance	(context, isVertex, evaluator, DE_NULL, DE_NULL)
+	, m_in0						(in0)
+	, m_in1						(in1)
+	, m_op						(op)
+{
+	m_userAttribTransforms.resize(4);
+	for (int attribNdx = 0; attribNdx < 4; attribNdx++)
+	{
+		m_userAttribTransforms[attribNdx] = Mat4(0.0f);
+		m_userAttribTransforms[attribNdx](                  0, 3) = 0.2f;								// !< prevent matrix*vec from going into zero (assuming vec.w != 0)
+		m_userAttribTransforms[attribNdx](                  1, 3) = 0.1f;								// !<
+		m_userAttribTransforms[attribNdx](                  2, 3) = 0.4f + 0.15f * float(attribNdx);	// !<
+		m_userAttribTransforms[attribNdx](                  3, 3) = 0.7f;								// !<
+		m_userAttribTransforms[attribNdx]((0 + attribNdx) % 4, 0) = 1.0f;
+		m_userAttribTransforms[attribNdx]((1 + attribNdx) % 4, 1) = 1.0f;
+		m_userAttribTransforms[attribNdx]((2 + attribNdx) % 4, 2) = 1.0f;
+		m_userAttribTransforms[attribNdx]((3 + attribNdx) % 4, 3) = 1.0f;
+	}
+
+	// prevent bad reference cases such as black result images by fine-tuning used matrices
+	if (getOperationTestMatrixType(m_op) != TESTMATRIXTYPE_DEFAULT)
+	{
+		for (int attribNdx = 0; attribNdx < 4; attribNdx++)
+		{
+			for (int row = 0; row < 4; row++)
+			for (int col = 0; col < 4; col++)
+			{
+				switch (getOperationTestMatrixType(m_op))
+				{
+					case TESTMATRIXTYPE_NEGATED:
+						m_userAttribTransforms[attribNdx](row, col) = -m_userAttribTransforms[attribNdx](row, col);
+						break;
+					case TESTMATRIXTYPE_INCREMENTED:
+						m_userAttribTransforms[attribNdx](row, col) += 0.3f;
+						break;
+					case TESTMATRIXTYPE_DECREMENTED:
+						m_userAttribTransforms[attribNdx](row, col) -= 0.3f;
+						break;
+					case TESTMATRIXTYPE_NEGATED_INCREMENTED:
+						m_userAttribTransforms[attribNdx](row, col) = -m_userAttribTransforms[attribNdx](row, col) + 0.3f;
+						break;
+					case TESTMATRIXTYPE_INCREMENTED_LESS:
+						m_userAttribTransforms[attribNdx](row, col) -= 0.1f;
+						break;
+
+					default:
+						DE_ASSERT(DE_FALSE);
+						break;
+				}
+			}
+		}
+	}
+
+	int	numInputs = isOperationBinary(m_op) ? 2 : 1;
+
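+	// Dynamic matrix inputs are supplied as user vertex attributes, starting at attribute index 4.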
+	for (int inNdx = 0; inNdx < numInputs; inNdx++)
+	{
+		const ShaderInput& in = inNdx > 0 ? m_in1 : m_in0;
+
+		if (in.inputType == INPUTTYPE_DYNAMIC && isDataTypeMatrix(in.dataType))
+		{
+			useAttribute(4u + inNdx, getAttributeType(in.dataType));
+		}
+	}
+
+}
+
+ShaderMatrixInstance::~ShaderMatrixInstance (void)
+{
+}
+
+void ShaderMatrixInstance::addMatrixUniform(deUint32 bindingLocation, DataType dataType, const float *dataPtr)
+{
+	Mat4			result;
+	const size_t	matrixSize = sizeof(float) * 4 * 4;
+
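+	// The input matrix is copied into a full 4x4 block and uploaded column-major. Under std140 each column of a matCxR is vec4-aligned, so the first columns of the padded block line up with the shader-side declaration and the remaining data is ignored.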
+	switch(dataType)
+	{
+		case TYPE_FLOAT_MAT2:
+		{
+			Mat2 matrix = Mat2(dataPtr);
+			result.setColumn(0, matrix.getColumn(0).toWidth<4>());
+			result.setColumn(1, matrix.getColumn(1).toWidth<4>());
+			break;
+		}
+		case TYPE_FLOAT_MAT2X3:
+		{
+			Mat2x3 matrix = Mat2x3(dataPtr);
+			result.setColumn(0, matrix.getColumn(0).toWidth<4>());
+			result.setColumn(1, matrix.getColumn(1).toWidth<4>());
+			break;
+		}
+		case TYPE_FLOAT_MAT2X4:
+		{
+			Mat2x4 matrix = Mat2x4(dataPtr);
+			result.setColumn(0, matrix.getColumn(0).toWidth<4>());
+			result.setColumn(1, matrix.getColumn(1).toWidth<4>());
+			break;
+		}
+		case TYPE_FLOAT_MAT3X2:
+		{
+			Mat3x2 matrix = Mat3x2(dataPtr);
+			result.setColumn(0, matrix.getColumn(0).toWidth<4>());
+			result.setColumn(1, matrix.getColumn(1).toWidth<4>());
+			result.setColumn(2, matrix.getColumn(2).toWidth<4>());
+			break;
+		}
+		case TYPE_FLOAT_MAT3:
+		{
+			Mat3 matrix = Mat3(dataPtr);
+			result.setColumn(0, matrix.getColumn(0).toWidth<4>());
+			result.setColumn(1, matrix.getColumn(1).toWidth<4>());
+			result.setColumn(2, matrix.getColumn(2).toWidth<4>());
+			break;
+		}
+		case TYPE_FLOAT_MAT3X4:
+		{
+			Mat3x4 matrix = Mat3x4(dataPtr);
+			result.setColumn(0, matrix.getColumn(0).toWidth<4>());
+			result.setColumn(1, matrix.getColumn(1).toWidth<4>());
+			result.setColumn(2, matrix.getColumn(2).toWidth<4>());
+			break;
+		}
+		case TYPE_FLOAT_MAT4X2:
+		{
+			Mat4x2 matrix = Mat4x2(dataPtr);
+			result.setColumn(0, matrix.getColumn(0).toWidth<4>());
+			result.setColumn(1, matrix.getColumn(1).toWidth<4>());
+			result.setColumn(2, matrix.getColumn(2).toWidth<4>());
+			result.setColumn(3, matrix.getColumn(3).toWidth<4>());
+			break;
+		}
+		case TYPE_FLOAT_MAT4X3:
+		{
+			Mat4x3 matrix = Mat4x3(dataPtr);
+			result.setColumn(0, matrix.getColumn(0).toWidth<4>());
+			result.setColumn(1, matrix.getColumn(1).toWidth<4>());
+			result.setColumn(2, matrix.getColumn(2).toWidth<4>());
+			result.setColumn(3, matrix.getColumn(3).toWidth<4>());
+			break;
+		}
+		case TYPE_FLOAT_MAT4:
+		{
+			Mat4 matrix = Mat4(dataPtr);
+			result.setColumn(0, matrix.getColumn(0).toWidth<4>());
+			result.setColumn(1, matrix.getColumn(1).toWidth<4>());
+			result.setColumn(2, matrix.getColumn(2).toWidth<4>());
+			result.setColumn(3, matrix.getColumn(3).toWidth<4>());
+			break;
+		}
+		default:
+			DE_ASSERT(false);
+			break;
+	}
+
+	addUniform(bindingLocation, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, matrixSize, result.getColumnMajorData().getPtr());
+}
+
+void ShaderMatrixInstance::setupUniforms (const tcu::Vec4&)
+{
+	const int	numInputs		= isOperationBinary(m_op) ? 2 : 1;
+	deUint32	uniformBinding	= 0;
+
+	for (int inNdx = 0; inNdx < numInputs; inNdx++)
+	{
+		const ShaderInput& in = inNdx > 0 ? m_in1 : m_in0;
+
+		if (in.inputType == INPUTTYPE_UNIFORM)
+		{
+			switch (in.dataType)
+			{
+				case TYPE_FLOAT:		addUniform(uniformBinding, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, sizeof(float), &s_constInFloat[inNdx]);					break;
+				case TYPE_FLOAT_VEC2:	addUniform(uniformBinding, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, s_constInVec2[inNdx]);			break;
+				case TYPE_FLOAT_VEC3:	addUniform(uniformBinding, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, s_constInVec3[inNdx]);			break;
+				case TYPE_FLOAT_VEC4:	addUniform(uniformBinding, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, s_constInVec4[inNdx]);			break;
+				// \note GLES3 supports transpose in matrix upload.
+				case TYPE_FLOAT_MAT2:	addMatrixUniform(uniformBinding, in.dataType, s_constInMat2x2[inNdx]);	break;
+				case TYPE_FLOAT_MAT2X3:	addMatrixUniform(uniformBinding, in.dataType, s_constInMat2x3[inNdx]);	break;
+				case TYPE_FLOAT_MAT2X4:	addMatrixUniform(uniformBinding, in.dataType, s_constInMat2x4[inNdx]);	break;
+				case TYPE_FLOAT_MAT3X2:	addMatrixUniform(uniformBinding, in.dataType, s_constInMat3x2[inNdx]);	break;
+				case TYPE_FLOAT_MAT3:	addMatrixUniform(uniformBinding, in.dataType, s_constInMat3x3[inNdx]);	break;
+				case TYPE_FLOAT_MAT3X4:	addMatrixUniform(uniformBinding, in.dataType, s_constInMat3x4[inNdx]);	break;
+				case TYPE_FLOAT_MAT4X2:	addMatrixUniform(uniformBinding, in.dataType, s_constInMat4x2[inNdx]);	break;
+				case TYPE_FLOAT_MAT4X3:	addMatrixUniform(uniformBinding, in.dataType, s_constInMat4x3[inNdx]);	break;
+				case TYPE_FLOAT_MAT4:	addMatrixUniform(uniformBinding, in.dataType, s_constInMat4x4[inNdx]);	break;
+				default:
+					DE_ASSERT(false);
+			}
+			uniformBinding++;
+		}
+	}
+}
+
+// ShaderMatrixCase
+
+class ShaderMatrixCase : public ShaderRenderCase
+{
+public:
+							ShaderMatrixCase			(tcu::TestContext&	testCtx,
+														 const std::string&	name,
+														 const std::string&	desc,
+														 const ShaderInput&	in0,
+														 const ShaderInput&	in1,
+														 const MatrixOp		op,
+														 bool				isVertexCase);
+							~ShaderMatrixCase			(void);
+
+	virtual TestInstance*	createInstance				(Context& context) const;
+
+protected:
+	void					setupShader					(void);
+	std::string				genGLSLMatToVec3Reduction	(const glu::DataType& matType, const char* varName);
+
+private:
+	const ShaderInput		m_in0;
+	const ShaderInput		m_in1;
+	const MatrixOp			m_op;
+};
+
+ShaderMatrixCase::ShaderMatrixCase (tcu::TestContext&	testCtx,
+									const std::string&	name,
+									const std::string&	desc,
+									const ShaderInput&	in0,
+									const ShaderInput&	in1,
+									MatrixOp			op,
+									bool				isVertexCase)
+	: ShaderRenderCase	(testCtx,
+						 name,
+						 desc,
+						 isVertexCase,
+						 new MatrixShaderEvaluator(getEvalFunc(in0, in1, op), in0.inputType, in1.inputType),
+						 DE_NULL /* uniform setup */,
+						 DE_NULL /* attribute setup */)
+	, m_in0				(in0)
+	, m_in1				(in1)
+	, m_op				(op)
+{
+	setupShader();
+}
+
+ShaderMatrixCase::~ShaderMatrixCase (void)
+{
+}
+
+TestInstance* ShaderMatrixCase::createInstance (Context& context) const
+{
+	return new ShaderMatrixInstance(context, m_isVertexCase, *m_evaluator, m_in0, m_in1, m_op);
+}
+
+void ShaderMatrixCase::setupShader (void)
+{
+	std::ostringstream	vtx;
+	std::ostringstream	frag;
+	std::ostringstream&	op				= m_isVertexCase ? vtx : frag;
+
+	bool				isInDynMat0		= isDataTypeMatrix(m_in0.dataType) && m_in0.inputType == INPUTTYPE_DYNAMIC;
+	bool				isInDynMat1		= isDataTypeMatrix(m_in1.dataType) && m_in1.inputType == INPUTTYPE_DYNAMIC;
+	string				inValue0;
+	string				inValue1;
+	DataType			resultType		= TYPE_LAST;
+	Precision			resultPrec		= m_in0.precision;
+	vector<string>		passVars;
+	int					numInputs		= (isOperationBinary(m_op)) ? (2) : (1);
+
+	std::string			operationValue0;
+	std::string			operationValue1;
+
+	DE_ASSERT(!isInDynMat0 || !isInDynMat1); // Only single dynamic matrix input is allowed.
+	DE_UNREF(isInDynMat0 && isInDynMat1);
+
+	// Compute result type.
+	if (m_op == OP_MUL && isDataTypeMatrix(m_in0.dataType) && isDataTypeMatrix(m_in1.dataType))
+	{
+		resultType = getDataTypeMatrix(getDataTypeMatrixNumColumns(m_in1.dataType), getDataTypeMatrixNumRows(m_in0.dataType));
+	}
+	else if (m_op == OP_OUTER_PRODUCT)
+	{
+		resultType = getDataTypeMatrix(getDataTypeScalarSize(m_in1.dataType), getDataTypeScalarSize(m_in0.dataType));
+	}
+	else if (m_op == OP_TRANSPOSE)
+	{
+		resultType = getDataTypeMatrix(getDataTypeMatrixNumRows(m_in0.dataType), getDataTypeMatrixNumColumns(m_in0.dataType));
+	}
+	else if (m_op == OP_INVERSE)
+	{
+		resultType = m_in0.dataType;
+	}
+	else if (m_op == OP_DETERMINANT)
+	{
+		resultType = TYPE_FLOAT;
+	}
+	else if (getOperationType(m_op) == OPERATIONTYPE_UNARY_PREFIX_OPERATOR ||
+			 getOperationType(m_op) == OPERATIONTYPE_UNARY_POSTFIX_OPERATOR)
+	{
+		resultType = m_in0.dataType;
+	}
+	else if (isDataTypeMatrix(m_in0.dataType) && isDataTypeMatrix(m_in1.dataType))
+	{
+		DE_ASSERT(m_in0.dataType == m_in1.dataType);
+		resultType = m_in0.dataType;
+	}
+	else if (isDataTypeMatrix(m_in0.dataType) || isDataTypeMatrix(m_in1.dataType))
+	{
+		int			matNdx		= isDataTypeMatrix(m_in0.dataType) ? 0 : 1;
+		DataType	matrixType	= matNdx == 0 ? m_in0.dataType : m_in1.dataType;
+		DataType	otherType	= matNdx == 0 ? m_in1.dataType : m_in0.dataType;
+
+		if (otherType == TYPE_FLOAT)
+			resultType = matrixType;
+		else
+		{
+			DE_ASSERT(isDataTypeVector(otherType));
+			resultType = getDataTypeFloatVec(matNdx == 0 ? getDataTypeMatrixNumRows(matrixType) : getDataTypeMatrixNumColumns(matrixType));
+		}
+	}
+	else
+	{
+		DE_ASSERT(DE_FALSE);
+	}
+
+	static const std::string header =
+		"#version 310 es\n";
+
+	vtx << header;
+	frag << header;
+
+	vtx << "layout(location = 0) in highp vec4 a_position;\n";
+	frag << "layout(location = 0) out mediump vec4 dEQP_FragColor;\n";
+	if (m_isVertexCase)
+	{
+		vtx << "layout(location = 0) out mediump vec4 v_color;\n";
+		frag << "layout(location = 0) in mediump vec4 v_color;\n";
+	}
+
+	// Input declarations.
+	deUint32 uniformBinding = 0;
+	deUint32 padding = 0;
+	for (int inNdx = 0; inNdx < numInputs; inNdx++)
+	{
+		const ShaderInput&	in			= inNdx > 0 ? m_in1 : m_in0;
+		const char*			precName	= getPrecisionName(in.precision);
+		const char*			typeName	= getDataTypeName(in.dataType);
+		string&				inValue		= inNdx > 0 ? inValue1 : inValue0;
+
+		if (in.inputType == INPUTTYPE_DYNAMIC)
+		{
+			if (isDataTypeMatrix(in.dataType))
+			{
+				vtx << "layout(location = " << 4 + inNdx + padding << ") in " << precName << " " << typeName << " a_";
+				// a_matN, v_matN
+				vtx << typeName << ";\n";
+				if (!m_isVertexCase)
+				{
+					vtx << "layout(location = " << 1 + inNdx + padding << ") out " << precName << " " << typeName << " v_" << typeName << ";\n";
+					frag << "layout(location = " << 1 + inNdx + padding << ") in " << precName << " " << typeName << " v_" << typeName << ";\n";
+					passVars.push_back(typeName);
+				}
+
+				inValue = string(m_isVertexCase ? "a_" : "v_") + getDataTypeName(in.dataType);
+				padding += getDataTypeMatrixNumColumns(in.dataType);
+			}
+			else
+			{
+				// a_coords, v_coords
+				vtx << "layout(location = 1) in " << precName << " " << typeName << " a_coords;\n";
+				if (!m_isVertexCase)
+				{
+					vtx << "layout(location = " << 1 + padding << ") out " << precName << " " << typeName << " v_coords;\n";
+					frag << "layout(location = " << 1 + padding << ") in " << precName << " " << typeName << " v_coords;\n";
+					passVars.push_back("coords");
+				}
+
+				inValue = m_isVertexCase ? "a_coords" : "v_coords";
+			}
+		}
+		else if (in.inputType == INPUTTYPE_UNIFORM)
+		{
+			op << "layout(std140, set = 0, binding = " << uniformBinding++ << ") uniform buffer" << inNdx << " { " << precName << " " << typeName << " u_in" << inNdx << "; };\n";
+			inValue = string("u_in") + de::toString(inNdx);
+		}
+		else if (in.inputType == INPUTTYPE_CONST)
+		{
+			op << "const " << precName << " " << typeName << " in" << inNdx << " = ";
+
+			// Generate declaration.
+			switch (in.dataType)
+			{
+				case TYPE_FLOAT:		op << de::floatToString(s_constInFloat[inNdx], 1);					break;
+				case TYPE_FLOAT_VEC2:	writeVectorConstructor<2>(op, s_constInVec2[inNdx]);				break;
+				case TYPE_FLOAT_VEC3:	writeVectorConstructor<3>(op, s_constInVec3[inNdx]);				break;
+				case TYPE_FLOAT_VEC4:	writeVectorConstructor<4>(op, s_constInVec4[inNdx]);				break;
+				case TYPE_FLOAT_MAT2:	writeMatrixConstructor<2, 2>(op, Mat2(s_constInMat2x2[inNdx]));		break;
+				case TYPE_FLOAT_MAT2X3:	writeMatrixConstructor<2, 3>(op, Mat2x3(s_constInMat2x3[inNdx]));	break;
+				case TYPE_FLOAT_MAT2X4:	writeMatrixConstructor<2, 4>(op, Mat2x4(s_constInMat2x4[inNdx]));	break;
+				case TYPE_FLOAT_MAT3X2:	writeMatrixConstructor<3, 2>(op, Mat3x2(s_constInMat3x2[inNdx]));	break;
+				case TYPE_FLOAT_MAT3:	writeMatrixConstructor<3, 3>(op, Mat3(s_constInMat3x3[inNdx]));		break;
+				case TYPE_FLOAT_MAT3X4:	writeMatrixConstructor<3, 4>(op, Mat3x4(s_constInMat3x4[inNdx]));	break;
+				case TYPE_FLOAT_MAT4X2:	writeMatrixConstructor<4, 2>(op, Mat4x2(s_constInMat4x2[inNdx]));	break;
+				case TYPE_FLOAT_MAT4X3:	writeMatrixConstructor<4, 3>(op, Mat4x3(s_constInMat4x3[inNdx]));	break;
+				case TYPE_FLOAT_MAT4:	writeMatrixConstructor<4, 4>(op, Mat4(s_constInMat4x4[inNdx]));		break;
+
+				default:
+					DE_ASSERT(DE_FALSE);
+			}
+
+			op << ";\n";
+
+			inValue = string("in") + de::toString(inNdx);
+		}
+	}
+
+	vtx << "\n"
+		<< "void main (void)\n"
+		<< "{\n"
+		<< "	gl_Position = a_position;\n";
+	frag << "\n"
+		 << "void main (void)\n"
+		 << "{\n";
+
+	if (m_isVertexCase)
+		frag << "	dEQP_FragColor = v_color;\n";
+	else
+	{
+		for (vector<string>::const_iterator copyIter = passVars.begin(); copyIter != passVars.end(); copyIter++)
+			vtx << "	v_" << *copyIter << " = " << "a_" << *copyIter << ";\n";
+	}
+
+	// Operation.
+
+	switch (getOperationNature(m_op))
+	{
+		case OPERATIONNATURE_PURE:
+			DE_ASSERT(getOperationType(m_op) != OPERATIONTYPE_ASSIGNMENT);
+
+			operationValue0 = inValue0;
+			operationValue1 = inValue1;
+			break;
+
+		case OPERATIONNATURE_MUTATING:
+			DE_ASSERT(getOperationType(m_op) != OPERATIONTYPE_ASSIGNMENT);
+
+			op << "	" << getPrecisionName(resultPrec) << " " << getDataTypeName(resultType) << " tmpValue = " << inValue0 << ";\n";
+
+			operationValue0 = "tmpValue";
+			operationValue1 = inValue1;
+			break;
+
+		case OPERATIONNATURE_ASSIGNMENT:
+			DE_ASSERT(getOperationType(m_op) == OPERATIONTYPE_ASSIGNMENT);
+
+			operationValue0 = inValue0;
+			operationValue1 = inValue1;
+			break;
+
+		default:
+			DE_ASSERT(DE_FALSE);
+	}
+
+	switch (getOperationType(m_op))
+	{
+		case OPERATIONTYPE_BINARY_OPERATOR:
+			op << "	" << getPrecisionName(resultPrec) << " " << getDataTypeName(resultType) << " res = " << operationValue0 << " " << getOperationName(m_op) << " " << operationValue1 << ";\n";
+			break;
+
+		case OPERATIONTYPE_UNARY_PREFIX_OPERATOR:
+			op << "	" << getPrecisionName(resultPrec) << " " << getDataTypeName(resultType) << " res = " << getOperationName(m_op) << operationValue0 << ";\n";
+			break;
+
+		case OPERATIONTYPE_UNARY_POSTFIX_OPERATOR:
+			op << "	" << getPrecisionName(resultPrec) << " " << getDataTypeName(resultType) << " res = " << operationValue0 << getOperationName(m_op) << ";\n";
+			break;
+
+		case OPERATIONTYPE_BINARY_FUNCTION:
+			op << "	" << getPrecisionName(resultPrec) << " " << getDataTypeName(resultType) << " res = " << getOperationName(m_op) << "(" << operationValue0 << ", " << operationValue1 << ");\n";
+			break;
+
+		case OPERATIONTYPE_UNARY_FUNCTION:
+			op << "	" << getPrecisionName(resultPrec) << " " << getDataTypeName(resultType) << " res = " << getOperationName(m_op) << "(" << operationValue0 << ");\n";
+			break;
+
+		case OPERATIONTYPE_ASSIGNMENT:
+			op << "	" << getPrecisionName(resultPrec) << " " << getDataTypeName(resultType) << " res = " << operationValue0 << ";\n";
+			op << "	res " << getOperationName(m_op) << " " << operationValue1 << ";\n";
+			break;
+
+		default:
+			DE_ASSERT(DE_FALSE);
+	}
+
+	// Reduction to vec3 (rgb). Check the used value too if it was modified
+	op << "	" << (m_isVertexCase ? "v_color" : "dEQP_FragColor") << " = ";
+
+	if (isOperationValueModifying(m_op))
+		op << "vec4(" << genGLSLMatToVec3Reduction(resultType, "res") << ", 1.0) + vec4(" << genGLSLMatToVec3Reduction(resultType, "tmpValue") << ", 0.0);\n";
+	else
+		op << "vec4(" << genGLSLMatToVec3Reduction(resultType, "res") << ", 1.0);\n";
+
+	vtx << "}\n";
+	frag << "}\n";
+
+	m_vertShaderSource	= vtx.str();
+	m_fragShaderSource	= frag.str();
+}
+
+std::string ShaderMatrixCase::genGLSLMatToVec3Reduction (const glu::DataType& matType, const char* varName)
+{
+	std::ostringstream op;
+
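+	// Build a GLSL expression that folds the operation result into three components for the output color. The asymmetric component selections are intended to keep every element of the result visible in the reduction (a plain symmetric sum could mask errors such as a transposed result).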
+	switch (matType)
+	{
+		case TYPE_FLOAT:		op << varName << ", "			<< varName << ", "			<< varName << "";																																			break;
+		case TYPE_FLOAT_VEC2:	op << varName << ".x, "			<< varName << ".y, "		<< varName << ".x";																																			break;
+		case TYPE_FLOAT_VEC3:	op << varName << "";																																																	break;
+		case TYPE_FLOAT_VEC4:	op << varName << ".x, "			<< varName << ".y, "		<< varName << ".z+"			<< varName << ".w";																												break;
+		case TYPE_FLOAT_MAT2:	op << varName << "[0][0], "		<< varName << "[1][0], "	<< varName << "[0][1]+"		<< varName << "[1][1]";																											break;
+		case TYPE_FLOAT_MAT2X3:	op << varName << "[0] + "		<< varName << "[1]";																																									break;
+		case TYPE_FLOAT_MAT2X4:	op << varName << "[0].xyz + "	<< varName << "[1].yzw";																																								break;
+		case TYPE_FLOAT_MAT3X2:	op << varName << "[0][0]+"		<< varName << "[0][1], "	<< varName << "[1][0]+"		<< varName << "[1][1], "	<< varName << "[2][0]+" << varName << "[2][1]";														break;
+		case TYPE_FLOAT_MAT3:	op << varName << "[0] + "		<< varName << "[1] + "		<< varName << "[2]";																																		break;
+		case TYPE_FLOAT_MAT3X4:	op << varName << "[0].xyz + "	<< varName << "[1].yzw + "	<< varName << "[2].zwx";																																	break;
+		case TYPE_FLOAT_MAT4X2:	op << varName << "[0][0]+"		<< varName << "[0][1]+"		<< varName << "[3][0], "	<< varName << "[1][0]+"		<< varName << "[1][1]+" << varName << "[3][1], " << varName << "[2][0]+" << varName << "[2][1]";	break;
+		case TYPE_FLOAT_MAT4X3:	op << varName << "[0] + "		<< varName << "[1] + "		<< varName << "[2] + "		<< varName << "[3]";																											break;
+		case TYPE_FLOAT_MAT4:	op << varName << "[0].xyz+"		<< varName << "[1].yzw+"	<< varName << "[2].zwx+"	<< varName << "[3].wxy";																										break;
+
+		default:
+			DE_ASSERT(DE_FALSE);
+	}
+
+	return op.str();
+}
+
+class ShaderMatrixTests : public tcu::TestCaseGroup
+{
+public:
+							ShaderMatrixTests		(tcu::TestContext& testCtx);
+	virtual					~ShaderMatrixTests		(void);
+
+	virtual void			init					(void);
+
+private:
+							ShaderMatrixTests		(const ShaderMatrixTests&);		// not allowed!
+	ShaderMatrixTests&		operator=				(const ShaderMatrixTests&);		// not allowed!
+};
+
+ShaderMatrixTests::ShaderMatrixTests (tcu::TestContext& testCtx)
+	: TestCaseGroup(testCtx, "matrix", "Matrix Tests")
+{
+}
+
+ShaderMatrixTests::~ShaderMatrixTests (void)
+{
+}
+
+void ShaderMatrixTests::init (void)
+{
+	static const struct
+	{
+		const char*		name;
+		const char*		desc;
+		const MatrixOp	op;
+		const bool		extendedInputTypeCases; // !< test with const and uniform types too
+		const bool		createInputTypeGroup;	// !< create group for input types
+	} ops[] =
+	{
+		{ "add",			"Matrix addition tests",						OP_ADD,				true,	true	},
+		{ "sub",			"Matrix subtraction tests",						OP_SUB,				true,	true	},
+		{ "mul",			"Matrix multiplication tests",					OP_MUL,				true,	true	},
+		{ "div",			"Matrix division tests",						OP_DIV,				true,	true	},
+		{ "matrixcompmult",	"Matrix component-wise multiplication tests",	OP_COMP_MUL,		false,	true	},
+		{ "outerproduct",	"Matrix outerProduct() tests",					OP_OUTER_PRODUCT,	false,	true	},
+		{ "transpose",		"Matrix transpose() tests",						OP_TRANSPOSE,		false,	true	},
+		{ "determinant",	"Matrix determinant() tests",					OP_DETERMINANT,		false,	true	},
+		{ "inverse",		"Matrix inverse() tests",						OP_INVERSE,			false,	true	},
+		{ "unary_addition",	"Matrix unary addition tests",					OP_UNARY_PLUS,		false,	false	},
+		{ "negation",		"Matrix negation tests",						OP_NEGATION,		false,	false	},
+		{ "pre_increment",	"Matrix prefix increment tests",				OP_PRE_INCREMENT,	false,	false	},
+		{ "pre_decrement",	"Matrix prefix decrement tests",				OP_PRE_DECREMENT,	false,	false	},
+		{ "post_increment",	"Matrix postfix increment tests",				OP_POST_INCREMENT,	false,	false	},
+		{ "post_decrement",	"Matrix postfix decrement tests",				OP_POST_DECREMENT,	false,	false	},
+		{ "add_assign",		"Matrix add into tests",						OP_ADD_INTO,		false,	false	},
+		{ "sub_assign",		"Matrix subtract from tests",					OP_SUBTRACT_FROM,	false,	false	},
+		{ "mul_assign",		"Matrix multiply into tests",					OP_MULTIPLY_INTO,	false,	false	},
+		{ "div_assign",		"Matrix divide into tests",						OP_DIVIDE_INTO,		false,	false	},
+	};
+
+	struct InputTypeSpec
+	{
+		const char*		name;
+		const char*		desc;
+		const InputType	type;
+	};
+	static const InputTypeSpec extendedInputTypes[] =
+	{
+		{ "const",		"Constant matrix input",	INPUTTYPE_CONST		},
+		{ "uniform",	"Uniform matrix input",		INPUTTYPE_UNIFORM	},
+		{ "dynamic",	"Dynamic matrix input",		INPUTTYPE_DYNAMIC	}
+	};
+	static const InputTypeSpec reducedInputTypes[] =
+	{
+		{ "dynamic",	"Dynamic matrix input",		INPUTTYPE_DYNAMIC	}
+	};
+
+	static const DataType matrixTypes[] =
+	{
+		TYPE_FLOAT_MAT2,
+		TYPE_FLOAT_MAT2X3,
+		TYPE_FLOAT_MAT2X4,
+		TYPE_FLOAT_MAT3X2,
+		TYPE_FLOAT_MAT3,
+		TYPE_FLOAT_MAT3X4,
+		TYPE_FLOAT_MAT4X2,
+		TYPE_FLOAT_MAT4X3,
+		TYPE_FLOAT_MAT4
+	};
+
+	static const Precision precisions[] =
+	{
+		PRECISION_MEDIUMP,
+		PRECISION_HIGHP
+	};
+
+	for (int opNdx = 0; opNdx < DE_LENGTH_OF_ARRAY(ops); opNdx++)
+	{
+		const InputTypeSpec*	inTypeList		= (ops[opNdx].extendedInputTypeCases) ? (extendedInputTypes) : (reducedInputTypes);
+		const int				inTypeListSize	= (ops[opNdx].extendedInputTypeCases) ? (DE_LENGTH_OF_ARRAY(extendedInputTypes)) : (DE_LENGTH_OF_ARRAY(reducedInputTypes));
+		const MatrixOp			op				= ops[opNdx].op;
+		tcu::TestCaseGroup*		opGroup			= new tcu::TestCaseGroup(m_testCtx, ops[opNdx].name, ops[opNdx].desc);
+
+		addChild(opGroup);
+
+		for (int inTypeNdx = 0; inTypeNdx < inTypeListSize; inTypeNdx++)
+		{
+			const InputType		inputType	= inTypeList[inTypeNdx].type;
+			tcu::TestCaseGroup* inGroup;
+
+			if (ops[opNdx].createInputTypeGroup)
+			{
+				inGroup = new tcu::TestCaseGroup(m_testCtx, inTypeList[inTypeNdx].name, inTypeList[inTypeNdx].desc);
+				opGroup->addChild(inGroup);
+			}
+			else
+				inGroup = opGroup;
+
+			for (int matTypeNdx = 0; matTypeNdx < DE_LENGTH_OF_ARRAY(matrixTypes); matTypeNdx++)
+			{
+				DataType	matType		= matrixTypes[matTypeNdx];
+				int			numCols		= getDataTypeMatrixNumColumns(matType);
+				int			numRows		= getDataTypeMatrixNumRows(matType);
+				const char*	matTypeName	= getDataTypeName(matType);
+
+				for (int precNdx = 0; precNdx < DE_LENGTH_OF_ARRAY(precisions); precNdx++)
+				{
+					Precision	precision	= precisions[precNdx];
+					const char*	precName	= getPrecisionName(precision);
+					string		baseName	= string(precName) + "_" + matTypeName + "_";
+					ShaderInput	matIn		(inputType, matType, precision);
+
+					if (isOperationMatrixScalar(op))
+					{
+						// Matrix-scalar \note For div cases we use uniform input.
+						ShaderInput scalarIn(op == OP_DIV ? INPUTTYPE_UNIFORM : INPUTTYPE_DYNAMIC, TYPE_FLOAT, precision);
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + "float_vertex").c_str(),		"Matrix-scalar case", matIn, scalarIn, op, true));
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + "float_fragment").c_str(),	"Matrix-scalar case", matIn, scalarIn, op, false));
+					}
+
+					if (isOperationMatrixVector(op))
+					{
+						// Matrix-vector.
+						DataType	colVecType	= getDataTypeFloatVec(numCols);
+						ShaderInput colVecIn	(op == OP_DIV ? INPUTTYPE_UNIFORM : INPUTTYPE_DYNAMIC, colVecType, precision);
+
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + getDataTypeName(colVecType) + "_vertex").c_str(),		"Matrix-vector case", matIn, colVecIn, op, true));
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + getDataTypeName(colVecType) + "_fragment").c_str(),	"Matrix-vector case", matIn, colVecIn, op, false));
+
+						// Vector-matrix.
+						DataType	rowVecType	= getDataTypeFloatVec(numRows);
+						ShaderInput	rowVecIn	(op == OP_DIV ? INPUTTYPE_UNIFORM : INPUTTYPE_DYNAMIC, rowVecType, precision);
+						string		vecMatName	= string(precName) + "_" + getDataTypeName(rowVecType) + "_" + matTypeName;
+
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (vecMatName + "_vertex").c_str(),		"Vector-matrix case", rowVecIn, matIn, op, true));
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (vecMatName + "_fragment").c_str(),	"Vector-matrix case", rowVecIn, matIn, op, false));
+					}
+
+					if (isOperationArithmeticMatrixMatrix(op))
+					{
+						// Arithmetic matrix-matrix multiplication.
+						for (int otherCols = 2; otherCols <= 4; otherCols++)
+						{
+							ShaderInput otherMatIn(inputType == INPUTTYPE_DYNAMIC ? INPUTTYPE_UNIFORM : inputType, getDataTypeMatrix(otherCols, numCols /* rows */), precision);
+							inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + getDataTypeName(otherMatIn.dataType) + "_vertex").c_str(),	"Matrix-matrix case", matIn, otherMatIn, op, true));
+							inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + getDataTypeName(otherMatIn.dataType) + "_fragment").c_str(),	"Matrix-matrix case", matIn, otherMatIn, op, false));
+						}
+					}
+					else if (isOperationComponentwiseMatrixMatrix(op))
+					{
+						// Component-wise.
+						ShaderInput otherMatIn(inputType == INPUTTYPE_DYNAMIC ? INPUTTYPE_UNIFORM : inputType, matType, precision);
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + matTypeName + "_vertex").c_str(),		"Matrix-matrix case", matIn, otherMatIn, op, true));
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + matTypeName + "_fragment").c_str(),	"Matrix-matrix case", matIn, otherMatIn, op, false));
+					}
+
+					if (isOperationVectorVector(op))
+					{
+						ShaderInput vec1In(inputType,																getDataTypeFloatVec(numRows), precision);
+						ShaderInput vec2In((inputType == INPUTTYPE_DYNAMIC) ? (INPUTTYPE_UNIFORM) : (inputType),	getDataTypeFloatVec(numCols), precision);
+
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + "float_vertex").c_str(),		"Vector-vector case", vec1In, vec2In, op, true));
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + "float_fragment").c_str(),	"Vector-vector case", vec1In, vec2In, op, false));
+					}
+
+					if ((isOperationUnaryAnyMatrix(op)) ||
+						(isOperationUnarySymmetricMatrix(op) && numCols == numRows))
+					{
+						ShaderInput voidInput(INPUTTYPE_LAST, TYPE_LAST, PRECISION_LAST);
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + "float_vertex").c_str(),		"Matrix case", matIn, voidInput, op, true));
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + "float_fragment").c_str(),	"Matrix case", matIn, voidInput, op, false));
+					}
+
+					if ((isOperationAssignmentAnyMatrix(op)) ||
+						(isOperationAssignmentSymmetricMatrix(op) && numCols == numRows))
+					{
+						ShaderInput otherMatIn(inputType == INPUTTYPE_DYNAMIC ? INPUTTYPE_UNIFORM : inputType, matType, precision);
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + "float_vertex").c_str(),		"Matrix assignment case", matIn, otherMatIn, op, true));
+						inGroup->addChild(new ShaderMatrixCase(m_testCtx, (baseName + "float_fragment").c_str(),	"Matrix assignment case", matIn, otherMatIn, op, false));
+					}
+				}
+			}
+		}
+	}
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createMatrixTests (tcu::TestContext& testCtx)
+{
+	return new ShaderMatrixTests(testCtx);
+}
+
+} // sr
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderMatrixTests.hpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderMatrixTests.hpp
new file mode 100644
index 0000000..368cd83
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderMatrixTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTSHADERRENDERMATRIXTESTS_HPP
+#define _VKTSHADERRENDERMATRIXTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader matrix arithmetic tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace sr
+{
+
+tcu::TestCaseGroup*	createMatrixTests	(tcu::TestContext& testCtx);
+
+} // sr
+} // vkt
+
+#endif // _VKTSHADERRENDERMATRIXTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderOperatorTests.cpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderOperatorTests.cpp
new file mode 100644
index 0000000..3e71cb5
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderOperatorTests.cpp
@@ -0,0 +1,2163 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader operators tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderRenderOperatorTests.hpp"
+#include "vktShaderRender.hpp"
+#include "tcuVectorUtil.hpp"
+#include "deStringUtil.hpp"
+
+#include <limits>
+
+using namespace tcu;
+using namespace glu;
+
+namespace vkt
+{
+namespace sr
+{
+namespace
+{
+
+#if defined(abs)
+#	undef abs
+#endif
+
+using de::min;
+using de::max;
+using de::clamp;
+
+// \note VS2013 gets confused without these
+using tcu::asinh;
+using tcu::acosh;
+using tcu::atanh;
+using tcu::exp2;
+using tcu::log2;
+using tcu::trunc;
+
+inline bool logicalAnd	(bool a, bool b)	{ return (a && b); }
+inline bool logicalOr	(bool a, bool b)	{ return (a || b); }
+inline bool logicalXor	(bool a, bool b)	{ return (a != b); }
+
+// \note stdlib.h defines div() that is not compatible with the macros.
+template<typename T> inline T div (T a, T b) { return a / b; }
+
+template<typename T> inline T leftShift (T value, int amount) { return value << amount; }
+
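+// Arithmetic right shift for signed operands: the sign bits are OR'd in explicitly because right-shifting a negative int is implementation-defined in C++.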
+inline deUint32	rightShift (deUint32 value, int amount)		{ return value >> amount; }
+inline int		rightShift (int value, int amount)			{ return (value >> amount) | (value >= 0 ? 0 : ~(~0U >> amount)); } // \note Arithmetic shift.
+
+template<typename T, int Size> Vector<T, Size> leftShift (const Vector<T, Size>& value, const Vector<int, Size>& amount)
+{
+	Vector<T, Size> result;
+	for (int i = 0; i < Size; i++)
+		result[i] = leftShift(value[i], amount[i]);
+	return result;
+}
+
+template<typename T, int Size> Vector<T, Size> rightShift (const Vector<T, Size>& value, const Vector<int, Size>& amount)
+{
+	Vector<T, Size> result;
+	for (int i = 0; i < Size; i++)
+		result[i] = rightShift(value[i], amount[i]);
+	return result;
+}
+
+template<typename T, int Size> Vector<T, Size> leftShiftVecScalar	(const Vector<T, Size>& value, int amount) { return leftShift(value, Vector<int, Size>(amount)); }
+template<typename T, int Size> Vector<T, Size> rightShiftVecScalar	(const Vector<T, Size>& value, int amount) { return rightShift(value, Vector<int, Size>(amount)); }
+
+template<typename T, int Size>
+inline Vector<T, Size> minVecScalar (const Vector<T, Size>& v, T s)
+{
+	Vector<T, Size> res;
+	for (int i = 0; i < Size; i++)
+		res[i] = min(v[i], s);
+	return res;
+}
+
+template<typename T, int Size>
+inline Vector<T, Size> maxVecScalar (const Vector<T, Size>& v, T s)
+{
+	Vector<T, Size> res;
+	for (int i = 0; i < Size; i++)
+		res[i] = max(v[i], s);
+	return res;
+}
+
+template<typename T, int Size>
+inline Vector<T, Size> clampVecScalarScalar (const Vector<T, Size>& v, T s0, T s1)
+{
+	Vector<T, Size> res;
+	for (int i = 0; i < Size; i++)
+		res[i] = clamp(v[i], s0, s1);
+	return res;
+}
+
+template<typename T, int Size>
+inline Vector<T, Size> mixVecVecScalar (const Vector<T, Size>& v0, const Vector<T, Size>& v1, T s)
+{
+	Vector<T, Size> res;
+	for (int i = 0; i < Size; i++)
+		res[i] = mix(v0[i], v1[i], s);
+	return res;
+}
+
+template<typename T, int Size>
+inline Vector<T, Size> stepScalarVec (T s, const Vector<T, Size>& v)
+{
+	Vector<T, Size> res;
+	for (int i = 0; i < Size; i++)
+		res[i] = step(s, v[i]);
+	return res;
+}
+
+template<typename T, int Size>
+inline Vector<T, Size> smoothStepScalarScalarVec (T s0, T s1, const Vector<T, Size>& v)
+{
+	Vector<T, Size> res;
+	for (int i = 0; i < Size; i++)
+		res[i] = smoothStep(s0, s1, v[i]);
+	return res;
+}
+
+inline int		addOne (int v)		{ return v + 1; };
+inline int		subOne (int v)		{ return v - 1; };
+inline deUint32	addOne (deUint32 v)	{ return v + 1; };
+inline deUint32	subOne (deUint32 v)	{ return v - 1; };
+
+template<int Size> inline Vector<float, Size>		addOne (const Vector<float, Size>& v)		{ return v + 1.0f; };
+template<int Size> inline Vector<float, Size>		subOne (const Vector<float, Size>& v)		{ return v - 1.0f; };
+template<int Size> inline Vector<int, Size>			addOne (const Vector<int, Size>& v)			{ return v + 1; };
+template<int Size> inline Vector<int, Size>			subOne (const Vector<int, Size>& v)			{ return v - 1; };
+template<int Size> inline Vector<deUint32, Size>	addOne (const Vector<deUint32, Size>& v)	{ return v + 1U; };
+template<int Size> inline Vector<deUint32, Size>	subOne (const Vector<deUint32, Size>& v)	{ return (v.asInt() - 1).asUint(); };
+
+template<typename T> inline T selection	(bool cond, T a, T b)	{ return cond ? a : b; };
+
+// Vec-scalar and scalar-vec binary operators.
+
+// \note This one is done separately due to how the overloaded minus operator is implemented for vector-scalar operands.
+template<int Size>				inline Vector<deUint32, Size>	subVecScalar			(const Vector<deUint32, Size>& v, deUint32 s)	{ return (v.asInt() - (int)s).asUint(); };
+
+template<typename T, int Size>	inline Vector<T, Size>			addVecScalar			(const Vector<T, Size>& v, T s)					{ return v + s; };
+template<typename T, int Size>	inline Vector<T, Size>			subVecScalar			(const Vector<T, Size>& v, T s)					{ return v - s; };
+template<typename T, int Size>	inline Vector<T, Size>			mulVecScalar			(const Vector<T, Size>& v, T s)					{ return v * s; };
+template<typename T, int Size>	inline Vector<T, Size>			divVecScalar			(const Vector<T, Size>& v, T s)					{ return v / s; };
+template<typename T, int Size>	inline Vector<T, Size>			modVecScalar			(const Vector<T, Size>& v, T s)					{ return mod(v, Vector<T, Size>(s)); };
+template<typename T, int Size>	inline Vector<T, Size>			bitwiseAndVecScalar		(const Vector<T, Size>& v, T s)					{ return bitwiseAnd(v, Vector<T, Size>(s)); };
+template<typename T, int Size>	inline Vector<T, Size>			bitwiseOrVecScalar		(const Vector<T, Size>& v, T s)					{ return bitwiseOr(v, Vector<T, Size>(s)); };
+template<typename T, int Size>	inline Vector<T, Size>			bitwiseXorVecScalar		(const Vector<T, Size>& v, T s)					{ return bitwiseXor(v, Vector<T, Size>(s)); };
+
+template<typename T, int Size> inline Vector<T, Size>			addScalarVec			(T s, const Vector<T, Size>& v)					{ return s + v; };
+template<typename T, int Size> inline Vector<T, Size>			subScalarVec			(T s, const Vector<T, Size>& v)					{ return s - v; };
+template<typename T, int Size> inline Vector<T, Size>			mulScalarVec			(T s, const Vector<T, Size>& v)					{ return s * v; };
+template<typename T, int Size> inline Vector<T, Size>			divScalarVec			(T s, const Vector<T, Size>& v)					{ return s / v; };
+template<typename T, int Size> inline Vector<T, Size>			modScalarVec			(T s, const Vector<T, Size>& v)					{ return mod(Vector<T, Size>(s), v); };
+template<typename T, int Size> inline Vector<T, Size>			bitwiseAndScalarVec		(T s, const Vector<T, Size>& v)					{ return bitwiseAnd(Vector<T, Size>(s), v); };
+template<typename T, int Size> inline Vector<T, Size>			bitwiseOrScalarVec		(T s, const Vector<T, Size>& v)					{ return bitwiseOr(Vector<T, Size>(s), v); };
+template<typename T, int Size> inline Vector<T, Size>			bitwiseXorScalarVec		(T s, const Vector<T, Size>& v)					{ return bitwiseXor(Vector<T, Size>(s), v); };
+
+// Reference functions for specific sequence operations for the sequence operator tests.
+
+// Reference for expression "in0, in2 + in1, in1 + in0"
+inline Vec4		sequenceNoSideEffCase0 (const Vec4& in0, const Vec4& in1, const Vec4& in2)		{ DE_UNREF(in2); return in1 + in0; }
+// Reference for expression "in0, in2 + in1, in1 + in0"
+inline deUint32	sequenceNoSideEffCase1 (float in0, deUint32 in1, float in2)						{ DE_UNREF(in0); DE_UNREF(in2); return in1 + in1; }
+// Reference for expression "in0 && in1, in0, ivec2(vec2(in0) + in2)"
+inline IVec2	sequenceNoSideEffCase2 (bool in0, bool in1, const Vec2& in2)					{ DE_UNREF(in1); return IVec2((int)((float)in0 + in2.x()), (int)((float)in0 + in2.y())); }
+// Reference for expression "in0 + vec4(in1), in2, in1"
+inline IVec4	sequenceNoSideEffCase3 (const Vec4& in0, const IVec4& in1, const BVec4& in2)	{ DE_UNREF(in0); DE_UNREF(in2); return in1; }
+// Reference for expression "in0++, in1 = in0 + in2, in2 = in1"
+inline Vec4		sequenceSideEffCase0 (const Vec4& in0, const Vec4& in1, const Vec4& in2)		{ DE_UNREF(in1); return in0 + 1.0f + in2; }
+// Reference for expression "in1++, in0 = float(in1), in1 = uint(in0 + in2)"
+inline deUint32	sequenceSideEffCase1 (float in0, deUint32 in1, float in2)						{ DE_UNREF(in0); return (deUint32)(float(in1) + 1.0f + in2); }
+// Reference for expression "in1 = in0, in2++, in2 = in2 + vec2(in1), ivec2(in2)"
+inline IVec2	sequenceSideEffCase2 (bool in0, bool in1, const Vec2& in2)						{ DE_UNREF(in1); return (in2 + Vec2(1.0f) + Vec2((float)in0)).asInt(); }
+// Reference for expression "in0 = in0 + vec4(in2), in1 = in1 + ivec4(in0), in1++"
+inline IVec4	sequenceSideEffCase3 (const Vec4& in0, const IVec4& in1, const BVec4& in2)		{ return in1 + (in0 + Vec4((float)in2.x(), (float)in2.y(), (float)in2.z(), (float)in2.w())).asInt(); }
+
+// ShaderEvalFunc-type wrappers for the above functions.
+void evalSequenceNoSideEffCase0	(ShaderEvalContext& ctx) { ctx.color		= sequenceNoSideEffCase0		(ctx.in[0].swizzle(1, 2, 3, 0),	ctx.in[1].swizzle(3, 2, 1, 0),			ctx.in[2].swizzle(0, 3, 2, 1)); }
+void evalSequenceNoSideEffCase1	(ShaderEvalContext& ctx) { ctx.color.x()	= (float)sequenceNoSideEffCase1	(ctx.in[0].z(),					(deUint32)ctx.in[1].x(),				ctx.in[2].y()); }
+void evalSequenceNoSideEffCase2	(ShaderEvalContext& ctx) { ctx.color.yz()	= sequenceNoSideEffCase2		(ctx.in[0].z() > 0.0f,			ctx.in[1].x() > 0.0f,					ctx.in[2].swizzle(2, 1)).asFloat(); }
+void evalSequenceNoSideEffCase3	(ShaderEvalContext& ctx) { ctx.color		= sequenceNoSideEffCase3		(ctx.in[0].swizzle(1, 2, 3, 0),	ctx.in[1].swizzle(3, 2, 1, 0).asInt(),	greaterThan(ctx.in[2].swizzle(0, 3, 2, 1), Vec4(0.0f, 0.0f, 0.0f, 0.0f))).asFloat(); }
+void evalSequenceSideEffCase0	(ShaderEvalContext& ctx) { ctx.color		= sequenceSideEffCase0			(ctx.in[0].swizzle(1, 2, 3, 0),	ctx.in[1].swizzle(3, 2, 1, 0),			ctx.in[2].swizzle(0, 3, 2, 1)); }
+void evalSequenceSideEffCase1	(ShaderEvalContext& ctx) { ctx.color.x()	= (float)sequenceSideEffCase1	(ctx.in[0].z(),					(deUint32)ctx.in[1].x(),				ctx.in[2].y()); }
+void evalSequenceSideEffCase2	(ShaderEvalContext& ctx) { ctx.color.yz()	= sequenceSideEffCase2			(ctx.in[0].z() > 0.0f,			ctx.in[1].x() > 0.0f,					ctx.in[2].swizzle(2, 1)).asFloat(); }
+void evalSequenceSideEffCase3	(ShaderEvalContext& ctx) { ctx.color		= sequenceSideEffCase3			(ctx.in[0].swizzle(1, 2, 3, 0),	ctx.in[1].swizzle(3, 2, 1, 0).asInt(),	greaterThan(ctx.in[2].swizzle(0, 3, 2, 1), Vec4(0.0f, 0.0f, 0.0f, 0.0f))).asFloat(); }
+
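+// Joins the given strings, separating consecutive elements with delim (used below to build constructor strings).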
+static std::string stringJoin (const std::vector<std::string>& elems, const std::string& delim)
+{
+	std::string result;
+	for (int i = 0; i < (int)elems.size(); i++)
+		result += (i > 0 ? delim : "") + elems[i];
+	return result;
+}
+
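+// Returns a "vec4(...)" constructor string whose components are 'first' where firstMask is set and 'second' elsewhere.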
+static std::string twoValuedVec4 (const std::string& first, const std::string& second, const BVec4& firstMask)
+{
+	std::vector<std::string> elems(4);
+	for (int i = 0; i < 4; i++)
+		elems[i] = firstMask[i] ? first : second;
+
+	return "vec4(" + stringJoin(elems, ", ") + ")";
+}
+
+enum
+{
+	MAX_INPUTS = 3
+};
+
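+// Bitmask over Precision enum values; BuiltinFuncInfo::precisionMask uses these to select which precisions a case covers.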
+enum PrecisionMask
+{
+	PRECMASK_NA				= 0,						//!< Precision not applicable (booleans)
+	PRECMASK_MEDIUMP		= (1<<PRECISION_MEDIUMP),
+	PRECMASK_HIGHP			= (1<<PRECISION_HIGHP),
+
+	PRECMASK_ALL			= PRECMASK_MEDIUMP | PRECMASK_HIGHP
+};
+
+enum ValueType
+{
+	VALUE_NONE			= 0,
+	VALUE_FLOAT			= (1<<0),	// float scalar
+	VALUE_FLOAT_VEC		= (1<<1),	// float vector
+	VALUE_FLOAT_GENTYPE	= (1<<2),	// float scalar/vector
+	VALUE_VEC3			= (1<<3),	// vec3 only
+	VALUE_MATRIX		= (1<<4),	// matrix
+	VALUE_BOOL			= (1<<5),	// boolean scalar
+	VALUE_BOOL_VEC		= (1<<6),	// boolean vector
+	VALUE_BOOL_GENTYPE	= (1<<7),	// boolean scalar/vector
+	VALUE_INT			= (1<<8),	// int scalar
+	VALUE_INT_VEC		= (1<<9),	// int vector
+	VALUE_INT_GENTYPE	= (1<<10),	// int scalar/vector
+	VALUE_UINT			= (1<<11),	// uint scalar
+	VALUE_UINT_VEC		= (1<<12),	// uint vector
+	VALUE_UINT_GENTYPE	= (1<<13),	// uint scalar/vector
+
+	// Shorthands.
+	F				= VALUE_FLOAT,
+	FV				= VALUE_FLOAT_VEC,
+	GT				= VALUE_FLOAT_GENTYPE,
+	V3				= VALUE_VEC3,
+	M				= VALUE_MATRIX,
+	B				= VALUE_BOOL,
+	BV				= VALUE_BOOL_VEC,
+	BGT				= VALUE_BOOL_GENTYPE,
+	I				= VALUE_INT,
+	IV				= VALUE_INT_VEC,
+	IGT				= VALUE_INT_GENTYPE,
+	U				= VALUE_UINT,
+	UV				= VALUE_UINT_VEC,
+	UGT				= VALUE_UINT_GENTYPE
+};
+
+static inline bool isScalarType (ValueType type)
+{
+	return type == VALUE_FLOAT || type == VALUE_BOOL || type == VALUE_INT || type == VALUE_UINT;
+}
+
+static inline bool isFloatType (ValueType type)
+{
+	return (type & (VALUE_FLOAT | VALUE_FLOAT_VEC | VALUE_FLOAT_GENTYPE)) != 0;
+}
+
+static inline bool isIntType (ValueType type)
+{
+	return (type & (VALUE_INT | VALUE_INT_VEC | VALUE_INT_GENTYPE)) != 0;
+}
+
+static inline bool isUintType (ValueType type)
+{
+	return (type & (VALUE_UINT | VALUE_UINT_VEC | VALUE_UINT_GENTYPE)) != 0;
+}
+
+static inline bool isBoolType (ValueType type)
+{
+	return (type & (VALUE_BOOL | VALUE_BOOL_VEC | VALUE_BOOL_GENTYPE)) != 0;
+}
+
+struct Value
+{
+	Value (ValueType valueType_, const float rangeMin_, const float rangeMax_)
+		: valueType	(valueType_)
+		, rangeMin	(rangeMin_)
+		, rangeMax	(rangeMax_)
+	{
+	}
+
+	ValueType		valueType;
+	float			rangeMin;
+	float			rangeMax;
+};
+
+enum OperationType
+{
+	FUNCTION = 0,
+	OPERATOR,
+	SIDE_EFFECT_OPERATOR // Test the side-effect (as opposed to the result) of a side-effect operator.
+};
+
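+// Static description of a single operator/built-in function case: output type, input value types and ranges,
+// the scale and bias used to map results into the visible color range, and per-result-size evaluation functions.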
+struct BuiltinFuncInfo
+{
+	BuiltinFuncInfo (const char* caseName_,
+					 const char* shaderFuncName_,
+					 ValueType outValue_,
+					 Value input0_, Value input1_,
+					 Value input2_,
+					 const float resultScale_,
+					 const float resultBias_,
+					 deUint32 precisionMask_,
+					 ShaderEvalFunc evalFuncScalar_,
+					 ShaderEvalFunc evalFuncVec2_,
+					 ShaderEvalFunc evalFuncVec3_,
+					 ShaderEvalFunc evalFuncVec4_,
+					 OperationType type_=FUNCTION,
+					 bool isUnaryPrefix_=true)
+		: caseName			(caseName_)
+		, shaderFuncName	(shaderFuncName_)
+		, outValue			(outValue_)
+		, input0			(input0_)
+		, input1			(input1_)
+		, input2			(input2_)
+		, resultScale		(resultScale_)
+		, resultBias		(resultBias_)
+		, referenceScale	(resultScale_)
+		, referenceBias		(resultBias_)
+		, precisionMask		(precisionMask_)
+		, evalFuncScalar	(evalFuncScalar_)
+		, evalFuncVec2		(evalFuncVec2_)
+		, evalFuncVec3		(evalFuncVec3_)
+		, evalFuncVec4		(evalFuncVec4_)
+		, type				(type_)
+		, isUnaryPrefix		(isUnaryPrefix_)
+	{
+	}
+
+	BuiltinFuncInfo (const char* caseName_,
+					 const char* shaderFuncName_,
+					 ValueType outValue_,
+					 Value input0_,
+					 Value input1_,
+					 Value input2_,
+					 const float resultScale_,
+					 const float resultBias_,
+					 const float referenceScale_,
+					 const float referenceBias_,
+					 deUint32 precisionMask_,
+					 ShaderEvalFunc evalFuncScalar_,
+					 ShaderEvalFunc evalFuncVec2_,
+					 ShaderEvalFunc evalFuncVec3_,
+					 ShaderEvalFunc evalFuncVec4_,
+					 OperationType type_=FUNCTION,
+					 bool isUnaryPrefix_=true)
+		: caseName			(caseName_)
+		, shaderFuncName	(shaderFuncName_)
+		, outValue			(outValue_)
+		, input0			(input0_)
+		, input1			(input1_)
+		, input2			(input2_)
+		, resultScale		(resultScale_)
+		, resultBias		(resultBias_)
+		, referenceScale	(referenceScale_)
+		, referenceBias		(referenceBias_)
+		, precisionMask		(precisionMask_)
+		, evalFuncScalar	(evalFuncScalar_)
+		, evalFuncVec2		(evalFuncVec2_)
+		, evalFuncVec3		(evalFuncVec3_)
+		, evalFuncVec4		(evalFuncVec4_)
+		, type				(type_)
+		, isUnaryPrefix		(isUnaryPrefix_)
+	{
+	}
+
+	const char*		caseName;			//!< Name of case.
+	const char*		shaderFuncName;		//!< Name in shading language.
+	ValueType		outValue;
+	Value			input0;
+	Value			input1;
+	Value			input2;
+	float			resultScale;
+	float			resultBias;
+	float			referenceScale;
+	float			referenceBias;
+	deUint32		precisionMask;
+	ShaderEvalFunc	evalFuncScalar;
+	ShaderEvalFunc	evalFuncVec2;
+	ShaderEvalFunc	evalFuncVec3;
+	ShaderEvalFunc	evalFuncVec4;
+	OperationType	type;
+	bool			isUnaryPrefix;		//!< Whether a unary operator is a prefix operator; redundant unless unary.
+};
+
+static inline BuiltinFuncInfo BuiltinOperInfo (const char* caseName_, const char* shaderFuncName_, ValueType outValue_, Value input0_, Value input1_, Value input2_, const float resultScale_, const float resultBias_, deUint32 precisionMask_, ShaderEvalFunc evalFuncScalar_, ShaderEvalFunc evalFuncVec2_, ShaderEvalFunc evalFuncVec3_, ShaderEvalFunc evalFuncVec4_)
+{
+	return BuiltinFuncInfo(caseName_, shaderFuncName_, outValue_, input0_, input1_, input2_, resultScale_, resultBias_, resultScale_, resultBias_, precisionMask_, evalFuncScalar_, evalFuncVec2_, evalFuncVec3_, evalFuncVec4_, OPERATOR);
+}
+
+// For postfix (unary) operators.
+static inline BuiltinFuncInfo BuiltinPostOperInfo (const char* caseName_, const char* shaderFuncName_, ValueType outValue_, Value input0_, Value input1_, Value input2_, const float resultScale_, const float resultBias_, deUint32 precisionMask_, ShaderEvalFunc evalFuncScalar_, ShaderEvalFunc evalFuncVec2_, ShaderEvalFunc evalFuncVec3_, ShaderEvalFunc evalFuncVec4_)
+{
+	return BuiltinFuncInfo(caseName_, shaderFuncName_, outValue_, input0_, input1_, input2_, resultScale_, resultBias_, resultScale_, resultBias_, precisionMask_, evalFuncScalar_, evalFuncVec2_, evalFuncVec3_, evalFuncVec4_, OPERATOR, false);
+}
+
+static inline BuiltinFuncInfo BuiltinSideEffOperInfo (const char* caseName_, const char* shaderFuncName_, ValueType outValue_, Value input0_, Value input1_, Value input2_, const float resultScale_, const float resultBias_, deUint32 precisionMask_, ShaderEvalFunc evalFuncScalar_, ShaderEvalFunc evalFuncVec2_, ShaderEvalFunc evalFuncVec3_, ShaderEvalFunc evalFuncVec4_)
+{
+	return BuiltinFuncInfo(caseName_, shaderFuncName_, outValue_, input0_, input1_, input2_, resultScale_, resultBias_, resultScale_, resultBias_, precisionMask_, evalFuncScalar_, evalFuncVec2_, evalFuncVec3_, evalFuncVec4_, SIDE_EFFECT_OPERATOR);
+}
+
+// For postfix (unary) operators, testing side-effect.
+static inline BuiltinFuncInfo BuiltinPostSideEffOperInfo (const char* caseName_, const char* shaderFuncName_, ValueType outValue_, Value input0_, Value input1_, Value input2_, const float resultScale_, const float resultBias_, deUint32 precisionMask_, ShaderEvalFunc evalFuncScalar_, ShaderEvalFunc evalFuncVec2_, ShaderEvalFunc evalFuncVec3_, ShaderEvalFunc evalFuncVec4_)
+{
+	return BuiltinFuncInfo(caseName_, shaderFuncName_, outValue_, input0_, input1_, input2_, resultScale_, resultBias_, resultScale_, resultBias_, precisionMask_, evalFuncScalar_, evalFuncVec2_, evalFuncVec3_, evalFuncVec4_, SIDE_EFFECT_OPERATOR, false);
+}
+
+// BuiltinFuncGroup
+
+struct BuiltinFuncGroup
+{
+						BuiltinFuncGroup	(const char* name_, const char* description_) : name(name_), description(description_) {}
+	BuiltinFuncGroup&	operator<<			(const BuiltinFuncInfo& info) { funcInfos.push_back(info); return *this; }
+
+	const char*						name;
+	const char*						description;
+	std::vector<BuiltinFuncInfo>	funcInfos;
+};
+
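+// Swizzles used when reading each input attribute and writing the result color in the generated shader,
+// indexed by [input][scalarSize - 1] and [scalarSize - 1] respectively.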
+static const char* s_inSwizzles[MAX_INPUTS][4] =
+{
+	{ "z", "wy", "zxy", "yzwx" },
+	{ "x", "yx", "yzx", "wzyx" },
+	{ "y", "zy", "wyz", "xwzy" }
+};
+
+static const char* s_outSwizzles[]	= { "x", "yz", "xyz", "xyzw" };
+
+static const BVec4 s_outSwizzleChannelMasks[] =
+{
+	BVec4(true,  false, false, false),
+	BVec4(false, true,  true,  false),
+	BVec4(true,  true,  true,  false),
+	BVec4(true,  true,  true,  true )
+};
+
+// OperatorShaderEvaluator
+
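+// Runs the reference evaluation function and then applies the reference scale and bias to the
+// color channels selected by the result's scalar size.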
+class OperatorShaderEvaluator : public ShaderEvaluator
+{
+public:
+	OperatorShaderEvaluator (const ShaderEvalFunc evalFunc, const float scale, const float bias, int resultScalarSize)
+		: m_evalFunc					(evalFunc)
+		, m_resultScalarSize			(resultScalarSize)
+		, m_evaluatedScale				(scale)
+		, m_evaluatedBias				(bias)
+	{
+		DE_ASSERT(de::inRange(resultScalarSize, 1, 4));
+	}
+
+	virtual ~OperatorShaderEvaluator (void)
+	{
+	}
+
+	virtual void evaluate (ShaderEvalContext& ctx) const
+	{
+		m_evalFunc(ctx);
+
+		for (int channelNdx = 0; channelNdx < 4; channelNdx++)
+			if (s_outSwizzleChannelMasks[m_resultScalarSize - 1][channelNdx])
+				ctx.color[channelNdx] = ctx.color[channelNdx] * m_evaluatedScale + m_evaluatedBias;
+	}
+
+private:
+	const ShaderEvalFunc	m_evalFunc;
+	const int				m_resultScalarSize;
+
+	const float				m_evaluatedScale;
+	const float				m_evaluatedBias;
+};
+
+// Concrete value.
+
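+// Concrete value: GLSL data type plus the numeric range its attribute data is generated in.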
+struct ShaderValue
+{
+	ShaderValue (DataType type_, const float rangeMin_, const float rangeMax_)
+		: type		(type_)
+		, rangeMin	(rangeMin_)
+		, rangeMax	(rangeMax_)
+	{
+	}
+
+	ShaderValue (void)
+		: type		(TYPE_LAST)
+		, rangeMin	(0.0f)
+		, rangeMax	(0.0f)
+	{
+	}
+
+	DataType		type;
+	float			rangeMin;
+	float			rangeMax;
+};
+
+struct ShaderDataSpec
+{
+	ShaderDataSpec (void)
+		: resultScale		(1.0f)
+		, resultBias		(0.0f)
+		, referenceScale	(1.0f)
+		, referenceBias		(0.0f)
+		, precision			(PRECISION_LAST)
+		, output			(TYPE_LAST)
+		, numInputs			(0)
+	{
+	}
+
+	float			resultScale;
+	float			resultBias;
+	float			referenceScale;
+	float			referenceBias;
+	Precision		precision;
+	DataType		output;
+	int				numInputs;
+	ShaderValue		inputs[MAX_INPUTS];
+};
+
+// ShaderOperatorCaseInstance
+
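+// Per-instance setup: builds a per-input 4x4 transform that maps the quad coordinates into the input's
+// [rangeMin, rangeMax] range and registers the corresponding vertex attribute.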
+class ShaderOperatorCaseInstance : public ShaderRenderCaseInstance
+{
+public:
+							ShaderOperatorCaseInstance	(Context&				context,
+														const bool				isVertexCase,
+														const ShaderEvaluator&	evaluator,
+														const UniformSetup&		uniformSetup,
+														const ShaderDataSpec	spec);
+	virtual					~ShaderOperatorCaseInstance	(void);
+
+private:
+	const ShaderDataSpec	m_spec;
+};
+
+ShaderOperatorCaseInstance::ShaderOperatorCaseInstance (Context&				context,
+														const bool				isVertexCase,
+														const ShaderEvaluator&	evaluator,
+														const UniformSetup&		uniformSetup,
+														const ShaderDataSpec	spec)
+	: ShaderRenderCaseInstance	(context, isVertexCase, evaluator, uniformSetup, DE_NULL)
+	, m_spec					(spec)
+{
+	// Setup the user attributes.
+	m_userAttribTransforms.resize(m_spec.numInputs);
+	for (int inputNdx = 0; inputNdx < m_spec.numInputs; inputNdx++)
+	{
+		const ShaderValue& v = m_spec.inputs[inputNdx];
+		DE_ASSERT(v.type != TYPE_LAST);
+
+		const float rangeMin	= v.rangeMin;
+		const float rangeMax	= v.rangeMax;
+		const float scale		= rangeMax - rangeMin;
+		const float minBias		= rangeMin;
+		const float maxBias		= rangeMax;
+		Mat4		attribMatrix;
+
+		for (int rowNdx = 0; rowNdx < 4; rowNdx++)
+		{
+			Vec4 row;
+
+			switch ((rowNdx + inputNdx) % 4)
+			{
+				case 0:	row = Vec4(scale, 0.0f, 0.0f, minBias);		break;
+				case 1:	row = Vec4(0.0f, scale, 0.0f, minBias);		break;
+				case 2:	row = Vec4(-scale, 0.0f, 0.0f, maxBias);	break;
+				case 3:	row = Vec4(0.0f, -scale, 0.0f, maxBias);	break;
+				default: DE_ASSERT(false);
+			}
+
+			attribMatrix.setRow(rowNdx, row);
+		}
+
+		m_userAttribTransforms[inputNdx] = attribMatrix;
+
+		const deUint32 location = 4u + inputNdx;
+		switch(inputNdx)
+		{
+			case 0: useAttribute(location, A_IN0); break;
+			case 1: useAttribute(location, A_IN1); break;
+			case 2: useAttribute(location, A_IN2); break;
+			case 3: useAttribute(location, A_IN3); break;
+			default: DE_ASSERT(false);
+		}
+	}
+}
+
+ShaderOperatorCaseInstance::~ShaderOperatorCaseInstance (void)
+{
+}
+
+// ShaderOperatorCase
+
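+// Test case wrapping a single operator expression; setupShaderData() below generates the matching
+// vertex and fragment shader sources.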
+class ShaderOperatorCase : public ShaderRenderCase
+{
+public:
+								ShaderOperatorCase		(tcu::TestContext&		testCtx,
+														 const char*			caseName,
+														 const char*			description,
+														 const bool				isVertexCase,
+														 const ShaderEvalFunc	evalFunc,
+														 const std::string&		shaderOp,
+														 const ShaderDataSpec&	spec);
+	virtual						~ShaderOperatorCase		(void);
+
+	virtual TestInstance*		createInstance			(Context& context) const;
+
+protected:
+	void						setupShaderData			(void);
+
+private:
+								ShaderOperatorCase		(const ShaderOperatorCase&);	// not allowed!
+	ShaderOperatorCase&			operator=				(const ShaderOperatorCase&);	// not allowed!
+
+	const ShaderDataSpec		m_spec;
+	const std::string			m_shaderOp;
+};
+
+ShaderOperatorCase::ShaderOperatorCase (tcu::TestContext&		testCtx,
+										const char*				caseName,
+										const char*				description,
+										const bool				isVertexCase,
+										const ShaderEvalFunc	evalFunc,
+										const std::string&		shaderOp,
+										const ShaderDataSpec&	spec)
+	: ShaderRenderCase	(testCtx,
+						 caseName,
+						 description,
+						 isVertexCase,
+						 new OperatorShaderEvaluator(evalFunc, spec.referenceScale, spec.referenceBias, getDataTypeScalarSize(spec.output)),
+						 DE_NULL,
+						 DE_NULL)
+	, m_spec			(spec)
+	, m_shaderOp		(shaderOp)
+{
+	setupShaderData();
+}
+
+TestInstance* ShaderOperatorCase::createInstance (Context& context) const
+{
+	DE_ASSERT(m_evaluator != DE_NULL);
+	DE_ASSERT(m_uniformSetup != DE_NULL);
+	return new ShaderOperatorCaseInstance(context, m_isVertexCase, *m_evaluator, *m_uniformSetup, m_spec);
+}
+
+void ShaderOperatorCase::setupShaderData (void)
+{
+	const char*			precision	= m_spec.precision != PRECISION_LAST ? getPrecisionName(m_spec.precision) : DE_NULL;
+	const char*			inputPrecision[MAX_INPUTS];
+
+	std::ostringstream	vtx;
+	std::ostringstream	frag;
+	std::ostringstream&	op			= m_isVertexCase ? vtx : frag;
+
+	std::string			header		=
+		"#version 310 es\n";
+
+	vtx << header;
+	frag << header;
+
+	// Compute precision for inputs.
+	for (int inputNdx = 0; inputNdx < m_spec.numInputs; inputNdx++)
+	{
+		const bool		isBoolVal	= de::inRange<int>(m_spec.inputs[inputNdx].type, TYPE_BOOL, TYPE_BOOL_VEC4);
+		const bool		isIntVal	= de::inRange<int>(m_spec.inputs[inputNdx].type, TYPE_INT, TYPE_INT_VEC4);
+		const bool		isUintVal	= de::inRange<int>(m_spec.inputs[inputNdx].type, TYPE_UINT, TYPE_UINT_VEC4);
+		// \note Mediump interpolators are used for booleans, and highp for integers.
+		const Precision	prec		= isBoolVal	? PRECISION_MEDIUMP
+									: isIntVal || isUintVal ? PRECISION_HIGHP
+									: m_spec.precision;
+		inputPrecision[inputNdx] = getPrecisionName(prec);
+	}
+
+	// Attributes.
+	vtx << "layout(location = 0) in highp vec4 a_position;\n";
+	for (int inputNdx = 0; inputNdx < m_spec.numInputs; inputNdx++)
+		vtx << "layout(location = " << 4 + inputNdx << ") in " << inputPrecision[inputNdx] << " vec4 a_in" << inputNdx << ";\n";
+
+	// Color output.
+	frag << "layout(location = 0) out mediump vec4 o_color;\n";
+
+	if (m_isVertexCase)
+	{
+		vtx << "layout(location = 0) out mediump vec4 v_color;\n";
+		frag << "layout(location = 0) in mediump vec4 v_color;\n";
+	}
+	else
+	{
+		for (int inputNdx = 0; inputNdx < m_spec.numInputs; inputNdx++)
+		{
+			vtx << "layout(location = " << inputNdx + 1 << ") out " << inputPrecision[inputNdx] << " vec4 v_in" << inputNdx << ";\n";
+			frag << "layout(location = " << inputNdx + 1 << ") in " << inputPrecision[inputNdx] << " vec4 v_in" << inputNdx << ";\n";
+		}
+	}
+
+	vtx << "\n";
+	vtx << "void main()\n";
+	vtx << "{\n";
+	vtx << "	gl_Position = a_position;\n";
+
+	frag << "\n";
+	frag << "void main()\n";
+	frag << "{\n";
+
+	// Expression inputs.
+	const std::string prefix = m_isVertexCase ? "a_" : "v_";
+	for (int inputNdx = 0; inputNdx < m_spec.numInputs; inputNdx++)
+	{
+		const DataType	inType		= m_spec.inputs[inputNdx].type;
+		const int		inSize		= getDataTypeScalarSize(inType);
+		const bool		isInt		= de::inRange<int>(inType, TYPE_INT, TYPE_INT_VEC4);
+		const bool		isUint		= de::inRange<int>(inType, TYPE_UINT, TYPE_UINT_VEC4);
+		const bool		isBool		= de::inRange<int>(inType, TYPE_BOOL, TYPE_BOOL_VEC4);
+		const char*		typeName	= getDataTypeName(inType);
+		const char*		swizzle		= s_inSwizzles[inputNdx][inSize - 1];
+
+		op << "\t";
+		if (precision && !isBool) op << precision << " ";
+
+		op << typeName << " in" << inputNdx << " = ";
+
+		if (isBool)
+		{
+			if (inSize == 1)	op << "(";
+			else				op << "greaterThan(";
+		}
+		else if (isInt || isUint)
+			op << typeName << "(";
+
+		op << prefix << "in" << inputNdx << "." << swizzle;
+
+		if (isBool)
+		{
+			if (inSize == 1)	op << " > 0.0)";
+			else				op << ", vec" << inSize << "(0.0))";
+		}
+		else if (isInt || isUint)
+			op << ")";
+
+		op << ";\n";
+	}
+
+	// Result variable.
+	{
+		const char* outTypeName = getDataTypeName(m_spec.output);
+		const bool	isBoolOut	= de::inRange<int>(m_spec.output, TYPE_BOOL, TYPE_BOOL_VEC4);
+
+		op << "\t";
+		if (precision && !isBoolOut) op << precision << " ";
+		op << outTypeName << " res = " << outTypeName << "(0.0);\n\n";
+	}
+
+	// Expression.
+	op << "\t" << m_shaderOp << "\n\n";
+
+	// Convert to color.
+	const bool	isResFloatVec	= de::inRange<int>(m_spec.output, TYPE_FLOAT, TYPE_FLOAT_VEC4);
+	const int	outScalarSize	= getDataTypeScalarSize(m_spec.output);
+
+	op << "\thighp vec4 color = vec4(0.0, 0.0, 0.0, 1.0);\n";
+	op << "\tcolor." << s_outSwizzles[outScalarSize-1] << " = ";
+
+	if (!isResFloatVec && outScalarSize == 1)
+		op << "float(res)";
+	else if (!isResFloatVec)
+		op << "vec" << outScalarSize << "(res)";
+	else
+		op << "res";
+
+	op << ";\n";
+
+	// Scale & bias.
+	const float	resultScale	= m_spec.resultScale;
+	const float	resultBias	= m_spec.resultBias;
+	if ((resultScale != 1.0f) || (resultBias != 0.0f))
+	{
+		op << "\tcolor = color";
+		if (resultScale != 1.0f) op << " * " << twoValuedVec4(de::toString(resultScale),		"1.0", s_outSwizzleChannelMasks[outScalarSize - 1]);
+		if (resultBias != 0.0f)  op << " + " << twoValuedVec4(de::floatToString(resultBias, 2),	"0.0", s_outSwizzleChannelMasks[outScalarSize - 1]);
+		op << ";\n";
+	}
+
+	// Write the final color: the vertex case routes it through v_color, the fragment case forwards the raw inputs.
+	if (m_isVertexCase)
+	{
+		vtx << "	v_color = color;\n";
+		frag << "	o_color = v_color;\n";
+	}
+	else
+	{
+		for (int inputNdx = 0; inputNdx < m_spec.numInputs; inputNdx++)
+			vtx << "	v_in" << inputNdx << " = a_in" << inputNdx << ";\n";
+		frag << "	o_color = color;\n";
+	}
+
+	vtx << "}\n";
+	frag << "}\n";
+
+	m_vertShaderSource = vtx.str();
+	m_fragShaderSource = frag.str();
+}
+
+ShaderOperatorCase::~ShaderOperatorCase (void)
+{
+}
+
+// Vector math functions.
+template<typename T> inline T nop (T f) { return f; }
+
+template <typename T, int Size>
+Vector<T, Size> nop (const Vector<T, Size>& v) { return v; }
+
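+// The DECLARE_*_FUNCS macros generate ShaderEvalFunc wrappers per result size (scalar, vec2, vec3, vec4);
+// inputs are picked with the same swizzles as s_inSwizzles and results are written to the channels of s_outSwizzles.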
+#define DECLARE_UNARY_GENTYPE_FUNCS(FUNC_NAME)																			\
+	void eval_##FUNC_NAME##_float	(ShaderEvalContext& c) { c.color.x()	= FUNC_NAME(c.in[0].swizzle(2)).x(); }		\
+	void eval_##FUNC_NAME##_vec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1)); }		\
+	void eval_##FUNC_NAME##_vec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1)); }	\
+	void eval_##FUNC_NAME##_vec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0)); }
+
+#define DECLARE_BINARY_GENTYPE_FUNCS(FUNC_NAME)																											\
+	void eval_##FUNC_NAME##_float	(ShaderEvalContext& c) { c.color.x()	= FUNC_NAME(c.in[0].swizzle(2),          c.in[1].swizzle(0)).x(); }			\
+	void eval_##FUNC_NAME##_vec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1),       c.in[1].swizzle(1, 0)); }			\
+	void eval_##FUNC_NAME##_vec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1),    c.in[1].swizzle(1, 2, 0)); }		\
+	void eval_##FUNC_NAME##_vec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0), c.in[1].swizzle(3, 2, 1, 0)); }
+
+#define DECLARE_TERNARY_GENTYPE_FUNCS(FUNC_NAME)																																	\
+	void eval_##FUNC_NAME##_float	(ShaderEvalContext& c) { c.color.x()	= FUNC_NAME(c.in[0].swizzle(2),          c.in[1].swizzle(0),          c.in[2].swizzle(1)).x(); }		\
+	void eval_##FUNC_NAME##_vec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1),       c.in[1].swizzle(1, 0),       c.in[2].swizzle(2, 1)); }			\
+	void eval_##FUNC_NAME##_vec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1),    c.in[1].swizzle(1, 2, 0),    c.in[2].swizzle(3, 1, 2)); }		\
+	void eval_##FUNC_NAME##_vec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0), c.in[1].swizzle(3, 2, 1, 0), c.in[2].swizzle(0, 3, 2, 1)); }
+
+#define DECLARE_UNARY_SCALAR_GENTYPE_FUNCS(FUNC_NAME)																	\
+	void eval_##FUNC_NAME##_float	(ShaderEvalContext& c) { c.color.x()	= FUNC_NAME(c.in[0].swizzle(2)); }			\
+	void eval_##FUNC_NAME##_vec2	(ShaderEvalContext& c) { c.color.x()	= FUNC_NAME(c.in[0].swizzle(3, 1)); }		\
+	void eval_##FUNC_NAME##_vec3	(ShaderEvalContext& c) { c.color.x()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1)); }	\
+	void eval_##FUNC_NAME##_vec4	(ShaderEvalContext& c) { c.color.x()	= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0)); }
+
+#define DECLARE_BINARY_SCALAR_GENTYPE_FUNCS(FUNC_NAME)																									\
+	void eval_##FUNC_NAME##_float	(ShaderEvalContext& c) { c.color.x()	= FUNC_NAME(c.in[0].swizzle(2),          c.in[1].swizzle(0)); }				\
+	void eval_##FUNC_NAME##_vec2	(ShaderEvalContext& c) { c.color.x()	= FUNC_NAME(c.in[0].swizzle(3, 1),       c.in[1].swizzle(1, 0)); }			\
+	void eval_##FUNC_NAME##_vec3	(ShaderEvalContext& c) { c.color.x()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1),    c.in[1].swizzle(1, 2, 0)); }		\
+	void eval_##FUNC_NAME##_vec4	(ShaderEvalContext& c) { c.color.x()	= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0), c.in[1].swizzle(3, 2, 1, 0)); }
+
+#define DECLARE_BINARY_BOOL_FUNCS(FUNC_NAME)																		\
+	void eval_##FUNC_NAME##_bool	(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME(c.in[0].z() > 0.0f, c.in[1].x() > 0.0f); }
+
+#define DECLARE_UNARY_BOOL_GENTYPE_FUNCS(FUNC_NAME)																											\
+	void eval_##FUNC_NAME##_bool	(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME(c.in[0].z() > 0.0f); }										\
+	void eval_##FUNC_NAME##_bvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(greaterThan(c.in[0].swizzle(3, 1), Vec2(0.0f))).asFloat(); }		\
+	void eval_##FUNC_NAME##_bvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(greaterThan(c.in[0].swizzle(2, 0, 1), Vec3(0.0f))).asFloat(); }		\
+	void eval_##FUNC_NAME##_bvec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(greaterThan(c.in[0].swizzle(1, 2, 3, 0), Vec4(0.0f))).asFloat(); }
+
+#define DECLARE_TERNARY_BOOL_GENTYPE_FUNCS(FUNC_NAME)																																																					\
+	void eval_##FUNC_NAME##_bool	(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME(c.in[0].z() > 0.0f,                            c.in[1].x() > 0.0f,                                   c.in[2].y() > 0.0f); }												\
+	void eval_##FUNC_NAME##_bvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(greaterThan(c.in[0].swizzle(3, 1), Vec2(0.0f)),       greaterThan(c.in[1].swizzle(1, 0), Vec2(0.0f)),       greaterThan(c.in[2].swizzle(2, 1), Vec2(0.0f))).asFloat(); }		\
+	void eval_##FUNC_NAME##_bvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(greaterThan(c.in[0].swizzle(2, 0, 1), Vec3(0.0f)),    greaterThan(c.in[1].swizzle(1, 2, 0), Vec3(0.0f)),    greaterThan(c.in[2].swizzle(3, 1, 2), Vec3(0.0f))).asFloat(); }		\
+	void eval_##FUNC_NAME##_bvec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(greaterThan(c.in[0].swizzle(1, 2, 3, 0), Vec4(0.0f)), greaterThan(c.in[1].swizzle(3, 2, 1, 0), Vec4(0.0f)), greaterThan(c.in[2].swizzle(0, 3, 2, 1), Vec4(0.0f))).asFloat(); }
+
+#define DECLARE_UNARY_INT_GENTYPE_FUNCS(FUNC_NAME)																						\
+	void eval_##FUNC_NAME##_int		(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME((int)c.in[0].z()); }						\
+	void eval_##FUNC_NAME##_ivec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asInt()).asFloat(); }		\
+	void eval_##FUNC_NAME##_ivec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asInt()).asFloat(); }	\
+	void eval_##FUNC_NAME##_ivec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asInt()).asFloat(); }
+
+#define DECLARE_BINARY_INT_GENTYPE_FUNCS(FUNC_NAME)																																\
+	void eval_##FUNC_NAME##_int		(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME((int)c.in[0].z(),				(int)c.in[1].x()); }							\
+	void eval_##FUNC_NAME##_ivec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asInt(),			c.in[1].swizzle(1, 0).asInt()).asFloat(); }		\
+	void eval_##FUNC_NAME##_ivec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asInt(),		c.in[1].swizzle(1, 2, 0).asInt()).asFloat(); }	\
+	void eval_##FUNC_NAME##_ivec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asInt(),	c.in[1].swizzle(3, 2, 1, 0).asInt()).asFloat(); }
+
+#define DECLARE_UNARY_UINT_GENTYPE_FUNCS(FUNC_NAME)																						\
+	void eval_##FUNC_NAME##_uint	(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME((deUint32)c.in[0].z()); }				\
+	void eval_##FUNC_NAME##_uvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asUint()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asUint()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asUint()).asFloat(); }
+
+#define DECLARE_BINARY_UINT_GENTYPE_FUNCS(FUNC_NAME)																															\
+	void eval_##FUNC_NAME##_uint	(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME((deUint32)c.in[0].z(),			(deUint32)c.in[1].x()); }						\
+	void eval_##FUNC_NAME##_uvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asUint(),			c.in[1].swizzle(1, 0).asUint()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asUint(),		c.in[1].swizzle(1, 2, 0).asUint()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asUint(),	c.in[1].swizzle(3, 2, 1, 0).asUint()).asFloat(); }
+
+#define DECLARE_TERNARY_INT_GENTYPE_FUNCS(FUNC_NAME)																																								\
+	void eval_##FUNC_NAME##_int		(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME((int)c.in[0].z(),				(int)c.in[1].x(),					(int)c.in[2].y()); }							\
+	void eval_##FUNC_NAME##_ivec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asInt(),			c.in[1].swizzle(1, 0).asInt(),       c.in[2].swizzle(2, 1).asInt()).asFloat(); }	\
+	void eval_##FUNC_NAME##_ivec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asInt(),		c.in[1].swizzle(1, 2, 0).asInt(),    c.in[2].swizzle(3, 1, 2).asInt()).asFloat(); }	\
+	void eval_##FUNC_NAME##_ivec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asInt(),	c.in[1].swizzle(3, 2, 1, 0).asInt(), c.in[2].swizzle(0, 3, 2, 1).asInt()).asFloat(); }
+
+#define DECLARE_TERNARY_UINT_GENTYPE_FUNCS(FUNC_NAME)																																									\
+	void eval_##FUNC_NAME##_uint	(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME((deUint32)c.in[0].z(),			(deUint32)c.in[1].x(),					(deUint32)c.in[2].y()); }						\
+	void eval_##FUNC_NAME##_uvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asUint(),			c.in[1].swizzle(1, 0).asUint(),			c.in[2].swizzle(2, 1).asUint()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asUint(),		c.in[1].swizzle(1, 2, 0).asUint(),		c.in[2].swizzle(3, 1, 2).asUint()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asUint(),	c.in[1].swizzle(3, 2, 1, 0).asUint(),	c.in[2].swizzle(0, 3, 2, 1).asUint()).asFloat(); }
+
+#define DECLARE_VEC_FLOAT_FUNCS(FUNC_NAME)																								\
+	void eval_##FUNC_NAME##_vec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1),			c.in[1].x()); } \
+	void eval_##FUNC_NAME##_vec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1),		c.in[1].x()); } \
+	void eval_##FUNC_NAME##_vec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0),	c.in[1].x()); }
+
+#define DECLARE_VEC_FLOAT_FLOAT_FUNCS(FUNC_NAME) \
+	void eval_##FUNC_NAME##_vec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1),			c.in[1].x(), c.in[2].y()); } \
+	void eval_##FUNC_NAME##_vec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1),		c.in[1].x(), c.in[2].y()); } \
+	void eval_##FUNC_NAME##_vec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0),	c.in[1].x(), c.in[2].y()); }
+
+#define DECLARE_VEC_VEC_FLOAT_FUNCS(FUNC_NAME) \
+	void eval_##FUNC_NAME##_vec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1),			c.in[1].swizzle(1, 0),			c.in[2].y()); } \
+	void eval_##FUNC_NAME##_vec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1),		c.in[1].swizzle(1, 2, 0),		c.in[2].y()); } \
+	void eval_##FUNC_NAME##_vec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0),	c.in[1].swizzle(3, 2, 1, 0),	c.in[2].y()); }
+
+#define DECLARE_FLOAT_FLOAT_VEC_FUNCS(FUNC_NAME) \
+	void eval_##FUNC_NAME##_vec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].z(), c.in[1].x(), c.in[2].swizzle(2, 1)); }			\
+	void eval_##FUNC_NAME##_vec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].z(), c.in[1].x(), c.in[2].swizzle(3, 1, 2)); }		\
+	void eval_##FUNC_NAME##_vec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].z(), c.in[1].x(), c.in[2].swizzle(0, 3, 2, 1)); }
+
+#define DECLARE_FLOAT_VEC_FUNCS(FUNC_NAME)																												\
+	void eval_##FUNC_NAME##_vec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].z(),					c.in[1].swizzle(1, 0)); }		\
+	void eval_##FUNC_NAME##_vec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].z(),					c.in[1].swizzle(1, 2, 0)); }	\
+	void eval_##FUNC_NAME##_vec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].z(),					c.in[1].swizzle(3, 2, 1, 0)); }
+
+#define DECLARE_IVEC_INT_FUNCS(FUNC_NAME)																														\
+	void eval_##FUNC_NAME##_ivec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asInt(),			(int)c.in[1].x()).asFloat(); }	\
+	void eval_##FUNC_NAME##_ivec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asInt(),		(int)c.in[1].x()).asFloat(); }	\
+	void eval_##FUNC_NAME##_ivec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asInt(),	(int)c.in[1].x()).asFloat(); }
+
+#define DECLARE_IVEC_INT_INT_FUNCS(FUNC_NAME) \
+	void eval_##FUNC_NAME##_ivec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asInt(),			(int)c.in[1].x(), (int)c.in[2].y()).asFloat(); } \
+	void eval_##FUNC_NAME##_ivec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asInt(),		(int)c.in[1].x(), (int)c.in[2].y()).asFloat(); } \
+	void eval_##FUNC_NAME##_ivec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asInt(),	(int)c.in[1].x(), (int)c.in[2].y()).asFloat(); }
+
+#define DECLARE_INT_IVEC_FUNCS(FUNC_NAME)																																	\
+	void eval_##FUNC_NAME##_ivec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME((int)c.in[0].z(),					c.in[1].swizzle(1, 0).asInt()).asFloat(); }		\
+	void eval_##FUNC_NAME##_ivec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME((int)c.in[0].z(),					c.in[1].swizzle(1, 2, 0).asInt()).asFloat(); }	\
+	void eval_##FUNC_NAME##_ivec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME((int)c.in[0].z(),					c.in[1].swizzle(3, 2, 1, 0).asInt()).asFloat(); }
+
+#define DECLARE_UVEC_UINT_FUNCS(FUNC_NAME)																															\
+	void eval_##FUNC_NAME##_uvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asUint(),			(deUint32)c.in[1].x()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asUint(),		(deUint32)c.in[1].x()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asUint(),	(deUint32)c.in[1].x()).asFloat(); }
+
+#define DECLARE_UVEC_UINT_UINT_FUNCS(FUNC_NAME) \
+	void eval_##FUNC_NAME##_uvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asUint(),			(deUint32)c.in[1].x(), (deUint32)c.in[2].y()).asFloat(); } \
+	void eval_##FUNC_NAME##_uvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asUint(),		(deUint32)c.in[1].x(), (deUint32)c.in[2].y()).asFloat(); } \
+	void eval_##FUNC_NAME##_uvec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asUint(),	(deUint32)c.in[1].x(), (deUint32)c.in[2].y()).asFloat(); }
+
+#define DECLARE_UINT_UVEC_FUNCS(FUNC_NAME)																																		\
+	void eval_##FUNC_NAME##_uvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME((deUint32)c.in[0].z(),					c.in[1].swizzle(1, 0).asUint()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME((deUint32)c.in[0].z(),					c.in[1].swizzle(1, 2, 0).asUint()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME((deUint32)c.in[0].z(),					c.in[1].swizzle(3, 2, 1, 0).asUint()).asFloat(); }
+
+#define DECLARE_BINARY_INT_VEC_FUNCS(FUNC_NAME)																																	\
+	void eval_##FUNC_NAME##_ivec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asInt(),			c.in[1].swizzle(1, 0).asInt()).asFloat(); }		\
+	void eval_##FUNC_NAME##_ivec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asInt(),		c.in[1].swizzle(1, 2, 0).asInt()).asFloat(); }	\
+	void eval_##FUNC_NAME##_ivec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asInt(),	c.in[1].swizzle(3, 2, 1, 0).asInt()).asFloat(); }
+
+#define DECLARE_BINARY_UINT_VEC_FUNCS(FUNC_NAME)																																\
+	void eval_##FUNC_NAME##_uvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asUint(),			c.in[1].swizzle(1, 0).asUint()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asUint(),		c.in[1].swizzle(1, 2, 0).asUint()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asUint(),	c.in[1].swizzle(3, 2, 1, 0).asUint()).asFloat(); }
+
+#define DECLARE_UINT_INT_GENTYPE_FUNCS(FUNC_NAME)																																\
+	void eval_##FUNC_NAME##_uint	(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME((deUint32)c.in[0].z(),			(int)c.in[1].x()); }							\
+	void eval_##FUNC_NAME##_uvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asUint(),			c.in[1].swizzle(1, 0).asInt()).asFloat(); }		\
+	void eval_##FUNC_NAME##_uvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asUint(),		c.in[1].swizzle(1, 2, 0).asInt()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asUint(),	c.in[1].swizzle(3, 2, 1, 0).asInt()).asFloat(); }
+
+#define DECLARE_UVEC_INT_FUNCS(FUNC_NAME)																														\
+	void eval_##FUNC_NAME##_uvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asUint(),			(int)c.in[1].x()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asUint(),		(int)c.in[1].x()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asUint(),	(int)c.in[1].x()).asFloat(); }
+
+
+// Operators.
+
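+// Evaluation wrapper instantiations for the arithmetic, bitwise, shift, logical and selection operators.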
+DECLARE_UNARY_GENTYPE_FUNCS(nop)
+DECLARE_UNARY_GENTYPE_FUNCS(negate)
+DECLARE_UNARY_GENTYPE_FUNCS(addOne)
+DECLARE_UNARY_GENTYPE_FUNCS(subOne)
+DECLARE_BINARY_GENTYPE_FUNCS(add)
+DECLARE_BINARY_GENTYPE_FUNCS(sub)
+DECLARE_BINARY_GENTYPE_FUNCS(mul)
+DECLARE_BINARY_GENTYPE_FUNCS(div)
+
+void eval_selection_float	(ShaderEvalContext& c) { c.color.x()	= selection(c.in[0].z() > 0.0f,		c.in[1].x(),					c.in[2].y()); }
+void eval_selection_vec2	(ShaderEvalContext& c) { c.color.yz()	= selection(c.in[0].z() > 0.0f,		c.in[1].swizzle(1, 0),			c.in[2].swizzle(2, 1)); }
+void eval_selection_vec3	(ShaderEvalContext& c) { c.color.xyz()	= selection(c.in[0].z() > 0.0f,		c.in[1].swizzle(1, 2, 0),		c.in[2].swizzle(3, 1, 2)); }
+void eval_selection_vec4	(ShaderEvalContext& c) { c.color		= selection(c.in[0].z() > 0.0f,		c.in[1].swizzle(3, 2, 1, 0),	c.in[2].swizzle(0, 3, 2, 1)); }
+
+DECLARE_UNARY_INT_GENTYPE_FUNCS(nop)
+DECLARE_UNARY_INT_GENTYPE_FUNCS(negate)
+DECLARE_UNARY_INT_GENTYPE_FUNCS(addOne)
+DECLARE_UNARY_INT_GENTYPE_FUNCS(subOne)
+DECLARE_UNARY_INT_GENTYPE_FUNCS(bitwiseNot)
+DECLARE_BINARY_INT_GENTYPE_FUNCS(add)
+DECLARE_BINARY_INT_GENTYPE_FUNCS(sub)
+DECLARE_BINARY_INT_GENTYPE_FUNCS(mul)
+DECLARE_BINARY_INT_GENTYPE_FUNCS(div)
+DECLARE_BINARY_INT_GENTYPE_FUNCS(mod)
+DECLARE_BINARY_INT_GENTYPE_FUNCS(bitwiseAnd)
+DECLARE_BINARY_INT_GENTYPE_FUNCS(bitwiseOr)
+DECLARE_BINARY_INT_GENTYPE_FUNCS(bitwiseXor)
+
+void eval_leftShift_int		(ShaderEvalContext& c) { c.color.x()	= (float)leftShift((int)c.in[0].z(),				(int)c.in[1].x()); }
+DECLARE_BINARY_INT_VEC_FUNCS(leftShift)
+void eval_rightShift_int	(ShaderEvalContext& c) { c.color.x()	= (float)rightShift((int)c.in[0].z(),				(int)c.in[1].x()); }
+DECLARE_BINARY_INT_VEC_FUNCS(rightShift)
+DECLARE_IVEC_INT_FUNCS(leftShiftVecScalar)
+DECLARE_IVEC_INT_FUNCS(rightShiftVecScalar)
+
+void eval_selection_int		(ShaderEvalContext& c) { c.color.x()	= (float)selection(c.in[0].z() > 0.0f,	(int)c.in[1].x(),						(int)c.in[2].y()); }
+void eval_selection_ivec2	(ShaderEvalContext& c) { c.color.yz()	= selection(c.in[0].z() > 0.0f,			c.in[1].swizzle(1, 0).asInt(),			c.in[2].swizzle(2, 1).asInt()).asFloat(); }
+void eval_selection_ivec3	(ShaderEvalContext& c) { c.color.xyz()	= selection(c.in[0].z() > 0.0f,			c.in[1].swizzle(1, 2, 0).asInt(),		c.in[2].swizzle(3, 1, 2).asInt()).asFloat(); }
+void eval_selection_ivec4	(ShaderEvalContext& c) { c.color		= selection(c.in[0].z() > 0.0f,			c.in[1].swizzle(3, 2, 1, 0).asInt(),	c.in[2].swizzle(0, 3, 2, 1).asInt()).asFloat(); }
+
+DECLARE_UNARY_UINT_GENTYPE_FUNCS(nop)
+DECLARE_UNARY_UINT_GENTYPE_FUNCS(negate)
+DECLARE_UNARY_UINT_GENTYPE_FUNCS(bitwiseNot)
+DECLARE_UNARY_UINT_GENTYPE_FUNCS(addOne)
+DECLARE_UNARY_UINT_GENTYPE_FUNCS(subOne)
+DECLARE_BINARY_UINT_GENTYPE_FUNCS(add)
+DECLARE_BINARY_UINT_GENTYPE_FUNCS(sub)
+DECLARE_BINARY_UINT_GENTYPE_FUNCS(mul)
+DECLARE_BINARY_UINT_GENTYPE_FUNCS(div)
+DECLARE_BINARY_UINT_GENTYPE_FUNCS(mod)
+DECLARE_BINARY_UINT_GENTYPE_FUNCS(bitwiseAnd)
+DECLARE_BINARY_UINT_GENTYPE_FUNCS(bitwiseOr)
+DECLARE_BINARY_UINT_GENTYPE_FUNCS(bitwiseXor)
+
+DECLARE_UINT_INT_GENTYPE_FUNCS(leftShift)
+DECLARE_UINT_INT_GENTYPE_FUNCS(rightShift)
+DECLARE_UVEC_INT_FUNCS(leftShiftVecScalar)
+DECLARE_UVEC_INT_FUNCS(rightShiftVecScalar)
+
+void eval_selection_uint	(ShaderEvalContext& c) { c.color.x()	= (float)selection(c.in[0].z() > 0.0f,	(deUint32)c.in[1].x(),					(deUint32)c.in[2].y()); }
+void eval_selection_uvec2	(ShaderEvalContext& c) { c.color.yz()	= selection(c.in[0].z() > 0.0f,			c.in[1].swizzle(1, 0).asUint(),			c.in[2].swizzle(2, 1).asUint()).asFloat(); }
+void eval_selection_uvec3	(ShaderEvalContext& c) { c.color.xyz()	= selection(c.in[0].z() > 0.0f,			c.in[1].swizzle(1, 2, 0).asUint(),		c.in[2].swizzle(3, 1, 2).asUint()).asFloat(); }
+void eval_selection_uvec4	(ShaderEvalContext& c) { c.color		= selection(c.in[0].z() > 0.0f,			c.in[1].swizzle(3, 2, 1, 0).asUint(),	c.in[2].swizzle(0, 3, 2, 1).asUint()).asFloat(); }
+
+DECLARE_UNARY_BOOL_GENTYPE_FUNCS(boolNot)
+DECLARE_BINARY_BOOL_FUNCS(logicalAnd)
+DECLARE_BINARY_BOOL_FUNCS(logicalOr)
+DECLARE_BINARY_BOOL_FUNCS(logicalXor)
+
+void eval_selection_bool	(ShaderEvalContext& c) { c.color.x()	= (float)selection(c.in[0].z() > 0.0f,	c.in[1].x() > 0.0f,														c.in[2].y() > 0.0f); }
+void eval_selection_bvec2	(ShaderEvalContext& c) { c.color.yz()	= selection(c.in[0].z() > 0.0f,			greaterThan(c.in[1].swizzle(1, 0), Vec2(0.0f, 0.0f)),					greaterThan(c.in[2].swizzle(2, 1), Vec2(0.0f, 0.0f))).asFloat(); }
+void eval_selection_bvec3	(ShaderEvalContext& c) { c.color.xyz()	= selection(c.in[0].z() > 0.0f,			greaterThan(c.in[1].swizzle(1, 2, 0), Vec3(0.0f, 0.0f, 0.0f)),			greaterThan(c.in[2].swizzle(3, 1, 2), Vec3(0.0f, 0.0f, 0.0f))).asFloat(); }
+void eval_selection_bvec4	(ShaderEvalContext& c) { c.color		= selection(c.in[0].z() > 0.0f,			greaterThan(c.in[1].swizzle(3, 2, 1, 0), Vec4(0.0f, 0.0f, 0.0f, 0.0f)),	greaterThan(c.in[2].swizzle(0, 3, 2, 1), Vec4(0.0f, 0.0f, 0.0f, 0.0f))).asFloat(); }
+
+DECLARE_VEC_FLOAT_FUNCS(addVecScalar)
+DECLARE_VEC_FLOAT_FUNCS(subVecScalar)
+DECLARE_VEC_FLOAT_FUNCS(mulVecScalar)
+DECLARE_VEC_FLOAT_FUNCS(divVecScalar)
+
+DECLARE_FLOAT_VEC_FUNCS(addScalarVec)
+DECLARE_FLOAT_VEC_FUNCS(subScalarVec)
+DECLARE_FLOAT_VEC_FUNCS(mulScalarVec)
+DECLARE_FLOAT_VEC_FUNCS(divScalarVec)
+
+DECLARE_IVEC_INT_FUNCS(addVecScalar)
+DECLARE_IVEC_INT_FUNCS(subVecScalar)
+DECLARE_IVEC_INT_FUNCS(mulVecScalar)
+DECLARE_IVEC_INT_FUNCS(divVecScalar)
+DECLARE_IVEC_INT_FUNCS(modVecScalar)
+DECLARE_IVEC_INT_FUNCS(bitwiseAndVecScalar)
+DECLARE_IVEC_INT_FUNCS(bitwiseOrVecScalar)
+DECLARE_IVEC_INT_FUNCS(bitwiseXorVecScalar)
+
+DECLARE_INT_IVEC_FUNCS(addScalarVec)
+DECLARE_INT_IVEC_FUNCS(subScalarVec)
+DECLARE_INT_IVEC_FUNCS(mulScalarVec)
+DECLARE_INT_IVEC_FUNCS(divScalarVec)
+DECLARE_INT_IVEC_FUNCS(modScalarVec)
+DECLARE_INT_IVEC_FUNCS(bitwiseAndScalarVec)
+DECLARE_INT_IVEC_FUNCS(bitwiseOrScalarVec)
+DECLARE_INT_IVEC_FUNCS(bitwiseXorScalarVec)
+
+DECLARE_UVEC_UINT_FUNCS(addVecScalar)
+DECLARE_UVEC_UINT_FUNCS(subVecScalar)
+DECLARE_UVEC_UINT_FUNCS(mulVecScalar)
+DECLARE_UVEC_UINT_FUNCS(divVecScalar)
+DECLARE_UVEC_UINT_FUNCS(modVecScalar)
+DECLARE_UVEC_UINT_FUNCS(bitwiseAndVecScalar)
+DECLARE_UVEC_UINT_FUNCS(bitwiseOrVecScalar)
+DECLARE_UVEC_UINT_FUNCS(bitwiseXorVecScalar)
+
+DECLARE_UINT_UVEC_FUNCS(addScalarVec)
+DECLARE_UINT_UVEC_FUNCS(subScalarVec)
+DECLARE_UINT_UVEC_FUNCS(mulScalarVec)
+DECLARE_UINT_UVEC_FUNCS(divScalarVec)
+DECLARE_UINT_UVEC_FUNCS(modScalarVec)
+DECLARE_UINT_UVEC_FUNCS(bitwiseAndScalarVec)
+DECLARE_UINT_UVEC_FUNCS(bitwiseOrScalarVec)
+DECLARE_UINT_UVEC_FUNCS(bitwiseXorScalarVec)
+
+// Built-in functions.
+
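+// Evaluation wrapper instantiations for the angle/trigonometry, exponential, common and geometric built-in functions.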
+DECLARE_UNARY_GENTYPE_FUNCS(radians)
+DECLARE_UNARY_GENTYPE_FUNCS(degrees)
+DECLARE_UNARY_GENTYPE_FUNCS(sin)
+DECLARE_UNARY_GENTYPE_FUNCS(cos)
+DECLARE_UNARY_GENTYPE_FUNCS(tan)
+DECLARE_UNARY_GENTYPE_FUNCS(asin)
+DECLARE_UNARY_GENTYPE_FUNCS(acos)
+DECLARE_UNARY_GENTYPE_FUNCS(atan)
+DECLARE_BINARY_GENTYPE_FUNCS(atan2)
+DECLARE_UNARY_GENTYPE_FUNCS(sinh)
+DECLARE_UNARY_GENTYPE_FUNCS(cosh)
+DECLARE_UNARY_GENTYPE_FUNCS(tanh)
+DECLARE_UNARY_GENTYPE_FUNCS(asinh)
+DECLARE_UNARY_GENTYPE_FUNCS(acosh)
+DECLARE_UNARY_GENTYPE_FUNCS(atanh)
+
+DECLARE_BINARY_GENTYPE_FUNCS(pow)
+DECLARE_UNARY_GENTYPE_FUNCS(exp)
+DECLARE_UNARY_GENTYPE_FUNCS(log)
+DECLARE_UNARY_GENTYPE_FUNCS(exp2)
+DECLARE_UNARY_GENTYPE_FUNCS(log2)
+DECLARE_UNARY_GENTYPE_FUNCS(sqrt)
+DECLARE_UNARY_GENTYPE_FUNCS(inverseSqrt)
+
+DECLARE_UNARY_GENTYPE_FUNCS(abs)
+DECLARE_UNARY_GENTYPE_FUNCS(sign)
+DECLARE_UNARY_GENTYPE_FUNCS(floor)
+DECLARE_UNARY_GENTYPE_FUNCS(trunc)
+DECLARE_UNARY_GENTYPE_FUNCS(roundToEven)
+DECLARE_UNARY_GENTYPE_FUNCS(ceil)
+DECLARE_UNARY_GENTYPE_FUNCS(fract)
+DECLARE_BINARY_GENTYPE_FUNCS(mod)
+DECLARE_VEC_FLOAT_FUNCS(modVecScalar)
+DECLARE_BINARY_GENTYPE_FUNCS(min)
+DECLARE_VEC_FLOAT_FUNCS(minVecScalar)
+DECLARE_BINARY_INT_GENTYPE_FUNCS(min)
+DECLARE_IVEC_INT_FUNCS(minVecScalar)
+DECLARE_BINARY_UINT_GENTYPE_FUNCS(min)
+DECLARE_UVEC_UINT_FUNCS(minVecScalar)
+DECLARE_BINARY_GENTYPE_FUNCS(max)
+DECLARE_VEC_FLOAT_FUNCS(maxVecScalar)
+DECLARE_BINARY_INT_GENTYPE_FUNCS(max)
+DECLARE_IVEC_INT_FUNCS(maxVecScalar)
+DECLARE_BINARY_UINT_GENTYPE_FUNCS(max)
+DECLARE_UVEC_UINT_FUNCS(maxVecScalar)
+DECLARE_TERNARY_GENTYPE_FUNCS(clamp)
+DECLARE_VEC_FLOAT_FLOAT_FUNCS(clampVecScalarScalar)
+DECLARE_TERNARY_INT_GENTYPE_FUNCS(clamp)
+DECLARE_IVEC_INT_INT_FUNCS(clampVecScalarScalar)
+DECLARE_TERNARY_UINT_GENTYPE_FUNCS(clamp)
+DECLARE_UVEC_UINT_UINT_FUNCS(clampVecScalarScalar)
+DECLARE_TERNARY_GENTYPE_FUNCS(mix)
+DECLARE_VEC_VEC_FLOAT_FUNCS(mixVecVecScalar)
+DECLARE_BINARY_GENTYPE_FUNCS(step)
+DECLARE_FLOAT_VEC_FUNCS(stepScalarVec)
+DECLARE_TERNARY_GENTYPE_FUNCS(smoothStep)
+DECLARE_FLOAT_FLOAT_VEC_FUNCS(smoothStepScalarScalarVec)
+
+DECLARE_UNARY_SCALAR_GENTYPE_FUNCS(length)
+DECLARE_BINARY_SCALAR_GENTYPE_FUNCS(distance)
+DECLARE_BINARY_SCALAR_GENTYPE_FUNCS(dot)
+void eval_cross_vec3 (ShaderEvalContext& c) { c.color.xyz()	= cross(c.in[0].swizzle(2, 0, 1), c.in[1].swizzle(1, 2, 0)); }
+
+DECLARE_UNARY_GENTYPE_FUNCS(normalize)
+DECLARE_TERNARY_GENTYPE_FUNCS(faceForward)
+DECLARE_BINARY_GENTYPE_FUNCS(reflect)
+
+void eval_refract_float	(ShaderEvalContext& c) { c.color.x()	= refract(c.in[0].z(),                 c.in[1].x(),                 c.in[2].y()); }
+void eval_refract_vec2	(ShaderEvalContext& c) { c.color.yz()	= refract(c.in[0].swizzle(3, 1),       c.in[1].swizzle(1, 0),       c.in[2].y()); }
+void eval_refract_vec3	(ShaderEvalContext& c) { c.color.xyz()	= refract(c.in[0].swizzle(2, 0, 1),    c.in[1].swizzle(1, 2, 0),    c.in[2].y()); }
+void eval_refract_vec4	(ShaderEvalContext& c) { c.color		= refract(c.in[0].swizzle(1, 2, 3, 0), c.in[1].swizzle(3, 2, 1, 0), c.in[2].y()); }
+
+// Compare functions.
+
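+// The plain COMPARE macros produce a single scalar result (e.g. allEqual/anyNotEqual), while the
+// *_CWISE_* variants compare component-wise and write a boolean vector converted to float.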
+#define DECLARE_FLOAT_COMPARE_FUNCS(FUNC_NAME)																											\
+	void eval_##FUNC_NAME##_float	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(c.in[0].z(),          c.in[1].x()); }						\
+	void eval_##FUNC_NAME##_vec2	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(c.in[0].swizzle(3, 1),       c.in[1].swizzle(1, 0)); }		\
+	void eval_##FUNC_NAME##_vec3	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(c.in[0].swizzle(2, 0, 1),    c.in[1].swizzle(1, 2, 0)); }	\
+	void eval_##FUNC_NAME##_vec4	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0), c.in[1].swizzle(3, 2, 1, 0)); }
+
+#define DECLARE_FLOAT_CWISE_COMPARE_FUNCS(FUNC_NAME)																											\
+	void eval_##FUNC_NAME##_float	(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME(c.in[0].z(),          c.in[1].x()); }							\
+	DECLARE_FLOAT_VEC_CWISE_COMPARE_FUNCS(FUNC_NAME)
+
+#define DECLARE_FLOAT_VEC_CWISE_COMPARE_FUNCS(FUNC_NAME)																											\
+	void eval_##FUNC_NAME##_vec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1),       c.in[1].swizzle(1, 0)).asFloat(); }		\
+	void eval_##FUNC_NAME##_vec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1),    c.in[1].swizzle(1, 2, 0)).asFloat(); }		\
+	void eval_##FUNC_NAME##_vec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0), c.in[1].swizzle(3, 2, 1, 0)).asFloat(); }
+
+#define DECLARE_INT_COMPARE_FUNCS(FUNC_NAME)																																	\
+	void eval_##FUNC_NAME##_int		(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(chopToInt(c.in[0].z()), chopToInt(c.in[1].x())); }									\
+	void eval_##FUNC_NAME##_ivec2	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(chopToInt(c.in[0].swizzle(3, 1)),       chopToInt(c.in[1].swizzle(1, 0))); }		\
+	void eval_##FUNC_NAME##_ivec3	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(chopToInt(c.in[0].swizzle(2, 0, 1)),    chopToInt(c.in[1].swizzle(1, 2, 0))); }		\
+	void eval_##FUNC_NAME##_ivec4	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(chopToInt(c.in[0].swizzle(1, 2, 3, 0)), chopToInt(c.in[1].swizzle(3, 2, 1, 0))); }
+
+#define DECLARE_INT_CWISE_COMPARE_FUNCS(FUNC_NAME)																																	\
+	void eval_##FUNC_NAME##_int		(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME(chopToInt(c.in[0].z()), chopToInt(c.in[1].x())); }									\
+	DECLARE_INT_VEC_CWISE_COMPARE_FUNCS(FUNC_NAME)
+
+#define DECLARE_INT_VEC_CWISE_COMPARE_FUNCS(FUNC_NAME)																																	\
+	void eval_##FUNC_NAME##_ivec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(chopToInt(c.in[0].swizzle(3, 1)),       chopToInt(c.in[1].swizzle(1, 0))).asFloat(); }		\
+	void eval_##FUNC_NAME##_ivec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(chopToInt(c.in[0].swizzle(2, 0, 1)),    chopToInt(c.in[1].swizzle(1, 2, 0))).asFloat(); }	\
+	void eval_##FUNC_NAME##_ivec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(chopToInt(c.in[0].swizzle(1, 2, 3, 0)), chopToInt(c.in[1].swizzle(3, 2, 1, 0))).asFloat(); }
+
+#define DECLARE_UINT_COMPARE_FUNCS(FUNC_NAME)																																\
+	void eval_##FUNC_NAME##_uint	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME((deUint32)c.in[0].z(), (deUint32)c.in[1].x()); }								\
+	void eval_##FUNC_NAME##_uvec2	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(c.in[0].swizzle(3, 1).asUint(),       c.in[1].swizzle(1, 0).asUint()); }		\
+	void eval_##FUNC_NAME##_uvec3	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(c.in[0].swizzle(2, 0, 1).asUint(),    c.in[1].swizzle(1, 2, 0).asUint()); }		\
+	void eval_##FUNC_NAME##_uvec4	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asUint(), c.in[1].swizzle(3, 2, 1, 0).asUint()); }
+
+#define DECLARE_UINT_CWISE_COMPARE_FUNCS(FUNC_NAME)																																\
+	DECLARE_UINT_SCALAR_CWISE_COMPARE_FUNCS(FUNC_NAME)																															\
+	void eval_##FUNC_NAME##_uvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(c.in[0].swizzle(3, 1).asUint(),       c.in[1].swizzle(1, 0).asUint()).asFloat(); }		\
+	void eval_##FUNC_NAME##_uvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(c.in[0].swizzle(2, 0, 1).asUint(),    c.in[1].swizzle(1, 2, 0).asUint()).asFloat(); }	\
+	void eval_##FUNC_NAME##_uvec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(c.in[0].swizzle(1, 2, 3, 0).asUint(), c.in[1].swizzle(3, 2, 1, 0).asUint()).asFloat(); }
+
+#define DECLARE_UINT_SCALAR_CWISE_COMPARE_FUNCS(FUNC_NAME)																																\
+	void eval_##FUNC_NAME##_uint	(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME((deUint32)c.in[0].z(), (deUint32)c.in[1].x()); }
+
+#define DECLARE_BOOL_COMPARE_FUNCS(FUNC_NAME)																																								\
+	void eval_##FUNC_NAME##_bool	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(c.in[0].z() > 0.0f, c.in[1].x() > 0.0f); }																		\
+	void eval_##FUNC_NAME##_bvec2	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(greaterThan(c.in[0].swizzle(3, 1), Vec2(0.0f)),       greaterThan(c.in[1].swizzle(1, 0), Vec2(0.0f))); }		\
+	void eval_##FUNC_NAME##_bvec3	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(greaterThan(c.in[0].swizzle(2, 0, 1), Vec3(0.0f)),    greaterThan(c.in[1].swizzle(1, 2, 0), Vec3(0.0f))); }		\
+	void eval_##FUNC_NAME##_bvec4	(ShaderEvalContext& c) { c.color.x() = (float)FUNC_NAME(greaterThan(c.in[0].swizzle(1, 2, 3, 0), Vec4(0.0f)), greaterThan(c.in[1].swizzle(3, 2, 1, 0), Vec4(0.0f))); }
+
+#define DECLARE_BOOL_CWISE_COMPARE_FUNCS(FUNC_NAME)																																								\
+	void eval_##FUNC_NAME##_bool	(ShaderEvalContext& c) { c.color.x()	= (float)FUNC_NAME(c.in[0].z() > 0.0f, c.in[1].x() > 0.0f); }																		\
+	DECLARE_BOOL_VEC_CWISE_COMPARE_FUNCS(FUNC_NAME)
+
+#define DECLARE_BOOL_VEC_CWISE_COMPARE_FUNCS(FUNC_NAME)																																								\
+	void eval_##FUNC_NAME##_bvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(greaterThan(c.in[0].swizzle(3, 1), Vec2(0.0f)),       greaterThan(c.in[1].swizzle(1, 0), Vec2(0.0f))).asFloat(); }		\
+	void eval_##FUNC_NAME##_bvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(greaterThan(c.in[0].swizzle(2, 0, 1), Vec3(0.0f)),    greaterThan(c.in[1].swizzle(1, 2, 0), Vec3(0.0f))).asFloat(); }	\
+	void eval_##FUNC_NAME##_bvec4	(ShaderEvalContext& c) { c.color		= FUNC_NAME(greaterThan(c.in[0].swizzle(1, 2, 3, 0), Vec4(0.0f)), greaterThan(c.in[1].swizzle(3, 2, 1, 0), Vec4(0.0f))).asFloat(); }
+
+DECLARE_FLOAT_COMPARE_FUNCS(allEqual)
+DECLARE_FLOAT_COMPARE_FUNCS(anyNotEqual)
+DECLARE_FLOAT_CWISE_COMPARE_FUNCS(lessThan)
+DECLARE_FLOAT_CWISE_COMPARE_FUNCS(lessThanEqual)
+DECLARE_FLOAT_CWISE_COMPARE_FUNCS(greaterThan)
+DECLARE_FLOAT_CWISE_COMPARE_FUNCS(greaterThanEqual)
+DECLARE_FLOAT_VEC_CWISE_COMPARE_FUNCS(equal)
+DECLARE_FLOAT_VEC_CWISE_COMPARE_FUNCS(notEqual)
+
+DECLARE_INT_COMPARE_FUNCS(allEqual)
+DECLARE_INT_COMPARE_FUNCS(anyNotEqual)
+DECLARE_INT_CWISE_COMPARE_FUNCS(lessThan)
+DECLARE_INT_CWISE_COMPARE_FUNCS(lessThanEqual)
+DECLARE_INT_CWISE_COMPARE_FUNCS(greaterThan)
+DECLARE_INT_CWISE_COMPARE_FUNCS(greaterThanEqual)
+DECLARE_INT_VEC_CWISE_COMPARE_FUNCS(equal)
+DECLARE_INT_VEC_CWISE_COMPARE_FUNCS(notEqual)
+
+DECLARE_UINT_COMPARE_FUNCS(allEqual)
+DECLARE_UINT_COMPARE_FUNCS(anyNotEqual)
+DECLARE_UINT_SCALAR_CWISE_COMPARE_FUNCS(lessThan)
+DECLARE_UINT_SCALAR_CWISE_COMPARE_FUNCS(lessThanEqual)
+DECLARE_UINT_SCALAR_CWISE_COMPARE_FUNCS(greaterThan)
+DECLARE_UINT_SCALAR_CWISE_COMPARE_FUNCS(greaterThanEqual)
+
+DECLARE_BOOL_COMPARE_FUNCS(allEqual)
+DECLARE_BOOL_COMPARE_FUNCS(anyNotEqual)
+DECLARE_BOOL_VEC_CWISE_COMPARE_FUNCS(equal)
+DECLARE_BOOL_VEC_CWISE_COMPARE_FUNCS(notEqual)
+
+// Boolean functions.
+
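+// Wrappers for any() and all(), which reduce a boolean vector to a single scalar result.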
+#define DECLARE_UNARY_SCALAR_BVEC_FUNCS(GLSL_NAME, FUNC_NAME)																							\
+	void eval_##GLSL_NAME##_bvec2	(ShaderEvalContext& c) { c.color.x()	= float(FUNC_NAME(greaterThan(c.in[0].swizzle(3, 1), Vec2(0.0f)))); }		\
+	void eval_##GLSL_NAME##_bvec3	(ShaderEvalContext& c) { c.color.x()	= float(FUNC_NAME(greaterThan(c.in[0].swizzle(2, 0, 1), Vec3(0.0f)))); }	\
+	void eval_##GLSL_NAME##_bvec4	(ShaderEvalContext& c) { c.color.x()	= float(FUNC_NAME(greaterThan(c.in[0].swizzle(1, 2, 3, 0), Vec4(0.0f)))); }
+
+#define DECLARE_UNARY_BVEC_BVEC_FUNCS(GLSL_NAME, FUNC_NAME)																								\
+	void eval_##GLSL_NAME##_bvec2	(ShaderEvalContext& c) { c.color.yz()	= FUNC_NAME(greaterThan(c.in[0].swizzle(3, 1), Vec2(0.0f))).asFloat(); }	\
+	void eval_##GLSL_NAME##_bvec3	(ShaderEvalContext& c) { c.color.xyz()	= FUNC_NAME(greaterThan(c.in[0].swizzle(2, 0, 1), Vec3(0.0f))).asFloat(); }	\
+	void eval_##GLSL_NAME##_bvec4	(ShaderEvalContext& c) { c.color.xyzw()	= FUNC_NAME(greaterThan(c.in[0].swizzle(1, 2, 3, 0), Vec4(0.0f))).asFloat(); }
+
+DECLARE_UNARY_SCALAR_BVEC_FUNCS(any, boolAny);
+DECLARE_UNARY_SCALAR_BVEC_FUNCS(all, boolAll);
+
+// ShaderOperatorTests.
+
+class ShaderOperatorTests : public tcu::TestCaseGroup
+{
+public:
+							ShaderOperatorTests		(tcu::TestContext& context);
+	virtual					~ShaderOperatorTests	(void);
+
+	virtual void			init					(void);
+
+private:
+							ShaderOperatorTests		(const ShaderOperatorTests&);		// not allowed!
+	ShaderOperatorTests&	operator=				(const ShaderOperatorTests&);		// not allowed!
+};
+
+ShaderOperatorTests::ShaderOperatorTests(tcu::TestContext& testCtx)
+	: TestCaseGroup(testCtx, "operator", "Operator tests.")
+{
+}
+
+ShaderOperatorTests::~ShaderOperatorTests (void)
+{
+}
+
+void ShaderOperatorTests::init (void)
+{
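+	// Shorthand macros mapping an eval function name to its scalar/vec2/vec3/vec4 variants (DE_NULL where a variant does not exist).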
+	#define BOOL_FUNCS(FUNC_NAME)			eval_##FUNC_NAME##_bool, DE_NULL, DE_NULL, DE_NULL
+
+	#define FLOAT_VEC_FUNCS(FUNC_NAME)		DE_NULL, eval_##FUNC_NAME##_vec2, eval_##FUNC_NAME##_vec3, eval_##FUNC_NAME##_vec4
+	#define INT_VEC_FUNCS(FUNC_NAME)		DE_NULL, eval_##FUNC_NAME##_ivec2, eval_##FUNC_NAME##_ivec3, eval_##FUNC_NAME##_ivec4
+	#define UINT_VEC_FUNCS(FUNC_NAME)		DE_NULL, eval_##FUNC_NAME##_uvec2, eval_##FUNC_NAME##_uvec3, eval_##FUNC_NAME##_uvec4
+	#define BOOL_VEC_FUNCS(FUNC_NAME)		DE_NULL, eval_##FUNC_NAME##_bvec2, eval_##FUNC_NAME##_bvec3, eval_##FUNC_NAME##_bvec4
+
+	#define FLOAT_GENTYPE_FUNCS(FUNC_NAME)	eval_##FUNC_NAME##_float, eval_##FUNC_NAME##_vec2, eval_##FUNC_NAME##_vec3, eval_##FUNC_NAME##_vec4
+	#define INT_GENTYPE_FUNCS(FUNC_NAME)	eval_##FUNC_NAME##_int, eval_##FUNC_NAME##_ivec2, eval_##FUNC_NAME##_ivec3, eval_##FUNC_NAME##_ivec4
+	#define UINT_GENTYPE_FUNCS(FUNC_NAME)	eval_##FUNC_NAME##_uint, eval_##FUNC_NAME##_uvec2, eval_##FUNC_NAME##_uvec3, eval_##FUNC_NAME##_uvec4
+	#define BOOL_GENTYPE_FUNCS(FUNC_NAME)	eval_##FUNC_NAME##_bool, eval_##FUNC_NAME##_bvec2, eval_##FUNC_NAME##_bvec3, eval_##FUNC_NAME##_bvec4
+
+	// Shorthand for an unused operand value.
+	Value					notUsed		= Value(VALUE_NONE, 0.0f, 0.0f);
+
+	std::vector<BuiltinFuncGroup> funcInfoGroups;
+
+	// Unary operators.
+	funcInfoGroups.push_back(
+		BuiltinFuncGroup("unary_operator", "Unary operator tests")
+		<< BuiltinOperInfo						("plus",			"+",	GT,		Value(GT,  -1.0f, 1.0f),	notUsed,	notUsed,	0.5f,	0.5f,	PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(nop))
+		<< BuiltinOperInfo						("plus",			"+",	IGT,	Value(IGT, -5.0f, 5.0f),	notUsed,	notUsed,	0.1f,	0.5f,	PRECMASK_ALL,		INT_GENTYPE_FUNCS(nop))
+		<< BuiltinOperInfo						("plus",			"+",	UGT,	Value(UGT,  0.0f, 2e2f),	notUsed,	notUsed,	5e-3f,	0.0f,	PRECMASK_ALL,		UINT_GENTYPE_FUNCS(nop))
+		<< BuiltinOperInfo						("minus",			"-",	GT,		Value(GT,  -1.0f, 1.0f),	notUsed,	notUsed,	0.5f,	0.5f,	PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(negate))
+		<< BuiltinOperInfo						("minus",			"-",	IGT,	Value(IGT, -5.0f, 5.0f),	notUsed,	notUsed,	0.1f,	0.5f,	PRECMASK_ALL,		INT_GENTYPE_FUNCS(negate))
+		<< BuiltinOperInfo						("minus",			"-",	UGT,	Value(UGT,  0.0f, 4e9f),	notUsed,	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,		UINT_GENTYPE_FUNCS(negate))
+		<< BuiltinOperInfo						("not",				"!",	B,		Value(B,   -1.0f, 1.0f),	notUsed,	notUsed,	1.0f,	0.0f,	PRECMASK_NA,		eval_boolNot_bool, DE_NULL, DE_NULL, DE_NULL)
+		<< BuiltinOperInfo						("bitwise_not",		"~",	IGT,	Value(IGT, -1e5f, 1e5f),	notUsed,	notUsed,	5e-5f,	0.5f,	PRECMASK_HIGHP,		INT_GENTYPE_FUNCS(bitwiseNot))
+		<< BuiltinOperInfo						("bitwise_not",		"~",	UGT,	Value(UGT,  0.0f, 2e9f),	notUsed,	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,		UINT_GENTYPE_FUNCS(bitwiseNot))
+
+		// Pre/post incr/decr side effect cases.
+		<< BuiltinSideEffOperInfo		("pre_increment_effect",	"++",	GT,		Value(GT,	-1.0f, 1.0f),	notUsed,	notUsed,	0.5f, 0.0f,		PRECMASK_ALL,	FLOAT_GENTYPE_FUNCS(addOne))
+		<< BuiltinSideEffOperInfo		("pre_increment_effect",	"++",	IGT,	Value(IGT,	-6.0f, 4.0f),	notUsed,	notUsed,	0.1f, 0.5f,		PRECMASK_ALL,	INT_GENTYPE_FUNCS(addOne))
+		<< BuiltinSideEffOperInfo		("pre_increment_effect",	"++",	UGT,	Value(UGT,	 0.0f, 9.0f),	notUsed,	notUsed,	0.1f, 0.0f,		PRECMASK_ALL,	UINT_GENTYPE_FUNCS(addOne))
+		<< BuiltinSideEffOperInfo		("pre_decrement_effect",	"--",	GT,		Value(GT,	-1.0f, 1.0f),	notUsed,	notUsed,	0.5f, 1.0f,		PRECMASK_ALL,	FLOAT_GENTYPE_FUNCS(subOne))
+		<< BuiltinSideEffOperInfo		("pre_decrement_effect",	"--",	IGT,	Value(IGT,	-4.0f, 6.0f),	notUsed,	notUsed,	0.1f, 0.5f,		PRECMASK_ALL,	INT_GENTYPE_FUNCS(subOne))
+		<< BuiltinSideEffOperInfo		("pre_decrement_effect",	"--",	UGT,	Value(UGT,	 1.0f, 10.0f),	notUsed,	notUsed,	0.1f, 0.0f,		PRECMASK_ALL,	UINT_GENTYPE_FUNCS(subOne))
+		<< BuiltinPostSideEffOperInfo	("post_increment_effect",	"++",	GT,		Value(GT,	-1.0f, 1.0f),	notUsed,	notUsed,	0.5f, 0.0f,		PRECMASK_ALL,	FLOAT_GENTYPE_FUNCS(addOne))
+		<< BuiltinPostSideEffOperInfo	("post_increment_effect",	"++",	IGT,	Value(IGT,	-6.0f, 4.0f),	notUsed,	notUsed,	0.1f, 0.5f,		PRECMASK_ALL,	INT_GENTYPE_FUNCS(addOne))
+		<< BuiltinPostSideEffOperInfo	("post_increment_effect",	"++",	UGT,	Value(UGT,	 0.0f, 9.0f),	notUsed,	notUsed,	0.1f, 0.0f,		PRECMASK_ALL,	UINT_GENTYPE_FUNCS(addOne))
+		<< BuiltinPostSideEffOperInfo	("post_decrement_effect",	"--",	GT,		Value(GT,	-1.0f, 1.0f),	notUsed,	notUsed,	0.5f, 1.0f,		PRECMASK_ALL,	FLOAT_GENTYPE_FUNCS(subOne))
+		<< BuiltinPostSideEffOperInfo	("post_decrement_effect",	"--",	IGT,	Value(IGT,	-4.0f, 6.0f),	notUsed,	notUsed,	0.1f, 0.5f,		PRECMASK_ALL,	INT_GENTYPE_FUNCS(subOne))
+		<< BuiltinPostSideEffOperInfo	("post_decrement_effect",	"--",	UGT,	Value(UGT,	 1.0f, 10.0f),	notUsed,	notUsed,	0.1f, 0.0f,		PRECMASK_ALL,	UINT_GENTYPE_FUNCS(subOne))
+
+		// Pre/post incr/decr result cases.
+		<< BuiltinOperInfo				("pre_increment_result",	"++",	GT,		Value(GT,	-1.0f, 1.0f),	notUsed,	notUsed,	0.5f, 0.0f,		PRECMASK_ALL,	FLOAT_GENTYPE_FUNCS(addOne))
+		<< BuiltinOperInfo				("pre_increment_result",	"++",	IGT,	Value(IGT,	-6.0f, 4.0f),	notUsed,	notUsed,	0.1f, 0.5f,		PRECMASK_ALL,	INT_GENTYPE_FUNCS(addOne))
+		<< BuiltinOperInfo				("pre_increment_result",	"++",	UGT,	Value(UGT,	 0.0f, 9.0f),	notUsed,	notUsed,	0.1f, 0.0f,		PRECMASK_ALL,	UINT_GENTYPE_FUNCS(addOne))
+		<< BuiltinOperInfo				("pre_decrement_result",	"--",	GT,		Value(GT,	-1.0f, 1.0f),	notUsed,	notUsed,	0.5f, 1.0f,		PRECMASK_ALL,	FLOAT_GENTYPE_FUNCS(subOne))
+		<< BuiltinOperInfo				("pre_decrement_result",	"--",	IGT,	Value(IGT,	-4.0f, 6.0f),	notUsed,	notUsed,	0.1f, 0.5f,		PRECMASK_ALL,	INT_GENTYPE_FUNCS(subOne))
+		<< BuiltinOperInfo				("pre_decrement_result",	"--",	UGT,	Value(UGT,	 1.0f, 10.0f),	notUsed,	notUsed,	0.1f, 0.0f,		PRECMASK_ALL,	UINT_GENTYPE_FUNCS(subOne))
+		<< BuiltinPostOperInfo			("post_increment_result",	"++",	GT,		Value(GT,	-1.0f, 1.0f),	notUsed,	notUsed,	0.5f, 0.5f,		PRECMASK_ALL,	FLOAT_GENTYPE_FUNCS(nop))
+		<< BuiltinPostOperInfo			("post_increment_result",	"++",	IGT,	Value(IGT,	-5.0f, 5.0f),	notUsed,	notUsed,	0.1f, 0.5f,		PRECMASK_ALL,	INT_GENTYPE_FUNCS(nop))
+		<< BuiltinPostOperInfo			("post_increment_result",	"++",	UGT,	Value(UGT,	 0.0f, 9.0f),	notUsed,	notUsed,	0.1f, 0.0f,		PRECMASK_ALL,	UINT_GENTYPE_FUNCS(nop))
+		<< BuiltinPostOperInfo			("post_decrement_result",	"--",	GT,		Value(GT,	-1.0f, 1.0f),	notUsed,	notUsed,	0.5f, 0.5f,		PRECMASK_ALL,	FLOAT_GENTYPE_FUNCS(nop))
+		<< BuiltinPostOperInfo			("post_decrement_result",	"--",	IGT,	Value(IGT,	-5.0f, 5.0f),	notUsed,	notUsed,	0.1f, 0.5f,		PRECMASK_ALL,	INT_GENTYPE_FUNCS(nop))
+		<< BuiltinPostOperInfo			("post_decrement_result",	"--",	UGT,	Value(UGT,	 1.0f, 10.0f),	notUsed,	notUsed,	0.1f, 0.0f,		PRECMASK_ALL,	UINT_GENTYPE_FUNCS(nop))
+	);
+
+	BuiltinFuncGroup binaryOpGroup("binary_operator", "Binary operator tests");
+
+	// Normal binary operators and their corresponding assignment operators share most of their setup; generate both in the loop below.
+
+	for (int binaryOperatorType = 0; binaryOperatorType <= 2; binaryOperatorType++) // 0: normal op test, 1: assignment op side-effect test, 2: assignment op result test
+	{
+		const bool	isNormalOp		= binaryOperatorType == 0;
+		const bool	isAssignEff		= binaryOperatorType == 1;
+		const bool	isAssignRes		= binaryOperatorType == 2;
+
+		DE_ASSERT(isNormalOp || isAssignEff || isAssignRes);
+		DE_UNREF(isAssignRes);
+
+		const char*	addName			= isNormalOp ? "add"			: isAssignEff ? "add_assign_effect"			: "add_assign_result";
+		const char*	subName			= isNormalOp ? "sub"			: isAssignEff ? "sub_assign_effect"			: "sub_assign_result";
+		const char*	mulName			= isNormalOp ? "mul"			: isAssignEff ? "mul_assign_effect"			: "mul_assign_result";
+		const char*	divName			= isNormalOp ? "div"			: isAssignEff ? "div_assign_effect"			: "div_assign_result";
+		const char* modName			= isNormalOp ? "mod"			: isAssignEff ? "mod_assign_effect"			: "mod_assign_result";
+		const char* andName			= isNormalOp ? "bitwise_and"	: isAssignEff ? "bitwise_and_assign_effect"	: "bitwise_and_assign_result";
+		const char* orName			= isNormalOp ? "bitwise_or"		: isAssignEff ? "bitwise_or_assign_effect"	: "bitwise_or_assign_result";
+		const char* xorName			= isNormalOp ? "bitwise_xor"	: isAssignEff ? "bitwise_xor_assign_effect"	: "bitwise_xor_assign_result";
+		const char* leftShiftName	= isNormalOp ? "left_shift"		: isAssignEff ? "left_shift_assign_effect"	: "left_shift_assign_result";
+		const char* rightShiftName	= isNormalOp ? "right_shift"	: isAssignEff ? "right_shift_assign_effect"	: "right_shift_assign_result";
+		const char*	addOp			= isNormalOp ? "+" : "+=";
+		const char*	subOp			= isNormalOp ? "-" : "-=";
+		const char*	mulOp			= isNormalOp ? "*" : "*=";
+		const char*	divOp			= isNormalOp ? "/" : "/=";
+		const char*	modOp			= isNormalOp ? "%" : "%=";
+		const char*	andOp			= isNormalOp ? "&" : "&=";
+		const char*	orOp			= isNormalOp ? "|" : "|=";
+		const char*	xorOp			= isNormalOp ? "^" : "^=";
+		const char*	leftShiftOp		= isNormalOp ? "<<" : "<<=";
+		const char*	rightShiftOp	= isNormalOp ? ">>" : ">>=";
+
+		// Pointer to appropriate OperInfo function.
+		BuiltinFuncInfo (*operInfoFunc)(const char*, const char*, ValueType, Value, Value, Value, const float, const float, deUint32, ShaderEvalFunc, ShaderEvalFunc, ShaderEvalFunc, ShaderEvalFunc) =
+			isAssignEff ? BuiltinSideEffOperInfo : BuiltinOperInfo;
+
+		DE_ASSERT(operInfoFunc != DE_NULL);
+
+		// The following cases will be added for each operator, precision and fundamental type (float, int, uint) combination, where applicable:
+		// gentype <op> gentype
+		// vector <op> scalar
+		// For normal (non-assigning) operators only:
+		//   scalar <op> vector
+
+		// The add operator.
+
+		binaryOpGroup
+			<< operInfoFunc(addName,	addOp,	GT,		Value(GT,  -1.0f, 1.0f),	Value(GT,  -1.0f, 1.0f),	notUsed,	1.0f,	0.0f,	PRECMASK_ALL,			FLOAT_GENTYPE_FUNCS(add))
+			<< operInfoFunc(addName,	addOp,	IGT,	Value(IGT, -4.0f, 6.0f),	Value(IGT, -6.0f, 5.0f),	notUsed,	0.1f,	0.5f,	PRECMASK_MEDIUMP,	INT_GENTYPE_FUNCS(add))
+			<< operInfoFunc(addName,	addOp,	IGT,	Value(IGT, -2e9f, 2e9f),	Value(IGT, -2e9f, 2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_GENTYPE_FUNCS(add))
+			<< operInfoFunc(addName,	addOp,	UGT,	Value(UGT,  0.0f, 1e2f),	Value(UGT,  0.0f, 1e2f),	notUsed,	5e-3f,	0.0f,	PRECMASK_MEDIUMP,	UINT_GENTYPE_FUNCS(add))
+			<< operInfoFunc(addName,	addOp,	UGT,	Value(UGT,  0.0f, 4e9f),	Value(UGT,  0.0f, 4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_GENTYPE_FUNCS(add))
+			<< operInfoFunc(addName,	addOp,	FV,		Value(FV,  -1.0f, 1.0f),	Value(F,   -1.0f, 1.0f),	notUsed,	1.0f,	0.0f,	PRECMASK_ALL,			FLOAT_VEC_FUNCS(addVecScalar))
+			<< operInfoFunc(addName,	addOp,	IV,		Value(IV,  -4.0f, 6.0f),	Value(I,   -6.0f, 5.0f),	notUsed,	0.1f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(addVecScalar))
+			<< operInfoFunc(addName,	addOp,	IV,		Value(IV,  -2e9f, 2e9f),	Value(I,   -2e9f, 2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(addVecScalar))
+			<< operInfoFunc(addName,	addOp,	UV,		Value(UV,   0.0f, 1e2f),	Value(U,    0.0f, 1e2f),	notUsed,	5e-3f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(addVecScalar))
+			<< operInfoFunc(addName,	addOp,	UV,		Value(UV,   0.0f, 4e9f),	Value(U,    0.0f, 4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(addVecScalar));
+
+		if (isNormalOp)
+			binaryOpGroup
+				<< operInfoFunc(addName,	addOp,	FV,		Value(F,   -1.0f, 1.0f),	Value(FV,  -1.0f, 1.0f),	notUsed,	1.0f,	0.0f,	PRECMASK_ALL,			FLOAT_VEC_FUNCS(addScalarVec))
+				<< operInfoFunc(addName,	addOp,	IV,		Value(I,   -4.0f, 6.0f),	Value(IV,  -6.0f, 5.0f),	notUsed,	0.1f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(addScalarVec))
+				<< operInfoFunc(addName,	addOp,	IV,		Value(I,   -2e9f, 2e9f),	Value(IV,  -2e9f, 2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(addScalarVec))
+				<< operInfoFunc(addName,	addOp,	UV,		Value(U,    0.0f, 1e2f),	Value(UV,   0.0f, 1e2f),	notUsed,	5e-3f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(addScalarVec))
+				<< operInfoFunc(addName,	addOp,	UV,		Value(U,    0.0f, 4e9f),	Value(UV,   0.0f, 4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(addScalarVec));
+
+		// The subtract operator.
+
+		binaryOpGroup
+			<< operInfoFunc(subName,	subOp,	GT,		Value(GT,  -1.0f, 1.0f),	Value(GT,  -1.0f, 1.0f),	notUsed,	1.0f,	0.0f,	PRECMASK_ALL,			FLOAT_GENTYPE_FUNCS(sub))
+			<< operInfoFunc(subName,	subOp,	IGT,	Value(IGT, -4.0f, 6.0f),	Value(IGT, -6.0f, 5.0f),	notUsed,	0.1f,	0.5f,	PRECMASK_MEDIUMP,	INT_GENTYPE_FUNCS(sub))
+			<< operInfoFunc(subName,	subOp,	IGT,	Value(IGT, -2e9f, 2e9f),	Value(IGT, -2e9f, 2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_GENTYPE_FUNCS(sub))
+			<< operInfoFunc(subName,	subOp,	UGT,	Value(UGT,  1e2f, 2e2f),	Value(UGT,  0.0f, 1e2f),	notUsed,	5e-3f,	0.0f,	PRECMASK_MEDIUMP,	UINT_GENTYPE_FUNCS(sub))
+			<< operInfoFunc(subName,	subOp,	UGT,	Value(UGT,  .5e9f, 3.7e9f),	Value(UGT,  0.0f, 3.9e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_GENTYPE_FUNCS(sub))
+			<< operInfoFunc(subName,	subOp,	FV,		Value(FV,  -1.0f, 1.0f),	Value(F,   -1.0f, 1.0f),	notUsed,	1.0f,	0.0f,	PRECMASK_ALL,			FLOAT_VEC_FUNCS(subVecScalar))
+			<< operInfoFunc(subName,	subOp,	IV,		Value(IV,  -4.0f, 6.0f),	Value(I,   -6.0f, 5.0f),	notUsed,	0.1f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(subVecScalar))
+			<< operInfoFunc(subName,	subOp,	IV,		Value(IV,  -2e9f, 2e9f),	Value(I,   -2e9f, 2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(subVecScalar))
+			<< operInfoFunc(subName,	subOp,	UV,		Value(UV,   1e2f, 2e2f),	Value(U,    0.0f, 1e2f),	notUsed,	5e-3f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(subVecScalar))
+			<< operInfoFunc(subName,	subOp,	UV,		Value(UV,   0.0f, 4e9f),	Value(U,    0.0f, 4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(subVecScalar));
+
+		if (isNormalOp)
+			binaryOpGroup
+				<< operInfoFunc(subName,	subOp,	FV,		Value(F,   -1.0f, 1.0f),	Value(FV,  -1.0f, 1.0f),	notUsed,	1.0f,	0.0f,	PRECMASK_ALL,			FLOAT_VEC_FUNCS(subScalarVec))
+				<< operInfoFunc(subName,	subOp,	IV,		Value(I,   -4.0f, 6.0f),	Value(IV,  -6.0f, 5.0f),	notUsed,	0.1f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(subScalarVec))
+				<< operInfoFunc(subName,	subOp,	IV,		Value(I,   -2e9f, 2e9f),	Value(IV,  -2e9f, 2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(subScalarVec))
+				<< operInfoFunc(subName,	subOp,	UV,		Value(U,    1e2f, 2e2f),	Value(UV,    0.0f, 1e2f),	notUsed,	5e-3f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(subScalarVec))
+				<< operInfoFunc(subName,	subOp,	UV,		Value(U,    0.0f, 4e9f),	Value(UV,    0.0f, 4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(subScalarVec));
+
+		// The multiply operator.
+
+		binaryOpGroup
+			<< operInfoFunc(mulName,	mulOp,	GT,		Value(GT,  -1.0f, 1.0f),	Value(GT,  -1.0f, 1.0f),	notUsed,	1.0f,	0.0f,	PRECMASK_ALL,			FLOAT_GENTYPE_FUNCS(mul))
+			<< operInfoFunc(mulName,	mulOp,	IGT,	Value(IGT, -4.0f, 6.0f),	Value(IGT, -6.0f, 5.0f),	notUsed,	0.1f,	0.5f,	PRECMASK_MEDIUMP,	INT_GENTYPE_FUNCS(mul))
+			<< operInfoFunc(mulName,	mulOp,	IGT,	Value(IGT, -3e5f, 3e5f),	Value(IGT, -3e4f, 3e4f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_GENTYPE_FUNCS(mul))
+			<< operInfoFunc(mulName,	mulOp,	UGT,	Value(UGT,  0.0f, 16.0f),	Value(UGT,  0.0f, 16.0f),	notUsed,	4e-3f,	0.0f,	PRECMASK_MEDIUMP,	UINT_GENTYPE_FUNCS(mul))
+			<< operInfoFunc(mulName,	mulOp,	UGT,	Value(UGT,  0.0f, 6e5f),	Value(UGT,  0.0f, 6e4f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_GENTYPE_FUNCS(mul))
+			<< operInfoFunc(mulName,	mulOp,	FV,		Value(FV,  -1.0f, 1.0f),	Value(F,   -1.0f,  1.0f),	notUsed,	1.0f,	0.0f,	PRECMASK_ALL,			FLOAT_VEC_FUNCS(mulVecScalar))
+			<< operInfoFunc(mulName,	mulOp,	IV,		Value(IV,  -4.0f, 6.0f),	Value(I,   -6.0f,  5.0f),	notUsed,	0.1f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(mulVecScalar))
+			<< operInfoFunc(mulName,	mulOp,	IV,		Value(IV,  -3e5f, 3e5f),	Value(I,   -3e4f,  3e4f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(mulVecScalar))
+			<< operInfoFunc(mulName,	mulOp,	UV,		Value(UV,   0.0f, 16.0f),	Value(U,    0.0f, 16.0f),	notUsed,	4e-3f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(mulVecScalar))
+			<< operInfoFunc(mulName,	mulOp,	UV,		Value(UV,   0.0f, 6e5f),	Value(U,    0.0f, 6e4f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(mulVecScalar));
+
+		if (isNormalOp)
+			binaryOpGroup
+				<< operInfoFunc(mulName,	mulOp,	FV,		Value(F,   -1.0f, 1.0f),	Value(FV,  -1.0f,  1.0f),	notUsed,	1.0f,	0.0f,	PRECMASK_ALL,			FLOAT_VEC_FUNCS(mulScalarVec))
+				<< operInfoFunc(mulName,	mulOp,	IV,		Value(I,   -4.0f, 6.0f),	Value(IV,  -6.0f,  5.0f),	notUsed,	0.1f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(mulScalarVec))
+				<< operInfoFunc(mulName,	mulOp,	IV,		Value(I,   -3e5f, 3e5f),	Value(IV,  -3e4f,  3e4f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(mulScalarVec))
+				<< operInfoFunc(mulName,	mulOp,	UV,		Value(U,    0.0f, 16.0f),	Value(UV,   0.0f, 16.0f),	notUsed,	4e-3f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(mulScalarVec))
+				<< operInfoFunc(mulName,	mulOp,	UV,		Value(U,    0.0f, 6e5f),	Value(UV,   0.0f, 6e4f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(mulScalarVec));
+
+		// The divide operator.
+
+		binaryOpGroup
+			<< operInfoFunc(divName,	divOp,	GT,		Value(GT,  -1.0f,    1.0f),		Value(GT,  -2.0f, -0.5f),	notUsed,	1.0f,	0.0f,	PRECMASK_ALL,			FLOAT_GENTYPE_FUNCS(div))
+			<< operInfoFunc(divName,	divOp,	IGT,	Value(IGT, 24.0f,    24.0f),	Value(IGT, -4.0f, -1.0f),	notUsed,	0.04f,	1.0f,	PRECMASK_MEDIUMP,	INT_GENTYPE_FUNCS(div))
+			<< operInfoFunc(divName,	divOp,	IGT,	Value(IGT, 40320.0f, 40320.0f),	Value(IGT, -8.0f, -1.0f),	notUsed,	1e-5f,	0.5f,	PRECMASK_HIGHP,			INT_GENTYPE_FUNCS(div))
+			<< operInfoFunc(divName,	divOp,	UGT,	Value(UGT,  0.0f,    24.0f),	Value(UGT,  1.0f,  4.0f),	notUsed,	0.04f,	0.0f,	PRECMASK_MEDIUMP,	UINT_GENTYPE_FUNCS(div))
+			<< operInfoFunc(divName,	divOp,	UGT,	Value(UGT,  0.0f,    40320.0f),	Value(UGT,  1.0f,  8.0f),	notUsed,	1e-5f,	0.0f,	PRECMASK_HIGHP,			UINT_GENTYPE_FUNCS(div))
+			<< operInfoFunc(divName,	divOp,	FV,		Value(FV,  -1.0f,    1.0f),		Value(F,   -2.0f, -0.5f),	notUsed,	1.0f,	0.0f,	PRECMASK_ALL,			FLOAT_VEC_FUNCS(divVecScalar))
+			<< operInfoFunc(divName,	divOp,	IV,		Value(IV,  24.0f,    24.0f),	Value(I,   -4.0f, -1.0f),	notUsed,	0.04f,	1.0f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(divVecScalar))
+			<< operInfoFunc(divName,	divOp,	IV,		Value(IV,  40320.0f, 40320.0f),	Value(I,   -8.0f, -1.0f),	notUsed,	1e-5f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(divVecScalar))
+			<< operInfoFunc(divName,	divOp,	UV,		Value(UV,   0.0f,    24.0f),	Value(U,    1.0f,  4.0f),	notUsed,	0.04f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(divVecScalar))
+			<< operInfoFunc(divName,	divOp,	UV,		Value(UV,   0.0f,    40320.0f),	Value(U,    1.0f,  8.0f),	notUsed,	1e-5f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(divVecScalar));
+
+		if (isNormalOp)
+			binaryOpGroup
+				<< operInfoFunc(divName,	divOp,	FV,		Value(F,   -1.0f,    1.0f),		Value(FV,  -2.0f, -0.5f),	notUsed,	1.0f,	0.0f,	PRECMASK_ALL,			FLOAT_VEC_FUNCS(divScalarVec))
+				<< operInfoFunc(divName,	divOp,	IV,		Value(I,   24.0f,    24.0f),	Value(IV,  -4.0f, -1.0f),	notUsed,	0.04f,	1.0f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(divScalarVec))
+				<< operInfoFunc(divName,	divOp,	IV,		Value(I,   40320.0f, 40320.0f),	Value(IV,  -8.0f, -1.0f),	notUsed,	1e-5f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(divScalarVec))
+				<< operInfoFunc(divName,	divOp,	UV,		Value(U,    0.0f,    24.0f),	Value(UV,   1.0f,  4.0f),	notUsed,	0.04f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(divScalarVec))
+				<< operInfoFunc(divName,	divOp,	UV,		Value(U,    0.0f,    40320.0f),	Value(UV,   1.0f,  8.0f),	notUsed,	1e-5f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(divScalarVec));
+
+		// The modulus operator.
+
+		binaryOpGroup
+			<< operInfoFunc(modName,	modOp,	IGT,	Value(IGT,  0.0f, 6.0f),	Value(IGT,   1.1f,  6.1f),	notUsed,	0.25f,	0.5f,	PRECMASK_MEDIUMP,	INT_GENTYPE_FUNCS(mod))
+			<< operInfoFunc(modName,	modOp,	IGT,	Value(IGT,  0.0f, 14.0f),	Value(IGT,   1.1f, 11.1f),	notUsed,	0.1f,	0.5f,	PRECMASK_HIGHP,			INT_GENTYPE_FUNCS(mod))
+			<< operInfoFunc(modName,	modOp,	UGT,	Value(UGT,  0.0f, 6.0f),	Value(UGT,   1.1f,  6.1f),	notUsed,	0.25f,	0.0f,	PRECMASK_MEDIUMP,	UINT_GENTYPE_FUNCS(mod))
+			<< operInfoFunc(modName,	modOp,	UGT,	Value(UGT,  0.0f, 24.0f),	Value(UGT,   1.1f, 11.1f),	notUsed,	0.1f,	0.0f,	PRECMASK_HIGHP,			UINT_GENTYPE_FUNCS(mod))
+			<< operInfoFunc(modName,	modOp,	IV,		Value(IV,   0.0f, 6.0f),	Value(I,     1.1f,  6.1f),	notUsed,	0.25f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(modVecScalar))
+			<< operInfoFunc(modName,	modOp,	IV,		Value(IV,   0.0f, 6.0f),	Value(I,     1.1f, 11.1f),	notUsed,	0.1f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(modVecScalar))
+			<< operInfoFunc(modName,	modOp,	UV,		Value(UV,   0.0f, 6.0f),	Value(U,     1.1f,  6.1f),	notUsed,	0.25f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(modVecScalar))
+			<< operInfoFunc(modName,	modOp,	UV,		Value(UV,   0.0f, 24.0f),	Value(U,     1.1f, 11.1f),	notUsed,	0.1f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(modVecScalar));
+
+		if (isNormalOp)
+			binaryOpGroup
+				<< operInfoFunc(modName,	modOp,	IV,		Value(I,   0.0f, 6.0f),		Value(IV,     1.1f,  6.1f),	notUsed,	0.25f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(modScalarVec))
+				<< operInfoFunc(modName,	modOp,	IV,		Value(I,   0.0f, 6.0f),		Value(IV,     1.1f, 11.1f),	notUsed,	0.1f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(modScalarVec))
+				<< operInfoFunc(modName,	modOp,	UV,		Value(U,   0.0f, 6.0f),		Value(UV,     1.1f,  6.1f),	notUsed,	0.25f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(modScalarVec))
+				<< operInfoFunc(modName,	modOp,	UV,		Value(U,   0.0f, 24.0f),	Value(UV,     1.1f, 11.1f),	notUsed,	0.1f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(modScalarVec));
+
+		// The bitwise and operator.
+
+		binaryOpGroup
+			<< operInfoFunc(andName,	andOp,	IGT,	Value(IGT, -16.0f, 16.0f),	Value(IGT, -16.0f, 16.0f),	notUsed,	 0.03f,	0.5f,	PRECMASK_MEDIUMP,	INT_GENTYPE_FUNCS(bitwiseAnd))
+			<< operInfoFunc(andName,	andOp,	IGT,	Value(IGT,  -2e9f,  2e9f),	Value(IGT,  -2e9f,  2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_GENTYPE_FUNCS(bitwiseAnd))
+			<< operInfoFunc(andName,	andOp,	UGT,	Value(UGT,   0.0f, 32.0f),	Value(UGT,   0.0f, 32.0f),	notUsed,	 0.03f,	0.0f,	PRECMASK_MEDIUMP,	UINT_GENTYPE_FUNCS(bitwiseAnd))
+			<< operInfoFunc(andName,	andOp,	UGT,	Value(UGT,   0.0f,  4e9f),	Value(UGT,   0.0f,  4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_GENTYPE_FUNCS(bitwiseAnd))
+			<< operInfoFunc(andName,	andOp,	IV,		Value(IV, -16.0f, 16.0f),	Value(I, -16.0f, 16.0f),	notUsed,	 0.03f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(bitwiseAndVecScalar))
+			<< operInfoFunc(andName,	andOp,	IV,		Value(IV,  -2e9f,  2e9f),	Value(I,  -2e9f,  2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(bitwiseAndVecScalar))
+			<< operInfoFunc(andName,	andOp,	UV,		Value(UV,   0.0f, 32.0f),	Value(U,   0.0f, 32.0f),	notUsed,	 0.03f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(bitwiseAndVecScalar))
+			<< operInfoFunc(andName,	andOp,	UV,		Value(UV,   0.0f,  4e9f),	Value(U,   0.0f,  4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(bitwiseAndVecScalar));
+
+		if (isNormalOp)
+			binaryOpGroup
+				<< operInfoFunc(andName,	andOp,	IV,		Value(I, -16.0f, 16.0f),	Value(IV, -16.0f, 16.0f),	notUsed,	 0.03f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(bitwiseAndScalarVec))
+				<< operInfoFunc(andName,	andOp,	IV,		Value(I,  -2e9f,  2e9f),	Value(IV,  -2e9f,  2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(bitwiseAndScalarVec))
+				<< operInfoFunc(andName,	andOp,	UV,		Value(U,   0.0f, 32.0f),	Value(UV,   0.0f, 32.0f),	notUsed,	 0.03f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(bitwiseAndScalarVec))
+				<< operInfoFunc(andName,	andOp,	UV,		Value(U,   0.0f,  4e9f),	Value(UV,   0.0f,  4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(bitwiseAndScalarVec));
+
+		// The bitwise or operator.
+
+		binaryOpGroup
+			<< operInfoFunc(orName,	orOp,	IGT,	Value(IGT, -16.0f, 16.0f),	Value(IGT, -16.0f, 16.0f),	notUsed,	 0.03f,	0.5f,	PRECMASK_MEDIUMP,	INT_GENTYPE_FUNCS(bitwiseOr))
+			<< operInfoFunc(orName,	orOp,	IGT,	Value(IGT,  -2e9f,  2e9f),	Value(IGT,  -2e9f,  2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_GENTYPE_FUNCS(bitwiseOr))
+			<< operInfoFunc(orName,	orOp,	UGT,	Value(UGT,   0.0f, 32.0f),	Value(UGT,   0.0f, 32.0f),	notUsed,	 0.03f,	0.0f,	PRECMASK_MEDIUMP,	UINT_GENTYPE_FUNCS(bitwiseOr))
+			<< operInfoFunc(orName,	orOp,	UGT,	Value(UGT,   0.0f,  4e9f),	Value(UGT,   0.0f,  4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_GENTYPE_FUNCS(bitwiseOr))
+			<< operInfoFunc(orName,	orOp,	IV,		Value(IV, -16.0f, 16.0f),	Value(I, -16.0f, 16.0f),	notUsed,	 0.03f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(bitwiseOrVecScalar))
+			<< operInfoFunc(orName,	orOp,	IV,		Value(IV,  -2e9f,  2e9f),	Value(I,  -2e9f,  2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(bitwiseOrVecScalar))
+			<< operInfoFunc(orName,	orOp,	UV,		Value(UV,   0.0f, 32.0f),	Value(U,   0.0f, 32.0f),	notUsed,	 0.03f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(bitwiseOrVecScalar))
+			<< operInfoFunc(orName,	orOp,	UV,		Value(UV,   0.0f,  4e9f),	Value(U,   0.0f,  4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(bitwiseOrVecScalar));
+
+		if (isNormalOp)
+			binaryOpGroup
+				<< operInfoFunc(orName,	orOp,	IV,		Value(I, -16.0f, 16.0f),	Value(IV, -16.0f, 16.0f),	notUsed,	 0.03f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(bitwiseOrScalarVec))
+				<< operInfoFunc(orName,	orOp,	IV,		Value(I,  -2e9f,  2e9f),	Value(IV,  -2e9f,  2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(bitwiseOrScalarVec))
+				<< operInfoFunc(orName,	orOp,	UV,		Value(U,   0.0f, 32.0f),	Value(UV,   0.0f, 32.0f),	notUsed,	 0.03f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(bitwiseOrScalarVec))
+				<< operInfoFunc(orName,	orOp,	UV,		Value(U,   0.0f,  4e9f),	Value(UV,   0.0f,  4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(bitwiseOrScalarVec));
+
+		// The bitwise xor operator.
+
+		binaryOpGroup
+			<< operInfoFunc(xorName,	xorOp,	IGT,	Value(IGT, -16.0f, 16.0f),	Value(IGT, -16.0f, 16.0f),	notUsed,	 0.03f,	0.5f,	PRECMASK_MEDIUMP,	INT_GENTYPE_FUNCS(bitwiseXor))
+			<< operInfoFunc(xorName,	xorOp,	IGT,	Value(IGT,  -2e9f,  2e9f),	Value(IGT,  -2e9f,  2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_GENTYPE_FUNCS(bitwiseXor))
+			<< operInfoFunc(xorName,	xorOp,	UGT,	Value(UGT,   0.0f, 32.0f),	Value(UGT,   0.0f, 32.0f),	notUsed,	 0.03f,	0.0f,	PRECMASK_MEDIUMP,	UINT_GENTYPE_FUNCS(bitwiseXor))
+			<< operInfoFunc(xorName,	xorOp,	UGT,	Value(UGT,   0.0f,  4e9f),	Value(UGT,   0.0f,  4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_GENTYPE_FUNCS(bitwiseXor))
+			<< operInfoFunc(xorName,	xorOp,	IV,		Value(IV, -16.0f, 16.0f),	Value(I, -16.0f, 16.0f),	notUsed,	 0.03f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(bitwiseXorVecScalar))
+			<< operInfoFunc(xorName,	xorOp,	IV,		Value(IV,  -2e9f,  2e9f),	Value(I,  -2e9f,  2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(bitwiseXorVecScalar))
+			<< operInfoFunc(xorName,	xorOp,	UV,		Value(UV,   0.0f, 32.0f),	Value(U,   0.0f, 32.0f),	notUsed,	 0.03f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(bitwiseXorVecScalar))
+			<< operInfoFunc(xorName,	xorOp,	UV,		Value(UV,   0.0f,  4e9f),	Value(U,   0.0f,  4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(bitwiseXorVecScalar));
+
+		if (isNormalOp)
+			binaryOpGroup
+				<< operInfoFunc(xorName,	xorOp,	IV,		Value(I, -16.0f, 16.0f),	Value(IV, -16.0f, 16.0f),	notUsed,	 0.03f,	0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(bitwiseXorScalarVec))
+				<< operInfoFunc(xorName,	xorOp,	IV,		Value(I,  -2e9f,  2e9f),	Value(IV,  -2e9f,  2e9f),	notUsed,	4e-10f,	0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(bitwiseXorScalarVec))
+				<< operInfoFunc(xorName,	xorOp,	UV,		Value(U,   0.0f, 32.0f),	Value(UV,   0.0f, 32.0f),	notUsed,	 0.03f,	0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(bitwiseXorScalarVec))
+				<< operInfoFunc(xorName,	xorOp,	UV,		Value(U,   0.0f,  4e9f),	Value(UV,   0.0f,  4e9f),	notUsed,	2e-10f,	0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(bitwiseXorScalarVec));
+
+		// The left shift operator. The second operand (shift amount) may be either int or uint, regardless of whether the first operand is signed or unsigned.
+
+		for (int isSignedAmount = 0; isSignedAmount <= 1; isSignedAmount++)
+		{
+			ValueType gType = isSignedAmount == 0 ? UGT	: IGT;
+			ValueType sType = isSignedAmount == 0 ? U	: I;
+			binaryOpGroup
+				<< operInfoFunc(leftShiftName,	leftShiftOp,	IGT,	Value(IGT, -7.0f, 7.0f),	Value(gType, 0.0f, 4.0f),	notUsed,	4e-3f,  0.5f,	PRECMASK_MEDIUMP,	INT_GENTYPE_FUNCS(leftShift))
+				<< operInfoFunc(leftShiftName,	leftShiftOp,	IGT,	Value(IGT, -7.0f, 7.0f),	Value(gType, 0.0f, 27.0f),	notUsed,	5e-10f, 0.5f,	PRECMASK_HIGHP,			INT_GENTYPE_FUNCS(leftShift))
+				<< operInfoFunc(leftShiftName,	leftShiftOp,	UGT,	Value(UGT,  0.0f, 7.0f),	Value(gType, 0.0f, 5.0f),	notUsed,	4e-3f,  0.0f,	PRECMASK_MEDIUMP,	UINT_GENTYPE_FUNCS(leftShift))
+				<< operInfoFunc(leftShiftName,	leftShiftOp,	UGT,	Value(UGT,  0.0f, 7.0f),	Value(gType, 0.0f, 28.0f),	notUsed,	5e-10f, 0.0f,	PRECMASK_HIGHP,			UINT_GENTYPE_FUNCS(leftShift))
+				<< operInfoFunc(leftShiftName,	leftShiftOp,	IV,		Value(IV,  -7.0f, 7.0f),	Value(sType, 0.0f, 4.0f),	notUsed,	4e-3f,  0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(leftShiftVecScalar))
+				<< operInfoFunc(leftShiftName,	leftShiftOp,	IV,		Value(IV,  -7.0f, 7.0f),	Value(sType, 0.0f, 27.0f),	notUsed,	5e-10f, 0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(leftShiftVecScalar))
+				<< operInfoFunc(leftShiftName,	leftShiftOp,	UV,		Value(UV,   0.0f, 7.0f),	Value(sType, 0.0f, 5.0f),	notUsed,	4e-3f,  0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(leftShiftVecScalar))
+				<< operInfoFunc(leftShiftName,	leftShiftOp,	UV,		Value(UV,   0.0f, 7.0f),	Value(sType, 0.0f, 28.0f),	notUsed,	5e-10f, 0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(leftShiftVecScalar));
+		}
+
+		// The right shift operator. The second operand (shift amount) may be either int or uint, regardless of whether the first operand is signed or unsigned.
+
+		for (int isSignedAmount = 0; isSignedAmount <= 1; isSignedAmount++)
+		{
+			ValueType gType = isSignedAmount == 0 ? UGT	: IGT;
+			ValueType sType = isSignedAmount == 0 ? U	: I;
+			binaryOpGroup
+				<< operInfoFunc(rightShiftName,	rightShiftOp,	IGT,	Value(IGT, -127.0f, 127.0f),	Value(gType, 0.0f, 8.0f),	notUsed,	4e-3f,  0.5f,	PRECMASK_MEDIUMP,	INT_GENTYPE_FUNCS(rightShift))
+				<< operInfoFunc(rightShiftName,	rightShiftOp,	IGT,	Value(IGT, -2e9f, 2e9f),		Value(gType, 0.0f, 31.0f),	notUsed,	5e-10f, 0.5f,	PRECMASK_HIGHP,			INT_GENTYPE_FUNCS(rightShift))
+				<< operInfoFunc(rightShiftName,	rightShiftOp,	UGT,	Value(UGT,  0.0f, 255.0f),		Value(gType, 0.0f, 8.0f),	notUsed,	4e-3f,  0.0f,	PRECMASK_MEDIUMP,	UINT_GENTYPE_FUNCS(rightShift))
+				<< operInfoFunc(rightShiftName,	rightShiftOp,	UGT,	Value(UGT,  0.0f, 4e9f),		Value(gType, 0.0f, 31.0f),	notUsed,	5e-10f, 0.0f,	PRECMASK_HIGHP,			UINT_GENTYPE_FUNCS(rightShift))
+				<< operInfoFunc(rightShiftName,	rightShiftOp,	IV,		Value(IV,  -127.0f, 127.0f),	Value(sType, 0.0f, 8.0f),	notUsed,	4e-3f,  0.5f,	PRECMASK_MEDIUMP,	INT_VEC_FUNCS(rightShiftVecScalar))
+				<< operInfoFunc(rightShiftName,	rightShiftOp,	IV,		Value(IV,  -2e9f, 2e9f),		Value(sType, 0.0f, 31.0f),	notUsed,	5e-10f, 0.5f,	PRECMASK_HIGHP,			INT_VEC_FUNCS(rightShiftVecScalar))
+				<< operInfoFunc(rightShiftName,	rightShiftOp,	UV,		Value(UV,   0.0f, 255.0f),		Value(sType, 0.0f, 8.0f),	notUsed,	4e-3f,  0.0f,	PRECMASK_MEDIUMP,	UINT_VEC_FUNCS(rightShiftVecScalar))
+				<< operInfoFunc(rightShiftName,	rightShiftOp,	UV,		Value(UV,   0.0f, 4e9f),		Value(sType, 0.0f, 31.0f),	notUsed,	5e-10f, 0.0f,	PRECMASK_HIGHP,			UINT_VEC_FUNCS(rightShiftVecScalar));
+		}
+	}
+
+	// Rest of binary operators.
+
+	binaryOpGroup
+		// Scalar relational operators.
+		<< BuiltinOperInfo("less",				"<",	B,		Value(F,   -1.0f, 1.0f),	Value(F,   -1.0f, 1.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	eval_lessThan_float,			DE_NULL, DE_NULL, DE_NULL)
+		<< BuiltinOperInfo("less",				"<",	B,		Value(I,   -5.0f, 5.0f),	Value(I,   -5.0f, 5.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	eval_lessThan_int,				DE_NULL, DE_NULL, DE_NULL)
+		<< BuiltinOperInfo("less",				"<",	B,		Value(U,    0.0f, 16.0f),	Value(U,    0.0f, 16.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	eval_lessThan_uint,				DE_NULL, DE_NULL, DE_NULL)
+		<< BuiltinOperInfo("less_or_equal",		"<=",	B,		Value(F,   -1.0f, 1.0f),	Value(F,   -1.0f, 1.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	eval_lessThanEqual_float,		DE_NULL, DE_NULL, DE_NULL)
+		<< BuiltinOperInfo("less_or_equal",		"<=",	B,		Value(I,   -5.0f, 5.0f),	Value(I,   -5.0f, 5.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	eval_lessThanEqual_int,			DE_NULL, DE_NULL, DE_NULL)
+		<< BuiltinOperInfo("less_or_equal",		"<=",	B,		Value(U,    0.0f, 16.0f),	Value(U,    0.0f, 16.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	eval_lessThanEqual_uint,		DE_NULL, DE_NULL, DE_NULL)
+		<< BuiltinOperInfo("greater",			">",	B,		Value(F,   -1.0f, 1.0f),	Value(F,   -1.0f, 1.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	eval_greaterThan_float,			DE_NULL, DE_NULL, DE_NULL)
+		<< BuiltinOperInfo("greater",			">",	B,		Value(I,   -5.0f, 5.0f),	Value(I,   -5.0f, 5.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	eval_greaterThan_int,			DE_NULL, DE_NULL, DE_NULL)
+		<< BuiltinOperInfo("greater",			">",	B,		Value(U,    0.0f, 16.0f),	Value(U,    0.0f, 16.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	eval_greaterThan_uint,			DE_NULL, DE_NULL, DE_NULL)
+		<< BuiltinOperInfo("greater_or_equal",	">=",	B,		Value(F,   -1.0f, 1.0f),	Value(F,   -1.0f, 1.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	eval_greaterThanEqual_float,	DE_NULL, DE_NULL, DE_NULL)
+		<< BuiltinOperInfo("greater_or_equal",	">=",	B,		Value(I,   -5.0f, 5.0f),	Value(I,   -5.0f, 5.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	eval_greaterThanEqual_int,		DE_NULL, DE_NULL, DE_NULL)
+		<< BuiltinOperInfo("greater_or_equal",	">=",	B,		Value(U,    0.0f, 16.0f),	Value(U,    0.0f, 16.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	eval_greaterThanEqual_uint,		DE_NULL, DE_NULL, DE_NULL)
+
+		// Equality comparison operators.
+		<< BuiltinOperInfo("equal",				"==",	B,		Value(GT,  -1.0f, 1.0f),	Value(GT,  -1.0f, 1.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	FLOAT_GENTYPE_FUNCS(allEqual))
+		<< BuiltinOperInfo("equal",				"==",	B,		Value(IGT, -5.5f, 4.7f),	Value(IGT, -2.1f, 0.1f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	INT_GENTYPE_FUNCS(allEqual))
+		<< BuiltinOperInfo("equal",				"==",	B,		Value(UGT,  0.0f, 8.0f),	Value(UGT,  3.5f, 4.5f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	UINT_GENTYPE_FUNCS(allEqual))
+		<< BuiltinOperInfo("equal",				"==",	B,		Value(BGT, -2.1f, 2.1f),	Value(BGT, -1.1f, 3.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_NA,	BOOL_GENTYPE_FUNCS(allEqual))
+		<< BuiltinOperInfo("not_equal",			"!=",	B,		Value(GT,  -1.0f, 1.0f),	Value(GT,  -1.0f, 1.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	FLOAT_GENTYPE_FUNCS(anyNotEqual))
+		<< BuiltinOperInfo("not_equal",			"!=",	B,		Value(IGT, -5.5f, 4.7f),	Value(IGT, -2.1f, 0.1f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	INT_GENTYPE_FUNCS(anyNotEqual))
+		<< BuiltinOperInfo("not_equal",			"!=",	B,		Value(UGT,  0.0f, 8.0f),	Value(UGT,  3.5f, 4.5f),	notUsed,	1.0f, 0.0f,		PRECMASK_ALL,	UINT_GENTYPE_FUNCS(anyNotEqual))
+		<< BuiltinOperInfo("not_equal",			"!=",	B,		Value(BGT, -2.1f, 2.1f),	Value(BGT, -1.1f, 3.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_NA,	BOOL_GENTYPE_FUNCS(anyNotEqual))
+
+		// Logical operators.
+		<< BuiltinOperInfo("logical_and",	"&&",	B,	Value(B, -1.0f, 1.0f),	Value(B, -1.0f, 1.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_NA,	BOOL_FUNCS(logicalAnd))
+		<< BuiltinOperInfo("logical_or",	"||",	B,	Value(B, -1.0f, 1.0f),	Value(B, -1.0f, 1.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_NA,	BOOL_FUNCS(logicalOr))
+		<< BuiltinOperInfo("logical_xor",	"^^",	B,	Value(B, -1.0f, 1.0f),	Value(B, -1.0f, 1.0f),	notUsed,	1.0f, 0.0f,		PRECMASK_NA,	BOOL_FUNCS(logicalXor));
+
+	funcInfoGroups.push_back(binaryOpGroup);
+
+	// Angle and Trigonometry Functions.
+	funcInfoGroups.push_back(
+		BuiltinFuncGroup("angle_and_trigonometry", "Angle and trigonometry function tests.")
+		<< BuiltinFuncInfo("radians",		"radians",		GT,	Value(GT, -1.0f, 1.0f),		notUsed,					notUsed,					25.0f, 0.5f,	PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(radians) )
+		<< BuiltinFuncInfo("degrees",		"degrees",		GT,	Value(GT, -1.0f, 1.0f),		notUsed,					notUsed,					0.04f, 0.5f,	PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(degrees) )
+		<< BuiltinFuncInfo("sin",			"sin",			GT,	Value(GT, -5.0f, 5.0f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(sin) )
+		<< BuiltinFuncInfo("sin2",			"sin",			GT,	Value(GT, -1.5f, 1.5f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_MEDIUMP,				FLOAT_GENTYPE_FUNCS(sin) )
+		<< BuiltinFuncInfo("cos",			"cos",			GT,	Value(GT, -5.0f, 5.0f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(cos) )
+		<< BuiltinFuncInfo("cos2",			"cos",			GT,	Value(GT, -1.5f, 1.5f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_MEDIUMP,				FLOAT_GENTYPE_FUNCS(cos) )
+		<< BuiltinFuncInfo("tan",			"tan",			GT,	Value(GT, -5.0f, 5.0f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(tan) )
+		<< BuiltinFuncInfo("tan2",			"tan",			GT,	Value(GT, -1.5f, 5.5f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_MEDIUMP,				FLOAT_GENTYPE_FUNCS(tan) )
+		<< BuiltinFuncInfo("asin",			"asin",			GT,	Value(GT, -1.0f, 1.0f),		notUsed,					notUsed,					1.0f, 0.0f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(asin) )
+		<< BuiltinFuncInfo("acos",			"acos",			GT,	Value(GT, -1.0f, 1.0f),		notUsed,					notUsed,					1.0f, 0.0f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(acos) )
+		<< BuiltinFuncInfo("atan",			"atan",			GT,	Value(GT, -4.0f, 4.0f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_HIGHP,		FLOAT_GENTYPE_FUNCS(atan) )
+		<< BuiltinFuncInfo("atan2",			"atan",			GT,	Value(GT, -4.0f, 4.0f),		Value(GT, 0.5f, 2.0f),		notUsed,					0.5f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(atan2) )
+		<< BuiltinFuncInfo("sinh",			"sinh",			GT,	Value(GT, -5.0f, 5.0f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(sinh) )
+		<< BuiltinFuncInfo("sinh2",			"sinh",			GT,	Value(GT, -1.5f, 1.5f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_MEDIUMP,				FLOAT_GENTYPE_FUNCS(sinh) )
+		<< BuiltinFuncInfo("cosh",			"cosh",			GT,	Value(GT, -5.0f, 5.0f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(cosh) )
+		<< BuiltinFuncInfo("cosh2",			"cosh",			GT,	Value(GT, -1.5f, 1.5f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_MEDIUMP,				FLOAT_GENTYPE_FUNCS(cosh) )
+		<< BuiltinFuncInfo("tanh",			"tanh",			GT,	Value(GT, -5.0f, 5.0f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(tanh) )
+		<< BuiltinFuncInfo("tanh2",			"tanh",			GT,	Value(GT, -1.5f, 5.5f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_MEDIUMP,				FLOAT_GENTYPE_FUNCS(tanh) )
+		<< BuiltinFuncInfo("asinh",			"asinh",		GT,	Value(GT, -1.0f, 1.0f),		notUsed,					notUsed,					1.0f, 0.0f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(asinh) )
+		<< BuiltinFuncInfo("acosh",			"acosh",		GT,	Value(GT, 1.0f, 2.2f),		notUsed,					notUsed,					1.0f, 0.0f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(acosh) )
+		<< BuiltinFuncInfo("atanh",			"atanh",		GT,	Value(GT, -0.99f, 0.99f),	notUsed,					notUsed,					1.0f, 0.0f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(atanh) )
+	);
+
+	// Exponential Functions.
+	funcInfoGroups.push_back(
+		BuiltinFuncGroup("exponential", "Exponential function tests")
+		<< BuiltinFuncInfo("pow",			"pow",			GT,	Value(GT, 0.1f, 8.0f),		Value(GT, -4.0f, 2.0f),		notUsed,					1.0f, 0.0f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(pow) )
+		<< BuiltinFuncInfo("exp",			"exp",			GT,	Value(GT, -6.0f, 3.0f),		notUsed,					notUsed,					0.5f, 0.0f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(exp) )
+		<< BuiltinFuncInfo("log",			"log",			GT,	Value(GT, 0.1f, 10.0f),		notUsed,					notUsed,					0.5f, 0.3f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(log) )
+		<< BuiltinFuncInfo("exp2",			"exp2",			GT,	Value(GT, -7.0f, 2.0f),		notUsed,					notUsed,					1.0f, 0.0f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(exp2) )
+		<< BuiltinFuncInfo("log2",			"log2",			GT,	Value(GT, 0.1f, 10.0f),		notUsed,					notUsed,					1.0f, 0.0f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(log2) )
+		<< BuiltinFuncInfo("sqrt",			"sqrt",			GT,	Value(GT, 0.0f, 10.0f),		notUsed,					notUsed,					0.3f, 0.0f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(sqrt) )
+		<< BuiltinFuncInfo("inversesqrt",	"inversesqrt",	GT,	Value(GT, 0.5f, 10.0f),		notUsed,					notUsed,					1.0f, 0.0f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(inverseSqrt) )
+	);
+
+	// Common Functions.
+	funcInfoGroups.push_back(
+		BuiltinFuncGroup("common_functions", "Common function tests.")
+		<< BuiltinFuncInfo("abs",			"abs",			GT,	Value(GT, -2.0f, 2.0f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(abs) )
+		<< BuiltinFuncInfo("sign",			"sign",			GT,	Value(GT, -1.5f, 1.5f),		notUsed,					notUsed,					0.3f, 0.5f,		PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(sign) )
+		<< BuiltinFuncInfo("floor",			"floor",		GT,	Value(GT, -2.5f, 2.5f),		notUsed,					notUsed,					0.2f, 0.7f,		PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(floor) )
+		<< BuiltinFuncInfo("trunc",			"trunc",		GT,	Value(GT, -2.5f, 2.5f),		notUsed,					notUsed,					0.2f, 0.7f,		PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(trunc) )
+		<< BuiltinFuncInfo("round",			"round",		GT,	Value(GT, -2.5f, 2.5f),		notUsed,					notUsed,					0.2f, 0.7f,		PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(roundToEven) )
+		<< BuiltinFuncInfo("roundEven",		"roundEven",	GT,	Value(GT, -2.5f, 2.5f),		notUsed,					notUsed,					0.2f, 0.7f,		PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(roundToEven) )
+		<< BuiltinFuncInfo("ceil",			"ceil",			GT,	Value(GT, -2.5f, 2.5f),		notUsed,					notUsed,					0.2f, 0.5f,		PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(ceil) )
+		<< BuiltinFuncInfo("fract",			"fract",		GT,	Value(GT, -1.5f, 1.5f),		notUsed,					notUsed,					0.8f, 0.1f,		PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(fract) )
+		<< BuiltinFuncInfo("mod",			"mod",			GT,	Value(GT, -2.0f, 2.0f),		Value(GT, 0.9f, 6.0f),		notUsed,					0.5f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(mod) )
+		<< BuiltinFuncInfo("mod",			"mod",			GT,	Value(FV, -2.0f, 2.0f),		Value(F, 0.9f, 6.0f),		notUsed,					0.5f, 0.5f,		PRECMASK_ALL,		FLOAT_VEC_FUNCS(modVecScalar) )
+		<< BuiltinFuncInfo("min",			"min",			GT,	Value(GT, -1.0f, 1.0f),		Value(GT, -1.0f, 1.0f),		notUsed,					0.5f, 0.5f,		PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(min) )
+		<< BuiltinFuncInfo("min",			"min",			GT,	Value(FV, -1.0f, 1.0f),		Value(F, -1.0f, 1.0f),		notUsed,					0.5f, 0.5f,		PRECMASK_ALL,				FLOAT_VEC_FUNCS(minVecScalar) )
+		<< BuiltinFuncInfo("min",			"min",			IGT,Value(IGT, -4.0f, 4.0f),	Value(IGT, -4.0f, 4.0f),	notUsed,					0.125f, 0.5f,	PRECMASK_ALL,				INT_GENTYPE_FUNCS(min) )
+		<< BuiltinFuncInfo("min",			"min",			IGT,Value(IV,  -4.0f, 4.0f),	Value(I, -4.0f, 4.0f),		notUsed,					0.125f, 0.5f,	PRECMASK_ALL,				INT_VEC_FUNCS(minVecScalar) )
+		<< BuiltinFuncInfo("min",			"min",			UGT,Value(UGT, 0.0f, 8.0f),		Value(UGT, 0.0f, 8.0f),		notUsed,					0.125f, 0.0f,	PRECMASK_ALL,				UINT_GENTYPE_FUNCS(min) )
+		<< BuiltinFuncInfo("min",			"min",			UGT,Value(UV,  0.0f, 8.0f),		Value(U, 0.0f, 8.0f),		notUsed,					0.125f, 0.0f,	PRECMASK_ALL,				UINT_VEC_FUNCS(minVecScalar) )
+		<< BuiltinFuncInfo("max",			"max",			GT,	Value(GT, -1.0f, 1.0f),		Value(GT, -1.0f, 1.0f),		notUsed,					0.5f, 0.5f,		PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(max) )
+		<< BuiltinFuncInfo("max",			"max",			GT,	Value(FV, -1.0f, 1.0f),		Value(F, -1.0f, 1.0f),		notUsed,					0.5f, 0.5f,		PRECMASK_ALL,				FLOAT_VEC_FUNCS(maxVecScalar) )
+		<< BuiltinFuncInfo("max",			"max",			IGT,Value(IGT, -4.0f, 4.0f),	Value(IGT, -4.0f, 4.0f),	notUsed,					0.125f, 0.5f,	PRECMASK_ALL,				INT_GENTYPE_FUNCS(max) )
+		<< BuiltinFuncInfo("max",			"max",			IGT,Value(IV,  -4.0f, 4.0f),	Value(I, -4.0f, 4.0f),		notUsed,					0.125f, 0.5f,	PRECMASK_ALL,				INT_VEC_FUNCS(maxVecScalar) )
+		<< BuiltinFuncInfo("max",			"max",			UGT,Value(UGT, 0.0f, 8.0f),		Value(UGT, 0.0f, 8.0f),		notUsed,					0.125f, 0.0f,	PRECMASK_ALL,				UINT_GENTYPE_FUNCS(max) )
+		<< BuiltinFuncInfo("max",			"max",			UGT,Value(UV,  0.0f, 8.0f),		Value(U, 0.0f, 8.0f),		notUsed,					0.125f, 0.0f,	PRECMASK_ALL,				UINT_VEC_FUNCS(maxVecScalar) )
+		<< BuiltinFuncInfo("clamp",			"clamp",		GT,	Value(GT, -1.0f, 1.0f),		Value(GT, -0.5f, 0.5f),		Value(GT, 0.5f, 1.0f),		0.5f, 0.5f,		PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(clamp) )
+		<< BuiltinFuncInfo("clamp",			"clamp",		GT,	Value(FV, -1.0f, 1.0f),		Value(F, -0.5f, 0.5f),		Value(F, 0.5f, 1.0f),		0.5f, 0.5f,		PRECMASK_ALL,				FLOAT_VEC_FUNCS(clampVecScalarScalar) )
+		<< BuiltinFuncInfo("clamp",			"clamp",		IGT,Value(IGT, -4.0f, 4.0f),	Value(IGT, -2.0f, 2.0f),	Value(IGT, 2.0f, 4.0f),		0.125f, 0.5f,	PRECMASK_ALL,				INT_GENTYPE_FUNCS(clamp) )
+		<< BuiltinFuncInfo("clamp",			"clamp",		IGT,Value(IV,  -4.0f, 4.0f),	Value(I, -2.0f, 2.0f),		Value(I, 2.0f, 4.0f),		0.125f, 0.5f,	PRECMASK_ALL,				INT_VEC_FUNCS(clampVecScalarScalar) )
+		<< BuiltinFuncInfo("clamp",			"clamp",		UGT,Value(UGT, 0.0f, 8.0f),		Value(UGT, 2.0f, 6.0f),		Value(UGT, 6.0f, 8.0f),		0.125f, 0.0f,	PRECMASK_ALL,				UINT_GENTYPE_FUNCS(clamp) )
+		<< BuiltinFuncInfo("clamp",			"clamp",		UGT,Value(UV,  0.0f, 8.0f),		Value(U,   2.0f, 6.0f),		Value(U, 6.0f, 8.0f),		0.125f, 0.0f,	PRECMASK_ALL,				UINT_VEC_FUNCS(clampVecScalarScalar) )
+		<< BuiltinFuncInfo("mix",			"mix",			GT,	Value(GT, -1.0f, 1.0f),		Value(GT, -1.0f, 1.0f),		Value(GT, 0.0f, 1.0f),		0.5f, 0.5f,		PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(mix) )
+		<< BuiltinFuncInfo("mix",			"mix",			GT,	Value(FV, -1.0f, 1.0f),		Value(FV, -1.0f, 1.0f),		Value(F, 0.0f, 1.0f),		0.5f, 0.5f,		PRECMASK_ALL,				FLOAT_VEC_FUNCS(mixVecVecScalar) )
+		<< BuiltinFuncInfo("step",			"step",			GT,	Value(GT, -1.0f, 1.0f),		Value(GT, -1.0f, 0.0f),		notUsed,					0.5f, 0.25f,	PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(step) )
+		<< BuiltinFuncInfo("step",			"step",			GT,	Value(F, -1.0f, 1.0f),		Value(FV, -1.0f, 0.0f),		notUsed,					0.5f, 0.25f,	PRECMASK_ALL,				FLOAT_VEC_FUNCS(stepScalarVec) )
+		<< BuiltinFuncInfo("smoothstep",	"smoothstep",	GT,	Value(GT, -0.5f, 0.0f),		Value(GT, 0.1f, 1.0f),		Value(GT, -1.0f, 1.0f),		0.5f, 0.5f,		PRECMASK_ALL,				FLOAT_GENTYPE_FUNCS(smoothStep) )
+		<< BuiltinFuncInfo("smoothstep",	"smoothstep",	GT,	Value(F, -0.5f, 0.0f),		Value(F, 0.1f, 1.0f),		Value(FV, -1.0f, 1.0f),		0.5f, 0.5f,		PRECMASK_ALL,				FLOAT_VEC_FUNCS(smoothStepScalarScalarVec) )
+	);
+
+	// Geometric Functions.
+	funcInfoGroups.push_back(
+		BuiltinFuncGroup("geometric", "Geometric function tests.")
+		<< BuiltinFuncInfo("length",		"length",		F,	Value(GT, -5.0f, 5.0f),		notUsed,					notUsed,					0.1f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(length) )
+		<< BuiltinFuncInfo("distance",		"distance",		F,	Value(GT, -5.0f, 5.0f),		Value(GT, -5.0f, 5.0f),		notUsed,					0.1f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(distance) )
+		<< BuiltinFuncInfo("dot",			"dot",			F,	Value(GT, -5.0f, 5.0f),		Value(GT, -5.0f, 5.0f),		notUsed,					0.1f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(dot) )
+		<< BuiltinFuncInfo("cross",			"cross",		V3,	Value(GT, -5.0f, 5.0f),		Value(GT, -5.0f, 5.0f),		notUsed,					0.1f, 0.5f,		PRECMASK_ALL,		DE_NULL, DE_NULL, eval_cross_vec3, DE_NULL )
+		<< BuiltinFuncInfo("normalize",		"normalize",	GT,	Value(GT, 0.1f, 4.0f),		notUsed,					notUsed,					0.5f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(normalize) )
+		<< BuiltinFuncInfo("faceforward",	"faceforward",	GT,	Value(GT, -5.0f, 5.0f),		Value(GT, -5.0f, 5.0f),		Value(GT, -1.0f, 1.0f),		0.5f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(faceForward) )
+		<< BuiltinFuncInfo("reflect",		"reflect",		GT,	Value(GT, -0.8f, -0.5f),	Value(GT, 0.5f, 0.8f),		notUsed,					0.5f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(reflect) )
+		<< BuiltinFuncInfo("refract",		"refract",		GT,	Value(GT, -0.8f, 1.2f),		Value(GT, -1.1f, 0.5f),		Value(F, 0.2f, 1.5f),		0.5f, 0.5f,		PRECMASK_ALL,		FLOAT_GENTYPE_FUNCS(refract) )
+	);
+
+	// Vector Relational Functions.
+	funcInfoGroups.push_back(
+		BuiltinFuncGroup("float_compare", "Floating point comparison tests.")
+		<< BuiltinFuncInfo("lessThan",			"lessThan",			BV,	Value(FV, -1.0f, 1.0f),		Value(FV, -1.0f, 1.0f),	notUsed, 1.0f, 0.0f, PRECMASK_ALL,	FLOAT_VEC_FUNCS(lessThan) )
+		<< BuiltinFuncInfo("lessThanEqual",		"lessThanEqual",	BV,	Value(FV, -1.0f, 1.0f),		Value(FV, -1.0f, 1.0f),	notUsed, 1.0f, 0.0f, PRECMASK_ALL,	FLOAT_VEC_FUNCS(lessThanEqual) )
+		<< BuiltinFuncInfo("greaterThan",		"greaterThan",		BV,	Value(FV, -1.0f, 1.0f),		Value(FV, -1.0f, 1.0f),	notUsed, 1.0f, 0.0f, PRECMASK_ALL,	FLOAT_VEC_FUNCS(greaterThan) )
+		<< BuiltinFuncInfo("greaterThanEqual",	"greaterThanEqual",	BV,	Value(FV, -1.0f, 1.0f),		Value(FV, -1.0f, 1.0f),	notUsed, 1.0f, 0.0f, PRECMASK_ALL,	FLOAT_VEC_FUNCS(greaterThanEqual) )
+		<< BuiltinFuncInfo("equal",				"equal",			BV,	Value(FV, -1.0f, 1.0f),		Value(FV, -1.0f, 1.0f),	notUsed, 1.0f, 0.0f, PRECMASK_ALL,	FLOAT_VEC_FUNCS(equal) )
+		<< BuiltinFuncInfo("notEqual",			"notEqual",			BV,	Value(FV, -1.0f, 1.0f),		Value(FV, -1.0f, 1.0f),	notUsed, 1.0f, 0.0f, PRECMASK_ALL,	FLOAT_VEC_FUNCS(notEqual) )
+	);
+
+	funcInfoGroups.push_back(
+		BuiltinFuncGroup("int_compare", "Integer comparison tests.")
+		<< BuiltinFuncInfo("lessThan",			"lessThan",			BV,	Value(IV, -5.2f, 4.9f),		Value(IV, -5.0f, 5.0f),	notUsed, 1.0f, 0.0f, PRECMASK_ALL,	INT_VEC_FUNCS(lessThan) )
+		<< BuiltinFuncInfo("lessThanEqual",		"lessThanEqual",	BV,	Value(IV, -5.2f, 4.9f),		Value(IV, -5.0f, 5.0f),	notUsed, 1.0f, 0.0f, PRECMASK_ALL,	INT_VEC_FUNCS(lessThanEqual) )
+		<< BuiltinFuncInfo("greaterThan",		"greaterThan",		BV,	Value(IV, -5.2f, 4.9f),		Value(IV, -5.0f, 5.0f),	notUsed, 1.0f, 0.0f, PRECMASK_ALL,	INT_VEC_FUNCS(greaterThan) )
+		<< BuiltinFuncInfo("greaterThanEqual",	"greaterThanEqual",	BV,	Value(IV, -5.2f, 4.9f),		Value(IV, -5.0f, 5.0f),	notUsed, 1.0f, 0.0f, PRECMASK_ALL,	INT_VEC_FUNCS(greaterThanEqual) )
+		<< BuiltinFuncInfo("equal",				"equal",			BV,	Value(IV, -5.2f, 4.9f),		Value(IV, -5.0f, 5.0f),	notUsed, 1.0f, 0.0f, PRECMASK_ALL,	INT_VEC_FUNCS(equal) )
+		<< BuiltinFuncInfo("notEqual",			"notEqual",			BV,	Value(IV, -5.2f, 4.9f),		Value(IV, -5.0f, 5.0f),	notUsed, 1.0f, 0.0f, PRECMASK_ALL,	INT_VEC_FUNCS(notEqual) )
+	);
+
+	funcInfoGroups.push_back(
+		BuiltinFuncGroup("bool_compare", "Boolean comparison tests.")
+		<< BuiltinFuncInfo("equal",				"equal",			BV,	Value(BV, -5.2f, 4.9f),		Value(BV, -5.0f, 5.0f),	notUsed, 1.0f, 0.0f, PRECMASK_NA,	BOOL_VEC_FUNCS(equal) )
+		<< BuiltinFuncInfo("notEqual",			"notEqual",			BV,	Value(BV, -5.2f, 4.9f),		Value(BV, -5.0f, 5.0f),	notUsed, 1.0f, 0.0f, PRECMASK_NA,	BOOL_VEC_FUNCS(notEqual) )
+		<< BuiltinFuncInfo("any",				"any",				B,	Value(BV, -1.0f, 0.3f),		notUsed,				notUsed, 1.0f, 0.0f, PRECMASK_NA,	BOOL_VEC_FUNCS(any) )
+		<< BuiltinFuncInfo("all",				"all",				B,	Value(BV, -0.3f, 1.0f),		notUsed,				notUsed, 1.0f, 0.0f, PRECMASK_NA,	BOOL_VEC_FUNCS(all) )
+		<< BuiltinFuncInfo("not",				"not",				BV,	Value(BV, -1.0f, 1.0f),		notUsed,				notUsed, 1.0f, 0.0f, PRECMASK_NA,	BOOL_VEC_FUNCS(boolNot) )
+	);
+
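+	// Shader stages and data types used below when expanding each function info entry into concrete test cases.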
+	static const ShaderType s_shaderTypes[] =
+	{
+		SHADERTYPE_VERTEX,
+		SHADERTYPE_FRAGMENT
+	};
+
+	static const DataType s_floatTypes[] =
+	{
+		TYPE_FLOAT,
+		TYPE_FLOAT_VEC2,
+		TYPE_FLOAT_VEC3,
+		TYPE_FLOAT_VEC4
+	};
+
+	static const DataType s_intTypes[] =
+	{
+		TYPE_INT,
+		TYPE_INT_VEC2,
+		TYPE_INT_VEC3,
+		TYPE_INT_VEC4
+	};
+
+	static const DataType s_uintTypes[] =
+	{
+		TYPE_UINT,
+		TYPE_UINT_VEC2,
+		TYPE_UINT_VEC3,
+		TYPE_UINT_VEC4
+	};
+
+	static const DataType s_boolTypes[] =
+	{
+		TYPE_BOOL,
+		TYPE_BOOL_VEC2,
+		TYPE_BOOL_VEC3,
+		TYPE_BOOL_VEC4
+	};
+
+	for (int outerGroupNdx = 0; outerGroupNdx < (int)funcInfoGroups.size(); outerGroupNdx++)
+	{
+		// Create outer group.
+		const BuiltinFuncGroup& outerGroupInfo = funcInfoGroups[outerGroupNdx];
+		TestCaseGroup* outerGroup = new TestCaseGroup(m_testCtx, outerGroupInfo.name, outerGroupInfo.description);
+		addChild(outerGroup);
+
+		// Create a new inner group only if the case name differs from the previous one's (see check below).
+		TestCaseGroup* innerGroup = DE_NULL;
+
+		for (int funcInfoNdx = 0; funcInfoNdx < (int)outerGroupInfo.funcInfos.size(); funcInfoNdx++)
+		{
+			const BuiltinFuncInfo&	funcInfo		= outerGroupInfo.funcInfos[funcInfoNdx];
+			const char*				shaderFuncName	= funcInfo.shaderFuncName;
+			const bool				isBoolCase		= (funcInfo.precisionMask == PRECMASK_NA);
+			const bool				isBoolOut		= (funcInfo.outValue & (VALUE_BOOL | VALUE_BOOL_VEC | VALUE_BOOL_GENTYPE)) != 0;
+			const bool				isIntOut		= (funcInfo.outValue & (VALUE_INT | VALUE_INT_VEC | VALUE_INT_GENTYPE)) != 0;
+			const bool				isUintOut		= (funcInfo.outValue & (VALUE_UINT | VALUE_UINT_VEC | VALUE_UINT_GENTYPE)) != 0;
+			const bool				isFloatOut		= !isBoolOut && !isIntOut && !isUintOut;
+
+			if (!innerGroup || (std::string(innerGroup->getName()) != funcInfo.caseName))
+			{
+				std::string groupDesc = std::string("Built-in function ") + shaderFuncName + "() tests.";
+				innerGroup = new TestCaseGroup(m_testCtx, funcInfo.caseName, groupDesc.c_str());
+				outerGroup->addChild(innerGroup);
+			}
+
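+			// Generate cases for each input size (scalar, vec2, vec3, vec4).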
+			for (int inScalarSize = 1; inScalarSize <= 4; inScalarSize++)
+			{
+				const int		outScalarSize	= ((funcInfo.outValue == VALUE_FLOAT) || (funcInfo.outValue == VALUE_BOOL)) ? 1 : inScalarSize;
+				const DataType	outDataType		= isFloatOut	? s_floatTypes[outScalarSize - 1]
+												: isIntOut		? s_intTypes[outScalarSize - 1]
+												: isUintOut		? s_uintTypes[outScalarSize - 1]
+												: isBoolOut		? s_boolTypes[outScalarSize - 1]
+												: TYPE_LAST;
+
+				ShaderEvalFunc evalFunc = DE_NULL;
+				if      (inScalarSize == 1)	evalFunc = funcInfo.evalFuncScalar;
+				else if (inScalarSize == 2)	evalFunc = funcInfo.evalFuncVec2;
+				else if (inScalarSize == 3)	evalFunc = funcInfo.evalFuncVec3;
+				else if (inScalarSize == 4)	evalFunc = funcInfo.evalFuncVec4;
+				else DE_ASSERT(false);
+
+				// Skip if no valid eval func.
+				if (evalFunc == DE_NULL)
+					continue;
+
+				for (int precision = PRECISION_MEDIUMP; precision < PRECISION_LAST; precision++)
+				{
+					if ((funcInfo.precisionMask & (1<<precision)) ||
+						(funcInfo.precisionMask == PRECMASK_NA && precision == PRECISION_MEDIUMP)) // use mediump interpolators for booleans
+					{
+						const char*			precisionStr	= getPrecisionName((Precision)precision);
+						const std::string	precisionPrefix	= isBoolCase ? "" : (std::string(precisionStr) + "_");
+
+						for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(s_shaderTypes); shaderTypeNdx++)
+						{
+							const ShaderType	shaderType		= s_shaderTypes[shaderTypeNdx];
+							const char*			shaderTypeName	= getShaderTypeName(shaderType);
+							const bool			isVertexCase	= (ShaderType)shaderType == SHADERTYPE_VERTEX;
+							const bool			isUnaryOp		= (funcInfo.input1.valueType == VALUE_NONE);
+
+							// \note Data type names will be added to description and name in a following loop.
+							std::string			desc			= std::string("Built-in function ") + shaderFuncName + "(";
+							std::string			name			= precisionPrefix;
+
+							// Generate shader op.
+							std::string			shaderOp		= std::string("res = ");
+
+							// Setup shader data info.
+							ShaderDataSpec		shaderSpec;
+							shaderSpec.numInputs			= 0;
+							shaderSpec.precision			= isBoolCase ? PRECISION_LAST : (Precision)precision;
+							shaderSpec.output				= outDataType;
+							shaderSpec.resultScale			= funcInfo.resultScale;
+							shaderSpec.resultBias			= funcInfo.resultBias;
+							shaderSpec.referenceScale		= funcInfo.referenceScale;
+							shaderSpec.referenceBias		= funcInfo.referenceBias;
+
+							if (funcInfo.type == OPERATOR)
+							{
+								if (isUnaryOp && funcInfo.isUnaryPrefix)
+									shaderOp += shaderFuncName;
+							}
+							else if (funcInfo.type == FUNCTION)
+								shaderOp += std::string(shaderFuncName) + "(";
+							else // SIDE_EFFECT_OPERATOR
+								shaderOp += "in0;\n\t";
+
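+							// Append each used input to the shader expression and to the case name and description.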
+							for (int inputNdx = 0; inputNdx < MAX_INPUTS; inputNdx++)
+							{
+								const Value&	prevV			= (inputNdx == 1) ? funcInfo.input0 : (inputNdx == 2) ? funcInfo.input1 : funcInfo.input2;
+								const Value&	v				= (inputNdx == 0) ? funcInfo.input0 : (inputNdx == 1) ? funcInfo.input1 : funcInfo.input2;
+
+								if (v.valueType == VALUE_NONE)
+									continue; // Skip unused input.
+
+								const int		prevInScalarSize	= isScalarType(prevV.valueType) ? 1 : inScalarSize;
+								const DataType	prevInDataType		= isFloatType(prevV.valueType)	? s_floatTypes[prevInScalarSize - 1]
+																	: isIntType(prevV.valueType)	? s_intTypes[prevInScalarSize - 1]
+																	: isUintType(prevV.valueType)	? s_uintTypes[prevInScalarSize - 1]
+																	: isBoolType(prevV.valueType)	? s_boolTypes[prevInScalarSize - 1]
+																	: TYPE_LAST;
+
+								const int		curInScalarSize		= isScalarType(v.valueType) ? 1 : inScalarSize;
+								const DataType	curInDataType		= isFloatType(v.valueType)	? s_floatTypes[curInScalarSize - 1]
+																	: isIntType(v.valueType)	? s_intTypes[curInScalarSize - 1]
+																	: isUintType(v.valueType)	? s_uintTypes[curInScalarSize - 1]
+																	: isBoolType(v.valueType)	? s_boolTypes[curInScalarSize - 1]
+																	: TYPE_LAST;
+
+								// Write input type(s) to case description and name.
+
+								if (inputNdx > 0)
+									desc += ", ";
+
+								desc += getDataTypeName(curInDataType);
+
+								if (inputNdx == 0 || prevInDataType != curInDataType) // \note Only write input type to case name if different from previous input type (avoid overly long names).
+									name += std::string("") + getDataTypeName(curInDataType) + "_";
+
+								// Generate op input source.
+
+								if (funcInfo.type == OPERATOR || funcInfo.type == FUNCTION)
+								{
+									if (inputNdx != 0)
+									{
+										if (funcInfo.type == OPERATOR && !isUnaryOp)
+											shaderOp += " " + std::string(shaderFuncName) + " ";
+										else
+											shaderOp += ", ";
+									}
+
+									shaderOp += "in" + de::toString(inputNdx);
+
+									if (funcInfo.type == OPERATOR && isUnaryOp && !funcInfo.isUnaryPrefix)
+										shaderOp += std::string(shaderFuncName);
+								}
+								else
+								{
+									DE_ASSERT(funcInfo.type == SIDE_EFFECT_OPERATOR);
+
+									if (inputNdx != 0 || (isUnaryOp && funcInfo.isUnaryPrefix))
+										shaderOp += std::string("") + (isUnaryOp ? "" : " ") + shaderFuncName + (isUnaryOp ? "" : " ");
+
+									shaderOp += inputNdx == 0 ? "res" : "in" + de::toString(inputNdx); // \note in0 has already been assigned to res, so start from in1.
+
+									if (isUnaryOp && !funcInfo.isUnaryPrefix)
+										shaderOp += shaderFuncName;
+								}
+
+								// Fill in shader info.
+								shaderSpec.inputs[shaderSpec.numInputs++] = ShaderValue(curInDataType, v.rangeMin, v.rangeMax);
+							}
+
+							if (funcInfo.type == FUNCTION)
+								shaderOp += ")";
+
+							shaderOp += ";";
+
+							desc += ").";
+							name += shaderTypeName;
+
+							// Create the test case.
+							innerGroup->addChild(new ShaderOperatorCase(m_testCtx, name.c_str(), desc.c_str(), isVertexCase, evalFunc, shaderOp, shaderSpec));
+						}
+					}
+				}
+			}
+		}
+	}
+
+	// The ?: selection operator.
+
+	static const struct
+	{
+		const DataType			type; // The type of "Y" and "Z" operands in "X ? Y : Z" (X is always bool).
+		const ShaderEvalFunc	evalFunc;
+	} s_selectionInfo[] =
+	{
+		{ TYPE_FLOAT,		eval_selection_float	},
+		{ TYPE_FLOAT_VEC2,	eval_selection_vec2		},
+		{ TYPE_FLOAT_VEC3,	eval_selection_vec3		},
+		{ TYPE_FLOAT_VEC4,	eval_selection_vec4		},
+		{ TYPE_INT,			eval_selection_int		},
+		{ TYPE_INT_VEC2,	eval_selection_ivec2	},
+		{ TYPE_INT_VEC3,	eval_selection_ivec3	},
+		{ TYPE_INT_VEC4,	eval_selection_ivec4	},
+		{ TYPE_UINT,		eval_selection_uint		},
+		{ TYPE_UINT_VEC2,	eval_selection_uvec2	},
+		{ TYPE_UINT_VEC3,	eval_selection_uvec3	},
+		{ TYPE_UINT_VEC4,	eval_selection_uvec4	},
+		{ TYPE_BOOL,		eval_selection_bool		},
+		{ TYPE_BOOL_VEC2,	eval_selection_bvec2	},
+		{ TYPE_BOOL_VEC3,	eval_selection_bvec3	},
+		{ TYPE_BOOL_VEC4,	eval_selection_bvec4	}
+	};
+
+	TestCaseGroup* selectionGroup = new TestCaseGroup(m_testCtx, "selection", "Selection operator tests");
+	addChild(selectionGroup);
+
+	for (int typeNdx = 0; typeNdx < DE_LENGTH_OF_ARRAY(s_selectionInfo); typeNdx++)
+	{
+		const DataType			curType			= s_selectionInfo[typeNdx].type;
+		const ShaderEvalFunc	evalFunc		= s_selectionInfo[typeNdx].evalFunc;
+		const bool				isBoolCase		= isDataTypeBoolOrBVec(curType);
+		const bool				isFloatCase		= isDataTypeFloatOrVec(curType);
+		const bool				isIntCase		= isDataTypeIntOrIVec(curType);
+		const bool				isUintCase		= isDataTypeUintOrUVec(curType);
+		const char*				dataTypeStr		= getDataTypeName(curType);
+
+		DE_ASSERT(isBoolCase || isFloatCase || isIntCase || isUintCase);
+		DE_UNREF(isIntCase);
+
+		for (int precision = (int)PRECISION_MEDIUMP; precision < (int)PRECISION_LAST; precision++)
+		{
+			if (isBoolCase && precision != PRECISION_MEDIUMP) // Use mediump interpolators for booleans.
+				continue;
+
+			const char*		precisionStr	= getPrecisionName((Precision)precision);
+			std::string		precisionPrefix	= isBoolCase ? "" : (std::string(precisionStr) + "_");
+
+			for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(s_shaderTypes); shaderTypeNdx++)
+			{
+				const ShaderType	shaderType		= s_shaderTypes[shaderTypeNdx];
+				const char*			shaderTypeName	= getShaderTypeName(shaderType);
+				const bool			isVertexCase	= (ShaderType)shaderType == SHADERTYPE_VERTEX;
+
+				std::string			name			= precisionPrefix + dataTypeStr + "_" + shaderTypeName;
+
+				ShaderDataSpec		shaderSpec;
+				shaderSpec.numInputs		= 3;
+				shaderSpec.precision		= isBoolCase ? PRECISION_LAST : (Precision)precision;
+				shaderSpec.output			= curType;
+				shaderSpec.resultScale		= (isBoolCase ? 1.0f : (isFloatCase ? 0.5f : (isUintCase ? 0.5f : 0.1f)));
+				shaderSpec.resultBias		= (isBoolCase ? 0.0f : (isFloatCase ? 0.5f : (isUintCase ? 0.0f : 0.5f)));
+				shaderSpec.referenceScale	= shaderSpec.resultScale;
+				shaderSpec.referenceBias	= shaderSpec.resultBias;
+
+				const float rangeMin = (isBoolCase ? -1.0f : (isFloatCase ? -1.0f : (isUintCase ? 0.0f : -5.0f)));
+				const float rangeMax = (isBoolCase ?  1.0f : (isFloatCase ?  1.0f : (isUintCase ? 2.0f :  5.0f)));
+
+				shaderSpec.inputs[0] = ShaderValue(TYPE_BOOL, -1.0f, 1.0f);
+				shaderSpec.inputs[1] = ShaderValue(curType, rangeMin, rangeMax);
+				shaderSpec.inputs[2] = ShaderValue(curType, rangeMin, rangeMax);
+
+				selectionGroup->addChild(new ShaderOperatorCase(m_testCtx, name.c_str(), "", isVertexCase, evalFunc, "res = in0 ? in1 : in2;", shaderSpec));
+			}
+		}
+	}
+
+	// The sequence operator (comma).
+
+	TestCaseGroup* sequenceGroup = new TestCaseGroup(m_testCtx, "sequence", "Sequence operator tests");
+	addChild(sequenceGroup);
+
+	TestCaseGroup* sequenceNoSideEffGroup = new TestCaseGroup(m_testCtx, "no_side_effects", "Sequence tests without side-effects");
+	TestCaseGroup* sequenceSideEffGroup = new TestCaseGroup(m_testCtx, "side_effects", "Sequence tests with side-effects");
+	sequenceGroup->addChild(sequenceNoSideEffGroup);
+	sequenceGroup->addChild(sequenceSideEffGroup);
+
+	static const struct
+	{
+		const bool				containsSideEffects;
+		const char*				caseName;
+		const char*				expressionStr;
+		const int				numInputs;
+		const DataType			inputTypes[MAX_INPUTS];
+		const DataType			resultType;
+		const ShaderEvalFunc	evalFunc;
+	} s_sequenceCases[] =
+	{
+		{ false,	"vec4",					"in0, in2 + in1, in1 + in0",							3,	{ TYPE_FLOAT_VEC4,	TYPE_FLOAT_VEC4,	TYPE_FLOAT_VEC4	},	TYPE_FLOAT_VEC4,	evalSequenceNoSideEffCase0 },
+		{ false,	"float_uint",			"in0 + in2, in1 + in1",									3,	{ TYPE_FLOAT,		TYPE_UINT,			TYPE_FLOAT		},	TYPE_UINT,			evalSequenceNoSideEffCase1 },
+		{ false,	"bool_vec2",			"in0 && in1, in0, ivec2(vec2(in0) + in2)",				3,	{ TYPE_BOOL,		TYPE_BOOL,			TYPE_FLOAT_VEC2	},	TYPE_INT_VEC2,		evalSequenceNoSideEffCase2 },
+		{ false,	"vec4_ivec4_bvec4",		"in0 + vec4(in1), in2, in1",							3,	{ TYPE_FLOAT_VEC4,	TYPE_INT_VEC4,		TYPE_BOOL_VEC4	},	TYPE_INT_VEC4,		evalSequenceNoSideEffCase3 },
+
+		{ true,		"vec4",					"in0++, in1 = in0 + in2, in2 = in1",					3,	{ TYPE_FLOAT_VEC4,	TYPE_FLOAT_VEC4,	TYPE_FLOAT_VEC4	},	TYPE_FLOAT_VEC4,	evalSequenceSideEffCase0 },
+		{ true,		"float_uint",			"in1++, in0 = float(in1), in1 = uint(in0 + in2)",		3,	{ TYPE_FLOAT,		TYPE_UINT,			TYPE_FLOAT		},	TYPE_UINT,			evalSequenceSideEffCase1 },
+		{ true,		"bool_vec2",			"in1 = in0, in2++, in2 = in2 + vec2(in1), ivec2(in2)",	3,	{ TYPE_BOOL,		TYPE_BOOL,			TYPE_FLOAT_VEC2	},	TYPE_INT_VEC2,		evalSequenceSideEffCase2 },
+		{ true,		"vec4_ivec4_bvec4",		"in0 = in0 + vec4(in2), in1 = in1 + ivec4(in0), in1++",	3,	{ TYPE_FLOAT_VEC4,	TYPE_INT_VEC4,		TYPE_BOOL_VEC4	},	TYPE_INT_VEC4,		evalSequenceSideEffCase3 }
+	};
+
+	for (int caseNdx = 0; caseNdx < DE_LENGTH_OF_ARRAY(s_sequenceCases); caseNdx++)
+	{
+		for (int precision = (int)PRECISION_MEDIUMP; precision < (int)PRECISION_LAST; precision++)
+		{
+			for (int shaderTypeNdx = 0; shaderTypeNdx < DE_LENGTH_OF_ARRAY(s_shaderTypes); shaderTypeNdx++)
+			{
+				const ShaderType	shaderType		= s_shaderTypes[shaderTypeNdx];
+				const char*			shaderTypeName	= getShaderTypeName(shaderType);
+				const bool			isVertexCase	= (ShaderType)shaderType == SHADERTYPE_VERTEX;
+
+				std::string			name			= std::string("") + getPrecisionName((Precision)precision) + "_" + s_sequenceCases[caseNdx].caseName + "_" + shaderTypeName;
+
+				ShaderDataSpec		shaderSpec;
+				shaderSpec.numInputs		= s_sequenceCases[caseNdx].numInputs;
+				shaderSpec.precision		= (Precision)precision;
+				shaderSpec.output			= s_sequenceCases[caseNdx].resultType;
+				shaderSpec.resultScale		= 0.5f;
+				shaderSpec.resultBias		= 0.0f;
+				shaderSpec.referenceScale	= shaderSpec.resultScale;
+				shaderSpec.referenceBias	= shaderSpec.resultBias;
+
+				for (int inputNdx = 0; inputNdx < s_sequenceCases[caseNdx].numInputs; inputNdx++)
+				{
+					const DataType	type			= s_sequenceCases[caseNdx].inputTypes[inputNdx];
+					const float		rangeMin		= (isDataTypeFloatOrVec(type) ? -0.5f : (isDataTypeIntOrIVec(type) ? -2.0f : (isDataTypeUintOrUVec(type) ? 0.0f : -1.0f)));
+					const float		rangeMax		= (isDataTypeFloatOrVec(type) ?  0.5f : (isDataTypeIntOrIVec(type) ?  2.0f : (isDataTypeUintOrUVec(type) ? 2.0f :  1.0f)));
+
+					shaderSpec.inputs[inputNdx] = ShaderValue(type, rangeMin, rangeMax);
+				}
+
+				const std::string expression = std::string("") + "res = (" + s_sequenceCases[caseNdx].expressionStr + ");";
+
+				TestCaseGroup* group = s_sequenceCases[caseNdx].containsSideEffects ? sequenceSideEffGroup : sequenceNoSideEffGroup;
+				group->addChild(new ShaderOperatorCase(m_testCtx, name.c_str(), "", isVertexCase, s_sequenceCases[caseNdx].evalFunc, expression.c_str(), shaderSpec));
+			}
+		}
+	}
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createOperatorTests (tcu::TestContext& testCtx)
+{
+	return new ShaderOperatorTests(testCtx);
+}
+
+} // sr
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderOperatorTests.hpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderOperatorTests.hpp
new file mode 100644
index 0000000..e94594a
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderOperatorTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTSHADERRENDEROPERATORTESTS_HPP
+#define _VKTSHADERRENDEROPERATORTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader operator tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace sr
+{
+
+tcu::TestCaseGroup*	createOperatorTests	(tcu::TestContext& testCtx);
+
+} // sr
+} // vkt
+
+#endif // _VKTSHADERRENDEROPERATORTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderReturnTests.cpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderReturnTests.cpp
new file mode 100644
index 0000000..34fe0ea
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderReturnTests.cpp
@@ -0,0 +1,505 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader return statement tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderRenderReturnTests.hpp"
+#include "vktShaderRender.hpp"
+#include "tcuStringTemplate.hpp"
+
+#include <map>
+#include <string>
+
+namespace vkt
+{
+namespace sr
+{
+namespace
+{
+
+enum ReturnMode
+{
+	RETURNMODE_ALWAYS = 0,
+	RETURNMODE_NEVER,
+	RETURNMODE_DYNAMIC,
+
+	RETURNMODE_LAST
+};
+
+// Evaluation functions
+inline void evalReturnAlways	(ShaderEvalContext& c) { c.color.xyz() = c.coords.swizzle(0,1,2); }
+inline void evalReturnNever		(ShaderEvalContext& c) { c.color.xyz() = c.coords.swizzle(3,2,1); }
+inline void evalReturnDynamic	(ShaderEvalContext& c) { c.color.xyz() = (c.coords.x()+c.coords.y() >= 0.0f) ? c.coords.swizzle(0,1,2) : c.coords.swizzle(3,2,1); }
+
+static ShaderEvalFunc getEvalFunc (ReturnMode mode)
+{
+	switch (mode)
+	{
+		case RETURNMODE_ALWAYS:		return evalReturnAlways;
+		case RETURNMODE_NEVER:		return evalReturnNever;
+		case RETURNMODE_DYNAMIC:	return evalReturnDynamic;
+		default:
+			DE_ASSERT(DE_FALSE);
+			return (ShaderEvalFunc)DE_NULL;
+	}
+}
+
+class ShaderReturnCase : public ShaderRenderCase
+{
+public:
+								ShaderReturnCase		(tcu::TestContext&			testCtx,
+														 const std::string&			name,
+														 const std::string&			description,
+														 bool						isVertexCase,
+														 const std::string&			shaderSource,
+														 const ShaderEvalFunc		evalFunc,
+														 const UniformSetup*		uniformFunc);
+	virtual						~ShaderReturnCase		(void);
+};
+
+ShaderReturnCase::ShaderReturnCase (tcu::TestContext&			testCtx,
+									const std::string&			name,
+									const std::string&			description,
+									bool						isVertexCase,
+									const std::string&			shaderSource,
+									const ShaderEvalFunc		evalFunc,
+									const UniformSetup*			uniformFunc)
+	: ShaderRenderCase(testCtx, name, description, isVertexCase, evalFunc, uniformFunc, DE_NULL)
+{
+	if (isVertexCase)
+	{
+		m_vertShaderSource = shaderSource;
+		m_fragShaderSource =
+			"#version 310 es\n"
+			"layout(location = 0) in mediump vec4 v_color;\n"
+			"layout(location = 0) out mediump vec4 o_color;\n\n"
+			"void main (void)\n"
+			"{\n"
+			"    o_color = v_color;\n"
+			"}\n";
+	}
+	else
+	{
+		m_fragShaderSource = shaderSource;
+		m_vertShaderSource =
+			"#version 310 es\n"
+			"layout(location = 0) in  highp   vec4 a_position;\n"
+			"layout(location = 1) in  highp   vec4 a_coords;\n"
+			"layout(location = 0) out mediump vec4 v_coords;\n\n"
+			"void main (void)\n"
+			"{\n"
+			"    gl_Position = a_position;\n"
+			"    v_coords = a_coords;\n"
+			"}\n";
+	}
+}
+
+ShaderReturnCase::~ShaderReturnCase (void)
+{
+}
+
+class ReturnTestUniformSetup : public UniformSetup
+{
+public:
+								ReturnTestUniformSetup	(const BaseUniformType uniformType)
+									: m_uniformType(uniformType)
+								{}
+	virtual void				setup					(ShaderRenderCaseInstance& instance, const tcu::Vec4&) const
+								{
+									instance.useUniform(0u, m_uniformType);
+								}
+
+private:
+	const BaseUniformType		m_uniformType;
+};
+
+// Test case builders.
+
+de::MovePtr<ShaderReturnCase> makeConditionalReturnInFuncCase (tcu::TestContext& context, const std::string& name, const std::string& description, ReturnMode returnMode, bool isVertex)
+{
+	tcu::StringTemplate tmpl(
+		"#version 310 es\n"
+		"layout(location = ${COORDLOC}) in ${COORDPREC} vec4 ${COORDS};\n"
+		"${EXTRADECL}\n"
+		"${COORDPREC} vec4 getColor (void)\n"
+		"{\n"
+		"    if (${RETURNCOND})\n"
+		"        return vec4(${COORDS}.xyz, 1.0);\n"
+		"    return vec4(${COORDS}.wzy, 1.0);\n"
+		"}\n\n"
+		"void main (void)\n"
+		"{\n"
+		"${POSITIONWRITE}"
+		"    ${OUTPUT} = getColor();\n"
+		"}\n");
+
+	const char* coords = isVertex ? "a_coords" : "v_coords";
+
+	std::map<std::string, std::string> params;
+
+	params["COORDLOC"]		= isVertex ? "1"			: "0";
+	params["COORDPREC"]		= isVertex ? "highp"		: "mediump";
+	params["OUTPUT"]		= isVertex ? "v_color"		: "o_color";
+	params["COORDS"]		= coords;
+	params["EXTRADECL"]		= isVertex ? "layout(location = 0) in highp vec4 a_position;\nlayout(location = 0) out mediump vec4 v_color;\n" : "layout(location = 0) out mediump vec4 o_color;\n";
+	params["POSITIONWRITE"]	= isVertex ? "    gl_Position = a_position;\n" : "";
+
+	switch (returnMode)
+	{
+		case RETURNMODE_ALWAYS:		params["RETURNCOND"] = "true";											break;
+		case RETURNMODE_NEVER:		params["RETURNCOND"] = "false";											break;
+		case RETURNMODE_DYNAMIC:	params["RETURNCOND"] = std::string(coords) + ".x+" + coords + ".y >= 0.0";	break;
+		default:					DE_ASSERT(DE_FALSE);
+	}
+
+	return de::MovePtr<ShaderReturnCase>(new ShaderReturnCase(context, name, description, isVertex, tmpl.specialize(params), getEvalFunc(returnMode), DE_NULL));
+}
+
+de::MovePtr<ShaderReturnCase> makeOutputWriteReturnCase (tcu::TestContext& context, const std::string& name, const std::string& description, bool inFunction, ReturnMode returnMode, bool isVertex)
+{
+	tcu::StringTemplate tmpl(
+		inFunction
+		?
+			"#version 310 es\n"
+			"layout(location = ${COORDLOC}) in ${COORDPREC} vec4 ${COORDS};\n"
+			"${EXTRADECL}\n"
+			"void myfunc (void)\n"
+			"{\n"
+			"    ${OUTPUT} = vec4(${COORDS}.xyz, 1.0);\n"
+			"    if (${RETURNCOND})\n"
+			"        return;\n"
+			"    ${OUTPUT} = vec4(${COORDS}.wzy, 1.0);\n"
+			"}\n\n"
+			"void main (void)\n"
+			"{\n"
+			"${POSITIONWRITE}"
+			"    myfunc();\n"
+			"}\n"
+		:
+			"#version 310 es\n"
+			"layout(location = ${COORDLOC}) in ${COORDPREC} vec4 ${COORDS};\n"
+			"${EXTRADECL}\n"
+			"void main (void)\n"
+			"{\n"
+			"${POSITIONWRITE}"
+			"    ${OUTPUT} = vec4(${COORDS}.xyz, 1.0);\n"
+			"    if (${RETURNCOND})\n"
+			"        return;\n"
+			"    ${OUTPUT} = vec4(${COORDS}.wzy, 1.0);\n"
+			"}\n");
+
+	const char* coords = isVertex ? "a_coords" : "v_coords";
+
+	std::map<std::string, std::string> params;
+
+	params["COORDLOC"]		= isVertex ? "1"			: "0";
+	params["COORDPREC"]		= isVertex ? "highp"		: "mediump";
+	params["COORDS"]		= coords;
+	params["OUTPUT"]		= isVertex ? "v_color"		: "o_color";
+	params["EXTRADECL"]		= isVertex ? "layout(location = 0) in highp vec4 a_position;\nlayout(location = 0) out mediump vec4 v_color;\n" : "layout(location = 0) out mediump vec4 o_color;\n";
+	params["POSITIONWRITE"]	= isVertex ? "    gl_Position = a_position;\n" : "";
+
+	switch (returnMode)
+	{
+		case RETURNMODE_ALWAYS:		params["RETURNCOND"] = "true";											break;
+		case RETURNMODE_NEVER:		params["RETURNCOND"] = "false";											break;
+		case RETURNMODE_DYNAMIC:	params["RETURNCOND"] = std::string(coords) + ".x+" + coords + ".y >= 0.0";	break;
+		default:					DE_ASSERT(DE_FALSE);
+	}
+
+	return de::MovePtr<ShaderReturnCase>(new ShaderReturnCase(context, name, description, isVertex, tmpl.specialize(params), getEvalFunc(returnMode), DE_NULL));
+}
+
+de::MovePtr<ShaderReturnCase> makeReturnInLoopCase (tcu::TestContext& context, const std::string& name, const std::string& description, bool isDynamicLoop, ReturnMode returnMode, bool isVertex)
+{
+	tcu::StringTemplate tmpl(
+		"#version 310 es\n"
+		"layout(location = ${COORDLOC}) in ${COORDPREC} vec4 ${COORDS};\n"
+		"layout(binding = 0, std140) uniform something { mediump int ui_one; };\n"
+		"${EXTRADECL}\n"
+		"${COORDPREC} vec4 getCoords (void)\n"
+		"{\n"
+		"    ${COORDPREC} vec4 coords = ${COORDS};\n"
+		"    for (int i = 0; i < ${ITERLIMIT}; i++)\n"
+		"    {\n"
+		"        if (${RETURNCOND})\n"
+		"            return coords;\n"
+		"        coords = coords.wzyx;\n"
+		"    }\n"
+		"    return coords;\n"
+		"}\n\n"
+		"void main (void)\n"
+		"{\n"
+		"${POSITIONWRITE}"
+		"    ${OUTPUT} = vec4(getCoords().xyz, 1.0);\n"
+		"}\n");
+
+	const char* coords = isVertex ? "a_coords" : "v_coords";
+
+	std::map<std::string, std::string> params;
+
+	params["COORDLOC"]		= isVertex ? "1"			: "0";
+	params["COORDPREC"]		= isVertex ? "highp"		: "mediump";
+	params["OUTPUT"]		= isVertex ? "v_color"		: "o_color";
+	params["COORDS"]		= coords;
+	params["EXTRADECL"]		= isVertex ? "layout(location = 0) in highp vec4 a_position;\nlayout(location = 0) out mediump vec4 v_color;\n" : "layout(location = 0) out mediump vec4 o_color;\n";
+	params["POSITIONWRITE"]	= isVertex ? "    gl_Position = a_position;\n" : "";
+	params["ITERLIMIT"]		= isDynamicLoop ? "ui_one" : "1";
+
+	switch (returnMode)
+	{
+		case RETURNMODE_ALWAYS:		params["RETURNCOND"] = "true";											break;
+		case RETURNMODE_NEVER:		params["RETURNCOND"] = "false";											break;
+		case RETURNMODE_DYNAMIC:	params["RETURNCOND"] = std::string(coords) + ".x+" + coords + ".y >= 0.0";	break;
+		default:					DE_ASSERT(DE_FALSE);
+	}
+
+	return de::MovePtr<ShaderReturnCase>(new ShaderReturnCase(context, name, description, isVertex, tmpl.specialize(params), getEvalFunc(returnMode), new ReturnTestUniformSetup(UI_ONE)));
+}
+
+static const char* getReturnModeName (ReturnMode mode)
+{
+	switch (mode)
+	{
+		case RETURNMODE_ALWAYS:		return "always";
+		case RETURNMODE_NEVER:		return "never";
+		case RETURNMODE_DYNAMIC:	return "dynamic";
+		default:
+			DE_ASSERT(DE_FALSE);
+			return DE_NULL;
+	}
+}
+
+static const char* getReturnModeDesc (ReturnMode mode)
+{
+	switch (mode)
+	{
+		case RETURNMODE_ALWAYS:		return "Always return";
+		case RETURNMODE_NEVER:		return "Never return";
+		case RETURNMODE_DYNAMIC:	return "Return based on coords";
+		default:
+			DE_ASSERT(DE_FALSE);
+			return DE_NULL;
+	}
+}
+
+class ShaderReturnTests : public tcu::TestCaseGroup
+{
+public:
+							ShaderReturnTests		(tcu::TestContext& context);
+	virtual					~ShaderReturnTests		(void);
+	virtual void			init					(void);
+
+private:
+							ShaderReturnTests		(const ShaderReturnTests&);		// not allowed!
+	ShaderReturnTests&		operator=				(const ShaderReturnTests&);		// not allowed!
+};
+
+ShaderReturnTests::ShaderReturnTests (tcu::TestContext& context)
+	: TestCaseGroup(context, "return", "Return Statement Tests")
+{
+}
+
+ShaderReturnTests::~ShaderReturnTests (void)
+{
+}
+
+void ShaderReturnTests::init (void)
+{
+	addChild(new ShaderReturnCase(m_testCtx, "single_return_vertex", "Single return statement in function", true,
+		"#version 310 es\n"
+		"layout(location = 0) in highp vec4 a_position;\n"
+		"layout(location = 1) in highp vec4 a_coords;\n"
+		"layout(location = 0) out highp vec4 v_color;\n\n"
+		"vec4 getColor (void)\n"
+		"{\n"
+		"    return vec4(a_coords.xyz, 1.0);\n"
+		"}\n\n"
+		"void main (void)\n"
+		"{\n"
+		"    gl_Position = a_position;\n"
+		"    v_color = getColor();\n"
+		"}\n", evalReturnAlways, DE_NULL));
+	addChild(new ShaderReturnCase(m_testCtx, "single_return_fragment", "Single return statement in function", false,
+		"#version 310 es\n"
+		"layout(location = 0) in mediump vec4 v_coords;\n"
+		"layout(location = 0) out mediump vec4 o_color;\n"
+		"mediump vec4 getColor (void)\n"
+		"{\n"
+		"    return vec4(v_coords.xyz, 1.0);\n"
+		"}\n\n"
+		"void main (void)\n"
+		"{\n"
+		"    o_color = getColor();\n"
+		"}\n", evalReturnAlways, DE_NULL));
+
+	// Conditional return statement in function.
+	for (int returnMode = 0; returnMode < RETURNMODE_LAST; returnMode++)
+	{
+		for (int isFragment = 0; isFragment < 2; isFragment++)
+		{
+			std::string						name		= std::string("conditional_return_") + getReturnModeName((ReturnMode)returnMode) + (isFragment ? "_fragment" : "_vertex");
+			std::string						description	= std::string(getReturnModeDesc((ReturnMode)returnMode)) + " in function";
+			de::MovePtr<ShaderReturnCase>	testCase	(makeConditionalReturnInFuncCase(m_testCtx, name, description, (ReturnMode)returnMode, isFragment == 0));
+			addChild(testCase.release());
+		}
+	}
+
+	// Unconditional double return in function.
+	addChild(new ShaderReturnCase(m_testCtx, "double_return_vertex", "Unconditional double return in function", true,
+		"#version 310 es\n"
+		"layout(location = 0) in highp vec4 a_position;\n"
+		"layout(location = 1) in highp vec4 a_coords;\n"
+		"layout(location = 0) out highp vec4 v_color;\n\n"
+		"vec4 getColor (void)\n"
+		"{\n"
+		"    return vec4(a_coords.xyz, 1.0);\n"
+		"    return vec4(a_coords.wzy, 1.0);\n"
+		"}\n\n"
+		"void main (void)\n"
+		"{\n"
+		"    gl_Position = a_position;\n"
+		"    v_color = getColor();\n"
+		"}\n", evalReturnAlways, DE_NULL));
+	addChild(new ShaderReturnCase(m_testCtx, "double_return_fragment", "Unconditional double return in function", false,
+		"#version 310 es\n"
+		"layout(location = 0) in mediump vec4 v_coords;\n"
+		"layout(location = 0) out mediump vec4 o_color;\n\n"
+		"mediump vec4 getColor (void)\n"
+		"{\n"
+		"    return vec4(v_coords.xyz, 1.0);\n"
+		"    return vec4(v_coords.wzy, 1.0);\n"
+		"}\n\n"
+		"void main (void)\n"
+		"{\n"
+		"    o_color = getColor();\n"
+		"}\n", evalReturnAlways, DE_NULL));
+
+	// Last statement in main.
+	addChild(new ShaderReturnCase(m_testCtx, "last_statement_in_main_vertex", "Return as a final statement in main()", true,
+		"#version 310 es\n"
+		"layout(location = 0) in highp vec4 a_position;\n"
+		"layout(location = 1) in highp vec4 a_coords;\n"
+		"layout(location = 0) out highp vec4 v_color;\n\n"
+		"void main (void)\n"
+		"{\n"
+		"    gl_Position = a_position;\n"
+		"    v_color = vec4(a_coords.xyz, 1.0);\n"
+		"    return;\n"
+		"}\n", evalReturnAlways, DE_NULL));
+	addChild(new ShaderReturnCase(m_testCtx, "last_statement_in_main_fragment", "Return as a final statement in main()", false,
+		"#version 310 es\n"
+		"layout(location = 0) in mediump vec4 v_coords;\n"
+		"layout(location = 0) out mediump vec4 o_color;\n\n"
+		"void main (void)\n"
+		"{\n"
+		"    o_color = vec4(v_coords.xyz, 1.0);\n"
+		"    return;\n"
+		"}\n", evalReturnAlways, DE_NULL));
+
+	// Return between output variable writes.
+	for (int inFunc = 0; inFunc < 2; inFunc++)
+	{
+		for (int returnMode = 0; returnMode < RETURNMODE_LAST; returnMode++)
+		{
+			for (int isFragment = 0; isFragment < 2; isFragment++)
+			{
+				std::string						name		= std::string("output_write_") + (inFunc ? "in_func_" : "") + getReturnModeName((ReturnMode)returnMode) + (isFragment ? "_fragment" : "_vertex");
+				std::string						desc		= std::string(getReturnModeDesc((ReturnMode)returnMode)) + (inFunc ? " in user-defined function" : " in main()") + " between output writes";
+				de::MovePtr<ShaderReturnCase>	testCase	(makeOutputWriteReturnCase(m_testCtx, name, desc, inFunc != 0, (ReturnMode)returnMode, isFragment == 0));
+				addChild(testCase.release());
+			}
+		}
+	}
+
+	// Conditional return statement in loop.
+	for (int isDynamicLoop = 0; isDynamicLoop < 2; isDynamicLoop++)
+	{
+		for (int returnMode = 0; returnMode < RETURNMODE_LAST; returnMode++)
+		{
+			for (int isFragment = 0; isFragment < 2; isFragment++)
+			{
+				std::string						name		= std::string("return_in_") + (isDynamicLoop ? "dynamic" : "static") + "_loop_" + getReturnModeName((ReturnMode)returnMode) + (isFragment ? "_fragment" : "_vertex");
+				std::string						description	= std::string(getReturnModeDesc((ReturnMode)returnMode)) + " in loop";
+				de::MovePtr<ShaderReturnCase>	testCase	(makeReturnInLoopCase(m_testCtx, name, description, isDynamicLoop != 0, (ReturnMode)returnMode, isFragment == 0));
+				addChild(testCase.release());
+			}
+		}
+	}
+
+	// Unconditional return in infinite loop.
+	addChild(new ShaderReturnCase(m_testCtx, "return_in_infinite_loop_vertex", "Return in infinite loop", true,
+		"#version 310 es\n"
+		"layout(location = 0) in highp vec4 a_position;\n"
+		"layout(location = 1) in highp vec4 a_coords;\n"
+		"layout(location = 0) out highp vec4 v_color;\n"
+		"layout(binding = 0, std140) uniform something { int ui_zero; };\n"
+		"highp vec4 getCoords (void)\n"
+		"{\n"
+		"	for (int i = 1; i < 10; i += ui_zero)\n"
+		"		return a_coords;\n"
+		"	return a_coords.wzyx;\n"
+		"}\n\n"
+		"void main (void)\n"
+		"{\n"
+		"    gl_Position = a_position;\n"
+		"    v_color = vec4(getCoords().xyz, 1.0);\n"
+		"    return;\n"
+		"}\n", evalReturnAlways, new ReturnTestUniformSetup(UI_ZERO)));
+	addChild(new ShaderReturnCase(m_testCtx, "return_in_infinite_loop_fragment", "Return in infinite loop", false,
+		"#version 310 es\n"
+		"layout(location = 0) in mediump vec4 v_coords;\n"
+		"layout(location = 0) out mediump vec4 o_color;\n"
+		"layout(binding = 0, std140) uniform something { int ui_zero; };\n\n"
+		"mediump vec4 getCoords (void)\n"
+		"{\n"
+		"	for (int i = 1; i < 10; i += ui_zero)\n"
+		"		return v_coords;\n"
+		"	return v_coords.wzyx;\n"
+		"}\n\n"
+		"void main (void)\n"
+		"{\n"
+		"    o_color = vec4(getCoords().xyz, 1.0);\n"
+		"    return;\n"
+		"}\n", evalReturnAlways, new ReturnTestUniformSetup(UI_ZERO)));
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createReturnTests (tcu::TestContext& testCtx)
+{
+	return new ShaderReturnTests(testCtx);
+}
+
+} // sr
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderReturnTests.hpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderReturnTests.hpp
new file mode 100644
index 0000000..1e4df36
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderReturnTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTSHADERRENDERRETURNTESTS_HPP
+#define _VKTSHADERRENDERRETURNTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader return statement tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace sr
+{
+
+tcu::TestCaseGroup*	createReturnTests	(tcu::TestContext& testCtx);
+
+} // sr
+} // vkt
+
+#endif // _VKTSHADERRENDERRETURNTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderStructTests.cpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderStructTests.cpp
new file mode 100644
index 0000000..89e878b
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderStructTests.cpp
@@ -0,0 +1,2135 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader struct tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderRenderStructTests.hpp"
+#include "vktShaderRender.hpp"
+#include "tcuStringTemplate.hpp"
+#include "deMath.h"
+
+namespace vkt
+{
+namespace sr
+{
+namespace
+{
+
+class ShaderStructCase : public ShaderRenderCase
+{
+public:
+						ShaderStructCase		(tcu::TestContext&	testCtx,
+												 const std::string&	name,
+												 const std::string&	description,
+												 bool				isVertexCase,
+												 ShaderEvalFunc		evalFunc,
+												 UniformSetupFunc	setupUniformsFunc,
+												 const std::string&	vertShaderSource,
+												 const std::string&	fragShaderSource);
+						~ShaderStructCase		(void);
+
+private:
+						ShaderStructCase		(const ShaderStructCase&);
+	ShaderStructCase&	operator=				(const ShaderStructCase&);
+};
+
+ShaderStructCase::ShaderStructCase (tcu::TestContext&	testCtx,
+									const std::string&	name,
+									const std::string&	description,
+									bool				isVertexCase,
+									ShaderEvalFunc		evalFunc,
+									UniformSetupFunc	setupUniformsFunc,
+									const std::string&	vertShaderSource,
+									const std::string&	fragShaderSource)
+	: ShaderRenderCase	(testCtx, name, description, isVertexCase, evalFunc, new UniformSetup(setupUniformsFunc), DE_NULL)
+{
+	m_vertShaderSource	= vertShaderSource;
+	m_fragShaderSource	= fragShaderSource;
+}
+
+ShaderStructCase::~ShaderStructCase (void)
+{
+}
+
+static de::MovePtr<ShaderStructCase> createStructCase (tcu::TestContext& testCtx, const std::string& name, const std::string& description, bool isVertexCase, ShaderEvalFunc evalFunc, UniformSetupFunc uniformFunc, const LineStream& shaderSrc)
+{
+	static std::string defaultVertSrc =
+		"#version 310 es\n"
+		"layout(location = 0) in highp vec4 a_position;\n"
+		"layout(location = 1) in highp vec4 a_coords;\n"
+		"layout(location = 0) out mediump vec4 v_coords;\n\n"
+		"void main (void)\n"
+		"{\n"
+		"	v_coords = a_coords;\n"
+		"	gl_Position = a_position;\n"
+		"}\n";
+	static std::string defaultFragSrc =
+		"#version 310 es\n"
+		"layout(location = 0) in mediump vec4 v_color;\n"
+		"layout(location = 0) out mediump vec4 o_color;\n\n"
+		"void main (void)\n"
+		"{\n"
+		"	o_color = v_color;\n"
+		"}\n";
+
+	// Fill in specialization parameters and build the shader source.
+	std::string vertSrc;
+	std::string fragSrc;
+	std::map<std::string, std::string> spParams;
+
+	if (isVertexCase)
+	{
+		spParams["HEADER"] =
+			"#version 310 es\n"
+			"layout(location = 0) in highp vec4 a_position;\n"
+			"layout(location = 1) in highp vec4 a_coords;\n"
+			"layout(location = 0) out mediump vec4 v_color;";
+		spParams["COORDS"]		= "a_coords";
+		spParams["DST"]			= "v_color";
+		spParams["ASSIGN_POS"]	= "gl_Position = a_position;";
+
+		vertSrc = tcu::StringTemplate(shaderSrc.str()).specialize(spParams);
+		fragSrc = defaultFragSrc;
+	}
+	else
+	{
+		spParams["HEADER"]	=
+			"#version 310 es\n"
+			"layout(location = 0) in mediump vec4 v_coords;\n"
+			"layout(location = 0) out mediump vec4 o_color;";
+		spParams["COORDS"]			= "v_coords";
+		spParams["DST"]				= "o_color";
+		spParams["ASSIGN_POS"]		= "";
+
+		vertSrc = defaultVertSrc;
+		fragSrc = tcu::StringTemplate(shaderSrc.str()).specialize(spParams);
+	}
+
+	return de::MovePtr<ShaderStructCase>(new ShaderStructCase(testCtx, name, description, isVertexCase, evalFunc, uniformFunc, vertSrc, fragSrc));
+}
+
+class LocalStructTests : public tcu::TestCaseGroup
+{
+public:
+	LocalStructTests (tcu::TestContext& testCtx)
+		: TestCaseGroup(testCtx, "local", "Local structs")
+	{
+	}
+
+	~LocalStructTests (void)
+	{
+	}
+
+	virtual void init (void);
+};
+
+void LocalStructTests::init (void)
+{
+	#define LOCAL_STRUCT_CASE(NAME, DESCRIPTION, SHADER_SRC, SET_UNIFORMS_BODY, EVAL_FUNC_BODY)																	\
+		do {																																	\
+			struct SetUniforms_##NAME { static void setUniforms (ShaderRenderCaseInstance& instance, const tcu::Vec4&) SET_UNIFORMS_BODY };		\
+			struct Eval_##NAME { static void eval (ShaderEvalContext& c) EVAL_FUNC_BODY };														\
+			addChild(createStructCase(m_testCtx, #NAME "_vertex", DESCRIPTION, true, &Eval_##NAME::eval, &SetUniforms_##NAME::setUniforms, SHADER_SRC).release());			\
+			addChild(createStructCase(m_testCtx, #NAME "_fragment", DESCRIPTION, false, &Eval_##NAME::eval, &SetUniforms_##NAME::setUniforms, SHADER_SRC).release());		\
+		} while (deGetFalse())
+
+	LOCAL_STRUCT_CASE(basic, "Basic struct usage",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_one; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump vec3	b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s = S(${COORDS}.x, vec3(0.0), ui_one);"
+		<< "	s.b = ${COORDS}.yzw;"
+		<< "	${DST} = vec4(s.a, s.b.x, s.b.y, s.c);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ONE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(0, 1, 2);
+		});
+
+	LOCAL_STRUCT_CASE(nested, "Nested struct",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< ""
+		<< "struct T {"
+		<< "	int				a;"
+		<< "	mediump vec2	b;"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s = S(${COORDS}.x, T(0, vec2(0.0)), ui_one);"
+		<< "	s.b = T(ui_zero, ${COORDS}.yz);"
+		<< "	${DST} = vec4(s.a, s.b.b, s.b.a + s.c);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(0, 1, 2);
+		});
+
+	LOCAL_STRUCT_CASE(array_member, "Struct with array member",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_one; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump float	b[3];"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s;"
+		<< "	s.a = ${COORDS}.w;"
+		<< "	s.c = ui_one;"
+		<< "	s.b[0] = ${COORDS}.z;"
+		<< "	s.b[1] = ${COORDS}.y;"
+		<< "	s.b[2] = ${COORDS}.x;"
+		<< "	${DST} = vec4(s.a, s.b[0], s.b[1], s.c);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ONE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(3, 2, 1);
+		});
+
+	LOCAL_STRUCT_CASE(array_member_dynamic_index, "Struct with array member, dynamic indexing",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump float	b[3];"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s;"
+		<< "	s.a = ${COORDS}.w;"
+		<< "	s.c = ui_one;"
+		<< "	s.b[0] = ${COORDS}.z;"
+		<< "	s.b[1] = ${COORDS}.y;"
+		<< "	s.b[2] = ${COORDS}.x;"
+		<< "	${DST} = vec4(s.b[ui_one], s.b[ui_zero], s.b[ui_two], s.c);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(1,2,0);
+		});
+
+	LOCAL_STRUCT_CASE(struct_array, "Struct array",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump int		b;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s[3];"
+		<< "	s[0] = S(${COORDS}.x, ui_zero);"
+		<< "	s[1].a = ${COORDS}.y;"
+		<< "	s[1].b = ui_one;"
+		<< "	s[2] = S(${COORDS}.z, ui_two);"
+		<< "	${DST} = vec4(s[2].a, s[1].a, s[0].a, s[2].b - s[1].b + s[0].b);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(2, 1, 0);
+		});
+
+	LOCAL_STRUCT_CASE(struct_array_dynamic_index, "Struct array with dynamic indexing",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump int		b;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s[3];"
+		<< "	s[0] = S(${COORDS}.x, ui_zero);"
+		<< "	s[1].a = ${COORDS}.y;"
+		<< "	s[1].b = ui_one;"
+		<< "	s[2] = S(${COORDS}.z, ui_two);"
+		<< "	${DST} = vec4(s[ui_two].a, s[ui_one].a, s[ui_zero].a, s[ui_two].b - s[ui_one].b + s[ui_zero].b);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(2, 1, 0);
+		});
+
+	LOCAL_STRUCT_CASE(nested_struct_array, "Nested struct array",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { mediump float uf_two; };"
+		<< "layout (std140, set = 0, binding = 4) uniform buffer4 { mediump float uf_three; };"
+		<< "layout (std140, set = 0, binding = 5) uniform buffer5 { mediump float uf_four; };"
+		<< "layout (std140, set = 0, binding = 6) uniform buffer6 { mediump float uf_half; };"
+		<< "layout (std140, set = 0, binding = 7) uniform buffer7 { mediump float uf_third; };"
+		<< "layout (std140, set = 0, binding = 8) uniform buffer8 { mediump float uf_fourth; };"
+		<< ""
+		<< "struct T {"
+		<< "	mediump float	a;"
+		<< "	mediump vec2	b[2];"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b[3];"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s[2];"
+		<< ""
+		<< "	// S[0]"
+		<< "	s[0].a         = ${COORDS}.x;"
+		<< "	s[0].b[0].a    = uf_half;"
+		<< "	s[0].b[0].b[0] = ${COORDS}.xy;"
+		<< "	s[0].b[0].b[1] = ${COORDS}.zw;"
+		<< "	s[0].b[1].a    = uf_third;"
+		<< "	s[0].b[1].b[0] = ${COORDS}.zw;"
+		<< "	s[0].b[1].b[1] = ${COORDS}.xy;"
+		<< "	s[0].b[2].a    = uf_fourth;"
+		<< "	s[0].b[2].b[0] = ${COORDS}.xz;"
+		<< "	s[0].b[2].b[1] = ${COORDS}.yw;"
+		<< "	s[0].c         = ui_zero;"
+		<< ""
+		<< "	// S[1]"
+		<< "	s[1].a         = ${COORDS}.w;"
+		<< "	s[1].b[0].a    = uf_two;"
+		<< "	s[1].b[0].b[0] = ${COORDS}.xx;"
+		<< "	s[1].b[0].b[1] = ${COORDS}.yy;"
+		<< "	s[1].b[1].a    = uf_three;"
+		<< "	s[1].b[1].b[0] = ${COORDS}.zz;"
+		<< "	s[1].b[1].b[1] = ${COORDS}.ww;"
+		<< "	s[1].b[2].a    = uf_four;"
+		<< "	s[1].b[2].b[0] = ${COORDS}.yx;"
+		<< "	s[1].b[2].b[1] = ${COORDS}.wz;"
+		<< "	s[1].c         = ui_one;"
+		<< ""
+		<< "	mediump float r = (s[0].b[1].b[0].x + s[1].b[2].b[1].y) * s[0].b[0].a; // (z + z) * 0.5"
+		<< "	mediump float g = s[1].b[0].b[0].y * s[0].b[2].a * s[1].b[2].a; // x * 0.25 * 4"
+		<< "	mediump float b = (s[0].b[2].b[1].y + s[0].b[1].b[0].y + s[1].a) * s[0].b[1].a; // (w + w + w) * 0.333"
+		<< "	mediump float a = float(s[0].c) + s[1].b[2].a - s[1].b[1].a; // 0 + 4.0 - 3.0"
+		<< "	${DST} = vec4(r, g, b, a);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+			instance.useUniform(3u, UF_TWO);
+			instance.useUniform(4u, UF_THREE);
+			instance.useUniform(5u, UF_FOUR);
+			instance.useUniform(6u, UF_HALF);
+			instance.useUniform(7u, UF_THIRD);
+			instance.useUniform(8u, UF_FOURTH);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(2, 0, 3);
+		});
+
+	LOCAL_STRUCT_CASE(nested_struct_array_dynamic_index, "Nested struct array with dynamic indexing",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { mediump float uf_two; };"
+		<< "layout (std140, set = 0, binding = 4) uniform buffer4 { mediump float uf_three; };"
+		<< "layout (std140, set = 0, binding = 5) uniform buffer5 { mediump float uf_four; };"
+		<< "layout (std140, set = 0, binding = 6) uniform buffer6 { mediump float uf_half; };"
+		<< "layout (std140, set = 0, binding = 7) uniform buffer7 { mediump float uf_third; };"
+		<< "layout (std140, set = 0, binding = 8) uniform buffer8 { mediump float uf_fourth; };"
+		<< ""
+		<< "struct T {"
+		<< "	mediump float	a;"
+		<< "	mediump vec2	b[2];"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b[3];"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s[2];"
+		<< ""
+		<< "	// S[0]"
+		<< "	s[0].a         = ${COORDS}.x;"
+		<< "	s[0].b[0].a    = uf_half;"
+		<< "	s[0].b[0].b[0] = ${COORDS}.xy;"
+		<< "	s[0].b[0].b[1] = ${COORDS}.zw;"
+		<< "	s[0].b[1].a    = uf_third;"
+		<< "	s[0].b[1].b[0] = ${COORDS}.zw;"
+		<< "	s[0].b[1].b[1] = ${COORDS}.xy;"
+		<< "	s[0].b[2].a    = uf_fourth;"
+		<< "	s[0].b[2].b[0] = ${COORDS}.xz;"
+		<< "	s[0].b[2].b[1] = ${COORDS}.yw;"
+		<< "	s[0].c         = ui_zero;"
+		<< ""
+		<< "	// S[1]"
+		<< "	s[1].a         = ${COORDS}.w;"
+		<< "	s[1].b[0].a    = uf_two;"
+		<< "	s[1].b[0].b[0] = ${COORDS}.xx;"
+		<< "	s[1].b[0].b[1] = ${COORDS}.yy;"
+		<< "	s[1].b[1].a    = uf_three;"
+		<< "	s[1].b[1].b[0] = ${COORDS}.zz;"
+		<< "	s[1].b[1].b[1] = ${COORDS}.ww;"
+		<< "	s[1].b[2].a    = uf_four;"
+		<< "	s[1].b[2].b[0] = ${COORDS}.yx;"
+		<< "	s[1].b[2].b[1] = ${COORDS}.wz;"
+		<< "	s[1].c         = ui_one;"
+		<< ""
+		<< "	mediump float r = (s[0].b[ui_one].b[ui_one-1].x + s[ui_one].b[ui_two].b[ui_zero+1].y) * s[0].b[0].a; // (z + z) * 0.5"
+		<< "	mediump float g = s[ui_two-1].b[ui_two-2].b[ui_zero].y * s[0].b[ui_two].a * s[ui_one].b[2].a; // x * 0.25 * 4"
+		<< "	mediump float b = (s[ui_zero].b[ui_one+1].b[1].y + s[0].b[ui_one*ui_one].b[0].y + s[ui_one].a) * s[0].b[ui_two-ui_one].a; // (w + w + w) * 0.333"
+		<< "	mediump float a = float(s[ui_zero].c) + s[ui_one-ui_zero].b[ui_two].a - s[ui_zero+ui_one].b[ui_two-ui_one].a; // 0 + 4.0 - 3.0"
+		<< "	${DST} = vec4(r, g, b, a);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+			instance.useUniform(3u, UF_TWO);
+			instance.useUniform(4u, UF_THREE);
+			instance.useUniform(5u, UF_FOUR);
+			instance.useUniform(6u, UF_HALF);
+			instance.useUniform(7u, UF_THIRD);
+			instance.useUniform(8u, UF_FOURTH);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(2, 0, 3);
+		});
+
+	LOCAL_STRUCT_CASE(parameter, "Struct as a function parameter",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_one; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump vec3	b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "mediump vec4 myFunc (S s)"
+		<< "{"
+		<< "	return vec4(s.a, s.b.x, s.b.y, s.c);"
+		<< "}"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s = S(${COORDS}.x, vec3(0.0), ui_one);"
+		<< "	s.b = ${COORDS}.yzw;"
+		<< "	${DST} = myFunc(s);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ONE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(0, 1, 2);
+		});
+
+	LOCAL_STRUCT_CASE(parameter_nested, "Nested struct as a function parameter",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< ""
+		<< "struct T {"
+		<< "	int				a;"
+		<< "	mediump vec2	b;"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "mediump vec4 myFunc (S s)"
+		<< "{"
+		<< "	return vec4(s.a, s.b.b, s.b.a + s.c);"
+		<< "}"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s = S(${COORDS}.x, T(0, vec2(0.0)), ui_one);"
+		<< "	s.b = T(ui_zero, ${COORDS}.yz);"
+		<< "	${DST} = myFunc(s);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(0, 1, 2);
+		});
+
+	LOCAL_STRUCT_CASE(return, "Struct as a return value",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_one; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump vec3	b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "S myFunc (void)"
+		<< "{"
+		<< "	S s = S(${COORDS}.x, vec3(0.0), ui_one);"
+		<< "	s.b = ${COORDS}.yzw;"
+		<< "	return s;"
+		<< "}"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s = myFunc();"
+		<< "	${DST} = vec4(s.a, s.b.x, s.b.y, s.c);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ONE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(0, 1, 2);
+		});
+
+	LOCAL_STRUCT_CASE(return_nested, "Nested struct as a return value",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< ""
+		<< "struct T {"
+		<< "	int				a;"
+		<< "	mediump vec2	b;"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "S myFunc (void)"
+		<< "{"
+		<< "	S s = S(${COORDS}.x, T(0, vec2(0.0)), ui_one);"
+		<< "	s.b = T(ui_zero, ${COORDS}.yz);"
+		<< "	return s;"
+		<< "}"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s = myFunc();"
+		<< "	${DST} = vec4(s.a, s.b.b, s.b.a + s.c);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(0, 1, 2);
+		});
+
+	LOCAL_STRUCT_CASE(conditional_assignment, "Conditional struct assignment",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { mediump float uf_one; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump vec3	b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s = S(${COORDS}.x, ${COORDS}.yzw, ui_zero);"
+		<< "	if (uf_one > 0.0)"
+		<< "		s = S(${COORDS}.w, ${COORDS}.zyx, ui_one);"
+		<< "	${DST} = vec4(s.a, s.b.xy, s.c);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UF_ONE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(3, 2, 1);
+		});
+
+	LOCAL_STRUCT_CASE(loop_assignment, "Struct assignment in loop",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump vec3	b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s = S(${COORDS}.x, ${COORDS}.yzw, ui_zero);"
+		<< "	for (int i = 0; i < 3; i++)"
+		<< "	{"
+		<< "		if (i == 1)"
+		<< "			s = S(${COORDS}.w, ${COORDS}.zyx, ui_one);"
+		<< "	}"
+		<< "	${DST} = vec4(s.a, s.b.xy, s.c);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(3, 2, 1);
+		});
+
+	LOCAL_STRUCT_CASE(dynamic_loop_assignment, "Struct assignment in dynamic loop",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_three; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump vec3	b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s = S(${COORDS}.x, ${COORDS}.yzw, ui_zero);"
+		<< "	for (int i = 0; i < ui_three; i++)"
+		<< "	{"
+		<< "		if (i == ui_one)"
+		<< "			s = S(${COORDS}.w, ${COORDS}.zyx, ui_one);"
+		<< "	}"
+		<< "	${DST} = vec4(s.a, s.b.xy, s.c);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_THREE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(3, 2, 1);
+		});
+
+	LOCAL_STRUCT_CASE(nested_conditional_assignment, "Conditional assignment of nested struct",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { mediump float uf_one; };"
+		<< ""
+		<< "struct T {"
+		<< "	int				a;"
+		<< "	mediump vec2	b;"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s = S(${COORDS}.x, T(ui_one, ${COORDS}.yz), ui_one);"
+		<< "	if (uf_one > 0.0)"
+		<< "		s.b = T(ui_zero, ${COORDS}.zw);"
+		<< "	${DST} = vec4(s.a, s.b.b, s.c - s.b.a);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UF_ONE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(0, 2, 3);
+		});
+
+	LOCAL_STRUCT_CASE(nested_loop_assignment, "Nested struct assignment in loop",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { mediump float uf_one; };"
+		<< ""
+		<< "struct T {"
+		<< "	int				a;"
+		<< "	mediump vec2	b;"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s = S(${COORDS}.x, T(ui_one, ${COORDS}.yz), ui_one);"
+		<< "	for (int i = 0; i < 3; i++)"
+		<< "	{"
+		<< "		if (i == 1)"
+		<< "			s.b = T(ui_zero, ${COORDS}.zw);"
+		<< "	}"
+		<< "	${DST} = vec4(s.a, s.b.b, s.c - s.b.a);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UF_ONE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(0, 2, 3);
+		});
+
+	LOCAL_STRUCT_CASE(nested_dynamic_loop_assignment, "Nested struct assignment in dynamic loop",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_three; };"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { mediump float uf_one; };"
+		<< ""
+		<< "struct T {"
+		<< "	int				a;"
+		<< "	mediump vec2	b;"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s = S(${COORDS}.x, T(ui_one, ${COORDS}.yz), ui_one);"
+		<< "	for (int i = 0; i < ui_three; i++)"
+		<< "	{"
+		<< "		if (i == ui_one)"
+		<< "			s.b = T(ui_zero, ${COORDS}.zw);"
+		<< "	}"
+		<< "	${DST} = vec4(s.a, s.b.b, s.c - s.b.a);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_THREE);
+			instance.useUniform(3u, UF_ONE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(0, 2, 3);
+		});
+
+	LOCAL_STRUCT_CASE(loop_struct_array, "Struct array usage in loop",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump int		b;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s[3];"
+		<< "	s[0] = S(${COORDS}.x, ui_zero);"
+		<< "	s[1].a = ${COORDS}.y;"
+		<< "	s[1].b = -ui_one;"
+		<< "	s[2] = S(${COORDS}.z, ui_two);"
+		<< ""
+		<< "	mediump float rgb[3];"
+		<< "	int alpha = 0;"
+		<< "	for (int i = 0; i < 3; i++)"
+		<< "	{"
+		<< "		rgb[i] = s[2-i].a;"
+		<< "		alpha += s[i].b;"
+		<< "	}"
+		<< "	${DST} = vec4(rgb[0], rgb[1], rgb[2], alpha);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(2, 1, 0);
+		});
+
+	LOCAL_STRUCT_CASE(loop_nested_struct_array, "Nested struct array usage in loop",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { mediump float uf_two; };"
+		<< "layout (std140, set = 0, binding = 4) uniform buffer4 { mediump float uf_three; };"
+		<< "layout (std140, set = 0, binding = 5) uniform buffer5 { mediump float uf_four; };"
+		<< "layout (std140, set = 0, binding = 6) uniform buffer6 { mediump float uf_half; };"
+		<< "layout (std140, set = 0, binding = 7) uniform buffer7 { mediump float uf_third; };"
+		<< "layout (std140, set = 0, binding = 8) uniform buffer8 { mediump float uf_fourth; };"
+		<< "layout (std140, set = 0, binding = 9) uniform buffer9 { mediump float uf_sixth; };"
+		<< ""
+		<< "struct T {"
+		<< "	mediump float	a;"
+		<< "	mediump vec2	b[2];"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b[3];"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s[2];"
+		<< ""
+		<< "	// S[0]"
+		<< "	s[0].a         = ${COORDS}.x;"
+		<< "	s[0].b[0].a    = uf_half;"
+		<< "	s[0].b[0].b[0] = ${COORDS}.yx;"
+		<< "	s[0].b[0].b[1] = ${COORDS}.zx;"
+		<< "	s[0].b[1].a    = uf_third;"
+		<< "	s[0].b[1].b[0] = ${COORDS}.yy;"
+		<< "	s[0].b[1].b[1] = ${COORDS}.wy;"
+		<< "	s[0].b[2].a    = uf_fourth;"
+		<< "	s[0].b[2].b[0] = ${COORDS}.zx;"
+		<< "	s[0].b[2].b[1] = ${COORDS}.zy;"
+		<< "	s[0].c         = ui_zero;"
+		<< ""
+		<< "	// S[1]"
+		<< "	s[1].a         = ${COORDS}.w;"
+		<< "	s[1].b[0].a    = uf_two;"
+		<< "	s[1].b[0].b[0] = ${COORDS}.zx;"
+		<< "	s[1].b[0].b[1] = ${COORDS}.zy;"
+		<< "	s[1].b[1].a    = uf_three;"
+		<< "	s[1].b[1].b[0] = ${COORDS}.zz;"
+		<< "	s[1].b[1].b[1] = ${COORDS}.ww;"
+		<< "	s[1].b[2].a    = uf_four;"
+		<< "	s[1].b[2].b[0] = ${COORDS}.yx;"
+		<< "	s[1].b[2].b[1] = ${COORDS}.wz;"
+		<< "	s[1].c         = ui_one;"
+		<< ""
+		<< "	mediump float r = 0.0; // (x*3 + y*3) / 6.0"
+		<< "	mediump float g = 0.0; // (y*3 + z*3) / 6.0"
+		<< "	mediump float b = 0.0; // (z*3 + w*3) / 6.0"
+		<< "	mediump float a = 1.0;"
+		<< "	for (int i = 0; i < 2; i++)"
+		<< "	{"
+		<< "		for (int j = 0; j < 3; j++)"
+		<< "		{"
+		<< "			r += s[0].b[j].b[i].y;"
+		<< "			g += s[i].b[j].b[0].x;"
+		<< "			b += s[i].b[j].b[1].x;"
+		<< "			a *= s[i].b[j].a;"
+		<< "		}"
+		<< "	}"
+		<< "	${DST} = vec4(r*uf_sixth, g*uf_sixth, b*uf_sixth, a);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+			instance.useUniform(3u, UF_TWO);
+			instance.useUniform(4u, UF_THREE);
+			instance.useUniform(5u, UF_FOUR);
+			instance.useUniform(6u, UF_HALF);
+			instance.useUniform(7u, UF_THIRD);
+			instance.useUniform(8u, UF_FOURTH);
+			instance.useUniform(9u, UF_SIXTH);
+		},
+		{
+			c.color.xyz() = (c.coords.swizzle(0, 1, 2) + c.coords.swizzle(1, 2, 3)) * 0.5f;
+		});
+
+	LOCAL_STRUCT_CASE(dynamic_loop_struct_array, "Struct array usage in dynamic loop",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { int ui_three; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump int		b;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s[3];"
+		<< "	s[0] = S(${COORDS}.x, ui_zero);"
+		<< "	s[1].a = ${COORDS}.y;"
+		<< "	s[1].b = -ui_one;"
+		<< "	s[2] = S(${COORDS}.z, ui_two);"
+		<< ""
+		<< "	mediump float rgb[3];"
+		<< "	int alpha = 0;"
+		<< "	for (int i = 0; i < ui_three; i++)"
+		<< "	{"
+		<< "		rgb[i] = s[2-i].a;"
+		<< "		alpha += s[i].b;"
+		<< "	}"
+		<< "	${DST} = vec4(rgb[0], rgb[1], rgb[2], alpha);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+			instance.useUniform(3u, UI_THREE);
+		},
+		{
+			c.color.xyz() = c.coords.swizzle(2, 1, 0);
+		});
+
+	LOCAL_STRUCT_CASE(dynamic_loop_nested_struct_array, "Nested struct array usage in dynamic loop",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { int ui_three; };"
+		<< "layout (std140, set = 0, binding = 4) uniform buffer4 { mediump float uf_two; };"
+		<< "layout (std140, set = 0, binding = 5) uniform buffer5 { mediump float uf_three; };"
+		<< "layout (std140, set = 0, binding = 6) uniform buffer6 { mediump float uf_four; };"
+		<< "layout (std140, set = 0, binding = 7) uniform buffer7 { mediump float uf_half; };"
+		<< "layout (std140, set = 0, binding = 8) uniform buffer8 { mediump float uf_third; };"
+		<< "layout (std140, set = 0, binding = 9) uniform buffer9 { mediump float uf_fourth; };"
+		<< "layout (std140, set = 0, binding = 10) uniform buffer10 { mediump float uf_sixth; };"
+		<< ""
+		<< "struct T {"
+		<< "	mediump float	a;"
+		<< "	mediump vec2	b[2];"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b[3];"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S s[2];"
+		<< ""
+		<< "	s[0].a         = ${COORDS}.x;"
+		<< "	s[0].b[0].a    = uf_half;"
+		<< "	s[0].b[0].b[0] = ${COORDS}.yx;"
+		<< "	s[0].b[0].b[1] = ${COORDS}.zx;"
+		<< "	s[0].b[1].a    = uf_third;"
+		<< "	s[0].b[1].b[0] = ${COORDS}.yy;"
+		<< "	s[0].b[1].b[1] = ${COORDS}.wy;"
+		<< "	s[0].b[2].a    = uf_fourth;"
+		<< "	s[0].b[2].b[0] = ${COORDS}.zx;"
+		<< "	s[0].b[2].b[1] = ${COORDS}.zy;"
+		<< "	s[0].c         = ui_zero;"
+		<< ""
+		<< "	s[1].a         = ${COORDS}.w;"
+		<< "	s[1].b[0].a    = uf_two;"
+		<< "	s[1].b[0].b[0] = ${COORDS}.zx;"
+		<< "	s[1].b[0].b[1] = ${COORDS}.zy;"
+		<< "	s[1].b[1].a    = uf_three;"
+		<< "	s[1].b[1].b[0] = ${COORDS}.zz;"
+		<< "	s[1].b[1].b[1] = ${COORDS}.ww;"
+		<< "	s[1].b[2].a    = uf_four;"
+		<< "	s[1].b[2].b[0] = ${COORDS}.yx;"
+		<< "	s[1].b[2].b[1] = ${COORDS}.wz;"
+		<< "	s[1].c         = ui_one;"
+		<< ""
+		<< "	mediump float r = 0.0; // (x*3 + y*3) / 6.0"
+		<< "	mediump float g = 0.0; // (y*3 + z*3) / 6.0"
+		<< "	mediump float b = 0.0; // (z*3 + w*3) / 6.0"
+		<< "	mediump float a = 1.0;"
+		<< "	for (int i = 0; i < ui_two; i++)"
+		<< "	{"
+		<< "		for (int j = 0; j < ui_three; j++)"
+		<< "		{"
+		<< "			r += s[0].b[j].b[i].y;"
+		<< "			g += s[i].b[j].b[0].x;"
+		<< "			b += s[i].b[j].b[1].x;"
+		<< "			a *= s[i].b[j].a;"
+		<< "		}"
+		<< "	}"
+		<< "	${DST} = vec4(r*uf_sixth, g*uf_sixth, b*uf_sixth, a);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+			instance.useUniform(3u, UI_THREE);
+			instance.useUniform(4u, UF_TWO);
+			instance.useUniform(5u, UF_THREE);
+			instance.useUniform(6u, UF_FOUR);
+			instance.useUniform(7u, UF_HALF);
+			instance.useUniform(8u, UF_THIRD);
+			instance.useUniform(9u, UF_FOURTH);
+			instance.useUniform(10u, UF_SIXTH);
+		},
+		{
+			c.color.xyz() = (c.coords.swizzle(0, 1, 2) + c.coords.swizzle(1, 2, 3)) * 0.5f;
+		});
+
+	LOCAL_STRUCT_CASE(basic_equal, "Basic struct equality",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_two; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump vec3	b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S a = S(floor(${COORDS}.x), vec3(0.0, floor(${COORDS}.y), 2.3), ui_one);"
+		<< "	S b = S(floor(${COORDS}.x+0.5), vec3(0.0, floor(${COORDS}.y), 2.3), ui_one);"
+		<< "	S c = S(floor(${COORDS}.x), vec3(0.0, floor(${COORDS}.y+0.5), 2.3), ui_one);"
+		<< "	S d = S(floor(${COORDS}.x), vec3(0.0, floor(${COORDS}.y), 2.3), ui_two);"
+		<< "	${DST} = vec4(0.0, 0.0, 0.0, 1.0);"
+		<< "	if (a == b) ${DST}.x = 1.0;"
+		<< "	if (a == c) ${DST}.y = 1.0;"
+		<< "	if (a == d) ${DST}.z = 1.0;"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ONE);
+			instance.useUniform(1u, UI_TWO);
+		},
+		{
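+			// a == b and a == c hold only when adding 0.5 does not change the floored coordinate; a == d never holds since ui_one != ui_two, so color.z stays 0.0.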
+			if (deFloatFloor(c.coords[0]) == deFloatFloor(c.coords[0] + 0.5f))
+				c.color.x() = 1.0f;
+			if (deFloatFloor(c.coords[1]) == deFloatFloor(c.coords[1] + 0.5f))
+				c.color.y() = 1.0f;
+		});
+
+	LOCAL_STRUCT_CASE(basic_not_equal, "Basic struct inequality",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_two; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump vec3	b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S a = S(floor(${COORDS}.x), vec3(0.0, floor(${COORDS}.y), 2.3), ui_one);"
+		<< "	S b = S(floor(${COORDS}.x+0.5), vec3(0.0, floor(${COORDS}.y), 2.3), ui_one);"
+		<< "	S c = S(floor(${COORDS}.x), vec3(0.0, floor(${COORDS}.y+0.5), 2.3), ui_one);"
+		<< "	S d = S(floor(${COORDS}.x), vec3(0.0, floor(${COORDS}.y), 2.3), ui_two);"
+		<< "	${DST} = vec4(0.0, 0.0, 0.0, 1.0);"
+		<< "	if (a != b) ${DST}.x = 1.0;"
+		<< "	if (a != c) ${DST}.y = 1.0;"
+		<< "	if (a != d) ${DST}.z = 1.0;"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ONE);
+			instance.useUniform(1u, UI_TWO);
+		},
+		{
+			if (deFloatFloor(c.coords[0]) != deFloatFloor(c.coords[0] + 0.5f))
+				c.color.x() = 1.0f;
+			if (deFloatFloor(c.coords[1]) != deFloatFloor(c.coords[1] + 0.5f))
+				c.color.y() = 1.0f;
+			c.color.z() = 1.0f;
+		});
+
+	LOCAL_STRUCT_CASE(nested_equal, "Nested struct equality",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_two; };"
+		<< ""
+		<< "struct T {"
+		<< "	mediump vec3	a;"
+		<< "	int				b;"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S a = S(floor(${COORDS}.x), T(vec3(0.0, floor(${COORDS}.y), 2.3), ui_one), 1);"
+		<< "	S b = S(floor(${COORDS}.x+0.5), T(vec3(0.0, floor(${COORDS}.y), 2.3), ui_one), 1);"
+		<< "	S c = S(floor(${COORDS}.x), T(vec3(0.0, floor(${COORDS}.y+0.5), 2.3), ui_one), 1);"
+		<< "	S d = S(floor(${COORDS}.x), T(vec3(0.0, floor(${COORDS}.y), 2.3), ui_two), 1);"
+		<< "	${DST} = vec4(0.0, 0.0, 0.0, 1.0);"
+		<< "	if (a == b) ${DST}.x = 1.0;"
+		<< "	if (a == c) ${DST}.y = 1.0;"
+		<< "	if (a == d) ${DST}.z = 1.0;"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ONE);
+			instance.useUniform(1u, UI_TWO);
+		},
+		{
+			if (deFloatFloor(c.coords[0]) == deFloatFloor(c.coords[0] + 0.5f))
+				c.color.x() = 1.0f;
+			if (deFloatFloor(c.coords[1]) == deFloatFloor(c.coords[1] + 0.5f))
+				c.color.y() = 1.0f;
+		});
+
+	LOCAL_STRUCT_CASE(nested_not_equal, "Nested struct inequality",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_two; };"
+		<< ""
+		<< "struct T {"
+		<< "	mediump vec3	a;"
+		<< "	int				b;"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b;"
+		<< "	int				c;"
+		<< "};"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S a = S(floor(${COORDS}.x), T(vec3(0.0, floor(${COORDS}.y), 2.3), ui_one), 1);"
+		<< "	S b = S(floor(${COORDS}.x+0.5), T(vec3(0.0, floor(${COORDS}.y), 2.3), ui_one), 1);"
+		<< "	S c = S(floor(${COORDS}.x), T(vec3(0.0, floor(${COORDS}.y+0.5), 2.3), ui_one), 1);"
+		<< "	S d = S(floor(${COORDS}.x), T(vec3(0.0, floor(${COORDS}.y), 2.3), ui_two), 1);"
+		<< "	${DST} = vec4(0.0, 0.0, 0.0, 1.0);"
+		<< "	if (a != b) ${DST}.x = 1.0;"
+		<< "	if (a != c) ${DST}.y = 1.0;"
+		<< "	if (a != d) ${DST}.z = 1.0;"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ONE);
+			instance.useUniform(1u, UI_TWO);
+		},
+		{
+			if (deFloatFloor(c.coords[0]) != deFloatFloor(c.coords[0] + 0.5f))
+				c.color.x() = 1.0f;
+			if (deFloatFloor(c.coords[1]) != deFloatFloor(c.coords[1] + 0.5f))
+				c.color.y() = 1.0f;
+			c.color.z() = 1.0f;
+		});
+}
+
+class UniformStructTests : public tcu::TestCaseGroup
+{
+public:
+	UniformStructTests (tcu::TestContext& testCtx)
+		: TestCaseGroup(testCtx, "uniform", "Uniform structs")
+	{
+	}
+
+	~UniformStructTests (void)
+	{
+	}
+
+	virtual void init (void);
+};
+
+void UniformStructTests::init (void)
+{
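+	// Each case below is added twice: once evaluating the struct logic in the vertex shader and once in the fragment shader.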
+	#define UNIFORM_STRUCT_CASE(NAME, DESCRIPTION, SHADER_SRC, SET_UNIFORMS_BODY, EVAL_FUNC_BODY)																\
+		do {																																							\
+			struct SetUniforms_##NAME { static void setUniforms (ShaderRenderCaseInstance& instance, const tcu::Vec4& constCoords) SET_UNIFORMS_BODY };		\
+			struct Eval_##NAME { static void eval (ShaderEvalContext& c) EVAL_FUNC_BODY };																				\
+			addChild(createStructCase(m_testCtx, #NAME "_vertex", DESCRIPTION, true, Eval_##NAME::eval, SetUniforms_##NAME::setUniforms, SHADER_SRC).release());		\
+			addChild(createStructCase(m_testCtx, #NAME "_fragment", DESCRIPTION, false, Eval_##NAME::eval, SetUniforms_##NAME::setUniforms, SHADER_SRC).release());		\
+		} while (deGetFalse())
+
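+	// The SET_UNIFORMS_BODY blocks declare C++ mirror structs with explicit _padding members so the uploaded bytes match the std140 layout of the GLSL uniform blocks.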
+	UNIFORM_STRUCT_CASE(basic, "Basic struct usage",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_one; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump vec3	b;"
+		<< "	int				c;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { S s; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	${DST} = vec4(s.a, s.b.x, s.b.y, s.c);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ONE);
+
+			struct S {
+				float			a;
+				float			_padding1[3];
+				tcu::Vec3		b;
+				int				c;
+			};
+
+			S s;
+			s.a = constCoords.x();
+			s.b = constCoords.swizzle(1, 2, 3);
+			s.c = 1;
+			instance.addUniform(1u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, sizeof(S), &s);
+		},
+		{
+			c.color.xyz() = c.constCoords.swizzle(0, 1, 2);
+		});
+
+	UNIFORM_STRUCT_CASE(nested, "Nested struct",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< ""
+		<< "struct T {"
+		<< "	int				a;"
+		<< "	mediump vec2	b;"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b;"
+		<< "	int				c;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { S s; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	${DST} = vec4(s.a, s.b.b, s.b.a + s.c);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+
+			struct T {
+				int				a;
+				float			_padding1[1];
+				tcu::Vec2		b;
+			};
+
+			struct S {
+				float			a;
+				float			_padding1[3];
+				T				b;
+				int				c;
+				float			_padding2[3];
+			};
+
+			S s;
+			s.a = constCoords.x();
+			s.b.a = 0;
+			s.b.b = constCoords.swizzle(1, 2);
+			s.c = 1;
+			instance.addUniform(2u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, sizeof(S), &s);
+		},
+		{
+			c.color.xyz() = c.constCoords.swizzle(0, 1, 2);
+		});
+
+	UNIFORM_STRUCT_CASE(array_member, "Struct with array member",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_one; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump float	b[3];"
+		<< "	int				c;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { S s; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	${DST} = vec4(s.a, s.b[0], s.b[1], s.c);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ONE);
+
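+			// std140 rounds the stride of float array elements up to 16 bytes; wrapping each float in paddedFloat reproduces that layout on the C++ side.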
+			struct paddedFloat {
+				float value;
+				float _padding[3];
+			};
+
+			struct S {
+				paddedFloat	a;
+				paddedFloat	b[3];
+				int			c;
+			};
+
+			S s;
+			s.a.value = constCoords.w();
+			s.b[0].value = constCoords.z();
+			s.b[1].value = constCoords.y();
+			s.b[2].value = constCoords.x();
+			s.c = 1;
+			instance.addUniform(1u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, sizeof(S), &s);
+		},
+		{
+			c.color.xyz() = c.constCoords.swizzle(3, 2, 1);
+		});
+
+	UNIFORM_STRUCT_CASE(array_member_dynamic_index, "Struct with array member, dynamic indexing",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump float	b[3];"
+		<< "	int				c;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { S s; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	${DST} = vec4(s.b[ui_one], s.b[ui_zero], s.b[ui_two], s.c);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+
+			struct paddedFloat {
+				float value;
+				float _padding[3];
+			};
+
+			struct S {
+				paddedFloat	a;
+				paddedFloat	b[3];
+				int			c;
+			};
+
+			S s;
+			s.a.value = constCoords.w();
+			s.b[0].value = constCoords.z();
+			s.b[1].value = constCoords.y();
+			s.b[2].value = constCoords.x();
+			s.c = 1;
+			instance.addUniform(3u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, sizeof(S), &s);
+		},
+		{
+			c.color.xyz() = c.constCoords.swizzle(1, 2, 0);
+		});
+
+	UNIFORM_STRUCT_CASE(struct_array, "Struct array",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump int		b;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { S s[3]; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	${DST} = vec4(s[2].a, s[1].a, s[0].a, s[2].b - s[1].b + s[0].b);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+
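+			// Each std140 array element starts on a 16-byte boundary, hence the trailing padding in the mirror struct.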
+			struct S {
+				float	a;
+				int		b;
+				float	_padding1[2];
+			};
+
+			S s[3];
+			s[0].a = constCoords.x();
+			s[0].b = 0;
+			s[1].a = constCoords.y();
+			s[1].b = 1;
+			s[2].a = constCoords.z();
+			s[2].b = 2;
+			instance.addUniform(3u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 3 * sizeof(S), s);
+		},
+		{
+			c.color.xyz() = c.constCoords.swizzle(2, 1, 0);
+		});
+
+	UNIFORM_STRUCT_CASE(struct_array_dynamic_index, "Struct array with dynamic indexing",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump int		b;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { S s[3]; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	${DST} = vec4(s[ui_two].a, s[ui_one].a, s[ui_zero].a, s[ui_two].b - s[ui_one].b + s[ui_zero].b);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+
+			struct S {
+				float	a;
+				int		b;
+				float	_padding1[2];
+			};
+
+			S s[3];
+			s[0].a = constCoords.x();
+			s[0].b = 0;
+			s[1].a = constCoords.y();
+			s[1].b = 1;
+			s[2].a = constCoords.z();
+			s[2].b = 2;
+			instance.addUniform(3u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 3 * sizeof(S), s);
+		},
+		{
+			c.color.xyz() = c.constCoords.swizzle(2, 1, 0);
+		});
+
+	UNIFORM_STRUCT_CASE(nested_struct_array, "Nested struct array",
+		LineStream()
+		<< "${HEADER}"
+		<< "struct T {"
+		<< "	mediump float	a;"
+		<< "	mediump vec2	b[2];"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b[3];"
+		<< "	int				c;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { S s[2]; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	mediump float r = (s[0].b[1].b[0].x + s[1].b[2].b[1].y) * s[0].b[0].a; // (z + z) * 0.5"
+		<< "	mediump float g = s[1].b[0].b[0].y * s[0].b[2].a * s[1].b[2].a; // x * 0.25 * 4"
+		<< "	mediump float b = (s[0].b[2].b[1].y + s[0].b[1].b[0].y + s[1].a) * s[0].b[1].a; // (w + w + w) * 0.333"
+		<< "	mediump float a = float(s[0].c) + s[1].b[2].a - s[1].b[1].a; // 0 + 4.0 - 3.0"
+		<< "	${DST} = vec4(r, g, b, a);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+
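+			// The GLSL member 'mediump vec2 b[2]' has a 16-byte std140 array stride, so it is mirrored here with tcu::Vec4 (only .xy is read by the shader).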
+			struct T {
+				float		a;
+				float		_padding1[3];
+				tcu::Vec4	b[2];
+			};
+
+			struct S {
+				float	a;
+				float	_padding1[3];
+				T		b[3];
+				int		c;
+				float	_padding2[3];
+			};
+
+			S s[2];
+			s[0].a = constCoords.x();
+			s[0].b[0].a = 0.5f;
+			s[0].b[0].b[0] = constCoords.swizzle(0,1,0,0);
+			s[0].b[0].b[1] = constCoords.swizzle(2,3,0,0);
+			s[0].b[1].a = 1.0f / 3.0f;
+			s[0].b[1].b[0] = constCoords.swizzle(2,3,0,0);
+			s[0].b[1].b[1] = constCoords.swizzle(0,1,0,0);
+			s[0].b[2].a = 1.0f / 4.0f;
+			s[0].b[2].b[0] = constCoords.swizzle(0,2,0,0);
+			s[0].b[2].b[1] = constCoords.swizzle(1,3,0,0);
+			s[0].c = 0;
+
+			s[1].a = constCoords.w();
+			s[1].b[0].a = 2.0f;
+			s[1].b[0].b[0] = constCoords.swizzle(0,0,0,0);
+			s[1].b[0].b[1] = constCoords.swizzle(1,1,0,0);
+			s[1].b[1].a = 3.0f;
+			s[1].b[1].b[0] = constCoords.swizzle(2,2,0,0);
+			s[1].b[1].b[1] = constCoords.swizzle(3,3,0,0);
+			s[1].b[2].a = 4.0f;
+			s[1].b[2].b[0] = constCoords.swizzle(1,0,0,0);
+			s[1].b[2].b[1] = constCoords.swizzle(3,2,0,0);
+			s[1].c = 1;
+
+			instance.addUniform(0u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 2 * sizeof(S), s);
+		},
+		{
+			c.color.xyz() = c.constCoords.swizzle(2, 0, 3);
+		});
+
+	UNIFORM_STRUCT_CASE(nested_struct_array_dynamic_index, "Nested struct array with dynamic indexing",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< ""
+		<< "struct T {"
+		<< "	mediump float	a;"
+		<< "	mediump vec2	b[2];"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b[3];"
+		<< "	int				c;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { S s[2]; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	mediump float r = (s[0].b[ui_one].b[ui_one-1].x + s[ui_one].b[ui_two].b[ui_zero+1].y) * s[0].b[0].a; // (z + z) * 0.5"
+		<< "	mediump float g = s[ui_two-1].b[ui_two-2].b[ui_zero].y * s[0].b[ui_two].a * s[ui_one].b[2].a; // x * 0.25 * 4"
+		<< "	mediump float b = (s[ui_zero].b[ui_one+1].b[1].y + s[0].b[ui_one*ui_one].b[0].y + s[ui_one].a) * s[0].b[ui_two-ui_one].a; // (w + w + w) * 0.333"
+		<< "	mediump float a = float(s[ui_zero].c) + s[ui_one-ui_zero].b[ui_two].a - s[ui_zero+ui_one].b[ui_two-ui_one].a; // 0 + 4.0 - 3.0"
+		<< "	${DST} = vec4(r, g, b, a);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			struct T {
+				float		a;
+				float		_padding1[3];
+				tcu::Vec4	b[2];
+			};
+
+			struct S {
+				float	a;
+				float	_padding1[3];
+				T		b[3];
+				int		c;
+				float	_padding2[3];
+			};
+
+			S s[2];
+			s[0].a = constCoords.x();
+			s[0].b[0].a = 0.5f;
+			s[0].b[0].b[0] = constCoords.swizzle(0,1,0,0);
+			s[0].b[0].b[1] = constCoords.swizzle(2,3,0,0);
+			s[0].b[1].a = 1.0f / 3.0f;
+			s[0].b[1].b[0] = constCoords.swizzle(2,3,0,0);
+			s[0].b[1].b[1] = constCoords.swizzle(0,1,0,0);
+			s[0].b[2].a = 1.0f / 4.0f;
+			s[0].b[2].b[0] = constCoords.swizzle(0,2,0,0);
+			s[0].b[2].b[1] = constCoords.swizzle(1,3,0,0);
+			s[0].c = 0;
+
+			s[1].a = constCoords.w();
+			s[1].b[0].a = 2.0f;
+			s[1].b[0].b[0] = constCoords.swizzle(0,0,0,0);
+			s[1].b[0].b[1] = constCoords.swizzle(1,1,0,0);
+			s[1].b[1].a = 3.0f;
+			s[1].b[1].b[0] = constCoords.swizzle(2,2,0,0);
+			s[1].b[1].b[1] = constCoords.swizzle(3,3,0,0);
+			s[1].b[2].a = 4.0f;
+			s[1].b[2].b[0] = constCoords.swizzle(1,0,0,0);
+			s[1].b[2].b[1] = constCoords.swizzle(3,2,0,0);
+			s[1].c = 1;
+
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+			instance.addUniform(3u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 2 * sizeof(S), s);
+		},
+		{
+			c.color.xyz() = c.constCoords.swizzle(2, 0, 3);
+		});
+
+	UNIFORM_STRUCT_CASE(loop_struct_array, "Struct array usage in loop",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump int		b;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { S s[3]; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	mediump float rgb[3];"
+		<< "	int alpha = 0;"
+		<< "	for (int i = 0; i < 3; i++)"
+		<< "	{"
+		<< "		rgb[i] = s[2-i].a;"
+		<< "		alpha += s[i].b;"
+		<< "	}"
+		<< "	${DST} = vec4(rgb[0], rgb[1], rgb[2], alpha);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+
+			struct S {
+				float	a;
+				int		b;
+				float	_padding1[2];
+			};
+
+			S s[3];
+			s[0].a = constCoords.x();
+			s[0].b = 0;
+			s[1].a = constCoords.y();
+			s[1].b = -1;
+			s[2].a = constCoords.z();
+			s[2].b = 2;
+			instance.addUniform(3u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 3u * sizeof(S), s);
+		},
+		{
+			c.color.xyz() = c.constCoords.swizzle(2, 1, 0);
+		});
+
+	UNIFORM_STRUCT_CASE(loop_nested_struct_array, "Nested struct array usage in loop",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { mediump float uf_two; };"
+		<< "layout (std140, set = 0, binding = 4) uniform buffer4 { mediump float uf_three; };"
+		<< "layout (std140, set = 0, binding = 5) uniform buffer5 { mediump float uf_four; };"
+		<< "layout (std140, set = 0, binding = 6) uniform buffer6 { mediump float uf_half; };"
+		<< "layout (std140, set = 0, binding = 7) uniform buffer7 { mediump float uf_third; };"
+		<< "layout (std140, set = 0, binding = 8) uniform buffer8 { mediump float uf_fourth; };"
+		<< "layout (std140, set = 0, binding = 9) uniform buffer9 { mediump float uf_sixth; };"
+		<< ""
+		<< "struct T {"
+		<< "	mediump float	a;"
+		<< "	mediump vec2	b[2];"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b[3];"
+		<< "	int				c;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 10) uniform buffer10 { S s[2]; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	mediump float r = 0.0; // (x*3 + y*3) / 6.0"
+		<< "	mediump float g = 0.0; // (y*3 + z*3) / 6.0"
+		<< "	mediump float b = 0.0; // (z*3 + w*3) / 6.0"
+		<< "	mediump float a = 1.0;"
+		<< "	for (int i = 0; i < 2; i++)"
+		<< "	{"
+		<< "		for (int j = 0; j < 3; j++)"
+		<< "		{"
+		<< "			r += s[0].b[j].b[i].y;"
+		<< "			g += s[i].b[j].b[0].x;"
+		<< "			b += s[i].b[j].b[1].x;"
+		<< "			a *= s[i].b[j].a;"
+		<< "		}"
+		<< "	}"
+		<< "	${DST} = vec4(r*uf_sixth, g*uf_sixth, b*uf_sixth, a);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+			instance.useUniform(3u, UF_TWO);
+			instance.useUniform(4u, UF_THREE);
+			instance.useUniform(5u, UF_FOUR);
+			instance.useUniform(6u, UF_HALF);
+			instance.useUniform(7u, UF_THIRD);
+			instance.useUniform(8u, UF_FOURTH);
+			instance.useUniform(9u, UF_SIXTH);
+
+			struct T {
+				float		a;
+				float		_padding1[3];
+				tcu::Vec4	b[2];
+			};
+
+			struct S {
+				float	a;
+				float	_padding1[3];
+				T		b[3];
+				int		c;
+				float	_padding2[3];
+			};
+
+			S s[2];
+			s[0].a = constCoords.x();
+			s[0].b[0].a = 0.5f;
+			s[0].b[0].b[0] = constCoords.swizzle(1,0,0,0);
+			s[0].b[0].b[1] = constCoords.swizzle(2,0,0,0);
+			s[0].b[1].a = 1.0f / 3.0f;
+			s[0].b[1].b[0] = constCoords.swizzle(1,1,0,0);
+			s[0].b[1].b[1] = constCoords.swizzle(3,1,0,0);
+			s[0].b[2].a = 1.0f / 4.0f;
+			s[0].b[2].b[0] = constCoords.swizzle(2,1,0,0);
+			s[0].b[2].b[1] = constCoords.swizzle(2,1,0,0);
+			s[0].c = 0;
+
+			s[1].a = constCoords.w();
+			s[1].b[0].a = 2.0f;
+			s[1].b[0].b[0] = constCoords.swizzle(2,0,0,0);
+			s[1].b[0].b[1] = constCoords.swizzle(2,1,0,0);
+			s[1].b[1].a = 3.0f;
+			s[1].b[1].b[0] = constCoords.swizzle(2,2,0,0);
+			s[1].b[1].b[1] = constCoords.swizzle(3,3,0,0);
+			s[1].b[2].a = 4.0f;
+			s[1].b[2].b[0] = constCoords.swizzle(1,0,0,0);
+			s[1].b[2].b[1] = constCoords.swizzle(3,2,0,0);
+			s[1].c = 1;
+
+			instance.addUniform(10u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 2 * sizeof(S), s);
+		},
+		{
+			c.color.xyz() = (c.constCoords.swizzle(0, 1, 2) + c.constCoords.swizzle(1, 2, 3)) * 0.5f;
+		});
+
+	UNIFORM_STRUCT_CASE(dynamic_loop_struct_array, "Struct array usage in dynamic loop",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { int ui_three; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump int		b;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 4) uniform buffer4 { S s[3]; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	mediump float rgb[3];"
+		<< "	int alpha = 0;"
+		<< "	for (int i = 0; i < ui_three; i++)"
+		<< "	{"
+		<< "		rgb[i] = s[2-i].a;"
+		<< "		alpha += s[i].b;"
+		<< "	}"
+		<< "	${DST} = vec4(rgb[0], rgb[1], rgb[2], alpha);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+			instance.useUniform(3u, UI_THREE);
+
+			struct S {
+				float	a;
+				int		b;
+				float	_padding1[2];
+			};
+
+			S s[3];
+			s[0].a = constCoords.x();
+			s[0].b = 0;
+			s[1].a = constCoords.y();
+			s[1].b = -1;
+			s[2].a = constCoords.z();
+			s[2].b = 2;
+			instance.addUniform(4u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 3u * sizeof(S), s);
+		},
+		{
+			c.color.xyz() = c.constCoords.swizzle(2, 1, 0);
+		});
+
+	UNIFORM_STRUCT_CASE(dynamic_loop_nested_struct_array, "Nested struct array usage in dynamic loop",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { int ui_zero; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_one; };"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { int ui_two; };"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { int ui_three; };"
+		<< "layout (std140, set = 0, binding = 4) uniform buffer4 { mediump float uf_two; };"
+		<< "layout (std140, set = 0, binding = 5) uniform buffer5 { mediump float uf_three; };"
+		<< "layout (std140, set = 0, binding = 6) uniform buffer6 { mediump float uf_four; };"
+		<< "layout (std140, set = 0, binding = 7) uniform buffer7 { mediump float uf_half; };"
+		<< "layout (std140, set = 0, binding = 8) uniform buffer8 { mediump float uf_third; };"
+		<< "layout (std140, set = 0, binding = 9) uniform buffer9 { mediump float uf_fourth; };"
+		<< "layout (std140, set = 0, binding = 10) uniform buffer10 { mediump float uf_sixth; };"
+		<< ""
+		<< "struct T {"
+		<< "	mediump float	a;"
+		<< "	mediump vec2	b[2];"
+		<< "};"
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	T				b[3];"
+		<< "	int				c;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 11) uniform buffer11 { S s[2]; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	mediump float r = 0.0; // (x*3 + y*3) / 6.0"
+		<< "	mediump float g = 0.0; // (y*3 + z*3) / 6.0"
+		<< "	mediump float b = 0.0; // (z*3 + w*3) / 6.0"
+		<< "	mediump float a = 1.0;"
+		<< "	for (int i = 0; i < ui_two; i++)"
+		<< "	{"
+		<< "		for (int j = 0; j < ui_three; j++)"
+		<< "		{"
+		<< "			r += s[0].b[j].b[i].y;"
+		<< "			g += s[i].b[j].b[0].x;"
+		<< "			b += s[i].b[j].b[1].x;"
+		<< "			a *= s[i].b[j].a;"
+		<< "		}"
+		<< "	}"
+		<< "	${DST} = vec4(r*uf_sixth, g*uf_sixth, b*uf_sixth, a);"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			instance.useUniform(0u, UI_ZERO);
+			instance.useUniform(1u, UI_ONE);
+			instance.useUniform(2u, UI_TWO);
+			instance.useUniform(3u, UI_THREE);
+			instance.useUniform(4u, UF_TWO);
+			instance.useUniform(5u, UF_THREE);
+			instance.useUniform(6u, UF_FOUR);
+			instance.useUniform(7u, UF_HALF);
+			instance.useUniform(8u, UF_THIRD);
+			instance.useUniform(9u, UF_FOURTH);
+			instance.useUniform(10u, UF_SIXTH);
+
+			struct T {
+				float		a;
+				float		_padding1[3];
+				tcu::Vec4	b[2];
+			};
+
+			struct S {
+				float	a;
+				float	_padding1[3];
+				T		b[3];
+				int		c;
+				float	_padding2[3];
+			};
+
+			S s[2];
+			s[0].a = constCoords.x();
+			s[0].b[0].a = 0.5f;
+			s[0].b[0].b[0] = constCoords.swizzle(1,0,0,0);
+			s[0].b[0].b[1] = constCoords.swizzle(2,0,0,0);
+			s[0].b[1].a = 1.0f / 3.0f;
+			s[0].b[1].b[0] = constCoords.swizzle(1,1,0,0);
+			s[0].b[1].b[1] = constCoords.swizzle(3,1,0,0);
+			s[0].b[2].a = 1.0f / 4.0f;
+			s[0].b[2].b[0] = constCoords.swizzle(2,1,0,0);
+			s[0].b[2].b[1] = constCoords.swizzle(2,1,0,0);
+			s[0].c = 0;
+
+			s[1].a = constCoords.w();
+			s[1].b[0].a = 2.0f;
+			s[1].b[0].b[0] = constCoords.swizzle(2,0,0,0);
+			s[1].b[0].b[1] = constCoords.swizzle(2,1,0,0);
+			s[1].b[1].a = 3.0f;
+			s[1].b[1].b[0] = constCoords.swizzle(2,2,0,0);
+			s[1].b[1].b[1] = constCoords.swizzle(3,3,0,0);
+			s[1].b[2].a = 4.0f;
+			s[1].b[2].b[0] = constCoords.swizzle(1,0,0,0);
+			s[1].b[2].b[1] = constCoords.swizzle(3,2,0,0);
+			s[1].c = 1;
+
+			instance.addUniform(11u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 2 * sizeof(S), s);
+		},
+		{
+			c.color.xyz() = (c.constCoords.swizzle(0, 1, 2) + c.constCoords.swizzle(1, 2, 3)) * 0.5f;
+		});
+
+	UNIFORM_STRUCT_CASE(equal, "Struct equality",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { mediump float uf_one; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_two; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump vec3	b;"
+		<< "	int				c;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { S a; };"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { S b; };"
+		<< "layout (std140, set = 0, binding = 4) uniform buffer4 { S c; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S d = S(uf_one, vec3(0.0, floor(${COORDS}.y+1.0), 2.0), ui_two);"
+		<< "	${DST} = vec4(0.0, 0.0, 0.0, 1.0);"
+		<< "	if (a == b) ${DST}.x = 1.0;"
+		<< "	if (a == c) ${DST}.y = 1.0;"
+		<< "	if (a == d) ${DST}.z = 1.0;"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			DE_UNREF(constCoords);
+			instance.useUniform(0u, UF_ONE);
+			instance.useUniform(1u, UI_TWO);
+
+			struct S {
+				float			a;
+				float			_padding1[3];
+				tcu::Vec3		b;
+				int				c;
+			};
+
+			S sa;
+			sa.a = 1.0f;
+			sa.b = tcu::Vec3(0.0f, 1.0f, 2.0f);
+			sa.c = 2;
+			instance.addUniform(2u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, sizeof(S), &sa);
+
+			S sb;
+			sb.a = 1.0f;
+			sb.b = tcu::Vec3(0.0f, 1.0f, 2.0f);
+			sb.c = 2;
+			instance.addUniform(3u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, sizeof(S), &sb);
+
+			S sc;
+			sc.a = 1.0f;
+			sc.b = tcu::Vec3(0.0f, 1.1f, 2.0f);
+			sc.c = 2;
+			instance.addUniform(4u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, sizeof(S), &sc);
+		},
+		{
+			c.color.xy() = tcu::Vec2(1.0f, 0.0f);
+			if (deFloatFloor(c.coords[1] + 1.0f) == deFloatFloor(1.1f))
+				c.color.z() = 1.0f;
+		});
+
+	UNIFORM_STRUCT_CASE(not_equal, "Struct inequality",
+		LineStream()
+		<< "${HEADER}"
+		<< "layout (std140, set = 0, binding = 0) uniform buffer0 { mediump float uf_one; };"
+		<< "layout (std140, set = 0, binding = 1) uniform buffer1 { int ui_two; };"
+		<< ""
+		<< "struct S {"
+		<< "	mediump float	a;"
+		<< "	mediump vec3	b;"
+		<< "	int				c;"
+		<< "};"
+		<< "layout (std140, set = 0, binding = 2) uniform buffer2 { S a; };"
+		<< "layout (std140, set = 0, binding = 3) uniform buffer3 { S b; };"
+		<< "layout (std140, set = 0, binding = 4) uniform buffer4 { S c; };"
+		<< ""
+		<< "void main (void)"
+		<< "{"
+		<< "	S d = S(uf_one, vec3(0.0, floor(${COORDS}.y+1.0), 2.0), ui_two);"
+		<< "	${DST} = vec4(0.0, 0.0, 0.0, 1.0);"
+		<< "	if (a != b) ${DST}.x = 1.0;"
+		<< "	if (a != c) ${DST}.y = 1.0;"
+		<< "	if (a != d) ${DST}.z = 1.0;"
+		<< "	${ASSIGN_POS}"
+		<< "}",
+		{
+			DE_UNREF(constCoords);
+			instance.useUniform(0u, UF_ONE);
+			instance.useUniform(1u, UI_TWO);
+
+			struct S {
+				float			a;
+				float			_padding1[3];
+				tcu::Vec3		b;
+				int				c;
+			};
+
+			S sa;
+			sa.a = 1.0f;
+			sa.b = tcu::Vec3(0.0f, 1.0f, 2.0f);
+			sa.c = 2;
+			instance.addUniform(2u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, sizeof(S), &sa);
+
+			S sb;
+			sb.a = 1.0f;
+			sb.b = tcu::Vec3(0.0f, 1.0f, 2.0f);
+			sb.c = 2;
+			instance.addUniform(3u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, sizeof(S), &sb);
+
+			S sc;
+			sc.a = 1.0f;
+			sc.b = tcu::Vec3(0.0f, 1.1f, 2.0f);
+			sc.c = 2;
+			instance.addUniform(4u, vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, sizeof(S), &sc);
+		},
+		{
+			c.color.xy() = tcu::Vec2(0.0f, 1.0f);
+			if (deFloatFloor(c.coords[1] + 1.0f) != deFloatFloor(1.1f))
+				c.color.z() = 1.0f;
+		});
+}
+
+class ShaderStructTests : public tcu::TestCaseGroup
+{
+public:
+							ShaderStructTests		(tcu::TestContext& context);
+	virtual					~ShaderStructTests		(void);
+
+	virtual void			init					(void);
+
+private:
+							ShaderStructTests		(const ShaderStructTests&);		// not allowed!
+	ShaderStructTests&		operator=				(const ShaderStructTests&);		// not allowed!
+};
+
+ShaderStructTests::ShaderStructTests (tcu::TestContext& testCtx)
+	: TestCaseGroup(testCtx, "struct", "Struct Tests")
+{
+}
+
+ShaderStructTests::~ShaderStructTests (void)
+{
+}
+
+void ShaderStructTests::init (void)
+{
+	addChild(new LocalStructTests(m_testCtx));
+	addChild(new UniformStructTests(m_testCtx));
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createStructTests (tcu::TestContext& testCtx)
+{
+	return new ShaderStructTests(testCtx);
+}
+
+} // sr
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderStructTests.hpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderStructTests.hpp
new file mode 100644
index 0000000..a2d3f24
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderStructTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTSHADERRENDERSTRUCTTESTS_HPP
+#define _VKTSHADERRENDERSTRUCTTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader struct tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace sr
+{
+
+tcu::TestCaseGroup*	createStructTests	(tcu::TestContext& testCtx);
+
+} // sr
+} // vkt
+
+#endif // _VKTSHADERRENDERSTRUCTTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderSwitchTests.cpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderSwitchTests.cpp
new file mode 100644
index 0000000..31cc526
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderSwitchTests.cpp
@@ -0,0 +1,505 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader switch statement tests.
+ *
+ * Variables:
+ *  + Selection expression type: static, uniform, dynamic
+ *  + Switch layout - fall-through or use of default label
+ *  + Switch nested in loop/conditional statement
+ *  + Loop/conditional statement nested in switch
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderRenderSwitchTests.hpp"
+#include "vktShaderRender.hpp"
+#include "tcuStringTemplate.hpp"
+#include "deMath.h"
+
+namespace vkt
+{
+namespace sr
+{
+namespace
+{
+
+static void setUniforms(ShaderRenderCaseInstance& instance, const tcu::Vec4&)
+{
+	instance.useUniform(0u, UI_TWO);
+}
+
+using std::string;
+
+class ShaderSwitchCase : public ShaderRenderCase
+{
+public:
+						ShaderSwitchCase			(tcu::TestContext&	testCtx,
+													 const string&		name,
+													 const string&		description,
+													 bool				isVertexCase,
+													 const string&		vtxSource,
+													 const string&		fragSource,
+													 ShaderEvalFunc		evalFunc,
+													 UniformSetupFunc	setupUniformsFunc);
+	virtual				~ShaderSwitchCase			(void);
+};
+
+ShaderSwitchCase::ShaderSwitchCase (tcu::TestContext&	testCtx,
+									const string&		name,
+									const string&		description,
+									bool				isVertexCase,
+									const string&		vtxSource,
+									const string&		fragSource,
+									ShaderEvalFunc		evalFunc,
+									UniformSetupFunc	setupUniformsFunc)
+	: ShaderRenderCase (testCtx, name, description, isVertexCase, evalFunc, new UniformSetup(setupUniformsFunc), DE_NULL)
+{
+	m_vertShaderSource	= vtxSource;
+	m_fragShaderSource	= fragSource;
+}
+
+ShaderSwitchCase::~ShaderSwitchCase (void)
+{
+}
+
+enum SwitchType
+{
+	SWITCHTYPE_STATIC = 0,
+	SWITCHTYPE_UNIFORM,
+	SWITCHTYPE_DYNAMIC,
+
+	SWITCHTYPE_LAST
+};
+
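+// With a static or uniform selector the condition is always 2, so case 2 (coords.yzw) is the expected result; only the dynamic selector varies with coords.z.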
+static void evalSwitchStatic	(ShaderEvalContext& evalCtx)	{ evalCtx.color.xyz() = evalCtx.coords.swizzle(1,2,3); }
+static void evalSwitchUniform	(ShaderEvalContext& evalCtx)	{ evalCtx.color.xyz() = evalCtx.coords.swizzle(1,2,3); }
+static void evalSwitchDynamic	(ShaderEvalContext& evalCtx)
+{
+	switch (int(deFloatFloor(evalCtx.coords.z()*1.5f + 2.0f)))
+	{
+		case 0:		evalCtx.color.xyz() = evalCtx.coords.swizzle(0,1,2);	break;
+		case 1:		evalCtx.color.xyz() = evalCtx.coords.swizzle(3,2,1);	break;
+		case 2:		evalCtx.color.xyz() = evalCtx.coords.swizzle(1,2,3);	break;
+		case 3:		evalCtx.color.xyz() = evalCtx.coords.swizzle(2,1,0);	break;
+		default:	evalCtx.color.xyz() = evalCtx.coords.swizzle(0,0,0);	break;
+	}
+}
+
+static de::MovePtr<ShaderSwitchCase> makeSwitchCase (tcu::TestContext& testCtx, const string& name, const string& desc, SwitchType type, bool isVertex, const LineStream& switchBody)
+{
+	std::ostringstream	vtx;
+	std::ostringstream	frag;
+	std::ostringstream&	op		= isVertex ? vtx : frag;
+
+	vtx << "#version 310 es\n"
+		<< "layout(location = 0) in highp vec4 a_position;\n"
+		<< "layout(location = 1) in highp vec4 a_coords;\n\n";
+	frag	<< "#version 310 es\n"
+			<< "layout(location = 0) out mediump vec4 o_color;\n";
+
+	if (isVertex)
+	{
+		vtx << "layout(location = 0) out mediump vec4 v_color;\n";
+		frag << "layout(location = 0) in mediump vec4 v_color;\n";
+	}
+	else
+	{
+		vtx << "layout(location = 0) out highp vec4 v_coords;\n";
+		frag << "layout(location = 0) in highp vec4 v_coords;\n";
+	}
+
+	if (type == SWITCHTYPE_UNIFORM)
+		op << "layout (std140, set=0, binding=0) uniform buffer0 { highp int ui_two; };\n";
+
+	vtx << "\n"
+		<< "void main (void)\n"
+		<< "{\n"
+		<< "	gl_Position = a_position;\n";
+	frag << "\n"
+		 << "void main (void)\n"
+		 << "{\n";
+
+	// Setup.
+	op << "	highp vec4 coords = " << (isVertex ? "a_coords" : "v_coords") << ";\n";
+	op << "	mediump vec3 res = vec3(0.0);\n\n";
+
+	// Switch body.
+	std::map<string, string> params;
+	params["CONDITION"] = type == SWITCHTYPE_STATIC		? "2"								:
+						  type == SWITCHTYPE_UNIFORM	? "ui_two"							:
+						  type == SWITCHTYPE_DYNAMIC	? "int(floor(coords.z*1.5 + 2.0))"	: "???";
+
+	op << tcu::StringTemplate(switchBody.str()).specialize(params);
+	op << "\n";
+
+	if (isVertex)
+	{
+		vtx << "	v_color = vec4(res, 1.0);\n";
+		frag << "	o_color = v_color;\n";
+	}
+	else
+	{
+		vtx << "	v_coords = a_coords;\n";
+		frag << "	o_color = vec4(res, 1.0);\n";
+	}
+
+	vtx << "}\n";
+	frag << "}\n";
+
+	return de::MovePtr<ShaderSwitchCase>(new ShaderSwitchCase(testCtx, name, desc, isVertex, vtx.str(), frag.str(),
+															type == SWITCHTYPE_STATIC	? evalSwitchStatic	:
+															type == SWITCHTYPE_UNIFORM	? evalSwitchUniform	:
+															type == SWITCHTYPE_DYNAMIC	? evalSwitchDynamic	: (ShaderEvalFunc)DE_NULL,
+															type == SWITCHTYPE_UNIFORM	? setUniforms : DE_NULL));
+}
+
+class ShaderSwitchTests : public tcu::TestCaseGroup
+{
+public:
+							ShaderSwitchTests		(tcu::TestContext& context);
+	virtual					~ShaderSwitchTests		(void);
+
+	virtual void			init					(void);
+
+private:
+							ShaderSwitchTests		(const ShaderSwitchTests&);		// not allowed!
+	ShaderSwitchTests&		operator=				(const ShaderSwitchTests&);		// not allowed!
+
+	void					makeSwitchCases			(const string& name, const string& desc, const LineStream& switchBody);
+};
+
+ShaderSwitchTests::ShaderSwitchTests (tcu::TestContext& testCtx)
+	: tcu::TestCaseGroup (testCtx, "switch", "Switch statement tests")
+{
+}
+
+ShaderSwitchTests::~ShaderSwitchTests (void)
+{
+}
+
+void ShaderSwitchTests::makeSwitchCases (const string& name, const string& desc, const LineStream& switchBody)
+{
+	static const char* switchTypeNames[] = { "static", "uniform", "dynamic" };
+	DE_STATIC_ASSERT(DE_LENGTH_OF_ARRAY(switchTypeNames) == SWITCHTYPE_LAST);
+
+	for (int type = 0; type < SWITCHTYPE_LAST; type++)
+	{
+		addChild(makeSwitchCase(m_testCtx, (name + "_" + switchTypeNames[type] + "_vertex"),	desc, (SwitchType)type, true,	switchBody).release());
+		addChild(makeSwitchCase(m_testCtx, (name + "_" + switchTypeNames[type] + "_fragment"),	desc, (SwitchType)type, false,	switchBody).release());
+	}
+}
+
+void ShaderSwitchTests::init (void)
+{
+	// Expected swizzles:
+	// 0: xyz
+	// 1: wzy
+	// 2: yzw
+	// 3: zyx
+
+	makeSwitchCases("basic", "Basic switch statement usage",
+		LineStream(1)
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	case 1:		res = coords.wzy;	break;"
+		<< "	case 2:		res = coords.yzw;	break;"
+		<< "	case 3:		res = coords.zyx;	break;"
+		<< "}");
+
+	makeSwitchCases("const_expr_in_label", "Constant expression in label",
+		LineStream(1)
+		<< "const int t = 2;"
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case int(0.0):	res = coords.xyz;	break;"
+		<< "	case 2-1:		res = coords.wzy;	break;"
+		<< "	case 3&(1<<1):	res = coords.yzw;	break;"
+		<< "	case t+1:		res = coords.zyx;	break;"
+		<< "}");
+
+	makeSwitchCases("default_label", "Default label usage",
+		LineStream(1)
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	case 1:		res = coords.wzy;	break;"
+		<< "	case 3:		res = coords.zyx;	break;"
+		<< "	default:	res = coords.yzw;"
+		<< "}");
+
+	makeSwitchCases("default_not_last", "Default label not placed last",
+		LineStream(1)
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	default:	res = coords.yzw;	break;"
+		<< "	case 1:		res = coords.wzy;	break;"
+		<< "	case 3:		res = coords.zyx;	break;"
+		<< "}");
+
+	makeSwitchCases("no_default_label", "No match in switch without default label",
+		LineStream(1)
+		<< "res = coords.yzw;\n"
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	case 1:		res = coords.wzy;	break;"
+		<< "	case 3:		res = coords.zyx;	break;"
+		<< "}");
+
+	makeSwitchCases("fall_through", "Fall-through",
+		LineStream(1)
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	case 1:		res = coords.wzy;	break;"
+		<< "	case 2:		coords = coords.yzwx;"
+		<< "	case 4:		res = vec3(coords);	break;"
+		<< "	case 3:		res = coords.zyx;	break;"
+		<< "}");
+
+	makeSwitchCases("fall_through_default", "Fall-through",
+		LineStream(1)
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	case 1:		res = coords.wzy;	break;"
+		<< "	case 3:		res = coords.zyx;	break;"
+		<< "	case 2:		coords = coords.yzwx;"
+		<< "	default:	res = vec3(coords);"
+		<< "}");
+
+	makeSwitchCases("conditional_fall_through", "Fall-through",
+		LineStream(1)
+		<< "highp vec4 tmp = coords;"
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	case 1:		res = coords.wzy;	break;"
+		<< "	case 2:"
+		<< "		tmp = coords.yzwx;"
+		<< "	case 3:"
+		<< "		res = vec3(tmp);"
+		<< "		if (${CONDITION} != 3)"
+		<< "			break;"
+		<< "	default:	res = tmp.zyx;		break;"
+		<< "}");
+
+	makeSwitchCases("conditional_fall_through_2", "Fall-through",
+		LineStream(1)
+		<< "highp vec4 tmp = coords;"
+		<< "mediump int c = ${CONDITION};"
+		<< "switch (c)"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	case 1:		res = coords.wzy;	break;"
+		<< "	case 2:"
+		<< "		c += ${CONDITION};"
+		<< "		tmp = coords.yzwx;"
+		<< "	case 3:"
+		<< "		res = vec3(tmp);"
+		<< "		if (c == 4)"
+		<< "			break;"
+		<< "	default:	res = tmp.zyx;		break;"
+		<< "}");
+
+	makeSwitchCases("scope", "Basic switch statement usage",
+		LineStream(1)
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	case 1:		res = coords.wzy;	break;"
+		<< "	case 2:"
+		<< "	{"
+		<< "		mediump vec3 t = coords.yzw;"
+		<< "		res = t;"
+		<< "		break;"
+		<< "	}"
+		<< "	case 3:		res = coords.zyx;	break;"
+		<< "}");
+
+	makeSwitchCases("switch_in_if", "Switch in if statement",
+		LineStream(1)
+		<< "if (${CONDITION} >= 0)"
+		<< "{"
+		<< "	switch (${CONDITION})"
+		<< "	{"
+		<< "		case 0:		res = coords.xyz;	break;"
+		<< "		case 1:		res = coords.wzy;	break;"
+		<< "		case 2:		res = coords.yzw;	break;"
+		<< "		case 3:		res = coords.zyx;	break;"
+		<< "	}"
+		<< "}");
+
+	makeSwitchCases("switch_in_for_loop", "Switch in for loop",
+		LineStream(1)
+		<< "for (int i = 0; i <= ${CONDITION}; i++)"
+		<< "{"
+		<< "	switch (i)"
+		<< "	{"
+		<< "		case 0:		res = coords.xyz;	break;"
+		<< "		case 1:		res = coords.wzy;	break;"
+		<< "		case 2:		res = coords.yzw;	break;"
+		<< "		case 3:		res = coords.zyx;	break;"
+		<< "	}"
+		<< "}");
+
+	makeSwitchCases("switch_in_while_loop", "Switch in while loop",
+		LineStream(1)
+		<< "int i = 0;"
+		<< "while (i <= ${CONDITION})"
+		<< "{"
+		<< "	switch (i)"
+		<< "	{"
+		<< "		case 0:		res = coords.xyz;	break;"
+		<< "		case 1:		res = coords.wzy;	break;"
+		<< "		case 2:		res = coords.yzw;	break;"
+		<< "		case 3:		res = coords.zyx;	break;"
+		<< "	}"
+		<< "	i += 1;"
+		<< "}");
+
+	makeSwitchCases("switch_in_do_while_loop", "Switch in do-while loop",
+		LineStream(1)
+		<< "int i = 0;"
+		<< "do"
+		<< "{"
+		<< "	switch (i)"
+		<< "	{"
+		<< "		case 0:		res = coords.xyz;	break;"
+		<< "		case 1:		res = coords.wzy;	break;"
+		<< "		case 2:		res = coords.yzw;	break;"
+		<< "		case 3:		res = coords.zyx;	break;"
+		<< "	}"
+		<< "	i += 1;"
+		<< "} while (i <= ${CONDITION});");
+
+	makeSwitchCases("if_in_switch", "Basic switch statement usage",
+		LineStream(1)
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	case 1:		res = coords.wzy;	break;"
+		<< "	default:"
+		<< "		if (${CONDITION} == 2)"
+		<< "			res = coords.yzw;"
+		<< "		else"
+		<< "			res = coords.zyx;"
+		<< "		break;"
+		<< "}");
+
+	makeSwitchCases("for_loop_in_switch", "Basic switch statement usage",
+		LineStream(1)
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	case 1:"
+		<< "	case 2:"
+		<< "	{"
+		<< "		highp vec3 t = coords.yzw;"
+		<< "		for (int i = 0; i < ${CONDITION}; i++)"
+		<< "			t = t.zyx;"
+		<< "		res = t;"
+		<< "		break;"
+		<< "	}"
+		<< "	default:	res = coords.zyx;	break;"
+		<< "}");
+
+	makeSwitchCases("while_loop_in_switch", "Basic switch statement usage",
+		LineStream(1)
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	case 1:"
+		<< "	case 2:"
+		<< "	{"
+		<< "		highp vec3 t = coords.yzw;"
+		<< "		int i = 0;"
+		<< "		while (i < ${CONDITION})"
+		<< "		{"
+		<< "			t = t.zyx;"
+		<< "			i += 1;"
+		<< "		}"
+		<< "		res = t;"
+		<< "		break;"
+		<< "	}"
+		<< "	default:	res = coords.zyx;	break;"
+		<< "}");
+
+	makeSwitchCases("do_while_loop_in_switch", "Basic switch statement usage",
+		LineStream(1)
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	case 1:"
+		<< "	case 2:"
+		<< "	{"
+		<< "		highp vec3 t = coords.yzw;"
+		<< "		int i = 0;"
+		<< "		do"
+		<< "		{"
+		<< "			t = t.zyx;"
+		<< "			i += 1;"
+		<< "		} while (i < ${CONDITION});"
+		<< "		res = t;"
+		<< "		break;"
+		<< "	}"
+		<< "	default:	res = coords.zyx;	break;"
+		<< "}");
+
+	makeSwitchCases("switch_in_switch", "Basic switch statement usage",
+		LineStream(1)
+		<< "switch (${CONDITION})"
+		<< "{"
+		<< "	case 0:		res = coords.xyz;	break;"
+		<< "	case 1:"
+		<< "	case 2:"
+		<< "		switch (${CONDITION} - 1)"
+		<< "		{"
+		<< "			case 0:		res = coords.wzy;	break;"
+		<< "			case 1:		res = coords.yzw;	break;"
+		<< "		}"
+		<< "		break;"
+		<< "	default:	res = coords.zyx;	break;"
+		<< "}");
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createSwitchTests (tcu::TestContext& testCtx)
+{
+	return new ShaderSwitchTests(testCtx);
+}
+
+} // sr
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderSwitchTests.hpp b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderSwitchTests.hpp
new file mode 100644
index 0000000..8845681
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/shaderrender/vktShaderRenderSwitchTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTSHADERRENDERSWITCHTESTS_HPP
+#define _VKTSHADERRENDERSWITCHTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Shader switch statement tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace sr
+{
+
+tcu::TestCaseGroup*	createSwitchTests	(tcu::TestContext& testCtx);
+
+} // sr
+} // vkt
+
+#endif // _VKTSHADERRENDERSWITCHTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/spirv_assembly/CMakeLists.txt b/external/vulkancts/modules/vulkan/spirv_assembly/CMakeLists.txt
new file mode 100644
index 0000000..22eff93
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/spirv_assembly/CMakeLists.txt
@@ -0,0 +1,23 @@
+# SPIR-V assembly tests
+
+include_directories(..)
+
+set(DEQP_VK_SPIRV_ASSEMBLY_SRCS
+	vktSpvAsmComputeShaderCase.cpp
+	vktSpvAsmComputeShaderCase.hpp
+	vktSpvAsmComputeShaderTestUtil.cpp
+	vktSpvAsmComputeShaderTestUtil.hpp
+	vktSpvAsmInstructionTests.cpp
+	vktSpvAsmInstructionTests.hpp
+	vktSpvAsmTests.cpp
+	vktSpvAsmTests.hpp
+	)
+
+set(DEQP_VK_SPIRV_ASSEMBLY_LIBS
+	deqp-vk-common
+	tcutil
+	vkutil
+	)
+
+add_library(deqp-vk-spirv-assembly STATIC ${DEQP_VK_SPIRV_ASSEMBLY_SRCS})
+target_link_libraries(deqp-vk-spirv-assembly ${DEQP_VK_SPIRV_ASSEMBLY_LIBS})
diff --git a/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderCase.cpp b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderCase.cpp
new file mode 100644
index 0000000..19cecc4
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderCase.cpp
@@ -0,0 +1,434 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Test Case Skeleton Based on Compute Shaders
+ *//*--------------------------------------------------------------------*/
+
+#include "vktSpvAsmComputeShaderCase.hpp"
+
+#include "deSharedPtr.hpp"
+
+#include "vkBuilderUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkRefUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkTypeUtil.hpp"
+
+namespace
+{
+
+using namespace vk;
+using std::vector;
+
+typedef vkt::SpirVAssembly::AllocationMp			AllocationMp;
+typedef vkt::SpirVAssembly::AllocationSp			AllocationSp;
+
+typedef Unique<VkBuffer>							BufferHandleUp;
+typedef de::SharedPtr<BufferHandleUp>				BufferHandleSp;
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Create storage buffer, allocate and bind memory for the buffer
+ *
+ * The memory is created as host visible and passed back as a vk::Allocation
+ * instance via outMemory.
+ *//*--------------------------------------------------------------------*/
+Move<VkBuffer> createBufferAndBindMemory (const DeviceInterface& vkdi, const VkDevice& device, Allocator& allocator, size_t numBytes, AllocationMp* outMemory)
+{
+	const VkBufferCreateInfo bufferCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,	// sType
+		DE_NULL,								// pNext
+		0u,										// flags
+		numBytes,								// size
+		VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,		// usage
+		VK_SHARING_MODE_EXCLUSIVE,				// sharingMode
+		0u,										// queueFamilyCount
+		DE_NULL,								// pQueueFamilyIndices
+	};
+
+	Move<VkBuffer>				buffer			(createBuffer(vkdi, device, &bufferCreateInfo));
+	const VkMemoryRequirements	requirements	= getBufferMemoryRequirements(vkdi, device, *buffer);
+	AllocationMp				bufferMemory	= allocator.allocate(requirements, MemoryRequirement::HostVisible);
+
+	VK_CHECK(vkdi.bindBufferMemory(device, *buffer, bufferMemory->getMemory(), bufferMemory->getOffset()));
+	*outMemory = bufferMemory;
+
+	return buffer;
+}
+
+void setMemory (const DeviceInterface& vkdi, const VkDevice& device, Allocation* destAlloc, size_t numBytes, const void* data)
+{
+	void* const hostPtr = destAlloc->getHostPtr();
+
+	deMemcpy((deUint8*)hostPtr, data, numBytes);
+	flushMappedMemoryRange(vkdi, device, destAlloc->getMemory(), destAlloc->getOffset(), numBytes);
+}
+
+void fillMemoryWithValue (const DeviceInterface& vkdi, const VkDevice& device, Allocation* destAlloc, size_t numBytes, deUint8 value)
+{
+	void* const hostPtr = destAlloc->getHostPtr();
+
+	deMemset((deUint8*)hostPtr, value, numBytes);
+	flushMappedMemoryRange(vkdi, device, destAlloc->getMemory(), destAlloc->getOffset(), numBytes);
+}
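+
+// Typical usage of the three helpers above (an illustrative sketch only; the actual call
+// sites are in SpvAsmComputeShaderInstance::iterate() further down, and numBytes/inputData
+// are placeholder names):
+//
+//     AllocationMp   alloc;
+//     Move<VkBuffer> buffer = createBufferAndBindMemory(vkdi, device, allocator, numBytes, &alloc);
+//     setMemory(vkdi, device, &*alloc, numBytes, inputData);       // upload input contents
+//     fillMemoryWithValue(vkdi, device, &*alloc, numBytes, 0xff);  // or clear an output buffer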
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Create a descriptor set layout with numBindings descriptors
+ *
+ * All descriptors are created for shader storage buffer objects and
+ * compute pipeline.
+ *//*--------------------------------------------------------------------*/
+Move<VkDescriptorSetLayout> createDescriptorSetLayout (const DeviceInterface& vkdi, const VkDevice& device, size_t numBindings)
+{
+	DescriptorSetLayoutBuilder builder;
+
+	for (size_t bindingNdx = 0; bindingNdx < numBindings; ++bindingNdx)
+		builder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT);
+
+	return builder.build(vkdi, device);
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Create a pipeline layout with one descriptor set
+ *//*--------------------------------------------------------------------*/
+Move<VkPipelineLayout> createPipelineLayout (const DeviceInterface& vkdi, const VkDevice& device, VkDescriptorSetLayout descriptorSetLayout)
+{
+	const VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,	// sType
+		DE_NULL,										// pNext
+		(VkPipelineLayoutCreateFlags)0,
+		1u,												// descriptorSetCount
+		&descriptorSetLayout,							// pSetLayouts
+		0u,												// pushConstantRangeCount
+		DE_NULL,										// pPushConstantRanges
+	};
+
+	return createPipelineLayout(vkdi, device, &pipelineLayoutCreateInfo);
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Create a one-time descriptor pool for one descriptor set
+ *
+ * The pool supports numDescriptors storage buffer descriptors.
+ *//*--------------------------------------------------------------------*/
+inline Move<VkDescriptorPool> createDescriptorPool (const DeviceInterface& vkdi, const VkDevice& device, deUint32 numDescriptors)
+{
+	return DescriptorPoolBuilder()
+		.addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, numDescriptors)
+		.build(vkdi, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, /* maxSets = */ 1);
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Create a descriptor set
+ *
+ * The descriptor set's layout should contain numViews descriptors.
+ * All the descriptors represent buffer views, and they are bound
+ * sequentially to binding points starting from 0.
+ *//*--------------------------------------------------------------------*/
+Move<VkDescriptorSet> createDescriptorSet (const DeviceInterface& vkdi, const VkDevice& device, VkDescriptorPool pool, VkDescriptorSetLayout layout, size_t numViews, const vector<VkDescriptorBufferInfo>& descriptorInfos)
+{
+	const VkDescriptorSetAllocateInfo	allocInfo	=
+	{
+		VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+		DE_NULL,
+		pool,
+		1u,
+		&layout
+	};
+
+	Move<VkDescriptorSet>				descriptorSet	= allocateDescriptorSet(vkdi, device, &allocInfo);
+	DescriptorSetUpdateBuilder			builder;
+
+	for (deUint32 descriptorNdx = 0; descriptorNdx < numViews; ++descriptorNdx)
+		builder.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(descriptorNdx), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &descriptorInfos[descriptorNdx]);
+	builder.update(vkdi, device);
+
+	return descriptorSet;
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Create a compute pipeline based on the given shader
+ *//*--------------------------------------------------------------------*/
+Move<VkPipeline> createComputePipeline (const DeviceInterface& vkdi, const VkDevice& device, VkPipelineLayout pipelineLayout, VkShaderModule shader, const char* entryPoint, const vector<deUint32>& specConstants)
+{
+	const deUint32							numSpecConstants				= (deUint32)specConstants.size();
+	vector<VkSpecializationMapEntry>		entries;
+	VkSpecializationInfo					specInfo;
+
+	if (numSpecConstants != 0)
+	{
+		entries.resize(numSpecConstants);
+
+		for (deUint32 ndx = 0; ndx < numSpecConstants; ++ndx)
+		{
+			entries[ndx].constantID	= ndx;
+			entries[ndx].offset		= ndx * (deUint32)sizeof(deUint32);
+			entries[ndx].size		= sizeof(deUint32);
+		}
+
+		specInfo.mapEntryCount		= numSpecConstants;
+		specInfo.pMapEntries		= &entries[0];
+		specInfo.dataSize			= numSpecConstants * sizeof(deUint32);
+		specInfo.pData				= specConstants.data();
+	}
+
+	const VkPipelineShaderStageCreateInfo	pipelineShaderStageCreateInfo	=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// sType
+		DE_NULL,												// pNext
+		(VkPipelineShaderStageCreateFlags)0,					// flags
+		VK_SHADER_STAGE_COMPUTE_BIT,							// stage
+		shader,													// module
+		entryPoint,												// pName
+		(numSpecConstants == 0) ? DE_NULL : &specInfo,			// pSpecializationInfo
+	};
+	const VkComputePipelineCreateInfo		pipelineCreateInfo				=
+	{
+		VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,			// sType
+		DE_NULL,												// pNext
+		(VkPipelineCreateFlags)0,
+		pipelineShaderStageCreateInfo,							// cs
+		pipelineLayout,											// layout
+		(VkPipeline)0,											// basePipelineHandle
+		0u,														// basePipelineIndex
+	};
+
+	return createComputePipeline(vkdi, device, (VkPipelineCache)0u, &pipelineCreateInfo);
+}
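+
+// Illustration of the specialization-constant mapping above (a sketch, not used by the tests):
+// specConstants = { 3u, 7u } yields entries (constantID 0, offset 0, size 4) and
+// (constantID 1, offset 4, size 4), which a module can consume via, e.g.
+//
+//     OpDecorate %c0 SpecId 0
+//     OpDecorate %c1 SpecId 1
+//     %c0 = OpSpecConstant %u32 1
+//     %c1 = OpSpecConstant %u32 1
+//
+// where %c0/%c1 and the default literals are hypothetical.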
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Create a command pool
+ *
+ * The created command pool is designated for use on the queue type
+ * represented by the given queueFamilyIndex.
+ *//*--------------------------------------------------------------------*/
+Move<VkCommandPool> createCommandPool (const DeviceInterface& vkdi, VkDevice device, deUint32 queueFamilyIndex)
+{
+	const VkCommandPoolCreateInfo cmdPoolCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,	// sType
+		DE_NULL,									// pNext
+		0u,											// flags
+		queueFamilyIndex,							// queueFamilyIndex
+	};
+
+	return createCommandPool(vkdi, device, &cmdPoolCreateInfo);
+}
+
+} // anonymous
+
+namespace vkt
+{
+namespace SpirVAssembly
+{
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Test instance for compute pipeline
+ *
+ * The compute shader is specified in SPIR-V assembly and may access at most
+ * MAX_NUM_INPUT_BUFFERS input storage buffers and MAX_NUM_OUTPUT_BUFFERS
+ * output storage buffers. The shader source and input/output data are given
+ * in a ComputeShaderSpec object.
+ *
+ * This instance runs the given compute shader by feeding it the data from the
+ * input buffers and compares the data in the output buffers with the
+ * expected values.
+ *//*--------------------------------------------------------------------*/
+class SpvAsmComputeShaderInstance : public TestInstance
+{
+public:
+								SpvAsmComputeShaderInstance	(Context& ctx, const ComputeShaderSpec& spec);
+	tcu::TestStatus				iterate						(void);
+
+private:
+	const ComputeShaderSpec&	m_shaderSpec;
+};
+
+// SpvAsmComputeShaderCase implementations
+
+SpvAsmComputeShaderCase::SpvAsmComputeShaderCase (tcu::TestContext& testCtx, const char* name, const char* description, const ComputeShaderSpec& spec)
+	: TestCase		(testCtx, name, description)
+	, m_shaderSpec	(spec)
+{
+}
+
+void SpvAsmComputeShaderCase::initPrograms (SourceCollections& programCollection) const
+{
+	programCollection.spirvAsmSources.add("compute") << m_shaderSpec.assembly.c_str();
+}
+
+TestInstance* SpvAsmComputeShaderCase::createInstance (Context& ctx) const
+{
+	return new SpvAsmComputeShaderInstance(ctx, m_shaderSpec);
+}
+
+// SpvAsmComputeShaderInstance implementations
+
+SpvAsmComputeShaderInstance::SpvAsmComputeShaderInstance (Context& ctx, const ComputeShaderSpec& spec)
+	: TestInstance	(ctx)
+	, m_shaderSpec	(spec)
+{
+}
+
+tcu::TestStatus SpvAsmComputeShaderInstance::iterate (void)
+{
+	const DeviceInterface&				vkdi				= m_context.getDeviceInterface();
+	const VkDevice&						device				= m_context.getDevice();
+	Allocator&							allocator			= m_context.getDefaultAllocator();
+
+	vector<AllocationSp>				inputAllocs;
+	vector<AllocationSp>				outputAllocs;
+	vector<BufferHandleSp>				inputBuffers;
+	vector<BufferHandleSp>				outputBuffers;
+	vector<VkDescriptorBufferInfo>		descriptorInfos;
+
+	DE_ASSERT(!m_shaderSpec.outputs.empty());
+	const size_t						numBuffers			= m_shaderSpec.inputs.size() + m_shaderSpec.outputs.size();
+
+	// Create buffer object, allocate storage, and create view for all input/output buffers.
+
+	for (size_t inputNdx = 0; inputNdx < m_shaderSpec.inputs.size(); ++inputNdx)
+	{
+		AllocationMp		alloc;
+		const BufferSp&		input		= m_shaderSpec.inputs[inputNdx];
+		const size_t		numBytes	= input->getNumBytes();
+		BufferHandleUp*		buffer		= new BufferHandleUp(createBufferAndBindMemory(vkdi, device, allocator, numBytes, &alloc));
+
+		setMemory(vkdi, device, &*alloc, numBytes, input->data());
+		descriptorInfos.push_back(vk::makeDescriptorBufferInfo(**buffer, 0u, numBytes));
+		inputBuffers.push_back(BufferHandleSp(buffer));
+		inputAllocs.push_back(de::SharedPtr<Allocation>(alloc.release()));
+	}
+
+	for (size_t outputNdx = 0; outputNdx < m_shaderSpec.outputs.size(); ++outputNdx)
+	{
+		AllocationMp		alloc;
+		const BufferSp&		output		= m_shaderSpec.outputs[outputNdx];
+		const size_t		numBytes	= output->getNumBytes();
+		BufferHandleUp*		buffer		= new BufferHandleUp(createBufferAndBindMemory(vkdi, device, allocator, numBytes, &alloc));
+
+		fillMemoryWithValue(vkdi, device, &*alloc, numBytes, 0xff);
+		descriptorInfos.push_back(vk::makeDescriptorBufferInfo(**buffer, 0u, numBytes));
+		outputBuffers.push_back(BufferHandleSp(buffer));
+		outputAllocs.push_back(de::SharedPtr<Allocation>(alloc.release()));
+	}
+
+	// Create layouts and descriptor set.
+
+	Unique<VkDescriptorSetLayout>		descriptorSetLayout	(createDescriptorSetLayout(vkdi, device, numBuffers));
+	Unique<VkPipelineLayout>			pipelineLayout		(createPipelineLayout(vkdi, device, *descriptorSetLayout));
+	Unique<VkDescriptorPool>			descriptorPool		(createDescriptorPool(vkdi, device, (deUint32)numBuffers));
+	Unique<VkDescriptorSet>				descriptorSet		(createDescriptorSet(vkdi, device, *descriptorPool, *descriptorSetLayout, numBuffers, descriptorInfos));
+
+	// Create compute shader and pipeline.
+
+	const ProgramBinary&				binary				= m_context.getBinaryCollection().get("compute");
+	Unique<VkShaderModule>				module				(createShaderModule(vkdi, device, binary, (VkShaderModuleCreateFlags)0u));
+
+	Unique<VkPipeline>					computePipeline		(createComputePipeline(vkdi, device, *pipelineLayout, *module, m_shaderSpec.entryPoint.c_str(), m_shaderSpec.specConstants));
+
+	// Create command buffer and record commands
+
+	const Unique<VkCommandPool>			cmdPool				(createCommandPool(vkdi, device, m_context.getUniversalQueueFamilyIndex()));
+	const VkCommandBufferAllocateInfo	cmdBufferCreateInfo	=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// sType
+		DE_NULL,										// pNext
+		*cmdPool,										// cmdPool
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// level
+		1u												// count
+	};
+
+	Unique<VkCommandBuffer>				cmdBuffer			(allocateCommandBuffer(vkdi, device, &cmdBufferCreateInfo));
+
+	const VkCommandBufferBeginInfo		cmdBufferBeginInfo	=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// sType
+		DE_NULL,										// pNext
+		VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	const tcu::IVec3&				numWorkGroups		= m_shaderSpec.numWorkGroups;
+
+	VK_CHECK(vkdi.beginCommandBuffer(*cmdBuffer, &cmdBufferBeginInfo));
+	vkdi.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *computePipeline);
+	vkdi.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0, 1, &descriptorSet.get(), 0, DE_NULL);
+	vkdi.cmdDispatch(*cmdBuffer, numWorkGroups.x(), numWorkGroups.y(), numWorkGroups.z());
+	VK_CHECK(vkdi.endCommandBuffer(*cmdBuffer));
+
+	// Create fence and run.
+
+	const VkFenceCreateInfo			fenceCreateInfo		=
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,		// sType
+		DE_NULL,									// pNext
+		0u											// flags
+	};
+	const Unique<VkFence>			cmdCompleteFence	(createFence(vkdi, device, &fenceCreateInfo));
+	const deUint64					infiniteTimeout		= ~(deUint64)0u;
+	const VkSubmitInfo				submitInfo			=
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,
+		DE_NULL,
+		0u,
+		(const VkSemaphore*)DE_NULL,
+		(const VkPipelineStageFlags*)DE_NULL,
+		1u,
+		&cmdBuffer.get(),
+		0u,
+		(const VkSemaphore*)DE_NULL,
+	};
+
+	VK_CHECK(vkdi.queueSubmit(m_context.getUniversalQueue(), 1, &submitInfo, *cmdCompleteFence));
+	VK_CHECK(vkdi.waitForFences(device, 1, &cmdCompleteFence.get(), 0u, infiniteTimeout)); // \note: timeout is failure
+
+	// Check output.
+	if (m_shaderSpec.verifyIO)
+	{
+		if (!(*m_shaderSpec.verifyIO)(m_shaderSpec.inputs, outputAllocs, m_shaderSpec.outputs))
+			return tcu::TestStatus::fail("Output doesn't match expected");
+	}
+	else
+	{
+		for (size_t outputNdx = 0; outputNdx < m_shaderSpec.outputs.size(); ++outputNdx)
+		{
+			const BufferSp& expectedOutput = m_shaderSpec.outputs[outputNdx];
+			if (deMemCmp(expectedOutput->data(), outputAllocs[outputNdx]->getHostPtr(), expectedOutput->getNumBytes()))
+				return tcu::TestStatus::fail("Output doesn't match expected");
+		}
+	}
+
+	return tcu::TestStatus::pass("Output matches expected");
+}
+
+} // SpirVAssembly
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderCase.hpp b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderCase.hpp
new file mode 100644
index 0000000..4051fad
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderCase.hpp
@@ -0,0 +1,61 @@
+#ifndef _VKTSPVASMCOMPUTESHADERCASE_HPP
+#define _VKTSPVASMCOMPUTESHADERCASE_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Test Case Skeleton Based on Compute Shaders
+ *//*--------------------------------------------------------------------*/
+
+#include "vkPrograms.hpp"
+#include "vktTestCase.hpp"
+
+#include "vktSpvAsmComputeShaderTestUtil.hpp"
+
+namespace vkt
+{
+namespace SpirVAssembly
+{
+
+class SpvAsmComputeShaderCase : public TestCase
+{
+public:
+						SpvAsmComputeShaderCase	(tcu::TestContext& testCtx, const char* name, const char* description, const ComputeShaderSpec& spec);
+	void				initPrograms			(vk::SourceCollections& programCollection) const;
+	TestInstance*		createInstance			(Context& ctx) const;
+
+private:
+	ComputeShaderSpec	m_shaderSpec;
+};
+
+} // SpirVAssembly
+} // vkt
+
+#endif // _VKTSPVASMCOMPUTESHADERCASE_HPP
diff --git a/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderTestUtil.cpp b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderTestUtil.cpp
new file mode 100644
index 0000000..7e9a1d3
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderTestUtil.cpp
@@ -0,0 +1,37 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Compute Shader Based Test Case Utility Structs/Functions
+ *//*--------------------------------------------------------------------*/
+
+#include "vktSpvAsmComputeShaderTestUtil.hpp"
+
+DE_EMPTY_CPP_FILE
diff --git a/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderTestUtil.hpp b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderTestUtil.hpp
new file mode 100644
index 0000000..d4703b9
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmComputeShaderTestUtil.hpp
@@ -0,0 +1,122 @@
+#ifndef _VKTSPVASMCOMPUTESHADERTESTUTIL_HPP
+#define _VKTSPVASMCOMPUTESHADERTESTUTIL_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Compute Shader Based Test Case Utility Structs/Functions
+ *//*--------------------------------------------------------------------*/
+
+#include "deDefs.h"
+#include "deSharedPtr.hpp"
+#include "tcuVector.hpp"
+#include "vkMemUtil.hpp"
+
+#include <string>
+#include <vector>
+
+namespace vkt
+{
+namespace SpirVAssembly
+{
+
+typedef de::MovePtr<vk::Allocation>			AllocationMp;
+typedef de::SharedPtr<vk::Allocation>		AllocationSp;
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Abstract class for an input/output storage buffer object
+ *//*--------------------------------------------------------------------*/
+class BufferInterface
+{
+public:
+	virtual				~BufferInterface	(void)				{}
+
+	virtual size_t		getNumBytes			(void) const = 0;
+	virtual const void*	data				(void) const = 0;
+};
+
+typedef de::SharedPtr<BufferInterface>		BufferSp;
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Concrete class for an input/output storage buffer object
+ *//*--------------------------------------------------------------------*/
+template<typename E>
+class Buffer : public BufferInterface
+{
+public:
+						Buffer				(const std::vector<E>& elements)
+							: m_elements(elements)
+						{}
+
+	size_t				getNumBytes			(void) const		{ return m_elements.size() * sizeof(E); }
+	const void*			data				(void) const		{ return &m_elements.front(); }
+
+private:
+	std::vector<E>		m_elements;
+};
+
+DE_STATIC_ASSERT(sizeof(tcu::Vec4) == 4 * sizeof(float));
+
+typedef Buffer<float>		Float32Buffer;
+typedef Buffer<deInt32>		Int32Buffer;
+typedef Buffer<tcu::Vec4>	Vec4Buffer;
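+// For example, Float32Buffer(std::vector<float>(4, 1.0f)) wraps 16 bytes
+// (getNumBytes() == 4 * sizeof(float)); Int32Buffer and Vec4Buffer work the same way.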
+
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Specification for a compute shader.
+ *
+ * This struct bundles SPIR-V assembly code, input and expected output
+ * together.
+ *//*--------------------------------------------------------------------*/
+struct ComputeShaderSpec
+{
+	std::string				assembly;
+	std::string				entryPoint;
+	std::vector<BufferSp>	inputs;
+	std::vector<BufferSp>	outputs;
+	tcu::IVec3				numWorkGroups;
+	std::vector<deUint32>	specConstants;
+	// If null, a default verification will be performed by comparing the memory pointed to by outputAllocations
+	// and the contents of expectedOutputs. Otherwise the function pointed to by verifyIO will be called.
+	// If it returns true, the test case is assumed to have passed; if it returns false, the test
+	// case is assumed to have failed.
+	bool					(*verifyIO)(const std::vector<BufferSp>& inputs, const std::vector<AllocationSp>& outputAllocations, const std::vector<BufferSp>& expectedOutputs);
+
+							ComputeShaderSpec()
+								: entryPoint	("main")
+								, verifyIO		(DE_NULL)
+							{}
+
+};
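+
+// A minimal usage sketch (illustrative only; the assembly string and element counts are
+// placeholders, real cases live in vktSpvAsmInstructionTests.cpp):
+//
+//     ComputeShaderSpec spec;
+//     spec.assembly      = "...complete SPIR-V assembly module...";
+//     spec.inputs.push_back(BufferSp(new Float32Buffer(std::vector<float>(64, 1.0f))));
+//     spec.outputs.push_back(BufferSp(new Float32Buffer(std::vector<float>(64, -1.0f))));
+//     spec.numWorkGroups = tcu::IVec3(64, 1, 1);
+//     // verifyIO left as DE_NULL: outputs are byte-compared against the expected buffers.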
+
+} // SpirVAssembly
+} // vkt
+
+#endif // _VKTSPVASMCOMPUTESHADERTESTUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmInstructionTests.cpp b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmInstructionTests.cpp
new file mode 100644
index 0000000..16b01fc
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmInstructionTests.cpp
@@ -0,0 +1,7735 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief SPIR-V Assembly Tests for Instructions (special opcode/operand)
+ *//*--------------------------------------------------------------------*/
+
+#include "vktSpvAsmInstructionTests.hpp"
+
+#include "tcuCommandLine.hpp"
+#include "tcuFormatUtil.hpp"
+#include "tcuRGBA.hpp"
+#include "tcuStringTemplate.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuVectorUtil.hpp"
+
+#include "vkDefs.hpp"
+#include "vkDeviceUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkPrograms.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkStrUtil.hpp"
+#include "vkTypeUtil.hpp"
+
+#include "deRandom.hpp"
+#include "deStringUtil.hpp"
+#include "deUniquePtr.hpp"
+
+#include "vktSpvAsmComputeShaderCase.hpp"
+#include "vktSpvAsmComputeShaderTestUtil.hpp"
+#include "vktTestCaseUtil.hpp"
+
+#include <cmath>
+#include <limits>
+#include <map>
+#include <string>
+#include <sstream>
+
+namespace vkt
+{
+namespace SpirVAssembly
+{
+
+namespace
+{
+
+using namespace vk;
+using std::map;
+using std::string;
+using std::vector;
+using tcu::IVec3;
+using tcu::IVec4;
+using tcu::RGBA;
+using tcu::TestLog;
+using tcu::TestStatus;
+using tcu::Vec4;
+using de::UniquePtr;
+using tcu::StringTemplate;
+
+typedef Unique<VkShaderModule>			ModuleHandleUp;
+typedef de::SharedPtr<ModuleHandleUp>	ModuleHandleSp;
+
+template<typename T>	T			randomScalar	(de::Random& rnd, T minValue, T maxValue);
+template<> inline		float		randomScalar	(de::Random& rnd, float minValue, float maxValue)		{ return rnd.getFloat(minValue, maxValue);	}
+template<> inline		deInt32		randomScalar	(de::Random& rnd, deInt32 minValue, deInt32 maxValue)	{ return rnd.getInt(minValue, maxValue);	}
+
+template<typename T>
+static void fillRandomScalars (de::Random& rnd, T minValue, T maxValue, void* dst, int numValues, int offset = 0)
+{
+	T* const typedPtr = (T*)dst;
+	for (int ndx = 0; ndx < numValues; ndx++)
+		typedPtr[offset + ndx] = randomScalar<T>(rnd, minValue, maxValue);
+}
+
+struct CaseParameter
+{
+	const char*		name;
+	string			param;
+
+	CaseParameter	(const char* case_, const string& param_) : name(case_), param(param_) {}
+};
+
+// Assembly code used for testing OpNop, OpConstant{Null|Composite}, Op[No]Line, OpSource[Continued], OpSourceExtension, OpUndef is based on GLSL source code:
+//
+// #version 430
+//
+// layout(std140, set = 0, binding = 0) readonly buffer Input {
+//   float elements[];
+// } input_data;
+// layout(std140, set = 0, binding = 1) writeonly buffer Output {
+//   float elements[];
+// } output_data;
+//
+// layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;
+//
+// void main() {
+//   uint x = gl_GlobalInvocationID.x;
+//   output_data.elements[x] = -input_data.elements[x];
+// }
+
+static const char* const s_ShaderPreamble =
+	"OpCapability Shader\n"
+	"OpMemoryModel Logical GLSL450\n"
+	"OpEntryPoint GLCompute %main \"main\" %id\n"
+	"OpExecutionMode %main LocalSize 1 1 1\n";
+
+static const char* const s_CommonTypes =
+	"%bool      = OpTypeBool\n"
+	"%void      = OpTypeVoid\n"
+	"%voidf     = OpTypeFunction %void\n"
+	"%u32       = OpTypeInt 32 0\n"
+	"%i32       = OpTypeInt 32 1\n"
+	"%f32       = OpTypeFloat 32\n"
+	"%uvec3     = OpTypeVector %u32 3\n"
+	"%fvec3     = OpTypeVector %f32 3\n"
+	"%uvec3ptr  = OpTypePointer Input %uvec3\n"
+	"%f32ptr    = OpTypePointer Uniform %f32\n"
+	"%f32arr    = OpTypeRuntimeArray %f32\n";
+
+// Declares two uniform variables (indata, outdata) of type "struct { float[] }". Depends on type "f32arr" (for "float[]").
+static const char* const s_InputOutputBuffer =
+	"%inbuf     = OpTypeStruct %f32arr\n"
+	"%inbufptr  = OpTypePointer Uniform %inbuf\n"
+	"%indata    = OpVariable %inbufptr Uniform\n"
+	"%outbuf    = OpTypeStruct %f32arr\n"
+	"%outbufptr = OpTypePointer Uniform %outbuf\n"
+	"%outdata   = OpVariable %outbufptr Uniform\n";
+
+// Declares buffer type and layout for uniform variables indata and outdata. Both of them are SSBOs bound to descriptor set 0.
+// indata is at binding point 0, while outdata is at 1.
+static const char* const s_InputOutputBufferTraits =
+	"OpDecorate %inbuf BufferBlock\n"
+	"OpDecorate %indata DescriptorSet 0\n"
+	"OpDecorate %indata Binding 0\n"
+	"OpDecorate %outbuf BufferBlock\n"
+	"OpDecorate %outdata DescriptorSet 0\n"
+	"OpDecorate %outdata Binding 1\n"
+	"OpDecorate %f32arr ArrayStride 4\n"
+	"OpMemberDecorate %inbuf 0 Offset 0\n"
+	"OpMemberDecorate %outbuf 0 Offset 0\n";
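+
+// The fragments above are concatenated per test into a complete module, e.g.
+// spec.assembly = string(s_ShaderPreamble) + <names/decorations> + string(s_InputOutputBufferTraits)
+//               + string(s_CommonTypes) + string(s_InputOutputBuffer) + <per-test function body>,
+// as done in createOpNopGroup() below.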
+
+tcu::TestCaseGroup* createOpNopGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opnop", "Test the OpNop instruction"));
+	ComputeShaderSpec				spec;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					positiveFloats	(numElements, 0);
+	vector<float>					negativeFloats	(numElements, 0);
+
+	fillRandomScalars(rnd, 1.f, 100.f, &positiveFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		negativeFloats[ndx] = -positiveFloats[ndx];
+
+	spec.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpSource GLSL 430\n"
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes)
+
+		+ string(s_InputOutputBuffer) +
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+
+		"             OpNop\n" // Inside a function body
+
+		"%inloc     = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval     = OpLoad %f32 %inloc\n"
+		"%neg       = OpFNegate %f32 %inval\n"
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"             OpStore %outloc %neg\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n";
+	spec.inputs.push_back(BufferSp(new Float32Buffer(positiveFloats)));
+	spec.outputs.push_back(BufferSp(new Float32Buffer(negativeFloats)));
+	spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "all", "OpNop appearing at different places", spec));
+
+	return group.release();
+}
+
+tcu::TestCaseGroup* createOpLineGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opline", "Test the OpLine instruction"));
+	ComputeShaderSpec				spec;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					positiveFloats	(numElements, 0);
+	vector<float>					negativeFloats	(numElements, 0);
+
+	fillRandomScalars(rnd, 1.f, 100.f, &positiveFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		negativeFloats[ndx] = -positiveFloats[ndx];
+
+	spec.assembly =
+		string(s_ShaderPreamble) +
+
+		"%fname1 = OpString \"negateInputs.comp\"\n"
+		"%fname2 = OpString \"negateInputs\"\n"
+
+		"OpSource GLSL 430\n"
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) +
+
+		"OpLine %fname1 0 0\n" // At the earliest possible position
+
+		+ string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"OpLine %fname1 0 1\n" // Multiple OpLines in sequence
+		"OpLine %fname2 1 0\n" // Different filenames
+		"OpLine %fname1 1000 100000\n"
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		"OpLine %fname1 1 1\n" // Before a function
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+
+		"OpLine %fname1 1 1\n" // In a function
+
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc     = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval     = OpLoad %f32 %inloc\n"
+		"%neg       = OpFNegate %f32 %inval\n"
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"             OpStore %outloc %neg\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n";
+	spec.inputs.push_back(BufferSp(new Float32Buffer(positiveFloats)));
+	spec.outputs.push_back(BufferSp(new Float32Buffer(negativeFloats)));
+	spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "all", "OpLine appearing at different places", spec));
+
+	return group.release();
+}
+
+tcu::TestCaseGroup* createOpNoLineGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opnoline", "Test the OpNoLine instruction"));
+	ComputeShaderSpec				spec;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					positiveFloats	(numElements, 0);
+	vector<float>					negativeFloats	(numElements, 0);
+
+	fillRandomScalars(rnd, 1.f, 100.f, &positiveFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		negativeFloats[ndx] = -positiveFloats[ndx];
+
+	spec.assembly =
+		string(s_ShaderPreamble) +
+
+		"%fname = OpString \"negateInputs.comp\"\n"
+
+		"OpSource GLSL 430\n"
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) +
+
+		"OpNoLine\n" // At the earliest possible position, without preceding OpLine
+
+		+ string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"OpLine %fname 0 1\n"
+		"OpNoLine\n" // Immediately following a preceding OpLine
+
+		"OpLine %fname 1000 1\n"
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		"OpNoLine\n" // Contents after the previous OpLine
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+
+		"OpNoLine\n" // Multiple OpNoLine
+		"OpNoLine\n"
+		"OpNoLine\n"
+
+		"%inloc     = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval     = OpLoad %f32 %inloc\n"
+		"%neg       = OpFNegate %f32 %inval\n"
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"             OpStore %outloc %neg\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n";
+	spec.inputs.push_back(BufferSp(new Float32Buffer(positiveFloats)));
+	spec.outputs.push_back(BufferSp(new Float32Buffer(negativeFloats)));
+	spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "all", "OpNoLine appearing at different places", spec));
+
+	return group.release();
+}
+
+// Compare instruction for the contraction compute case.
+// Returns true if the output is what is expected from the test case.
+bool compareNoContractCase(const std::vector<BufferSp>&, const vector<AllocationSp>& outputAllocs, const std::vector<BufferSp>& expectedOutputs)
+{
+	if (outputAllocs.size() != 1)
+		return false;
+
+	// We really just need this for size because we are not comparing the exact values.
+	const BufferSp&	expectedOutput	= expectedOutputs[0];
+	const float*	outputAsFloat	= static_cast<const float*>(outputAllocs[0]->getHostPtr());
+
+	for (size_t i = 0; i < expectedOutput->getNumBytes() / sizeof(float); ++i)
+	{
+		if (outputAsFloat[i] != 0.f &&
+			outputAsFloat[i] != -ldexp(1, -24))
+		{
+			return false;
+		}
+	}
+
+	return true;
+}
+
+tcu::TestCaseGroup* createNoContractionGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "nocontraction", "Test the NoContraction decoration"));
+	vector<CaseParameter>			cases;
+	const int						numElements		= 100;
+	vector<float>					inputFloats1	(numElements, 0);
+	vector<float>					inputFloats2	(numElements, 0);
+	vector<float>					outputFloats	(numElements, 0);
+	const StringTemplate			shaderTemplate	(
+		string(s_ShaderPreamble) +
+
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		"${DECORATION}\n"
+
+		"OpDecorate %inbuf1 BufferBlock\n"
+		"OpDecorate %indata1 DescriptorSet 0\n"
+		"OpDecorate %indata1 Binding 0\n"
+		"OpDecorate %inbuf2 BufferBlock\n"
+		"OpDecorate %indata2 DescriptorSet 0\n"
+		"OpDecorate %indata2 Binding 1\n"
+		"OpDecorate %outbuf BufferBlock\n"
+		"OpDecorate %outdata DescriptorSet 0\n"
+		"OpDecorate %outdata Binding 2\n"
+		"OpDecorate %f32arr ArrayStride 4\n"
+		"OpMemberDecorate %inbuf1 0 Offset 0\n"
+		"OpMemberDecorate %inbuf2 0 Offset 0\n"
+		"OpMemberDecorate %outbuf 0 Offset 0\n"
+
+		+ string(s_CommonTypes) +
+
+		"%inbuf1     = OpTypeStruct %f32arr\n"
+		"%inbufptr1  = OpTypePointer Uniform %inbuf1\n"
+		"%indata1    = OpVariable %inbufptr1 Uniform\n"
+		"%inbuf2     = OpTypeStruct %f32arr\n"
+		"%inbufptr2  = OpTypePointer Uniform %inbuf2\n"
+		"%indata2    = OpVariable %inbufptr2 Uniform\n"
+		"%outbuf     = OpTypeStruct %f32arr\n"
+		"%outbufptr  = OpTypePointer Uniform %outbuf\n"
+		"%outdata    = OpVariable %outbufptr Uniform\n"
+
+		"%id         = OpVariable %uvec3ptr Input\n"
+		"%zero       = OpConstant %i32 0\n"
+		"%c_f_m1     = OpConstant %f32 -1.\n"
+
+		"%main       = OpFunction %void None %voidf\n"
+		"%label      = OpLabel\n"
+		"%idval      = OpLoad %uvec3 %id\n"
+		"%x          = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc1     = OpAccessChain %f32ptr %indata1 %zero %x\n"
+		"%inval1     = OpLoad %f32 %inloc1\n"
+		"%inloc2     = OpAccessChain %f32ptr %indata2 %zero %x\n"
+		"%inval2     = OpLoad %f32 %inloc2\n"
+		"%mul        = OpFMul %f32 %inval1 %inval2\n"
+		"%add        = OpFAdd %f32 %mul %c_f_m1\n"
+		"%outloc     = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"              OpStore %outloc %add\n"
+		"              OpReturn\n"
+		"              OpFunctionEnd\n");
+
+	cases.push_back(CaseParameter("multiplication",	"OpDecorate %mul NoContraction"));
+	cases.push_back(CaseParameter("addition",		"OpDecorate %add NoContraction"));
+	cases.push_back(CaseParameter("both",			"OpDecorate %mul NoContraction\nOpDecorate %add NoContraction"));
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+	{
+		inputFloats1[ndx]	= 1.f + std::ldexp(1.f, -23); // 1 + 2^-23.
+		inputFloats2[ndx]	= 1.f - std::ldexp(1.f, -23); // 1 - 2^-23.
+		// Result for (1 + 2^-23) * (1 - 2^-23) - 1. With NoContraction, the multiplication is
+		// performed separately and its result is rounded to either 1 or 0x1.fffffep-1 (1 - 2^-24),
+		// depending on the rounding mode, so the final result is 0.f or -0x1p-24.
+		// If the operations were combined into a fused multiply-add, the exact result would be
+		// -2^-46 (bit pattern 0xa8800000).
+		outputFloats[ndx]	= 0.f;
+	}
+
+	for (size_t caseNdx = 0; caseNdx < cases.size(); ++caseNdx)
+	{
+		map<string, string>		specializations;
+		ComputeShaderSpec		spec;
+
+		specializations["DECORATION"] = cases[caseNdx].param;
+		spec.assembly = shaderTemplate.specialize(specializations);
+		spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats1)));
+		spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats2)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(outputFloats)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+		// Check against the two possible answers based on rounding mode.
+		spec.verifyIO = &compareNoContractCase;
+
+		group->addChild(new SpvAsmComputeShaderCase(testCtx, cases[caseNdx].name, cases[caseNdx].name, spec));
+	}
+	return group.release();
+}
+
+bool compareFRem(const std::vector<BufferSp>&, const vector<AllocationSp>& outputAllocs, const std::vector<BufferSp>& expectedOutputs)
+{
+	if (outputAllocs.size() != 1)
+		return false;
+
+	const BufferSp& expectedOutput = expectedOutputs[0];
+	const float* expectedOutputAsFloat = static_cast<const float*>(expectedOutput->data());
+	const float* outputAsFloat = static_cast<const float*>(outputAllocs[0]->getHostPtr());
+
+	for (size_t idx = 0; idx < expectedOutput->getNumBytes() / sizeof(float); ++idx)
+	{
+		const float f0 = expectedOutputAsFloat[idx];
+		const float f1 = outputAsFloat[idx];
+		// \todo relative error needs to be fairly high because FRem may be implemented as
+		// (roughly) frac(a/b)*b, so LSB errors can be magnified. But this should be fine for now.
+		if (deFloatAbs((f1 - f0) / f0) > 0.02)
+			return false;
+	}
+
+	return true;
+}
+
+tcu::TestCaseGroup* createOpFRemGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opfrem", "Test the OpFRem instruction"));
+	ComputeShaderSpec				spec;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 200;
+	vector<float>					inputFloats1	(numElements, 0);
+	vector<float>					inputFloats2	(numElements, 0);
+	vector<float>					outputFloats	(numElements, 0);
+
+	fillRandomScalars(rnd, -10000.f, 10000.f, &inputFloats1[0], numElements);
+	fillRandomScalars(rnd, -100.f, 100.f, &inputFloats2[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+	{
+		// Guard against divisors near zero.
+		if (std::fabs(inputFloats2[ndx]) < 1e-3)
+			inputFloats2[ndx] = 8.f;
+
+		// The return value of std::fmod() has the same sign as its first operand, which is how OpFRem is specified.
+		outputFloats[ndx] = std::fmod(inputFloats1[ndx], inputFloats2[ndx]);
+	}
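+
+	// Worked example (values are illustrative, not actual test inputs): for a = 7.5 and b = -2.0
+	// both OpFRem and std::fmod() give 1.5 (sign taken from the first operand); for a = -7.5 and
+	// b = 2.0 both give -1.5.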
+
+	spec.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		"OpDecorate %inbuf1 BufferBlock\n"
+		"OpDecorate %indata1 DescriptorSet 0\n"
+		"OpDecorate %indata1 Binding 0\n"
+		"OpDecorate %inbuf2 BufferBlock\n"
+		"OpDecorate %indata2 DescriptorSet 0\n"
+		"OpDecorate %indata2 Binding 1\n"
+		"OpDecorate %outbuf BufferBlock\n"
+		"OpDecorate %outdata DescriptorSet 0\n"
+		"OpDecorate %outdata Binding 2\n"
+		"OpDecorate %f32arr ArrayStride 4\n"
+		"OpMemberDecorate %inbuf1 0 Offset 0\n"
+		"OpMemberDecorate %inbuf2 0 Offset 0\n"
+		"OpMemberDecorate %outbuf 0 Offset 0\n"
+
+		+ string(s_CommonTypes) +
+
+		"%inbuf1     = OpTypeStruct %f32arr\n"
+		"%inbufptr1  = OpTypePointer Uniform %inbuf1\n"
+		"%indata1    = OpVariable %inbufptr1 Uniform\n"
+		"%inbuf2     = OpTypeStruct %f32arr\n"
+		"%inbufptr2  = OpTypePointer Uniform %inbuf2\n"
+		"%indata2    = OpVariable %inbufptr2 Uniform\n"
+		"%outbuf     = OpTypeStruct %f32arr\n"
+		"%outbufptr  = OpTypePointer Uniform %outbuf\n"
+		"%outdata    = OpVariable %outbufptr Uniform\n"
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc1    = OpAccessChain %f32ptr %indata1 %zero %x\n"
+		"%inval1    = OpLoad %f32 %inloc1\n"
+		"%inloc2    = OpAccessChain %f32ptr %indata2 %zero %x\n"
+		"%inval2    = OpLoad %f32 %inloc2\n"
+		"%rem       = OpFRem %f32 %inval1 %inval2\n"
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"             OpStore %outloc %rem\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n";
+
+	spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats1)));
+	spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats2)));
+	spec.outputs.push_back(BufferSp(new Float32Buffer(outputFloats)));
+	spec.numWorkGroups = IVec3(numElements, 1, 1);
+	spec.verifyIO = &compareFRem;
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "all", "", spec));
+
+	return group.release();
+}
+
+// Copy the contents of the input buffer to the output buffer.
+tcu::TestCaseGroup* createOpCopyMemoryGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opcopymemory", "Test the OpCopyMemory instruction"));
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+
+	// The following case adds vec4(0., 0.5, 1.5, 2.5) to each element of the input buffer and writes the results to the output buffer.
+	ComputeShaderSpec				spec1;
+	vector<Vec4>					inputFloats1	(numElements);
+	vector<Vec4>					outputFloats1	(numElements);
+
+	fillRandomScalars(rnd, -200.f, 200.f, &inputFloats1[0], numElements * 4);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		outputFloats1[ndx] = inputFloats1[ndx] + Vec4(0.f, 0.5f, 1.5f, 2.5f);
+
+	spec1.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+		"OpDecorate %vec4arr ArrayStride 16\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) +
+
+		"%vec4       = OpTypeVector %f32 4\n"
+		"%vec4ptr_u  = OpTypePointer Uniform %vec4\n"
+		"%vec4ptr_f  = OpTypePointer Function %vec4\n"
+		"%vec4arr    = OpTypeRuntimeArray %vec4\n"
+		"%inbuf      = OpTypeStruct %vec4arr\n"
+		"%inbufptr   = OpTypePointer Uniform %inbuf\n"
+		"%indata     = OpVariable %inbufptr Uniform\n"
+		"%outbuf     = OpTypeStruct %vec4arr\n"
+		"%outbufptr  = OpTypePointer Uniform %outbuf\n"
+		"%outdata    = OpVariable %outbufptr Uniform\n"
+
+		"%id         = OpVariable %uvec3ptr Input\n"
+		"%zero       = OpConstant %i32 0\n"
+		"%c_f_0      = OpConstant %f32 0.\n"
+		"%c_f_0_5    = OpConstant %f32 0.5\n"
+		"%c_f_1_5    = OpConstant %f32 1.5\n"
+		"%c_f_2_5    = OpConstant %f32 2.5\n"
+		"%c_vec4     = OpConstantComposite %vec4 %c_f_0 %c_f_0_5 %c_f_1_5 %c_f_2_5\n"
+
+		"%main       = OpFunction %void None %voidf\n"
+		"%label      = OpLabel\n"
+		"%v_vec4     = OpVariable %vec4ptr_f Function\n"
+		"%idval      = OpLoad %uvec3 %id\n"
+		"%x          = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc      = OpAccessChain %vec4ptr_u %indata %zero %x\n"
+		"%outloc     = OpAccessChain %vec4ptr_u %outdata %zero %x\n"
+		"              OpCopyMemory %v_vec4 %inloc\n"
+		"%v_vec4_val = OpLoad %vec4 %v_vec4\n"
+		"%add        = OpFAdd %vec4 %v_vec4_val %c_vec4\n"
+		"              OpStore %outloc %add\n"
+		"              OpReturn\n"
+		"              OpFunctionEnd\n";
+
+	spec1.inputs.push_back(BufferSp(new Vec4Buffer(inputFloats1)));
+	spec1.outputs.push_back(BufferSp(new Vec4Buffer(outputFloats1)));
+	spec1.numWorkGroups = IVec3(numElements, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "vector", "OpCopyMemory elements of vector type", spec1));
+
+	// The following case copies a float[100] variable from the input buffer to the output buffer.
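+	// A single invocation copies the whole array, so only one workgroup is dispatched (see numWorkGroups below).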
+	ComputeShaderSpec				spec2;
+	vector<float>					inputFloats2	(numElements);
+	vector<float>					outputFloats2	(numElements);
+
+	fillRandomScalars(rnd, -200.f, 200.f, &inputFloats2[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		outputFloats2[ndx] = inputFloats2[ndx];
+
+	spec2.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+		"OpDecorate %f32arr100 ArrayStride 4\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) +
+
+		"%hundred        = OpConstant %u32 100\n"
+		"%f32arr100      = OpTypeArray %f32 %hundred\n"
+		"%f32arr100ptr_f = OpTypePointer Function %f32arr100\n"
+		"%f32arr100ptr_u = OpTypePointer Uniform %f32arr100\n"
+		"%inbuf          = OpTypeStruct %f32arr100\n"
+		"%inbufptr       = OpTypePointer Uniform %inbuf\n"
+		"%indata         = OpVariable %inbufptr Uniform\n"
+		"%outbuf         = OpTypeStruct %f32arr100\n"
+		"%outbufptr      = OpTypePointer Uniform %outbuf\n"
+		"%outdata        = OpVariable %outbufptr Uniform\n"
+
+		"%id             = OpVariable %uvec3ptr Input\n"
+		"%zero           = OpConstant %i32 0\n"
+
+		"%main           = OpFunction %void None %voidf\n"
+		"%label          = OpLabel\n"
+		"%var            = OpVariable %f32arr100ptr_f Function\n"
+		"%inarr          = OpAccessChain %f32arr100ptr_u %indata %zero\n"
+		"%outarr         = OpAccessChain %f32arr100ptr_u %outdata %zero\n"
+		"                  OpCopyMemory %var %inarr\n"
+		"                  OpCopyMemory %outarr %var\n"
+		"                  OpReturn\n"
+		"                  OpFunctionEnd\n";
+
+	spec2.inputs.push_back(BufferSp(new Float32Buffer(inputFloats2)));
+	spec2.outputs.push_back(BufferSp(new Float32Buffer(outputFloats2)));
+	spec2.numWorkGroups = IVec3(1, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "array", "OpCopyMemory elements of array type", spec2));
+
+	// The following case copies a struct{vec4, vec4, vec4, vec4} variable from the input buffer to the output buffer.
+	ComputeShaderSpec				spec3;
+	vector<float>					inputFloats3	(16);
+	vector<float>					outputFloats3	(16);
+
+	fillRandomScalars(rnd, -200.f, 200.f, &inputFloats3[0], 16);
+
+	for (size_t ndx = 0; ndx < 16; ++ndx)
+		outputFloats3[ndx] = inputFloats3[ndx];
+
+	spec3.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+		"OpMemberDecorate %inbuf 0 Offset 0\n"
+		"OpMemberDecorate %inbuf 1 Offset 16\n"
+		"OpMemberDecorate %inbuf 2 Offset 32\n"
+		"OpMemberDecorate %inbuf 3 Offset 48\n"
+		"OpMemberDecorate %outbuf 0 Offset 0\n"
+		"OpMemberDecorate %outbuf 1 Offset 16\n"
+		"OpMemberDecorate %outbuf 2 Offset 32\n"
+		"OpMemberDecorate %outbuf 3 Offset 48\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) +
+
+		"%vec4      = OpTypeVector %f32 4\n"
+		"%inbuf     = OpTypeStruct %vec4 %vec4 %vec4 %vec4\n"
+		"%inbufptr  = OpTypePointer Uniform %inbuf\n"
+		"%indata    = OpVariable %inbufptr Uniform\n"
+		"%outbuf    = OpTypeStruct %vec4 %vec4 %vec4 %vec4\n"
+		"%outbufptr = OpTypePointer Uniform %outbuf\n"
+		"%outdata   = OpVariable %outbufptr Uniform\n"
+		"%vec4stptr = OpTypePointer Function %inbuf\n"
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%var       = OpVariable %vec4stptr Function\n"
+		"             OpCopyMemory %var %indata\n"
+		"             OpCopyMemory %outdata %var\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n";
+
+	spec3.inputs.push_back(BufferSp(new Float32Buffer(inputFloats3)));
+	spec3.outputs.push_back(BufferSp(new Float32Buffer(outputFloats3)));
+	spec3.numWorkGroups = IVec3(1, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "struct", "OpCopyMemory elements of struct type", spec3));
+
+	// The following case copies each float element into a function-local variable with OpCopyMemory, negates it, and stores the result to the output buffer.
+	ComputeShaderSpec				spec4;
+	vector<float>					inputFloats4	(numElements);
+	vector<float>					outputFloats4	(numElements);
+
+	fillRandomScalars(rnd, -200.f, 200.f, &inputFloats4[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		outputFloats4[ndx] = -inputFloats4[ndx];
+
+	spec4.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%f32ptr_f  = OpTypePointer Function %f32\n"
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%var       = OpVariable %f32ptr_f Function\n"
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc     = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"             OpCopyMemory %var %inloc\n"
+		"%val       = OpLoad %f32 %var\n"
+		"%neg       = OpFNegate %f32 %val\n"
+		"             OpStore %outloc %neg\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n";
+
+	spec4.inputs.push_back(BufferSp(new Float32Buffer(inputFloats4)));
+	spec4.outputs.push_back(BufferSp(new Float32Buffer(outputFloats4)));
+	spec4.numWorkGroups = IVec3(numElements, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "float", "OpCopyMemory elements of float type", spec4));
+
+	return group.release();
+}
+
+tcu::TestCaseGroup* createOpCopyObjectGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opcopyobject", "Test the OpCopyObject instruction"));
+	ComputeShaderSpec				spec;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					inputFloats		(numElements, 0);
+	vector<float>					outputFloats	(numElements, 0);
+
+	fillRandomScalars(rnd, -200.f, 200.f, &inputFloats[0], numElements);
+
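+	// The shader adds five copies of the 1.5 constant (extracted from the OpCopyObject results) to each input, hence the expected +7.5.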
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		outputFloats[ndx] = inputFloats[ndx] + 7.5f;
+
+	spec.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) +
+
+		"%fmat     = OpTypeMatrix %fvec3 3\n"
+		"%three    = OpConstant %u32 3\n"
+		"%farr     = OpTypeArray %f32 %three\n"
+		"%fst      = OpTypeStruct %f32 %f32\n"
+
+		+ string(s_InputOutputBuffer) +
+
+		"%id            = OpVariable %uvec3ptr Input\n"
+		"%zero          = OpConstant %i32 0\n"
+		"%c_f           = OpConstant %f32 1.5\n"
+		"%c_fvec3       = OpConstantComposite %fvec3 %c_f %c_f %c_f\n"
+		"%c_fmat        = OpConstantComposite %fmat %c_fvec3 %c_fvec3 %c_fvec3\n"
+		"%c_farr        = OpConstantComposite %farr %c_f %c_f %c_f\n"
+		"%c_fst         = OpConstantComposite %fst %c_f %c_f\n"
+
+		"%main          = OpFunction %void None %voidf\n"
+		"%label         = OpLabel\n"
+		"%c_f_copy      = OpCopyObject %f32   %c_f\n"
+		"%c_fvec3_copy  = OpCopyObject %fvec3 %c_fvec3\n"
+		"%c_fmat_copy   = OpCopyObject %fmat  %c_fmat\n"
+		"%c_farr_copy   = OpCopyObject %farr  %c_farr\n"
+		"%c_fst_copy    = OpCopyObject %fst   %c_fst\n"
+		"%fvec3_elem    = OpCompositeExtract %f32 %c_fvec3_copy 0\n"
+		"%fmat_elem     = OpCompositeExtract %f32 %c_fmat_copy 1 2\n"
+		"%farr_elem     = OpCompositeExtract %f32 %c_farr_copy 2\n"
+		"%fst_elem      = OpCompositeExtract %f32 %c_fst_copy 1\n"
+		// Add up. 1.5 * 5 = 7.5.
+		"%add1          = OpFAdd %f32 %c_f_copy %fvec3_elem\n"
+		"%add2          = OpFAdd %f32 %add1     %fmat_elem\n"
+		"%add3          = OpFAdd %f32 %add2     %farr_elem\n"
+		"%add4          = OpFAdd %f32 %add3     %fst_elem\n"
+
+		"%idval         = OpLoad %uvec3 %id\n"
+		"%x             = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc         = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%outloc        = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"%inval         = OpLoad %f32 %inloc\n"
+		"%add           = OpFAdd %f32 %add4 %inval\n"
+		"                 OpStore %outloc %add\n"
+		"                 OpReturn\n"
+		"                 OpFunctionEnd\n";
+	spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats)));
+	spec.outputs.push_back(BufferSp(new Float32Buffer(outputFloats)));
+	spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "spotcheck", "OpCopyObject on different types", spec));
+
+	return group.release();
+}
+
+// Assembly code used for testing OpUnreachable is based on GLSL source code:
+//
+// #version 430
+//
+// layout(std140, set = 0, binding = 0) readonly buffer Input {
+//   float elements[];
+// } input_data;
+// layout(std140, set = 0, binding = 1) writeonly buffer Output {
+//   float elements[];
+// } output_data;
+//
+// void not_called_func() {
+//   // place OpUnreachable here
+// }
+//
+// uint modulo4(uint val) {
+//   switch (val % uint(4)) {
+//     case 0:  return 3;
+//     case 1:  return 2;
+//     case 2:  return 1;
+//     case 3:  return 0;
+//     default: return 100; // place OpUnreachable here
+//   }
+// }
+//
+// uint const5() {
+//   return 5;
+//   // place OpUnreachable here
+// }
+//
+// void main() {
+//   uint x = gl_GlobalInvocationID.x;
+//   if (const5() > modulo4(1000)) {
+//     output_data.elements[x] = -input_data.elements[x];
+//   } else {
+//     // place OpUnreachable here
+//     output_data.elements[x] = input_data.elements[x];
+//   }
+// }
+
+tcu::TestCaseGroup* createOpUnreachableGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opunreachable", "Test the OpUnreachable instruction"));
+	ComputeShaderSpec				spec;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					positiveFloats	(numElements, 0);
+	vector<float>					negativeFloats	(numElements, 0);
+
+	fillRandomScalars(rnd, 1.f, 100.f, &positiveFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		negativeFloats[ndx] = -positiveFloats[ndx];
+
+	spec.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpSource GLSL 430\n"
+		"OpName %main            \"main\"\n"
+		"OpName %func_not_called_func \"not_called_func(\"\n"
+		"OpName %func_modulo4         \"modulo4(u1;\"\n"
+		"OpName %func_const5          \"const5(\"\n"
+		"OpName %id                   \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) +
+
+		"%u32ptr    = OpTypePointer Function %u32\n"
+		"%uintfuint = OpTypeFunction %u32 %u32ptr\n"
+		"%unitf     = OpTypeFunction %u32\n"
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %u32 0\n"
+		"%one       = OpConstant %u32 1\n"
+		"%two       = OpConstant %u32 2\n"
+		"%three     = OpConstant %u32 3\n"
+		"%four      = OpConstant %u32 4\n"
+		"%five      = OpConstant %u32 5\n"
+		"%hundred   = OpConstant %u32 100\n"
+		"%thousand  = OpConstant %u32 1000\n"
+
+		+ string(s_InputOutputBuffer) +
+
+		// Main()
+		"%main   = OpFunction %void None %voidf\n"
+		"%main_entry  = OpLabel\n"
+		"%v_thousand  = OpVariable %u32ptr Function %thousand\n"
+		"%idval       = OpLoad %uvec3 %id\n"
+		"%x           = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc       = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval       = OpLoad %f32 %inloc\n"
+		"%outloc      = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"%ret_const5  = OpFunctionCall %u32 %func_const5\n"
+		"%ret_modulo4 = OpFunctionCall %u32 %func_modulo4 %v_thousand\n"
+		"%cmp_gt      = OpUGreaterThan %bool %ret_const5 %ret_modulo4\n"
+		"               OpSelectionMerge %if_end None\n"
+		"               OpBranchConditional %cmp_gt %if_true %if_false\n"
+		"%if_true     = OpLabel\n"
+		"%negate      = OpFNegate %f32 %inval\n"
+		"               OpStore %outloc %negate\n"
+		"               OpBranch %if_end\n"
+		"%if_false    = OpLabel\n"
+		"               OpUnreachable\n" // Unreachable else branch for if statement
+		"%if_end      = OpLabel\n"
+		"               OpReturn\n"
+		"               OpFunctionEnd\n"
+
+		// not_called_function()
+		"%func_not_called_func  = OpFunction %void None %voidf\n"
+		"%not_called_func_entry = OpLabel\n"
+		"                         OpUnreachable\n" // Unreachable entry block in not called static function
+		"                         OpFunctionEnd\n"
+
+		// modulo4()
+		"%func_modulo4  = OpFunction %u32 None %uintfuint\n"
+		"%valptr        = OpFunctionParameter %u32ptr\n"
+		"%modulo4_entry = OpLabel\n"
+		"%val           = OpLoad %u32 %valptr\n"
+		"%modulo        = OpUMod %u32 %val %four\n"
+		"                 OpSelectionMerge %switch_merge None\n"
+		"                 OpSwitch %modulo %default 0 %case0 1 %case1 2 %case2 3 %case3\n"
+		"%case0         = OpLabel\n"
+		"                 OpReturnValue %three\n"
+		"%case1         = OpLabel\n"
+		"                 OpReturnValue %two\n"
+		"%case2         = OpLabel\n"
+		"                 OpReturnValue %one\n"
+		"%case3         = OpLabel\n"
+		"                 OpReturnValue %zero\n"
+		"%default       = OpLabel\n"
+		"                 OpUnreachable\n" // Unreachable default case for switch statement
+		"%switch_merge  = OpLabel\n"
+		"                 OpUnreachable\n" // Unreachable merge block for switch statement
+		"                 OpFunctionEnd\n"
+
+		// const5()
+		"%func_const5  = OpFunction %u32 None %unitf\n"
+		"%const5_entry = OpLabel\n"
+		"                OpReturnValue %five\n"
+		"%unreachable  = OpLabel\n"
+		"                OpUnreachable\n" // Unreachable block in function
+		"                OpFunctionEnd\n";
+	spec.inputs.push_back(BufferSp(new Float32Buffer(positiveFloats)));
+	spec.outputs.push_back(BufferSp(new Float32Buffer(negativeFloats)));
+	spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "all", "OpUnreachable appearing at different places", spec));
+
+	return group.release();
+}
+
+// Assembly code used for testing decoration group is based on GLSL source code:
+//
+// #version 430
+//
+// layout(std140, set = 0, binding = 0) readonly buffer Input0 {
+//   float elements[];
+// } input_data0;
+// layout(std140, set = 0, binding = 1) readonly buffer Input1 {
+//   float elements[];
+// } input_data1;
+// layout(std140, set = 0, binding = 2) readonly buffer Input2 {
+//   float elements[];
+// } input_data2;
+// layout(std140, set = 0, binding = 3) readonly buffer Input3 {
+//   float elements[];
+// } input_data3;
+// layout(std140, set = 0, binding = 4) readonly buffer Input4 {
+//   float elements[];
+// } input_data4;
+// layout(std140, set = 0, binding = 5) writeonly buffer Output {
+//   float elements[];
+// } output_data;
+//
+// void main() {
+//   uint x = gl_GlobalInvocationID.x;
+//   output_data.elements[x] = input_data0.elements[x] + input_data1.elements[x] + input_data2.elements[x] + input_data3.elements[x] + input_data4.elements[x];
+// }
+tcu::TestCaseGroup* createDecorationGroupGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "decoration_group", "Test the OpDecorationGroup & OpGroupDecorate instructions"));
+	ComputeShaderSpec				spec;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					inputFloats0	(numElements, 0);
+	vector<float>					inputFloats1	(numElements, 0);
+	vector<float>					inputFloats2	(numElements, 0);
+	vector<float>					inputFloats3	(numElements, 0);
+	vector<float>					inputFloats4	(numElements, 0);
+	vector<float>					outputFloats	(numElements, 0);
+
+	fillRandomScalars(rnd, -300.f, 300.f, &inputFloats0[0], numElements);
+	fillRandomScalars(rnd, -300.f, 300.f, &inputFloats1[0], numElements);
+	fillRandomScalars(rnd, -300.f, 300.f, &inputFloats2[0], numElements);
+	fillRandomScalars(rnd, -300.f, 300.f, &inputFloats3[0], numElements);
+	fillRandomScalars(rnd, -300.f, 300.f, &inputFloats4[0], numElements);
+
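+	// Floor the inputs so the expected sums stay exactly representable in 32-bit floats.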
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+	{
+		inputFloats0[ndx] = deFloatFloor(inputFloats0[ndx]);
+		inputFloats1[ndx] = deFloatFloor(inputFloats1[ndx]);
+		inputFloats2[ndx] = deFloatFloor(inputFloats2[ndx]);
+		inputFloats3[ndx] = deFloatFloor(inputFloats3[ndx]);
+		inputFloats4[ndx] = deFloatFloor(inputFloats4[ndx]);
+	}
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		outputFloats[ndx] = inputFloats0[ndx] + inputFloats1[ndx] + inputFloats2[ndx] + inputFloats3[ndx] + inputFloats4[ndx];
+
+	spec.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpSource GLSL 430\n"
+		"OpName %main \"main\"\n"
+		"OpName %id \"gl_GlobalInvocationID\"\n"
+
+		// Not using group decoration on variable.
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+		// Not using group decoration on type.
+		"OpDecorate %f32arr ArrayStride 4\n"
+
+		"OpDecorate %groups BufferBlock\n"
+		"OpDecorate %groupm Offset 0\n"
+		"%groups = OpDecorationGroup\n"
+		"%groupm = OpDecorationGroup\n"
+
+		// Group decoration on multiple structs.
+		"OpGroupDecorate %groups %outbuf %inbuf0 %inbuf1 %inbuf2 %inbuf3 %inbuf4\n"
+		// Group decoration on multiple struct members.
+		"OpGroupMemberDecorate %groupm %outbuf 0 %inbuf0 0 %inbuf1 0 %inbuf2 0 %inbuf3 0 %inbuf4 0\n"
+
+		"OpDecorate %group1 DescriptorSet 0\n"
+		"OpDecorate %group3 DescriptorSet 0\n"
+		"OpDecorate %group3 NonWritable\n"
+		"OpDecorate %group3 Restrict\n"
+		"%group0 = OpDecorationGroup\n"
+		"%group1 = OpDecorationGroup\n"
+		"%group3 = OpDecorationGroup\n"
+
+		// Applying the same decoration group multiple times.
+		"OpGroupDecorate %group1 %outdata\n"
+		"OpGroupDecorate %group1 %outdata\n"
+		"OpGroupDecorate %group1 %outdata\n"
+		"OpDecorate %outdata DescriptorSet 0\n"
+		"OpDecorate %outdata Binding 5\n"
+		// Applying a decoration group containing no decorations.
+		"OpGroupDecorate %group0 %indata0\n"
+		"OpDecorate %indata0 DescriptorSet 0\n"
+		"OpDecorate %indata0 Binding 0\n"
+		// Applying a decoration group containing one decoration.
+		"OpGroupDecorate %group1 %indata1\n"
+		"OpDecorate %indata1 Binding 1\n"
+		// Applying a decoration group containing multiple decorations.
+		"OpGroupDecorate %group3 %indata2 %indata3\n"
+		"OpDecorate %indata2 Binding 2\n"
+		"OpDecorate %indata3 Binding 3\n"
+		// Applying multiple, overlapping decoration groups.
+		"OpGroupDecorate %group0 %indata4\n"
+		"OpGroupDecorate %group1 %indata4\n"
+		"OpGroupDecorate %group3 %indata4\n"
+		"OpDecorate %indata4 Binding 4\n"
+
+		+ string(s_CommonTypes) +
+
+		"%id   = OpVariable %uvec3ptr Input\n"
+		"%zero = OpConstant %i32 0\n"
+
+		"%outbuf    = OpTypeStruct %f32arr\n"
+		"%outbufptr = OpTypePointer Uniform %outbuf\n"
+		"%outdata   = OpVariable %outbufptr Uniform\n"
+		"%inbuf0    = OpTypeStruct %f32arr\n"
+		"%inbuf0ptr = OpTypePointer Uniform %inbuf0\n"
+		"%indata0   = OpVariable %inbuf0ptr Uniform\n"
+		"%inbuf1    = OpTypeStruct %f32arr\n"
+		"%inbuf1ptr = OpTypePointer Uniform %inbuf1\n"
+		"%indata1   = OpVariable %inbuf1ptr Uniform\n"
+		"%inbuf2    = OpTypeStruct %f32arr\n"
+		"%inbuf2ptr = OpTypePointer Uniform %inbuf2\n"
+		"%indata2   = OpVariable %inbuf2ptr Uniform\n"
+		"%inbuf3    = OpTypeStruct %f32arr\n"
+		"%inbuf3ptr = OpTypePointer Uniform %inbuf3\n"
+		"%indata3   = OpVariable %inbuf3ptr Uniform\n"
+		"%inbuf4    = OpTypeStruct %f32arr\n"
+		"%inbufptr  = OpTypePointer Uniform %inbuf4\n"
+		"%indata4   = OpVariable %inbufptr Uniform\n"
+
+		"%main   = OpFunction %void None %voidf\n"
+		"%label  = OpLabel\n"
+		"%idval  = OpLoad %uvec3 %id\n"
+		"%x      = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc0 = OpAccessChain %f32ptr %indata0 %zero %x\n"
+		"%inloc1 = OpAccessChain %f32ptr %indata1 %zero %x\n"
+		"%inloc2 = OpAccessChain %f32ptr %indata2 %zero %x\n"
+		"%inloc3 = OpAccessChain %f32ptr %indata3 %zero %x\n"
+		"%inloc4 = OpAccessChain %f32ptr %indata4 %zero %x\n"
+		"%outloc = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"%inval0 = OpLoad %f32 %inloc0\n"
+		"%inval1 = OpLoad %f32 %inloc1\n"
+		"%inval2 = OpLoad %f32 %inloc2\n"
+		"%inval3 = OpLoad %f32 %inloc3\n"
+		"%inval4 = OpLoad %f32 %inloc4\n"
+		"%add0   = OpFAdd %f32 %inval0 %inval1\n"
+		"%add1   = OpFAdd %f32 %add0 %inval2\n"
+		"%add2   = OpFAdd %f32 %add1 %inval3\n"
+		"%add    = OpFAdd %f32 %add2 %inval4\n"
+		"          OpStore %outloc %add\n"
+		"          OpReturn\n"
+		"          OpFunctionEnd\n";
+	spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats0)));
+	spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats1)));
+	spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats2)));
+	spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats3)));
+	spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats4)));
+	spec.outputs.push_back(BufferSp(new Float32Buffer(outputFloats)));
+	spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "all", "decoration group cases", spec));
+
+	return group.release();
+}
+
+struct SpecConstantTwoIntCase
+{
+	const char*		caseName;
+	const char*		scDefinition0;
+	const char*		scDefinition1;
+	const char*		scResultType;
+	const char*		scOperation;
+	deInt32			scActualValue0;
+	deInt32			scActualValue1;
+	const char*		resultOperation;
+	vector<deInt32>	expectedOutput;
+
+					SpecConstantTwoIntCase (const char* name,
+											const char* definition0,
+											const char* definition1,
+											const char* resultType,
+											const char* operation,
+											deInt32 value0,
+											deInt32 value1,
+											const char* resultOp,
+											const vector<deInt32>& output)
+						: caseName			(name)
+						, scDefinition0		(definition0)
+						, scDefinition1		(definition1)
+						, scResultType		(resultType)
+						, scOperation		(operation)
+						, scActualValue0	(value0)
+						, scActualValue1	(value1)
+						, resultOperation	(resultOp)
+						, expectedOutput	(output) {}
+};
+
+tcu::TestCaseGroup* createSpecConstantGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opspecconstantop", "Test the OpSpecConstantOp instruction"));
+	vector<SpecConstantTwoIntCase>	cases;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<deInt32>					inputInts		(numElements, 0);
+	vector<deInt32>					outputInts1		(numElements, 0);
+	vector<deInt32>					outputInts2		(numElements, 0);
+	vector<deInt32>					outputInts3		(numElements, 0);
+	vector<deInt32>					outputInts4		(numElements, 0);
+	const StringTemplate			shaderTemplate	(
+		string(s_ShaderPreamble) +
+
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+		"OpDecorate %sc_0  SpecId 0\n"
+		"OpDecorate %sc_1  SpecId 1\n"
+		"OpDecorate %i32arr ArrayStride 4\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) +
+
+		"%i32ptr    = OpTypePointer Uniform %i32\n"
+		"%i32arr    = OpTypeRuntimeArray %i32\n"
+		"%boolptr   = OpTypePointer Uniform %bool\n"
+		"%boolarr   = OpTypeRuntimeArray %bool\n"
+		"%inbuf     = OpTypeStruct %i32arr\n"
+		"%inbufptr  = OpTypePointer Uniform %inbuf\n"
+		"%indata    = OpVariable %inbufptr Uniform\n"
+		"%outbuf    = OpTypeStruct %i32arr\n"
+		"%outbufptr = OpTypePointer Uniform %outbuf\n"
+		"%outdata   = OpVariable %outbufptr Uniform\n"
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		"%sc_0      = OpSpecConstant${SC_DEF0}\n"
+		"%sc_1      = OpSpecConstant${SC_DEF1}\n"
+		"%sc_final  = OpSpecConstantOp ${SC_RESULT_TYPE} ${SC_OP}\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc     = OpAccessChain %i32ptr %indata %zero %x\n"
+		"%inval     = OpLoad %i32 %inloc\n"
+		"%final     = ${GEN_RESULT}\n"
+		"%outloc    = OpAccessChain %i32ptr %outdata %zero %x\n"
+		"             OpStore %outloc %final\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n");
+
+	fillRandomScalars(rnd, -65536, 65536, &inputInts[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+	{
+		outputInts1[ndx] = inputInts[ndx] + 42;
+		outputInts2[ndx] = inputInts[ndx];
+		outputInts3[ndx] = inputInts[ndx] - 11200;
+		outputInts4[ndx] = inputInts[ndx] + 1;
+	}
+
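+	// %sc_final is the result of ${SC_OP} above; addScToInput writes (input + sc_final), while the
+	// select variants write either the input or zero depending on the boolean value of sc_final.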
+	const char addScToInput[]		= "OpIAdd %i32 %inval %sc_final";
+	const char selectTrueUsingSc[]	= "OpSelect %i32 %sc_final %inval %zero";
+	const char selectFalseUsingSc[]	= "OpSelect %i32 %sc_final %zero %inval";
+
+	cases.push_back(SpecConstantTwoIntCase("iadd",					" %i32 0",		" %i32 0",		"%i32",		"IAdd                 %sc_0 %sc_1",			62,		-20,	addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("isub",					" %i32 0",		" %i32 0",		"%i32",		"ISub                 %sc_0 %sc_1",			100,	58,		addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("imul",					" %i32 0",		" %i32 0",		"%i32",		"IMul                 %sc_0 %sc_1",			-2,		-21,	addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("sdiv",					" %i32 0",		" %i32 0",		"%i32",		"SDiv                 %sc_0 %sc_1",			-126,	-3,		addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("udiv",					" %i32 0",		" %i32 0",		"%i32",		"UDiv                 %sc_0 %sc_1",			126,	3,		addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("srem",					" %i32 0",		" %i32 0",		"%i32",		"SRem                 %sc_0 %sc_1",			7,		3,		addScToInput,		outputInts4));
+	cases.push_back(SpecConstantTwoIntCase("smod",					" %i32 0",		" %i32 0",		"%i32",		"SMod                 %sc_0 %sc_1",			7,		3,		addScToInput,		outputInts4));
+	cases.push_back(SpecConstantTwoIntCase("umod",					" %i32 0",		" %i32 0",		"%i32",		"UMod                 %sc_0 %sc_1",			342,	50,		addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("bitwiseand",			" %i32 0",		" %i32 0",		"%i32",		"BitwiseAnd           %sc_0 %sc_1",			42,		63,		addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("bitwiseor",				" %i32 0",		" %i32 0",		"%i32",		"BitwiseOr            %sc_0 %sc_1",			34,		8,		addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("bitwisexor",			" %i32 0",		" %i32 0",		"%i32",		"BitwiseXor           %sc_0 %sc_1",			18,		56,		addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("shiftrightlogical",		" %i32 0",		" %i32 0",		"%i32",		"ShiftRightLogical    %sc_0 %sc_1",			168,	2,		addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("shiftrightarithmetic",	" %i32 0",		" %i32 0",		"%i32",		"ShiftRightArithmetic %sc_0 %sc_1",			168,	2,		addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("shiftleftlogical",		" %i32 0",		" %i32 0",		"%i32",		"ShiftLeftLogical     %sc_0 %sc_1",			21,		1,		addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("slessthan",				" %i32 0",		" %i32 0",		"%bool",	"SLessThan            %sc_0 %sc_1",			-20,	-10,	selectTrueUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("ulessthan",				" %i32 0",		" %i32 0",		"%bool",	"ULessThan            %sc_0 %sc_1",			10,		20,		selectTrueUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("sgreaterthan",			" %i32 0",		" %i32 0",		"%bool",	"SGreaterThan         %sc_0 %sc_1",			-1000,	50,		selectFalseUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("ugreaterthan",			" %i32 0",		" %i32 0",		"%bool",	"UGreaterThan         %sc_0 %sc_1",			10,		5,		selectTrueUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("slessthanequal",		" %i32 0",		" %i32 0",		"%bool",	"SLessThanEqual       %sc_0 %sc_1",			-10,	-10,	selectTrueUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("ulessthanequal",		" %i32 0",		" %i32 0",		"%bool",	"ULessThanEqual       %sc_0 %sc_1",			50,		100,	selectTrueUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("sgreaterthanequal",		" %i32 0",		" %i32 0",		"%bool",	"SGreaterThanEqual    %sc_0 %sc_1",			-1000,	50,		selectFalseUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("ugreaterthanequal",		" %i32 0",		" %i32 0",		"%bool",	"UGreaterThanEqual    %sc_0 %sc_1",			10,		10,		selectTrueUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("iequal",				" %i32 0",		" %i32 0",		"%bool",	"IEqual               %sc_0 %sc_1",			42,		24,		selectFalseUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("logicaland",			"True %bool",	"True %bool",	"%bool",	"LogicalAnd           %sc_0 %sc_1",			0,		1,		selectFalseUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("logicalor",				"False %bool",	"False %bool",	"%bool",	"LogicalOr            %sc_0 %sc_1",			1,		0,		selectTrueUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("logicalequal",			"True %bool",	"True %bool",	"%bool",	"LogicalEqual         %sc_0 %sc_1",			0,		1,		selectFalseUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("logicalnotequal",		"False %bool",	"False %bool",	"%bool",	"LogicalNotEqual      %sc_0 %sc_1",			1,		0,		selectTrueUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("snegate",				" %i32 0",		" %i32 0",		"%i32",		"SNegate              %sc_0",				-42,	0,		addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("not",					" %i32 0",		" %i32 0",		"%i32",		"Not                  %sc_0",				-43,	0,		addScToInput,		outputInts1));
+	cases.push_back(SpecConstantTwoIntCase("logicalnot",			"False %bool",	"False %bool",	"%bool",	"LogicalNot           %sc_0",				1,		0,		selectFalseUsingSc,	outputInts2));
+	cases.push_back(SpecConstantTwoIntCase("select",				"False %bool",	" %i32 0",		"%i32",		"Select               %sc_0 %sc_1 %zero",	1,		42,		addScToInput,		outputInts1));
+	// OpSConvert, OpFConvert: these two instructions involve ints/floats of different bitwidths.
+
+	for (size_t caseNdx = 0; caseNdx < cases.size(); ++caseNdx)
+	{
+		map<string, string>		specializations;
+		ComputeShaderSpec		spec;
+
+		specializations["SC_DEF0"]			= cases[caseNdx].scDefinition0;
+		specializations["SC_DEF1"]			= cases[caseNdx].scDefinition1;
+		specializations["SC_RESULT_TYPE"]	= cases[caseNdx].scResultType;
+		specializations["SC_OP"]			= cases[caseNdx].scOperation;
+		specializations["GEN_RESULT"]		= cases[caseNdx].resultOperation;
+
+		spec.assembly = shaderTemplate.specialize(specializations);
+		spec.inputs.push_back(BufferSp(new Int32Buffer(inputInts)));
+		spec.outputs.push_back(BufferSp(new Int32Buffer(cases[caseNdx].expectedOutput)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+		spec.specConstants.push_back(cases[caseNdx].scActualValue0);
+		spec.specConstants.push_back(cases[caseNdx].scActualValue1);
+
+		group->addChild(new SpvAsmComputeShaderCase(testCtx, cases[caseNdx].caseName, cases[caseNdx].caseName, spec));
+	}
+
+	ComputeShaderSpec				spec;
+
+	spec.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+		"OpDecorate %sc_0  SpecId 0\n"
+		"OpDecorate %sc_1  SpecId 1\n"
+		"OpDecorate %sc_2  SpecId 2\n"
+		"OpDecorate %i32arr ArrayStride 4\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) +
+
+		"%ivec3     = OpTypeVector %i32 3\n"
+		"%i32ptr    = OpTypePointer Uniform %i32\n"
+		"%i32arr    = OpTypeRuntimeArray %i32\n"
+		"%boolptr   = OpTypePointer Uniform %bool\n"
+		"%boolarr   = OpTypeRuntimeArray %bool\n"
+		"%inbuf     = OpTypeStruct %i32arr\n"
+		"%inbufptr  = OpTypePointer Uniform %inbuf\n"
+		"%indata    = OpVariable %inbufptr Uniform\n"
+		"%outbuf    = OpTypeStruct %i32arr\n"
+		"%outbufptr = OpTypePointer Uniform %outbuf\n"
+		"%outdata   = OpVariable %outbufptr Uniform\n"
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+		"%ivec3_0   = OpConstantComposite %ivec3 %zero %zero %zero\n"
+
+		"%sc_0        = OpSpecConstant %i32 0\n"
+		"%sc_1        = OpSpecConstant %i32 0\n"
+		"%sc_2        = OpSpecConstant %i32 0\n"
+		"%sc_vec3_0   = OpSpecConstantOp %ivec3 CompositeInsert  %sc_0        %ivec3_0   0\n"     // (sc_0, 0, 0)
+		"%sc_vec3_1   = OpSpecConstantOp %ivec3 CompositeInsert  %sc_1        %ivec3_0   1\n"     // (0, sc_1, 0)
+		"%sc_vec3_2   = OpSpecConstantOp %ivec3 CompositeInsert  %sc_2        %ivec3_0   2\n"     // (0, 0, sc_2)
+		"%sc_vec3_01  = OpSpecConstantOp %ivec3 VectorShuffle    %sc_vec3_0   %sc_vec3_1 1 0 4\n" // (0,    sc_0, sc_1)
+		"%sc_vec3_012 = OpSpecConstantOp %ivec3 VectorShuffle    %sc_vec3_01  %sc_vec3_2 5 1 2\n" // (sc_2, sc_0, sc_1)
+		"%sc_ext_0    = OpSpecConstantOp %i32   CompositeExtract %sc_vec3_012            0\n"     // sc_2
+		"%sc_ext_1    = OpSpecConstantOp %i32   CompositeExtract %sc_vec3_012            1\n"     // sc_0
+		"%sc_ext_2    = OpSpecConstantOp %i32   CompositeExtract %sc_vec3_012            2\n"     // sc_1
+		"%sc_sub      = OpSpecConstantOp %i32   ISub             %sc_ext_0    %sc_ext_1\n"        // (sc_2 - sc_0)
+		"%sc_final    = OpSpecConstantOp %i32   IMul             %sc_sub      %sc_ext_2\n"        // (sc_2 - sc_0) * sc_1
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc     = OpAccessChain %i32ptr %indata %zero %x\n"
+		"%inval     = OpLoad %i32 %inloc\n"
+		"%final     = OpIAdd %i32 %inval %sc_final\n"
+		"%outloc    = OpAccessChain %i32ptr %outdata %zero %x\n"
+		"             OpStore %outloc %final\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n";
+	spec.inputs.push_back(BufferSp(new Int32Buffer(inputInts)));
+	spec.outputs.push_back(BufferSp(new Int32Buffer(outputInts3)));
+	spec.numWorkGroups = IVec3(numElements, 1, 1);
+	spec.specConstants.push_back(123);
+	spec.specConstants.push_back(56);
+	spec.specConstants.push_back(-77);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "vector_related", "VectorShuffle, CompositeExtract, & CompositeInsert", spec));
+
+	return group.release();
+}
+
+tcu::TestCaseGroup* createOpPhiGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opphi", "Test the OpPhi instruction"));
+	ComputeShaderSpec				spec1;
+	ComputeShaderSpec				spec2;
+	ComputeShaderSpec				spec3;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					inputFloats		(numElements, 0);
+	vector<float>					outputFloats1	(numElements, 0);
+	vector<float>					outputFloats2	(numElements, 0);
+	vector<float>					outputFloats3	(numElements, 0);
+
+	fillRandomScalars(rnd, -300.f, 300.f, &inputFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+	{
+		switch (ndx % 3)
+		{
+			case 0:		outputFloats1[ndx] = inputFloats[ndx] + 5.5f;	break;
+			case 1:		outputFloats1[ndx] = inputFloats[ndx] + 20.5f;	break;
+			case 2:		outputFloats1[ndx] = inputFloats[ndx] + 1.75f;	break;
+			default:	break;
+		}
+		outputFloats2[ndx] = inputFloats[ndx] + 6.5f * 3;
+		outputFloats3[ndx] = 8.5f - inputFloats[ndx];
+	}
+
+	spec1.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpSource GLSL 430\n"
+		"OpName %main \"main\"\n"
+		"OpName %id \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%id = OpVariable %uvec3ptr Input\n"
+		"%zero       = OpConstant %i32 0\n"
+		"%three      = OpConstant %u32 3\n"
+		"%constf5p5  = OpConstant %f32 5.5\n"
+		"%constf20p5 = OpConstant %f32 20.5\n"
+		"%constf1p75 = OpConstant %f32 1.75\n"
+		"%constf8p5  = OpConstant %f32 8.5\n"
+		"%constf6p5  = OpConstant %f32 6.5\n"
+
+		"%main     = OpFunction %void None %voidf\n"
+		"%entry    = OpLabel\n"
+		"%idval    = OpLoad %uvec3 %id\n"
+		"%x        = OpCompositeExtract %u32 %idval 0\n"
+		"%selector = OpUMod %u32 %x %three\n"
+		"            OpSelectionMerge %phi None\n"
+		"            OpSwitch %selector %default 0 %case0 1 %case1 2 %case2\n"
+
+		// Case 1 before OpPhi.
+		"%case1    = OpLabel\n"
+		"            OpBranch %phi\n"
+
+		"%default  = OpLabel\n"
+		"            OpUnreachable\n"
+
+		"%phi      = OpLabel\n"
+		"%operand  = OpPhi %f32   %constf1p75 %case2   %constf20p5 %case1   %constf5p5 %case0\n" // not in the order of blocks
+		"%inloc    = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval    = OpLoad %f32 %inloc\n"
+		"%add      = OpFAdd %f32 %inval %operand\n"
+		"%outloc   = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"            OpStore %outloc %add\n"
+		"            OpReturn\n"
+
+		// Case 0 after OpPhi.
+		"%case0    = OpLabel\n"
+		"            OpBranch %phi\n"
+
+		// Case 2 after OpPhi.
+		"%case2    = OpLabel\n"
+		"            OpBranch %phi\n"
+
+		"            OpFunctionEnd\n";
+	spec1.inputs.push_back(BufferSp(new Float32Buffer(inputFloats)));
+	spec1.outputs.push_back(BufferSp(new Float32Buffer(outputFloats1)));
+	spec1.numWorkGroups = IVec3(numElements, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "block", "out-of-order and unreachable blocks for OpPhi", spec1));
+
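+	// The second case exercises OpPhi as a loop-carried induction variable and accumulator; the stored result is input + 3 * 6.5.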
+	spec2.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpName %main \"main\"\n"
+		"OpName %id \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%id         = OpVariable %uvec3ptr Input\n"
+		"%zero       = OpConstant %i32 0\n"
+		"%one        = OpConstant %i32 1\n"
+		"%three      = OpConstant %i32 3\n"
+		"%constf6p5  = OpConstant %f32 6.5\n"
+
+		"%main       = OpFunction %void None %voidf\n"
+		"%entry      = OpLabel\n"
+		"%idval      = OpLoad %uvec3 %id\n"
+		"%x          = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc      = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%outloc     = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"%inval      = OpLoad %f32 %inloc\n"
+		"              OpBranch %phi\n"
+
+		"%phi        = OpLabel\n"
+		"%step       = OpPhi %i32 %zero  %entry %step_next  %phi\n"
+		"%accum      = OpPhi %f32 %inval %entry %accum_next %phi\n"
+		"%step_next  = OpIAdd %i32 %step %one\n"
+		"%accum_next = OpFAdd %f32 %accum %constf6p5\n"
+		"%still_loop = OpSLessThan %bool %step %three\n"
+		"              OpLoopMerge %exit %phi None\n"
+		"              OpBranchConditional %still_loop %phi %exit\n"
+
+		"%exit       = OpLabel\n"
+		"              OpStore %outloc %accum\n"
+		"              OpReturn\n"
+		"              OpFunctionEnd\n";
+	spec2.inputs.push_back(BufferSp(new Float32Buffer(inputFloats)));
+	spec2.outputs.push_back(BufferSp(new Float32Buffer(outputFloats2)));
+	spec2.numWorkGroups = IVec3(numElements, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "induction", "The usual way induction variables are handled in LLVM IR", spec2));
+
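+	// The third case swaps two values through a pair of OpPhi nodes; after the final loop iteration the stored difference is 8.5 - input.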
+	spec3.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpName %main \"main\"\n"
+		"OpName %id \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%f32ptr_f   = OpTypePointer Function %f32\n"
+		"%id         = OpVariable %uvec3ptr Input\n"
+		"%true       = OpConstantTrue %bool\n"
+		"%false      = OpConstantFalse %bool\n"
+		"%zero       = OpConstant %i32 0\n"
+		"%constf8p5  = OpConstant %f32 8.5\n"
+
+		"%main       = OpFunction %void None %voidf\n"
+		"%entry      = OpLabel\n"
+		"%b          = OpVariable %f32ptr_f Function %constf8p5\n"
+		"%idval      = OpLoad %uvec3 %id\n"
+		"%x          = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc      = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%outloc     = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"%a_init     = OpLoad %f32 %inloc\n"
+		"%b_init     = OpLoad %f32 %b\n"
+		"              OpBranch %phi\n"
+
+		"%phi        = OpLabel\n"
+		"%still_loop = OpPhi %bool %true   %entry %false  %phi\n"
+		"%a_next     = OpPhi %f32  %a_init %entry %b_next %phi\n"
+		"%b_next     = OpPhi %f32  %b_init %entry %a_next %phi\n"
+		"              OpLoopMerge %exit %phi None\n"
+		"              OpBranchConditional %still_loop %phi %exit\n"
+
+		"%exit       = OpLabel\n"
+		"%sub        = OpFSub %f32 %a_next %b_next\n"
+		"              OpStore %outloc %sub\n"
+		"              OpReturn\n"
+		"              OpFunctionEnd\n";
+	spec3.inputs.push_back(BufferSp(new Float32Buffer(inputFloats)));
+	spec3.outputs.push_back(BufferSp(new Float32Buffer(outputFloats3)));
+	spec3.numWorkGroups = IVec3(numElements, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "swap", "Swap the values of two variables using OpPhi", spec3));
+
+	return group.release();
+}
+
+// Assembly code used for testing block order is based on GLSL source code:
+//
+// #version 430
+//
+// layout(std140, set = 0, binding = 0) readonly buffer Input {
+//   float elements[];
+// } input_data;
+// layout(std140, set = 0, binding = 1) writeonly buffer Output {
+//   float elements[];
+// } output_data;
+//
+// void main() {
+//   uint x = gl_GlobalInvocationID.x;
+//   output_data.elements[x] = input_data.elements[x];
+//   if (x > uint(50)) {
+//     switch (x % uint(3)) {
+//       case 0: output_data.elements[x] += 1.5f; break;
+//       case 1: output_data.elements[x] += 42.f; break;
+//       case 2: output_data.elements[x] -= 27.f; break;
+//       default: break;
+//     }
+//   } else {
+//     output_data.elements[x] = -input_data.elements[x];
+//   }
+// }
+tcu::TestCaseGroup* createBlockOrderGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "block_order", "Test block orders"));
+	ComputeShaderSpec				spec;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					inputFloats		(numElements, 0);
+	vector<float>					outputFloats	(numElements, 0);
+
+	fillRandomScalars(rnd, -100.f, 100.f, &inputFloats[0], numElements);
+
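+	// Floor the inputs so the expected +1.5 / +42 / -27 results stay exactly representable in 32-bit floats.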
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		inputFloats[ndx] = deFloatFloor(inputFloats[ndx]);
+
+	for (size_t ndx = 0; ndx <= 50; ++ndx)
+		outputFloats[ndx] = -inputFloats[ndx];
+
+	for (size_t ndx = 51; ndx < numElements; ++ndx)
+	{
+		switch (ndx % 3)
+		{
+			case 0:		outputFloats[ndx] = inputFloats[ndx] + 1.5f; break;
+			case 1:		outputFloats[ndx] = inputFloats[ndx] + 42.f; break;
+			case 2:		outputFloats[ndx] = inputFloats[ndx] - 27.f; break;
+			default:	break;
+		}
+	}
+
+	spec.assembly =
+		string(s_ShaderPreamble) +
+
+		"OpSource GLSL 430\n"
+		"OpName %main \"main\"\n"
+		"OpName %id \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) +
+
+		"%u32ptr       = OpTypePointer Function %u32\n"
+		"%u32ptr_input = OpTypePointer Input %u32\n"
+
+		+ string(s_InputOutputBuffer) +
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+		"%const3    = OpConstant %u32 3\n"
+		"%const50   = OpConstant %u32 50\n"
+		"%constf1p5 = OpConstant %f32 1.5\n"
+		"%constf27  = OpConstant %f32 27.0\n"
+		"%constf42  = OpConstant %f32 42.0\n"
+
+		"%main = OpFunction %void None %voidf\n"
+
+		// entry block.
+		"%entry    = OpLabel\n"
+
+		// Create a temporary variable to hold the value of gl_GlobalInvocationID.x.
+		"%xvar     = OpVariable %u32ptr Function\n"
+		"%xptr     = OpAccessChain %u32ptr_input %id %zero\n"
+		"%x        = OpLoad %u32 %xptr\n"
+		"            OpStore %xvar %x\n"
+
+		"%cmp      = OpUGreaterThan %bool %x %const50\n"
+		"            OpSelectionMerge %if_merge None\n"
+		"            OpBranchConditional %cmp %if_true %if_false\n"
+
+		// Merge block for switch-statement: placed at the beginning.
+		"%switch_merge = OpLabel\n"
+		"                OpBranch %if_merge\n"
+
+		// Case 1 for switch-statement.
+		"%case1    = OpLabel\n"
+		"%x_1      = OpLoad %u32 %xvar\n"
+		"%inloc_1  = OpAccessChain %f32ptr %indata %zero %x_1\n"
+		"%inval_1  = OpLoad %f32 %inloc_1\n"
+		"%addf42   = OpFAdd %f32 %inval_1 %constf42\n"
+		"%outloc_1 = OpAccessChain %f32ptr %outdata %zero %x_1\n"
+		"            OpStore %outloc_1 %addf42\n"
+		"            OpBranch %switch_merge\n"
+
+		// False branch for if-statement: placed among the switch cases and before the true branch.
+		"%if_false = OpLabel\n"
+		"%x_f      = OpLoad %u32 %xvar\n"
+		"%inloc_f  = OpAccessChain %f32ptr %indata %zero %x_f\n"
+		"%inval_f  = OpLoad %f32 %inloc_f\n"
+		"%negate   = OpFNegate %f32 %inval_f\n"
+		"%outloc_f = OpAccessChain %f32ptr %outdata %zero %x_f\n"
+		"            OpStore %outloc_f %negate\n"
+		"            OpBranch %if_merge\n"
+
+		// Merge block for if-statement: placed between the true and false branches.
+		"%if_merge = OpLabel\n"
+		"            OpReturn\n"
+
+		// True branch for if-statement: placed among the switch cases and after the false branch.
+		"%if_true  = OpLabel\n"
+		"%xval_t   = OpLoad %u32 %xvar\n"
+		"%mod      = OpUMod %u32 %xval_t %const3\n"
+		"            OpSelectionMerge %switch_merge None\n"
+		"            OpSwitch %mod %default 0 %case0 1 %case1 2 %case2\n"
+
+		// Case 2 for switch-statement.
+		"%case2    = OpLabel\n"
+		"%x_2      = OpLoad %u32 %xvar\n"
+		"%inloc_2  = OpAccessChain %f32ptr %indata %zero %x_2\n"
+		"%inval_2  = OpLoad %f32 %inloc_2\n"
+		"%subf27   = OpFSub %f32 %inval_2 %constf27\n"
+		"%outloc_2 = OpAccessChain %f32ptr %outdata %zero %x_2\n"
+		"            OpStore %outloc_2 %subf27\n"
+		"            OpBranch %switch_merge\n"
+
+		// Default case for switch-statement: placed among the normal cases.
+		"%default = OpLabel\n"
+		"           OpBranch %switch_merge\n"
+
+		// Case 0 for switch-statement: out of order.
+		"%case0    = OpLabel\n"
+		"%x_0      = OpLoad %u32 %xvar\n"
+		"%inloc_0  = OpAccessChain %f32ptr %indata %zero %x_0\n"
+		"%inval_0  = OpLoad %f32 %inloc_0\n"
+		"%addf1p5  = OpFAdd %f32 %inval_0 %constf1p5\n"
+		"%outloc_0 = OpAccessChain %f32ptr %outdata %zero %x_0\n"
+		"            OpStore %outloc_0 %addf1p5\n"
+		"            OpBranch %switch_merge\n"
+
+		"            OpFunctionEnd\n";
+	spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats)));
+	spec.outputs.push_back(BufferSp(new Float32Buffer(outputFloats)));
+	spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "all", "various out-of-order blocks", spec));
+
+	return group.release();
+}
+
+tcu::TestCaseGroup* createMultipleShaderGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "multiple_shaders", "Test multiple shaders in the same module"));
+	ComputeShaderSpec				spec1;
+	ComputeShaderSpec				spec2;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					inputFloats		(numElements, 0);
+	vector<float>					outputFloats1	(numElements, 0);
+	vector<float>					outputFloats2	(numElements, 0);
+	fillRandomScalars(rnd, -500.f, 500.f, &inputFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+	{
+		outputFloats1[ndx] = inputFloats[ndx] + inputFloats[ndx];
+		outputFloats2[ndx] = -inputFloats[ndx];
+	}
+
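+	// Both compute entry points live in the same module; each spec below selects one by name via entryPoint.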
+	const string assembly(
+		"OpCapability Shader\n"
+		"OpMemoryModel Logical GLSL450\n"
+		"OpEntryPoint GLCompute %comp_main1 \"entrypoint1\" %id\n"
+		"OpEntryPoint GLCompute %comp_main2 \"entrypoint2\" %id\n"
+		// A module cannot have two OpEntryPoint instructions with the same Execution Model and the same Name string.
+		"OpEntryPoint Vertex    %vert_main  \"entrypoint2\" %vert_builtins %vertexIndex %instanceIndex\n"
+		"OpExecutionMode %comp_main1 LocalSize 1 1 1\n"
+		"OpExecutionMode %comp_main2 LocalSize 1 1 1\n"
+
+		"OpName %comp_main1              \"entrypoint1\"\n"
+		"OpName %comp_main2              \"entrypoint2\"\n"
+		"OpName %vert_main               \"entrypoint2\"\n"
+		"OpName %id                      \"gl_GlobalInvocationID\"\n"
+		"OpName %vert_builtin_st         \"gl_PerVertex\"\n"
+		"OpName %vertexIndex             \"gl_VertexIndex\"\n"
+		"OpName %instanceIndex           \"gl_InstanceIndex\"\n"
+		"OpMemberName %vert_builtin_st 0 \"gl_Position\"\n"
+		"OpMemberName %vert_builtin_st 1 \"gl_PointSize\"\n"
+		"OpMemberName %vert_builtin_st 2 \"gl_ClipDistance\"\n"
+
+		"OpDecorate %id                      BuiltIn GlobalInvocationId\n"
+		"OpDecorate %vertexIndex             BuiltIn VertexIndex\n"
+		"OpDecorate %instanceIndex           BuiltIn InstanceIndex\n"
+		"OpDecorate %vert_builtin_st         Block\n"
+		"OpMemberDecorate %vert_builtin_st 0 BuiltIn Position\n"
+		"OpMemberDecorate %vert_builtin_st 1 BuiltIn PointSize\n"
+		"OpMemberDecorate %vert_builtin_st 2 BuiltIn ClipDistance\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%zero       = OpConstant %i32 0\n"
+		"%one        = OpConstant %u32 1\n"
+		"%c_f32_1    = OpConstant %f32 1\n"
+
+		"%i32ptr              = OpTypePointer Input %i32\n"
+		"%vec4                = OpTypeVector %f32 4\n"
+		"%vec4ptr             = OpTypePointer Output %vec4\n"
+		"%f32arr1             = OpTypeArray %f32 %one\n"
+		"%vert_builtin_st     = OpTypeStruct %vec4 %f32 %f32arr1\n"
+		"%vert_builtin_st_ptr = OpTypePointer Output %vert_builtin_st\n"
+		"%vert_builtins       = OpVariable %vert_builtin_st_ptr Output\n"
+
+		"%id         = OpVariable %uvec3ptr Input\n"
+		"%vertexIndex = OpVariable %i32ptr Input\n"
+		"%instanceIndex = OpVariable %i32ptr Input\n"
+		"%c_vec4_1   = OpConstantComposite %vec4 %c_f32_1 %c_f32_1 %c_f32_1 %c_f32_1\n"
+
+		// gl_Position = vec4(1.);
+		"%vert_main  = OpFunction %void None %voidf\n"
+		"%vert_entry = OpLabel\n"
+		"%position   = OpAccessChain %vec4ptr %vert_builtins %zero\n"
+		"              OpStore %position %c_vec4_1\n"
+		"              OpReturn\n"
+		"              OpFunctionEnd\n"
+
+		// Double inputs.
+		"%comp_main1  = OpFunction %void None %voidf\n"
+		"%comp1_entry = OpLabel\n"
+		"%idval1      = OpLoad %uvec3 %id\n"
+		"%x1          = OpCompositeExtract %u32 %idval1 0\n"
+		"%inloc1      = OpAccessChain %f32ptr %indata %zero %x1\n"
+		"%inval1      = OpLoad %f32 %inloc1\n"
+		"%add         = OpFAdd %f32 %inval1 %inval1\n"
+		"%outloc1     = OpAccessChain %f32ptr %outdata %zero %x1\n"
+		"               OpStore %outloc1 %add\n"
+		"               OpReturn\n"
+		"               OpFunctionEnd\n"
+
+		// Negate inputs.
+		"%comp_main2  = OpFunction %void None %voidf\n"
+		"%comp2_entry = OpLabel\n"
+		"%idval2      = OpLoad %uvec3 %id\n"
+		"%x2          = OpCompositeExtract %u32 %idval2 0\n"
+		"%inloc2      = OpAccessChain %f32ptr %indata %zero %x2\n"
+		"%inval2      = OpLoad %f32 %inloc2\n"
+		"%neg         = OpFNegate %f32 %inval2\n"
+		"%outloc2     = OpAccessChain %f32ptr %outdata %zero %x2\n"
+		"               OpStore %outloc2 %neg\n"
+		"               OpReturn\n"
+		"               OpFunctionEnd\n");
+
+	spec1.assembly = assembly;
+	spec1.inputs.push_back(BufferSp(new Float32Buffer(inputFloats)));
+	spec1.outputs.push_back(BufferSp(new Float32Buffer(outputFloats1)));
+	spec1.numWorkGroups = IVec3(numElements, 1, 1);
+	spec1.entryPoint = "entrypoint1";
+
+	spec2.assembly = assembly;
+	spec2.inputs.push_back(BufferSp(new Float32Buffer(inputFloats)));
+	spec2.outputs.push_back(BufferSp(new Float32Buffer(outputFloats2)));
+	spec2.numWorkGroups = IVec3(numElements, 1, 1);
+	spec2.entryPoint = "entrypoint2";
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "shader1", "multiple shaders in the same module", spec1));
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "shader2", "multiple shaders in the same module", spec2));
+
+	return group.release();
+}
+
+inline std::string makeLongUTF8String (size_t num4ByteChars)
+{
+	// An example of one of the longest valid UTF-8 characters (4 bytes). Be
+	// explicit about the character type because some Microsoft compilers may
+	// otherwise interpret the string literal as wide (16-bit) characters.
+	// Ideally we would use a C++11 UTF-8 string literal, but we want to
+	// support older Microsoft compilers.
+	const std::basic_string<char> earthAfrica("\xF0\x9F\x8C\x8D");
+	std::string longString;
+	longString.reserve(num4ByteChars * 4);
+	for (size_t count = 0; count < num4ByteChars; count++)
+	{
+		longString += earthAfrica;
+	}
+	return longString;
+}
+
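+// Note on the "word count: 65535" comments below: an OpSource instruction built
+// from makeLongUTF8String(65530) + "ccc" takes 4 opcode/operand words plus
+// (65530*4 + 3 + 1) / 4 = 65531 literal words, i.e. exactly the 65535-word
+// instruction size limit; the OpSourceContinued and OpSourceExtension cases are
+// sized analogously.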
+tcu::TestCaseGroup* createOpSourceGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opsource", "Tests the OpSource & OpSourceContinued instructions"));
+	vector<CaseParameter>			cases;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					positiveFloats	(numElements, 0);
+	vector<float>					negativeFloats	(numElements, 0);
+	const StringTemplate			shaderTemplate	(
+		"OpCapability Shader\n"
+		"OpMemoryModel Logical GLSL450\n"
+
+		"OpEntryPoint GLCompute %main \"main\" %id\n"
+		"OpExecutionMode %main LocalSize 1 1 1\n"
+
+		"${SOURCE}\n"
+
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc     = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval     = OpLoad %f32 %inloc\n"
+		"%neg       = OpFNegate %f32 %inval\n"
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"             OpStore %outloc %neg\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n");
+
+	cases.push_back(CaseParameter("unknown_source",							"OpSource Unknown 0"));
+	cases.push_back(CaseParameter("wrong_source",							"OpSource OpenCL_C 210"));
+	cases.push_back(CaseParameter("normal_filename",						"%fname = OpString \"filename\"\n"
+																			"OpSource GLSL 430 %fname"));
+	cases.push_back(CaseParameter("empty_filename",							"%fname = OpString \"\"\n"
+																			"OpSource GLSL 430 %fname"));
+	cases.push_back(CaseParameter("normal_source_code",						"%fname = OpString \"filename\"\n"
+																			"OpSource GLSL 430 %fname \"#version 430\nvoid main() {}\""));
+	cases.push_back(CaseParameter("empty_source_code",						"%fname = OpString \"filename\"\n"
+																			"OpSource GLSL 430 %fname \"\""));
+	cases.push_back(CaseParameter("long_source_code",						"%fname = OpString \"filename\"\n"
+																			"OpSource GLSL 430 %fname \"" + makeLongUTF8String(65530) + "ccc\"")); // word count: 65535
+	cases.push_back(CaseParameter("utf8_source_code",						"%fname = OpString \"filename\"\n"
+																			"OpSource GLSL 430 %fname \"\xE2\x98\x82\xE2\x98\x85\"")); // umbrella & black star symbol
+	cases.push_back(CaseParameter("normal_sourcecontinued",					"%fname = OpString \"filename\"\n"
+																			"OpSource GLSL 430 %fname \"#version 430\nvo\"\n"
+																			"OpSourceContinued \"id main() {}\""));
+	cases.push_back(CaseParameter("empty_sourcecontinued",					"%fname = OpString \"filename\"\n"
+																			"OpSource GLSL 430 %fname \"#version 430\nvoid main() {}\"\n"
+																			"OpSourceContinued \"\""));
+	cases.push_back(CaseParameter("long_sourcecontinued",					"%fname = OpString \"filename\"\n"
+																			"OpSource GLSL 430 %fname \"#version 430\nvoid main() {}\"\n"
+																			"OpSourceContinued \"" + makeLongUTF8String(65533) + "ccc\"")); // word count: 65535
+	cases.push_back(CaseParameter("utf8_sourcecontinued",					"%fname = OpString \"filename\"\n"
+																			"OpSource GLSL 430 %fname \"#version 430\nvoid main() {}\"\n"
+																			"OpSourceContinued \"\xE2\x98\x8E\xE2\x9A\x91\"")); // white telephone & black flag symbol
+	cases.push_back(CaseParameter("multi_sourcecontinued",					"%fname = OpString \"filename\"\n"
+																			"OpSource GLSL 430 %fname \"#version 430\n\"\n"
+																			"OpSourceContinued \"void\"\n"
+																			"OpSourceContinued \"main()\"\n"
+																			"OpSourceContinued \"{}\""));
+	cases.push_back(CaseParameter("empty_source_before_sourcecontinued",	"%fname = OpString \"filename\"\n"
+																			"OpSource GLSL 430 %fname \"\"\n"
+																			"OpSourceContinued \"#version 430\nvoid main() {}\""));
+
+	fillRandomScalars(rnd, 1.f, 100.f, &positiveFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		negativeFloats[ndx] = -positiveFloats[ndx];
+
+	for (size_t caseNdx = 0; caseNdx < cases.size(); ++caseNdx)
+	{
+		map<string, string>		specializations;
+		ComputeShaderSpec		spec;
+
+		specializations["SOURCE"] = cases[caseNdx].param;
+		spec.assembly = shaderTemplate.specialize(specializations);
+		spec.inputs.push_back(BufferSp(new Float32Buffer(positiveFloats)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(negativeFloats)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+		group->addChild(new SpvAsmComputeShaderCase(testCtx, cases[caseNdx].name, cases[caseNdx].name, spec));
+	}
+
+	return group.release();
+}
+
+tcu::TestCaseGroup* createOpSourceExtensionGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opsourceextension", "Tests the OpSourceExtension instruction"));
+	vector<CaseParameter>			cases;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					inputFloats		(numElements, 0);
+	vector<float>					outputFloats	(numElements, 0);
+	const StringTemplate			shaderTemplate	(
+		string(s_ShaderPreamble) +
+
+		"OpSourceExtension \"${EXTENSION}\"\n"
+
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc     = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval     = OpLoad %f32 %inloc\n"
+		"%neg       = OpFNegate %f32 %inval\n"
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"             OpStore %outloc %neg\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n");
+
+	cases.push_back(CaseParameter("empty_extension",	""));
+	cases.push_back(CaseParameter("real_extension",		"GL_ARB_texture_rectangle"));
+	cases.push_back(CaseParameter("fake_extension",		"GL_ARB_im_the_ultimate_extension"));
+	cases.push_back(CaseParameter("utf8_extension",		"GL_ARB_\xE2\x98\x82\xE2\x98\x85"));
+	cases.push_back(CaseParameter("long_extension",		makeLongUTF8String(65533) + "ccc")); // word count: 65535
+
+	fillRandomScalars(rnd, -200.f, 200.f, &inputFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		outputFloats[ndx] = -inputFloats[ndx];
+
+	for (size_t caseNdx = 0; caseNdx < cases.size(); ++caseNdx)
+	{
+		map<string, string>		specializations;
+		ComputeShaderSpec		spec;
+
+		specializations["EXTENSION"] = cases[caseNdx].param;
+		spec.assembly = shaderTemplate.specialize(specializations);
+		spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(outputFloats)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+		group->addChild(new SpvAsmComputeShaderCase(testCtx, cases[caseNdx].name, cases[caseNdx].name, spec));
+	}
+
+	return group.release();
+}
+
+// Checks that a compute shader can generate a constant null value of various types, without exercising a computation on it.
+tcu::TestCaseGroup* createOpConstantNullGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opconstantnull", "Tests the OpConstantNull instruction"));
+	vector<CaseParameter>			cases;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					positiveFloats	(numElements, 0);
+	vector<float>					negativeFloats	(numElements, 0);
+	const StringTemplate			shaderTemplate	(
+		string(s_ShaderPreamble) +
+
+		"OpSource GLSL 430\n"
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"${TYPE}\n"
+		"%null      = OpConstantNull %type\n"
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc     = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval     = OpLoad %f32 %inloc\n"
+		"%neg       = OpFNegate %f32 %inval\n"
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"             OpStore %outloc %neg\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n");
+
+	cases.push_back(CaseParameter("bool",			"%type = OpTypeBool"));
+	cases.push_back(CaseParameter("sint32",			"%type = OpTypeInt 32 1"));
+	cases.push_back(CaseParameter("uint32",			"%type = OpTypeInt 32 0"));
+	cases.push_back(CaseParameter("float32",		"%type = OpTypeFloat 32"));
+	cases.push_back(CaseParameter("vec4float32",	"%type = OpTypeVector %f32 4"));
+	cases.push_back(CaseParameter("vec3bool",		"%type = OpTypeVector %bool 3"));
+	cases.push_back(CaseParameter("vec2uint32",		"%type = OpTypeVector %u32 2"));
+	cases.push_back(CaseParameter("matrix",			"%type = OpTypeMatrix %fvec3 3"));
+	cases.push_back(CaseParameter("array",			"%100 = OpConstant %u32 100\n"
+													"%type = OpTypeArray %i32 %100"));
+	cases.push_back(CaseParameter("struct",			"%type = OpTypeStruct %f32 %i32 %u32"));
+	cases.push_back(CaseParameter("pointer",		"%type = OpTypePointer Function %i32"));
+
+	fillRandomScalars(rnd, 1.f, 100.f, &positiveFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		negativeFloats[ndx] = -positiveFloats[ndx];
+
+	for (size_t caseNdx = 0; caseNdx < cases.size(); ++caseNdx)
+	{
+		map<string, string>		specializations;
+		ComputeShaderSpec		spec;
+
+		specializations["TYPE"] = cases[caseNdx].param;
+		spec.assembly = shaderTemplate.specialize(specializations);
+		spec.inputs.push_back(BufferSp(new Float32Buffer(positiveFloats)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(negativeFloats)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+		group->addChild(new SpvAsmComputeShaderCase(testCtx, cases[caseNdx].name, cases[caseNdx].name, spec));
+	}
+
+	return group.release();
+}
+
+// Checks that a compute shader can generate a constant composite value of various types, without exercising a computation on it.
+tcu::TestCaseGroup* createOpConstantCompositeGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opconstantcomposite", "Tests the OpConstantComposite instruction"));
+	vector<CaseParameter>			cases;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					positiveFloats	(numElements, 0);
+	vector<float>					negativeFloats	(numElements, 0);
+	const StringTemplate			shaderTemplate	(
+		string(s_ShaderPreamble) +
+
+		"OpSource GLSL 430\n"
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		"${CONSTANT}\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc     = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval     = OpLoad %f32 %inloc\n"
+		"%neg       = OpFNegate %f32 %inval\n"
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"             OpStore %outloc %neg\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n");
+
+	cases.push_back(CaseParameter("vector",			"%five = OpConstant %u32 5\n"
+													"%const = OpConstantComposite %uvec3 %five %zero %five"));
+	cases.push_back(CaseParameter("matrix",			"%m3fvec3 = OpTypeMatrix %fvec3 3\n"
+													"%ten = OpConstant %f32 10.\n"
+													"%fzero = OpConstant %f32 0.\n"
+													"%vec = OpConstantComposite %fvec3 %ten %fzero %ten\n"
+													"%mat = OpConstantComposite %m3fvec3 %vec %vec %vec"));
+	cases.push_back(CaseParameter("struct",			"%m2vec3 = OpTypeMatrix %fvec3 2\n"
+													"%struct = OpTypeStruct %i32 %f32 %fvec3 %m2vec3\n"
+													"%fzero = OpConstant %f32 0.\n"
+													"%one = OpConstant %f32 1.\n"
+													"%point5 = OpConstant %f32 0.5\n"
+													"%vec = OpConstantComposite %fvec3 %one %one %fzero\n"
+													"%mat = OpConstantComposite %m2vec3 %vec %vec\n"
+													"%const = OpConstantComposite %struct %zero %point5 %vec %mat"));
+	cases.push_back(CaseParameter("nested_struct",	"%st1 = OpTypeStruct %u32 %f32\n"
+													"%st2 = OpTypeStruct %i32 %i32\n"
+													"%struct = OpTypeStruct %st1 %st2\n"
+													"%point5 = OpConstant %f32 0.5\n"
+													"%one = OpConstant %u32 1\n"
+													"%ten = OpConstant %i32 10\n"
+													"%st1val = OpConstantComposite %st1 %one %point5\n"
+													"%st2val = OpConstantComposite %st2 %ten %ten\n"
+													"%const = OpConstantComposite %struct %st1val %st2val"));
+
+	fillRandomScalars(rnd, 1.f, 100.f, &positiveFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		negativeFloats[ndx] = -positiveFloats[ndx];
+
+	for (size_t caseNdx = 0; caseNdx < cases.size(); ++caseNdx)
+	{
+		map<string, string>		specializations;
+		ComputeShaderSpec		spec;
+
+		specializations["CONSTANT"] = cases[caseNdx].param;
+		spec.assembly = shaderTemplate.specialize(specializations);
+		spec.inputs.push_back(BufferSp(new Float32Buffer(positiveFloats)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(negativeFloats)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+		group->addChild(new SpvAsmComputeShaderCase(testCtx, cases[caseNdx].name, cases[caseNdx].name, spec));
+	}
+
+	return group.release();
+}
+
+// Creates a floating-point number with the given exponent and the given
+// significand bits set. It can only create normalized numbers. Only the least
+// significant 24 bits of the significand are examined, and the lowest of those
+// bits is ignored. This lets values be written in a form that lines up with
+// C99 hex-floats.
+// For example, to get 0x1.7f34p-12 you would call
+// constructNormalizedFloat(-12, 0x7f3400).
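+// Equivalently, ignoring that lowest bit, the result is
+// (1.0 + significand / 0x1000000) * 2^exponent.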
+float constructNormalizedFloat (deInt32 exponent, deUint32 significand)
+{
+	float f = 1.0f;
+
+	for (deInt32 idx = 0; idx < 23; ++idx)
+	{
+		f += ((significand & 0x800000) == 0) ? 0.f : std::ldexp(1.0f, -(idx + 1));
+		significand <<= 1;
+	}
+
+	return std::ldexp(f, exponent);
+}
+
+// Compare callback for the OpQuantizeToF16 compute cases whose inputs require rounding.
+// Returns true if the output matches the test case's expectation. Because the
+// inputs do not fit exactly in float16's 10-bit mantissa, each output is
+// accepted as either of the two neighbouring representable values.
+bool compareOpQuantizeF16ComputeExactCase (const std::vector<BufferSp>&, const vector<AllocationSp>& outputAllocs, const std::vector<BufferSp>& expectedOutputs)
+{
+	if (outputAllocs.size() != 1)
+		return false;
+
+	// We only need the expected output for its size; the values themselves are
+	// checked against both permitted rounding results below.
+	const BufferSp&	expectedOutput	= expectedOutputs[0];
+	const float*	outputAsFloat	= static_cast<const float*>(outputAllocs[0]->getHostPtr());
+
+	if (expectedOutput->getNumBytes() != 4*sizeof(float))
+	{
+		return false;
+	}
+
+	if (*outputAsFloat != constructNormalizedFloat(8, 0x304000) &&
+		*outputAsFloat != constructNormalizedFloat(8, 0x300000))
+	{
+		return false;
+	}
+	outputAsFloat++;
+
+	if (*outputAsFloat != -constructNormalizedFloat(-7, 0x600000) &&
+		*outputAsFloat != -constructNormalizedFloat(-7, 0x604000))
+	{
+		return false;
+	}
+	outputAsFloat++;
+
+	if (*outputAsFloat != constructNormalizedFloat(2, 0x01C000) &&
+		*outputAsFloat != constructNormalizedFloat(2, 0x020000))
+	{
+		return false;
+	}
+	outputAsFloat++;
+
+	if (*outputAsFloat != constructNormalizedFloat(1, 0xFFC000) &&
+		*outputAsFloat != constructNormalizedFloat(2, 0x000000))
+	{
+		return false;
+	}
+
+	return true;
+}
+
+// Checks that every output from a test-case is a float NaN.
+bool compareNan (const std::vector<BufferSp>&, const vector<AllocationSp>& outputAllocs, const std::vector<BufferSp>& expectedOutputs)
+{
+	if (outputAllocs.size() != 1)
+		return false;
+
+	// We only need the expected output for its size; NaN values cannot be
+	// compared for equality directly.
+	const BufferSp& expectedOutput		= expectedOutputs[0];
+	const float* output_as_float		= static_cast<const float*>(outputAllocs[0]->getHostPtr());
+
+	for (size_t idx = 0; idx < expectedOutput->getNumBytes() / sizeof(float); ++idx)
+	{
+		if (!isnan(output_as_float[idx]))
+		{
+			return false;
+		}
+	}
+
+	return true;
+}
+
+// Checks that OpQuantizeToF16 quantizes 32-bit floats to 16-bit precision, handling infinities, NaNs, values that flush to zero, exactly representable values and rounding.
+tcu::TestCaseGroup* createOpQuantizeToF16Group (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opquantize", "Tests the OpQuantizeToF16 instruction"));
+
+	const std::string shader (
+		string(s_ShaderPreamble) +
+
+		"OpSource GLSL 430\n"
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc     = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval     = OpLoad %f32 %inloc\n"
+		"%quant     = OpQuantizeToF16 %f32 %inval\n"
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"             OpStore %outloc %quant\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n");
+
+	{
+		ComputeShaderSpec	spec;
+		const deUint32		numElements		= 100;
+		vector<float>		infinities;
+		vector<float>		results;
+
+		infinities.reserve(numElements);
+		results.reserve(numElements);
+
+		for (size_t idx = 0; idx < numElements; ++idx)
+		{
+			switch(idx % 4)
+			{
+				case 0:
+					infinities.push_back(std::numeric_limits<float>::infinity());
+					results.push_back(std::numeric_limits<float>::infinity());
+					break;
+				case 1:
+					infinities.push_back(-std::numeric_limits<float>::infinity());
+					results.push_back(-std::numeric_limits<float>::infinity());
+					break;
+				case 2:
+					infinities.push_back(std::ldexp(1.0f, 16));
+					results.push_back(std::numeric_limits<float>::infinity());
+					break;
+				case 3:
+					infinities.push_back(std::ldexp(-1.0f, 32));
+					results.push_back(-std::numeric_limits<float>::infinity());
+					break;
+			}
+		}
+
+		spec.assembly = shader;
+		spec.inputs.push_back(BufferSp(new Float32Buffer(infinities)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(results)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+		group->addChild(new SpvAsmComputeShaderCase(
+			testCtx, "infinities", "Check that infinities are propagated and created", spec));
+	}
+
+	{
+		ComputeShaderSpec	spec;
+		vector<float>		nans;
+		const deUint32		numElements		= 100;
+
+		nans.reserve(numElements);
+
+		for (size_t idx = 0; idx < numElements; ++idx)
+		{
+			if (idx % 2 == 0)
+			{
+				nans.push_back(std::numeric_limits<float>::quiet_NaN());
+			}
+			else
+			{
+				nans.push_back(-std::numeric_limits<float>::quiet_NaN());
+			}
+		}
+
+		spec.assembly = shader;
+		spec.inputs.push_back(BufferSp(new Float32Buffer(nans)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(nans)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+		spec.verifyIO = &compareNan;
+
+		group->addChild(new SpvAsmComputeShaderCase(
+			testCtx, "propagated_nans", "Check that nans are propagated", spec));
+	}
+
+	{
+		ComputeShaderSpec	spec;
+		vector<float>		small;
+		vector<float>		zeros;
+		const deUint32		numElements		= 100;
+
+		small.reserve(numElements);
+		zeros.reserve(numElements);
+
+		for (size_t idx = 0; idx < numElements; ++idx)
+		{
+			switch(idx % 6)
+			{
+				case 0:
+					small.push_back(0.f);
+					zeros.push_back(0.f);
+					break;
+				case 1:
+					small.push_back(-0.f);
+					zeros.push_back(-0.f);
+					break;
+				case 2:
+					small.push_back(std::ldexp(1.0f, -16));
+					zeros.push_back(0.f);
+					break;
+				case 3:
+					small.push_back(std::ldexp(-1.0f, -32));
+					zeros.push_back(-0.f);
+					break;
+				case 4:
+					small.push_back(std::ldexp(1.0f, -127));
+					zeros.push_back(0.f);
+					break;
+				case 5:
+					small.push_back(-std::ldexp(1.0f, -128));
+					zeros.push_back(-0.f);
+					break;
+			}
+		}
+
+		spec.assembly = shader;
+		spec.inputs.push_back(BufferSp(new Float32Buffer(small)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(zeros)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+		group->addChild(new SpvAsmComputeShaderCase(
+			testCtx, "flush_to_zero", "Check that values are zeroed correctly", spec));
+	}
+
+	{
+		ComputeShaderSpec	spec;
+		vector<float>		exact;
+		const deUint32		numElements		= 200;
+
+		exact.reserve(numElements);
+
+		for (size_t idx = 0; idx < numElements; ++idx)
+			exact.push_back(static_cast<float>(static_cast<int>(idx) - 100));
+
+		spec.assembly = shader;
+		spec.inputs.push_back(BufferSp(new Float32Buffer(exact)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(exact)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+		group->addChild(new SpvAsmComputeShaderCase(
+			testCtx, "exact", "Check that values are exactly preserved where appropriate", spec));
+	}
+
+	{
+		ComputeShaderSpec	spec;
+		vector<float>		inputs;
+		const deUint32		numElements		= 4;
+
+		inputs.push_back(constructNormalizedFloat(8,	0x300300));
+		inputs.push_back(-constructNormalizedFloat(-7,	0x600800));
+		inputs.push_back(constructNormalizedFloat(2,	0x01E000));
+		inputs.push_back(constructNormalizedFloat(1,	0xFFE000));
+
+		spec.assembly = shader;
+		spec.verifyIO = &compareOpQuantizeF16ComputeExactCase;
+		spec.inputs.push_back(BufferSp(new Float32Buffer(inputs)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(inputs)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+		group->addChild(new SpvAsmComputeShaderCase(
+			testCtx, "rounded", "Check that values are rounded when needed", spec));
+	}
+
+	return group.release();
+}
+
+// Performs a bitwise copy of source to the destination type Dest.
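+// For example, bitwiseCast<deUint32>(1.0f) yields 0x3F800000 on IEEE-754
+// platforms; the spec-constant tests below use this to pass float values
+// through the deUint32-typed specConstants list.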
+template <typename Dest, typename Src>
+Dest bitwiseCast (Src source)
+{
+	Dest dest;
+	DE_STATIC_ASSERT(sizeof(source) == sizeof(dest));
+	deMemcpy(&dest, &source, sizeof(dest));
+	return dest;
+}
+
+tcu::TestCaseGroup* createSpecConstantOpQuantizeToF16Group (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opspecconstantop_opquantize", "Tests the OpQuantizeToF16 opcode for the OpSpecConstantOp instruction"));
+
+	const std::string shader (
+		string(s_ShaderPreamble) +
+
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		"OpDecorate %sc_0  SpecId 0\n"
+		"OpDecorate %sc_1  SpecId 1\n"
+		"OpDecorate %sc_2  SpecId 2\n"
+		"OpDecorate %sc_3  SpecId 3\n"
+		"OpDecorate %sc_4  SpecId 4\n"
+		"OpDecorate %sc_5  SpecId 5\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+		"%c_u32_6   = OpConstant %u32 6\n"
+
+		"%sc_0      = OpSpecConstant %f32 0.\n"
+		"%sc_1      = OpSpecConstant %f32 0.\n"
+		"%sc_2      = OpSpecConstant %f32 0.\n"
+		"%sc_3      = OpSpecConstant %f32 0.\n"
+		"%sc_4      = OpSpecConstant %f32 0.\n"
+		"%sc_5      = OpSpecConstant %f32 0.\n"
+
+		"%sc_0_quant = OpSpecConstantOp %f32 QuantizeToF16 %sc_0\n"
+		"%sc_1_quant = OpSpecConstantOp %f32 QuantizeToF16 %sc_1\n"
+		"%sc_2_quant = OpSpecConstantOp %f32 QuantizeToF16 %sc_2\n"
+		"%sc_3_quant = OpSpecConstantOp %f32 QuantizeToF16 %sc_3\n"
+		"%sc_4_quant = OpSpecConstantOp %f32 QuantizeToF16 %sc_4\n"
+		"%sc_5_quant = OpSpecConstantOp %f32 QuantizeToF16 %sc_5\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"%selector  = OpUMod %u32 %x %c_u32_6\n"
+		"            OpSelectionMerge %exit None\n"
+		"            OpSwitch %selector %exit 0 %case0 1 %case1 2 %case2 3 %case3 4 %case4 5 %case5\n"
+
+		"%case0     = OpLabel\n"
+		"             OpStore %outloc %sc_0_quant\n"
+		"             OpBranch %exit\n"
+
+		"%case1     = OpLabel\n"
+		"             OpStore %outloc %sc_1_quant\n"
+		"             OpBranch %exit\n"
+
+		"%case2     = OpLabel\n"
+		"             OpStore %outloc %sc_2_quant\n"
+		"             OpBranch %exit\n"
+
+		"%case3     = OpLabel\n"
+		"             OpStore %outloc %sc_3_quant\n"
+		"             OpBranch %exit\n"
+
+		"%case4     = OpLabel\n"
+		"             OpStore %outloc %sc_4_quant\n"
+		"             OpBranch %exit\n"
+
+		"%case5     = OpLabel\n"
+		"             OpStore %outloc %sc_5_quant\n"
+		"             OpBranch %exit\n"
+
+		"%exit      = OpLabel\n"
+		"             OpReturn\n"
+
+		"             OpFunctionEnd\n");
+
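+	// The shader above stores the quantized value of the spec constant selected by
+	// gl_GlobalInvocationID.x % 6 into outdata[x]; each block below overrides the
+	// six spec constants and checks the expected quantized results.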
+	{
+		ComputeShaderSpec	spec;
+		const deUint8		numCases	= 4;
+		vector<float>		inputs		(numCases, 0.f);
+		vector<float>		outputs;
+
+		spec.assembly		= shader;
+		spec.numWorkGroups	= IVec3(numCases, 1, 1);
+
+		spec.specConstants.push_back(bitwiseCast<deUint32>(std::numeric_limits<float>::infinity()));
+		spec.specConstants.push_back(bitwiseCast<deUint32>(-std::numeric_limits<float>::infinity()));
+		spec.specConstants.push_back(bitwiseCast<deUint32>(std::ldexp(1.0f, 16)));
+		spec.specConstants.push_back(bitwiseCast<deUint32>(std::ldexp(-1.0f, 32)));
+
+		outputs.push_back(std::numeric_limits<float>::infinity());
+		outputs.push_back(-std::numeric_limits<float>::infinity());
+		outputs.push_back(std::numeric_limits<float>::infinity());
+		outputs.push_back(-std::numeric_limits<float>::infinity());
+
+		spec.inputs.push_back(BufferSp(new Float32Buffer(inputs)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(outputs)));
+
+		group->addChild(new SpvAsmComputeShaderCase(
+			testCtx, "infinities", "Check that infinities are propagated and created", spec));
+	}
+
+	{
+		ComputeShaderSpec	spec;
+		const deUint8		numCases	= 2;
+		vector<float>		inputs		(numCases, 0.f);
+		vector<float>		outputs;
+
+		spec.assembly		= shader;
+		spec.numWorkGroups	= IVec3(numCases, 1, 1);
+		spec.verifyIO		= &compareNan;
+
+		outputs.push_back(std::numeric_limits<float>::quiet_NaN());
+		outputs.push_back(-std::numeric_limits<float>::quiet_NaN());
+
+		for (deUint8 idx = 0; idx < numCases; ++idx)
+			spec.specConstants.push_back(bitwiseCast<deUint32>(outputs[idx]));
+
+		spec.inputs.push_back(BufferSp(new Float32Buffer(inputs)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(outputs)));
+
+		group->addChild(new SpvAsmComputeShaderCase(
+			testCtx, "propagated_nans", "Check that nans are propagated", spec));
+	}
+
+	{
+		ComputeShaderSpec	spec;
+		const deUint8		numCases	= 6;
+		vector<float>		inputs		(numCases, 0.f);
+		vector<float>		outputs;
+
+		spec.assembly		= shader;
+		spec.numWorkGroups	= IVec3(numCases, 1, 1);
+
+		spec.specConstants.push_back(bitwiseCast<deUint32>(0.f));
+		spec.specConstants.push_back(bitwiseCast<deUint32>(-0.f));
+		spec.specConstants.push_back(bitwiseCast<deUint32>(std::ldexp(1.0f, -16)));
+		spec.specConstants.push_back(bitwiseCast<deUint32>(std::ldexp(-1.0f, -32)));
+		spec.specConstants.push_back(bitwiseCast<deUint32>(std::ldexp(1.0f, -127)));
+		spec.specConstants.push_back(bitwiseCast<deUint32>(-std::ldexp(1.0f, -128)));
+
+		outputs.push_back(0.f);
+		outputs.push_back(-0.f);
+		outputs.push_back(0.f);
+		outputs.push_back(-0.f);
+		outputs.push_back(0.f);
+		outputs.push_back(-0.f);
+
+		spec.inputs.push_back(BufferSp(new Float32Buffer(inputs)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(outputs)));
+
+		group->addChild(new SpvAsmComputeShaderCase(
+			testCtx, "flush_to_zero", "Check that values are zeroed correctly", spec));
+	}
+
+	{
+		ComputeShaderSpec	spec;
+		const deUint8		numCases	= 6;
+		vector<float>		inputs		(numCases, 0.f);
+		vector<float>		outputs;
+
+		spec.assembly		= shader;
+		spec.numWorkGroups	= IVec3(numCases, 1, 1);
+
+		for (deUint8 idx = 0; idx < numCases; ++idx)
+		{
+			const float f = static_cast<float>(idx * 10 - 30) / 4.f;
+			spec.specConstants.push_back(bitwiseCast<deUint32>(f));
+			outputs.push_back(f);
+		}
+
+		spec.inputs.push_back(BufferSp(new Float32Buffer(inputs)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(outputs)));
+
+		group->addChild(new SpvAsmComputeShaderCase(
+			testCtx, "exact", "Check that values are exactly preserved where appropriate", spec));
+	}
+
+	{
+		ComputeShaderSpec	spec;
+		const deUint8		numCases	= 4;
+		vector<float>		inputs		(numCases, 0.f);
+		vector<float>		outputs;
+
+		spec.assembly		= shader;
+		spec.numWorkGroups	= IVec3(numCases, 1, 1);
+		spec.verifyIO		= &compareOpQuantizeF16ComputeExactCase;
+
+		outputs.push_back(constructNormalizedFloat(8, 0x300300));
+		outputs.push_back(-constructNormalizedFloat(-7, 0x600800));
+		outputs.push_back(constructNormalizedFloat(2, 0x01E000));
+		outputs.push_back(constructNormalizedFloat(1, 0xFFE000));
+
+		for (deUint8 idx = 0; idx < numCases; ++idx)
+			spec.specConstants.push_back(bitwiseCast<deUint32>(outputs[idx]));
+
+		spec.inputs.push_back(BufferSp(new Float32Buffer(inputs)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(outputs)));
+
+		group->addChild(new SpvAsmComputeShaderCase(
+			testCtx, "rounded", "Check that values are rounded when needed", spec));
+	}
+
+	return group.release();
+}
+
+// Checks that constant null/composite values can be used in computation.
+tcu::TestCaseGroup* createOpConstantUsageGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opconstantnullcomposite", "Spot-check the OpConstantNull & OpConstantComposite instructions"));
+	ComputeShaderSpec				spec;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					positiveFloats	(numElements, 0);
+	vector<float>					negativeFloats	(numElements, 0);
+
+	fillRandomScalars(rnd, 1.f, 100.f, &positiveFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		negativeFloats[ndx] = -positiveFloats[ndx];
+
+	spec.assembly =
+		"OpCapability Shader\n"
+		"%std450 = OpExtInstImport \"GLSL.std.450\"\n"
+		"OpMemoryModel Logical GLSL450\n"
+		"OpEntryPoint GLCompute %main \"main\" %id\n"
+		"OpExecutionMode %main LocalSize 1 1 1\n"
+
+		"OpSource GLSL 430\n"
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) +
+
+		"%fmat      = OpTypeMatrix %fvec3 3\n"
+		"%ten       = OpConstant %u32 10\n"
+		"%f32arr10  = OpTypeArray %f32 %ten\n"
+		"%fst       = OpTypeStruct %f32 %f32\n"
+
+		+ string(s_InputOutputBuffer) +
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		// Create a bunch of null values
+		"%unull     = OpConstantNull %u32\n"
+		"%fnull     = OpConstantNull %f32\n"
+		"%vnull     = OpConstantNull %fvec3\n"
+		"%mnull     = OpConstantNull %fmat\n"
+		"%anull     = OpConstantNull %f32arr10\n"
+		"%snull     = OpConstantComposite %fst %fnull %fnull\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc     = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval     = OpLoad %f32 %inloc\n"
+		"%neg       = OpFNegate %f32 %inval\n"
+
+		// Get the abs() of (a certain element of) those null values
+		"%unull_cov = OpConvertUToF %f32 %unull\n"
+		"%unull_abs = OpExtInst %f32 %std450 FAbs %unull_cov\n"
+		"%fnull_abs = OpExtInst %f32 %std450 FAbs %fnull\n"
+		"%vnull_0   = OpCompositeExtract %f32 %vnull 0\n"
+		"%vnull_abs = OpExtInst %f32 %std450 FAbs %vnull_0\n"
+		"%mnull_12  = OpCompositeExtract %f32 %mnull 1 2\n"
+		"%mnull_abs = OpExtInst %f32 %std450 FAbs %mnull_12\n"
+		"%anull_3   = OpCompositeExtract %f32 %anull 3\n"
+		"%anull_abs = OpExtInst %f32 %std450 FAbs %anull_3\n"
+		"%snull_1   = OpCompositeExtract %f32 %snull 1\n"
+		"%snull_abs = OpExtInst %f32 %std450 FAbs %snull_1\n"
+
+		// Add them all
+		"%add1      = OpFAdd %f32 %neg  %unull_abs\n"
+		"%add2      = OpFAdd %f32 %add1 %fnull_abs\n"
+		"%add3      = OpFAdd %f32 %add2 %vnull_abs\n"
+		"%add4      = OpFAdd %f32 %add3 %mnull_abs\n"
+		"%add5      = OpFAdd %f32 %add4 %anull_abs\n"
+		"%final     = OpFAdd %f32 %add5 %snull_abs\n"
+
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"             OpStore %outloc %final\n" // write to output
+		"             OpReturn\n"
+		"             OpFunctionEnd\n";
+	spec.inputs.push_back(BufferSp(new Float32Buffer(positiveFloats)));
+	spec.outputs.push_back(BufferSp(new Float32Buffer(negativeFloats)));
+	spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+	group->addChild(new SpvAsmComputeShaderCase(testCtx, "spotcheck", "Check that values constructed via OpConstantNull & OpConstantComposite can be used", spec));
+
+	return group.release();
+}
+
+// Assembly code used for testing loop control is based on GLSL source code:
+// #version 430
+//
+// layout(std140, set = 0, binding = 0) readonly buffer Input {
+//   float elements[];
+// } input_data;
+// layout(std140, set = 0, binding = 1) writeonly buffer Output {
+//   float elements[];
+// } output_data;
+//
+// void main() {
+//   uint x = gl_GlobalInvocationID.x;
+//   output_data.elements[x] = input_data.elements[x];
+//   for (uint i = 0; i < 4; ++i)
+//     output_data.elements[x] += 1.f;
+// }
+tcu::TestCaseGroup* createLoopControlGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "loop_control", "Tests loop control cases"));
+	vector<CaseParameter>			cases;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					inputFloats		(numElements, 0);
+	vector<float>					outputFloats	(numElements, 0);
+	const StringTemplate			shaderTemplate	(
+		string(s_ShaderPreamble) +
+
+		"OpSource GLSL 430\n"
+		"OpName %main \"main\"\n"
+		"OpName %id \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%u32ptr      = OpTypePointer Function %u32\n"
+
+		"%id          = OpVariable %uvec3ptr Input\n"
+		"%zero        = OpConstant %i32 0\n"
+		"%uzero       = OpConstant %u32 0\n"
+		"%one         = OpConstant %i32 1\n"
+		"%constf1     = OpConstant %f32 1.0\n"
+		"%four        = OpConstant %u32 4\n"
+
+		"%main        = OpFunction %void None %voidf\n"
+		"%entry       = OpLabel\n"
+		"%i           = OpVariable %u32ptr Function\n"
+		"               OpStore %i %uzero\n"
+
+		"%idval       = OpLoad %uvec3 %id\n"
+		"%x           = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc       = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval       = OpLoad %f32 %inloc\n"
+		"%outloc      = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"               OpStore %outloc %inval\n"
+		"               OpBranch %loop_entry\n"
+
+		"%loop_entry  = OpLabel\n"
+		"%i_val       = OpLoad %u32 %i\n"
+		"%cmp_lt      = OpULessThan %bool %i_val %four\n"
+		"               OpLoopMerge %loop_merge %loop_entry ${CONTROL}\n"
+		"               OpBranchConditional %cmp_lt %loop_body %loop_merge\n"
+		"%loop_body   = OpLabel\n"
+		"%outval      = OpLoad %f32 %outloc\n"
+		"%addf1       = OpFAdd %f32 %outval %constf1\n"
+		"               OpStore %outloc %addf1\n"
+		"%new_i       = OpIAdd %u32 %i_val %one\n"
+		"               OpStore %i %new_i\n"
+		"               OpBranch %loop_entry\n"
+		"%loop_merge  = OpLabel\n"
+		"               OpReturn\n"
+		"               OpFunctionEnd\n");
+
+	cases.push_back(CaseParameter("none",				"None"));
+	cases.push_back(CaseParameter("unroll",				"Unroll"));
+	cases.push_back(CaseParameter("dont_unroll",		"DontUnroll"));
+	cases.push_back(CaseParameter("unroll_dont_unroll",	"Unroll|DontUnroll"));
+
+	fillRandomScalars(rnd, -100.f, 100.f, &inputFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		outputFloats[ndx] = inputFloats[ndx] + 4.f;
+
+	for (size_t caseNdx = 0; caseNdx < cases.size(); ++caseNdx)
+	{
+		map<string, string>		specializations;
+		ComputeShaderSpec		spec;
+
+		specializations["CONTROL"] = cases[caseNdx].param;
+		spec.assembly = shaderTemplate.specialize(specializations);
+		spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(outputFloats)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+		group->addChild(new SpvAsmComputeShaderCase(testCtx, cases[caseNdx].name, cases[caseNdx].name, spec));
+	}
+
+	return group.release();
+}
+
+// Assembly code used for testing selection control is based on GLSL source code:
+// #version 430
+//
+// layout(std140, set = 0, binding = 0) readonly buffer Input {
+//   float elements[];
+// } input_data;
+// layout(std140, set = 0, binding = 1) writeonly buffer Output {
+//   float elements[];
+// } output_data;
+//
+// void main() {
+//   uint x = gl_GlobalInvocationID.x;
+//   float val = input_data.elements[x];
+//   if (val > 10.f)
+//     output_data.elements[x] = val + 1.f;
+//   else
+//     output_data.elements[x] = val - 1.f;
+// }
+tcu::TestCaseGroup* createSelectionControlGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "selection_control", "Tests selection control cases"));
+	vector<CaseParameter>			cases;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					inputFloats		(numElements, 0);
+	vector<float>					outputFloats	(numElements, 0);
+	const StringTemplate			shaderTemplate	(
+		string(s_ShaderPreamble) +
+
+		"OpSource GLSL 430\n"
+		"OpName %main \"main\"\n"
+		"OpName %id \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%id       = OpVariable %uvec3ptr Input\n"
+		"%zero     = OpConstant %i32 0\n"
+		"%constf1  = OpConstant %f32 1.0\n"
+		"%constf10 = OpConstant %f32 10.0\n"
+
+		"%main     = OpFunction %void None %voidf\n"
+		"%entry    = OpLabel\n"
+		"%idval    = OpLoad %uvec3 %id\n"
+		"%x        = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc    = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval    = OpLoad %f32 %inloc\n"
+		"%outloc   = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"%cmp_gt   = OpFOrdGreaterThan %bool %inval %constf10\n"
+
+		"            OpSelectionMerge %if_end ${CONTROL}\n"
+		"            OpBranchConditional %cmp_gt %if_true %if_false\n"
+		"%if_true  = OpLabel\n"
+		"%addf1    = OpFAdd %f32 %inval %constf1\n"
+		"            OpStore %outloc %addf1\n"
+		"            OpBranch %if_end\n"
+		"%if_false = OpLabel\n"
+		"%subf1    = OpFSub %f32 %inval %constf1\n"
+		"            OpStore %outloc %subf1\n"
+		"            OpBranch %if_end\n"
+		"%if_end   = OpLabel\n"
+		"            OpReturn\n"
+		"            OpFunctionEnd\n");
+
+	cases.push_back(CaseParameter("none",					"None"));
+	cases.push_back(CaseParameter("flatten",				"Flatten"));
+	cases.push_back(CaseParameter("dont_flatten",			"DontFlatten"));
+	cases.push_back(CaseParameter("flatten_dont_flatten",	"DontFlatten|Flatten"));
+
+	fillRandomScalars(rnd, -100.f, 100.f, &inputFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		outputFloats[ndx] = inputFloats[ndx] + (inputFloats[ndx] > 10.f ? 1.f : -1.f);
+
+	for (size_t caseNdx = 0; caseNdx < cases.size(); ++caseNdx)
+	{
+		map<string, string>		specializations;
+		ComputeShaderSpec		spec;
+
+		specializations["CONTROL"] = cases[caseNdx].param;
+		spec.assembly = shaderTemplate.specialize(specializations);
+		spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(outputFloats)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+		group->addChild(new SpvAsmComputeShaderCase(testCtx, cases[caseNdx].name, cases[caseNdx].name, spec));
+	}
+
+	return group.release();
+}
+
+// Assembly code used for testing function control is based on GLSL source code:
+//
+// #version 430
+//
+// layout(std140, set = 0, binding = 0) readonly buffer Input {
+//   float elements[];
+// } input_data;
+// layout(std140, set = 0, binding = 1) writeonly buffer Output {
+//   float elements[];
+// } output_data;
+//
+// float const10() { return 10.f; }
+//
+// void main() {
+//   uint x = gl_GlobalInvocationID.x;
+//   output_data.elements[x] = input_data.elements[x] + const10();
+// }
+tcu::TestCaseGroup* createFunctionControlGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "function_control", "Tests function control cases"));
+	vector<CaseParameter>			cases;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					inputFloats		(numElements, 0);
+	vector<float>					outputFloats	(numElements, 0);
+	const StringTemplate			shaderTemplate	(
+		string(s_ShaderPreamble) +
+
+		"OpSource GLSL 430\n"
+		"OpName %main \"main\"\n"
+		"OpName %func_const10 \"const10(\"\n"
+		"OpName %id \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%f32f = OpTypeFunction %f32\n"
+		"%id = OpVariable %uvec3ptr Input\n"
+		"%zero = OpConstant %i32 0\n"
+		"%constf10 = OpConstant %f32 10.0\n"
+
+		"%main         = OpFunction %void None %voidf\n"
+		"%entry        = OpLabel\n"
+		"%idval        = OpLoad %uvec3 %id\n"
+		"%x            = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc        = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval        = OpLoad %f32 %inloc\n"
+		"%ret_10       = OpFunctionCall %f32 %func_const10\n"
+		"%fadd         = OpFAdd %f32 %inval %ret_10\n"
+		"%outloc       = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"                OpStore %outloc %fadd\n"
+		"                OpReturn\n"
+		"                OpFunctionEnd\n"
+
+		"%func_const10 = OpFunction %f32 ${CONTROL} %f32f\n"
+		"%label        = OpLabel\n"
+		"                OpReturnValue %constf10\n"
+		"                OpFunctionEnd\n");
+
+	cases.push_back(CaseParameter("none",						"None"));
+	cases.push_back(CaseParameter("inline",						"Inline"));
+	cases.push_back(CaseParameter("dont_inline",				"DontInline"));
+	cases.push_back(CaseParameter("pure",						"Pure"));
+	cases.push_back(CaseParameter("const",						"Const"));
+	cases.push_back(CaseParameter("inline_pure",				"Inline|Pure"));
+	cases.push_back(CaseParameter("const_dont_inline",			"Const|DontInline"));
+	cases.push_back(CaseParameter("inline_dont_inline",			"Inline|DontInline"));
+	cases.push_back(CaseParameter("pure_inline_dont_inline",	"Pure|Inline|DontInline"));
+
+	fillRandomScalars(rnd, -100.f, 100.f, &inputFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		outputFloats[ndx] = inputFloats[ndx] + 10.f;
+
+	for (size_t caseNdx = 0; caseNdx < cases.size(); ++caseNdx)
+	{
+		map<string, string>		specializations;
+		ComputeShaderSpec		spec;
+
+		specializations["CONTROL"] = cases[caseNdx].param;
+		spec.assembly = shaderTemplate.specialize(specializations);
+		spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(outputFloats)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+		group->addChild(new SpvAsmComputeShaderCase(testCtx, cases[caseNdx].name, cases[caseNdx].name, spec));
+	}
+
+	return group.release();
+}
+
+tcu::TestCaseGroup* createMemoryAccessGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "memory_access", "Tests memory access cases"));
+	vector<CaseParameter>			cases;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					inputFloats		(numElements, 0);
+	vector<float>					outputFloats	(numElements, 0);
+	const StringTemplate			shaderTemplate	(
+		string(s_ShaderPreamble) +
+
+		"OpSource GLSL 430\n"
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"%f32ptr_f  = OpTypePointer Function %f32\n"
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+		"%four      = OpConstant %i32 4\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+		"%copy      = OpVariable %f32ptr_f Function\n"
+		"%idval     = OpLoad %uvec3 %id ${ACCESS}\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+		"%inloc     = OpAccessChain %f32ptr %indata  %zero %x\n"
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"             OpCopyMemory %copy %inloc ${ACCESS}\n"
+		"%val1      = OpLoad %f32 %copy\n"
+		"%val2      = OpLoad %f32 %inloc\n"
+		"%add       = OpFAdd %f32 %val1 %val2\n"
+		"             OpStore %outloc %add ${ACCESS}\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n");
+
+	cases.push_back(CaseParameter("null",					""));
+	cases.push_back(CaseParameter("none",					"None"));
+	cases.push_back(CaseParameter("volatile",				"Volatile"));
+	cases.push_back(CaseParameter("aligned",				"Aligned 4"));
+	cases.push_back(CaseParameter("nontemporal",			"Nontemporal"));
+	cases.push_back(CaseParameter("aligned_nontemporal",	"Aligned|Nontemporal 4"));
+	cases.push_back(CaseParameter("aligned_volatile",		"Volatile|Aligned 4"));
+
+	fillRandomScalars(rnd, -100.f, 100.f, &inputFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		outputFloats[ndx] = inputFloats[ndx] + inputFloats[ndx];
+
+	for (size_t caseNdx = 0; caseNdx < cases.size(); ++caseNdx)
+	{
+		map<string, string>		specializations;
+		ComputeShaderSpec		spec;
+
+		specializations["ACCESS"] = cases[caseNdx].param;
+		spec.assembly = shaderTemplate.specialize(specializations);
+		spec.inputs.push_back(BufferSp(new Float32Buffer(inputFloats)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(outputFloats)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+		group->addChild(new SpvAsmComputeShaderCase(testCtx, cases[caseNdx].name, cases[caseNdx].name, spec));
+	}
+
+	return group.release();
+}
+
+// Checks that we can get undefined values for various types, without exercising a computation with it.
+tcu::TestCaseGroup* createOpUndefGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "opundef", "Tests the OpUndef instruction"));
+	vector<CaseParameter>			cases;
+	de::Random						rnd				(deStringHash(group->getName()));
+	const int						numElements		= 100;
+	vector<float>					positiveFloats	(numElements, 0);
+	vector<float>					negativeFloats	(numElements, 0);
+	const StringTemplate			shaderTemplate	(
+		string(s_ShaderPreamble) +
+
+		"OpSource GLSL 430\n"
+		"OpName %main           \"main\"\n"
+		"OpName %id             \"gl_GlobalInvocationID\"\n"
+
+		"OpDecorate %id BuiltIn GlobalInvocationId\n"
+
+		+ string(s_InputOutputBufferTraits) + string(s_CommonTypes) + string(s_InputOutputBuffer) +
+
+		"${TYPE}\n"
+
+		"%id        = OpVariable %uvec3ptr Input\n"
+		"%zero      = OpConstant %i32 0\n"
+
+		"%main      = OpFunction %void None %voidf\n"
+		"%label     = OpLabel\n"
+
+		"%undef     = OpUndef %type\n"
+
+		"%idval     = OpLoad %uvec3 %id\n"
+		"%x         = OpCompositeExtract %u32 %idval 0\n"
+
+		"%inloc     = OpAccessChain %f32ptr %indata %zero %x\n"
+		"%inval     = OpLoad %f32 %inloc\n"
+		"%neg       = OpFNegate %f32 %inval\n"
+		"%outloc    = OpAccessChain %f32ptr %outdata %zero %x\n"
+		"             OpStore %outloc %neg\n"
+		"             OpReturn\n"
+		"             OpFunctionEnd\n");
+
+	cases.push_back(CaseParameter("bool",			"%type = OpTypeBool"));
+	cases.push_back(CaseParameter("sint32",			"%type = OpTypeInt 32 1"));
+	cases.push_back(CaseParameter("uint32",			"%type = OpTypeInt 32 0"));
+	cases.push_back(CaseParameter("float32",		"%type = OpTypeFloat 32"));
+	cases.push_back(CaseParameter("vec4float32",	"%type = OpTypeVector %f32 4"));
+	cases.push_back(CaseParameter("vec2uint32",		"%type = OpTypeVector %u32 2"));
+	cases.push_back(CaseParameter("matrix",			"%type = OpTypeMatrix %fvec3 3"));
+	cases.push_back(CaseParameter("image",			"%type = OpTypeImage %f32 2D 0 0 0 1 Unknown"));
+	cases.push_back(CaseParameter("sampler",		"%type = OpTypeSampler"));
+	cases.push_back(CaseParameter("sampledimage",	"%img = OpTypeImage %f32 2D 0 0 0 1 Unknown\n"
+													"%type = OpTypeSampledImage %img"));
+	cases.push_back(CaseParameter("array",			"%100 = OpConstant %u32 100\n"
+													"%type = OpTypeArray %i32 %100"));
+	cases.push_back(CaseParameter("runtimearray",	"%type = OpTypeRuntimeArray %f32"));
+	cases.push_back(CaseParameter("struct",			"%type = OpTypeStruct %f32 %i32 %u32"));
+	cases.push_back(CaseParameter("pointer",		"%type = OpTypePointer Function %i32"));
+
+	fillRandomScalars(rnd, 1.f, 100.f, &positiveFloats[0], numElements);
+
+	for (size_t ndx = 0; ndx < numElements; ++ndx)
+		negativeFloats[ndx] = -positiveFloats[ndx];
+
+	for (size_t caseNdx = 0; caseNdx < cases.size(); ++caseNdx)
+	{
+		map<string, string>		specializations;
+		ComputeShaderSpec		spec;
+
+		specializations["TYPE"] = cases[caseNdx].param;
+		spec.assembly = shaderTemplate.specialize(specializations);
+		spec.inputs.push_back(BufferSp(new Float32Buffer(positiveFloats)));
+		spec.outputs.push_back(BufferSp(new Float32Buffer(negativeFloats)));
+		spec.numWorkGroups = IVec3(numElements, 1, 1);
+
+		group->addChild(new SpvAsmComputeShaderCase(testCtx, cases[caseNdx].name, cases[caseNdx].name, spec));
+	}
+
+	return group.release();
+}
+
+typedef std::pair<std::string, VkShaderStageFlagBits>	EntryToStage;
+typedef map<string, vector<EntryToStage> >				ModuleMap;
+typedef map<VkShaderStageFlagBits, vector<deInt32> >	StageToSpecConstantMap;
+
+// Context for a specific test instantiation. For example, an instantiation
+// may test colors yellow/magenta/cyan/mauve in a tessellation shader
+// with an entry point named 'main_to_the_main'.
+struct InstanceContext
+{
+	// Map of modules to the entry points we care to use from those modules.
+	ModuleMap				moduleMap;
+	RGBA					inputColors[4];
+	RGBA					outputColors[4];
+	// Concrete SPIR-V code to test via boilerplate specialization.
+	map<string, string>		testCodeFragments;
+	StageToSpecConstantMap	specConstants;
+	bool					hasTessellation;
+	VkShaderStageFlagBits	requiredStages;
+
+	InstanceContext (const RGBA (&inputs)[4], const RGBA (&outputs)[4], const map<string, string>& testCodeFragments_, const StageToSpecConstantMap& specConstants_)
+		: testCodeFragments		(testCodeFragments_)
+		, specConstants			(specConstants_)
+		, hasTessellation		(false)
+		, requiredStages		(static_cast<VkShaderStageFlagBits>(0))
+	{
+		inputColors[0]		= inputs[0];
+		inputColors[1]		= inputs[1];
+		inputColors[2]		= inputs[2];
+		inputColors[3]		= inputs[3];
+
+		outputColors[0]		= outputs[0];
+		outputColors[1]		= outputs[1];
+		outputColors[2]		= outputs[2];
+		outputColors[3]		= outputs[3];
+	}
+
+	InstanceContext (const InstanceContext& other)
+		: moduleMap			(other.moduleMap)
+		, testCodeFragments	(other.testCodeFragments)
+		, specConstants		(other.specConstants)
+		, hasTessellation	(other.hasTessellation)
+		, requiredStages    (other.requiredStages)
+	{
+		inputColors[0]		= other.inputColors[0];
+		inputColors[1]		= other.inputColors[1];
+		inputColors[2]		= other.inputColors[2];
+		inputColors[3]		= other.inputColors[3];
+
+		outputColors[0]		= other.outputColors[0];
+		outputColors[1]		= other.outputColors[1];
+		outputColors[2]		= other.outputColors[2];
+		outputColors[3]		= other.outputColors[3];
+	}
+};
+
+// A description of a shader to be used for a single stage of the graphics pipeline.
+struct ShaderElement
+{
+	// The module that contains this shader entrypoint.
+	string					moduleName;
+
+	// The name of the entrypoint.
+	string					entryName;
+
+	// Which shader stage this entry point represents.
+	VkShaderStageFlagBits	stage;
+
+	ShaderElement (const string& moduleName_, const string& entryPoint_, VkShaderStageFlagBits shaderStage_)
+		: moduleName(moduleName_)
+		, entryName(entryPoint_)
+		, stage(shaderStage_)
+	{
+	}
+};
+
+void getDefaultColors (RGBA (&colors)[4])
+{
+	colors[0] = RGBA::white();
+	colors[1] = RGBA::red();
+	colors[2] = RGBA::green();
+	colors[3] = RGBA::blue();
+}
+
+void getHalfColorsFullAlpha (RGBA (&colors)[4])
+{
+	colors[0] = RGBA(127, 127, 127, 255);
+	colors[1] = RGBA(127, 0,   0,	255);
+	colors[2] = RGBA(0,	  127, 0,	255);
+	colors[3] = RGBA(0,	  0,   127, 255);
+}
+
+void getInvertedDefaultColors (RGBA (&colors)[4])
+{
+	colors[0] = RGBA(0,		0,		0,		255);
+	colors[1] = RGBA(0,		255,	255,	255);
+	colors[2] = RGBA(255,	0,		255,	255);
+	colors[3] = RGBA(255,	255,	0,		255);
+}
+
+// Turns a statically sized array of ShaderElements into an instance-context
+// by setting up the mapping of modules to their contained shaders and stages.
+// The inputs and expected outputs are given by inputColors and outputColors
+template<size_t N>
+InstanceContext createInstanceContext (const ShaderElement (&elements)[N], const RGBA (&inputColors)[4], const RGBA (&outputColors)[4], const map<string, string>& testCodeFragments, const StageToSpecConstantMap& specConstants)
+{
+	InstanceContext ctx (inputColors, outputColors, testCodeFragments, specConstants);
+	for (size_t i = 0; i < N; ++i)
+	{
+		ctx.moduleMap[elements[i].moduleName].push_back(std::make_pair(elements[i].entryName, elements[i].stage));
+		ctx.requiredStages = static_cast<VkShaderStageFlagBits>(ctx.requiredStages | elements[i].stage);
+	}
+	return ctx;
+}
+
+template<size_t N>
+inline InstanceContext createInstanceContext (const ShaderElement (&elements)[N], RGBA (&inputColors)[4], const RGBA (&outputColors)[4], const map<string, string>& testCodeFragments)
+{
+	return createInstanceContext(elements, inputColors, outputColors, testCodeFragments, StageToSpecConstantMap());
+}
+
+// The same as createInstanceContext above, but with default colors.
+template<size_t N>
+InstanceContext createInstanceContext (const ShaderElement (&elements)[N], const map<string, string>& testCodeFragments)
+{
+	RGBA defaultColors[4];
+	getDefaultColors(defaultColors);
+	return createInstanceContext(elements, defaultColors, defaultColors, testCodeFragments);
+}
+
+// For the current InstanceContext, constructs the required modules and shader stage create infos.
+void createPipelineShaderStages (const DeviceInterface& vk, const VkDevice vkDevice, InstanceContext& instance, Context& context, vector<ModuleHandleSp>& modules, vector<VkPipelineShaderStageCreateInfo>& createInfos)
+{
+	for (ModuleMap::const_iterator moduleNdx = instance.moduleMap.begin(); moduleNdx != instance.moduleMap.end(); ++moduleNdx)
+	{
+		const ModuleHandleSp mod(new Unique<VkShaderModule>(createShaderModule(vk, vkDevice, context.getBinaryCollection().get(moduleNdx->first), 0)));
+		modules.push_back(ModuleHandleSp(mod));
+		for (vector<EntryToStage>::const_iterator shaderNdx = moduleNdx->second.begin(); shaderNdx != moduleNdx->second.end(); ++shaderNdx)
+		{
+			const EntryToStage&						stage			= *shaderNdx;
+			const VkPipelineShaderStageCreateInfo	shaderParam		=
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	//	VkStructureType			sType;
+				DE_NULL,												//	const void*				pNext;
+				(VkPipelineShaderStageCreateFlags)0,					//	VkPipelineShaderStageCreateFlags	flags;
+				stage.second,											//	VkShaderStageFlagBits	stage;
+				**modules.back(),										//	VkShaderModule			module;
+				stage.first.c_str(),									//	const char*				pName;
+				(const VkSpecializationInfo*)DE_NULL,					//	const VkSpecializationInfo*		pSpecializationInfo;
+			};
+			createInfos.push_back(shaderParam);
+		}
+	}
+}
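+
+// Usage sketch: the caller owns both output vectors so that the shader modules
+// outlive the pipeline built from them (parameter names below are placeholders):
+//
+//   vector<ModuleHandleSp>                  modules;
+//   vector<VkPipelineShaderStageCreateInfo> stageInfos;
+//   createPipelineShaderStages(vk, vkDevice, instance, context, modules, stageInfos);
+//   // stageInfos can then be fed into VkGraphicsPipelineCreateInfo::stageCount/pStages.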
+
+#define SPIRV_ASSEMBLY_TYPES																	\
+	"%void = OpTypeVoid\n"																		\
+	"%bool = OpTypeBool\n"																		\
+																								\
+	"%i32 = OpTypeInt 32 1\n"																	\
+	"%u32 = OpTypeInt 32 0\n"																	\
+																								\
+	"%f32 = OpTypeFloat 32\n"																	\
+	"%v3f32 = OpTypeVector %f32 3\n"															\
+	"%v4f32 = OpTypeVector %f32 4\n"															\
+	"%v4bool = OpTypeVector %bool 4\n"															\
+																								\
+	"%v4f32_function = OpTypeFunction %v4f32 %v4f32\n"											\
+	"%fun = OpTypeFunction %void\n"																\
+																								\
+	"%ip_f32 = OpTypePointer Input %f32\n"														\
+	"%ip_i32 = OpTypePointer Input %i32\n"														\
+	"%ip_v3f32 = OpTypePointer Input %v3f32\n"													\
+	"%ip_v4f32 = OpTypePointer Input %v4f32\n"													\
+																								\
+	"%op_f32 = OpTypePointer Output %f32\n"														\
+	"%op_v4f32 = OpTypePointer Output %v4f32\n"													\
+																								\
+	"%fp_f32   = OpTypePointer Function %f32\n"													\
+	"%fp_i32   = OpTypePointer Function %i32\n"													\
+	"%fp_v4f32 = OpTypePointer Function %v4f32\n"
+
+#define SPIRV_ASSEMBLY_CONSTANTS																\
+	"%c_f32_1 = OpConstant %f32 1.0\n"															\
+	"%c_f32_0 = OpConstant %f32 0.0\n"															\
+	"%c_f32_0_5 = OpConstant %f32 0.5\n"														\
+	"%c_f32_n1  = OpConstant %f32 -1.\n"														\
+	"%c_f32_7 = OpConstant %f32 7.0\n"															\
+	"%c_f32_8 = OpConstant %f32 8.0\n"															\
+	"%c_i32_0 = OpConstant %i32 0\n"															\
+	"%c_i32_1 = OpConstant %i32 1\n"															\
+	"%c_i32_2 = OpConstant %i32 2\n"															\
+	"%c_i32_3 = OpConstant %i32 3\n"															\
+	"%c_i32_4 = OpConstant %i32 4\n"															\
+	"%c_u32_0 = OpConstant %u32 0\n"															\
+	"%c_u32_1 = OpConstant %u32 1\n"															\
+	"%c_u32_2 = OpConstant %u32 2\n"															\
+	"%c_u32_3 = OpConstant %u32 3\n"															\
+	"%c_u32_32 = OpConstant %u32 32\n"															\
+	"%c_u32_4 = OpConstant %u32 4\n"															\
+	"%c_u32_31_bits = OpConstant %u32 0x7FFFFFFF\n"												\
+	"%c_v4f32_1_1_1_1 = OpConstantComposite %v4f32 %c_f32_1 %c_f32_1 %c_f32_1 %c_f32_1\n"		\
+	"%c_v4f32_1_0_0_1 = OpConstantComposite %v4f32 %c_f32_1 %c_f32_0 %c_f32_0 %c_f32_1\n"		\
+	"%c_v4f32_0_5_0_5_0_5_0_5 = OpConstantComposite %v4f32 %c_f32_0_5 %c_f32_0_5 %c_f32_0_5 %c_f32_0_5\n"
+
+#define SPIRV_ASSEMBLY_ARRAYS																	\
+	"%a1f32 = OpTypeArray %f32 %c_u32_1\n"														\
+	"%a2f32 = OpTypeArray %f32 %c_u32_2\n"														\
+	"%a3v4f32 = OpTypeArray %v4f32 %c_u32_3\n"													\
+	"%a4f32 = OpTypeArray %f32 %c_u32_4\n"														\
+	"%a32v4f32 = OpTypeArray %v4f32 %c_u32_32\n"												\
+	"%ip_a3v4f32 = OpTypePointer Input %a3v4f32\n"												\
+	"%ip_a32v4f32 = OpTypePointer Input %a32v4f32\n"											\
+	"%op_a2f32 = OpTypePointer Output %a2f32\n"													\
+	"%op_a3v4f32 = OpTypePointer Output %a3v4f32\n"												\
+	"%op_a4f32 = OpTypePointer Output %a4f32\n"
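+
+// The three blocks above are pasted into every generated shader below, so a
+// test's "testfun" fragment can reference ids such as %v4f32, %v4f32_function or
+// %c_v4f32_0_5_0_5_0_5_0_5 without redeclaring them. As a sketch, a fragment
+// that halves its input color could be written as:
+//
+//   "%test_code = OpFunction %v4f32 None %v4f32_function\n"
+//   "%param     = OpFunctionParameter %v4f32\n"
+//   "%entry     = OpLabel\n"
+//   "%halved    = OpFMul %v4f32 %param %c_v4f32_0_5_0_5_0_5_0_5\n"
+//   "OpReturnValue %halved\n"
+//   "OpFunctionEnd\n"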
+
+// Creates vertex-shader assembly by specializing a boilerplate StringTemplate
+// on fragments, which must (at least) map "testfun" to an OpFunction definition
+// for %test_code that takes and returns a %v4f32.  Boilerplate IDs are prefixed
+// with "BP_" to avoid collisions with fragments.
+//
+// It corresponds roughly to this GLSL:
+//
+// layout(location = 0) in vec4 position;
+// layout(location = 1) in vec4 color;
+// layout(location = 1) out highp vec4 vtxColor;
+// void main (void) { gl_Position = position; vtxColor = test_func(color); }
+string makeVertexShaderAssembly(const map<string, string>& fragments)
+{
+// \todo [2015-11-23 awoloszyn] Remove OpName once these have stabilized
+	static const char vertexShaderBoilerplate[] =
+		"OpCapability Shader\n"
+		"OpMemoryModel Logical GLSL450\n"
+		"OpEntryPoint Vertex %main \"main\" %BP_stream %BP_position %BP_vtx_color %BP_color %BP_gl_VertexIndex %BP_gl_InstanceIndex\n"
+		"${debug:opt}\n"
+		"OpName %main \"main\"\n"
+		"OpName %BP_gl_PerVertex \"gl_PerVertex\"\n"
+		"OpMemberName %BP_gl_PerVertex 0 \"gl_Position\"\n"
+		"OpMemberName %BP_gl_PerVertex 1 \"gl_PointSize\"\n"
+		"OpMemberName %BP_gl_PerVertex 2 \"gl_ClipDistance\"\n"
+		"OpMemberName %BP_gl_PerVertex 3 \"gl_CullDistance\"\n"
+		"OpName %test_code \"testfun(vf4;\"\n"
+		"OpName %BP_stream \"\"\n"
+		"OpName %BP_position \"position\"\n"
+		"OpName %BP_vtx_color \"vtxColor\"\n"
+		"OpName %BP_color \"color\"\n"
+		"OpName %BP_gl_VertexIndex \"gl_VertexIndex\"\n"
+		"OpName %BP_gl_InstanceIndex \"gl_InstanceIndex\"\n"
+		"OpMemberDecorate %BP_gl_PerVertex 0 BuiltIn Position\n"
+		"OpMemberDecorate %BP_gl_PerVertex 1 BuiltIn PointSize\n"
+		"OpMemberDecorate %BP_gl_PerVertex 2 BuiltIn ClipDistance\n"
+		"OpMemberDecorate %BP_gl_PerVertex 3 BuiltIn CullDistance\n"
+		"OpDecorate %BP_gl_PerVertex Block\n"
+		"OpDecorate %BP_position Location 0\n"
+		"OpDecorate %BP_vtx_color Location 1\n"
+		"OpDecorate %BP_color Location 1\n"
+		"OpDecorate %BP_gl_VertexIndex BuiltIn VertexIndex\n"
+		"OpDecorate %BP_gl_InstanceIndex BuiltIn InstanceIndex\n"
+		"${decoration:opt}\n"
+		SPIRV_ASSEMBLY_TYPES
+		SPIRV_ASSEMBLY_CONSTANTS
+		SPIRV_ASSEMBLY_ARRAYS
+		"%BP_gl_PerVertex = OpTypeStruct %v4f32 %f32 %a1f32 %a1f32\n"
+		"%BP_op_gl_PerVertex = OpTypePointer Output %BP_gl_PerVertex\n"
+		"%BP_stream = OpVariable %BP_op_gl_PerVertex Output\n"
+		"%BP_position = OpVariable %ip_v4f32 Input\n"
+		"%BP_vtx_color = OpVariable %op_v4f32 Output\n"
+		"%BP_color = OpVariable %ip_v4f32 Input\n"
+		"%BP_gl_VertexIndex = OpVariable %ip_i32 Input\n"
+		"%BP_gl_InstanceIndex = OpVariable %ip_i32 Input\n"
+		"${pre_main:opt}\n"
+		"%main = OpFunction %void None %fun\n"
+		"%BP_label = OpLabel\n"
+		"%BP_pos = OpLoad %v4f32 %BP_position\n"
+		"%BP_gl_pos = OpAccessChain %op_v4f32 %BP_stream %c_i32_0\n"
+		"OpStore %BP_gl_pos %BP_pos\n"
+		"%BP_col = OpLoad %v4f32 %BP_color\n"
+		"%BP_col_transformed = OpFunctionCall %v4f32 %test_code %BP_col\n"
+		"OpStore %BP_vtx_color %BP_col_transformed\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+		"${testfun}\n";
+	return tcu::StringTemplate(vertexShaderBoilerplate).specialize(fragments);
+}
+
+// Creates tess-control-shader assembly by specializing a boilerplate
+// StringTemplate on fragments, which must (at least) map "testfun" to an
+// OpFunction definition for %test_code that takes and returns a %v4f32.
+// Boilerplate IDs are prefixed with "BP_" to avoid collisions with fragments.
+//
+// It roughly corresponds to the following GLSL.
+//
+// #version 450
+// layout(vertices = 3) out;
+// layout(location = 1) in vec4 in_color[];
+// layout(location = 1) out vec4 out_color[];
+//
+// void main() {
+//   out_color[gl_InvocationID] = testfun(in_color[gl_InvocationID]);
+//   gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;
+//   if (gl_InvocationID == 0) {
+//     gl_TessLevelOuter[0] = 1.0;
+//     gl_TessLevelOuter[1] = 1.0;
+//     gl_TessLevelOuter[2] = 1.0;
+//     gl_TessLevelInner[0] = 1.0;
+//   }
+// }
+string makeTessControlShaderAssembly (const map<string, string>& fragments)
+{
+	static const char tessControlShaderBoilerplate[] =
+		"OpCapability Tessellation\n"
+		"OpMemoryModel Logical GLSL450\n"
+		"OpEntryPoint TessellationControl %BP_main \"main\" %BP_out_color %BP_gl_InvocationID %BP_in_color %BP_gl_out %BP_gl_in %BP_gl_TessLevelOuter %BP_gl_TessLevelInner\n"
+		"OpExecutionMode %BP_main OutputVertices 3\n"
+		"${debug:opt}\n"
+		"OpName %BP_main \"main\"\n"
+		"OpName %test_code \"testfun(vf4;\"\n"
+		"OpName %BP_out_color \"out_color\"\n"
+		"OpName %BP_gl_InvocationID \"gl_InvocationID\"\n"
+		"OpName %BP_in_color \"in_color\"\n"
+		"OpName %BP_gl_PerVertex \"gl_PerVertex\"\n"
+		"OpMemberName %BP_gl_PerVertex 0 \"gl_Position\"\n"
+		"OpMemberName %BP_gl_PerVertex 1 \"gl_PointSize\"\n"
+		"OpMemberName %BP_gl_PerVertex 2 \"gl_ClipDistance\"\n"
+		"OpMemberName %BP_gl_PerVertex 3 \"gl_CullDistance\"\n"
+		"OpName %BP_gl_out \"gl_out\"\n"
+		"OpName %BP_gl_PVOut \"gl_PerVertex\"\n"
+		"OpMemberName %BP_gl_PVOut 0 \"gl_Position\"\n"
+		"OpMemberName %BP_gl_PVOut 1 \"gl_PointSize\"\n"
+		"OpMemberName %BP_gl_PVOut 2 \"gl_ClipDistance\"\n"
+		"OpMemberName %BP_gl_PVOut 3 \"gl_CullDistance\"\n"
+		"OpName %BP_gl_in \"gl_in\"\n"
+		"OpName %BP_gl_TessLevelOuter \"gl_TessLevelOuter\"\n"
+		"OpName %BP_gl_TessLevelInner \"gl_TessLevelInner\"\n"
+		"OpDecorate %BP_out_color Location 1\n"
+		"OpDecorate %BP_gl_InvocationID BuiltIn InvocationId\n"
+		"OpDecorate %BP_in_color Location 1\n"
+		"OpMemberDecorate %BP_gl_PerVertex 0 BuiltIn Position\n"
+		"OpMemberDecorate %BP_gl_PerVertex 1 BuiltIn PointSize\n"
+		"OpMemberDecorate %BP_gl_PerVertex 2 BuiltIn ClipDistance\n"
+		"OpMemberDecorate %BP_gl_PerVertex 3 BuiltIn CullDistance\n"
+		"OpDecorate %BP_gl_PerVertex Block\n"
+		"OpMemberDecorate %BP_gl_PVOut 0 BuiltIn Position\n"
+		"OpMemberDecorate %BP_gl_PVOut 1 BuiltIn PointSize\n"
+		"OpMemberDecorate %BP_gl_PVOut 2 BuiltIn ClipDistance\n"
+		"OpMemberDecorate %BP_gl_PVOut 3 BuiltIn CullDistance\n"
+		"OpDecorate %BP_gl_PVOut Block\n"
+		"OpDecorate %BP_gl_TessLevelOuter Patch\n"
+		"OpDecorate %BP_gl_TessLevelOuter BuiltIn TessLevelOuter\n"
+		"OpDecorate %BP_gl_TessLevelInner Patch\n"
+		"OpDecorate %BP_gl_TessLevelInner BuiltIn TessLevelInner\n"
+		"${decoration:opt}\n"
+		SPIRV_ASSEMBLY_TYPES
+		SPIRV_ASSEMBLY_CONSTANTS
+		SPIRV_ASSEMBLY_ARRAYS
+		"%BP_out_color = OpVariable %op_a3v4f32 Output\n"
+		"%BP_gl_InvocationID = OpVariable %ip_i32 Input\n"
+		"%BP_in_color = OpVariable %ip_a32v4f32 Input\n"
+		"%BP_gl_PerVertex = OpTypeStruct %v4f32 %f32 %a1f32 %a1f32\n"
+		"%BP_a3_gl_PerVertex = OpTypeArray %BP_gl_PerVertex %c_u32_3\n"
+		"%BP_op_a3_gl_PerVertex = OpTypePointer Output %BP_a3_gl_PerVertex\n"
+		"%BP_gl_out = OpVariable %BP_op_a3_gl_PerVertex Output\n"
+		"%BP_gl_PVOut = OpTypeStruct %v4f32 %f32 %a1f32 %a1f32\n"
+		"%BP_a32_gl_PVOut = OpTypeArray %BP_gl_PVOut %c_u32_32\n"
+		"%BP_ip_a32_gl_PVOut = OpTypePointer Input %BP_a32_gl_PVOut\n"
+		"%BP_gl_in = OpVariable %BP_ip_a32_gl_PVOut Input\n"
+		"%BP_gl_TessLevelOuter = OpVariable %op_a4f32 Output\n"
+		"%BP_gl_TessLevelInner = OpVariable %op_a2f32 Output\n"
+		"${pre_main:opt}\n"
+
+		"%BP_main = OpFunction %void None %fun\n"
+		"%BP_label = OpLabel\n"
+
+		"%BP_gl_Invoc = OpLoad %i32 %BP_gl_InvocationID\n"
+
+		"%BP_in_col_loc = OpAccessChain %ip_v4f32 %BP_in_color %BP_gl_Invoc\n"
+		"%BP_out_col_loc = OpAccessChain %op_v4f32 %BP_out_color %BP_gl_Invoc\n"
+		"%BP_in_col_val = OpLoad %v4f32 %BP_in_col_loc\n"
+		"%BP_clr_transformed = OpFunctionCall %v4f32 %test_code %BP_in_col_val\n"
+		"OpStore %BP_out_col_loc %BP_clr_transformed\n"
+
+		"%BP_in_pos_loc = OpAccessChain %ip_v4f32 %BP_gl_in %BP_gl_Invoc %c_i32_0\n"
+		"%BP_out_pos_loc = OpAccessChain %op_v4f32 %BP_gl_out %BP_gl_Invoc %c_i32_0\n"
+		"%BP_in_pos_val = OpLoad %v4f32 %BP_in_pos_loc\n"
+		"OpStore %BP_out_pos_loc %BP_in_pos_val\n"
+
+		"%BP_cmp = OpIEqual %bool %BP_gl_Invoc %c_i32_0\n"
+		"OpSelectionMerge %BP_merge_label None\n"
+		"OpBranchConditional %BP_cmp %BP_if_label %BP_merge_label\n"
+		"%BP_if_label = OpLabel\n"
+		"%BP_gl_TessLevelOuterPos_0 = OpAccessChain %op_f32 %BP_gl_TessLevelOuter %c_i32_0\n"
+		"%BP_gl_TessLevelOuterPos_1 = OpAccessChain %op_f32 %BP_gl_TessLevelOuter %c_i32_1\n"
+		"%BP_gl_TessLevelOuterPos_2 = OpAccessChain %op_f32 %BP_gl_TessLevelOuter %c_i32_2\n"
+		"%BP_gl_TessLevelInnerPos_0 = OpAccessChain %op_f32 %BP_gl_TessLevelInner %c_i32_0\n"
+		"OpStore %BP_gl_TessLevelOuterPos_0 %c_f32_1\n"
+		"OpStore %BP_gl_TessLevelOuterPos_1 %c_f32_1\n"
+		"OpStore %BP_gl_TessLevelOuterPos_2 %c_f32_1\n"
+		"OpStore %BP_gl_TessLevelInnerPos_0 %c_f32_1\n"
+		"OpBranch %BP_merge_label\n"
+		"%BP_merge_label = OpLabel\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+		"${testfun}\n";
+	return tcu::StringTemplate(tessControlShaderBoilerplate).specialize(fragments);
+}
+
+// Creates tess-evaluation-shader assembly by specializing a boilerplate
+// StringTemplate on fragments, which must (at least) map "testfun" to an
+// OpFunction definition for %test_code that takes and returns a %v4f32.
+// Boilerplate IDs are prefixed with "BP_" to avoid collisions with fragments.
+//
+// It roughly corresponds to the following GLSL.
+//
+// #version 450
+//
+// layout(triangles, equal_spacing, ccw) in;
+// layout(location = 1) in vec4 in_color[];
+// layout(location = 1) out vec4 out_color;
+//
+// #define interpolate(val)
+//   vec4(gl_TessCoord.x) * val[0] + vec4(gl_TessCoord.y) * val[1] +
+//          vec4(gl_TessCoord.z) * val[2]
+//
+// void main() {
+//   gl_Position = vec4(gl_TessCoord.x) * gl_in[0].gl_Position +
+//                  vec4(gl_TessCoord.y) * gl_in[1].gl_Position +
+//                  vec4(gl_TessCoord.z) * gl_in[2].gl_Position;
+//   out_color = testfun(interpolate(in_color));
+// }
+string makeTessEvalShaderAssembly(const map<string, string>& fragments)
+{
+	static const char tessEvalBoilerplate[] =
+		"OpCapability Tessellation\n"
+		"OpMemoryModel Logical GLSL450\n"
+		"OpEntryPoint TessellationEvaluation %BP_main \"main\" %BP_stream %BP_gl_TessCoord %BP_gl_in %BP_out_color %BP_in_color\n"
+		"OpExecutionMode %BP_main Triangles\n"
+		"OpExecutionMode %BP_main SpacingEqual\n"
+		"OpExecutionMode %BP_main VertexOrderCcw\n"
+		"${debug:opt}\n"
+		"OpName %BP_main \"main\"\n"
+		"OpName %test_code \"testfun(vf4;\"\n"
+		"OpName %BP_gl_PerVertexOut \"gl_PerVertex\"\n"
+		"OpMemberName %BP_gl_PerVertexOut 0 \"gl_Position\"\n"
+		"OpMemberName %BP_gl_PerVertexOut 1 \"gl_PointSize\"\n"
+		"OpMemberName %BP_gl_PerVertexOut 2 \"gl_ClipDistance\"\n"
+		"OpMemberName %BP_gl_PerVertexOut 3 \"gl_CullDistance\"\n"
+		"OpName %BP_stream \"\"\n"
+		"OpName %BP_gl_TessCoord \"gl_TessCoord\"\n"
+		"OpName %BP_gl_PerVertexIn \"gl_PerVertex\"\n"
+		"OpMemberName %BP_gl_PerVertexIn 0 \"gl_Position\"\n"
+		"OpMemberName %BP_gl_PerVertexIn 1 \"gl_PointSize\"\n"
+		"OpMemberName %BP_gl_PerVertexIn 2 \"gl_ClipDistance\"\n"
+		"OpMemberName %BP_gl_PerVertexIn 3 \"gl_CullDistance\"\n"
+		"OpName %BP_gl_in \"gl_in\"\n"
+		"OpName %BP_out_color \"out_color\"\n"
+		"OpName %BP_in_color \"in_color\"\n"
+		"OpMemberDecorate %BP_gl_PerVertexOut 0 BuiltIn Position\n"
+		"OpMemberDecorate %BP_gl_PerVertexOut 1 BuiltIn PointSize\n"
+		"OpMemberDecorate %BP_gl_PerVertexOut 2 BuiltIn ClipDistance\n"
+		"OpMemberDecorate %BP_gl_PerVertexOut 3 BuiltIn CullDistance\n"
+		"OpDecorate %BP_gl_PerVertexOut Block\n"
+		"OpDecorate %BP_gl_TessCoord BuiltIn TessCoord\n"
+		"OpMemberDecorate %BP_gl_PerVertexIn 0 BuiltIn Position\n"
+		"OpMemberDecorate %BP_gl_PerVertexIn 1 BuiltIn PointSize\n"
+		"OpMemberDecorate %BP_gl_PerVertexIn 2 BuiltIn ClipDistance\n"
+		"OpMemberDecorate %BP_gl_PerVertexIn 3 BuiltIn CullDistance\n"
+		"OpDecorate %BP_gl_PerVertexIn Block\n"
+		"OpDecorate %BP_out_color Location 1\n"
+		"OpDecorate %BP_in_color Location 1\n"
+		"${decoration:opt}\n"
+		SPIRV_ASSEMBLY_TYPES
+		SPIRV_ASSEMBLY_CONSTANTS
+		SPIRV_ASSEMBLY_ARRAYS
+		"%BP_gl_PerVertexOut = OpTypeStruct %v4f32 %f32 %a1f32 %a1f32\n"
+		"%BP_op_gl_PerVertexOut = OpTypePointer Output %BP_gl_PerVertexOut\n"
+		"%BP_stream = OpVariable %BP_op_gl_PerVertexOut Output\n"
+		"%BP_gl_TessCoord = OpVariable %ip_v3f32 Input\n"
+		"%BP_gl_PerVertexIn = OpTypeStruct %v4f32 %f32 %a1f32 %a1f32\n"
+		"%BP_a32_gl_PerVertexIn = OpTypeArray %BP_gl_PerVertexIn %c_u32_32\n"
+		"%BP_ip_a32_gl_PerVertexIn = OpTypePointer Input %BP_a32_gl_PerVertexIn\n"
+		"%BP_gl_in = OpVariable %BP_ip_a32_gl_PerVertexIn Input\n"
+		"%BP_out_color = OpVariable %op_v4f32 Output\n"
+		"%BP_in_color = OpVariable %ip_a32v4f32 Input\n"
+		"${pre_main:opt}\n"
+		"%BP_main = OpFunction %void None %fun\n"
+		"%BP_label = OpLabel\n"
+		"%BP_gl_TC_0 = OpAccessChain %ip_f32 %BP_gl_TessCoord %c_u32_0\n"
+		"%BP_gl_TC_1 = OpAccessChain %ip_f32 %BP_gl_TessCoord %c_u32_1\n"
+		"%BP_gl_TC_2 = OpAccessChain %ip_f32 %BP_gl_TessCoord %c_u32_2\n"
+		"%BP_gl_in_gl_Pos_0 = OpAccessChain %ip_v4f32 %BP_gl_in %c_i32_0 %c_i32_0\n"
+		"%BP_gl_in_gl_Pos_1 = OpAccessChain %ip_v4f32 %BP_gl_in %c_i32_1 %c_i32_0\n"
+		"%BP_gl_in_gl_Pos_2 = OpAccessChain %ip_v4f32 %BP_gl_in %c_i32_2 %c_i32_0\n"
+
+		"%BP_gl_OPos = OpAccessChain %op_v4f32 %BP_stream %c_i32_0\n"
+		"%BP_in_color_0 = OpAccessChain %ip_v4f32 %BP_in_color %c_i32_0\n"
+		"%BP_in_color_1 = OpAccessChain %ip_v4f32 %BP_in_color %c_i32_1\n"
+		"%BP_in_color_2 = OpAccessChain %ip_v4f32 %BP_in_color %c_i32_2\n"
+
+		"%BP_TC_W_0 = OpLoad %f32 %BP_gl_TC_0\n"
+		"%BP_TC_W_1 = OpLoad %f32 %BP_gl_TC_1\n"
+		"%BP_TC_W_2 = OpLoad %f32 %BP_gl_TC_2\n"
+		"%BP_v4f32_TC_0 = OpCompositeConstruct %v4f32 %BP_TC_W_0 %BP_TC_W_0 %BP_TC_W_0 %BP_TC_W_0\n"
+		"%BP_v4f32_TC_1 = OpCompositeConstruct %v4f32 %BP_TC_W_1 %BP_TC_W_1 %BP_TC_W_1 %BP_TC_W_1\n"
+		"%BP_v4f32_TC_2 = OpCompositeConstruct %v4f32 %BP_TC_W_2 %BP_TC_W_2 %BP_TC_W_2 %BP_TC_W_2\n"
+
+		"%BP_gl_IP_0 = OpLoad %v4f32 %BP_gl_in_gl_Pos_0\n"
+		"%BP_gl_IP_1 = OpLoad %v4f32 %BP_gl_in_gl_Pos_1\n"
+		"%BP_gl_IP_2 = OpLoad %v4f32 %BP_gl_in_gl_Pos_2\n"
+
+		"%BP_IP_W_0 = OpFMul %v4f32 %BP_v4f32_TC_0 %BP_gl_IP_0\n"
+		"%BP_IP_W_1 = OpFMul %v4f32 %BP_v4f32_TC_1 %BP_gl_IP_1\n"
+		"%BP_IP_W_2 = OpFMul %v4f32 %BP_v4f32_TC_2 %BP_gl_IP_2\n"
+
+		"%BP_pos_sum_0 = OpFAdd %v4f32 %BP_IP_W_0 %BP_IP_W_1\n"
+		"%BP_pos_sum_1 = OpFAdd %v4f32 %BP_pos_sum_0 %BP_IP_W_2\n"
+
+		"OpStore %BP_gl_OPos %BP_pos_sum_1\n"
+
+		"%BP_IC_0 = OpLoad %v4f32 %BP_in_color_0\n"
+		"%BP_IC_1 = OpLoad %v4f32 %BP_in_color_1\n"
+		"%BP_IC_2 = OpLoad %v4f32 %BP_in_color_2\n"
+
+		"%BP_IC_W_0 = OpFMul %v4f32 %BP_v4f32_TC_0 %BP_IC_0\n"
+		"%BP_IC_W_1 = OpFMul %v4f32 %BP_v4f32_TC_1 %BP_IC_1\n"
+		"%BP_IC_W_2 = OpFMul %v4f32 %BP_v4f32_TC_2 %BP_IC_2\n"
+
+		"%BP_col_sum_0 = OpFAdd %v4f32 %BP_IC_W_0 %BP_IC_W_1\n"
+		"%BP_col_sum_1 = OpFAdd %v4f32 %BP_col_sum_0 %BP_IC_W_2\n"
+
+		"%BP_clr_transformed = OpFunctionCall %v4f32 %test_code %BP_col_sum_1\n"
+
+		"OpStore %BP_out_color %BP_clr_transformed\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+		"${testfun}\n";
+	return tcu::StringTemplate(tessEvalBoilerplate).specialize(fragments);
+}
+
+// Creates geometry-shader assembly by specializing a boilerplate StringTemplate
+// on fragments, which must (at least) map "testfun" to an OpFunction definition
+// for %test_code that takes and returns a %v4f32.  Boilerplate IDs are prefixed
+// with "BP_" to avoid collisions with fragments.
+//
+// Derived from this GLSL:
+//
+// #version 450
+// layout(triangles) in;
+// layout(triangle_strip, max_vertices = 3) out;
+//
+// layout(location = 1) in vec4 in_color[];
+// layout(location = 1) out vec4 out_color;
+//
+// void main() {
+//   gl_Position = gl_in[0].gl_Position;
+//   out_color = test_fun(in_color[0]);
+//   EmitVertex();
+//   gl_Position = gl_in[1].gl_Position;
+//   out_color = test_fun(in_color[1]);
+//   EmitVertex();
+//   gl_Position = gl_in[2].gl_Position;
+//   out_color = test_fun(in_color[2]);
+//   EmitVertex();
+//   EndPrimitive();
+// }
+string makeGeometryShaderAssembly(const map<string, string>& fragments)
+{
+	static const char geometryShaderBoilerplate[] =
+		"OpCapability Geometry\n"
+		"OpMemoryModel Logical GLSL450\n"
+		"OpEntryPoint Geometry %BP_main \"main\" %BP_out_gl_position %BP_gl_in %BP_out_color %BP_in_color\n"
+		"OpExecutionMode %BP_main Triangles\n"
+		"OpExecutionMode %BP_main Invocations 0\n"
+		"OpExecutionMode %BP_main OutputTriangleStrip\n"
+		"OpExecutionMode %BP_main OutputVertices 3\n"
+		"${debug:opt}\n"
+		"OpName %BP_main \"main\"\n"
+		"OpName %BP_per_vertex_in \"gl_PerVertex\"\n"
+		"OpMemberName %BP_per_vertex_in 0 \"gl_Position\"\n"
+		"OpMemberName %BP_per_vertex_in 1 \"gl_PointSize\"\n"
+		"OpMemberName %BP_per_vertex_in 2 \"gl_ClipDistance\"\n"
+		"OpMemberName %BP_per_vertex_in 3 \"gl_CullDistance\"\n"
+		"OpName %BP_gl_in \"gl_in\"\n"
+		"OpName %BP_out_color \"out_color\"\n"
+		"OpName %BP_in_color \"in_color\"\n"
+		"OpName %test_code \"testfun(vf4;\"\n"
+		"OpDecorate %BP_out_gl_position BuiltIn Position\n"
+		"OpMemberDecorate %BP_per_vertex_in 0 BuiltIn Position\n"
+		"OpMemberDecorate %BP_per_vertex_in 1 BuiltIn PointSize\n"
+		"OpMemberDecorate %BP_per_vertex_in 2 BuiltIn ClipDistance\n"
+		"OpMemberDecorate %BP_per_vertex_in 3 BuiltIn CullDistance\n"
+		"OpDecorate %BP_per_vertex_in Block\n"
+		"OpDecorate %BP_out_color Location 1\n"
+		"OpDecorate %BP_out_color Stream 0\n"
+		"OpDecorate %BP_in_color Location 1\n"
+		"${decoration:opt}\n"
+		SPIRV_ASSEMBLY_TYPES
+		SPIRV_ASSEMBLY_CONSTANTS
+		SPIRV_ASSEMBLY_ARRAYS
+		"%BP_per_vertex_in = OpTypeStruct %v4f32 %f32 %a1f32 %a1f32\n"
+		"%BP_a3_per_vertex_in = OpTypeArray %BP_per_vertex_in %c_u32_3\n"
+		"%BP_ip_a3_per_vertex_in = OpTypePointer Input %BP_a3_per_vertex_in\n"
+
+		"%BP_gl_in = OpVariable %BP_ip_a3_per_vertex_in Input\n"
+		"%BP_out_color = OpVariable %op_v4f32 Output\n"
+		"%BP_in_color = OpVariable %ip_a3v4f32 Input\n"
+		"%BP_out_gl_position = OpVariable %op_v4f32 Output\n"
+		"${pre_main:opt}\n"
+
+		"%BP_main = OpFunction %void None %fun\n"
+		"%BP_label = OpLabel\n"
+		"%BP_gl_in_0_gl_position = OpAccessChain %ip_v4f32 %BP_gl_in %c_i32_0 %c_i32_0\n"
+		"%BP_gl_in_1_gl_position = OpAccessChain %ip_v4f32 %BP_gl_in %c_i32_1 %c_i32_0\n"
+		"%BP_gl_in_2_gl_position = OpAccessChain %ip_v4f32 %BP_gl_in %c_i32_2 %c_i32_0\n"
+
+		"%BP_in_position_0 = OpLoad %v4f32 %BP_gl_in_0_gl_position\n"
+		"%BP_in_position_1 = OpLoad %v4f32 %BP_gl_in_1_gl_position\n"
+		"%BP_in_position_2 = OpLoad %v4f32 %BP_gl_in_2_gl_position \n"
+
+		"%BP_in_color_0_ptr = OpAccessChain %ip_v4f32 %BP_in_color %c_i32_0\n"
+		"%BP_in_color_1_ptr = OpAccessChain %ip_v4f32 %BP_in_color %c_i32_1\n"
+		"%BP_in_color_2_ptr = OpAccessChain %ip_v4f32 %BP_in_color %c_i32_2\n"
+
+		"%BP_in_color_0 = OpLoad %v4f32 %BP_in_color_0_ptr\n"
+		"%BP_in_color_1 = OpLoad %v4f32 %BP_in_color_1_ptr\n"
+		"%BP_in_color_2 = OpLoad %v4f32 %BP_in_color_2_ptr\n"
+
+		"%BP_transformed_in_color_0 = OpFunctionCall %v4f32 %test_code %BP_in_color_0\n"
+		"%BP_transformed_in_color_1 = OpFunctionCall %v4f32 %test_code %BP_in_color_1\n"
+		"%BP_transformed_in_color_2 = OpFunctionCall %v4f32 %test_code %BP_in_color_2\n"
+
+
+		"OpStore %BP_out_gl_position %BP_in_position_0\n"
+		"OpStore %BP_out_color %BP_transformed_in_color_0\n"
+		"OpEmitVertex\n"
+
+		"OpStore %BP_out_gl_position %BP_in_position_1\n"
+		"OpStore %BP_out_color %BP_transformed_in_color_1\n"
+		"OpEmitVertex\n"
+
+		"OpStore %BP_out_gl_position %BP_in_position_2\n"
+		"OpStore %BP_out_color %BP_transformed_in_color_2\n"
+		"OpEmitVertex\n"
+
+		"OpEndPrimitive\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+		"${testfun}\n";
+	return tcu::StringTemplate(geometryShaderBoilerplate).specialize(fragments);
+}
+
+// Creates fragment-shader assembly by specializing a boilerplate StringTemplate
+// on fragments, which must (at least) map "testfun" to an OpFunction definition
+// for %test_code that takes and returns a %v4f32.  Boilerplate IDs are prefixed
+// with "BP_" to avoid collisions with fragments.
+//
+// Derived from this GLSL:
+//
+// layout(location = 1) in highp vec4 vtxColor;
+// layout(location = 0) out highp vec4 fragColor;
+// highp vec4 testfun(highp vec4 x) { return x; }
+// void main(void) { fragColor = testfun(vtxColor); }
+//
+// with modifications including passing vtxColor by value and ripping out the
+// testfun() definition.
+string makeFragmentShaderAssembly(const map<string, string>& fragments)
+{
+	static const char fragmentShaderBoilerplate[] =
+		"OpCapability Shader\n"
+		"OpMemoryModel Logical GLSL450\n"
+		"OpEntryPoint Fragment %BP_main \"main\" %BP_vtxColor %BP_fragColor\n"
+		"OpExecutionMode %BP_main OriginUpperLeft\n"
+		"${debug:opt}\n"
+		"OpName %BP_main \"main\"\n"
+		"OpName %BP_fragColor \"fragColor\"\n"
+		"OpName %BP_vtxColor \"vtxColor\"\n"
+		"OpName %test_code \"testfun(vf4;\"\n"
+		"OpDecorate %BP_fragColor Location 0\n"
+		"OpDecorate %BP_vtxColor Location 1\n"
+		"${decoration:opt}\n"
+		SPIRV_ASSEMBLY_TYPES
+		SPIRV_ASSEMBLY_CONSTANTS
+		SPIRV_ASSEMBLY_ARRAYS
+		"%BP_fragColor = OpVariable %op_v4f32 Output\n"
+		"%BP_vtxColor = OpVariable %ip_v4f32 Input\n"
+		"${pre_main:opt}\n"
+		"%BP_main = OpFunction %void None %fun\n"
+		"%BP_label_main = OpLabel\n"
+		"%BP_tmp1 = OpLoad %v4f32 %BP_vtxColor\n"
+		"%BP_tmp2 = OpFunctionCall %v4f32 %test_code %BP_tmp1\n"
+		"OpStore %BP_fragColor %BP_tmp2\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+		"${testfun}\n";
+	return tcu::StringTemplate(fragmentShaderBoilerplate).specialize(fragments);
+}
+
+// Creates fragments that specialize into a simple pass-through shader (of any kind).
+map<string, string> passthruFragments(void)
+{
+	map<string, string> fragments;
+	fragments["testfun"] =
+		// A %test_code function that returns its argument unchanged.
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%label_testfun = OpLabel\n"
+		"OpReturnValue %param1\n"
+		"OpFunctionEnd\n";
+	return fragments;
+}
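+
+// For example, makeVertexShaderAssembly(passthruFragments()) produces a vertex
+// shader whose ${testfun} placeholder is replaced by the identity function above,
+// so the incoming color is forwarded unchanged.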
+
+// Adds shader assembly text to dst.spirvAsmSources for all shader kinds.
+// Vertex shader gets custom code from context, the rest are pass-through.
+void addShaderCodeCustomVertex(vk::SourceCollections& dst, InstanceContext context)
+{
+	map<string, string> passthru = passthruFragments();
+	dst.spirvAsmSources.add("vert") << makeVertexShaderAssembly(context.testCodeFragments);
+	dst.spirvAsmSources.add("frag") << makeFragmentShaderAssembly(passthru);
+}
+
+// Adds shader assembly text to dst.spirvAsmSources for all shader kinds.
+// Tessellation control shader gets custom code from context, the rest are
+// pass-through.
+void addShaderCodeCustomTessControl(vk::SourceCollections& dst, InstanceContext context)
+{
+	map<string, string> passthru = passthruFragments();
+	dst.spirvAsmSources.add("vert") << makeVertexShaderAssembly(passthru);
+	dst.spirvAsmSources.add("tessc") << makeTessControlShaderAssembly(context.testCodeFragments);
+	dst.spirvAsmSources.add("tesse") << makeTessEvalShaderAssembly(passthru);
+	dst.spirvAsmSources.add("frag") << makeFragmentShaderAssembly(passthru);
+}
+
+// Adds shader assembly text to dst.spirvAsmSources for all shader kinds.
+// Tessellation evaluation shader gets custom code from context, the rest are
+// pass-through.
+void addShaderCodeCustomTessEval(vk::SourceCollections& dst, InstanceContext context)
+{
+	map<string, string> passthru = passthruFragments();
+	dst.spirvAsmSources.add("vert") << makeVertexShaderAssembly(passthru);
+	dst.spirvAsmSources.add("tessc") << makeTessControlShaderAssembly(passthru);
+	dst.spirvAsmSources.add("tesse") << makeTessEvalShaderAssembly(context.testCodeFragments);
+	dst.spirvAsmSources.add("frag") << makeFragmentShaderAssembly(passthru);
+}
+
+// Adds shader assembly text to dst.spirvAsmSources for all shader kinds.
+// Geometry shader gets custom code from context, the rest are pass-through.
+void addShaderCodeCustomGeometry(vk::SourceCollections& dst, InstanceContext context)
+{
+	map<string, string> passthru = passthruFragments();
+	dst.spirvAsmSources.add("vert") << makeVertexShaderAssembly(passthru);
+	dst.spirvAsmSources.add("geom") << makeGeometryShaderAssembly(context.testCodeFragments);
+	dst.spirvAsmSources.add("frag") << makeFragmentShaderAssembly(passthru);
+}
+
+// Adds shader assembly text to dst.spirvAsmSources for all shader kinds.
+// Fragment shader gets custom code from context, the rest are pass-through.
+void addShaderCodeCustomFragment(vk::SourceCollections& dst, InstanceContext context)
+{
+	map<string, string> passthru = passthruFragments();
+	dst.spirvAsmSources.add("vert") << makeVertexShaderAssembly(passthru);
+	dst.spirvAsmSources.add("frag") << makeFragmentShaderAssembly(context.testCodeFragments);
+}
+
+void createCombinedModule(vk::SourceCollections& dst, InstanceContext)
+{
+	// \todo [2015-12-07 awoloszyn] Make tessellation / geometry conditional
+	// \todo [2015-12-07 awoloszyn] Remove OpName and OpMemberName at some point
+	dst.spirvAsmSources.add("module") <<
+		"OpCapability Shader\n"
+		"OpCapability Geometry\n"
+		"OpCapability Tessellation\n"
+		"OpMemoryModel Logical GLSL450\n"
+
+		"OpEntryPoint Vertex %vert_main \"main\" %vert_Position %vert_vtxColor %vert_color %vert_vtxPosition %vert_vertex_id %vert_instance_id\n"
+		"OpEntryPoint Geometry %geom_main \"main\" %geom_out_gl_position %geom_gl_in %geom_out_color %geom_in_color\n"
+		"OpEntryPoint TessellationControl %tessc_main \"main\" %tessc_out_color %tessc_gl_InvocationID %tessc_in_color %tessc_out_position %tessc_in_position %tessc_gl_TessLevelOuter %tessc_gl_TessLevelInner\n"
+		"OpEntryPoint TessellationEvaluation %tesse_main \"main\" %tesse_stream %tesse_gl_tessCoord %tesse_in_position %tesse_out_color %tesse_in_color \n"
+		"OpEntryPoint Fragment %frag_main \"main\" %frag_vtxColor %frag_fragColor\n"
+
+		"OpExecutionMode %geom_main Triangles\n"
+		"OpExecutionMode %geom_main Invocations 0\n"
+		"OpExecutionMode %geom_main OutputTriangleStrip\n"
+		"OpExecutionMode %geom_main OutputVertices 3\n"
+
+		"OpExecutionMode %tessc_main OutputVertices 3\n"
+
+		"OpExecutionMode %tesse_main Triangles\n"
+
+		"OpExecutionMode %frag_main OriginUpperLeft\n"
+
+		"OpName %vert_main \"main\"\n"
+		"OpName %vert_vtxPosition \"vtxPosition\"\n"
+		"OpName %vert_Position \"position\"\n"
+		"OpName %vert_vtxColor \"vtxColor\"\n"
+		"OpName %vert_color \"color\"\n"
+		"OpName %vert_vertex_id \"gl_VertexIndex\"\n"
+		"OpName %vert_instance_id \"gl_InstanceIndex\"\n"
+		"OpName %geom_main \"main\"\n"
+		"OpName %geom_per_vertex_in \"gl_PerVertex\"\n"
+		"OpMemberName %geom_per_vertex_in 0 \"gl_Position\"\n"
+		"OpMemberName %geom_per_vertex_in 1 \"gl_PointSize\"\n"
+		"OpMemberName %geom_per_vertex_in 2 \"gl_ClipDistance\"\n"
+		"OpMemberName %geom_per_vertex_in 3 \"gl_CullDistance\"\n"
+		"OpName %geom_gl_in \"gl_in\"\n"
+		"OpName %geom_out_color \"out_color\"\n"
+		"OpName %geom_in_color \"in_color\"\n"
+		"OpName %tessc_main \"main\"\n"
+		"OpName %tessc_out_color \"out_color\"\n"
+		"OpName %tessc_gl_InvocationID \"gl_InvocationID\"\n"
+		"OpName %tessc_in_color \"in_color\"\n"
+		"OpName %tessc_out_position \"out_position\"\n"
+		"OpName %tessc_in_position \"in_position\"\n"
+		"OpName %tessc_gl_TessLevelOuter \"gl_TessLevelOuter\"\n"
+		"OpName %tessc_gl_TessLevelInner \"gl_TessLevelInner\"\n"
+		"OpName %tesse_main \"main\"\n"
+		"OpName %tesse_per_vertex_out \"gl_PerVertex\"\n"
+		"OpMemberName %tesse_per_vertex_out 0 \"gl_Position\"\n"
+		"OpMemberName %tesse_per_vertex_out 1 \"gl_PointSize\"\n"
+		"OpMemberName %tesse_per_vertex_out 2 \"gl_ClipDistance\"\n"
+		"OpMemberName %tesse_per_vertex_out 3 \"gl_CullDistance\"\n"
+		"OpName %tesse_stream \"\"\n"
+		"OpName %tesse_gl_tessCoord \"gl_TessCoord\"\n"
+		"OpName %tesse_in_position \"in_position\"\n"
+		"OpName %tesse_out_color \"out_color\"\n"
+		"OpName %tesse_in_color \"in_color\"\n"
+		"OpName %frag_main \"main\"\n"
+		"OpName %frag_fragColor \"fragColor\"\n"
+		"OpName %frag_vtxColor \"vtxColor\"\n"
+
+		"; Vertex decorations\n"
+		"OpDecorate %vert_vtxPosition Location 2\n"
+		"OpDecorate %vert_Position Location 0\n"
+		"OpDecorate %vert_vtxColor Location 1\n"
+		"OpDecorate %vert_color Location 1\n"
+		"OpDecorate %vert_vertex_id BuiltIn VertexIndex\n"
+		"OpDecorate %vert_instance_id BuiltIn InstanceIndex\n"
+
+		"; Geometry decorations\n"
+		"OpDecorate %geom_out_gl_position BuiltIn Position\n"
+		"OpMemberDecorate %geom_per_vertex_in 0 BuiltIn Position\n"
+		"OpMemberDecorate %geom_per_vertex_in 1 BuiltIn PointSize\n"
+		"OpMemberDecorate %geom_per_vertex_in 2 BuiltIn ClipDistance\n"
+		"OpMemberDecorate %geom_per_vertex_in 3 BuiltIn CullDistance\n"
+		"OpDecorate %geom_per_vertex_in Block\n"
+		"OpDecorate %geom_out_color Location 1\n"
+		"OpDecorate %geom_out_color Stream 0\n"
+		"OpDecorate %geom_in_color Location 1\n"
+
+		"; Tessellation Control decorations\n"
+		"OpDecorate %tessc_out_color Location 1\n"
+		"OpDecorate %tessc_gl_InvocationID BuiltIn InvocationId\n"
+		"OpDecorate %tessc_in_color Location 1\n"
+		"OpDecorate %tessc_out_position Location 2\n"
+		"OpDecorate %tessc_in_position Location 2\n"
+		"OpDecorate %tessc_gl_TessLevelOuter Patch\n"
+		"OpDecorate %tessc_gl_TessLevelOuter BuiltIn TessLevelOuter\n"
+		"OpDecorate %tessc_gl_TessLevelInner Patch\n"
+		"OpDecorate %tessc_gl_TessLevelInner BuiltIn TessLevelInner\n"
+
+		"; Tessellation Evaluation decorations\n"
+		"OpMemberDecorate %tesse_per_vertex_out 0 BuiltIn Position\n"
+		"OpMemberDecorate %tesse_per_vertex_out 1 BuiltIn PointSize\n"
+		"OpMemberDecorate %tesse_per_vertex_out 2 BuiltIn ClipDistance\n"
+		"OpMemberDecorate %tesse_per_vertex_out 3 BuiltIn CullDistance\n"
+		"OpDecorate %tesse_per_vertex_out Block\n"
+		"OpDecorate %tesse_gl_tessCoord BuiltIn TessCoord\n"
+		"OpDecorate %tesse_in_position Location 2\n"
+		"OpDecorate %tesse_out_color Location 1\n"
+		"OpDecorate %tesse_in_color Location 1\n"
+
+		"; Fragment decorations\n"
+		"OpDecorate %frag_fragColor Location 0\n"
+		"OpDecorate %frag_vtxColor Location 1\n"
+
+		SPIRV_ASSEMBLY_TYPES
+		SPIRV_ASSEMBLY_CONSTANTS
+		SPIRV_ASSEMBLY_ARRAYS
+
+		"; Vertex Variables\n"
+		"%vert_vtxPosition = OpVariable %op_v4f32 Output\n"
+		"%vert_Position = OpVariable %ip_v4f32 Input\n"
+		"%vert_vtxColor = OpVariable %op_v4f32 Output\n"
+		"%vert_color = OpVariable %ip_v4f32 Input\n"
+		"%vert_vertex_id = OpVariable %ip_i32 Input\n"
+		"%vert_instance_id = OpVariable %ip_i32 Input\n"
+
+		"; Geometry Variables\n"
+		"%geom_per_vertex_in = OpTypeStruct %v4f32 %f32 %a1f32 %a1f32\n"
+		"%geom_a3_per_vertex_in = OpTypeArray %geom_per_vertex_in %c_u32_3\n"
+		"%geom_ip_a3_per_vertex_in = OpTypePointer Input %geom_a3_per_vertex_in\n"
+		"%geom_gl_in = OpVariable %geom_ip_a3_per_vertex_in Input\n"
+		"%geom_out_color = OpVariable %op_v4f32 Output\n"
+		"%geom_in_color = OpVariable %ip_a3v4f32 Input\n"
+		"%geom_out_gl_position = OpVariable %op_v4f32 Output\n"
+
+		"; Tessellation Control Variables\n"
+		"%tessc_out_color = OpVariable %op_a3v4f32 Output\n"
+		"%tessc_gl_InvocationID = OpVariable %ip_i32 Input\n"
+		"%tessc_in_color = OpVariable %ip_a32v4f32 Input\n"
+		"%tessc_out_position = OpVariable %op_a3v4f32 Output\n"
+		"%tessc_in_position = OpVariable %ip_a32v4f32 Input\n"
+		"%tessc_gl_TessLevelOuter = OpVariable %op_a4f32 Output\n"
+		"%tessc_gl_TessLevelInner = OpVariable %op_a2f32 Output\n"
+
+		"; Tessellation Evaluation Variables\n"
+		"%tesse_per_vertex_out = OpTypeStruct %v4f32 %f32 %a1f32 %a1f32\n"
+		"%tesse_op_per_vertex_out = OpTypePointer Output %tesse_per_vertex_out\n"
+		"%tesse_stream = OpVariable %tesse_op_per_vertex_out Output\n"
+		"%tesse_gl_tessCoord = OpVariable %ip_v3f32 Input\n"
+		"%tesse_in_position = OpVariable %ip_a32v4f32 Input\n"
+		"%tesse_out_color = OpVariable %op_v4f32 Output\n"
+		"%tesse_in_color = OpVariable %ip_a32v4f32 Input\n"
+
+		"; Fragment Variables\n"
+		"%frag_fragColor = OpVariable %op_v4f32 Output\n"
+		"%frag_vtxColor = OpVariable %ip_v4f32 Input\n"
+
+		"; Vertex Entry\n"
+		"%vert_main = OpFunction %void None %fun\n"
+		"%vert_label = OpLabel\n"
+		"%vert_tmp_position = OpLoad %v4f32 %vert_Position\n"
+		"OpStore %vert_vtxPosition %vert_tmp_position\n"
+		"%vert_tmp_color = OpLoad %v4f32 %vert_color\n"
+		"OpStore %vert_vtxColor %vert_tmp_color\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+
+		"; Geometry Entry\n"
+		"%geom_main = OpFunction %void None %fun\n"
+		"%geom_label = OpLabel\n"
+		"%geom_gl_in_0_gl_position = OpAccessChain %ip_v4f32 %geom_gl_in %c_i32_0 %c_i32_0\n"
+		"%geom_gl_in_1_gl_position = OpAccessChain %ip_v4f32 %geom_gl_in %c_i32_1 %c_i32_0\n"
+		"%geom_gl_in_2_gl_position = OpAccessChain %ip_v4f32 %geom_gl_in %c_i32_2 %c_i32_0\n"
+		"%geom_in_position_0 = OpLoad %v4f32 %geom_gl_in_0_gl_position\n"
+		"%geom_in_position_1 = OpLoad %v4f32 %geom_gl_in_1_gl_position\n"
+		"%geom_in_position_2 = OpLoad %v4f32 %geom_gl_in_2_gl_position \n"
+		"%geom_in_color_0_ptr = OpAccessChain %ip_v4f32 %geom_in_color %c_i32_0\n"
+		"%geom_in_color_1_ptr = OpAccessChain %ip_v4f32 %geom_in_color %c_i32_1\n"
+		"%geom_in_color_2_ptr = OpAccessChain %ip_v4f32 %geom_in_color %c_i32_2\n"
+		"%geom_in_color_0 = OpLoad %v4f32 %geom_in_color_0_ptr\n"
+		"%geom_in_color_1 = OpLoad %v4f32 %geom_in_color_1_ptr\n"
+		"%geom_in_color_2 = OpLoad %v4f32 %geom_in_color_2_ptr\n"
+		"OpStore %geom_out_gl_position %geom_in_position_0\n"
+		"OpStore %geom_out_color %geom_in_color_0\n"
+		"OpEmitVertex\n"
+		"OpStore %geom_out_gl_position %geom_in_position_1\n"
+		"OpStore %geom_out_color %geom_in_color_1\n"
+		"OpEmitVertex\n"
+		"OpStore %geom_out_gl_position %geom_in_position_2\n"
+		"OpStore %geom_out_color %geom_in_color_2\n"
+		"OpEmitVertex\n"
+		"OpEndPrimitive\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+
+		"; Tessellation Control Entry\n"
+		"%tessc_main = OpFunction %void None %fun\n"
+		"%tessc_label = OpLabel\n"
+		"%tessc_invocation_id = OpLoad %i32 %tessc_gl_InvocationID\n"
+		"%tessc_in_color_ptr = OpAccessChain %ip_v4f32 %tessc_in_color %tessc_invocation_id\n"
+		"%tessc_in_position_ptr = OpAccessChain %ip_v4f32 %tessc_in_position %tessc_invocation_id\n"
+		"%tessc_in_color_val = OpLoad %v4f32 %tessc_in_color_ptr\n"
+		"%tessc_in_position_val = OpLoad %v4f32 %tessc_in_position_ptr\n"
+		"%tessc_out_color_ptr = OpAccessChain %op_v4f32 %tessc_out_color %tessc_invocation_id\n"
+		"%tessc_out_position_ptr = OpAccessChain %op_v4f32 %tessc_out_position %tessc_invocation_id\n"
+		"OpStore %tessc_out_color_ptr %tessc_in_color_val\n"
+		"OpStore %tessc_out_position_ptr %tessc_in_position_val\n"
+		"%tessc_is_first_invocation = OpIEqual %bool %tessc_invocation_id %c_i32_0\n"
+		"OpSelectionMerge %tessc_merge_label None\n"
+		"OpBranchConditional %tessc_is_first_invocation %tessc_first_invocation %tessc_merge_label\n"
+		"%tessc_first_invocation = OpLabel\n"
+		"%tessc_tess_outer_0 = OpAccessChain %op_f32 %tessc_gl_TessLevelOuter %c_i32_0\n"
+		"%tessc_tess_outer_1 = OpAccessChain %op_f32 %tessc_gl_TessLevelOuter %c_i32_1\n"
+		"%tessc_tess_outer_2 = OpAccessChain %op_f32 %tessc_gl_TessLevelOuter %c_i32_2\n"
+		"%tessc_tess_inner = OpAccessChain %op_f32 %tessc_gl_TessLevelInner %c_i32_0\n"
+		"OpStore %tessc_tess_outer_0 %c_f32_1\n"
+		"OpStore %tessc_tess_outer_1 %c_f32_1\n"
+		"OpStore %tessc_tess_outer_2 %c_f32_1\n"
+		"OpStore %tessc_tess_inner %c_f32_1\n"
+		"OpBranch %tessc_merge_label\n"
+		"%tessc_merge_label = OpLabel\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+
+		"; Tessellation Evaluation Entry\n"
+		"%tesse_main = OpFunction %void None %fun\n"
+		"%tesse_label = OpLabel\n"
+		"%tesse_tc_0_ptr = OpAccessChain %ip_f32 %tesse_gl_tessCoord %c_u32_0\n"
+		"%tesse_tc_1_ptr = OpAccessChain %ip_f32 %tesse_gl_tessCoord %c_u32_1\n"
+		"%tesse_tc_2_ptr = OpAccessChain %ip_f32 %tesse_gl_tessCoord %c_u32_2\n"
+		"%tesse_tc_0 = OpLoad %f32 %tesse_tc_0_ptr\n"
+		"%tesse_tc_1 = OpLoad %f32 %tesse_tc_1_ptr\n"
+		"%tesse_tc_2 = OpLoad %f32 %tesse_tc_2_ptr\n"
+		"%tesse_in_pos_0_ptr = OpAccessChain %ip_v4f32 %tesse_in_position %c_i32_0\n"
+		"%tesse_in_pos_1_ptr = OpAccessChain %ip_v4f32 %tesse_in_position %c_i32_1\n"
+		"%tesse_in_pos_2_ptr = OpAccessChain %ip_v4f32 %tesse_in_position %c_i32_2\n"
+		"%tesse_in_pos_0 = OpLoad %v4f32 %tesse_in_pos_0_ptr\n"
+		"%tesse_in_pos_1 = OpLoad %v4f32 %tesse_in_pos_1_ptr\n"
+		"%tesse_in_pos_2 = OpLoad %v4f32 %tesse_in_pos_2_ptr\n"
+		"%tesse_in_pos_0_weighted = OpVectorTimesScalar %v4f32 %tesse_tc_0 %tesse_in_pos_0\n"
+		"%tesse_in_pos_1_weighted = OpVectorTimesScalar %v4f32 %tesse_tc_1 %tesse_in_pos_1\n"
+		"%tesse_in_pos_2_weighted = OpVectorTimesScalar %v4f32 %tesse_tc_2 %tesse_in_pos_2\n"
+		"%tesse_out_pos_ptr = OpAccessChain %op_v4f32 %tesse_stream %c_i32_0\n"
+		"%tesse_in_pos_0_plus_pos_1 = OpFAdd %v4f32 %tesse_in_pos_0_weighted %tesse_in_pos_1_weighted\n"
+		"%tesse_computed_out = OpFAdd %v4f32 %tesse_in_pos_0_plus_pos_1 %tesse_in_pos_2_weighted\n"
+		"OpStore %tesse_out_pos_ptr %tesse_computed_out\n"
+		"%tesse_in_clr_0_ptr = OpAccessChain %ip_v4f32 %tesse_in_color %c_i32_0\n"
+		"%tesse_in_clr_1_ptr = OpAccessChain %ip_v4f32 %tesse_in_color %c_i32_1\n"
+		"%tesse_in_clr_2_ptr = OpAccessChain %ip_v4f32 %tesse_in_color %c_i32_2\n"
+		"%tesse_in_clr_0 = OpLoad %v4f32 %tesse_in_clr_0_ptr\n"
+		"%tesse_in_clr_1 = OpLoad %v4f32 %tesse_in_clr_1_ptr\n"
+		"%tesse_in_clr_2 = OpLoad %v4f32 %tesse_in_clr_2_ptr\n"
+		"%tesse_in_clr_0_weighted = OpVectorTimesScalar %v4f32 %tesse_tc_0 %tesse_in_clr_0\n"
+		"%tesse_in_clr_1_weighted = OpVectorTimesScalar %v4f32 %tesse_tc_1 %tesse_in_clr_1\n"
+		"%tesse_in_clr_2_weighted = OpVectorTimesScalar %v4f32 %tesse_tc_2 %tesse_in_clr_2\n"
+		"%tesse_in_clr_0_plus_col_1 = OpFAdd %v4f32 %tesse_in_clr_0_weighted %tesse_in_clr_1_weighted\n"
+		"%tesse_computed_clr = OpFAdd %v4f32 %tesse_in_clr_0_plus_col_1 %tesse_in_clr_2_weighted\n"
+		"OpStore %tesse_out_color %tesse_computed_clr\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+
+		"; Fragment Entry\n"
+		"%frag_main = OpFunction %void None %fun\n"
+		"%frag_label_main = OpLabel\n"
+		"%frag_tmp1 = OpLoad %v4f32 %frag_vtxColor\n"
+		"OpStore %frag_fragColor %frag_tmp1\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n";
+}
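+
+// A combined module like the one above would be referenced through a single
+// module name shared by every stage. A sketch, assuming the module is added
+// under the name "module" (as in the call above) and that each stage's entry
+// point is named "main" (as in the assembly):
+//
+//   const ShaderElement combinedPipeline[] =
+//   {
+//       ShaderElement("module", "main", VK_SHADER_STAGE_VERTEX_BIT),
+//       ShaderElement("module", "main", VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT),
+//       ShaderElement("module", "main", VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT),
+//       ShaderElement("module", "main", VK_SHADER_STAGE_GEOMETRY_BIT),
+//       ShaderElement("module", "main", VK_SHADER_STAGE_FRAGMENT_BIT),
+//   };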
+
+// This adds two shader entry points for each stage. The first
+// is a pass-through, the second inverts the color.
+void createMultipleEntries(vk::SourceCollections& dst, InstanceContext)
+{
+	dst.spirvAsmSources.add("vert") <<
+	// This module contains two vertex shaders: one that is a pass-through
+	// and a second that inverts the color of the output (1.0 - color).
+		"OpCapability Shader\n"
+		"OpMemoryModel Logical GLSL450\n"
+		"OpEntryPoint Vertex %main \"vert1\" %Position %vtxColor %color %vtxPosition %vertex_id %instance_id\n"
+		"OpEntryPoint Vertex %main2 \"vert2\" %Position %vtxColor %color %vtxPosition %vertex_id %instance_id\n"
+
+		"OpName %main \"vert1\"\n"
+		"OpName %main2 \"vert2\"\n"
+		"OpName %vtxPosition \"vtxPosition\"\n"
+		"OpName %Position \"position\"\n"
+		"OpName %vtxColor \"vtxColor\"\n"
+		"OpName %color \"color\"\n"
+		"OpName %vertex_id \"gl_VertexIndex\"\n"
+		"OpName %instance_id \"gl_InstanceIndex\"\n"
+
+		"OpDecorate %vtxPosition Location 2\n"
+		"OpDecorate %Position Location 0\n"
+		"OpDecorate %vtxColor Location 1\n"
+		"OpDecorate %color Location 1\n"
+		"OpDecorate %vertex_id BuiltIn VertexIndex\n"
+		"OpDecorate %instance_id BuiltIn InstanceIndex\n"
+		SPIRV_ASSEMBLY_TYPES
+		SPIRV_ASSEMBLY_CONSTANTS
+		SPIRV_ASSEMBLY_ARRAYS
+		"%cval = OpConstantComposite %v4f32 %c_f32_1 %c_f32_1 %c_f32_1 %c_f32_0\n"
+		"%vtxPosition = OpVariable %op_v4f32 Output\n"
+		"%Position = OpVariable %ip_v4f32 Input\n"
+		"%vtxColor = OpVariable %op_v4f32 Output\n"
+		"%color = OpVariable %ip_v4f32 Input\n"
+		"%vertex_id = OpVariable %ip_i32 Input\n"
+		"%instance_id = OpVariable %ip_i32 Input\n"
+
+		"%main = OpFunction %void None %fun\n"
+		"%label = OpLabel\n"
+		"%tmp_position = OpLoad %v4f32 %Position\n"
+		"OpStore %vtxPosition %tmp_position\n"
+		"%tmp_color = OpLoad %v4f32 %color\n"
+		"OpStore %vtxColor %tmp_color\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+
+		"%main2 = OpFunction %void None %fun\n"
+		"%label2 = OpLabel\n"
+		"%tmp_position2 = OpLoad %v4f32 %Position\n"
+		"OpStore %vtxPosition %tmp_position2\n"
+		"%tmp_color2 = OpLoad %v4f32 %color\n"
+		"%tmp_color3 = OpFSub %v4f32 %cval %tmp_color2\n"
+		"%tmp_color4 = OpVectorInsertDynamic %v4f32 %tmp_color3 %c_f32_1 %c_i32_3\n"
+		"OpStore %vtxColor %tmp_color4\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n";
+
+	dst.spirvAsmSources.add("frag") <<
+		// This is a single module that contains two fragment shaders:
+		// one that passes the color through and the other that inverts the
+		// output color (1.0 - color).
+		"OpCapability Shader\n"
+		"OpMemoryModel Logical GLSL450\n"
+		"OpEntryPoint Fragment %main \"frag1\" %vtxColor %fragColor\n"
+		"OpEntryPoint Fragment %main2 \"frag2\" %vtxColor %fragColor\n"
+		"OpExecutionMode %main OriginUpperLeft\n"
+		"OpExecutionMode %main2 OriginUpperLeft\n"
+
+		"OpName %main \"frag1\"\n"
+		"OpName %main2 \"frag2\"\n"
+		"OpName %fragColor \"fragColor\"\n"
+		"OpName %vtxColor \"vtxColor\"\n"
+		"OpDecorate %fragColor Location 0\n"
+		"OpDecorate %vtxColor Location 1\n"
+		SPIRV_ASSEMBLY_TYPES
+		SPIRV_ASSEMBLY_CONSTANTS
+		SPIRV_ASSEMBLY_ARRAYS
+		"%cval = OpConstantComposite %v4f32 %c_f32_1 %c_f32_1 %c_f32_1 %c_f32_0\n"
+		"%fragColor = OpVariable %op_v4f32 Output\n"
+		"%vtxColor = OpVariable %ip_v4f32 Input\n"
+
+		"%main = OpFunction %void None %fun\n"
+		"%label_main = OpLabel\n"
+		"%tmp1 = OpLoad %v4f32 %vtxColor\n"
+		"OpStore %fragColor %tmp1\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+
+		"%main2 = OpFunction %void None %fun\n"
+		"%label_main2 = OpLabel\n"
+		"%tmp2 = OpLoad %v4f32 %vtxColor\n"
+		"%tmp3 = OpFSub %v4f32 %cval %tmp2\n"
+		"%tmp4 = OpVectorInsertDynamic %v4f32 %tmp3 %c_f32_1 %c_i32_3\n"
+		"OpStore %fragColor %tmp4\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n";
+
+	dst.spirvAsmSources.add("geom") <<
+		"OpCapability Geometry\n"
+		"OpMemoryModel Logical GLSL450\n"
+		"OpEntryPoint Geometry %geom1_main \"geom1\" %out_gl_position %gl_in %out_color %in_color\n"
+		"OpEntryPoint Geometry %geom2_main \"geom2\" %out_gl_position %gl_in %out_color %in_color\n"
+		"OpExecutionMode %geom1_main Triangles\n"
+		"OpExecutionMode %geom2_main Triangles\n"
+		"OpExecutionMode %geom1_main Invocations 0\n"
+		"OpExecutionMode %geom2_main Invocations 0\n"
+		"OpExecutionMode %geom1_main OutputTriangleStrip\n"
+		"OpExecutionMode %geom2_main OutputTriangleStrip\n"
+		"OpExecutionMode %geom1_main OutputVertices 3\n"
+		"OpExecutionMode %geom2_main OutputVertices 3\n"
+		"OpName %geom1_main \"geom1\"\n"
+		"OpName %geom2_main \"geom2\"\n"
+		"OpName %per_vertex_in \"gl_PerVertex\"\n"
+		"OpMemberName %per_vertex_in 0 \"gl_Position\"\n"
+		"OpMemberName %per_vertex_in 1 \"gl_PointSize\"\n"
+		"OpMemberName %per_vertex_in 2 \"gl_ClipDistance\"\n"
+		"OpMemberName %per_vertex_in 3 \"gl_CullDistance\"\n"
+		"OpName %gl_in \"gl_in\"\n"
+		"OpName %out_color \"out_color\"\n"
+		"OpName %in_color \"in_color\"\n"
+		"OpDecorate %out_gl_position BuiltIn Position\n"
+		"OpMemberDecorate %per_vertex_in 0 BuiltIn Position\n"
+		"OpMemberDecorate %per_vertex_in 1 BuiltIn PointSize\n"
+		"OpMemberDecorate %per_vertex_in 2 BuiltIn ClipDistance\n"
+		"OpMemberDecorate %per_vertex_in 3 BuiltIn CullDistance\n"
+		"OpDecorate %per_vertex_in Block\n"
+		"OpDecorate %out_color Location 1\n"
+		"OpDecorate %out_color Stream 0\n"
+		"OpDecorate %in_color Location 1\n"
+		SPIRV_ASSEMBLY_TYPES
+		SPIRV_ASSEMBLY_CONSTANTS
+		SPIRV_ASSEMBLY_ARRAYS
+		"%cval = OpConstantComposite %v4f32 %c_f32_1 %c_f32_1 %c_f32_1 %c_f32_0\n"
+		"%per_vertex_in = OpTypeStruct %v4f32 %f32 %a1f32 %a1f32\n"
+		"%a3_per_vertex_in = OpTypeArray %per_vertex_in %c_u32_3\n"
+		"%ip_a3_per_vertex_in = OpTypePointer Input %a3_per_vertex_in\n"
+		"%gl_in = OpVariable %ip_a3_per_vertex_in Input\n"
+		"%out_color = OpVariable %op_v4f32 Output\n"
+		"%in_color = OpVariable %ip_a3v4f32 Input\n"
+		"%out_gl_position = OpVariable %op_v4f32 Output\n"
+
+		"%geom1_main = OpFunction %void None %fun\n"
+		"%geom1_label = OpLabel\n"
+		"%geom1_gl_in_0_gl_position = OpAccessChain %ip_v4f32 %gl_in %c_i32_0 %c_i32_0\n"
+		"%geom1_gl_in_1_gl_position = OpAccessChain %ip_v4f32 %gl_in %c_i32_1 %c_i32_0\n"
+		"%geom1_gl_in_2_gl_position = OpAccessChain %ip_v4f32 %gl_in %c_i32_2 %c_i32_0\n"
+		"%geom1_in_position_0 = OpLoad %v4f32 %geom1_gl_in_0_gl_position\n"
+		"%geom1_in_position_1 = OpLoad %v4f32 %geom1_gl_in_1_gl_position\n"
+		"%geom1_in_position_2 = OpLoad %v4f32 %geom1_gl_in_2_gl_position \n"
+		"%geom1_in_color_0_ptr = OpAccessChain %ip_v4f32 %in_color %c_i32_0\n"
+		"%geom1_in_color_1_ptr = OpAccessChain %ip_v4f32 %in_color %c_i32_1\n"
+		"%geom1_in_color_2_ptr = OpAccessChain %ip_v4f32 %in_color %c_i32_2\n"
+		"%geom1_in_color_0 = OpLoad %v4f32 %geom1_in_color_0_ptr\n"
+		"%geom1_in_color_1 = OpLoad %v4f32 %geom1_in_color_1_ptr\n"
+		"%geom1_in_color_2 = OpLoad %v4f32 %geom1_in_color_2_ptr\n"
+		"OpStore %out_gl_position %geom1_in_position_0\n"
+		"OpStore %out_color %geom1_in_color_0\n"
+		"OpEmitVertex\n"
+		"OpStore %out_gl_position %geom1_in_position_1\n"
+		"OpStore %out_color %geom1_in_color_1\n"
+		"OpEmitVertex\n"
+		"OpStore %out_gl_position %geom1_in_position_2\n"
+		"OpStore %out_color %geom1_in_color_2\n"
+		"OpEmitVertex\n"
+		"OpEndPrimitive\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+
+		"%geom2_main = OpFunction %void None %fun\n"
+		"%geom2_label = OpLabel\n"
+		"%geom2_gl_in_0_gl_position = OpAccessChain %ip_v4f32 %gl_in %c_i32_0 %c_i32_0\n"
+		"%geom2_gl_in_1_gl_position = OpAccessChain %ip_v4f32 %gl_in %c_i32_1 %c_i32_0\n"
+		"%geom2_gl_in_2_gl_position = OpAccessChain %ip_v4f32 %gl_in %c_i32_2 %c_i32_0\n"
+		"%geom2_in_position_0 = OpLoad %v4f32 %geom2_gl_in_0_gl_position\n"
+		"%geom2_in_position_1 = OpLoad %v4f32 %geom2_gl_in_1_gl_position\n"
+		"%geom2_in_position_2 = OpLoad %v4f32 %geom2_gl_in_2_gl_position \n"
+		"%geom2_in_color_0_ptr = OpAccessChain %ip_v4f32 %in_color %c_i32_0\n"
+		"%geom2_in_color_1_ptr = OpAccessChain %ip_v4f32 %in_color %c_i32_1\n"
+		"%geom2_in_color_2_ptr = OpAccessChain %ip_v4f32 %in_color %c_i32_2\n"
+		"%geom2_in_color_0 = OpLoad %v4f32 %geom2_in_color_0_ptr\n"
+		"%geom2_in_color_1 = OpLoad %v4f32 %geom2_in_color_1_ptr\n"
+		"%geom2_in_color_2 = OpLoad %v4f32 %geom2_in_color_2_ptr\n"
+		"%geom2_transformed_in_color_0 = OpFSub %v4f32 %cval %geom2_in_color_0\n"
+		"%geom2_transformed_in_color_1 = OpFSub %v4f32 %cval %geom2_in_color_1\n"
+		"%geom2_transformed_in_color_2 = OpFSub %v4f32 %cval %geom2_in_color_2\n"
+		"%geom2_transformed_in_color_0_a = OpVectorInsertDynamic %v4f32 %geom2_transformed_in_color_0 %c_f32_1 %c_i32_3\n"
+		"%geom2_transformed_in_color_1_a = OpVectorInsertDynamic %v4f32 %geom2_transformed_in_color_1 %c_f32_1 %c_i32_3\n"
+		"%geom2_transformed_in_color_2_a = OpVectorInsertDynamic %v4f32 %geom2_transformed_in_color_2 %c_f32_1 %c_i32_3\n"
+		"OpStore %out_gl_position %geom2_in_position_0\n"
+		"OpStore %out_color %geom2_transformed_in_color_0_a\n"
+		"OpEmitVertex\n"
+		"OpStore %out_gl_position %geom2_in_position_1\n"
+		"OpStore %out_color %geom2_transformed_in_color_1_a\n"
+		"OpEmitVertex\n"
+		"OpStore %out_gl_position %geom2_in_position_2\n"
+		"OpStore %out_color %geom2_transformed_in_color_2_a\n"
+		"OpEmitVertex\n"
+		"OpEndPrimitive\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n";
+
+	dst.spirvAsmSources.add("tessc") <<
+		"OpCapability Tessellation\n"
+		"OpMemoryModel Logical GLSL450\n"
+		"OpEntryPoint TessellationControl %tessc1_main \"tessc1\" %out_color %gl_InvocationID %in_color %out_position %in_position %gl_TessLevelOuter %gl_TessLevelInner\n"
+		"OpEntryPoint TessellationControl %tessc2_main \"tessc2\" %out_color %gl_InvocationID %in_color %out_position %in_position %gl_TessLevelOuter %gl_TessLevelInner\n"
+		"OpExecutionMode %tessc1_main OutputVertices 3\n"
+		"OpExecutionMode %tessc2_main OutputVertices 3\n"
+		"OpName %tessc1_main \"tessc1\"\n"
+		"OpName %tessc2_main \"tessc2\"\n"
+		"OpName %out_color \"out_color\"\n"
+		"OpName %gl_InvocationID \"gl_InvocationID\"\n"
+		"OpName %in_color \"in_color\"\n"
+		"OpName %out_position \"out_position\"\n"
+		"OpName %in_position \"in_position\"\n"
+		"OpName %gl_TessLevelOuter \"gl_TessLevelOuter\"\n"
+		"OpName %gl_TessLevelInner \"gl_TessLevelInner\"\n"
+		"OpDecorate %out_color Location 1\n"
+		"OpDecorate %gl_InvocationID BuiltIn InvocationId\n"
+		"OpDecorate %in_color Location 1\n"
+		"OpDecorate %out_position Location 2\n"
+		"OpDecorate %in_position Location 2\n"
+		"OpDecorate %gl_TessLevelOuter Patch\n"
+		"OpDecorate %gl_TessLevelOuter BuiltIn TessLevelOuter\n"
+		"OpDecorate %gl_TessLevelInner Patch\n"
+		"OpDecorate %gl_TessLevelInner BuiltIn TessLevelInner\n"
+		SPIRV_ASSEMBLY_TYPES
+		SPIRV_ASSEMBLY_CONSTANTS
+		SPIRV_ASSEMBLY_ARRAYS
+		"%cval = OpConstantComposite %v4f32 %c_f32_1 %c_f32_1 %c_f32_1 %c_f32_0\n"
+		"%out_color = OpVariable %op_a3v4f32 Output\n"
+		"%gl_InvocationID = OpVariable %ip_i32 Input\n"
+		"%in_color = OpVariable %ip_a32v4f32 Input\n"
+		"%out_position = OpVariable %op_a3v4f32 Output\n"
+		"%in_position = OpVariable %ip_a32v4f32 Input\n"
+		"%gl_TessLevelOuter = OpVariable %op_a4f32 Output\n"
+		"%gl_TessLevelInner = OpVariable %op_a2f32 Output\n"
+
+		"%tessc1_main = OpFunction %void None %fun\n"
+		"%tessc1_label = OpLabel\n"
+		"%tessc1_invocation_id = OpLoad %i32 %gl_InvocationID\n"
+		"%tessc1_in_color_ptr = OpAccessChain %ip_v4f32 %in_color %tessc1_invocation_id\n"
+		"%tessc1_in_position_ptr = OpAccessChain %ip_v4f32 %in_position %tessc1_invocation_id\n"
+		"%tessc1_in_color_val = OpLoad %v4f32 %tessc1_in_color_ptr\n"
+		"%tessc1_in_position_val = OpLoad %v4f32 %tessc1_in_position_ptr\n"
+		"%tessc1_out_color_ptr = OpAccessChain %op_v4f32 %out_color %tessc1_invocation_id\n"
+		"%tessc1_out_position_ptr = OpAccessChain %op_v4f32 %out_position %tessc1_invocation_id\n"
+		"OpStore %tessc1_out_color_ptr %tessc1_in_color_val\n"
+		"OpStore %tessc1_out_position_ptr %tessc1_in_position_val\n"
+		"%tessc1_is_first_invocation = OpIEqual %bool %tessc1_invocation_id %c_i32_0\n"
+		"OpSelectionMerge %tessc1_merge_label None\n"
+		"OpBranchConditional %tessc1_is_first_invocation %tessc1_first_invocation %tessc1_merge_label\n"
+		"%tessc1_first_invocation = OpLabel\n"
+		"%tessc1_tess_outer_0 = OpAccessChain %op_f32 %gl_TessLevelOuter %c_i32_0\n"
+		"%tessc1_tess_outer_1 = OpAccessChain %op_f32 %gl_TessLevelOuter %c_i32_1\n"
+		"%tessc1_tess_outer_2 = OpAccessChain %op_f32 %gl_TessLevelOuter %c_i32_2\n"
+		"%tessc1_tess_inner = OpAccessChain %op_f32 %gl_TessLevelInner %c_i32_0\n"
+		"OpStore %tessc1_tess_outer_0 %c_f32_1\n"
+		"OpStore %tessc1_tess_outer_1 %c_f32_1\n"
+		"OpStore %tessc1_tess_outer_2 %c_f32_1\n"
+		"OpStore %tessc1_tess_inner %c_f32_1\n"
+		"OpBranch %tessc1_merge_label\n"
+		"%tessc1_merge_label = OpLabel\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+
+		"%tessc2_main = OpFunction %void None %fun\n"
+		"%tessc2_label = OpLabel\n"
+		"%tessc2_invocation_id = OpLoad %i32 %gl_InvocationID\n"
+		"%tessc2_in_color_ptr = OpAccessChain %ip_v4f32 %in_color %tessc2_invocation_id\n"
+		"%tessc2_in_position_ptr = OpAccessChain %ip_v4f32 %in_position %tessc2_invocation_id\n"
+		"%tessc2_in_color_val = OpLoad %v4f32 %tessc2_in_color_ptr\n"
+		"%tessc2_in_position_val = OpLoad %v4f32 %tessc2_in_position_ptr\n"
+		"%tessc2_out_color_ptr = OpAccessChain %op_v4f32 %out_color %tessc2_invocation_id\n"
+		"%tessc2_out_position_ptr = OpAccessChain %op_v4f32 %out_position %tessc2_invocation_id\n"
+		"%tessc2_transformed_color = OpFSub %v4f32 %cval %tessc2_in_color_val\n"
+		"%tessc2_transformed_color_a = OpVectorInsertDynamic %v4f32 %tessc2_transformed_color %c_f32_1 %c_i32_3\n"
+		"OpStore %tessc2_out_color_ptr %tessc2_transformed_color_a\n"
+		"OpStore %tessc2_out_position_ptr %tessc2_in_position_val\n"
+		"%tessc2_is_first_invocation = OpIEqual %bool %tessc2_invocation_id %c_i32_0\n"
+		"OpSelectionMerge %tessc2_merge_label None\n"
+		"OpBranchConditional %tessc2_is_first_invocation %tessc2_first_invocation %tessc2_merge_label\n"
+		"%tessc2_first_invocation = OpLabel\n"
+		"%tessc2_tess_outer_0 = OpAccessChain %op_f32 %gl_TessLevelOuter %c_i32_0\n"
+		"%tessc2_tess_outer_1 = OpAccessChain %op_f32 %gl_TessLevelOuter %c_i32_1\n"
+		"%tessc2_tess_outer_2 = OpAccessChain %op_f32 %gl_TessLevelOuter %c_i32_2\n"
+		"%tessc2_tess_inner = OpAccessChain %op_f32 %gl_TessLevelInner %c_i32_0\n"
+		"OpStore %tessc2_tess_outer_0 %c_f32_1\n"
+		"OpStore %tessc2_tess_outer_1 %c_f32_1\n"
+		"OpStore %tessc2_tess_outer_2 %c_f32_1\n"
+		"OpStore %tessc2_tess_inner %c_f32_1\n"
+		"OpBranch %tessc2_merge_label\n"
+		"%tessc2_merge_label = OpLabel\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n";
+
+	dst.spirvAsmSources.add("tesse") <<
+		"OpCapability Tessellation\n"
+		"OpMemoryModel Logical GLSL450\n"
+		"OpEntryPoint TessellationEvaluation %tesse1_main \"tesse1\" %stream %gl_tessCoord %in_position %out_color %in_color \n"
+		"OpEntryPoint TessellationEvaluation %tesse2_main \"tesse2\" %stream %gl_tessCoord %in_position %out_color %in_color \n"
+		"OpExecutionMode %tesse1_main Triangles\n"
+		"OpExecutionMode %tesse2_main Triangles\n"
+		"OpName %tesse1_main \"tesse1\"\n"
+		"OpName %tesse2_main \"tesse2\"\n"
+		"OpName %per_vertex_out \"gl_PerVertex\"\n"
+		"OpMemberName %per_vertex_out 0 \"gl_Position\"\n"
+		"OpMemberName %per_vertex_out 1 \"gl_PointSize\"\n"
+		"OpMemberName %per_vertex_out 2 \"gl_ClipDistance\"\n"
+		"OpMemberName %per_vertex_out 3 \"gl_CullDistance\"\n"
+		"OpName %stream \"\"\n"
+		"OpName %gl_tessCoord \"gl_TessCoord\"\n"
+		"OpName %in_position \"in_position\"\n"
+		"OpName %out_color \"out_color\"\n"
+		"OpName %in_color \"in_color\"\n"
+		"OpMemberDecorate %per_vertex_out 0 BuiltIn Position\n"
+		"OpMemberDecorate %per_vertex_out 1 BuiltIn PointSize\n"
+		"OpMemberDecorate %per_vertex_out 2 BuiltIn ClipDistance\n"
+		"OpMemberDecorate %per_vertex_out 3 BuiltIn CullDistance\n"
+		"OpDecorate %per_vertex_out Block\n"
+		"OpDecorate %gl_tessCoord BuiltIn TessCoord\n"
+		"OpDecorate %in_position Location 2\n"
+		"OpDecorate %out_color Location 1\n"
+		"OpDecorate %in_color Location 1\n"
+		SPIRV_ASSEMBLY_TYPES
+		SPIRV_ASSEMBLY_CONSTANTS
+		SPIRV_ASSEMBLY_ARRAYS
+		"%cval = OpConstantComposite %v4f32 %c_f32_1 %c_f32_1 %c_f32_1 %c_f32_0\n"
+		"%per_vertex_out = OpTypeStruct %v4f32 %f32 %a1f32 %a1f32\n"
+		"%op_per_vertex_out = OpTypePointer Output %per_vertex_out\n"
+		"%stream = OpVariable %op_per_vertex_out Output\n"
+		"%gl_tessCoord = OpVariable %ip_v3f32 Input\n"
+		"%in_position = OpVariable %ip_a32v4f32 Input\n"
+		"%out_color = OpVariable %op_v4f32 Output\n"
+		"%in_color = OpVariable %ip_a32v4f32 Input\n"
+
+		"%tesse1_main = OpFunction %void None %fun\n"
+		"%tesse1_label = OpLabel\n"
+		"%tesse1_tc_0_ptr = OpAccessChain %ip_f32 %gl_tessCoord %c_u32_0\n"
+		"%tesse1_tc_1_ptr = OpAccessChain %ip_f32 %gl_tessCoord %c_u32_1\n"
+		"%tesse1_tc_2_ptr = OpAccessChain %ip_f32 %gl_tessCoord %c_u32_2\n"
+		"%tesse1_tc_0 = OpLoad %f32 %tesse1_tc_0_ptr\n"
+		"%tesse1_tc_1 = OpLoad %f32 %tesse1_tc_1_ptr\n"
+		"%tesse1_tc_2 = OpLoad %f32 %tesse1_tc_2_ptr\n"
+		"%tesse1_in_pos_0_ptr = OpAccessChain %ip_v4f32 %in_position %c_i32_0\n"
+		"%tesse1_in_pos_1_ptr = OpAccessChain %ip_v4f32 %in_position %c_i32_1\n"
+		"%tesse1_in_pos_2_ptr = OpAccessChain %ip_v4f32 %in_position %c_i32_2\n"
+		"%tesse1_in_pos_0 = OpLoad %v4f32 %tesse1_in_pos_0_ptr\n"
+		"%tesse1_in_pos_1 = OpLoad %v4f32 %tesse1_in_pos_1_ptr\n"
+		"%tesse1_in_pos_2 = OpLoad %v4f32 %tesse1_in_pos_2_ptr\n"
+		"%tesse1_in_pos_0_weighted = OpVectorTimesScalar %v4f32 %tesse1_in_pos_0 %tesse1_tc_0\n"
+		"%tesse1_in_pos_1_weighted = OpVectorTimesScalar %v4f32 %tesse1_in_pos_1 %tesse1_tc_1\n"
+		"%tesse1_in_pos_2_weighted = OpVectorTimesScalar %v4f32 %tesse1_in_pos_2 %tesse1_tc_2\n"
+		"%tesse1_out_pos_ptr = OpAccessChain %op_v4f32 %stream %c_i32_0\n"
+		"%tesse1_in_pos_0_plus_pos_1 = OpFAdd %v4f32 %tesse1_in_pos_0_weighted %tesse1_in_pos_1_weighted\n"
+		"%tesse1_computed_out = OpFAdd %v4f32 %tesse1_in_pos_0_plus_pos_1 %tesse1_in_pos_2_weighted\n"
+		"OpStore %tesse1_out_pos_ptr %tesse1_computed_out\n"
+		"%tesse1_in_clr_0_ptr = OpAccessChain %ip_v4f32 %in_color %c_i32_0\n"
+		"%tesse1_in_clr_1_ptr = OpAccessChain %ip_v4f32 %in_color %c_i32_1\n"
+		"%tesse1_in_clr_2_ptr = OpAccessChain %ip_v4f32 %in_color %c_i32_2\n"
+		"%tesse1_in_clr_0 = OpLoad %v4f32 %tesse1_in_clr_0_ptr\n"
+		"%tesse1_in_clr_1 = OpLoad %v4f32 %tesse1_in_clr_1_ptr\n"
+		"%tesse1_in_clr_2 = OpLoad %v4f32 %tesse1_in_clr_2_ptr\n"
+		"%tesse1_in_clr_0_weighted = OpVectorTimesScalar %v4f32 %tesse1_in_clr_0 %tesse1_tc_0\n"
+		"%tesse1_in_clr_1_weighted = OpVectorTimesScalar %v4f32 %tesse1_in_clr_1 %tesse1_tc_1\n"
+		"%tesse1_in_clr_2_weighted = OpVectorTimesScalar %v4f32 %tesse1_in_clr_2 %tesse1_tc_2\n"
+		"%tesse1_in_clr_0_plus_col_1 = OpFAdd %v4f32 %tesse1_in_clr_0_weighted %tesse1_in_clr_1_weighted\n"
+		"%tesse1_computed_clr = OpFAdd %v4f32 %tesse1_in_clr_0_plus_col_1 %tesse1_in_clr_2_weighted\n"
+		"OpStore %out_color %tesse1_computed_clr\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n"
+
+		"%tesse2_main = OpFunction %void None %fun\n"
+		"%tesse2_label = OpLabel\n"
+		"%tesse2_tc_0_ptr = OpAccessChain %ip_f32 %gl_tessCoord %c_u32_0\n"
+		"%tesse2_tc_1_ptr = OpAccessChain %ip_f32 %gl_tessCoord %c_u32_1\n"
+		"%tesse2_tc_2_ptr = OpAccessChain %ip_f32 %gl_tessCoord %c_u32_2\n"
+		"%tesse2_tc_0 = OpLoad %f32 %tesse2_tc_0_ptr\n"
+		"%tesse2_tc_1 = OpLoad %f32 %tesse2_tc_1_ptr\n"
+		"%tesse2_tc_2 = OpLoad %f32 %tesse2_tc_2_ptr\n"
+		"%tesse2_in_pos_0_ptr = OpAccessChain %ip_v4f32 %in_position %c_i32_0\n"
+		"%tesse2_in_pos_1_ptr = OpAccessChain %ip_v4f32 %in_position %c_i32_1\n"
+		"%tesse2_in_pos_2_ptr = OpAccessChain %ip_v4f32 %in_position %c_i32_2\n"
+		"%tesse2_in_pos_0 = OpLoad %v4f32 %tesse2_in_pos_0_ptr\n"
+		"%tesse2_in_pos_1 = OpLoad %v4f32 %tesse2_in_pos_1_ptr\n"
+		"%tesse2_in_pos_2 = OpLoad %v4f32 %tesse2_in_pos_2_ptr\n"
+		"%tesse2_in_pos_0_weighted = OpVectorTimesScalar %v4f32 %tesse2_in_pos_0 %tesse2_tc_0\n"
+		"%tesse2_in_pos_1_weighted = OpVectorTimesScalar %v4f32 %tesse2_in_pos_1 %tesse2_tc_1\n"
+		"%tesse2_in_pos_2_weighted = OpVectorTimesScalar %v4f32 %tesse2_in_pos_2 %tesse2_tc_2\n"
+		"%tesse2_out_pos_ptr = OpAccessChain %op_v4f32 %stream %c_i32_0\n"
+		"%tesse2_in_pos_0_plus_pos_1 = OpFAdd %v4f32 %tesse2_in_pos_0_weighted %tesse2_in_pos_1_weighted\n"
+		"%tesse2_computed_out = OpFAdd %v4f32 %tesse2_in_pos_0_plus_pos_1 %tesse2_in_pos_2_weighted\n"
+		"OpStore %tesse2_out_pos_ptr %tesse2_computed_out\n"
+		"%tesse2_in_clr_0_ptr = OpAccessChain %ip_v4f32 %in_color %c_i32_0\n"
+		"%tesse2_in_clr_1_ptr = OpAccessChain %ip_v4f32 %in_color %c_i32_1\n"
+		"%tesse2_in_clr_2_ptr = OpAccessChain %ip_v4f32 %in_color %c_i32_2\n"
+		"%tesse2_in_clr_0 = OpLoad %v4f32 %tesse2_in_clr_0_ptr\n"
+		"%tesse2_in_clr_1 = OpLoad %v4f32 %tesse2_in_clr_1_ptr\n"
+		"%tesse2_in_clr_2 = OpLoad %v4f32 %tesse2_in_clr_2_ptr\n"
+		"%tesse2_in_clr_0_weighted = OpVectorTimesScalar %v4f32 %tesse2_in_clr_0 %tesse2_tc_0\n"
+		"%tesse2_in_clr_1_weighted = OpVectorTimesScalar %v4f32 %tesse2_in_clr_1 %tesse2_tc_1\n"
+		"%tesse2_in_clr_2_weighted = OpVectorTimesScalar %v4f32 %tesse2_in_clr_2 %tesse2_tc_2\n"
+		"%tesse2_in_clr_0_plus_col_1 = OpFAdd %v4f32 %tesse2_in_clr_0_weighted %tesse2_in_clr_1_weighted\n"
+		"%tesse2_computed_clr = OpFAdd %v4f32 %tesse2_in_clr_0_plus_col_1 %tesse2_in_clr_2_weighted\n"
+		"%tesse2_clr_transformed = OpFSub %v4f32 %cval %tesse2_computed_clr\n"
+		"%tesse2_clr_transformed_a = OpVectorInsertDynamic %v4f32 %tesse2_clr_transformed %c_f32_1 %c_i32_3\n"
+		"OpStore %out_color %tesse2_clr_transformed_a\n"
+		"OpReturn\n"
+		"OpFunctionEnd\n";
+}
+
+// Sets up and runs a Vulkan pipeline, then spot-checks the resulting image.
+// Feeds the pipeline a set of colored triangles, which must then appear in the
+// rendered image. The surface is cleared before executing the pipeline, so
+// whatever the shaders draw can be directly spot-checked.
+TestStatus runAndVerifyDefaultPipeline (Context& context, InstanceContext instance)
+{
+	const VkDevice								vkDevice				= context.getDevice();
+	const DeviceInterface&						vk						= context.getDeviceInterface();
+	const VkQueue								queue					= context.getUniversalQueue();
+	const deUint32								queueFamilyIndex		= context.getUniversalQueueFamilyIndex();
+	const tcu::UVec2							renderSize				(256, 256);
+	vector<ModuleHandleSp>						modules;
+	map<VkShaderStageFlagBits, VkShaderModule>	moduleByStage;
+	const int									testSpecificSeed		= 31354125;
+	const int									seed					= context.getTestContext().getCommandLine().getBaseSeed() ^ testSpecificSeed;
+	bool										supportsGeometry		= false;
+	bool										supportsTessellation	= false;
+	bool										hasTessellation         = false;
+
+	const VkPhysicalDeviceFeatures&				features				= context.getDeviceFeatures();
+	supportsGeometry		= features.geometryShader == VK_TRUE;
+	supportsTessellation	= features.tessellationShader == VK_TRUE;
+	hasTessellation			= (instance.requiredStages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT) ||
+								(instance.requiredStages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT);
+
+	if (hasTessellation && !supportsTessellation)
+	{
+		throw tcu::NotSupportedError(std::string("Tessellation not supported"));
+	}
+
+	if ((instance.requiredStages & VK_SHADER_STAGE_GEOMETRY_BIT) &&
+		!supportsGeometry)
+	{
+		throw tcu::NotSupportedError(std::string("Geometry not supported"));
+	}
+
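+	// Shuffle both color arrays with the same seed so the permutation is identical
+	// and each input color stays paired with its expected output color.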
+	de::Random(seed).shuffle(instance.inputColors, instance.inputColors+4);
+	de::Random(seed).shuffle(instance.outputColors, instance.outputColors+4);
+	const Vec4								vertexData[]			=
+	{
+		// Upper left corner:
+		Vec4(-1.0f, -1.0f, 0.0f, 1.0f), instance.inputColors[0].toVec(),
+		Vec4(-0.5f, -1.0f, 0.0f, 1.0f), instance.inputColors[0].toVec(),
+		Vec4(-1.0f, -0.5f, 0.0f, 1.0f), instance.inputColors[0].toVec(),
+
+		// Upper right corner:
+		Vec4(+0.5f, -1.0f, 0.0f, 1.0f), instance.inputColors[1].toVec(),
+		Vec4(+1.0f, -1.0f, 0.0f, 1.0f), instance.inputColors[1].toVec(),
+		Vec4(+1.0f, -0.5f, 0.0f, 1.0f), instance.inputColors[1].toVec(),
+
+		// Lower left corner:
+		Vec4(-1.0f, +0.5f, 0.0f, 1.0f), instance.inputColors[2].toVec(),
+		Vec4(-0.5f, +1.0f, 0.0f, 1.0f), instance.inputColors[2].toVec(),
+		Vec4(-1.0f, +1.0f, 0.0f, 1.0f), instance.inputColors[2].toVec(),
+
+		// Lower right corner:
+		Vec4(+1.0f, +0.5f, 0.0f, 1.0f), instance.inputColors[3].toVec(),
+		Vec4(+1.0f, +1.0f, 0.0f, 1.0f), instance.inputColors[3].toVec(),
+		Vec4(+0.5f, +1.0f, 0.0f, 1.0f), instance.inputColors[3].toVec()
+	};
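+	// Each vertex is a position Vec4 followed by a color Vec4, interleaved in one buffer.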
+	const size_t							singleVertexDataSize	= 2 * sizeof(Vec4);
+	const size_t							vertexCount				= sizeof(vertexData) / singleVertexDataSize;
+
+	const VkBufferCreateInfo				vertexBufferParams		=
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,	//	VkStructureType		sType;
+		DE_NULL,								//	const void*			pNext;
+		0u,										//	VkBufferCreateFlags	flags;
+		(VkDeviceSize)sizeof(vertexData),		//	VkDeviceSize		size;
+		VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,		//	VkBufferUsageFlags	usage;
+		VK_SHARING_MODE_EXCLUSIVE,				//	VkSharingMode		sharingMode;
+		1u,										//	deUint32			queueFamilyCount;
+		&queueFamilyIndex,						//	const deUint32*		pQueueFamilyIndices;
+	};
+	const Unique<VkBuffer>					vertexBuffer			(createBuffer(vk, vkDevice, &vertexBufferParams));
+	const UniquePtr<Allocation>				vertexBufferMemory		(context.getDefaultAllocator().allocate(getBufferMemoryRequirements(vk, vkDevice, *vertexBuffer), MemoryRequirement::HostVisible));
+
+	VK_CHECK(vk.bindBufferMemory(vkDevice, *vertexBuffer, vertexBufferMemory->getMemory(), vertexBufferMemory->getOffset()));
+
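+	// Host-visible buffer used to read the rendered image back for verification.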
+	const VkDeviceSize						imageSizeBytes			= (VkDeviceSize)(sizeof(deUint32)*renderSize.x()*renderSize.y());
+	const VkBufferCreateInfo				readImageBufferParams	=
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,		//	VkStructureType		sType;
+		DE_NULL,									//	const void*			pNext;
+		0u,											//	VkBufferCreateFlags	flags;
+		imageSizeBytes,								//	VkDeviceSize		size;
+		VK_BUFFER_USAGE_TRANSFER_DST_BIT,			//	VkBufferUsageFlags	usage;
+		VK_SHARING_MODE_EXCLUSIVE,					//	VkSharingMode		sharingMode;
+		1u,											//	deUint32			queueFamilyCount;
+		&queueFamilyIndex,							//	const deUint32*		pQueueFamilyIndices;
+	};
+	const Unique<VkBuffer>					readImageBuffer			(createBuffer(vk, vkDevice, &readImageBufferParams));
+	const UniquePtr<Allocation>				readImageBufferMemory	(context.getDefaultAllocator().allocate(getBufferMemoryRequirements(vk, vkDevice, *readImageBuffer), MemoryRequirement::HostVisible));
+
+	VK_CHECK(vk.bindBufferMemory(vkDevice, *readImageBuffer, readImageBufferMemory->getMemory(), readImageBufferMemory->getOffset()));
+
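+	// Color attachment image the pipeline renders into; TRANSFER_SRC usage allows
+	// copying it into the readback buffer afterwards.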
+	const VkImageCreateInfo					imageParams				=
+	{
+		VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,									//	VkStructureType		sType;
+		DE_NULL,																//	const void*			pNext;
+		0u,																		//	VkImageCreateFlags	flags;
+		VK_IMAGE_TYPE_2D,														//	VkImageType			imageType;
+		VK_FORMAT_R8G8B8A8_UNORM,												//	VkFormat			format;
+		{ renderSize.x(), renderSize.y(), 1 },									//	VkExtent3D			extent;
+		1u,																		//	deUint32			mipLevels;
+		1u,																		//	deUint32			arraySize;
+		VK_SAMPLE_COUNT_1_BIT,													//	deUint32			samples;
+		VK_IMAGE_TILING_OPTIMAL,												//	VkImageTiling		tiling;
+		VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT|VK_IMAGE_USAGE_TRANSFER_SRC_BIT,	//	VkImageUsageFlags	usage;
+		VK_SHARING_MODE_EXCLUSIVE,												//	VkSharingMode		sharingMode;
+		1u,																		//	deUint32			queueFamilyCount;
+		&queueFamilyIndex,														//	const deUint32*		pQueueFamilyIndices;
+		VK_IMAGE_LAYOUT_UNDEFINED,												//	VkImageLayout		initialLayout;
+	};
+
+	const Unique<VkImage>					image					(createImage(vk, vkDevice, &imageParams));
+	const UniquePtr<Allocation>				imageMemory				(context.getDefaultAllocator().allocate(getImageMemoryRequirements(vk, vkDevice, *image), MemoryRequirement::Any));
+
+	VK_CHECK(vk.bindImageMemory(vkDevice, *image, imageMemory->getMemory(), imageMemory->getOffset()));
+
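+	// Single-subpass render pass with one color attachment, cleared on load and stored after the subpass.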
+	const VkAttachmentDescription			colorAttDesc			=
+	{
+		0u,												//	VkAttachmentDescriptionFlags	flags;
+		VK_FORMAT_R8G8B8A8_UNORM,						//	VkFormat						format;
+		VK_SAMPLE_COUNT_1_BIT,							//	deUint32						samples;
+		VK_ATTACHMENT_LOAD_OP_CLEAR,					//	VkAttachmentLoadOp				loadOp;
+		VK_ATTACHMENT_STORE_OP_STORE,					//	VkAttachmentStoreOp				storeOp;
+		VK_ATTACHMENT_LOAD_OP_DONT_CARE,				//	VkAttachmentLoadOp				stencilLoadOp;
+		VK_ATTACHMENT_STORE_OP_DONT_CARE,				//	VkAttachmentStoreOp				stencilStoreOp;
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,		//	VkImageLayout					initialLayout;
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,		//	VkImageLayout					finalLayout;
+	};
+	const VkAttachmentReference				colorAttRef				=
+	{
+		0u,												//	deUint32		attachment;
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,		//	VkImageLayout	layout;
+	};
+	const VkSubpassDescription				subpassDesc				=
+	{
+		0u,												//	VkSubpassDescriptionFlags		flags;
+		VK_PIPELINE_BIND_POINT_GRAPHICS,				//	VkPipelineBindPoint				pipelineBindPoint;
+		0u,												//	deUint32						inputCount;
+		DE_NULL,										//	const VkAttachmentReference*	pInputAttachments;
+		1u,												//	deUint32						colorCount;
+		&colorAttRef,									//	const VkAttachmentReference*	pColorAttachments;
+		DE_NULL,										//	const VkAttachmentReference*	pResolveAttachments;
+		DE_NULL,										//	const VkAttachmentReference*	pDepthStencilAttachment;
+		0u,												//	deUint32						preserveCount;
+		DE_NULL,										//	const VkAttachmentReference*	pPreserveAttachments;
+
+	};
+	const VkRenderPassCreateInfo			renderPassParams		=
+	{
+		VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,		//	VkStructureType					sType;
+		DE_NULL,										//	const void*						pNext;
+		(VkRenderPassCreateFlags)0,
+		1u,												//	deUint32						attachmentCount;
+		&colorAttDesc,									//	const VkAttachmentDescription*	pAttachments;
+		1u,												//	deUint32						subpassCount;
+		&subpassDesc,									//	const VkSubpassDescription*		pSubpasses;
+		0u,												//	deUint32						dependencyCount;
+		DE_NULL,										//	const VkSubpassDependency*		pDependencies;
+	};
+	const Unique<VkRenderPass>				renderPass				(createRenderPass(vk, vkDevice, &renderPassParams));
+
+	const VkImageViewCreateInfo				colorAttViewParams		=
+	{
+		VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,		//	VkStructureType				sType;
+		DE_NULL,										//	const void*					pNext;
+		0u,												//	VkImageViewCreateFlags		flags;
+		*image,											//	VkImage						image;
+		VK_IMAGE_VIEW_TYPE_2D,							//	VkImageViewType				viewType;
+		VK_FORMAT_R8G8B8A8_UNORM,						//	VkFormat					format;
+		{
+			VK_COMPONENT_SWIZZLE_R,
+			VK_COMPONENT_SWIZZLE_G,
+			VK_COMPONENT_SWIZZLE_B,
+			VK_COMPONENT_SWIZZLE_A
+		},												//	VkChannelMapping			channels;
+		{
+			VK_IMAGE_ASPECT_COLOR_BIT,						//	VkImageAspectFlags	aspectMask;
+			0u,												//	deUint32			baseMipLevel;
+			1u,												//	deUint32			mipLevels;
+			0u,												//	deUint32			baseArrayLayer;
+			1u,												//	deUint32			arraySize;
+		},												//	VkImageSubresourceRange		subresourceRange;
+	};
+	const Unique<VkImageView>				colorAttView			(createImageView(vk, vkDevice, &colorAttViewParams));
+
+
+	// Pipeline layout
+	const VkPipelineLayoutCreateInfo		pipelineLayoutParams	=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,			//	VkStructureType					sType;
+		DE_NULL,												//	const void*						pNext;
+		(VkPipelineLayoutCreateFlags)0,
+		0u,														//	deUint32						descriptorSetCount;
+		DE_NULL,												//	const VkDescriptorSetLayout*	pSetLayouts;
+		0u,														//	deUint32						pushConstantRangeCount;
+		DE_NULL,												//	const VkPushConstantRange*		pPushConstantRanges;
+	};
+	const Unique<VkPipelineLayout>			pipelineLayout			(createPipelineLayout(vk, vkDevice, &pipelineLayoutParams));
+
+	// Pipeline
+	vector<VkPipelineShaderStageCreateInfo>		shaderStageParams;
+	// We need these vectors to make sure that information about specialization constants for each stage can outlive createGraphicsPipeline().
+	vector<vector<VkSpecializationMapEntry> >	specConstantEntries;
+	vector<VkSpecializationInfo>				specializationInfos;
+	createPipelineShaderStages(vk, vkDevice, instance, context, modules, shaderStageParams);
+
+	// And we don't want the reallocation of these vectors to invalidate pointers pointing to their contents.
+	specConstantEntries.reserve(shaderStageParams.size());
+	specializationInfos.reserve(shaderStageParams.size());
+
+	// Patch the specialization info field in PipelineShaderStageCreateInfos.
+	for (vector<VkPipelineShaderStageCreateInfo>::iterator stageInfo = shaderStageParams.begin(); stageInfo != shaderStageParams.end(); ++stageInfo)
+	{
+		const StageToSpecConstantMap::const_iterator stageIt = instance.specConstants.find(stageInfo->stage);
+
+		if (stageIt != instance.specConstants.end())
+		{
+			const size_t						numSpecConstants	= stageIt->second.size();
+			vector<VkSpecializationMapEntry>	entries;
+			VkSpecializationInfo				specInfo;
+
+			entries.resize(numSpecConstants);
+
+			// Only 32-bit integers are supported as spec constants for now; their constant IDs are numbered sequentially starting from 0.
+			for (size_t ndx = 0; ndx < numSpecConstants; ++ndx)
+			{
+				entries[ndx].constantID	= (deUint32)ndx;
+				entries[ndx].offset		= deUint32(ndx * sizeof(deInt32));
+				entries[ndx].size		= sizeof(deInt32);
+			}
+
+			specConstantEntries.push_back(entries);
+
+			specInfo.mapEntryCount	= (deUint32)numSpecConstants;
+			specInfo.pMapEntries	= specConstantEntries.back().data();
+			specInfo.dataSize		= numSpecConstants * sizeof(deInt32);
+			specInfo.pData			= stageIt->second.data();
+			specializationInfos.push_back(specInfo);
+
+			stageInfo->pSpecializationInfo = &specializationInfos.back();
+		}
+	}
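+
+	// Fixed-function state is mostly disabled: no depth or stencil testing, no blending, single-sampled.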
+	const VkPipelineDepthStencilStateCreateInfo	depthStencilParams		=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,	//	VkStructureType		sType;
+		DE_NULL,													//	const void*			pNext;
+		(VkPipelineDepthStencilStateCreateFlags)0,
+		DE_FALSE,													//	deUint32			depthTestEnable;
+		DE_FALSE,													//	deUint32			depthWriteEnable;
+		VK_COMPARE_OP_ALWAYS,										//	VkCompareOp			depthCompareOp;
+		DE_FALSE,													//	deUint32			depthBoundsTestEnable;
+		DE_FALSE,													//	deUint32			stencilTestEnable;
+		{
+			VK_STENCIL_OP_KEEP,											//	VkStencilOp	stencilFailOp;
+			VK_STENCIL_OP_KEEP,											//	VkStencilOp	stencilPassOp;
+			VK_STENCIL_OP_KEEP,											//	VkStencilOp	stencilDepthFailOp;
+			VK_COMPARE_OP_ALWAYS,										//	VkCompareOp	stencilCompareOp;
+			0u,															//	deUint32	stencilCompareMask;
+			0u,															//	deUint32	stencilWriteMask;
+			0u,															//	deUint32	stencilReference;
+		},															//	VkStencilOpState	front;
+		{
+			VK_STENCIL_OP_KEEP,											//	VkStencilOp	stencilFailOp;
+			VK_STENCIL_OP_KEEP,											//	VkStencilOp	stencilPassOp;
+			VK_STENCIL_OP_KEEP,											//	VkStencilOp	stencilDepthFailOp;
+			VK_COMPARE_OP_ALWAYS,										//	VkCompareOp	stencilCompareOp;
+			0u,															//	deUint32	stencilCompareMask;
+			0u,															//	deUint32	stencilWriteMask;
+			0u,															//	deUint32	stencilReference;
+		},															//	VkStencilOpState	back;
+		-1.0f,														//	float				minDepthBounds;
+		+1.0f,														//	float				maxDepthBounds;
+	};
+	const VkViewport						viewport0				=
+	{
+		0.0f,														//	float	originX;
+		0.0f,														//	float	originY;
+		(float)renderSize.x(),										//	float	width;
+		(float)renderSize.y(),										//	float	height;
+		0.0f,														//	float	minDepth;
+		1.0f,														//	float	maxDepth;
+	};
+	const VkRect2D							scissor0				=
+	{
+		{
+			0u,															//	deInt32	x;
+			0u,															//	deInt32	y;
+		},															//	VkOffset2D	offset;
+		{
+			renderSize.x(),												//	deInt32	width;
+			renderSize.y(),												//	deInt32	height;
+		},															//	VkExtent2D	extent;
+	};
+	const VkPipelineViewportStateCreateInfo		viewportParams			=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,		//	VkStructureType		sType;
+		DE_NULL,													//	const void*			pNext;
+		(VkPipelineViewportStateCreateFlags)0,
+		1u,															//	deUint32			viewportCount;
+		&viewport0,													//	const VkViewport*	pViewports;
+		1u,															//	deUint32			scissorCount;
+		&scissor0													//	const VkRect2D*		pScissors;
+	};
+	const VkSampleMask							sampleMask				= ~0u;
+	const VkPipelineMultisampleStateCreateInfo	multisampleParams		=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	//	VkStructureType			sType;
+		DE_NULL,													//	const void*				pNext;
+		(VkPipelineMultisampleStateCreateFlags)0,
+		VK_SAMPLE_COUNT_1_BIT,										//	VkSampleCountFlagBits	rasterSamples;
+		DE_FALSE,													//	deUint32				sampleShadingEnable;
+		0.0f,														//	float					minSampleShading;
+		&sampleMask,												//	const VkSampleMask*		pSampleMask;
+		DE_FALSE,													//	VkBool32				alphaToCoverageEnable;
+		DE_FALSE,													//	VkBool32				alphaToOneEnable;
+	};
+	const VkPipelineRasterizationStateCreateInfo	rasterParams		=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,	//	VkStructureType	sType;
+		DE_NULL,													//	const void*		pNext;
+		(VkPipelineRasterizationStateCreateFlags)0,
+		DE_TRUE,													//	deUint32		depthClipEnable;
+		DE_FALSE,													//	deUint32		rasterizerDiscardEnable;
+		VK_POLYGON_MODE_FILL,										//	VkFillMode		fillMode;
+		VK_CULL_MODE_NONE,											//	VkCullMode		cullMode;
+		VK_FRONT_FACE_COUNTER_CLOCKWISE,							//	VkFrontFace		frontFace;
+		VK_FALSE,													//	VkBool32		depthBiasEnable;
+		0.0f,														//	float			depthBias;
+		0.0f,														//	float			depthBiasClamp;
+		0.0f,														//	float			slopeScaledDepthBias;
+		1.0f,														//	float			lineWidth;
+	};
+	const VkPrimitiveTopology topology = hasTessellation? VK_PRIMITIVE_TOPOLOGY_PATCH_LIST: VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
+	const VkPipelineInputAssemblyStateCreateInfo	inputAssemblyParams	=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	//	VkStructureType		sType;
+		DE_NULL,														//	const void*			pNext;
+		(VkPipelineInputAssemblyStateCreateFlags)0,
+		topology,														//	VkPrimitiveTopology	topology;
+		DE_FALSE,														//	deUint32			primitiveRestartEnable;
+	};
+	const VkVertexInputBindingDescription		vertexBinding0 =
+	{
+		0u,									// deUint32					binding;
+		deUint32(singleVertexDataSize),		// deUint32					strideInBytes;
+		VK_VERTEX_INPUT_RATE_VERTEX			// VkVertexInputStepRate	stepRate;
+	};
+	const VkVertexInputAttributeDescription		vertexAttrib0[2] =
+	{
+		{
+			0u,									// deUint32	location;
+			0u,									// deUint32	binding;
+			VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat	format;
+			0u									// deUint32	offsetInBytes;
+		},
+		{
+			1u,									// deUint32	location;
+			0u,									// deUint32	binding;
+			VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat	format;
+			sizeof(Vec4),						// deUint32	offsetInBytes;
+		}
+	};
+
+	const VkPipelineVertexInputStateCreateInfo	vertexInputStateParams	=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,	//	VkStructureType								sType;
+		DE_NULL,													//	const void*									pNext;
+		(VkPipelineVertexInputStateCreateFlags)0,
+		1u,															//	deUint32									bindingCount;
+		&vertexBinding0,											//	const VkVertexInputBindingDescription*		pVertexBindingDescriptions;
+		2u,															//	deUint32									attributeCount;
+		vertexAttrib0,												//	const VkVertexInputAttributeDescription*	pVertexAttributeDescriptions;
+	};
+	const VkPipelineColorBlendAttachmentState	attBlendParams			=
+	{
+		DE_FALSE,													//	deUint32		blendEnable;
+		VK_BLEND_FACTOR_ONE,										//	VkBlend			srcBlendColor;
+		VK_BLEND_FACTOR_ZERO,										//	VkBlend			destBlendColor;
+		VK_BLEND_OP_ADD,											//	VkBlendOp		blendOpColor;
+		VK_BLEND_FACTOR_ONE,										//	VkBlend			srcBlendAlpha;
+		VK_BLEND_FACTOR_ZERO,										//	VkBlend			destBlendAlpha;
+		VK_BLEND_OP_ADD,											//	VkBlendOp		blendOpAlpha;
+		(VK_COLOR_COMPONENT_R_BIT|
+		 VK_COLOR_COMPONENT_G_BIT|
+		 VK_COLOR_COMPONENT_B_BIT|
+		 VK_COLOR_COMPONENT_A_BIT),									//	VkChannelFlags	channelWriteMask;
+	};
+	const VkPipelineColorBlendStateCreateInfo	blendParams				=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	//	VkStructureType								sType;
+		DE_NULL,													//	const void*									pNext;
+		(VkPipelineColorBlendStateCreateFlags)0,
+		DE_FALSE,													//	VkBool32									logicOpEnable;
+		VK_LOGIC_OP_COPY,											//	VkLogicOp									logicOp;
+		1u,															//	deUint32									attachmentCount;
+		&attBlendParams,											//	const VkPipelineColorBlendAttachmentState*	pAttachments;
+		{ 0.0f, 0.0f, 0.0f, 0.0f },									//	float										blendConst[4];
+	};
+	const VkPipelineDynamicStateCreateInfo	dynamicStateInfo		=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,	//	VkStructureType			sType;
+		DE_NULL,												//	const void*				pNext;
+		(VkPipelineDynamicStateCreateFlags)0,
+		0u,														//	deUint32				dynamicStateCount;
+		DE_NULL													//	const VkDynamicState*	pDynamicStates;
+	};
+
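+	// Tessellation state (3 control points per patch); only chained into the
+	// pipeline when the instance actually uses tessellation stages.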
+	const VkPipelineTessellationStateCreateInfo	tessellationState	=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,	//	VkStructureType	sType;
+		DE_NULL,													//	const void*		pNext;
+		(VkPipelineTessellationStateCreateFlags)0,
+		3u															//	deUint32		patchControlPoints;
+	};
+
+	const VkPipelineTessellationStateCreateInfo* tessellationInfo	=	hasTessellation ? &tessellationState: DE_NULL;
+	const VkGraphicsPipelineCreateInfo		pipelineParams			=
+	{
+		VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,		//	VkStructureType									sType;
+		DE_NULL,												//	const void*										pNext;
+		0u,														//	VkPipelineCreateFlags							flags;
+		(deUint32)shaderStageParams.size(),						//	deUint32										stageCount;
+		&shaderStageParams[0],									//	const VkPipelineShaderStageCreateInfo*			pStages;
+		&vertexInputStateParams,								//	const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+		&inputAssemblyParams,									//	const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+		tessellationInfo,										//	const VkPipelineTessellationStateCreateInfo*	pTessellationState;
+		&viewportParams,										//	const VkPipelineViewportStateCreateInfo*		pViewportState;
+		&rasterParams,											//	const VkPipelineRasterStateCreateInfo*			pRasterState;
+		&multisampleParams,										//	const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+		&depthStencilParams,									//	const VkPipelineDepthStencilStateCreateInfo*	pDepthStencilState;
+		&blendParams,											//	const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+		&dynamicStateInfo,										//	const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+		*pipelineLayout,										//	VkPipelineLayout								layout;
+		*renderPass,											//	VkRenderPass									renderPass;
+		0u,														//	deUint32										subpass;
+		DE_NULL,												//	VkPipeline										basePipelineHandle;
+		0u,														//	deInt32											basePipelineIndex;
+	};
+
+	const Unique<VkPipeline>				pipeline				(createGraphicsPipeline(vk, vkDevice, DE_NULL, &pipelineParams));
+
+	// Framebuffer
+	const VkFramebufferCreateInfo			framebufferParams		=
+	{
+		VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,				//	VkStructureType		sType;
+		DE_NULL,												//	const void*			pNext;
+		(VkFramebufferCreateFlags)0,
+		*renderPass,											//	VkRenderPass		renderPass;
+		1u,														//	deUint32			attachmentCount;
+		&*colorAttView,											//	const VkImageView*	pAttachments;
+		(deUint32)renderSize.x(),								//	deUint32			width;
+		(deUint32)renderSize.y(),								//	deUint32			height;
+		1u,														//	deUint32			layers;
+	};
+	const Unique<VkFramebuffer>				framebuffer				(createFramebuffer(vk, vkDevice, &framebufferParams));
+
+	const VkCommandPoolCreateInfo			cmdPoolParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,					//	VkStructureType			sType;
+		DE_NULL,													//	const void*				pNext;
+		VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,				//	VkCmdPoolCreateFlags	flags;
+		queueFamilyIndex,											//	deUint32				queueFamilyIndex;
+	};
+	const Unique<VkCommandPool>				cmdPool					(createCommandPool(vk, vkDevice, &cmdPoolParams));
+
+	// Command buffer
+	const VkCommandBufferAllocateInfo		cmdBufParams			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,			//	VkStructureType			sType;
+		DE_NULL,												//	const void*				pNext;
+		*cmdPool,												//	VkCmdPool				pool;
+		VK_COMMAND_BUFFER_LEVEL_PRIMARY,						//	VkCmdBufferLevel		level;
+		1u,														//	deUint32				count;
+	};
+	const Unique<VkCommandBuffer>			cmdBuf					(allocateCommandBuffer(vk, vkDevice, &cmdBufParams));
+
+	const VkCommandBufferBeginInfo			cmdBufBeginParams		=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,			//	VkStructureType				sType;
+		DE_NULL,												//	const void*					pNext;
+		(VkCommandBufferUsageFlags)0,
+		(const VkCommandBufferInheritanceInfo*)DE_NULL,
+	};
+
+	// Record commands
+	VK_CHECK(vk.beginCommandBuffer(*cmdBuf, &cmdBufBeginParams));
+
+	{
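+		// Make host-written vertex data visible to the vertex input stage and transition
+		// the color attachment from UNDEFINED to COLOR_ATTACHMENT_OPTIMAL before rendering.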
+		const VkMemoryBarrier		vertFlushBarrier	=
+		{
+			VK_STRUCTURE_TYPE_MEMORY_BARRIER,			//	VkStructureType		sType;
+			DE_NULL,									//	const void*			pNext;
+			VK_ACCESS_HOST_WRITE_BIT,					//	VkMemoryOutputFlags	outputMask;
+			VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,		//	VkMemoryInputFlags	inputMask;
+		};
+		const VkImageMemoryBarrier	colorAttBarrier		=
+		{
+			VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		//	VkStructureType			sType;
+			DE_NULL,									//	const void*				pNext;
+			0u,											//	VkMemoryOutputFlags		outputMask;
+			VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,		//	VkMemoryInputFlags		inputMask;
+			VK_IMAGE_LAYOUT_UNDEFINED,					//	VkImageLayout			oldLayout;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	//	VkImageLayout			newLayout;
+			queueFamilyIndex,							//	deUint32				srcQueueFamilyIndex;
+			queueFamilyIndex,							//	deUint32				destQueueFamilyIndex;
+			*image,										//	VkImage					image;
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,					//	VkImageAspect	aspect;
+				0u,											//	deUint32		baseMipLevel;
+				1u,											//	deUint32		mipLevels;
+				0u,											//	deUint32		baseArraySlice;
+				1u,											//	deUint32		arraySize;
+			}											//	VkImageSubresourceRange	subresourceRange;
+		};
+		vk.cmdPipelineBarrier(*cmdBuf, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, (VkDependencyFlags)0, 1, &vertFlushBarrier, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &colorAttBarrier);
+	}
+
+	{
+		const VkClearValue			clearValue		= makeClearValueColorF32(0.125f, 0.25f, 0.75f, 1.0f);
+		const VkRenderPassBeginInfo	passBeginParams	=
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,			//	VkStructureType		sType;
+			DE_NULL,											//	const void*			pNext;
+			*renderPass,										//	VkRenderPass		renderPass;
+			*framebuffer,										//	VkFramebuffer		framebuffer;
+			{ { 0, 0 }, { renderSize.x(), renderSize.y() } },	//	VkRect2D			renderArea;
+			1u,													//	deUint32			clearValueCount;
+			&clearValue,										//	const VkClearValue*	pClearValues;
+		};
+		vk.cmdBeginRenderPass(*cmdBuf, &passBeginParams, VK_SUBPASS_CONTENTS_INLINE);
+	}
+
+	vk.cmdBindPipeline(*cmdBuf, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
+	{
+		const VkDeviceSize bindingOffset = 0;
+		vk.cmdBindVertexBuffers(*cmdBuf, 0u, 1u, &vertexBuffer.get(), &bindingOffset);
+	}
+	vk.cmdDraw(*cmdBuf, deUint32(vertexCount), 1u /*run pipeline once*/, 0u /*first vertex*/, 0u /*first instanceIndex*/);
+	vk.cmdEndRenderPass(*cmdBuf);
+
+	{
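+		// After rendering, transition the image to TRANSFER_SRC_OPTIMAL so it can be copied out.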
+		const VkImageMemoryBarrier	renderFinishBarrier	=
+		{
+			VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		//	VkStructureType			sType;
+			DE_NULL,									//	const void*				pNext;
+			VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,		//	VkMemoryOutputFlags		outputMask;
+			VK_ACCESS_TRANSFER_READ_BIT,				//	VkMemoryInputFlags		inputMask;
+			VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	//	VkImageLayout			oldLayout;
+			VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,		//	VkImageLayout			newLayout;
+			queueFamilyIndex,							//	deUint32				srcQueueFamilyIndex;
+			queueFamilyIndex,							//	deUint32				destQueueFamilyIndex;
+			*image,										//	VkImage					image;
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,					//	VkImageAspectFlags	aspectMask;
+				0u,											//	deUint32			baseMipLevel;
+				1u,											//	deUint32			mipLevels;
+				0u,											//	deUint32			baseArraySlice;
+				1u,											//	deUint32			arraySize;
+			}											//	VkImageSubresourceRange	subresourceRange;
+		};
+		vk.cmdPipelineBarrier(*cmdBuf, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &renderFinishBarrier);
+	}
+
+	{
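+		// Copy the rendered image into the host-visible readback buffer.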
+		const VkBufferImageCopy	copyParams	=
+		{
+			(VkDeviceSize)0u,						//	VkDeviceSize			bufferOffset;
+			(deUint32)renderSize.x(),				//	deUint32				bufferRowLength;
+			(deUint32)renderSize.y(),				//	deUint32				bufferImageHeight;
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,				//	VkImageAspect		aspect;
+				0u,										//	deUint32			mipLevel;
+				0u,										//	deUint32			arrayLayer;
+				1u,										//	deUint32			arraySize;
+			},										//	VkImageSubresourceCopy	imageSubresource;
+			{ 0u, 0u, 0u },							//	VkOffset3D				imageOffset;
+			{ renderSize.x(), renderSize.y(), 1u }	//	VkExtent3D				imageExtent;
+		};
+		vk.cmdCopyImageToBuffer(*cmdBuf, *image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *readImageBuffer, 1u, &copyParams);
+	}
+
+	{
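+		// Make the transfer write visible to host reads before the result is mapped and checked.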
+		const VkBufferMemoryBarrier	copyFinishBarrier	=
+		{
+			VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,	//	VkStructureType		sType;
+			DE_NULL,									//	const void*			pNext;
+			VK_ACCESS_TRANSFER_WRITE_BIT,				//	VkMemoryOutputFlags	outputMask;
+			VK_ACCESS_HOST_READ_BIT,					//	VkMemoryInputFlags	inputMask;
+			queueFamilyIndex,							//	deUint32			srcQueueFamilyIndex;
+			queueFamilyIndex,							//	deUint32			destQueueFamilyIndex;
+			*readImageBuffer,							//	VkBuffer			buffer;
+			0u,											//	VkDeviceSize		offset;
+			imageSizeBytes								//	VkDeviceSize		size;
+		};
+		vk.cmdPipelineBarrier(*cmdBuf, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyFinishBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
+	}
+
+	VK_CHECK(vk.endCommandBuffer(*cmdBuf));
+
+	// Upload vertex data
+	{
+		const VkMappedMemoryRange	range			=
+		{
+			VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,	//	VkStructureType	sType;
+			DE_NULL,								//	const void*		pNext;
+			vertexBufferMemory->getMemory(),		//	VkDeviceMemory	mem;
+			0,										//	VkDeviceSize	offset;
+			(VkDeviceSize)sizeof(vertexData),		//	VkDeviceSize	size;
+		};
+		void*						vertexBufPtr	= vertexBufferMemory->getHostPtr();
+
+		deMemcpy(vertexBufPtr, &vertexData[0], sizeof(vertexData));
+		VK_CHECK(vk.flushMappedMemoryRanges(vkDevice, 1u, &range));
+	}
+
+	// Submit & wait for completion
+	{
+		const VkFenceCreateInfo	fenceParams	=
+		{
+			VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	//	VkStructureType		sType;
+			DE_NULL,								//	const void*			pNext;
+			0u,										//	VkFenceCreateFlags	flags;
+		};
+		const Unique<VkFence>	fence		(createFence(vk, vkDevice, &fenceParams));
+		const VkSubmitInfo		submitInfo	=
+		{
+			VK_STRUCTURE_TYPE_SUBMIT_INFO,				//	VkStructureType				sType;
+			DE_NULL,									//	const void*					pNext;
+			0u,											//	deUint32					waitSemaphoreCount;
+			(const VkSemaphore*)DE_NULL,				//	const VkSemaphore*			pWaitSemaphores;
+			(const VkPipelineStageFlags*)DE_NULL,		//	const VkPipelineStageFlags*	pWaitDstStageMask;
+			1u,											//	deUint32					commandBufferCount;
+			&cmdBuf.get(),								//	const VkCommandBuffer*		pCommandBuffers;
+			0u,											//	deUint32					signalSemaphoreCount;
+			(const VkSemaphore*)DE_NULL,				//	const VkSemaphore*			pSignalSemaphores;
+		};
+
+		VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+		VK_CHECK(vk.waitForFences(vkDevice, 1u, &fence.get(), DE_TRUE, ~0ull));
+	}
+
+	const void* imagePtr	= readImageBufferMemory->getHostPtr();
+	const tcu::ConstPixelBufferAccess pixelBuffer(tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8),
+												  renderSize.x(), renderSize.y(), 1, imagePtr);
+	// Log image
+	{
+		const VkMappedMemoryRange	range		=
+		{
+			VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,	//	VkStructureType	sType;
+			DE_NULL,								//	const void*		pNext;
+			readImageBufferMemory->getMemory(),		//	VkDeviceMemory	mem;
+			0,										//	VkDeviceSize	offset;
+			imageSizeBytes,							//	VkDeviceSize	size;
+		};
+
+		VK_CHECK(vk.invalidateMappedMemoryRanges(vkDevice, 1u, &range));
+		context.getTestContext().getLog() << TestLog::Image("Result", "Result", pixelBuffer);
+	}
+
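+	// Spot-check one pixel near each corner of the readback against the expected
+	// output color, allowing one unit of tolerance per channel.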
+	const RGBA threshold(1, 1, 1, 1);
+	const RGBA upperLeft(pixelBuffer.getPixel(1, 1));
+	if (!tcu::compareThreshold(upperLeft, instance.outputColors[0], threshold))
+		return TestStatus::fail("Upper left corner mismatch");
+
+	const RGBA upperRight(pixelBuffer.getPixel(pixelBuffer.getWidth() - 1, 1));
+	if (!tcu::compareThreshold(upperRight, instance.outputColors[1], threshold))
+		return TestStatus::fail("Upper right corner mismatch");
+
+	const RGBA lowerLeft(pixelBuffer.getPixel(1, pixelBuffer.getHeight() - 1));
+	if (!tcu::compareThreshold(lowerLeft, instance.outputColors[2], threshold))
+		return TestStatus::fail("Lower left corner mismatch");
+
+	const RGBA lowerRight(pixelBuffer.getPixel(pixelBuffer.getWidth() - 1, pixelBuffer.getHeight() - 1));
+	if (!tcu::compareThreshold(lowerRight, instance.outputColors[3], threshold))
+		return TestStatus::fail("Lower right corner mismatch");
+
+	return TestStatus::pass("Rendered output matches input");
+}
+
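+// Adds one test case per graphics shader stage to 'tests', exercising the given
+// code fragments (and specialization constants) in that stage and verifying the
+// rendered output against 'outputColors'.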
+void createTestsForAllStages (const std::string& name, const RGBA (&inputColors)[4], const RGBA (&outputColors)[4], const map<string, string>& testCodeFragments, const vector<deInt32>& specConstants, tcu::TestCaseGroup* tests)
+{
+	const ShaderElement		vertFragPipelineStages[]		=
+	{
+		ShaderElement("vert", "main", VK_SHADER_STAGE_VERTEX_BIT),
+		ShaderElement("frag", "main", VK_SHADER_STAGE_FRAGMENT_BIT),
+	};
+
+	const ShaderElement		tessPipelineStages[]			=
+	{
+		ShaderElement("vert", "main", VK_SHADER_STAGE_VERTEX_BIT),
+		ShaderElement("tessc", "main", VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT),
+		ShaderElement("tesse", "main", VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT),
+		ShaderElement("frag", "main", VK_SHADER_STAGE_FRAGMENT_BIT),
+	};
+
+	const ShaderElement		geomPipelineStages[]				=
+	{
+		ShaderElement("vert", "main", VK_SHADER_STAGE_VERTEX_BIT),
+		ShaderElement("geom", "main", VK_SHADER_STAGE_GEOMETRY_BIT),
+		ShaderElement("frag", "main", VK_SHADER_STAGE_FRAGMENT_BIT),
+	};
+
+	StageToSpecConstantMap	specConstantMap;
+
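+	// Each variant routes the test code fragments and any specialization constants
+	// to the stage named in the case suffix, using the vertex+fragment, tessellation
+	// or geometry pipeline as appropriate.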
+	specConstantMap[VK_SHADER_STAGE_VERTEX_BIT] = specConstants;
+	addFunctionCaseWithPrograms<InstanceContext>(tests, name + "_vert", "", addShaderCodeCustomVertex, runAndVerifyDefaultPipeline,
+												 createInstanceContext(vertFragPipelineStages, inputColors, outputColors, testCodeFragments, specConstantMap));
+
+	specConstantMap.clear();
+	specConstantMap[VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT] = specConstants;
+	addFunctionCaseWithPrograms<InstanceContext>(tests, name + "_tessc", "", addShaderCodeCustomTessControl, runAndVerifyDefaultPipeline,
+												 createInstanceContext(tessPipelineStages, inputColors, outputColors, testCodeFragments, specConstantMap));
+
+	specConstantMap.clear();
+	specConstantMap[VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT] = specConstants;
+	addFunctionCaseWithPrograms<InstanceContext>(tests, name + "_tesse", "", addShaderCodeCustomTessEval, runAndVerifyDefaultPipeline,
+												 createInstanceContext(tessPipelineStages, inputColors, outputColors, testCodeFragments, specConstantMap));
+
+	specConstantMap.clear();
+	specConstantMap[VK_SHADER_STAGE_GEOMETRY_BIT] = specConstants;
+	addFunctionCaseWithPrograms<InstanceContext>(tests, name + "_geom", "", addShaderCodeCustomGeometry, runAndVerifyDefaultPipeline,
+												 createInstanceContext(geomPipelineStages, inputColors, outputColors, testCodeFragments, specConstantMap));
+
+	specConstantMap.clear();
+	specConstantMap[VK_SHADER_STAGE_FRAGMENT_BIT] = specConstants;
+	addFunctionCaseWithPrograms<InstanceContext>(tests, name + "_frag", "", addShaderCodeCustomFragment, runAndVerifyDefaultPipeline,
+												 createInstanceContext(vertFragPipelineStages, inputColors, outputColors, testCodeFragments, specConstantMap));
+}
+
+inline void createTestsForAllStages (const std::string& name, const RGBA (&inputColors)[4], const RGBA (&outputColors)[4], const map<string, string>& testCodeFragments, tcu::TestCaseGroup* tests)
+{
+	vector<deInt32> noSpecConstants;
+	createTestsForAllStages(name, inputColors, outputColors, testCodeFragments, noSpecConstants, tests);
+}
+
+} // anonymous
+
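+// Tests the OpSource debug instruction with different source languages, versions,
+// file operands and embedded source strings, including the longest string that
+// fits in a single instruction.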
+tcu::TestCaseGroup* createOpSourceTests (tcu::TestContext& testCtx)
+{
+	struct NameCodePair { string name, code; };
+	RGBA							defaultColors[4];
+	de::MovePtr<tcu::TestCaseGroup> opSourceTests			(new tcu::TestCaseGroup(testCtx, "opsource", "OpSource instruction"));
+	const std::string				opsourceGLSLWithFile	= "%opsrcfile = OpString \"foo.vert\"\nOpSource GLSL 450 %opsrcfile ";
+	map<string, string>				fragments				= passthruFragments();
+	const NameCodePair				tests[]					=
+	{
+		{"unknown", "OpSource Unknown 321"},
+		{"essl", "OpSource ESSL 310"},
+		{"glsl", "OpSource GLSL 450"},
+		{"opencl_cpp", "OpSource OpenCL_CPP 120"},
+		{"opencl_c", "OpSource OpenCL_C 120"},
+		{"multiple", "OpSource GLSL 450\nOpSource GLSL 450"},
+		{"file", opsourceGLSLWithFile},
+		{"source", opsourceGLSLWithFile + "\"void main(){}\""},
+		// Longest possible source string: SPIR-V limits instructions to 65535
+		// words, of which the first 4 are opsourceGLSLWithFile; the rest will
+		// contain 65530 UTF8 characters (one word each) plus one last word
+		// containing 3 ASCII characters and \0.
+		{"longsource", opsourceGLSLWithFile + '"' + makeLongUTF8String(65530) + "ccc" + '"'}
+	};
+
+	getDefaultColors(defaultColors);
+	for (size_t testNdx = 0; testNdx < sizeof(tests) / sizeof(NameCodePair); ++testNdx)
+	{
+		fragments["debug"] = tests[testNdx].code;
+		createTestsForAllStages(tests[testNdx].name, defaultColors, defaultColors, fragments, opSourceTests.get());
+	}
+
+	return opSourceTests.release();
+}
+
+tcu::TestCaseGroup* createOpSourceContinuedTests (tcu::TestContext& testCtx)
+{
+	struct NameCodePair { string name, code; };
+	RGBA								defaultColors[4];
+	de::MovePtr<tcu::TestCaseGroup>		opSourceTests		(new tcu::TestCaseGroup(testCtx, "opsourcecontinued", "OpSourceContinued instruction"));
+	map<string, string>					fragments			= passthruFragments();
+	const std::string					opsource			= "%opsrcfile = OpString \"foo.vert\"\nOpSource GLSL 450 %opsrcfile \"void main(){}\"\n";
+	const NameCodePair					tests[]				=
+	{
+		{"empty", opsource + "OpSourceContinued \"\""},
+		{"short", opsource + "OpSourceContinued \"abcde\""},
+		{"multiple", opsource + "OpSourceContinued \"abcde\"\nOpSourceContinued \"fghij\""},
+		// Longest possible source string: SPIR-V limits instructions to 65535
+		// words, of which the first one is OpSourceContinued/length; the rest
+		// will contain 65533 UTF8 characters (one word each) plus one last word
+		// containing 3 ASCII characters and \0.
+		{"long", opsource + "OpSourceContinued \"" + makeLongUTF8String(65533) + "ccc\""}
+	};
+
+	getDefaultColors(defaultColors);
+	for (size_t testNdx = 0; testNdx < sizeof(tests) / sizeof(NameCodePair); ++testNdx)
+	{
+		fragments["debug"] = tests[testNdx].code;
+		createTestsForAllStages(tests[testNdx].name, defaultColors, defaultColors, fragments, opSourceTests.get());
+	}
+
+	return opSourceTests.release();
+}
+
+tcu::TestCaseGroup* createOpNoLineTests(tcu::TestContext& testCtx)
+{
+	RGBA								 defaultColors[4];
+	de::MovePtr<tcu::TestCaseGroup>		 opLineTests		 (new tcu::TestCaseGroup(testCtx, "opnoline", "OpNoLine instruction"));
+	map<string, string>					 fragments;
+	getDefaultColors(defaultColors);
+	fragments["debug"]			=
+		"%name = OpString \"name\"\n";
+
+	fragments["pre_main"]	=
+		"OpNoLine\n"
+		"OpNoLine\n"
+		"OpLine %name 1 1\n"
+		"OpNoLine\n"
+		"OpLine %name 1 1\n"
+		"OpLine %name 1 1\n"
+		"%second_function = OpFunction %v4f32 None %v4f32_function\n"
+		"OpNoLine\n"
+		"OpLine %name 1 1\n"
+		"OpNoLine\n"
+		"OpLine %name 1 1\n"
+		"OpLine %name 1 1\n"
+		"%second_param1 = OpFunctionParameter %v4f32\n"
+		"OpNoLine\n"
+		"OpNoLine\n"
+		"%label_secondfunction = OpLabel\n"
+		"OpNoLine\n"
+		"OpReturnValue %second_param1\n"
+		"OpFunctionEnd\n"
+		"OpNoLine\n"
+		"OpNoLine\n";
+
+	fragments["testfun"]		=
+		// A %test_code function that returns its argument unchanged.
+		"OpNoLine\n"
+		"OpNoLine\n"
+		"OpLine %name 1 1\n"
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"OpNoLine\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"OpNoLine\n"
+		"OpNoLine\n"
+		"%label_testfun = OpLabel\n"
+		"OpNoLine\n"
+		"%val1 = OpFunctionCall %v4f32 %second_function %param1\n"
+		"OpReturnValue %val1\n"
+		"OpFunctionEnd\n"
+		"OpLine %name 1 1\n"
+		"OpNoLine\n";
+
+	createTestsForAllStages("opnoline", defaultColors, defaultColors, fragments, opLineTests.get());
+
+	return opLineTests.release();
+}
+
+
+tcu::TestCaseGroup* createOpLineTests(tcu::TestContext& testCtx)
+{
+	RGBA													defaultColors[4];
+	de::MovePtr<tcu::TestCaseGroup>							opLineTests			(new tcu::TestCaseGroup(testCtx, "opline", "OpLine instruction"));
+	map<string, string>										fragments;
+	std::vector<std::pair<std::string, std::string> >		problemStrings;
+
+	problemStrings.push_back(std::make_pair<std::string, std::string>("empty_name", ""));
+	problemStrings.push_back(std::make_pair<std::string, std::string>("short_name", "short_name"));
+	problemStrings.push_back(std::make_pair<std::string, std::string>("long_name", makeLongUTF8String(65530) + "ccc"));
+	getDefaultColors(defaultColors);
+
+	fragments["debug"]			=
+		"%other_name = OpString \"other_name\"\n";
+
+	fragments["pre_main"]	=
+		"OpLine %file_name 32 0\n"
+		"OpLine %file_name 32 32\n"
+		"OpLine %file_name 32 40\n"
+		"OpLine %other_name 32 40\n"
+		"OpLine %other_name 0 100\n"
+		"OpLine %other_name 0 4294967295\n"
+		"OpLine %other_name 4294967295 0\n"
+		"OpLine %other_name 32 40\n"
+		"OpLine %file_name 0 0\n"
+		"%second_function = OpFunction %v4f32 None %v4f32_function\n"
+		"OpLine %file_name 1 0\n"
+		"%second_param1 = OpFunctionParameter %v4f32\n"
+		"OpLine %file_name 1 3\n"
+		"OpLine %file_name 1 2\n"
+		"%label_secondfunction = OpLabel\n"
+		"OpLine %file_name 0 2\n"
+		"OpReturnValue %second_param1\n"
+		"OpFunctionEnd\n"
+		"OpLine %file_name 0 2\n"
+		"OpLine %file_name 0 2\n";
+
+	fragments["testfun"]		=
+		// A %test_code function that returns its argument unchanged.
+		"OpLine %file_name 1 0\n"
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"OpLine %file_name 16 330\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"OpLine %file_name 14 442\n"
+		"%label_testfun = OpLabel\n"
+		"OpLine %file_name 11 1024\n"
+		"%val1 = OpFunctionCall %v4f32 %second_function %param1\n"
+		"OpLine %file_name 2 97\n"
+		"OpReturnValue %val1\n"
+		"OpFunctionEnd\n"
+		"OpLine %file_name 5 32\n";
+
+	for (size_t i = 0; i < problemStrings.size(); ++i)
+	{
+		map<string, string> testFragments = fragments;
+		testFragments["debug"] += "%file_name = OpString \"" + problemStrings[i].second + "\"\n";
+		createTestsForAllStages(string("opline") + "_" + problemStrings[i].first, defaultColors, defaultColors, testFragments, opLineTests.get());
+	}
+
+	return opLineTests.release();
+}
+
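+// Tests OpConstantNull for scalar, boolean, vector, struct, array and matrix types;
+// each null constant must behave as zero (or false) when used by the test function.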
+tcu::TestCaseGroup* createOpConstantNullTests(tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> opConstantNullTests		(new tcu::TestCaseGroup(testCtx, "opconstantnull", "OpConstantNull instruction"));
+	RGBA							colors[4];
+
+
+	const char						functionStart[] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%lbl    = OpLabel\n";
+
+	const char						functionEnd[]	=
+		"OpReturnValue %transformed_param\n"
+		"OpFunctionEnd\n";
+
+	struct NameConstantsCode
+	{
+		string name;
+		string constants;
+		string code;
+	};
+
+	NameConstantsCode tests[] =
+	{
+		{
+			"vec4",
+			"%cnull = OpConstantNull %v4f32\n",
+			"%transformed_param = OpFAdd %v4f32 %param1 %cnull\n"
+		},
+		{
+			"float",
+			"%cnull = OpConstantNull %f32\n",
+			"%vp = OpVariable %fp_v4f32 Function\n"
+			"%v  = OpLoad %v4f32 %vp\n"
+			"%v0 = OpVectorInsertDynamic %v4f32 %v %cnull %c_i32_0\n"
+			"%v1 = OpVectorInsertDynamic %v4f32 %v0 %cnull %c_i32_1\n"
+			"%v2 = OpVectorInsertDynamic %v4f32 %v1 %cnull %c_i32_2\n"
+			"%v3 = OpVectorInsertDynamic %v4f32 %v2 %cnull %c_i32_3\n"
+			"%transformed_param = OpFAdd %v4f32 %param1 %v3\n"
+		},
+		{
+			"bool",
+			"%cnull             = OpConstantNull %bool\n",
+			"%v                 = OpVariable %fp_v4f32 Function\n"
+			"                     OpStore %v %param1\n"
+			"                     OpSelectionMerge %false_label None\n"
+			"                     OpBranchConditional %cnull %true_label %false_label\n"
+			"%true_label        = OpLabel\n"
+			"                     OpStore %v %c_v4f32_0_5_0_5_0_5_0_5\n"
+			"                     OpBranch %false_label\n"
+			"%false_label       = OpLabel\n"
+			"%transformed_param = OpLoad %v4f32 %v\n"
+		},
+		{
+			"i32",
+			"%cnull             = OpConstantNull %i32\n",
+			"%v                 = OpVariable %fp_v4f32 Function %c_v4f32_0_5_0_5_0_5_0_5\n"
+			"%b                 = OpIEqual %bool %cnull %c_i32_0\n"
+			"                     OpSelectionMerge %false_label None\n"
+			"                     OpBranchConditional %b %true_label %false_label\n"
+			"%true_label        = OpLabel\n"
+			"                     OpStore %v %param1\n"
+			"                     OpBranch %false_label\n"
+			"%false_label       = OpLabel\n"
+			"%transformed_param = OpLoad %v4f32 %v\n"
+		},
+		{
+			"struct",
+			"%stype             = OpTypeStruct %f32 %v4f32\n"
+			"%fp_stype          = OpTypePointer Function %stype\n"
+			"%cnull             = OpConstantNull %stype\n",
+			"%v                 = OpVariable %fp_stype Function %cnull\n"
+			"%f                 = OpAccessChain %fp_v4f32 %v %c_i32_1\n"
+			"%f_val             = OpLoad %v4f32 %f\n"
+			"%transformed_param = OpFAdd %v4f32 %param1 %f_val\n"
+		},
+		{
+			"array",
+			"%a4_v4f32          = OpTypeArray %v4f32 %c_u32_4\n"
+			"%fp_a4_v4f32       = OpTypePointer Function %a4_v4f32\n"
+			"%cnull             = OpConstantNull %a4_v4f32\n",
+			"%v                 = OpVariable %fp_a4_v4f32 Function %cnull\n"
+			"%f                 = OpAccessChain %fp_v4f32 %v %c_u32_0\n"
+			"%f1                = OpAccessChain %fp_v4f32 %v %c_u32_1\n"
+			"%f2                = OpAccessChain %fp_v4f32 %v %c_u32_2\n"
+			"%f3                = OpAccessChain %fp_v4f32 %v %c_u32_3\n"
+			"%f_val             = OpLoad %v4f32 %f\n"
+			"%f1_val            = OpLoad %v4f32 %f1\n"
+			"%f2_val            = OpLoad %v4f32 %f2\n"
+			"%f3_val            = OpLoad %v4f32 %f3\n"
+			"%t0                = OpFAdd %v4f32 %param1 %f_val\n"
+			"%t1                = OpFAdd %v4f32 %t0 %f1_val\n"
+			"%t2                = OpFAdd %v4f32 %t1 %f2_val\n"
+			"%transformed_param = OpFAdd %v4f32 %t2 %f3_val\n"
+		},
+		{
+			"matrix",
+			"%mat4x4_f32        = OpTypeMatrix %v4f32 4\n"
+			"%cnull             = OpConstantNull %mat4x4_f32\n",
+			// Our null matrix * any vector should result in a zero vector.
+			"%v                 = OpVectorTimesMatrix %v4f32 %param1 %cnull\n"
+			"%transformed_param = OpFAdd %v4f32 %param1 %v\n"
+		}
+	};
+
+	getHalfColorsFullAlpha(colors);
+
+	for (size_t testNdx = 0; testNdx < sizeof(tests) / sizeof(NameConstantsCode); ++testNdx)
+	{
+		map<string, string> fragments;
+		fragments["pre_main"] = tests[testNdx].constants;
+		fragments["testfun"] = string(functionStart) + tests[testNdx].code + functionEnd;
+		createTestsForAllStages(tests[testNdx].name, colors, colors, fragments, opConstantNullTests.get());
+	}
+	return opConstantNullTests.release();
+}
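+
+// Tests OpConstantComposite for vector, struct, matrix, array and nested composite
+// constants, each used in arithmetic whose result is checked in the rendered output.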
+tcu::TestCaseGroup* createOpConstantCompositeTests(tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> opConstantCompositeTests		(new tcu::TestCaseGroup(testCtx, "opconstantcomposite", "OpConstantComposite instruction"));
+	RGBA							inputColors[4];
+	RGBA							outputColors[4];
+
+
+	const char						functionStart[]	 =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%lbl    = OpLabel\n";
+
+	const char						functionEnd[]		=
+		"OpReturnValue %transformed_param\n"
+		"OpFunctionEnd\n";
+
+	struct NameConstantsCode
+	{
+		string name;
+		string constants;
+		string code;
+	};
+
+	NameConstantsCode tests[] =
+	{
+		{
+			"vec4",
+
+			"%cval              = OpConstantComposite %v4f32 %c_f32_0_5 %c_f32_0_5 %c_f32_0_5 %c_f32_0\n",
+			"%transformed_param = OpFAdd %v4f32 %param1 %cval\n"
+		},
+		{
+			"struct",
+
+			"%stype             = OpTypeStruct %v4f32 %f32\n"
+			"%fp_stype          = OpTypePointer Function %stype\n"
+			"%f32_n_1           = OpConstant %f32 -1.0\n"
+			"%f32_1_5           = OpConstant %f32 !0x3fc00000\n" // +1.5
+			"%cvec              = OpConstantComposite %v4f32 %f32_1_5 %f32_1_5 %f32_1_5 %c_f32_1\n"
+			"%cval              = OpConstantComposite %stype %cvec %f32_n_1\n",
+
+			"%v                 = OpVariable %fp_stype Function %cval\n"
+			"%vec_ptr           = OpAccessChain %fp_v4f32 %v %c_u32_0\n"
+			"%f32_ptr           = OpAccessChain %fp_f32 %v %c_u32_1\n"
+			"%vec_val           = OpLoad %v4f32 %vec_ptr\n"
+			"%f32_val           = OpLoad %f32 %f32_ptr\n"
+			"%tmp1              = OpVectorTimesScalar %v4f32 %c_v4f32_1_1_1_1 %f32_val\n" // vec4(-1)
+			"%tmp2              = OpFAdd %v4f32 %tmp1 %param1\n" // param1 + vec4(-1)
+			"%transformed_param = OpFAdd %v4f32 %tmp2 %vec_val\n" // param1 + vec4(-1) + vec4(1.5, 1.5, 1.5, 1.0)
+		},
+		{
+			// [1|0|0|0.5] [x] = x + 0.5
+			// [0|1|0|0.5] [y] = y + 0.5
+			// [0|0|1|0.5] [z] = z + 0.5
+			// [0|0|0|1  ] [1] = 1
+			"matrix",
+
+			"%mat4x4_f32          = OpTypeMatrix %v4f32 4\n"
+			"%v4f32_1_0_0_0       = OpConstantComposite %v4f32 %c_f32_1 %c_f32_0 %c_f32_0 %c_f32_0\n"
+			"%v4f32_0_1_0_0       = OpConstantComposite %v4f32 %c_f32_0 %c_f32_1 %c_f32_0 %c_f32_0\n"
+			"%v4f32_0_0_1_0       = OpConstantComposite %v4f32 %c_f32_0 %c_f32_0 %c_f32_1 %c_f32_0\n"
+			"%v4f32_0_5_0_5_0_5_1 = OpConstantComposite %v4f32 %c_f32_0_5 %c_f32_0_5 %c_f32_0_5 %c_f32_1\n"
+			"%cval                = OpConstantComposite %mat4x4_f32 %v4f32_1_0_0_0 %v4f32_0_1_0_0 %v4f32_0_0_1_0 %v4f32_0_5_0_5_0_5_1\n",
+
+			"%transformed_param   = OpMatrixTimesVector %v4f32 %cval %param1\n"
+		},
+		{
+			"array",
+
+			"%c_v4f32_1_1_1_0     = OpConstantComposite %v4f32 %c_f32_1 %c_f32_1 %c_f32_1 %c_f32_0\n"
+			"%fp_a4f32            = OpTypePointer Function %a4f32\n"
+			"%f32_n_1             = OpConstant %f32 -1.0\n"
+			"%f32_1_5             = OpConstant %f32 !0x3fc00000\n" // +1.5
+			"%carr                = OpConstantComposite %a4f32 %c_f32_0 %f32_n_1 %f32_1_5 %c_f32_0\n",
+
+			"%v                   = OpVariable %fp_a4f32 Function %carr\n"
+			"%f                   = OpAccessChain %fp_f32 %v %c_u32_0\n"
+			"%f1                  = OpAccessChain %fp_f32 %v %c_u32_1\n"
+			"%f2                  = OpAccessChain %fp_f32 %v %c_u32_2\n"
+			"%f3                  = OpAccessChain %fp_f32 %v %c_u32_3\n"
+			"%f_val               = OpLoad %f32 %f\n"
+			"%f1_val              = OpLoad %f32 %f1\n"
+			"%f2_val              = OpLoad %f32 %f2\n"
+			"%f3_val              = OpLoad %f32 %f3\n"
+			"%ftot1               = OpFAdd %f32 %f_val %f1_val\n"
+			"%ftot2               = OpFAdd %f32 %ftot1 %f2_val\n"
+			"%ftot3               = OpFAdd %f32 %ftot2 %f3_val\n"  // 0 - 1 + 1.5 + 0
+			"%add_vec             = OpVectorTimesScalar %v4f32 %c_v4f32_1_1_1_0 %ftot3\n"
+			"%transformed_param   = OpFAdd %v4f32 %param1 %add_vec\n"
+		},
+		{
+			//
+			// [
+			//   {
+			//      0.0,
+			//      [ 1.0, 1.0, 1.0, 1.0]
+			//   },
+			//   {
+			//      1.0,
+			//      [ 0.0, 0.5, 0.0, 0.0]
+			//   }, //     ^^^
+			//   {
+			//      0.0,
+			//      [ 1.0, 1.0, 1.0, 1.0]
+			//   }
+			// ]
+			"array_of_struct_of_array",
+
+			"%c_v4f32_1_1_1_0     = OpConstantComposite %v4f32 %c_f32_1 %c_f32_1 %c_f32_1 %c_f32_0\n"
+			"%fp_a4f32            = OpTypePointer Function %a4f32\n"
+			"%stype               = OpTypeStruct %f32 %a4f32\n"
+			"%a3stype             = OpTypeArray %stype %c_u32_3\n"
+			"%fp_a3stype          = OpTypePointer Function %a3stype\n"
+			"%ca4f32_0            = OpConstantComposite %a4f32 %c_f32_0 %c_f32_0_5 %c_f32_0 %c_f32_0\n"
+			"%ca4f32_1            = OpConstantComposite %a4f32 %c_f32_1 %c_f32_1 %c_f32_1 %c_f32_1\n"
+			"%cstype1             = OpConstantComposite %stype %c_f32_0 %ca4f32_1\n"
+			"%cstype2             = OpConstantComposite %stype %c_f32_1 %ca4f32_0\n"
+			"%carr                = OpConstantComposite %a3stype %cstype1 %cstype2 %cstype1",
+
+			"%v                   = OpVariable %fp_a3stype Function %carr\n"
+			"%f                   = OpAccessChain %fp_f32 %v %c_u32_1 %c_u32_1 %c_u32_1\n"
+			"%f_l                 = OpLoad %f32 %f\n"
+			"%add_vec             = OpVectorTimesScalar %v4f32 %c_v4f32_1_1_1_0 %f_l\n"
+			"%transformed_param   = OpFAdd %v4f32 %param1 %add_vec\n"
+		}
+	};
+
+	getHalfColorsFullAlpha(inputColors);
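+	// Every case above ultimately adds 0.5 to the RGB channels of the half-intensity inputs, giving the colors below.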
+	outputColors[0] = RGBA(255, 255, 255, 255);
+	outputColors[1] = RGBA(255, 127, 127, 255);
+	outputColors[2] = RGBA(127, 255, 127, 255);
+	outputColors[3] = RGBA(127, 127, 255, 255);
+
+	for (size_t testNdx = 0; testNdx < sizeof(tests) / sizeof(NameConstantsCode); ++testNdx)
+	{
+		map<string, string> fragments;
+		fragments["pre_main"] = tests[testNdx].constants;
+		fragments["testfun"] = string(functionStart) + tests[testNdx].code + functionEnd;
+		createTestsForAllStages(tests[testNdx].name, inputColors, outputColors, fragments, opConstantCompositeTests.get());
+	}
+	return opConstantCompositeTests.release();
+}
+
+tcu::TestCaseGroup* createSelectionBlockOrderTests(tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> group				(new tcu::TestCaseGroup(testCtx, "selection_block_order", "Out-of-order blocks for selection"));
+	RGBA							inputColors[4];
+	RGBA							outputColors[4];
+	map<string, string>				fragments;
+
+	// vec4 test_code(vec4 param) {
+	//   vec4 result = param;
+	//   for (int i = 0; i < 4; ++i) {
+	//     if (i == 0) result[i] = 0.;
+	//     else        result[i] = 1. - result[i];
+	//   }
+	//   return result;
+	// }
+	const char						function[]			=
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1    = OpFunctionParameter %v4f32\n"
+		"%lbl       = OpLabel\n"
+		"%iptr      = OpVariable %fp_i32 Function\n"
+		"%result    = OpVariable %fp_v4f32 Function\n"
+		"             OpStore %iptr %c_i32_0\n"
+		"             OpStore %result %param1\n"
+		"             OpBranch %loop\n"
+
+		// Loop entry block.
+		"%loop      = OpLabel\n"
+		"%ival      = OpLoad %i32 %iptr\n"
+		"%lt_4      = OpSLessThan %bool %ival %c_i32_4\n"
+		"             OpLoopMerge %exit %loop None\n"
+		"             OpBranchConditional %lt_4 %if_entry %exit\n"
+
+		// Merge block for loop.
+		"%exit      = OpLabel\n"
+		"%ret       = OpLoad %v4f32 %result\n"
+		"             OpReturnValue %ret\n"
+
+		// If-statement entry block.
+		"%if_entry  = OpLabel\n"
+		"%loc       = OpAccessChain %fp_f32 %result %ival\n"
+		"%eq_0      = OpIEqual %bool %ival %c_i32_0\n"
+		"             OpSelectionMerge %if_exit None\n"
+		"             OpBranchConditional %eq_0 %if_true %if_false\n"
+
+		// False branch for if-statement.
+		"%if_false  = OpLabel\n"
+		"%val       = OpLoad %f32 %loc\n"
+		"%sub       = OpFSub %f32 %c_f32_1 %val\n"
+		"             OpStore %loc %sub\n"
+		"             OpBranch %if_exit\n"
+
+		// Merge block for if-statement.
+		"%if_exit   = OpLabel\n"
+		"%ival_next = OpIAdd %i32 %ival %c_i32_1\n"
+		"             OpStore %iptr %ival_next\n"
+		"             OpBranch %loop\n"
+
+		// True branch for if-statement.
+		"%if_true   = OpLabel\n"
+		"             OpStore %loc %c_f32_0\n"
+		"             OpBranch %if_exit\n"
+
+		"             OpFunctionEnd\n";
+
+	fragments["testfun"]	= function;
+
+	inputColors[0]			= RGBA(127, 127, 127, 0);
+	inputColors[1]			= RGBA(127, 0,   0,   0);
+	inputColors[2]			= RGBA(0,   127, 0,   0);
+	inputColors[3]			= RGBA(0,   0,   127, 0);
+
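+	// Expected results of the loop above: red is forced to 0 and the remaining channels are inverted (1 - c).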
+	outputColors[0]			= RGBA(0, 128, 128, 255);
+	outputColors[1]			= RGBA(0, 255, 255, 255);
+	outputColors[2]			= RGBA(0, 128, 255, 255);
+	outputColors[3]			= RGBA(0, 255, 128, 255);
+
+	createTestsForAllStages("out_of_order", inputColors, outputColors, fragments, group.get());
+
+	return group.release();
+}
+
+tcu::TestCaseGroup* createSwitchBlockOrderTests(tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> group				(new tcu::TestCaseGroup(testCtx, "switch_block_order", "Out-of-order blocks for switch"));
+	RGBA							inputColors[4];
+	RGBA							outputColors[4];
+	map<string, string>				fragments;
+
+	const char						typesAndConstants[]	=
+		"%c_f32_p2  = OpConstant %f32 0.2\n"
+		"%c_f32_p4  = OpConstant %f32 0.4\n"
+		"%c_f32_p6  = OpConstant %f32 0.6\n"
+		"%c_f32_p8  = OpConstant %f32 0.8\n";
+
+	// vec4 test_code(vec4 param) {
+	//   vec4 result = param;
+	//   for (int i = 0; i < 4; ++i) {
+	//     switch (i) {
+	//       case 0: result[i] += .2; break;
+	//       case 1: result[i] += .6; break;
+	//       case 2: result[i] += .4; break;
+	//       case 3: result[i] += .8; break;
+	//       default: break; // unreachable
+	//     }
+	//   }
+	//   return result;
+	// }
+	const char						function[]			=
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1    = OpFunctionParameter %v4f32\n"
+		"%lbl       = OpLabel\n"
+		"%iptr      = OpVariable %fp_i32 Function\n"
+		"%result    = OpVariable %fp_v4f32 Function\n"
+		"             OpStore %iptr %c_i32_0\n"
+		"             OpStore %result %param1\n"
+		"             OpBranch %loop\n"
+
+		// Loop entry block.
+		"%loop      = OpLabel\n"
+		"%ival      = OpLoad %i32 %iptr\n"
+		"%lt_4      = OpSLessThan %bool %ival %c_i32_4\n"
+		"             OpLoopMerge %exit %loop None\n"
+		"             OpBranchConditional %lt_4 %switch_entry %exit\n"
+
+		// Merge block for loop.
+		"%exit      = OpLabel\n"
+		"%ret       = OpLoad %v4f32 %result\n"
+		"             OpReturnValue %ret\n"
+
+		// Switch-statement entry block.
+		"%switch_entry   = OpLabel\n"
+		"%loc            = OpAccessChain %fp_f32 %result %ival\n"
+		"%val            = OpLoad %f32 %loc\n"
+		"                  OpSelectionMerge %switch_exit None\n"
+		"                  OpSwitch %ival %switch_default 0 %case0 1 %case1 2 %case2 3 %case3\n"
+
+		"%case2          = OpLabel\n"
+		"%addp4          = OpFAdd %f32 %val %c_f32_p4\n"
+		"                  OpStore %loc %addp4\n"
+		"                  OpBranch %switch_exit\n"
+
+		"%switch_default = OpLabel\n"
+		"                  OpUnreachable\n"
+
+		"%case3          = OpLabel\n"
+		"%addp8          = OpFAdd %f32 %val %c_f32_p8\n"
+		"                  OpStore %loc %addp8\n"
+		"                  OpBranch %switch_exit\n"
+
+		"%case0          = OpLabel\n"
+		"%addp2          = OpFAdd %f32 %val %c_f32_p2\n"
+		"                  OpStore %loc %addp2\n"
+		"                  OpBranch %switch_exit\n"
+
+		// Merge block for switch-statement.
+		"%switch_exit    = OpLabel\n"
+		"%ival_next      = OpIAdd %i32 %ival %c_i32_1\n"
+		"                  OpStore %iptr %ival_next\n"
+		"                  OpBranch %loop\n"
+
+		"%case1          = OpLabel\n"
+		"%addp6          = OpFAdd %f32 %val %c_f32_p6\n"
+		"                  OpStore %loc %addp6\n"
+		"                  OpBranch %switch_exit\n"
+
+		"                  OpFunctionEnd\n";
+
+	fragments["pre_main"]	= typesAndConstants;
+	fragments["testfun"]	= function;
+
+	inputColors[0]			= RGBA(127, 27,  127, 51);
+	inputColors[1]			= RGBA(127, 0,   0,   51);
+	inputColors[2]			= RGBA(0,   27,  0,   51);
+	inputColors[3]			= RGBA(0,   0,   127, 51);
+
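+	// Expected results: +.2, +.6, +.4 and +.8 added to the red, green, blue and alpha channels respectively.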
+	outputColors[0]			= RGBA(178, 180, 229, 255);
+	outputColors[1]			= RGBA(178, 153, 102, 255);
+	outputColors[2]			= RGBA(51,  180, 102, 255);
+	outputColors[3]			= RGBA(51,  153, 229, 255);
+
+	createTestsForAllStages("out_of_order", inputColors, outputColors, fragments, group.get());
+
+	return group.release();
+}
+
+tcu::TestCaseGroup* createDecorationGroupTests(tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> group				(new tcu::TestCaseGroup(testCtx, "decoration_group", "Decoration group tests"));
+	RGBA							inputColors[4];
+	RGBA							outputColors[4];
+	map<string, string>				fragments;
+
+	const char						decorations[]		=
+		"OpDecorate %array_group         ArrayStride 4\n"
+		"OpDecorate %struct_member_group Offset 0\n"
+		"%array_group         = OpDecorationGroup\n"
+		"%struct_member_group = OpDecorationGroup\n"
+
+		"OpDecorate %group1 RelaxedPrecision\n"
+		"OpDecorate %group3 RelaxedPrecision\n"
+		"OpDecorate %group3 Invariant\n"
+		"OpDecorate %group3 Restrict\n"
+		"%group0 = OpDecorationGroup\n"
+		"%group1 = OpDecorationGroup\n"
+		"%group3 = OpDecorationGroup\n";
+
+	const char						typesAndConstants[]	=
+		"%a3f32     = OpTypeArray %f32 %c_u32_3\n"
+		"%struct1   = OpTypeStruct %a3f32\n"
+		"%struct2   = OpTypeStruct %a3f32\n"
+		"%fp_struct1 = OpTypePointer Function %struct1\n"
+		"%fp_struct2 = OpTypePointer Function %struct2\n"
+		"%c_f32_2    = OpConstant %f32 2.\n"
+		"%c_f32_n2   = OpConstant %f32 -2.\n"
+
+		"%c_a3f32_1 = OpConstantComposite %a3f32 %c_f32_1 %c_f32_2 %c_f32_1\n"
+		"%c_a3f32_2 = OpConstantComposite %a3f32 %c_f32_n1 %c_f32_n2 %c_f32_n1\n"
+		"%c_struct1 = OpConstantComposite %struct1 %c_a3f32_1\n"
+		"%c_struct2 = OpConstantComposite %struct2 %c_a3f32_2\n";
+
+	const char						function[]			=
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param     = OpFunctionParameter %v4f32\n"
+		"%entry     = OpLabel\n"
+		"%result    = OpVariable %fp_v4f32 Function\n"
+		"%v_struct1 = OpVariable %fp_struct1 Function\n"
+		"%v_struct2 = OpVariable %fp_struct2 Function\n"
+		"             OpStore %result %param\n"
+		"             OpStore %v_struct1 %c_struct1\n"
+		"             OpStore %v_struct2 %c_struct2\n"
+		"%ptr1      = OpAccessChain %fp_f32 %v_struct1 %c_i32_0 %c_i32_2\n"
+		"%val1      = OpLoad %f32 %ptr1\n"
+		"%ptr2      = OpAccessChain %fp_f32 %v_struct2 %c_i32_0 %c_i32_2\n"
+		"%val2      = OpLoad %f32 %ptr2\n"
+		"%addvalues = OpFAdd %f32 %val1 %val2\n"
+		"%ptr       = OpAccessChain %fp_f32 %result %c_i32_1\n"
+		"%val       = OpLoad %f32 %ptr\n"
+		"%addresult = OpFAdd %f32 %addvalues %val\n"
+		"             OpStore %ptr %addresult\n"
+		"%ret       = OpLoad %v4f32 %result\n"
+		"             OpReturnValue %ret\n"
+		"             OpFunctionEnd\n";
+
+	struct CaseNameDecoration
+	{
+		string name;
+		string decoration;
+	};
+
+	CaseNameDecoration tests[] =
+	{
+		{
+			"same_decoration_group_on_multiple_types",
+			"OpGroupMemberDecorate %struct_member_group %struct1 0 %struct2 0\n"
+		},
+		{
+			"empty_decoration_group",
+			"OpGroupDecorate %group0      %a3f32\n"
+			"OpGroupDecorate %group0      %result\n"
+		},
+		{
+			"one_element_decoration_group",
+			"OpGroupDecorate %array_group %a3f32\n"
+		},
+		{
+			"multiple_elements_decoration_group",
+			"OpGroupDecorate %group3      %v_struct1\n"
+		},
+		{
+			"multiple_decoration_groups_on_same_variable",
+			"OpGroupDecorate %group0      %v_struct2\n"
+			"OpGroupDecorate %group1      %v_struct2\n"
+			"OpGroupDecorate %group3      %v_struct2\n"
+		},
+		{
+			"same_decoration_group_multiple_times",
+			"OpGroupDecorate %group1      %addvalues\n"
+			"OpGroupDecorate %group1      %addvalues\n"
+			"OpGroupDecorate %group1      %addvalues\n"
+		},
+	};
+
+	getHalfColorsFullAlpha(inputColors);
+	getHalfColorsFullAlpha(outputColors);
+
+	for (size_t idx = 0; idx < (sizeof(tests) / sizeof(tests[0])); ++idx)
+	{
+		fragments["decoration"]	= decorations + tests[idx].decoration;
+		fragments["pre_main"]	= typesAndConstants;
+		fragments["testfun"]	= function;
+
+		createTestsForAllStages(tests[idx].name, inputColors, outputColors, fragments, group.get());
+	}
+
+	return group.release();
+}
+
+struct SpecConstantTwoIntGraphicsCase
+{
+	const char*		caseName;
+	const char*		scDefinition0;
+	const char*		scDefinition1;
+	const char*		scResultType;
+	const char*		scOperation;
+	deInt32			scActualValue0;
+	deInt32			scActualValue1;
+	const char*		resultOperation;
+	RGBA			expectedColors[4];
+
+					SpecConstantTwoIntGraphicsCase (const char* name,
+											const char* definition0,
+											const char* definition1,
+											const char* resultType,
+											const char* operation,
+											deInt32		value0,
+											deInt32		value1,
+											const char* resultOp,
+											const RGBA	(&output)[4])
+						: caseName			(name)
+						, scDefinition0		(definition0)
+						, scDefinition1		(definition1)
+						, scResultType		(resultType)
+						, scOperation		(operation)
+						, scActualValue0	(value0)
+						, scActualValue1	(value1)
+						, resultOperation	(resultOp)
+	{
+		expectedColors[0] = output[0];
+		expectedColors[1] = output[1];
+		expectedColors[2] = output[2];
+		expectedColors[3] = output[3];
+	}
+};
+
+tcu::TestCaseGroup* createSpecConstantTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> group				(new tcu::TestCaseGroup(testCtx, "opspecconstantop", "Test the OpSpecConstantOp instruction"));
+	vector<SpecConstantTwoIntGraphicsCase>	cases;
+	RGBA							inputColors[4];
+	RGBA							outputColors0[4];
+	RGBA							outputColors1[4];
+	RGBA							outputColors2[4];
+
+	const char	decorations1[]			=
+		"OpDecorate %sc_0  SpecId 0\n"
+		"OpDecorate %sc_1  SpecId 1\n";
+
+	const char	typesAndConstants1[]	=
+		"%sc_0      = OpSpecConstant${SC_DEF0}\n"
+		"%sc_1      = OpSpecConstant${SC_DEF1}\n"
+		"%sc_op     = OpSpecConstantOp ${SC_RESULT_TYPE} ${SC_OP}\n";
+
+	const char	function1[]				=
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param     = OpFunctionParameter %v4f32\n"
+		"%label     = OpLabel\n"
+		"%result    = OpVariable %fp_v4f32 Function\n"
+		"             OpStore %result %param\n"
+		"%gen       = ${GEN_RESULT}\n"
+		"%index     = OpIAdd %i32 %gen %c_i32_1\n"
+		"%loc       = OpAccessChain %fp_f32 %result %index\n"
+		"%val       = OpLoad %f32 %loc\n"
+		"%add       = OpFAdd %f32 %val %c_f32_0_5\n"
+		"             OpStore %loc %add\n"
+		"%ret       = OpLoad %v4f32 %result\n"
+		"             OpReturnValue %ret\n"
+		"             OpFunctionEnd\n";
+
+	inputColors[0] = RGBA(127, 127, 127, 255);
+	inputColors[1] = RGBA(127, 0,   0,   255);
+	inputColors[2] = RGBA(0,   127, 0,   255);
+	inputColors[3] = RGBA(0,   0,   127, 255);
+
+	// Derived from inputColors[x] by adding 128 to inputColors[x][0].
+	outputColors0[0] = RGBA(255, 127, 127, 255);
+	outputColors0[1] = RGBA(255, 0,   0,   255);
+	outputColors0[2] = RGBA(128, 127, 0,   255);
+	outputColors0[3] = RGBA(128, 0,   127, 255);
+
+	// Derived from inputColors[x] by adding 128 to inputColors[x][1].
+	outputColors1[0] = RGBA(127, 255, 127, 255);
+	outputColors1[1] = RGBA(127, 128, 0,   255);
+	outputColors1[2] = RGBA(0,   255, 0,   255);
+	outputColors1[3] = RGBA(0,   128, 127, 255);
+
+	// Derived from inputColors[x] by adding 128 to inputColors[x][2].
+	outputColors2[0] = RGBA(127, 127, 255, 255);
+	outputColors2[1] = RGBA(127, 0,   128, 255);
+	outputColors2[2] = RGBA(0,   127, 128, 255);
+	outputColors2[3] = RGBA(0,   0,   255, 255);
+
+	const char addZeroToSc[]		= "OpIAdd %i32 %c_i32_0 %sc_op";
+	const char selectTrueUsingSc[]	= "OpSelect %i32 %sc_op %c_i32_1 %c_i32_0";
+	const char selectFalseUsingSc[]	= "OpSelect %i32 %sc_op %c_i32_0 %c_i32_1";
+
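+	// %gen below feeds "%index = %gen + 1", which picks the channel that receives +0.5: %gen == -1 selects
+	// outputColors0 (red), 0 selects outputColors1 (green) and 1 selects outputColors2 (blue).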
+	cases.push_back(SpecConstantTwoIntGraphicsCase("iadd",					" %i32 0",		" %i32 0",		"%i32",		"IAdd                 %sc_0 %sc_1",				19,		-20,	addZeroToSc,		outputColors0));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("isub",					" %i32 0",		" %i32 0",		"%i32",		"ISub                 %sc_0 %sc_1",				19,		20,		addZeroToSc,		outputColors0));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("imul",					" %i32 0",		" %i32 0",		"%i32",		"IMul                 %sc_0 %sc_1",				-1,		-1,		addZeroToSc,		outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("sdiv",					" %i32 0",		" %i32 0",		"%i32",		"SDiv                 %sc_0 %sc_1",				-126,	126,	addZeroToSc,		outputColors0));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("udiv",					" %i32 0",		" %i32 0",		"%i32",		"UDiv                 %sc_0 %sc_1",				126,	126,	addZeroToSc,		outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("srem",					" %i32 0",		" %i32 0",		"%i32",		"SRem                 %sc_0 %sc_1",				3,		2,		addZeroToSc,		outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("smod",					" %i32 0",		" %i32 0",		"%i32",		"SMod                 %sc_0 %sc_1",				3,		2,		addZeroToSc,		outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("umod",					" %i32 0",		" %i32 0",		"%i32",		"UMod                 %sc_0 %sc_1",				1001,	500,	addZeroToSc,		outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("bitwiseand",			" %i32 0",		" %i32 0",		"%i32",		"BitwiseAnd           %sc_0 %sc_1",				0x33,	0x0d,	addZeroToSc,		outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("bitwiseor",				" %i32 0",		" %i32 0",		"%i32",		"BitwiseOr            %sc_0 %sc_1",				0,		1,		addZeroToSc,		outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("bitwisexor",			" %i32 0",		" %i32 0",		"%i32",		"BitwiseXor           %sc_0 %sc_1",				0x2e,	0x2f,	addZeroToSc,		outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("shiftrightlogical",		" %i32 0",		" %i32 0",		"%i32",		"ShiftRightLogical    %sc_0 %sc_1",				2,		1,		addZeroToSc,		outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("shiftrightarithmetic",	" %i32 0",		" %i32 0",		"%i32",		"ShiftRightArithmetic %sc_0 %sc_1",				-4,		2,		addZeroToSc,		outputColors0));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("shiftleftlogical",		" %i32 0",		" %i32 0",		"%i32",		"ShiftLeftLogical     %sc_0 %sc_1",				1,		0,		addZeroToSc,		outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("slessthan",				" %i32 0",		" %i32 0",		"%bool",	"SLessThan            %sc_0 %sc_1",				-20,	-10,	selectTrueUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("ulessthan",				" %i32 0",		" %i32 0",		"%bool",	"ULessThan            %sc_0 %sc_1",				10,		20,		selectTrueUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("sgreaterthan",			" %i32 0",		" %i32 0",		"%bool",	"SGreaterThan         %sc_0 %sc_1",				-1000,	50,		selectFalseUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("ugreaterthan",			" %i32 0",		" %i32 0",		"%bool",	"UGreaterThan         %sc_0 %sc_1",				10,		5,		selectTrueUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("slessthanequal",		" %i32 0",		" %i32 0",		"%bool",	"SLessThanEqual       %sc_0 %sc_1",				-10,	-10,	selectTrueUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("ulessthanequal",		" %i32 0",		" %i32 0",		"%bool",	"ULessThanEqual       %sc_0 %sc_1",				50,		100,	selectTrueUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("sgreaterthanequal",		" %i32 0",		" %i32 0",		"%bool",	"SGreaterThanEqual    %sc_0 %sc_1",				-1000,	50,		selectFalseUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("ugreaterthanequal",		" %i32 0",		" %i32 0",		"%bool",	"UGreaterThanEqual    %sc_0 %sc_1",				10,		10,		selectTrueUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("iequal",				" %i32 0",		" %i32 0",		"%bool",	"IEqual               %sc_0 %sc_1",				42,		24,		selectFalseUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("logicaland",			"True %bool",	"True %bool",	"%bool",	"LogicalAnd           %sc_0 %sc_1",				0,		1,		selectFalseUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("logicalor",				"False %bool",	"False %bool",	"%bool",	"LogicalOr            %sc_0 %sc_1",				1,		0,		selectTrueUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("logicalequal",			"True %bool",	"True %bool",	"%bool",	"LogicalEqual         %sc_0 %sc_1",				0,		1,		selectFalseUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("logicalnotequal",		"False %bool",	"False %bool",	"%bool",	"LogicalNotEqual      %sc_0 %sc_1",				1,		0,		selectTrueUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("snegate",				" %i32 0",		" %i32 0",		"%i32",		"SNegate              %sc_0",					-1,		0,		addZeroToSc,		outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("not",					" %i32 0",		" %i32 0",		"%i32",		"Not                  %sc_0",					-2,		0,		addZeroToSc,		outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("logicalnot",			"False %bool",	"False %bool",	"%bool",	"LogicalNot           %sc_0",					1,		0,		selectFalseUsingSc,	outputColors2));
+	cases.push_back(SpecConstantTwoIntGraphicsCase("select",				"False %bool",	" %i32 0",		"%i32",		"Select               %sc_0 %sc_1 %c_i32_0",	1,		1,		addZeroToSc,		outputColors2));
+	// OpSConvert, OpFConvert: these two instructions involve ints/floats of different bitwidths.
+	// \todo[2015-12-1 antiagainst] OpQuantizeToF16
+
+	for (size_t caseNdx = 0; caseNdx < cases.size(); ++caseNdx)
+	{
+		map<string, string>	specializations;
+		map<string, string>	fragments;
+		vector<deInt32>		specConstants;
+
+		specializations["SC_DEF0"]			= cases[caseNdx].scDefinition0;
+		specializations["SC_DEF1"]			= cases[caseNdx].scDefinition1;
+		specializations["SC_RESULT_TYPE"]	= cases[caseNdx].scResultType;
+		specializations["SC_OP"]			= cases[caseNdx].scOperation;
+		specializations["GEN_RESULT"]		= cases[caseNdx].resultOperation;
+
+		fragments["decoration"]				= tcu::StringTemplate(decorations1).specialize(specializations);
+		fragments["pre_main"]				= tcu::StringTemplate(typesAndConstants1).specialize(specializations);
+		fragments["testfun"]				= tcu::StringTemplate(function1).specialize(specializations);
+
+		specConstants.push_back(cases[caseNdx].scActualValue0);
+		specConstants.push_back(cases[caseNdx].scActualValue1);
+
+		createTestsForAllStages(cases[caseNdx].caseName, inputColors, cases[caseNdx].expectedColors, fragments, specConstants, group.get());
+	}
+
+	const char	decorations2[]			=
+		"OpDecorate %sc_0  SpecId 0\n"
+		"OpDecorate %sc_1  SpecId 1\n"
+		"OpDecorate %sc_2  SpecId 2\n";
+
+	const char	typesAndConstants2[]	=
+		"%v3i32     = OpTypeVector %i32 3\n"
+
+		"%sc_0      = OpSpecConstant %i32 0\n"
+		"%sc_1      = OpSpecConstant %i32 0\n"
+		"%sc_2      = OpSpecConstant %i32 0\n"
+
+		"%vec3_0      = OpConstantComposite %v3i32 %c_i32_0 %c_i32_0 %c_i32_0\n"
+		"%sc_vec3_0   = OpSpecConstantOp %v3i32 CompositeInsert  %sc_0        %vec3_0    0\n"     // (sc_0, 0, 0)
+		"%sc_vec3_1   = OpSpecConstantOp %v3i32 CompositeInsert  %sc_1        %vec3_0    1\n"     // (0, sc_1, 0)
+		"%sc_vec3_2   = OpSpecConstantOp %v3i32 CompositeInsert  %sc_2        %vec3_0    2\n"     // (0, 0, sc_2)
+		"%sc_vec3_01  = OpSpecConstantOp %v3i32 VectorShuffle    %sc_vec3_0   %sc_vec3_1 1 0 4\n" // (0,    sc_0, sc_1)
+		"%sc_vec3_012 = OpSpecConstantOp %v3i32 VectorShuffle    %sc_vec3_01  %sc_vec3_2 5 1 2\n" // (sc_2, sc_0, sc_1)
+		"%sc_ext_0    = OpSpecConstantOp %i32   CompositeExtract %sc_vec3_012            0\n"     // sc_2
+		"%sc_ext_1    = OpSpecConstantOp %i32   CompositeExtract %sc_vec3_012            1\n"     // sc_0
+		"%sc_ext_2    = OpSpecConstantOp %i32   CompositeExtract %sc_vec3_012            2\n"     // sc_1
+		"%sc_sub      = OpSpecConstantOp %i32   ISub             %sc_ext_0    %sc_ext_1\n"        // (sc_2 - sc_0)
+		"%sc_final    = OpSpecConstantOp %i32   IMul             %sc_sub      %sc_ext_2\n";       // (sc_2 - sc_0) * sc_1
+
+	const char	function2[]				=
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param     = OpFunctionParameter %v4f32\n"
+		"%label     = OpLabel\n"
+		"%result    = OpVariable %fp_v4f32 Function\n"
+		"             OpStore %result %param\n"
+		"%loc       = OpAccessChain %fp_f32 %result %sc_final\n"
+		"%val       = OpLoad %f32 %loc\n"
+		"%add       = OpFAdd %f32 %val %c_f32_0_5\n"
+		"             OpStore %loc %add\n"
+		"%ret       = OpLoad %v4f32 %result\n"
+		"             OpReturnValue %ret\n"
+		"             OpFunctionEnd\n";
+
+	map<string, string>	fragments;
+	vector<deInt32>		specConstants;
+
+	fragments["decoration"]	= decorations2;
+	fragments["pre_main"]	= typesAndConstants2;
+	fragments["testfun"]	= function2;
+
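+	// With these values, sc_final = (56788 - 56789) * -2 = 2, so the blue channel receives +0.5 (outputColors2).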
+	specConstants.push_back(56789);
+	specConstants.push_back(-2);
+	specConstants.push_back(56788);
+
+	createTestsForAllStages("vector_related", inputColors, outputColors2, fragments, specConstants, group.get());
+
+	return group.release();
+}
+
+tcu::TestCaseGroup* createOpPhiTests(tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> group				(new tcu::TestCaseGroup(testCtx, "opphi", "Test the OpPhi instruction"));
+	RGBA							inputColors[4];
+	RGBA							outputColors1[4];
+	RGBA							outputColors2[4];
+	RGBA							outputColors3[4];
+	map<string, string>				fragments1;
+	map<string, string>				fragments2;
+	map<string, string>				fragments3;
+
+	const char	typesAndConstants1[]	=
+		"%c_f32_p2  = OpConstant %f32 0.2\n"
+		"%c_f32_p4  = OpConstant %f32 0.4\n"
+		"%c_f32_p5  = OpConstant %f32 0.5\n"
+		"%c_f32_p8  = OpConstant %f32 0.8\n";
+
+	// vec4 test_code(vec4 param) {
+	//   vec4 result = param;
+	//   for (int i = 0; i < 4; ++i) {
+	//     float operand;
+	//     switch (i) {
+	//       case 0: operand = .2; break;
+	//       case 1: operand = .5; break;
+	//       case 2: operand = .4; break;
+	//       case 3: operand = .0; break;
+	//       default: break; // unreachable
+	//     }
+	//     result[i] += operand;
+	//   }
+	//   return result;
+	// }
+	const char	function1[]				=
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1    = OpFunctionParameter %v4f32\n"
+		"%lbl       = OpLabel\n"
+		"%iptr      = OpVariable %fp_i32 Function\n"
+		"%result    = OpVariable %fp_v4f32 Function\n"
+		"             OpStore %iptr %c_i32_0\n"
+		"             OpStore %result %param1\n"
+		"             OpBranch %loop\n"
+
+		"%loop      = OpLabel\n"
+		"%ival      = OpLoad %i32 %iptr\n"
+		"%lt_4      = OpSLessThan %bool %ival %c_i32_4\n"
+		"             OpLoopMerge %exit %loop None\n"
+		"             OpBranchConditional %lt_4 %entry %exit\n"
+
+		"%entry     = OpLabel\n"
+		"%loc       = OpAccessChain %fp_f32 %result %ival\n"
+		"%val       = OpLoad %f32 %loc\n"
+		"             OpSelectionMerge %phi None\n"
+		"             OpSwitch %ival %default 0 %case0 1 %case1 2 %case2 3 %case3\n"
+
+		"%case0     = OpLabel\n"
+		"             OpBranch %phi\n"
+		"%case1     = OpLabel\n"
+		"             OpBranch %phi\n"
+		"%case2     = OpLabel\n"
+		"             OpBranch %phi\n"
+		"%case3     = OpLabel\n"
+		"             OpBranch %phi\n"
+
+		"%default   = OpLabel\n"
+		"             OpUnreachable\n"
+
+		"%phi       = OpLabel\n"
+		"%operand   = OpPhi %f32 %c_f32_p4 %case2 %c_f32_p5 %case1 %c_f32_p2 %case0 %c_f32_0 %case3\n" // not in the order of blocks
+		"%add       = OpFAdd %f32 %val %operand\n"
+		"             OpStore %loc %add\n"
+		"%ival_next = OpIAdd %i32 %ival %c_i32_1\n"
+		"             OpStore %iptr %ival_next\n"
+		"             OpBranch %loop\n"
+
+		"%exit      = OpLabel\n"
+		"%ret       = OpLoad %v4f32 %result\n"
+		"             OpReturnValue %ret\n"
+
+		"             OpFunctionEnd\n";
+
+	fragments1["pre_main"]	= typesAndConstants1;
+	fragments1["testfun"]	= function1;
+
+	getHalfColorsFullAlpha(inputColors);
+
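+	// Expected results: +.2, +.5, +.4 and +.0 added to the red, green, blue and alpha channels respectively.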
+	outputColors1[0]		= RGBA(178, 255, 229, 255);
+	outputColors1[1]		= RGBA(178, 127, 102, 255);
+	outputColors1[2]		= RGBA(51,  255, 102, 255);
+	outputColors1[3]		= RGBA(51,  127, 229, 255);
+
+	createTestsForAllStages("out_of_order", inputColors, outputColors1, fragments1, group.get());
+
+	const char	typesAndConstants2[]	=
+		"%c_f32_p2  = OpConstant %f32 0.2\n";
+
+	// Add .4 to the second element of the given parameter.
+	const char	function2[]				=
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param     = OpFunctionParameter %v4f32\n"
+		"%entry     = OpLabel\n"
+		"%result    = OpVariable %fp_v4f32 Function\n"
+		"             OpStore %result %param\n"
+		"%loc       = OpAccessChain %fp_f32 %result %c_i32_1\n"
+		"%val       = OpLoad %f32 %loc\n"
+		"             OpBranch %phi\n"
+
+		"%phi        = OpLabel\n"
+		"%step       = OpPhi %i32 %c_i32_0  %entry %step_next  %phi\n"
+		"%accum      = OpPhi %f32 %val      %entry %accum_next %phi\n"
+		"%step_next  = OpIAdd %i32 %step  %c_i32_1\n"
+		"%accum_next = OpFAdd %f32 %accum %c_f32_p2\n"
+		"%still_loop = OpSLessThan %bool %step %c_i32_2\n"
+		"              OpLoopMerge %exit %phi None\n"
+		"              OpBranchConditional %still_loop %phi %exit\n"
+
+		"%exit       = OpLabel\n"
+		"              OpStore %loc %accum\n"
+		"%ret        = OpLoad %v4f32 %result\n"
+		"              OpReturnValue %ret\n"
+
+		"              OpFunctionEnd\n";
+
+	fragments2["pre_main"]	= typesAndConstants2;
+	fragments2["testfun"]	= function2;
+
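+	// Expected results: 0.4 (two loop iterations of +0.2) added to the green channel only.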
+	outputColors2[0]			= RGBA(127, 229, 127, 255);
+	outputColors2[1]			= RGBA(127, 102, 0,   255);
+	outputColors2[2]			= RGBA(0,   229, 0,   255);
+	outputColors2[3]			= RGBA(0,   102, 127, 255);
+
+	createTestsForAllStages("induction", inputColors, outputColors2, fragments2, group.get());
+
+	const char	typesAndConstants3[]	=
+		"%true      = OpConstantTrue %bool\n"
+		"%false     = OpConstantFalse %bool\n"
+		"%c_f32_p2  = OpConstant %f32 0.2\n";
+
+	// Swap the second and the third element of the given parameter.
+	const char	function3[]				=
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param     = OpFunctionParameter %v4f32\n"
+		"%entry     = OpLabel\n"
+		"%result    = OpVariable %fp_v4f32 Function\n"
+		"             OpStore %result %param\n"
+		"%a_loc     = OpAccessChain %fp_f32 %result %c_i32_1\n"
+		"%a_init    = OpLoad %f32 %a_loc\n"
+		"%b_loc     = OpAccessChain %fp_f32 %result %c_i32_2\n"
+		"%b_init    = OpLoad %f32 %b_loc\n"
+		"             OpBranch %phi\n"
+
+		"%phi        = OpLabel\n"
+		"%still_loop = OpPhi %bool %true   %entry %false  %phi\n"
+		"%a_next     = OpPhi %f32  %a_init %entry %b_next %phi\n"
+		"%b_next     = OpPhi %f32  %b_init %entry %a_next %phi\n"
+		"              OpLoopMerge %exit %phi None\n"
+		"              OpBranchConditional %still_loop %phi %exit\n"
+
+		"%exit       = OpLabel\n"
+		"              OpStore %a_loc %a_next\n"
+		"              OpStore %b_loc %b_next\n"
+		"%ret        = OpLoad %v4f32 %result\n"
+		"              OpReturnValue %ret\n"
+
+		"              OpFunctionEnd\n";
+
+	fragments3["pre_main"]	= typesAndConstants3;
+	fragments3["testfun"]	= function3;
+
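+	// Expected results: the green and blue channels of the input are swapped.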
+	outputColors3[0]			= RGBA(127, 127, 127, 255);
+	outputColors3[1]			= RGBA(127, 0,   0,   255);
+	outputColors3[2]			= RGBA(0,   0,   127, 255);
+	outputColors3[3]			= RGBA(0,   127, 0,   255);
+
+	createTestsForAllStages("swap", inputColors, outputColors3, fragments3, group.get());
+
+	return group.release();
+}
+
+tcu::TestCaseGroup* createNoContractionTests(tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> group			(new tcu::TestCaseGroup(testCtx, "nocontraction", "Test the NoContraction decoration"));
+	RGBA							inputColors[4];
+	RGBA							outputColors[4];
+
+	// With NoContraction, (1 + 2^-23) * (1 - 2^-23) - 1 should be conducted as a multiplication and an addition separately.
+	// For the multiplication, the exact result is 1 - 2^-46, which is beyond the precision of a 32-bit float (it only has a
+	// 23-bit fraction), so it is rounded to either 1 or 0x1.fffffep-1 (= 1 - 2^-24). The final result is then 0 or -0x1p-24.
+	// With contraction (e.g. a fused multiply-add), the result would instead be -2^-46, a normalized number perfectly
+	// representable as a 32-bit float.
+	const char						constantsAndTypes[]	 =
+		"%c_vec4_0       = OpConstantComposite %v4f32 %c_f32_0 %c_f32_0 %c_f32_0 %c_f32_1\n"
+		"%c_vec4_1       = OpConstantComposite %v4f32 %c_f32_1 %c_f32_1 %c_f32_1 %c_f32_1\n"
+		"%c_f32_1pl2_23  = OpConstant %f32 0x1.000002p+0\n" // 1 + 2^-23
+		"%c_f32_1mi2_23  = OpConstant %f32 0x1.fffffcp-1\n" // 1 - 2^-23
+		"%c_f32_n1pn24   = OpConstant %f32 -0x1p-24\n"
+		;
+
+	const char						function[]	 =
+		"%test_code      = OpFunction %v4f32 None %v4f32_function\n"
+		"%param          = OpFunctionParameter %v4f32\n"
+		"%label          = OpLabel\n"
+		"%var1           = OpVariable %fp_f32 Function %c_f32_1pl2_23\n"
+		"%var2           = OpVariable %fp_f32 Function\n"
+		"%red            = OpCompositeExtract %f32 %param 0\n"
+		"%plus_red       = OpFAdd %f32 %c_f32_1mi2_23 %red\n"
+		"                  OpStore %var2 %plus_red\n"
+		"%val1           = OpLoad %f32 %var1\n"
+		"%val2           = OpLoad %f32 %var2\n"
+		"%mul            = OpFMul %f32 %val1 %val2\n"
+		"%add            = OpFAdd %f32 %mul %c_f32_n1\n"
+		"%is0            = OpFOrdEqual %bool %add %c_f32_0\n"
+		"%isn1n24         = OpFOrdEqual %bool %add %c_f32_n1pn24\n"
+		"%success        = OpLogicalOr %bool %is0 %isn1n24\n"
+		"%v4success      = OpCompositeConstruct %v4bool %success %success %success %success\n"
+		"%ret            = OpSelect %v4f32 %v4success %c_vec4_0 %c_vec4_1\n"
+		"                  OpReturnValue %ret\n"
+		"                  OpFunctionEnd\n";
+
+	struct CaseNameDecoration
+	{
+		string name;
+		string decoration;
+	};
+
+	CaseNameDecoration tests[] = {
+		{"multiplication",	"OpDecorate %mul NoContraction"},
+		{"addition",		"OpDecorate %add NoContraction"},
+		{"both",			"OpDecorate %mul NoContraction\nOpDecorate %add NoContraction"},
+	};
+
+	getHalfColorsFullAlpha(inputColors);
+
+	for (deUint8 idx = 0; idx < 4; ++idx)
+	{
+		inputColors[idx].setRed(0);
+		outputColors[idx] = RGBA(0, 0, 0, 255);
+	}
+
+	for (size_t testNdx = 0; testNdx < sizeof(tests) / sizeof(CaseNameDecoration); ++testNdx)
+	{
+		map<string, string> fragments;
+
+		fragments["decoration"] = tests[testNdx].decoration;
+		fragments["pre_main"] = constantsAndTypes;
+		fragments["testfun"] = function;
+
+		createTestsForAllStages(tests[testNdx].name, inputColors, outputColors, fragments, group.get());
+	}
+
+	return group.release();
+}
+
+tcu::TestCaseGroup* createMemoryAccessTests(tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> memoryAccessTests (new tcu::TestCaseGroup(testCtx, "opmemoryaccess", "Memory Semantics"));
+	RGBA							colors[4];
+
+	const char						constantsAndTypes[]	 =
+		"%c_a2f32_1         = OpConstantComposite %a2f32 %c_f32_1 %c_f32_1\n"
+		"%fp_a2f32          = OpTypePointer Function %a2f32\n"
+		"%stype             = OpTypeStruct  %v4f32 %a2f32 %f32\n"
+		"%fp_stype          = OpTypePointer Function %stype\n";
+
+	const char						function[]	 =
+		"%test_code         = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1            = OpFunctionParameter %v4f32\n"
+		"%lbl               = OpLabel\n"
+		"%v1                = OpVariable %fp_v4f32 Function\n"
+		"%v2                = OpVariable %fp_a2f32 Function\n"
+		"%v3                = OpVariable %fp_f32 Function\n"
+		"%v                 = OpVariable %fp_stype Function\n"
+		"%vv                = OpVariable %fp_stype Function\n"
+		"%vvv               = OpVariable %fp_f32 Function\n"
+
+		"                     OpStore %v1 %c_v4f32_1_1_1_1\n"
+		"                     OpStore %v2 %c_a2f32_1\n"
+		"                     OpStore %v3 %c_f32_1\n"
+
+		"%p_v4f32          = OpAccessChain %fp_v4f32 %v %c_u32_0\n"
+		"%p_a2f32          = OpAccessChain %fp_a2f32 %v %c_u32_1\n"
+		"%p_f32            = OpAccessChain %fp_f32 %v %c_u32_2\n"
+		"%v1_v             = OpLoad %v4f32 %v1 ${access_type}\n"
+		"%v2_v             = OpLoad %a2f32 %v2 ${access_type}\n"
+		"%v3_v             = OpLoad %f32 %v3 ${access_type}\n"
+
+		"                    OpStore %p_v4f32 %v1_v ${access_type}\n"
+		"                    OpStore %p_a2f32 %v2_v ${access_type}\n"
+		"                    OpStore %p_f32 %v3_v ${access_type}\n"
+
+		"                    OpCopyMemory %vv %v ${access_type}\n"
+		"                    OpCopyMemory %vvv %p_f32 ${access_type}\n"
+
+		"%p_f32_2          = OpAccessChain %fp_f32 %vv %c_u32_2\n"
+		"%v_f32_2          = OpLoad %f32 %p_f32_2\n"
+		"%v_f32_3          = OpLoad %f32 %vvv\n"
+
+		"%ret1             = OpVectorTimesScalar %v4f32 %param1 %v_f32_2\n"
+		"%ret2             = OpVectorTimesScalar %v4f32 %ret1 %v_f32_3\n"
+		"                    OpReturnValue %ret2\n"
+		"                    OpFunctionEnd\n";
+
+	struct NameMemoryAccess
+	{
+		string name;
+		string accessType;
+	};
+
+	NameMemoryAccess tests[] =
+	{
+		{ "none", "" },
+		{ "volatile", "Volatile" },
+		{ "aligned",  "Aligned 1" },
+		{ "volatile_aligned",  "Volatile|Aligned 1" },
+		{ "nontemporal_aligned",  "Nontemporal|Aligned 1" },
+		{ "volatile_nontemporal",  "Volatile|Nontemporal" },
+		{ "volatile_nontermporal_aligned",  "Volatile|Nontemporal|Aligned 1" },
+	};
+
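+	// The shader copies 1.0 through memory and multiplies the input by it, so the expected output equals the input.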
+	getHalfColorsFullAlpha(colors);
+
+	for (size_t testNdx = 0; testNdx < sizeof(tests) / sizeof(NameMemoryAccess); ++testNdx)
+	{
+		map<string, string> fragments;
+		map<string, string> memoryAccess;
+		memoryAccess["access_type"] = tests[testNdx].accessType;
+
+		fragments["pre_main"] = constantsAndTypes;
+		fragments["testfun"] = tcu::StringTemplate(function).specialize(memoryAccess);
+		createTestsForAllStages(tests[testNdx].name, colors, colors, fragments, memoryAccessTests.get());
+	}
+	return memoryAccessTests.release();
+}
+
+tcu::TestCaseGroup* createOpUndefTests(tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>		opUndefTests		 (new tcu::TestCaseGroup(testCtx, "opundef", "Test OpUndef"));
+	RGBA								defaultColors[4];
+	map<string, string>					fragments;
+	getDefaultColors(defaultColors);
+
+	// First, simple cases that don't do anything with the OpUndef result.
+	fragments["testfun"] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%label_testfun = OpLabel\n"
+		"%undef = OpUndef %type\n"
+		"OpReturnValue %param1\n"
+		"OpFunctionEnd\n"
+		;
+	struct NameCodePair { string name, code; };
+	const NameCodePair tests[] =
+	{
+		{"bool", "%type = OpTypeBool"},
+		{"vec2uint32", "%type = OpTypeVector %u32 2"},
+		{"image", "%type = OpTypeImage %f32 2D 0 0 0 1 Unknown"},
+		{"sampler", "%type = OpTypeSampler"},
+		{"sampledimage", "%img = OpTypeImage %f32 2D 0 0 0 1 Unknown\n" "%type = OpTypeSampledImage %img"},
+		{"pointer", "%type = OpTypePointer Function %i32"},
+		{"runtimearray", "%type = OpTypeRuntimeArray %f32"},
+		{"array", "%c_u32_100 = OpConstant %u32 100\n" "%type = OpTypeArray %i32 %c_u32_100"},
+		{"struct", "%type = OpTypeStruct %f32 %i32 %u32"}};
+	for (size_t testNdx = 0; testNdx < sizeof(tests) / sizeof(NameCodePair); ++testNdx)
+	{
+		fragments["pre_main"] = tests[testNdx].code;
+		createTestsForAllStages(tests[testNdx].name, defaultColors, defaultColors, fragments, opUndefTests.get());
+	}
+	fragments.clear();
+
+	fragments["testfun"] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%label_testfun = OpLabel\n"
+		"%undef = OpUndef %f32\n"
+		"%zero = OpFMul %f32 %undef %c_f32_0\n"
+		"%a = OpVectorExtractDynamic %f32 %param1 %c_i32_0\n"
+		"%b = OpFAdd %f32 %a %zero\n"
+		"%ret = OpVectorInsertDynamic %v4f32 %param1 %b %c_i32_0\n"
+		"OpReturnValue %ret\n"
+		"OpFunctionEnd\n"
+		;
+	createTestsForAllStages("float32", defaultColors, defaultColors, fragments, opUndefTests.get());
+
+	fragments["testfun"] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%label_testfun = OpLabel\n"
+		"%undef = OpUndef %i32\n"
+		"%zero = OpIMul %i32 %undef %c_i32_0\n"
+		"%a = OpVectorExtractDynamic %f32 %param1 %zero\n"
+		"%ret = OpVectorInsertDynamic %v4f32 %param1 %a %c_i32_0\n"
+		"OpReturnValue %ret\n"
+		"OpFunctionEnd\n"
+		;
+	createTestsForAllStages("sint32", defaultColors, defaultColors, fragments, opUndefTests.get());
+
+	fragments["testfun"] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%label_testfun = OpLabel\n"
+		"%undef = OpUndef %u32\n"
+		"%zero = OpIMul %u32 %undef %c_i32_0\n"
+		"%a = OpVectorExtractDynamic %f32 %param1 %zero\n"
+		"%ret = OpVectorInsertDynamic %v4f32 %param1 %a %c_i32_0\n"
+		"OpReturnValue %ret\n"
+		"OpFunctionEnd\n"
+		;
+	createTestsForAllStages("uint32", defaultColors, defaultColors, fragments, opUndefTests.get());
+
+	fragments["testfun"] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%label_testfun = OpLabel\n"
+		"%undef = OpUndef %v4f32\n"
+		"%vzero = OpVectorTimesScalar %v4f32 %undef %c_f32_0\n"
+		"%zero_0 = OpVectorExtractDynamic %f32 %vzero %c_i32_0\n"
+		"%zero_1 = OpVectorExtractDynamic %f32 %vzero %c_i32_1\n"
+		"%zero_2 = OpVectorExtractDynamic %f32 %vzero %c_i32_2\n"
+		"%zero_3 = OpVectorExtractDynamic %f32 %vzero %c_i32_3\n"
+		"%param1_0 = OpVectorExtractDynamic %f32 %param1 %c_i32_0\n"
+		"%param1_1 = OpVectorExtractDynamic %f32 %param1 %c_i32_1\n"
+		"%param1_2 = OpVectorExtractDynamic %f32 %param1 %c_i32_2\n"
+		"%param1_3 = OpVectorExtractDynamic %f32 %param1 %c_i32_3\n"
+		"%sum_0 = OpFAdd %f32 %param1_0 %zero_0\n"
+		"%sum_1 = OpFAdd %f32 %param1_1 %zero_1\n"
+		"%sum_2 = OpFAdd %f32 %param1_2 %zero_2\n"
+		"%sum_3 = OpFAdd %f32 %param1_3 %zero_3\n"
+		"%ret3 = OpVectorInsertDynamic %v4f32 %param1 %sum_3 %c_i32_3\n"
+		"%ret2 = OpVectorInsertDynamic %v4f32 %ret3 %sum_2 %c_i32_2\n"
+		"%ret1 = OpVectorInsertDynamic %v4f32 %ret2 %sum_1 %c_i32_1\n"
+		"%ret = OpVectorInsertDynamic %v4f32 %ret1 %sum_0 %c_i32_0\n"
+		"OpReturnValue %ret\n"
+		"OpFunctionEnd\n"
+		;
+	createTestsForAllStages("vec4float32", defaultColors, defaultColors, fragments, opUndefTests.get());
+
+	fragments["pre_main"] =
+		"%v2f32 = OpTypeVector %f32 2\n"
+		"%m2x2f32 = OpTypeMatrix %v2f32 2\n";
+	fragments["testfun"] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%label_testfun = OpLabel\n"
+		"%undef = OpUndef %m2x2f32\n"
+		"%mzero = OpMatrixTimesScalar %m2x2f32 %undef %c_f32_0\n"
+		"%zero_0 = OpCompositeExtract %f32 %mzero 0 0\n"
+		"%zero_1 = OpCompositeExtract %f32 %mzero 0 1\n"
+		"%zero_2 = OpCompositeExtract %f32 %mzero 1 0\n"
+		"%zero_3 = OpCompositeExtract %f32 %mzero 1 1\n"
+		"%param1_0 = OpVectorExtractDynamic %f32 %param1 %c_i32_0\n"
+		"%param1_1 = OpVectorExtractDynamic %f32 %param1 %c_i32_1\n"
+		"%param1_2 = OpVectorExtractDynamic %f32 %param1 %c_i32_2\n"
+		"%param1_3 = OpVectorExtractDynamic %f32 %param1 %c_i32_3\n"
+		"%sum_0 = OpFAdd %f32 %param1_0 %zero_0\n"
+		"%sum_1 = OpFAdd %f32 %param1_1 %zero_1\n"
+		"%sum_2 = OpFAdd %f32 %param1_2 %zero_2\n"
+		"%sum_3 = OpFAdd %f32 %param1_3 %zero_3\n"
+		"%ret3 = OpVectorInsertDynamic %v4f32 %param1 %sum_3 %c_i32_3\n"
+		"%ret2 = OpVectorInsertDynamic %v4f32 %ret3 %sum_2 %c_i32_2\n"
+		"%ret1 = OpVectorInsertDynamic %v4f32 %ret2 %sum_1 %c_i32_1\n"
+		"%ret = OpVectorInsertDynamic %v4f32 %ret1 %sum_0 %c_i32_0\n"
+		"OpReturnValue %ret\n"
+		"OpFunctionEnd\n"
+		;
+	createTestsForAllStages("matrix", defaultColors, defaultColors, fragments, opUndefTests.get());
+
+	return opUndefTests.release();
+}
+
+void createOpQuantizeSingleOptionTests(tcu::TestCaseGroup* testCtx)
+{
+	const RGBA		inputColors[4]		=
+	{
+		RGBA(0,		0,		0,		255),
+		RGBA(0,		0,		255,	255),
+		RGBA(0,		255,	0,		255),
+		RGBA(0,		255,	255,	255)
+	};
+
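+	// Each case is expected to satisfy its %cond check and therefore write pure red.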
+	const RGBA		expectedColors[4]	=
+	{
+		RGBA(255,	 0,		 0,		 255),
+		RGBA(255,	 0,		 0,		 255),
+		RGBA(255,	 0,		 0,		 255),
+		RGBA(255,	 0,		 0,		 255)
+	};
+
+	const struct SingleFP16Possibility
+	{
+		const char* name;
+		const char* constant;  // Value to assign to %test_constant.
+		float		valueAsFloat;
+		const char* condition; // Must assign to %cond an expression that evaluates to true after %c = OpQuantizeToF16(%test_constant + 0).
+	}				tests[]				=
+	{
+		{
+			"negative",
+			"-0x1.3p1\n",
+			-constructNormalizedFloat(1, 0x300000),
+			"%cond = OpFOrdEqual %bool %c %test_constant\n"
+		}, // -2.375
+		{
+			"positive",
+			"0x1.0p7\n",
+			constructNormalizedFloat(7, 0x000000),
+			"%cond = OpFOrdEqual %bool %c %test_constant\n"
+		},  // +128
+		// SPIR-V requires that OpQuantizeToF16 flushes
+		// any numbers that would end up denormalized in F16 to zero.
+		{
+			"denorm",
+			"0x0.0006p-126\n",
+			std::ldexp(1.5f, -140),
+			"%cond = OpFOrdEqual %bool %c %c_f32_0\n"
+		},  // denorm
+		{
+			"negative_denorm",
+			"-0x0.0006p-126\n",
+			-std::ldexp(1.5f, -140),
+			"%cond = OpFOrdEqual %bool %c %c_f32_0\n"
+		}, // -denorm
+		{
+			"too_small",
+			"0x1.0p-16\n",
+			std::ldexp(1.0f, -16),
+			"%cond = OpFOrdEqual %bool %c %c_f32_0\n"
+		},     // too small positive
+		{
+			"negative_too_small",
+			"-0x1.0p-32\n",
+			-std::ldexp(1.0f, -32),
+			"%cond = OpFOrdEqual %bool %c %c_f32_0\n"
+		},      // too small negative
+		{
+			"negative_inf",
+			"-0x1.0p128\n",
+			-std::ldexp(1.0f, 128),
+
+			"%gz = OpFOrdLessThan %bool %c %c_f32_0\n"
+			"%inf = OpIsInf %bool %c\n"
+			"%cond = OpLogicalAnd %bool %gz %inf\n"
+		},     // -inf to -inf
+		{
+			"inf",
+			"0x1.0p128\n",
+			std::ldexp(1.0f, 128),
+
+			"%gz = OpFOrdGreaterThan %bool %c %c_f32_0\n"
+			"%inf = OpIsInf %bool %c\n"
+			"%cond = OpLogicalAnd %bool %gz %inf\n"
+		},     // +inf to +inf
+		{
+			"round_to_negative_inf",
+			"-0x1.0p32\n",
+			-std::ldexp(1.0f, 32),
+
+			"%gz = OpFOrdLessThan %bool %c %c_f32_0\n"
+			"%inf = OpIsInf %bool %c\n"
+			"%cond = OpLogicalAnd %bool %gz %inf\n"
+		},     // round to -inf
+		{
+			"round_to_inf",
+			"0x1.0p16\n",
+			std::ldexp(1.0f, 16),
+
+			"%gz = OpFOrdGreaterThan %bool %c %c_f32_0\n"
+			"%inf = OpIsInf %bool %c\n"
+			"%cond = OpLogicalAnd %bool %gz %inf\n"
+		},     // round to +inf
+		{
+			"nan",
+			"0x1.1p128\n",
+			std::numeric_limits<float>::quiet_NaN(),
+
+			// Test for any NaN value, as NaNs are not preserved
+			"%direct_quant = OpQuantizeToF16 %f32 %test_constant\n"
+			"%cond = OpIsNan %bool %direct_quant\n"
+		}, // nan
+		{
+			"negative_nan",
+			"-0x1.0001p128\n",
+			std::numeric_limits<float>::quiet_NaN(),
+
+			// Test for any NaN value, as NaNs are not preserved
+			"%direct_quant = OpQuantizeToF16 %f32 %test_constant\n"
+			"%cond = OpIsNan %bool %direct_quant\n"
+		} // -nan
+	};
+	const char*		constants			=
+		"%test_constant = OpConstant %f32 ";  // The value will be test.constant.
+
+	StringTemplate	function			(
+		"%test_code     = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1        = OpFunctionParameter %v4f32\n"
+		"%label_testfun = OpLabel\n"
+		"%a             = OpVectorExtractDynamic %f32 %param1 %c_i32_0\n"
+		"%b             = OpFAdd %f32 %test_constant %a\n"
+		"%c             = OpQuantizeToF16 %f32 %b\n"
+		"${condition}\n"
+		"%v4cond        = OpCompositeConstruct %v4bool %cond %cond %cond %cond\n"
+		"%retval        = OpSelect %v4f32 %v4cond %c_v4f32_1_0_0_1 %param1\n"
+		"                 OpReturnValue %retval\n"
+		"OpFunctionEnd\n"
+	);
+
+	const char*		specDecorations		= "OpDecorate %test_constant SpecId 0\n";
+	const char*		specConstants		=
+			"%test_constant = OpSpecConstant %f32 0.\n"
+			"%c             = OpSpecConstantOp %f32 QuantizeToF16 %test_constant\n";
+
+	StringTemplate	specConstantFunction(
+		"%test_code     = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1        = OpFunctionParameter %v4f32\n"
+		"%label_testfun = OpLabel\n"
+		"${condition}\n"
+		"%v4cond        = OpCompositeConstruct %v4bool %cond %cond %cond %cond\n"
+		"%retval        = OpSelect %v4f32 %v4cond %c_v4f32_1_0_0_1 %param1\n"
+		"                 OpReturnValue %retval\n"
+		"OpFunctionEnd\n"
+	);
+
+	for (size_t idx = 0; idx < (sizeof(tests)/sizeof(tests[0])); ++idx)
+	{
+		map<string, string>								codeSpecialization;
+		map<string, string>								fragments;
+		codeSpecialization["condition"]					= tests[idx].condition;
+		fragments["testfun"]							= function.specialize(codeSpecialization);
+		fragments["pre_main"]							= string(constants) + tests[idx].constant + "\n";
+		createTestsForAllStages(tests[idx].name, inputColors, expectedColors, fragments, testCtx);
+	}
+
+	for (size_t idx = 0; idx < (sizeof(tests)/sizeof(tests[0])); ++idx)
+	{
+		map<string, string>								codeSpecialization;
+		map<string, string>								fragments;
+		vector<deInt32>									passConstants;
+		deInt32											specConstant;
+
+		codeSpecialization["condition"]					= tests[idx].condition;
+		fragments["testfun"]							= specConstantFunction.specialize(codeSpecialization);
+		fragments["decoration"]							= specDecorations;
+		fragments["pre_main"]							= specConstants;
+
+		memcpy(&specConstant, &tests[idx].valueAsFloat, sizeof(float));
+		passConstants.push_back(specConstant);
+
+		createTestsForAllStages(string("spec_const_") + tests[idx].name, inputColors, expectedColors, fragments, passConstants, testCtx);
+	}
+}
+
+void createOpQuantizeTwoPossibilityTests(tcu::TestCaseGroup* testCtx)
+{
+	RGBA inputColors[4] =  {
+		RGBA(0,		0,		0,		255),
+		RGBA(0,		0,		255,	255),
+		RGBA(0,		255,	0,		255),
+		RGBA(0,		255,	255,	255)
+	};
+
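+	// As in the single-option tests, each case is expected to observe one of its two allowed roundings and write pure red.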
+	RGBA expectedColors[4] =
+	{
+		RGBA(255,	 0,		 0,		 255),
+		RGBA(255,	 0,		 0,		 255),
+		RGBA(255,	 0,		 0,		 255),
+		RGBA(255,	 0,		 0,		 255)
+	};
+
+	struct DualFP16Possibility
+	{
+		const char* name;
+		const char* input;
+		float		inputAsFloat;
+		const char* possibleOutput1;
+		const char* possibleOutput2;
+	} tests[] = {
+		{
+			"positive_round_up_or_round_down",
+			"0x1.3003p8",
+			constructNormalizedFloat(8, 0x300300),
+			"0x1.304p8",
+			"0x1.3p8"
+		},
+		{
+			"negative_round_up_or_round_down",
+			"-0x1.6008p-7",
+			-constructNormalizedFloat(-7, 0x600800),
+			"-0x1.6p-7",
+			"-0x1.604p-7"
+		},
+		{
+			"carry_bit",
+			"0x1.01ep2",
+			constructNormalizedFloat(2, 0x01e000),
+			"0x1.01cp2",
+			"0x1.02p2"
+		},
+		{
+			"carry_to_exponent",
+			"0x1.ffep1",
+			constructNormalizedFloat(1, 0xffe000),
+			"0x1.ffcp1",
+			"0x1.0p2"
+		},
+	};
+	StringTemplate constants (
+		"%input_const = OpConstant %f32 ${input}\n"
+		"%possible_solution1 = OpConstant %f32 ${output1}\n"
+		"%possible_solution2 = OpConstant %f32 ${output2}\n"
+		);
+
+	StringTemplate specConstants (
+		"%input_const = OpSpecConstant %f32 0.\n"
+		"%possible_solution1 = OpConstant %f32 ${output1}\n"
+		"%possible_solution2 = OpConstant %f32 ${output2}\n"
+	);
+
+	const char* specDecorations = "OpDecorate %input_const  SpecId 0\n";
+
+	const char* function  =
+		"%test_code     = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1        = OpFunctionParameter %v4f32\n"
+		"%label_testfun = OpLabel\n"
+		"%a             = OpVectorExtractDynamic %f32 %param1 %c_i32_0\n"
+		// For the purposes of this test we assume that 0.f will always get
+		// faithfully passed through the pipeline stages.
+		"%b             = OpFAdd %f32 %input_const %a\n"
+		"%c             = OpQuantizeToF16 %f32 %b\n"
+		"%eq_1          = OpFOrdEqual %bool %c %possible_solution1\n"
+		"%eq_2          = OpFOrdEqual %bool %c %possible_solution2\n"
+		"%cond          = OpLogicalOr %bool %eq_1 %eq_2\n"
+		"%v4cond        = OpCompositeConstruct %v4bool %cond %cond %cond %cond\n"
+		"%retval        = OpSelect %v4f32 %v4cond %c_v4f32_1_0_0_1 %param1"
+		"                 OpReturnValue %retval\n"
+		"OpFunctionEnd\n";
+
+	for(size_t idx = 0; idx < (sizeof(tests)/sizeof(tests[0])); ++idx) {
+		map<string, string>									fragments;
+		map<string, string>									constantSpecialization;
+
+		constantSpecialization["input"]						= tests[idx].input;
+		constantSpecialization["output1"]					= tests[idx].possibleOutput1;
+		constantSpecialization["output2"]					= tests[idx].possibleOutput2;
+		fragments["testfun"]								= function;
+		fragments["pre_main"]								= constants.specialize(constantSpecialization);
+		createTestsForAllStages(tests[idx].name, inputColors, expectedColors, fragments, testCtx);
+	}
+
+	for(size_t idx = 0; idx < (sizeof(tests)/sizeof(tests[0])); ++idx) {
+		map<string, string>									fragments;
+		map<string, string>									constantSpecialization;
+		vector<deInt32>										passConstants;
+		deInt32												specConstant;
+
+		constantSpecialization["output1"]					= tests[idx].possibleOutput1;
+		constantSpecialization["output2"]					= tests[idx].possibleOutput2;
+		fragments["testfun"]								= function;
+		fragments["decoration"]								= specDecorations;
+		fragments["pre_main"]								= specConstants.specialize(constantSpecialization);
+
+		memcpy(&specConstant, &tests[idx].inputAsFloat, sizeof(float));
+		passConstants.push_back(specConstant);
+
+		createTestsForAllStages(string("spec_const_") + tests[idx].name, inputColors, expectedColors, fragments, passConstants, testCtx);
+	}
+}
+
+tcu::TestCaseGroup* createOpQuantizeTests(tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> opQuantizeTests (new tcu::TestCaseGroup(testCtx, "opquantize", "Test OpQuantizeToF16"));
+	createOpQuantizeSingleOptionTests(opQuantizeTests.get());
+	createOpQuantizeTwoPossibilityTests(opQuantizeTests.get());
+	return opQuantizeTests.release();
+}
+
+struct ShaderPermutation
+{
+	deUint8 vertexPermutation;
+	deUint8 geometryPermutation;
+	deUint8 tesscPermutation;
+	deUint8 tessePermutation;
+	deUint8 fragmentPermutation;
+};
+
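+// Decode the five low bits of the index into per-stage variant selectors: bit 4 selects the vertex shader
+// variant, bit 3 geometry, bit 2 tessellation control, bit 1 tessellation evaluation and bit 0 fragment.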
+ShaderPermutation getShaderPermutation(deUint8 inputValue)
+{
+	ShaderPermutation	permutation =
+	{
+		static_cast<deUint8>(inputValue & 0x10? 1u: 0u),
+		static_cast<deUint8>(inputValue & 0x08? 1u: 0u),
+		static_cast<deUint8>(inputValue & 0x04? 1u: 0u),
+		static_cast<deUint8>(inputValue & 0x02? 1u: 0u),
+		static_cast<deUint8>(inputValue & 0x01? 1u: 0u)
+	};
+	return permutation;
+}
+
+tcu::TestCaseGroup* createModuleTests(tcu::TestContext& testCtx)
+{
+	RGBA								defaultColors[4];
+	RGBA								invertedColors[4];
+	de::MovePtr<tcu::TestCaseGroup>		moduleTests			(new tcu::TestCaseGroup(testCtx, "module", "Multiple entry points into shaders"));
+
+	const ShaderElement					combinedPipeline[]	=
+	{
+		ShaderElement("module", "main", VK_SHADER_STAGE_VERTEX_BIT),
+		ShaderElement("module", "main", VK_SHADER_STAGE_GEOMETRY_BIT),
+		ShaderElement("module", "main", VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT),
+		ShaderElement("module", "main", VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT),
+		ShaderElement("module", "main", VK_SHADER_STAGE_FRAGMENT_BIT)
+	};
+
+	getDefaultColors(defaultColors);
+	getInvertedDefaultColors(invertedColors);
+	addFunctionCaseWithPrograms<InstanceContext>(moduleTests.get(), "same_module", "", createCombinedModule, runAndVerifyDefaultPipeline, createInstanceContext(combinedPipeline, map<string, string>()));
+
+	const char* numbers[] =
+	{
+		"1", "2"
+	};
+
+	for (deInt8 idx = 0; idx < 32; ++idx)
+	{
+		ShaderPermutation			permutation		= getShaderPermutation(idx);
+		string						name			= string("vert") + numbers[permutation.vertexPermutation] + "_geom" + numbers[permutation.geometryPermutation] + "_tessc" + numbers[permutation.tesscPermutation] + "_tesse" + numbers[permutation.tessePermutation] + "_frag" + numbers[permutation.fragmentPermutation];
+		const ShaderElement			pipeline[]		=
+		{
+			ShaderElement("vert",	string("vert") +	numbers[permutation.vertexPermutation],		VK_SHADER_STAGE_VERTEX_BIT),
+			ShaderElement("geom",	string("geom") +	numbers[permutation.geometryPermutation],	VK_SHADER_STAGE_GEOMETRY_BIT),
+			ShaderElement("tessc",	string("tessc") +	numbers[permutation.tesscPermutation],		VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT),
+			ShaderElement("tesse",	string("tesse") +	numbers[permutation.tessePermutation],		VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT),
+			ShaderElement("frag",	string("frag") +	numbers[permutation.fragmentPermutation],	VK_SHADER_STAGE_FRAGMENT_BIT)
+		};
+
+		// If there are an even number of swaps, then it should be no-op.
+		// If there are an odd number, the color should be flipped.
+		if ((permutation.vertexPermutation + permutation.geometryPermutation + permutation.tesscPermutation + permutation.tessePermutation + permutation.fragmentPermutation) % 2 == 0)
+		{
+			addFunctionCaseWithPrograms<InstanceContext>(moduleTests.get(), name, "", createMultipleEntries, runAndVerifyDefaultPipeline, createInstanceContext(pipeline, defaultColors, defaultColors, map<string, string>()));
+		}
+		else
+		{
+			addFunctionCaseWithPrograms<InstanceContext>(moduleTests.get(), name, "", createMultipleEntries, runAndVerifyDefaultPipeline, createInstanceContext(pipeline, defaultColors, invertedColors, map<string, string>()));
+		}
+	}
+	return moduleTests.release();
+}
+
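+// Loop tests written directly in SPIR-V: a single-block loop, multi-block loops with different
+// Continue Target choices, and loops exercising continue, break and early return.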
+tcu::TestCaseGroup* createLoopTests(tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> testGroup(new tcu::TestCaseGroup(testCtx, "loop", "Looping control flow"));
+	RGBA defaultColors[4];
+	getDefaultColors(defaultColors);
+	map<string, string> fragments;
+	fragments["pre_main"] =
+		"%c_f32_5 = OpConstant %f32 5.\n";
+
+	// A loop with a single block. The Continue Target is the loop block
+	// itself. In SPIR-V terms, the "loop construct" contains no blocks at all
+	// -- the "continue construct" forms the entire loop.
+	fragments["testfun"] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+
+		"%entry = OpLabel\n"
+		"%val0 = OpVectorExtractDynamic %f32 %param1 %c_i32_0\n"
+		"OpBranch %loop\n"
+
+		";adds and subtracts 1.0 to %val in alternate iterations\n"
+		"%loop = OpLabel\n"
+		"%count = OpPhi %i32 %c_i32_4 %entry %count__ %loop\n"
+		"%delta = OpPhi %f32 %c_f32_1 %entry %minus_delta %loop\n"
+		"%val1 = OpPhi %f32 %val0 %entry %val %loop\n"
+		"%val = OpFAdd %f32 %val1 %delta\n"
+		"%minus_delta = OpFSub %f32 %c_f32_0 %delta\n"
+		"%count__ = OpISub %i32 %count %c_i32_1\n"
+		"%again = OpSGreaterThan %bool %count__ %c_i32_0\n"
+		"OpLoopMerge %exit %loop None\n"
+		"OpBranchConditional %again %loop %exit\n"
+
+		"%exit = OpLabel\n"
+		"%result = OpVectorInsertDynamic %v4f32 %param1 %val %c_i32_0\n"
+		"OpReturnValue %result\n"
+
+		"OpFunctionEnd\n"
+		;
+	createTestsForAllStages("single_block", defaultColors, defaultColors, fragments, testGroup.get());
+
+	// Body comprised of multiple basic blocks.
+	const StringTemplate multiBlock(
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+
+		"%entry = OpLabel\n"
+		"%val0 = OpVectorExtractDynamic %f32 %param1 %c_i32_0\n"
+		"OpBranch %loop\n"
+
+		";adds and subtracts 1.0 to %val in alternate iterations\n"
+		"%loop = OpLabel\n"
+		"%count = OpPhi %i32 %c_i32_4 %entry %count__ %gather\n"
+		"%delta = OpPhi %f32 %c_f32_1 %entry %delta_next %gather\n"
+		"%val1 = OpPhi %f32 %val0 %entry %val %gather\n"
+		// There are several possibilities for the Continue Target below.  Each
+		// will be specialized into a separate test case.
+		"OpLoopMerge %exit ${continue_target} None\n"
+		"OpBranch %if\n"
+
+		"%if = OpLabel\n"
+		";delta_next = (delta > 0) ? -1 : 1;\n"
+		"%gt0 = OpFOrdGreaterThan %bool %delta %c_f32_0\n"
+		"OpSelectionMerge %gather DontFlatten\n"
+		"OpBranchConditional %gt0 %even %odd ;tells us if %count is even or odd\n"
+
+		"%odd = OpLabel\n"
+		"OpBranch %gather\n"
+
+		"%even = OpLabel\n"
+		"OpBranch %gather\n"
+
+		"%gather = OpLabel\n"
+		"%delta_next = OpPhi %f32 %c_f32_n1 %even %c_f32_1 %odd\n"
+		"%val = OpFAdd %f32 %val1 %delta\n"
+		"%count__ = OpISub %i32 %count %c_i32_1\n"
+		"%again = OpSGreaterThan %bool %count__ %c_i32_0\n"
+		"OpBranchConditional %again %loop %exit\n"
+
+		"%exit = OpLabel\n"
+		"%result = OpVectorInsertDynamic %v4f32 %param1 %val %c_i32_0\n"
+		"OpReturnValue %result\n"
+
+		"OpFunctionEnd\n");
+
+	map<string, string> continue_target;
+
+	// The Continue Target is the loop block itself.
+	continue_target["continue_target"] = "%loop";
+	fragments["testfun"] = multiBlock.specialize(continue_target);
+	createTestsForAllStages("multi_block_continue_construct", defaultColors, defaultColors, fragments, testGroup.get());
+
+	// The Continue Target is at the end of the loop.
+	continue_target["continue_target"] = "%gather";
+	fragments["testfun"] = multiBlock.specialize(continue_target);
+	createTestsForAllStages("multi_block_loop_construct", defaultColors, defaultColors, fragments, testGroup.get());
+
+	// A loop with continue statement.
+	fragments["testfun"] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+
+		"%entry = OpLabel\n"
+		"%val0 = OpVectorExtractDynamic %f32 %param1 %c_i32_0\n"
+		"OpBranch %loop\n"
+
+		";adds 4, 3, and 1 to %val0 (skips 2)\n"
+		"%loop = OpLabel\n"
+		"%count = OpPhi %i32 %c_i32_4 %entry %count__ %continue\n"
+		"%val1 = OpPhi %f32 %val0 %entry %val %continue\n"
+		"OpLoopMerge %exit %continue None\n"
+		"OpBranch %if\n"
+
+		"%if = OpLabel\n"
+		";skip if %count==2\n"
+		"%eq2 = OpIEqual %bool %count %c_i32_2\n"
+		"OpSelectionMerge %continue DontFlatten\n"
+		"OpBranchConditional %eq2 %continue %body\n"
+
+		"%body = OpLabel\n"
+		"%fcount = OpConvertSToF %f32 %count\n"
+		"%val2 = OpFAdd %f32 %val1 %fcount\n"
+		"OpBranch %continue\n"
+
+		"%continue = OpLabel\n"
+		"%val = OpPhi %f32 %val2 %body %val1 %if\n"
+		"%count__ = OpISub %i32 %count %c_i32_1\n"
+		"%again = OpSGreaterThan %bool %count__ %c_i32_0\n"
+		"OpBranchConditional %again %loop %exit\n"
+
+		"%exit = OpLabel\n"
+		"%same = OpFSub %f32 %val %c_f32_8\n"
+		"%result = OpVectorInsertDynamic %v4f32 %param1 %same %c_i32_0\n"
+		"OpReturnValue %result\n"
+		"OpFunctionEnd\n";
+	createTestsForAllStages("continue", defaultColors, defaultColors, fragments, testGroup.get());
+
+	// A loop with break.
+	fragments["testfun"] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+
+		"%entry = OpLabel\n"
+		";param1 components are between 0 and 1, so dot product is 4 or less\n"
+		"%dot = OpDot %f32 %param1 %param1\n"
+		"%div = OpFDiv %f32 %dot %c_f32_5\n"
+		"%zero = OpConvertFToU %u32 %div\n"
+		"%two = OpIAdd %i32 %zero %c_i32_2\n"
+		"%val0 = OpVectorExtractDynamic %f32 %param1 %c_i32_0\n"
+		"OpBranch %loop\n"
+
+		";adds 4 and 3 to %val0 (exits early)\n"
+		"%loop = OpLabel\n"
+		"%count = OpPhi %i32 %c_i32_4 %entry %count__ %continue\n"
+		"%val1 = OpPhi %f32 %val0 %entry %val2 %continue\n"
+		"OpLoopMerge %exit %continue None\n"
+		"OpBranch %if\n"
+
+		"%if = OpLabel\n"
+		";end loop if %count==%two\n"
+		"%above2 = OpSGreaterThan %bool %count %two\n"
+		"OpSelectionMerge %continue DontFlatten\n"
+		"OpBranchConditional %above2 %body %exit\n"
+
+		"%body = OpLabel\n"
+		"%fcount = OpConvertSToF %f32 %count\n"
+		"%val2 = OpFAdd %f32 %val1 %fcount\n"
+		"OpBranch %continue\n"
+
+		"%continue = OpLabel\n"
+		"%count__ = OpISub %i32 %count %c_i32_1\n"
+		"%again = OpSGreaterThan %bool %count__ %c_i32_0\n"
+		"OpBranchConditional %again %loop %exit\n"
+
+		"%exit = OpLabel\n"
+		"%val_post = OpPhi %f32 %val2 %continue %val1 %if\n"
+		"%same = OpFSub %f32 %val_post %c_f32_7\n"
+		"%result = OpVectorInsertDynamic %v4f32 %param1 %same %c_i32_0\n"
+		"OpReturnValue %result\n"
+		"OpFunctionEnd\n";
+	createTestsForAllStages("break", defaultColors, defaultColors, fragments, testGroup.get());
+
+	// A loop with return.
+	fragments["testfun"] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+
+		"%entry = OpLabel\n"
+		";param1 components are between 0 and 1, so dot product is 4 or less\n"
+		"%dot = OpDot %f32 %param1 %param1\n"
+		"%div = OpFDiv %f32 %dot %c_f32_5\n"
+		"%zero = OpConvertFToU %u32 %div\n"
+		"%two = OpIAdd %i32 %zero %c_i32_2\n"
+		"%val0 = OpVectorExtractDynamic %f32 %param1 %c_i32_0\n"
+		"OpBranch %loop\n"
+
+		";returns early without modifying %param1\n"
+		"%loop = OpLabel\n"
+		"%count = OpPhi %i32 %c_i32_4 %entry %count__ %continue\n"
+		"%val1 = OpPhi %f32 %val0 %entry %val2 %continue\n"
+		"OpLoopMerge %exit %continue None\n"
+		"OpBranch %if\n"
+
+		"%if = OpLabel\n"
+		";return if %count==%two\n"
+		"%above2 = OpSGreaterThan %bool %count %two\n"
+		"OpSelectionMerge %continue DontFlatten\n"
+		"OpBranchConditional %above2 %body %early_exit\n"
+
+		"%early_exit = OpLabel\n"
+		"OpReturnValue %param1\n"
+
+		"%body = OpLabel\n"
+		"%fcount = OpConvertSToF %f32 %count\n"
+		"%val2 = OpFAdd %f32 %val1 %fcount\n"
+		"OpBranch %continue\n"
+
+		"%continue = OpLabel\n"
+		"%count__ = OpISub %i32 %count %c_i32_1\n"
+		"%again = OpSGreaterThan %bool %count__ %c_i32_0\n"
+		"OpBranchConditional %again %loop %exit\n"
+
+		"%exit = OpLabel\n"
+		";should never get here, so return an incorrect result\n"
+		"%result = OpVectorInsertDynamic %v4f32 %param1 %val2 %c_i32_0\n"
+		"OpReturnValue %result\n"
+		"OpFunctionEnd\n";
+	createTestsForAllStages("return", defaultColors, defaultColors, fragments, testGroup.get());
+
+	return testGroup.release();
+}
+
+// Adds a new test to group using custom fragments for the tessellation-control
+// stage and passthrough fragments for all other stages.  Uses default colors
+// for input and expected output.
+void addTessCtrlTest(tcu::TestCaseGroup* group, const char* name, const map<string, string>& fragments)
+{
+	RGBA defaultColors[4];
+	getDefaultColors(defaultColors);
+	const ShaderElement pipelineStages[] =
+	{
+		ShaderElement("vert", "main", VK_SHADER_STAGE_VERTEX_BIT),
+		ShaderElement("tessc", "main", VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT),
+		ShaderElement("tesse", "main", VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT),
+		ShaderElement("frag", "main", VK_SHADER_STAGE_FRAGMENT_BIT),
+	};
+
+	addFunctionCaseWithPrograms<InstanceContext>(group, name, "", addShaderCodeCustomTessControl,
+												 runAndVerifyDefaultPipeline, createInstanceContext(
+													 pipelineStages, defaultColors, defaultColors, fragments, StageToSpecConstantMap()));
+}
+
+// A collection of tests putting OpControlBarrier in places GLSL forbids but SPIR-V allows.
+tcu::TestCaseGroup* createBarrierTests(tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> testGroup(new tcu::TestCaseGroup(testCtx, "barrier", "OpControlBarrier"));
+	map<string, string> fragments;
+
+	// A barrier inside a function body.
+	fragments["pre_main"] =
+		"%Workgroup = OpConstant %i32 2\n"
+		"%SequentiallyConsistent = OpConstant %i32 0x10\n";
+	fragments["testfun"] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%label_testfun = OpLabel\n"
+		"OpControlBarrier %Workgroup %Workgroup %SequentiallyConsistent\n"
+		"OpReturnValue %param1\n"
+		"OpFunctionEnd\n";
+	addTessCtrlTest(testGroup.get(), "in_function", fragments);
+
+	// Common setup code for the following tests.
+	fragments["pre_main"] =
+		"%Workgroup = OpConstant %i32 2\n"
+		"%SequentiallyConsistent = OpConstant %i32 0x10\n"
+		"%c_f32_5 = OpConstant %f32 5.\n";
+	const string setupPercentZero =	 // Begins %test_code function with code that sets %zero to 0u but cannot be optimized away.
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%entry = OpLabel\n"
+		";param1 components are between 0 and 1, so dot product is 4 or less\n"
+		"%dot = OpDot %f32 %param1 %param1\n"
+		"%div = OpFDiv %f32 %dot %c_f32_5\n"
+		"%zero = OpConvertFToU %u32 %div\n";
+
+	// Barriers inside OpSwitch branches.
+	fragments["testfun"] =
+		setupPercentZero +
+		"OpSelectionMerge %switch_exit None\n"
+		"OpSwitch %zero %switch_default 0 %case0 1 %case1 ;should always go to %case0\n"
+
+		"%case1 = OpLabel\n"
+		";This barrier should never be executed, but its presence makes test failure more likely when there's a bug.\n"
+		"OpControlBarrier %Workgroup %Workgroup %SequentiallyConsistent\n"
+		"%wrong_branch_alert1 = OpVectorInsertDynamic %v4f32 %param1 %c_f32_0_5 %c_i32_0\n"
+		"OpBranch %switch_exit\n"
+
+		"%switch_default = OpLabel\n"
+		"%wrong_branch_alert2 = OpVectorInsertDynamic %v4f32 %param1 %c_f32_0_5 %c_i32_0\n"
+		";This barrier should never be executed, but its presence makes test failure more likely when there's a bug.\n"
+		"OpControlBarrier %Workgroup %Workgroup %SequentiallyConsistent\n"
+		"OpBranch %switch_exit\n"
+
+		"%case0 = OpLabel\n"
+		"OpControlBarrier %Workgroup %Workgroup %SequentiallyConsistent\n"
+		"OpBranch %switch_exit\n"
+
+		"%switch_exit = OpLabel\n"
+		"%ret = OpPhi %v4f32 %param1 %case0 %wrong_branch_alert1 %case1 %wrong_branch_alert2 %switch_default\n"
+		"OpReturnValue %ret\n"
+		"OpFunctionEnd\n";
+	addTessCtrlTest(testGroup.get(), "in_switch", fragments);
+
+	// Barriers inside if-then-else.
+	fragments["testfun"] =
+		setupPercentZero +
+		"%eq0 = OpIEqual %bool %zero %c_u32_0\n"
+		"OpSelectionMerge %exit DontFlatten\n"
+		"OpBranchConditional %eq0 %then %else\n"
+
+		"%else = OpLabel\n"
+		";This barrier should never be executed, but its presence makes test failure more likely when there's a bug.\n"
+		"OpControlBarrier %Workgroup %Workgroup %SequentiallyConsistent\n"
+		"%wrong_branch_alert = OpVectorInsertDynamic %v4f32 %param1 %c_f32_0_5 %c_i32_0\n"
+		"OpBranch %exit\n"
+
+		"%then = OpLabel\n"
+		"OpControlBarrier %Workgroup %Workgroup %SequentiallyConsistent\n"
+		"OpBranch %exit\n"
+
+		"%exit = OpLabel\n"
+		"%ret = OpPhi %v4f32 %param1 %then %wrong_branch_alert %else\n"
+		"OpReturnValue %ret\n"
+		"OpFunctionEnd\n";
+	addTessCtrlTest(testGroup.get(), "in_if", fragments);
+
+	// A barrier after control-flow reconvergence, tempting the compiler to attempt something like this:
+	// http://lists.llvm.org/pipermail/llvm-dev/2009-October/026317.html.
+	fragments["testfun"] =
+		setupPercentZero +
+		"%thread_id = OpLoad %i32 %BP_gl_InvocationID\n"
+		"%thread0 = OpIEqual %bool %thread_id %c_i32_0\n"
+		"OpSelectionMerge %exit DontFlatten\n"
+		"OpBranchConditional %thread0 %then %else\n"
+
+		"%else = OpLabel\n"
+		"%val0 = OpVectorExtractDynamic %f32 %param1 %c_i32_0\n"
+		"OpBranch %exit\n"
+
+		"%then = OpLabel\n"
+		"%val1 = OpVectorExtractDynamic %f32 %param1 %zero\n"
+		"OpBranch %exit\n"
+
+		"%exit = OpLabel\n"
+		"%val = OpPhi %f32 %val0 %else %val1 %then\n"
+		"OpControlBarrier %Workgroup %Workgroup %SequentiallyConsistent\n"
+		"%ret = OpVectorInsertDynamic %v4f32 %param1 %val %zero\n"
+		"OpReturnValue %ret\n"
+		"OpFunctionEnd\n";
+	addTessCtrlTest(testGroup.get(), "after_divergent_if", fragments);
+
+	// A barrier inside a loop.
+	fragments["pre_main"] =
+		"%Workgroup = OpConstant %i32 2\n"
+		"%SequentiallyConsistent = OpConstant %i32 0x10\n"
+		"%c_f32_10 = OpConstant %f32 10.\n";
+	fragments["testfun"] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%entry = OpLabel\n"
+		"%val0 = OpVectorExtractDynamic %f32 %param1 %c_i32_0\n"
+		"OpBranch %loop\n"
+
+		";adds 4, 3, 2, and 1 to %val0\n"
+		"%loop = OpLabel\n"
+		"%count = OpPhi %i32 %c_i32_4 %entry %count__ %loop\n"
+		"%val1 = OpPhi %f32 %val0 %entry %val %loop\n"
+		"OpControlBarrier %Workgroup %Workgroup %SequentiallyConsistent\n"
+		"%fcount = OpConvertSToF %f32 %count\n"
+		"%val = OpFAdd %f32 %val1 %fcount\n"
+		"%count__ = OpISub %i32 %count %c_i32_1\n"
+		"%again = OpSGreaterThan %bool %count__ %c_i32_0\n"
+		"OpLoopMerge %exit %loop None\n"
+		"OpBranchConditional %again %loop %exit\n"
+
+		"%exit = OpLabel\n"
+		"%same = OpFSub %f32 %val %c_f32_10\n"
+		"%ret = OpVectorInsertDynamic %v4f32 %param1 %same %c_i32_0\n"
+		"OpReturnValue %ret\n"
+		"OpFunctionEnd\n";
+	addTessCtrlTest(testGroup.get(), "in_loop", fragments);
+
+	return testGroup.release();
+}
+
+// Test for the OpFRem instruction.
+tcu::TestCaseGroup* createFRemTests(tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>		testGroup(new tcu::TestCaseGroup(testCtx, "frem", "OpFRem"));
+	map<string, string>					fragments;
+	RGBA								inputColors[4];
+	RGBA								outputColors[4];
+
+	fragments["pre_main"]				 =
+		"%c_f32_3 = OpConstant %f32 3.0\n"
+		"%c_f32_n3 = OpConstant %f32 -3.0\n"
+		"%c_f32_4 = OpConstant %f32 4.0\n"
+		"%c_f32_p75 = OpConstant %f32 0.75\n"
+		"%c_v4f32_p75_p75_p75_p75 = OpConstantComposite %v4f32 %c_f32_p75 %c_f32_p75 %c_f32_p75 %c_f32_p75 \n"
+		"%c_v4f32_4_4_4_4 = OpConstantComposite %v4f32 %c_f32_4 %c_f32_4 %c_f32_4 %c_f32_4\n"
+		"%c_v4f32_3_n3_3_n3 = OpConstantComposite %v4f32 %c_f32_3 %c_f32_n3 %c_f32_3 %c_f32_n3\n";
+
+	// The test does the following.
+	// vec4 result = (param1 * 8.0) - 4.0;
+	// return (frem(result.x,3) + 0.75, frem(result.y, -3) + 0.75, 0, 1)
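+	// For example, with an input component of 232/255: result.x = 232/255 * 8 - 4 = ~3.28,
+	// frem(3.28, 3.0) = ~0.28, + 0.75 = ~1.03, which clamps to 255 in the output color.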
+	fragments["testfun"]				 =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%label_testfun = OpLabel\n"
+		"%v_times_8 = OpVectorTimesScalar %v4f32 %param1 %c_f32_8\n"
+		"%minus_4 = OpFSub %v4f32 %v_times_8 %c_v4f32_4_4_4_4\n"
+		"%frem = OpFRem %v4f32 %minus_4 %c_v4f32_3_n3_3_n3\n"
+		"%added = OpFAdd %v4f32 %frem %c_v4f32_p75_p75_p75_p75\n"
+		"%xyz_1 = OpVectorInsertDynamic %v4f32 %added %c_f32_1 %c_i32_3\n"
+		"%xy_0_1 = OpVectorInsertDynamic %v4f32 %xyz_1 %c_f32_0 %c_i32_2\n"
+		"OpReturnValue %xy_0_1\n"
+		"OpFunctionEnd\n";
+
+
+	inputColors[0]		= RGBA(16,	16,		0, 255);
+	inputColors[1]		= RGBA(232, 232,	0, 255);
+	inputColors[2]		= RGBA(232, 16,		0, 255);
+	inputColors[3]		= RGBA(16,	232,	0, 255);
+
+	outputColors[0]		= RGBA(64,	64,		0, 255);
+	outputColors[1]		= RGBA(255, 255,	0, 255);
+	outputColors[2]		= RGBA(255, 64,		0, 255);
+	outputColors[3]		= RGBA(64,	255,	0, 255);
+
+	createTestsForAllStages("frem", inputColors, outputColors, fragments, testGroup.get());
+	return testGroup.release();
+}
+
+tcu::TestCaseGroup* createInstructionTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> instructionTests	(new tcu::TestCaseGroup(testCtx, "instruction", "Instructions with special opcodes/operands"));
+	de::MovePtr<tcu::TestCaseGroup> computeTests		(new tcu::TestCaseGroup(testCtx, "compute", "Compute Instructions with special opcodes/operands"));
+	de::MovePtr<tcu::TestCaseGroup> graphicsTests		(new tcu::TestCaseGroup(testCtx, "graphics", "Graphics Instructions with special opcodes/operands"));
+
+	computeTests->addChild(createOpNopGroup(testCtx));
+	computeTests->addChild(createOpLineGroup(testCtx));
+	computeTests->addChild(createOpNoLineGroup(testCtx));
+	computeTests->addChild(createOpConstantNullGroup(testCtx));
+	computeTests->addChild(createOpConstantCompositeGroup(testCtx));
+	computeTests->addChild(createOpConstantUsageGroup(testCtx));
+	computeTests->addChild(createSpecConstantGroup(testCtx));
+	computeTests->addChild(createOpSourceGroup(testCtx));
+	computeTests->addChild(createOpSourceExtensionGroup(testCtx));
+	computeTests->addChild(createDecorationGroupGroup(testCtx));
+	computeTests->addChild(createOpPhiGroup(testCtx));
+	computeTests->addChild(createLoopControlGroup(testCtx));
+	computeTests->addChild(createFunctionControlGroup(testCtx));
+	computeTests->addChild(createSelectionControlGroup(testCtx));
+	computeTests->addChild(createBlockOrderGroup(testCtx));
+	computeTests->addChild(createMultipleShaderGroup(testCtx));
+	computeTests->addChild(createMemoryAccessGroup(testCtx));
+	computeTests->addChild(createOpCopyMemoryGroup(testCtx));
+	computeTests->addChild(createOpCopyObjectGroup(testCtx));
+	computeTests->addChild(createNoContractionGroup(testCtx));
+	computeTests->addChild(createOpUndefGroup(testCtx));
+	computeTests->addChild(createOpUnreachableGroup(testCtx));
+	computeTests->addChild(createOpQuantizeToF16Group(testCtx));
+	computeTests->addChild(createOpFRemGroup(testCtx));
+
+	RGBA defaultColors[4];
+	getDefaultColors(defaultColors);
+
+	de::MovePtr<tcu::TestCaseGroup> opnopTests (new tcu::TestCaseGroup(testCtx, "opnop", "Test OpNop"));
+	map<string, string> opNopFragments;
+	opNopFragments["testfun"] =
+		"%test_code = OpFunction %v4f32 None %v4f32_function\n"
+		"%param1 = OpFunctionParameter %v4f32\n"
+		"%label_testfun = OpLabel\n"
+		"OpNop\n"
+		"OpNop\n"
+		"OpNop\n"
+		"OpNop\n"
+		"OpNop\n"
+		"OpNop\n"
+		"OpNop\n"
+		"OpNop\n"
+		"%a = OpVectorExtractDynamic %f32 %param1 %c_i32_0\n"
+		"%b = OpFAdd %f32 %a %a\n"
+		"OpNop\n"
+		"%c = OpFSub %f32 %b %a\n"
+		"%ret = OpVectorInsertDynamic %v4f32 %param1 %c %c_i32_0\n"
+		"OpNop\n"
+		"OpNop\n"
+		"OpReturnValue %ret\n"
+		"OpFunctionEnd\n"
+		;
+	createTestsForAllStages("opnop", defaultColors, defaultColors, opNopFragments, opnopTests.get());
+
+
+	graphicsTests->addChild(opnopTests.release());
+	graphicsTests->addChild(createOpSourceTests(testCtx));
+	graphicsTests->addChild(createOpSourceContinuedTests(testCtx));
+	graphicsTests->addChild(createOpLineTests(testCtx));
+	graphicsTests->addChild(createOpNoLineTests(testCtx));
+	graphicsTests->addChild(createOpConstantNullTests(testCtx));
+	graphicsTests->addChild(createOpConstantCompositeTests(testCtx));
+	graphicsTests->addChild(createMemoryAccessTests(testCtx));
+	graphicsTests->addChild(createOpUndefTests(testCtx));
+	graphicsTests->addChild(createSelectionBlockOrderTests(testCtx));
+	graphicsTests->addChild(createModuleTests(testCtx));
+	graphicsTests->addChild(createSwitchBlockOrderTests(testCtx));
+	graphicsTests->addChild(createOpPhiTests(testCtx));
+	graphicsTests->addChild(createNoContractionTests(testCtx));
+	graphicsTests->addChild(createOpQuantizeTests(testCtx));
+	graphicsTests->addChild(createLoopTests(testCtx));
+	graphicsTests->addChild(createSpecConstantTests(testCtx));
+	graphicsTests->addChild(createSpecConstantOpQuantizeToF16Group(testCtx));
+	graphicsTests->addChild(createBarrierTests(testCtx));
+	graphicsTests->addChild(createDecorationGroupTests(testCtx));
+	graphicsTests->addChild(createFRemTests(testCtx));
+
+	instructionTests->addChild(computeTests.release());
+	instructionTests->addChild(graphicsTests.release());
+
+	return instructionTests.release();
+}
+
+} // SpirVAssembly
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmInstructionTests.hpp b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmInstructionTests.hpp
new file mode 100644
index 0000000..29587db
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmInstructionTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTSPVASMINSTRUCTIONTESTS_HPP
+#define _VKTSPVASMINSTRUCTIONTESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief SPIR-V Assembly Tests for Instructions (special opcode/operand)
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace SpirVAssembly
+{
+
+tcu::TestCaseGroup* createInstructionTests (tcu::TestContext& testCtx);
+
+} // SpirVAssembly
+} // vkt
+
+#endif // _VKTSPVASMINSTRUCTIONTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmTests.cpp b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmTests.cpp
new file mode 100644
index 0000000..8870d49
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmTests.cpp
@@ -0,0 +1,66 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief SPIR-V Assembly Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktSpvAsmTests.hpp"
+
+#include "vktSpvAsmInstructionTests.hpp"
+#include "vktTestGroupUtil.hpp"
+
+namespace vkt
+{
+namespace SpirVAssembly
+{
+
+namespace
+{
+
+void createChildren (tcu::TestCaseGroup* spirVAssemblyTests)
+{
+	tcu::TestContext&	testCtx		= spirVAssemblyTests->getTestContext();
+
+	spirVAssemblyTests->addChild(createInstructionTests(testCtx));
+	// \todo [2015-09-28 antiagainst] control flow
+	// \todo [2015-09-28 antiagainst] multiple entry points for the same shader stage
+	// \todo [2015-09-28 antiagainst] multiple shaders in the same module
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx)
+{
+	return createTestGroup(testCtx, "spirv_assembly", "SPIR-V Assembly tests", createChildren);
+}
+
+} // SpirVAssembly
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmTests.hpp b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmTests.hpp
new file mode 100644
index 0000000..92e5301
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/spirv_assembly/vktSpvAsmTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTSPVASMTESTS_HPP
+#define _VKTSPVASMTESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief SPIR-V Assembly Tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace SpirVAssembly
+{
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx);
+
+} // SpirVAssembly
+} // vkt
+
+#endif // _VKTSPVASMTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/ssbo/CMakeLists.txt b/external/vulkancts/modules/vulkan/ssbo/CMakeLists.txt
new file mode 100644
index 0000000..357590f
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/ssbo/CMakeLists.txt
@@ -0,0 +1,19 @@
+include_directories(
+	..
+)
+
+set(DEQP_VK_SSBO_SRCS
+	vktSSBOLayoutCase.cpp
+	vktSSBOLayoutCase.hpp
+	vktSSBOLayoutTests.cpp
+	vktSSBOLayoutTests.hpp
+)
+
+set(DEQP_VK_SSBO_LIBS
+	deqp-vk-common
+	tcutil
+	vkutil
+)
+
+add_library(deqp-vk-ssbo STATIC ${DEQP_VK_SSBO_SRCS})
+target_link_libraries(deqp-vk-ssbo ${DEQP_VK_SSBO_LIBS})
diff --git a/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutCase.cpp b/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutCase.cpp
new file mode 100644
index 0000000..3e6a3f5
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutCase.cpp
@@ -0,0 +1,2168 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief SSBO layout case.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktSSBOLayoutCase.hpp"
+#include "gluShaderProgram.hpp"
+#include "gluContextInfo.hpp"
+#include "gluShaderUtil.hpp"
+#include "gluVarType.hpp"
+#include "gluVarTypeUtil.hpp"
+#include "tcuTestLog.hpp"
+#include "deRandom.hpp"
+#include "deStringUtil.hpp"
+#include "deMemory.h"
+#include "deString.h"
+#include "deMath.h"
+#include "deSharedPtr.hpp"
+
+#include <algorithm>
+#include <map>
+
+#include "vkBuilderUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPrograms.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkTypeUtil.hpp"
+
+namespace vkt
+{
+namespace ssbo
+{
+
+using tcu::TestLog;
+using std::string;
+using std::vector;
+using std::map;
+using glu::VarType;
+using glu::StructType;
+using glu::StructMember;
+
+struct LayoutFlagsFmt
+{
+	deUint32 flags;
+	LayoutFlagsFmt (deUint32 flags_) : flags(flags_) {}
+};
+
+std::ostream& operator<< (std::ostream& str, const LayoutFlagsFmt& fmt)
+{
+	static const struct
+	{
+		deUint32	bit;
+		const char*	token;
+	} bitDesc[] =
+	{
+		{ LAYOUT_STD140,		"std140"		},
+		{ LAYOUT_STD430,		"std430"		},
+		{ LAYOUT_ROW_MAJOR,		"row_major"		},
+		{ LAYOUT_COLUMN_MAJOR,	"column_major"	}
+	};
+
+	deUint32 remBits = fmt.flags;
+	for (int descNdx = 0; descNdx < DE_LENGTH_OF_ARRAY(bitDesc); descNdx++)
+	{
+		if (remBits & bitDesc[descNdx].bit)
+		{
+			if (remBits != fmt.flags)
+				str << ", ";
+			str << bitDesc[descNdx].token;
+			remBits &= ~bitDesc[descNdx].bit;
+		}
+	}
+	DE_ASSERT(remBits == 0);
+	return str;
+}
+
+// BufferVar implementation.
+
+BufferVar::BufferVar (const char* name, const VarType& type, deUint32 flags)
+	: m_name	(name)
+	, m_type	(type)
+	, m_flags	(flags)
+{
+}
+
+// BufferBlock implementation.
+
+BufferBlock::BufferBlock (const char* blockName)
+	: m_blockName	(blockName)
+	, m_arraySize	(-1)
+	, m_flags		(0)
+{
+	setArraySize(0);
+}
+
+void BufferBlock::setArraySize (int arraySize)
+{
+	DE_ASSERT(arraySize >= 0);
+	m_lastUnsizedArraySizes.resize(arraySize == 0 ? 1 : arraySize, 0);
+	m_arraySize = arraySize;
+}
+
+std::ostream& operator<< (std::ostream& stream, const BlockLayoutEntry& entry)
+{
+	stream << entry.name << " { name = " << entry.name
+		   << ", size = " << entry.size
+		   << ", activeVarIndices = [";
+
+	for (vector<int>::const_iterator i = entry.activeVarIndices.begin(); i != entry.activeVarIndices.end(); i++)
+	{
+		if (i != entry.activeVarIndices.begin())
+			stream << ", ";
+		stream << *i;
+	}
+
+	stream << "] }";
+	return stream;
+}
+
+static bool isUnsizedArray (const BufferVarLayoutEntry& entry)
+{
+	DE_ASSERT(entry.arraySize != 0 || entry.topLevelArraySize != 0);
+	return entry.arraySize == 0 || entry.topLevelArraySize == 0;
+}
+
+std::ostream& operator<< (std::ostream& stream, const BufferVarLayoutEntry& entry)
+{
+	stream << entry.name << " { type = " << glu::getDataTypeName(entry.type)
+		   << ", blockNdx = " << entry.blockNdx
+		   << ", offset = " << entry.offset
+		   << ", arraySize = " << entry.arraySize
+		   << ", arrayStride = " << entry.arrayStride
+		   << ", matrixStride = " << entry.matrixStride
+		   << ", topLevelArraySize = " << entry.topLevelArraySize
+		   << ", topLevelArrayStride = " << entry.topLevelArrayStride
+		   << ", isRowMajor = " << (entry.isRowMajor ? "true" : "false")
+		   << " }";
+	return stream;
+}
+
+// \todo [2012-01-24 pyry] Speed up lookups using hash.
+
+int BufferLayout::getVariableIndex (const string& name) const
+{
+	for (int ndx = 0; ndx < (int)bufferVars.size(); ndx++)
+	{
+		if (bufferVars[ndx].name == name)
+			return ndx;
+	}
+	return -1;
+}
+
+int BufferLayout::getBlockIndex (const string& name) const
+{
+	for (int ndx = 0; ndx < (int)blocks.size(); ndx++)
+	{
+		if (blocks[ndx].name == name)
+			return ndx;
+	}
+	return -1;
+}
+
+// ShaderInterface implementation.
+
+ShaderInterface::ShaderInterface (void)
+{
+}
+
+ShaderInterface::~ShaderInterface (void)
+{
+	for (std::vector<StructType*>::iterator i = m_structs.begin(); i != m_structs.end(); i++)
+		delete *i;
+
+	for (std::vector<BufferBlock*>::iterator i = m_bufferBlocks.begin(); i != m_bufferBlocks.end(); i++)
+		delete *i;
+}
+
+StructType& ShaderInterface::allocStruct (const char* name)
+{
+	m_structs.reserve(m_structs.size()+1);
+	m_structs.push_back(new StructType(name));
+	return *m_structs.back();
+}
+
+struct StructNameEquals
+{
+	std::string name;
+
+	StructNameEquals (const char* name_) : name(name_) {}
+
+	bool operator() (const StructType* type) const
+	{
+		return type->getTypeName() && name == type->getTypeName();
+	}
+};
+
+const StructType* ShaderInterface::findStruct (const char* name) const
+{
+	std::vector<StructType*>::const_iterator pos = std::find_if(m_structs.begin(), m_structs.end(), StructNameEquals(name));
+	return pos != m_structs.end() ? *pos : DE_NULL;
+}
+
+void ShaderInterface::getNamedStructs (std::vector<const StructType*>& structs) const
+{
+	for (std::vector<StructType*>::const_iterator i = m_structs.begin(); i != m_structs.end(); i++)
+	{
+		if ((*i)->getTypeName() != DE_NULL)
+			structs.push_back(*i);
+	}
+}
+
+BufferBlock& ShaderInterface::allocBlock (const char* name)
+{
+	m_bufferBlocks.reserve(m_bufferBlocks.size()+1);
+	m_bufferBlocks.push_back(new BufferBlock(name));
+	return *m_bufferBlocks.back();
+}
+
+namespace // Utilities
+{
+// Layout computation.
+
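+// Returns the buffer size of a basic type: one 32-bit word per scalar component.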
+int getDataTypeByteSize (glu::DataType type)
+{
+	return glu::getDataTypeScalarSize(type)*(int)sizeof(deUint32);
+}
+
+int getDataTypeByteAlignment (glu::DataType type)
+{
+	switch (type)
+	{
+		case glu::TYPE_FLOAT:
+		case glu::TYPE_INT:
+		case glu::TYPE_UINT:
+		case glu::TYPE_BOOL:		return 1*(int)sizeof(deUint32);
+
+		case glu::TYPE_FLOAT_VEC2:
+		case glu::TYPE_INT_VEC2:
+		case glu::TYPE_UINT_VEC2:
+		case glu::TYPE_BOOL_VEC2:	return 2*(int)sizeof(deUint32);
+
+		case glu::TYPE_FLOAT_VEC3:
+		case glu::TYPE_INT_VEC3:
+		case glu::TYPE_UINT_VEC3:
+		case glu::TYPE_BOOL_VEC3:	// Fall-through to vec4
+
+		case glu::TYPE_FLOAT_VEC4:
+		case glu::TYPE_INT_VEC4:
+		case glu::TYPE_UINT_VEC4:
+		case glu::TYPE_BOOL_VEC4:	return 4*(int)sizeof(deUint32);
+
+		default:
+			DE_ASSERT(false);
+			return 0;
+	}
+}
+
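+// Rounds a up to the nearest multiple of b.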
+static inline int deRoundUp32 (int a, int b)
+{
+	int d = a/b;
+	return d*b == a ? a : (d+1)*b;
+}
+
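+// Computes the std140 base alignment: vectors use their natural alignment, while matrix column/row
+// vectors, arrays and structures are aligned up to the alignment of vec4.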
+int computeStd140BaseAlignment (const VarType& type, deUint32 layoutFlags)
+{
+	const int vec4Alignment = (int)sizeof(deUint32)*4;
+
+	if (type.isBasicType())
+	{
+		glu::DataType basicType = type.getBasicType();
+
+		if (glu::isDataTypeMatrix(basicType))
+		{
+			const bool	isRowMajor	= !!(layoutFlags & LAYOUT_ROW_MAJOR);
+			const int	vecSize		= isRowMajor ? glu::getDataTypeMatrixNumColumns(basicType)
+												 : glu::getDataTypeMatrixNumRows(basicType);
+			const int	vecAlign	= deAlign32(getDataTypeByteAlignment(glu::getDataTypeFloatVec(vecSize)), vec4Alignment);
+
+			return vecAlign;
+		}
+		else
+			return getDataTypeByteAlignment(basicType);
+	}
+	else if (type.isArrayType())
+	{
+		int elemAlignment = computeStd140BaseAlignment(type.getElementType(), layoutFlags);
+
+		// Round up to alignment of vec4
+		return deAlign32(elemAlignment, vec4Alignment);
+	}
+	else
+	{
+		DE_ASSERT(type.isStructType());
+
+		int maxBaseAlignment = 0;
+
+		for (StructType::ConstIterator memberIter = type.getStructPtr()->begin(); memberIter != type.getStructPtr()->end(); memberIter++)
+			maxBaseAlignment = de::max(maxBaseAlignment, computeStd140BaseAlignment(memberIter->getType(), layoutFlags));
+
+		return deAlign32(maxBaseAlignment, vec4Alignment);
+	}
+}
+
+int computeStd430BaseAlignment (const VarType& type, deUint32 layoutFlags)
+{
+	// Otherwise identical to std140 except that the alignment of structures and arrays
+	// is not rounded up to the alignment of vec4.
+
+	if (type.isBasicType())
+	{
+		glu::DataType basicType = type.getBasicType();
+
+		if (glu::isDataTypeMatrix(basicType))
+		{
+			const bool	isRowMajor	= !!(layoutFlags & LAYOUT_ROW_MAJOR);
+			const int	vecSize		= isRowMajor ? glu::getDataTypeMatrixNumColumns(basicType)
+												 : glu::getDataTypeMatrixNumRows(basicType);
+			const int	vecAlign	= getDataTypeByteAlignment(glu::getDataTypeFloatVec(vecSize));
+
+			return vecAlign;
+		}
+		else
+			return getDataTypeByteAlignment(basicType);
+	}
+	else if (type.isArrayType())
+	{
+		return computeStd430BaseAlignment(type.getElementType(), layoutFlags);
+	}
+	else
+	{
+		DE_ASSERT(type.isStructType());
+
+		int maxBaseAlignment = 0;
+
+		for (StructType::ConstIterator memberIter = type.getStructPtr()->begin(); memberIter != type.getStructPtr()->end(); memberIter++)
+			maxBaseAlignment = de::max(maxBaseAlignment, computeStd430BaseAlignment(memberIter->getType(), layoutFlags));
+
+		return maxBaseAlignment;
+	}
+}
+
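+// Merges block-level and variable-level layout flags: the newer flags override the previous ones
+// separately for the packing (std140/std430) and matrix order (row/column major) categories.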
+inline deUint32 mergeLayoutFlags (deUint32 prevFlags, deUint32 newFlags)
+{
+	const deUint32	packingMask		= LAYOUT_STD430|LAYOUT_STD140;
+	const deUint32	matrixMask		= LAYOUT_ROW_MAJOR|LAYOUT_COLUMN_MAJOR;
+
+	deUint32 mergedFlags = 0;
+
+	mergedFlags |= ((newFlags & packingMask)	? newFlags : prevFlags) & packingMask;
+	mergedFlags |= ((newFlags & matrixMask)		? newFlags : prevFlags) & matrixMask;
+
+	return mergedFlags;
+}
+
+//! Appends all child elements to layout, returns value that should be appended to offset.
+int computeReferenceLayout (
+	BufferLayout&		layout,
+	int					curBlockNdx,
+	int					baseOffset,
+	const std::string&	curPrefix,
+	const VarType&		type,
+	deUint32			layoutFlags)
+{
+	// Reference layout uses std430 rules by default. std140 rules are
+	// chosen only for blocks that have std140 layout.
+	const bool	isStd140			= (layoutFlags & LAYOUT_STD140) != 0;
+	const int	baseAlignment		= isStd140 ? computeStd140BaseAlignment(type, layoutFlags)
+											   : computeStd430BaseAlignment(type, layoutFlags);
+	int			curOffset			= deAlign32(baseOffset, baseAlignment);
+	const int	topLevelArraySize	= 1; // Default values
+	const int	topLevelArrayStride	= 0;
+
+	if (type.isBasicType())
+	{
+		const glu::DataType		basicType	= type.getBasicType();
+		BufferVarLayoutEntry	entry;
+
+		entry.name					= curPrefix;
+		entry.type					= basicType;
+		entry.arraySize				= 1;
+		entry.arrayStride			= 0;
+		entry.matrixStride			= 0;
+		entry.topLevelArraySize		= topLevelArraySize;
+		entry.topLevelArrayStride	= topLevelArrayStride;
+		entry.blockNdx				= curBlockNdx;
+
+		if (glu::isDataTypeMatrix(basicType))
+		{
+			// Array of vectors as specified in rules 5 & 7.
+			const bool	isRowMajor			= !!(layoutFlags & LAYOUT_ROW_MAJOR);
+			const int	numVecs				= isRowMajor ? glu::getDataTypeMatrixNumRows(basicType)
+														 : glu::getDataTypeMatrixNumColumns(basicType);
+
+			entry.offset		= curOffset;
+			entry.matrixStride	= baseAlignment;
+			entry.isRowMajor	= isRowMajor;
+
+			curOffset += numVecs*baseAlignment;
+		}
+		else
+		{
+			// Scalar or vector.
+			entry.offset = curOffset;
+
+			curOffset += getDataTypeByteSize(basicType);
+		}
+
+		layout.bufferVars.push_back(entry);
+	}
+	else if (type.isArrayType())
+	{
+		const VarType&	elemType	= type.getElementType();
+
+		if (elemType.isBasicType() && !glu::isDataTypeMatrix(elemType.getBasicType()))
+		{
+			// Array of scalars or vectors.
+			const glu::DataType		elemBasicType	= elemType.getBasicType();
+			const int				stride			= baseAlignment;
+			BufferVarLayoutEntry	entry;
+
+			entry.name					= curPrefix + "[0]"; // Array variables are always postfixed with [0]
+			entry.type					= elemBasicType;
+			entry.blockNdx				= curBlockNdx;
+			entry.offset				= curOffset;
+			entry.arraySize				= type.getArraySize();
+			entry.arrayStride			= stride;
+			entry.matrixStride			= 0;
+			entry.topLevelArraySize		= topLevelArraySize;
+			entry.topLevelArrayStride	= topLevelArrayStride;
+
+			curOffset += stride*type.getArraySize();
+
+			layout.bufferVars.push_back(entry);
+		}
+		else if (elemType.isBasicType() && glu::isDataTypeMatrix(elemType.getBasicType()))
+		{
+			// Array of matrices.
+			const glu::DataType			elemBasicType	= elemType.getBasicType();
+			const bool					isRowMajor		= !!(layoutFlags & LAYOUT_ROW_MAJOR);
+			const int					numVecs			= isRowMajor ? glu::getDataTypeMatrixNumRows(elemBasicType)
+																	 : glu::getDataTypeMatrixNumColumns(elemBasicType);
+			const int					vecStride		= baseAlignment;
+			BufferVarLayoutEntry		entry;
+
+			entry.name					= curPrefix + "[0]"; // Array variables are always postfixed with [0]
+			entry.type					= elemBasicType;
+			entry.blockNdx				= curBlockNdx;
+			entry.offset				= curOffset;
+			entry.arraySize				= type.getArraySize();
+			entry.arrayStride			= vecStride*numVecs;
+			entry.matrixStride			= vecStride;
+			entry.isRowMajor			= isRowMajor;
+			entry.topLevelArraySize		= topLevelArraySize;
+			entry.topLevelArrayStride	= topLevelArrayStride;
+
+			curOffset += numVecs*vecStride*type.getArraySize();
+
+			layout.bufferVars.push_back(entry);
+		}
+		else
+		{
+			DE_ASSERT(elemType.isStructType() || elemType.isArrayType());
+
+			for (int elemNdx = 0; elemNdx < type.getArraySize(); elemNdx++)
+				curOffset += computeReferenceLayout(layout, curBlockNdx, curOffset, curPrefix + "[" + de::toString(elemNdx) + "]", type.getElementType(), layoutFlags);
+		}
+	}
+	else
+	{
+		DE_ASSERT(type.isStructType());
+
+		for (StructType::ConstIterator memberIter = type.getStructPtr()->begin(); memberIter != type.getStructPtr()->end(); memberIter++)
+			curOffset += computeReferenceLayout(layout, curBlockNdx, curOffset, curPrefix + "." + memberIter->getName(), memberIter->getType(), layoutFlags);
+
+		curOffset = deAlign32(curOffset, baseAlignment);
+	}
+
+	return curOffset-baseOffset;
+}
+
+//! Appends all child elements to layout, returns offset increment.
+int computeReferenceLayout (BufferLayout& layout, int curBlockNdx, const std::string& blockPrefix, int baseOffset, const BufferVar& bufVar, deUint32 blockLayoutFlags)
+{
+	const VarType&	varType			= bufVar.getType();
+	const deUint32	combinedFlags	= mergeLayoutFlags(blockLayoutFlags, bufVar.getFlags());
+
+	if (varType.isArrayType())
+	{
+		// Top-level arrays need special care.
+		const int		topLevelArraySize	= varType.getArraySize() == VarType::UNSIZED_ARRAY ? 0 : varType.getArraySize();
+		const string	prefix				= blockPrefix + bufVar.getName() + "[0]";
+		const bool		isStd140			= (blockLayoutFlags & LAYOUT_STD140) != 0;
+		const int		vec4Align			= (int)sizeof(deUint32)*4;
+		const int		baseAlignment		= isStd140 ? computeStd140BaseAlignment(varType, combinedFlags)
+													   : computeStd430BaseAlignment(varType, combinedFlags);
+		int				curOffset			= deAlign32(baseOffset, baseAlignment);
+		const VarType&	elemType			= varType.getElementType();
+
+		if (elemType.isBasicType() && !glu::isDataTypeMatrix(elemType.getBasicType()))
+		{
+			// Array of scalars or vectors.
+			const glu::DataType		elemBasicType	= elemType.getBasicType();
+			const int				elemBaseAlign	= getDataTypeByteAlignment(elemBasicType);
+			const int				stride			= isStd140 ? deAlign32(elemBaseAlign, vec4Align) : elemBaseAlign;
+			BufferVarLayoutEntry	entry;
+
+			entry.name					= prefix;
+			entry.topLevelArraySize		= 1;
+			entry.topLevelArrayStride	= 0;
+			entry.type					= elemBasicType;
+			entry.blockNdx				= curBlockNdx;
+			entry.offset				= curOffset;
+			entry.arraySize				= topLevelArraySize;
+			entry.arrayStride			= stride;
+			entry.matrixStride			= 0;
+
+			layout.bufferVars.push_back(entry);
+
+			curOffset += stride*topLevelArraySize;
+		}
+		else if (elemType.isBasicType() && glu::isDataTypeMatrix(elemType.getBasicType()))
+		{
+			// Array of matrices.
+			const glu::DataType		elemBasicType	= elemType.getBasicType();
+			const bool				isRowMajor		= !!(combinedFlags & LAYOUT_ROW_MAJOR);
+			const int				vecSize			= isRowMajor ? glu::getDataTypeMatrixNumColumns(elemBasicType)
+																 : glu::getDataTypeMatrixNumRows(elemBasicType);
+			const int				numVecs			= isRowMajor ? glu::getDataTypeMatrixNumRows(elemBasicType)
+																 : glu::getDataTypeMatrixNumColumns(elemBasicType);
+			const glu::DataType		vecType			= glu::getDataTypeFloatVec(vecSize);
+			const int				vecBaseAlign	= getDataTypeByteAlignment(vecType);
+			const int				stride			= isStd140 ? deAlign32(vecBaseAlign, vec4Align) : vecBaseAlign;
+			BufferVarLayoutEntry	entry;
+
+			entry.name					= prefix;
+			entry.topLevelArraySize		= 1;
+			entry.topLevelArrayStride	= 0;
+			entry.type					= elemBasicType;
+			entry.blockNdx				= curBlockNdx;
+			entry.offset				= curOffset;
+			entry.arraySize				= topLevelArraySize;
+			entry.arrayStride			= stride*numVecs;
+			entry.matrixStride			= stride;
+			entry.isRowMajor			= isRowMajor;
+
+			layout.bufferVars.push_back(entry);
+
+			curOffset += stride*numVecs*topLevelArraySize;
+		}
+		else
+		{
+			DE_ASSERT(elemType.isStructType() || elemType.isArrayType());
+
+			// The struct base alignment is not added multiple times, as the curOffset supplied to computeReferenceLayout
+			// is already aligned correctly. Thus computeReferenceLayout should not add any extra padding
+			// before the struct. Padding after the struct is still added as required.
+			//
+			// The stride could be computed before creating the child elements, but that would essentially require
+			// running the layout computation twice. Instead, we patch the stride into the child elements afterwards.
+
+			const int	firstChildNdx	= (int)layout.bufferVars.size();
+			const int	stride			= computeReferenceLayout(layout, curBlockNdx, curOffset, prefix, varType.getElementType(), combinedFlags);
+
+			for (int childNdx = firstChildNdx; childNdx < (int)layout.bufferVars.size(); childNdx++)
+			{
+				layout.bufferVars[childNdx].topLevelArraySize	= topLevelArraySize;
+				layout.bufferVars[childNdx].topLevelArrayStride	= stride;
+			}
+
+			curOffset += stride*topLevelArraySize;
+		}
+
+		return curOffset-baseOffset;
+	}
+	else
+		return computeReferenceLayout(layout, curBlockNdx, baseOffset, blockPrefix + bufVar.getName(), varType, combinedFlags);
+}
+
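+// Computes the reference layout for every block in the shader interface. Arrayed blocks get one
+// BlockLayoutEntry per instance, all sharing the same size and active variable indices.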
+void computeReferenceLayout (BufferLayout& layout, const ShaderInterface& interface)
+{
+	int numBlocks = interface.getNumBlocks();
+
+	for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+	{
+		const BufferBlock&	block			= interface.getBlock(blockNdx);
+		bool				hasInstanceName	= block.getInstanceName() != DE_NULL;
+		std::string			blockPrefix		= hasInstanceName ? (std::string(block.getBlockName()) + ".") : std::string("");
+		int					curOffset		= 0;
+		int					activeBlockNdx	= (int)layout.blocks.size();
+		int					firstVarNdx		= (int)layout.bufferVars.size();
+
+		for (BufferBlock::const_iterator varIter = block.begin(); varIter != block.end(); varIter++)
+		{
+			const BufferVar& bufVar = *varIter;
+			curOffset += computeReferenceLayout(layout, activeBlockNdx, blockPrefix, curOffset, bufVar, block.getFlags());
+		}
+
+		int	varIndicesEnd	= (int)layout.bufferVars.size();
+		int	blockSize		= curOffset;
+		int	numInstances	= block.isArray() ? block.getArraySize() : 1;
+
+		// Create block layout entries for each instance.
+		for (int instanceNdx = 0; instanceNdx < numInstances; instanceNdx++)
+		{
+			// Allocate entry for instance.
+			layout.blocks.push_back(BlockLayoutEntry());
+			BlockLayoutEntry& blockEntry = layout.blocks.back();
+
+			blockEntry.name = block.getBlockName();
+			blockEntry.size = blockSize;
+
+			// Compute active variable set for block.
+			for (int varNdx = firstVarNdx; varNdx < varIndicesEnd; varNdx++)
+				blockEntry.activeVarIndices.push_back(varNdx);
+
+			if (block.isArray())
+				blockEntry.name += "[" + de::toString(instanceNdx) + "]";
+		}
+	}
+}
+
+// Value generator.
+
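+// Fills the memory region described by a layout entry with small random values, walking the
+// top-level array, element array, matrix vector and component strides. Unsized array dimensions
+// take their element count from unsizedArraySize.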
+void generateValue (const BufferVarLayoutEntry& entry, int unsizedArraySize, void* basePtr, de::Random& rnd)
+{
+	const glu::DataType	scalarType		= glu::getDataTypeScalarType(entry.type);
+	const int			scalarSize		= glu::getDataTypeScalarSize(entry.type);
+	const int			arraySize		= entry.arraySize == 0 ? unsizedArraySize : entry.arraySize;
+	const int			arrayStride		= entry.arrayStride;
+	const int			topLevelSize	= entry.topLevelArraySize == 0 ? unsizedArraySize : entry.topLevelArraySize;
+	const int			topLevelStride	= entry.topLevelArrayStride;
+	const bool			isMatrix		= glu::isDataTypeMatrix(entry.type);
+	const int			numVecs			= isMatrix ? (entry.isRowMajor ? glu::getDataTypeMatrixNumRows(entry.type) : glu::getDataTypeMatrixNumColumns(entry.type)) : 1;
+	const int			vecSize			= scalarSize / numVecs;
+	const int			compSize		= sizeof(deUint32);
+
+	DE_ASSERT(scalarSize%numVecs == 0);
+	DE_ASSERT(topLevelSize >= 0);
+	DE_ASSERT(arraySize >= 0);
+
+	for (int topElemNdx = 0; topElemNdx < topLevelSize; topElemNdx++)
+	{
+		deUint8* const topElemPtr = (deUint8*)basePtr + entry.offset + topElemNdx*topLevelStride;
+
+		for (int elemNdx = 0; elemNdx < arraySize; elemNdx++)
+		{
+			deUint8* const elemPtr = topElemPtr + elemNdx*arrayStride;
+
+			for (int vecNdx = 0; vecNdx < numVecs; vecNdx++)
+			{
+				deUint8* const vecPtr = elemPtr + (isMatrix ? vecNdx*entry.matrixStride : 0);
+
+				for (int compNdx = 0; compNdx < vecSize; compNdx++)
+				{
+					deUint8* const compPtr = vecPtr + compSize*compNdx;
+
+					switch (scalarType)
+					{
+						case glu::TYPE_FLOAT:	*((float*)compPtr)		= (float)rnd.getInt(-9, 9);						break;
+						case glu::TYPE_INT:		*((int*)compPtr)		= rnd.getInt(-9, 9);							break;
+						case glu::TYPE_UINT:	*((deUint32*)compPtr)	= (deUint32)rnd.getInt(0, 9);					break;
+						// \note Random bit pattern is used for true values. Spec states that all non-zero values are
+						//       interpreted as true but some implementations fail this.
+						case glu::TYPE_BOOL:	*((deUint32*)compPtr)	= rnd.getBool() ? rnd.getUint32()|1u : 0u;		break;
+						default:
+							DE_ASSERT(false);
+					}
+				}
+			}
+		}
+	}
+}
+
+void generateValues (const BufferLayout& layout, const vector<BlockDataPtr>& blockPointers, deUint32 seed)
+{
+	de::Random	rnd			(seed);
+	const int	numBlocks	= (int)layout.blocks.size();
+
+	DE_ASSERT(numBlocks == (int)blockPointers.size());
+
+	for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+	{
+		const BlockLayoutEntry&	blockLayout	= layout.blocks[blockNdx];
+		const BlockDataPtr&		blockPtr	= blockPointers[blockNdx];
+		const int				numEntries	= (int)layout.blocks[blockNdx].activeVarIndices.size();
+
+		for (int entryNdx = 0; entryNdx < numEntries; entryNdx++)
+		{
+			const int					varNdx		= blockLayout.activeVarIndices[entryNdx];
+			const BufferVarLayoutEntry&	varEntry	= layout.bufferVars[varNdx];
+
+			generateValue(varEntry, blockPtr.lastUnsizedArraySize, blockPtr.ptr, rnd);
+		}
+	}
+}
+
+// Shader generator.
+
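+// Returns GLSL source for the comparison helper of the given basic type: floats are compared with
+// a 0.05 tolerance, integer and bool types are compared exactly.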
+const char* getCompareFuncForType (glu::DataType type)
+{
+	switch (type)
+	{
+		case glu::TYPE_FLOAT:			return "bool compare_float    (highp float a, highp float b)  { return abs(a - b) < 0.05; }\n";
+		case glu::TYPE_FLOAT_VEC2:		return "bool compare_vec2     (highp vec2 a, highp vec2 b)    { return compare_float(a.x, b.x)&&compare_float(a.y, b.y); }\n";
+		case glu::TYPE_FLOAT_VEC3:		return "bool compare_vec3     (highp vec3 a, highp vec3 b)    { return compare_float(a.x, b.x)&&compare_float(a.y, b.y)&&compare_float(a.z, b.z); }\n";
+		case glu::TYPE_FLOAT_VEC4:		return "bool compare_vec4     (highp vec4 a, highp vec4 b)    { return compare_float(a.x, b.x)&&compare_float(a.y, b.y)&&compare_float(a.z, b.z)&&compare_float(a.w, b.w); }\n";
+		case glu::TYPE_FLOAT_MAT2:		return "bool compare_mat2     (highp mat2 a, highp mat2 b)    { return compare_vec2(a[0], b[0])&&compare_vec2(a[1], b[1]); }\n";
+		case glu::TYPE_FLOAT_MAT2X3:	return "bool compare_mat2x3   (highp mat2x3 a, highp mat2x3 b){ return compare_vec3(a[0], b[0])&&compare_vec3(a[1], b[1]); }\n";
+		case glu::TYPE_FLOAT_MAT2X4:	return "bool compare_mat2x4   (highp mat2x4 a, highp mat2x4 b){ return compare_vec4(a[0], b[0])&&compare_vec4(a[1], b[1]); }\n";
+		case glu::TYPE_FLOAT_MAT3X2:	return "bool compare_mat3x2   (highp mat3x2 a, highp mat3x2 b){ return compare_vec2(a[0], b[0])&&compare_vec2(a[1], b[1])&&compare_vec2(a[2], b[2]); }\n";
+		case glu::TYPE_FLOAT_MAT3:		return "bool compare_mat3     (highp mat3 a, highp mat3 b)    { return compare_vec3(a[0], b[0])&&compare_vec3(a[1], b[1])&&compare_vec3(a[2], b[2]); }\n";
+		case glu::TYPE_FLOAT_MAT3X4:	return "bool compare_mat3x4   (highp mat3x4 a, highp mat3x4 b){ return compare_vec4(a[0], b[0])&&compare_vec4(a[1], b[1])&&compare_vec4(a[2], b[2]); }\n";
+		case glu::TYPE_FLOAT_MAT4X2:	return "bool compare_mat4x2   (highp mat4x2 a, highp mat4x2 b){ return compare_vec2(a[0], b[0])&&compare_vec2(a[1], b[1])&&compare_vec2(a[2], b[2])&&compare_vec2(a[3], b[3]); }\n";
+		case glu::TYPE_FLOAT_MAT4X3:	return "bool compare_mat4x3   (highp mat4x3 a, highp mat4x3 b){ return compare_vec3(a[0], b[0])&&compare_vec3(a[1], b[1])&&compare_vec3(a[2], b[2])&&compare_vec3(a[3], b[3]); }\n";
+		case glu::TYPE_FLOAT_MAT4:		return "bool compare_mat4     (highp mat4 a, highp mat4 b)    { return compare_vec4(a[0], b[0])&&compare_vec4(a[1], b[1])&&compare_vec4(a[2], b[2])&&compare_vec4(a[3], b[3]); }\n";
+		case glu::TYPE_INT:				return "bool compare_int      (highp int a, highp int b)      { return a == b; }\n";
+		case glu::TYPE_INT_VEC2:		return "bool compare_ivec2    (highp ivec2 a, highp ivec2 b)  { return a == b; }\n";
+		case glu::TYPE_INT_VEC3:		return "bool compare_ivec3    (highp ivec3 a, highp ivec3 b)  { return a == b; }\n";
+		case glu::TYPE_INT_VEC4:		return "bool compare_ivec4    (highp ivec4 a, highp ivec4 b)  { return a == b; }\n";
+		case glu::TYPE_UINT:			return "bool compare_uint     (highp uint a, highp uint b)    { return a == b; }\n";
+		case glu::TYPE_UINT_VEC2:		return "bool compare_uvec2    (highp uvec2 a, highp uvec2 b)  { return a == b; }\n";
+		case glu::TYPE_UINT_VEC3:		return "bool compare_uvec3    (highp uvec3 a, highp uvec3 b)  { return a == b; }\n";
+		case glu::TYPE_UINT_VEC4:		return "bool compare_uvec4    (highp uvec4 a, highp uvec4 b)  { return a == b; }\n";
+		case glu::TYPE_BOOL:			return "bool compare_bool     (bool a, bool b)                { return a == b; }\n";
+		case glu::TYPE_BOOL_VEC2:		return "bool compare_bvec2    (bvec2 a, bvec2 b)              { return a == b; }\n";
+		case glu::TYPE_BOOL_VEC3:		return "bool compare_bvec3    (bvec3 a, bvec3 b)              { return a == b; }\n";
+		case glu::TYPE_BOOL_VEC4:		return "bool compare_bvec4    (bvec4 a, bvec4 b)              { return a == b; }\n";
+		default:
+			DE_ASSERT(false);
+			return DE_NULL;
+	}
+}
+
+void getCompareDependencies (std::set<glu::DataType>& compareFuncs, glu::DataType basicType)
+{
+	switch (basicType)
+	{
+		case glu::TYPE_FLOAT_VEC2:
+		case glu::TYPE_FLOAT_VEC3:
+		case glu::TYPE_FLOAT_VEC4:
+			compareFuncs.insert(glu::TYPE_FLOAT);
+			compareFuncs.insert(basicType);
+			break;
+
+		case glu::TYPE_FLOAT_MAT2:
+		case glu::TYPE_FLOAT_MAT2X3:
+		case glu::TYPE_FLOAT_MAT2X4:
+		case glu::TYPE_FLOAT_MAT3X2:
+		case glu::TYPE_FLOAT_MAT3:
+		case glu::TYPE_FLOAT_MAT3X4:
+		case glu::TYPE_FLOAT_MAT4X2:
+		case glu::TYPE_FLOAT_MAT4X3:
+		case glu::TYPE_FLOAT_MAT4:
+			compareFuncs.insert(glu::TYPE_FLOAT);
+			compareFuncs.insert(glu::getDataTypeFloatVec(glu::getDataTypeMatrixNumRows(basicType)));
+			compareFuncs.insert(basicType);
+			break;
+
+		default:
+			compareFuncs.insert(basicType);
+			break;
+	}
+}
+
+void collectUniqueBasicTypes (std::set<glu::DataType>& basicTypes, const VarType& type)
+{
+	if (type.isStructType())
+	{
+		for (StructType::ConstIterator iter = type.getStructPtr()->begin(); iter != type.getStructPtr()->end(); ++iter)
+			collectUniqueBasicTypes(basicTypes, iter->getType());
+	}
+	else if (type.isArrayType())
+		collectUniqueBasicTypes(basicTypes, type.getElementType());
+	else
+	{
+		DE_ASSERT(type.isBasicType());
+		basicTypes.insert(type.getBasicType());
+	}
+}
+
+void collectUniqueBasicTypes (std::set<glu::DataType>& basicTypes, const BufferBlock& bufferBlock)
+{
+	for (BufferBlock::const_iterator iter = bufferBlock.begin(); iter != bufferBlock.end(); ++iter)
+		collectUniqueBasicTypes(basicTypes, iter->getType());
+}
+
+void collectUniqueBasicTypes (std::set<glu::DataType>& basicTypes, const ShaderInterface& interface)
+{
+	for (int ndx = 0; ndx < interface.getNumBlocks(); ++ndx)
+		collectUniqueBasicTypes(basicTypes, interface.getBlock(ndx));
+}
+
+void generateCompareFuncs (std::ostream& str, const ShaderInterface& interface)
+{
+	std::set<glu::DataType> types;
+	std::set<glu::DataType> compareFuncs;
+
+	// Collect unique basic types
+	collectUniqueBasicTypes(types, interface);
+
+	// Set of compare functions required
+	for (std::set<glu::DataType>::const_iterator iter = types.begin(); iter != types.end(); ++iter)
+	{
+		getCompareDependencies(compareFuncs, *iter);
+	}
+
+	for (int type = 0; type < glu::TYPE_LAST; ++type)
+	{
+		if (compareFuncs.find(glu::DataType(type)) != compareFuncs.end())
+			str << getCompareFuncForType(glu::DataType(type));
+	}
+}
+
+struct Indent
+{
+	int level;
+	Indent (int level_) : level(level_) {}
+};
+
+std::ostream& operator<< (std::ostream& str, const Indent& indent)
+{
+	for (int i = 0; i < indent.level; i++)
+		str << "\t";
+	return str;
+}
+
+void generateDeclaration (std::ostream& src, const BufferVar& bufferVar, int indentLevel)
+{
+	// \todo [pyry] Qualifiers
+
+	if ((bufferVar.getFlags() & LAYOUT_MASK) != 0)
+		src << "layout(" << LayoutFlagsFmt(bufferVar.getFlags() & LAYOUT_MASK) << ") ";
+
+	src << glu::declare(bufferVar.getType(), bufferVar.getName(), indentLevel);
+}
+
+void generateDeclaration (std::ostream& src, const BufferBlock& block, int bindingPoint)
+{
+	src << "layout(";
+
+	if ((block.getFlags() & LAYOUT_MASK) != 0)
+		src << LayoutFlagsFmt(block.getFlags() & LAYOUT_MASK) << ", ";
+
+	src << "binding = " << bindingPoint;
+
+	src << ") ";
+
+	src << "buffer " << block.getBlockName();
+	src << "\n{\n";
+
+	for (BufferBlock::const_iterator varIter = block.begin(); varIter != block.end(); varIter++)
+	{
+		src << Indent(1);
+		generateDeclaration(src, *varIter, 1 /* indent level */);
+		src << ";\n";
+	}
+
+	src << "}";
+
+	if (block.getInstanceName() != DE_NULL)
+	{
+		src << " " << block.getInstanceName();
+		if (block.isArray())
+			src << "[" << block.getArraySize() << "]";
+	}
+	else
+		DE_ASSERT(!block.isArray());
+
+	src << ";\n";
+}
+
+void generateImmMatrixSrc (std::ostream& src, glu::DataType basicType, int matrixStride, bool isRowMajor, const void* valuePtr)
+{
+	DE_ASSERT(glu::isDataTypeMatrix(basicType));
+
+	const int		compSize		= sizeof(deUint32);
+	const int		numRows			= glu::getDataTypeMatrixNumRows(basicType);
+	const int		numCols			= glu::getDataTypeMatrixNumColumns(basicType);
+
+	src << glu::getDataTypeName(basicType) << "(";
+
+	// Constructed in column-wise order.
+	for (int colNdx = 0; colNdx < numCols; colNdx++)
+	{
+		for (int rowNdx = 0; rowNdx < numRows; rowNdx++)
+		{
+			const deUint8*	compPtr	= (const deUint8*)valuePtr + (isRowMajor ? rowNdx*matrixStride + colNdx*compSize
+																				: colNdx*matrixStride + rowNdx*compSize);
+
+			if (colNdx > 0 || rowNdx > 0)
+				src << ", ";
+
+			src << de::floatToString(*((const float*)compPtr), 1);
+		}
+	}
+
+	src << ")";
+}
+
+void generateImmScalarVectorSrc (std::ostream& src, glu::DataType basicType, const void* valuePtr)
+{
+	DE_ASSERT(glu::isDataTypeFloatOrVec(basicType)	||
+			  glu::isDataTypeIntOrIVec(basicType)	||
+			  glu::isDataTypeUintOrUVec(basicType)	||
+			  glu::isDataTypeBoolOrBVec(basicType));
+
+	const glu::DataType		scalarType		= glu::getDataTypeScalarType(basicType);
+	const int				scalarSize		= glu::getDataTypeScalarSize(basicType);
+	const int				compSize		= sizeof(deUint32);
+
+	if (scalarSize > 1)
+		src << glu::getDataTypeName(basicType) << "(";
+
+	for (int scalarNdx = 0; scalarNdx < scalarSize; scalarNdx++)
+	{
+		const deUint8* compPtr = (const deUint8*)valuePtr + scalarNdx*compSize;
+
+		if (scalarNdx > 0)
+			src << ", ";
+
+		switch (scalarType)
+		{
+			case glu::TYPE_FLOAT:	src << de::floatToString(*((const float*)compPtr), 1);			break;
+			case glu::TYPE_INT:		src << *((const int*)compPtr);									break;
+			case glu::TYPE_UINT:	src << *((const deUint32*)compPtr) << "u";						break;
+			case glu::TYPE_BOOL:	src << (*((const deUint32*)compPtr) != 0u ? "true" : "false");	break;
+			default:
+				DE_ASSERT(false);
+		}
+	}
+
+	if (scalarSize > 1)
+		src << ")";
+}
+
+string getAPIName (const BufferBlock& block, const BufferVar& var, const glu::TypeComponentVector& accessPath)
+{
+	std::ostringstream name;
+
+	if (block.getInstanceName())
+		name << block.getBlockName() << ".";
+
+	name << var.getName();
+
+	for (glu::TypeComponentVector::const_iterator pathComp = accessPath.begin(); pathComp != accessPath.end(); pathComp++)
+	{
+		if (pathComp->type == glu::VarTypeComponent::STRUCT_MEMBER)
+		{
+			const VarType		curType		= glu::getVarType(var.getType(), accessPath.begin(), pathComp);
+			const StructType*	structPtr	= curType.getStructPtr();
+
+			name << "." << structPtr->getMember(pathComp->index).getName();
+		}
+		else if (pathComp->type == glu::VarTypeComponent::ARRAY_ELEMENT)
+		{
+			if (pathComp == accessPath.begin() || (pathComp+1) == accessPath.end())
+				name << "[0]"; // Top- / bottom-level array
+			else
+				name << "[" << pathComp->index << "]";
+		}
+		else
+			DE_ASSERT(false);
+	}
+
+	return name.str();
+}
+
+string getShaderName (const BufferBlock& block, int instanceNdx, const BufferVar& var, const glu::TypeComponentVector& accessPath)
+{
+	std::ostringstream name;
+
+	if (block.getInstanceName())
+	{
+		name << block.getInstanceName();
+
+		if (block.isArray())
+			name << "[" << instanceNdx << "]";
+
+		name << ".";
+	}
+	else
+		DE_ASSERT(instanceNdx == 0);
+
+	name << var.getName();
+
+	for (glu::TypeComponentVector::const_iterator pathComp = accessPath.begin(); pathComp != accessPath.end(); pathComp++)
+	{
+		if (pathComp->type == glu::VarTypeComponent::STRUCT_MEMBER)
+		{
+			const VarType		curType		= glu::getVarType(var.getType(), accessPath.begin(), pathComp);
+			const StructType*	structPtr	= curType.getStructPtr();
+
+			name << "." << structPtr->getMember(pathComp->index).getName();
+		}
+		else if (pathComp->type == glu::VarTypeComponent::ARRAY_ELEMENT)
+			name << "[" << pathComp->index << "]";
+		else
+			DE_ASSERT(false);
+	}
+
+	return name.str();
+}
+
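+// Computes the byte offset of a basic-type element: the variable's base offset plus
+// topLevelArrayStride times the outermost array index plus arrayStride times the innermost one.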
+int computeOffset (const BufferVarLayoutEntry& varLayout, const glu::TypeComponentVector& accessPath)
+{
+	const int	topLevelNdx		= (accessPath.size() > 1 && accessPath.front().type == glu::VarTypeComponent::ARRAY_ELEMENT) ? accessPath.front().index : 0;
+	const int	bottomLevelNdx	= (!accessPath.empty() && accessPath.back().type == glu::VarTypeComponent::ARRAY_ELEMENT) ? accessPath.back().index : 0;
+
+	return varLayout.offset + varLayout.topLevelArrayStride*topLevelNdx + varLayout.arrayStride*bottomLevelNdx;
+}
+
+void generateCompareSrc (
+	std::ostream&				src,
+	const char*					resultVar,
+	const BufferLayout&			bufferLayout,
+	const BufferBlock&			block,
+	int							instanceNdx,
+	const BlockDataPtr&			blockPtr,
+	const BufferVar&			bufVar,
+	const glu::SubTypeAccess&	accessPath)
+{
+	const VarType curType = accessPath.getType();
+
+	if (curType.isArrayType())
+	{
+		const int arraySize = curType.getArraySize() == VarType::UNSIZED_ARRAY ? block.getLastUnsizedArraySize(instanceNdx) : curType.getArraySize();
+
+		for (int elemNdx = 0; elemNdx < arraySize; elemNdx++)
+			generateCompareSrc(src, resultVar, bufferLayout, block, instanceNdx, blockPtr, bufVar, accessPath.element(elemNdx));
+	}
+	else if (curType.isStructType())
+	{
+		const int numMembers = curType.getStructPtr()->getNumMembers();
+
+		for (int memberNdx = 0; memberNdx < numMembers; memberNdx++)
+			generateCompareSrc(src, resultVar, bufferLayout, block, instanceNdx, blockPtr, bufVar, accessPath.member(memberNdx));
+	}
+	else
+	{
+		DE_ASSERT(curType.isBasicType());
+
+		const string	apiName	= getAPIName(block, bufVar, accessPath.getPath());
+		const int		varNdx	= bufferLayout.getVariableIndex(apiName);
+
+		DE_ASSERT(varNdx >= 0);
+		{
+			const BufferVarLayoutEntry&	varLayout		= bufferLayout.bufferVars[varNdx];
+			const string				shaderName		= getShaderName(block, instanceNdx, bufVar, accessPath.getPath());
+			const glu::DataType			basicType		= curType.getBasicType();
+			const bool					isMatrix		= glu::isDataTypeMatrix(basicType);
+			const char*					typeName		= glu::getDataTypeName(basicType);
+			const void*					valuePtr		= (const deUint8*)blockPtr.ptr + computeOffset(varLayout, accessPath.getPath());
+
+			src << "\t" << resultVar << " = " << resultVar << " && compare_" << typeName << "(" << shaderName << ", ";
+
+			if (isMatrix)
+				generateImmMatrixSrc(src, basicType, varLayout.matrixStride, varLayout.isRowMajor, valuePtr);
+			else
+				generateImmScalarVectorSrc(src, basicType, valuePtr);
+
+			src << ");\n";
+		}
+	}
+}
+
+void generateCompareSrc (std::ostream& src, const char* resultVar, const ShaderInterface& interface, const BufferLayout& layout, const vector<BlockDataPtr>& blockPointers)
+{
+	for (int declNdx = 0; declNdx < interface.getNumBlocks(); declNdx++)
+	{
+		const BufferBlock&	block			= interface.getBlock(declNdx);
+		const bool			isArray			= block.isArray();
+		const int			numInstances	= isArray ? block.getArraySize() : 1;
+
+		DE_ASSERT(!isArray || block.getInstanceName());
+
+		for (int instanceNdx = 0; instanceNdx < numInstances; instanceNdx++)
+		{
+			const string		instanceName	= block.getBlockName() + (isArray ? "[" + de::toString(instanceNdx) + "]" : string(""));
+			const int			blockNdx		= layout.getBlockIndex(instanceName);
+			const BlockDataPtr&	blockPtr		= blockPointers[blockNdx];
+
+			for (BufferBlock::const_iterator varIter = block.begin(); varIter != block.end(); varIter++)
+			{
+				const BufferVar& bufVar = *varIter;
+
+				if ((bufVar.getFlags() & ACCESS_READ) == 0)
+					continue; // Don't read from that variable.
+
+				generateCompareSrc(src, resultVar, layout, block, instanceNdx, blockPtr, bufVar, glu::SubTypeAccess(bufVar.getType()));
+			}
+		}
+	}
+}
+
+// \todo [2013-10-14 pyry] Almost identical to generateCompareSrc - unify?
+
+void generateWriteSrc (
+	std::ostream&				src,
+	const BufferLayout&			bufferLayout,
+	const BufferBlock&			block,
+	int							instanceNdx,
+	const BlockDataPtr&			blockPtr,
+	const BufferVar&			bufVar,
+	const glu::SubTypeAccess&	accessPath)
+{
+	const VarType curType = accessPath.getType();
+
+	if (curType.isArrayType())
+	{
+		const int arraySize = curType.getArraySize() == VarType::UNSIZED_ARRAY ? block.getLastUnsizedArraySize(instanceNdx) : curType.getArraySize();
+
+		for (int elemNdx = 0; elemNdx < arraySize; elemNdx++)
+			generateWriteSrc(src, bufferLayout, block, instanceNdx, blockPtr, bufVar, accessPath.element(elemNdx));
+	}
+	else if (curType.isStructType())
+	{
+		const int numMembers = curType.getStructPtr()->getNumMembers();
+
+		for (int memberNdx = 0; memberNdx < numMembers; memberNdx++)
+			generateWriteSrc(src, bufferLayout, block, instanceNdx, blockPtr, bufVar, accessPath.member(memberNdx));
+	}
+	else
+	{
+		DE_ASSERT(curType.isBasicType());
+
+		const string	apiName	= getAPIName(block, bufVar, accessPath.getPath());
+		const int		varNdx	= bufferLayout.getVariableIndex(apiName);
+
+		DE_ASSERT(varNdx >= 0);
+		{
+			const BufferVarLayoutEntry&	varLayout		= bufferLayout.bufferVars[varNdx];
+			const string				shaderName		= getShaderName(block, instanceNdx, bufVar, accessPath.getPath());
+			const glu::DataType			basicType		= curType.getBasicType();
+			const bool					isMatrix		= glu::isDataTypeMatrix(basicType);
+			const void*					valuePtr		= (const deUint8*)blockPtr.ptr + computeOffset(varLayout, accessPath.getPath());
+
+			src << "\t" << shaderName << " = ";
+
+			if (isMatrix)
+				generateImmMatrixSrc(src, basicType, varLayout.matrixStride, varLayout.isRowMajor, valuePtr);
+			else
+				generateImmScalarVectorSrc(src, basicType, valuePtr);
+
+			src << ";\n";
+		}
+	}
+}
+
+void generateWriteSrc (std::ostream& src, const ShaderInterface& interface, const BufferLayout& layout, const vector<BlockDataPtr>& blockPointers)
+{
+	for (int declNdx = 0; declNdx < interface.getNumBlocks(); declNdx++)
+	{
+		const BufferBlock&	block			= interface.getBlock(declNdx);
+		const bool			isArray			= block.isArray();
+		const int			numInstances	= isArray ? block.getArraySize() : 1;
+
+		DE_ASSERT(!isArray || block.getInstanceName());
+
+		for (int instanceNdx = 0; instanceNdx < numInstances; instanceNdx++)
+		{
+			const string		instanceName	= block.getBlockName() + (isArray ? "[" + de::toString(instanceNdx) + "]" : string(""));
+			const int			blockNdx		= layout.getBlockIndex(instanceName);
+			const BlockDataPtr&	blockPtr		= blockPointers[blockNdx];
+
+			for (BufferBlock::const_iterator varIter = block.begin(); varIter != block.end(); varIter++)
+			{
+				const BufferVar& bufVar = *varIter;
+
+				if ((bufVar.getFlags() & ACCESS_WRITE) == 0)
+					continue; // Don't write to that variable.
+
+				generateWriteSrc(src, layout, block, instanceNdx, blockPtr, bufVar, glu::SubTypeAccess(bufVar.getType()));
+			}
+		}
+	}
+}
+
+string generateComputeShader (const ShaderInterface& interface, const BufferLayout& layout, const vector<BlockDataPtr>& comparePtrs, const vector<BlockDataPtr>& writePtrs)
+{
+	std::ostringstream src;
+
+	src << "#version 450\n";
+	src << "layout(local_size_x = 1) in;\n";
+	src << "\n";
+
+	// Atomic counter for counting passed invocations.
+	src << "layout(std140, binding = 0) buffer AcBlock { highp uint ac_numPassed; };\n\n";
+
+	std::vector<const StructType*> namedStructs;
+	interface.getNamedStructs(namedStructs);
+	for (std::vector<const StructType*>::const_iterator structIter = namedStructs.begin(); structIter != namedStructs.end(); structIter++)
+		src << glu::declare(*structIter) << ";\n";
+
+	{
+		int bindingPoint = 1;
+
+		for (int blockNdx = 0; blockNdx < interface.getNumBlocks(); blockNdx++)
+		{
+			const BufferBlock& block = interface.getBlock(blockNdx);
+			generateDeclaration(src, block, bindingPoint);
+
+			bindingPoint += block.isArray() ? block.getArraySize() : 1;
+		}
+	}
+
+	// Comparison utilities.
+	src << "\n";
+	generateCompareFuncs(src, interface);
+
+	src << "\n"
+		   "void main (void)\n"
+		   "{\n"
+		   "	bool allOk = true;\n";
+
+	// Value compare.
+	generateCompareSrc(src, "allOk", interface, layout, comparePtrs);
+
+	src << "	if (allOk)\n"
+		<< "		ac_numPassed++;\n"
+		<< "\n";
+
+	// Value write.
+	generateWriteSrc(src, interface, layout, writePtrs);
+
+	src << "}\n";
+
+	return src.str();
+}
+
+void copyBufferVarData (const BufferVarLayoutEntry& dstEntry, const BlockDataPtr& dstBlockPtr, const BufferVarLayoutEntry& srcEntry, const BlockDataPtr& srcBlockPtr)
+{
+	DE_ASSERT(dstEntry.arraySize <= srcEntry.arraySize);
+	DE_ASSERT(dstEntry.topLevelArraySize <= srcEntry.topLevelArraySize);
+	DE_ASSERT(dstBlockPtr.lastUnsizedArraySize <= srcBlockPtr.lastUnsizedArraySize);
+	DE_ASSERT(dstEntry.type == srcEntry.type);
+
+	deUint8* const			dstBasePtr			= (deUint8*)dstBlockPtr.ptr + dstEntry.offset;
+	const deUint8* const	srcBasePtr			= (const deUint8*)srcBlockPtr.ptr + srcEntry.offset;
+	const int				scalarSize			= glu::getDataTypeScalarSize(dstEntry.type);
+	const bool				isMatrix			= glu::isDataTypeMatrix(dstEntry.type);
+	const int				compSize			= sizeof(deUint32);
+	const int				dstArraySize		= dstEntry.arraySize == 0 ? dstBlockPtr.lastUnsizedArraySize : dstEntry.arraySize;
+	const int				dstArrayStride		= dstEntry.arrayStride;
+	const int				dstTopLevelSize		= dstEntry.topLevelArraySize == 0 ? dstBlockPtr.lastUnsizedArraySize : dstEntry.topLevelArraySize;
+	const int				dstTopLevelStride	= dstEntry.topLevelArrayStride;
+	const int				srcArraySize		= srcEntry.arraySize == 0 ? srcBlockPtr.lastUnsizedArraySize : srcEntry.arraySize;
+	const int				srcArrayStride		= srcEntry.arrayStride;
+	const int				srcTopLevelSize		= srcEntry.topLevelArraySize == 0 ? srcBlockPtr.lastUnsizedArraySize : srcEntry.topLevelArraySize;
+	const int				srcTopLevelStride	= srcEntry.topLevelArrayStride;
+
+	DE_ASSERT(dstArraySize <= srcArraySize && dstTopLevelSize <= srcTopLevelSize);
+	DE_UNREF(srcArraySize && srcTopLevelSize);
+
+	for (int topElemNdx = 0; topElemNdx < dstTopLevelSize; topElemNdx++)
+	{
+		deUint8* const			dstTopPtr	= dstBasePtr + topElemNdx*dstTopLevelStride;
+		const deUint8* const	srcTopPtr	= srcBasePtr + topElemNdx*srcTopLevelStride;
+
+		for (int elementNdx = 0; elementNdx < dstArraySize; elementNdx++)
+		{
+			deUint8* const			dstElemPtr	= dstTopPtr + elementNdx*dstArrayStride;
+			const deUint8* const	srcElemPtr	= srcTopPtr + elementNdx*srcArrayStride;
+
+			if (isMatrix)
+			{
+				const int	numRows	= glu::getDataTypeMatrixNumRows(dstEntry.type);
+				const int	numCols	= glu::getDataTypeMatrixNumColumns(dstEntry.type);
+
+				for (int colNdx = 0; colNdx < numCols; colNdx++)
+				{
+					for (int rowNdx = 0; rowNdx < numRows; rowNdx++)
+					{
+						deUint8*		dstCompPtr	= dstElemPtr + (dstEntry.isRowMajor ? rowNdx*dstEntry.matrixStride + colNdx*compSize
+																						: colNdx*dstEntry.matrixStride + rowNdx*compSize);
+						const deUint8*	srcCompPtr	= srcElemPtr + (srcEntry.isRowMajor ? rowNdx*srcEntry.matrixStride + colNdx*compSize
+																						: colNdx*srcEntry.matrixStride + rowNdx*compSize);
+
+						DE_ASSERT((deIntptr)(srcCompPtr + compSize) - (deIntptr)srcBlockPtr.ptr <= (deIntptr)srcBlockPtr.size);
+						DE_ASSERT((deIntptr)(dstCompPtr + compSize) - (deIntptr)dstBlockPtr.ptr <= (deIntptr)dstBlockPtr.size);
+						deMemcpy(dstCompPtr, srcCompPtr, compSize);
+					}
+				}
+			}
+			else
+			{
+				DE_ASSERT((deIntptr)(srcElemPtr + scalarSize*compSize) - (deIntptr)srcBlockPtr.ptr <= (deIntptr)srcBlockPtr.size);
+				DE_ASSERT((deIntptr)(dstElemPtr + scalarSize*compSize) - (deIntptr)dstBlockPtr.ptr <= (deIntptr)dstBlockPtr.size);
+				deMemcpy(dstElemPtr, srcElemPtr, scalarSize*compSize);
+			}
+		}
+	}
+}
+
+void copyData (const BufferLayout& dstLayout, const vector<BlockDataPtr>& dstBlockPointers, const BufferLayout& srcLayout, const vector<BlockDataPtr>& srcBlockPointers)
+{
+	// \note Src layout is used as the reference in case activeVarIndices happens to be incorrect in dstLayout blocks.
+	int numBlocks = (int)srcLayout.blocks.size();
+
+	for (int srcBlockNdx = 0; srcBlockNdx < numBlocks; srcBlockNdx++)
+	{
+		const BlockLayoutEntry&		srcBlock	= srcLayout.blocks[srcBlockNdx];
+		const BlockDataPtr&			srcBlockPtr	= srcBlockPointers[srcBlockNdx];
+		int							dstBlockNdx	= dstLayout.getBlockIndex(srcBlock.name.c_str());
+
+		if (dstBlockNdx >= 0)
+		{
+			DE_ASSERT(de::inBounds(dstBlockNdx, 0, (int)dstBlockPointers.size()));
+
+			const BlockDataPtr& dstBlockPtr = dstBlockPointers[dstBlockNdx];
+
+			for (vector<int>::const_iterator srcVarNdxIter = srcBlock.activeVarIndices.begin(); srcVarNdxIter != srcBlock.activeVarIndices.end(); srcVarNdxIter++)
+			{
+				const BufferVarLayoutEntry&	srcEntry	= srcLayout.bufferVars[*srcVarNdxIter];
+				int							dstVarNdx	= dstLayout.getVariableIndex(srcEntry.name.c_str());
+
+				if (dstVarNdx >= 0)
+					copyBufferVarData(dstLayout.bufferVars[dstVarNdx], dstBlockPtr, srcEntry, srcBlockPtr);
+			}
+		}
+	}
+}
+
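+// Copies values of variables that the shader does not write from the initial data into the
+// expected output data, so that the final comparison also covers unmodified variables.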
+void copyNonWrittenData (
+	const BufferLayout&			layout,
+	const BufferBlock&			block,
+	int							instanceNdx,
+	const BlockDataPtr&			srcBlockPtr,
+	const BlockDataPtr&			dstBlockPtr,
+	const BufferVar&			bufVar,
+	const glu::SubTypeAccess&	accessPath)
+{
+	const VarType curType = accessPath.getType();
+
+	if (curType.isArrayType())
+	{
+		const int arraySize = curType.getArraySize() == VarType::UNSIZED_ARRAY ? block.getLastUnsizedArraySize(instanceNdx) : curType.getArraySize();
+
+		for (int elemNdx = 0; elemNdx < arraySize; elemNdx++)
+			copyNonWrittenData(layout, block, instanceNdx, srcBlockPtr, dstBlockPtr, bufVar, accessPath.element(elemNdx));
+	}
+	else if (curType.isStructType())
+	{
+		const int numMembers = curType.getStructPtr()->getNumMembers();
+
+		for (int memberNdx = 0; memberNdx < numMembers; memberNdx++)
+			copyNonWrittenData(layout, block, instanceNdx, srcBlockPtr, dstBlockPtr, bufVar, accessPath.member(memberNdx));
+	}
+	else
+	{
+		DE_ASSERT(curType.isBasicType());
+
+		const string	apiName	= getAPIName(block, bufVar, accessPath.getPath());
+		const int		varNdx	= layout.getVariableIndex(apiName);
+
+		DE_ASSERT(varNdx >= 0);
+		{
+			const BufferVarLayoutEntry& varLayout = layout.bufferVars[varNdx];
+			copyBufferVarData(varLayout, dstBlockPtr, varLayout, srcBlockPtr);
+		}
+	}
+}
+
+void copyNonWrittenData (const ShaderInterface& interface, const BufferLayout& layout, const vector<BlockDataPtr>& srcPtrs, const vector<BlockDataPtr>& dstPtrs)
+{
+	for (int declNdx = 0; declNdx < interface.getNumBlocks(); declNdx++)
+	{
+		const BufferBlock&	block			= interface.getBlock(declNdx);
+		const bool			isArray			= block.isArray();
+		const int			numInstances	= isArray ? block.getArraySize() : 1;
+
+		DE_ASSERT(!isArray || block.getInstanceName());
+
+		for (int instanceNdx = 0; instanceNdx < numInstances; instanceNdx++)
+		{
+			const string		instanceName	= block.getBlockName() + (isArray ? "[" + de::toString(instanceNdx) + "]" : string(""));
+			const int			blockNdx		= layout.getBlockIndex(instanceName);
+			const BlockDataPtr&	srcBlockPtr		= srcPtrs[blockNdx];
+			const BlockDataPtr&	dstBlockPtr		= dstPtrs[blockNdx];
+
+			for (BufferBlock::const_iterator varIter = block.begin(); varIter != block.end(); varIter++)
+			{
+				const BufferVar& bufVar = *varIter;
+
+				if (bufVar.getFlags() & ACCESS_WRITE)
+					continue;
+
+				copyNonWrittenData(layout, block, instanceNdx, srcBlockPtr, dstBlockPtr, bufVar, glu::SubTypeAccess(bufVar.getType()));
+			}
+		}
+	}
+}
+
+bool compareComponents (glu::DataType scalarType, const void* ref, const void* res, int numComps)
+{
+	if (scalarType == glu::TYPE_FLOAT)
+	{
+		const float threshold = 0.05f; // Same as used in shaders - should be fine for values being used.
+
+		for (int ndx = 0; ndx < numComps; ndx++)
+		{
+			const float		refVal		= *((const float*)ref + ndx);
+			const float		resVal		= *((const float*)res + ndx);
+
+			if (deFloatAbs(resVal - refVal) >= threshold)
+				return false;
+		}
+	}
+	else if (scalarType == glu::TYPE_BOOL)
+	{
+		for (int ndx = 0; ndx < numComps; ndx++)
+		{
+			const deUint32	refVal		= *((const deUint32*)ref + ndx);
+			const deUint32	resVal		= *((const deUint32*)res + ndx);
+
+			if ((refVal != 0) != (resVal != 0))
+				return false;
+		}
+	}
+	else
+	{
+		DE_ASSERT(scalarType == glu::TYPE_INT || scalarType == glu::TYPE_UINT);
+
+		for (int ndx = 0; ndx < numComps; ndx++)
+		{
+			const deUint32	refVal		= *((const deUint32*)ref + ndx);
+			const deUint32	resVal		= *((const deUint32*)res + ndx);
+
+			if (refVal != resVal)
+				return false;
+		}
+	}
+
+	return true;
+}
+
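+// Compares a single buffer variable between result and reference data; only the first few
+// mismatches per variable are logged in detail.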
+bool compareBufferVarData (tcu::TestLog& log, const BufferVarLayoutEntry& refEntry, const BlockDataPtr& refBlockPtr, const BufferVarLayoutEntry& resEntry, const BlockDataPtr& resBlockPtr)
+{
+	DE_ASSERT(resEntry.arraySize <= refEntry.arraySize);
+	DE_ASSERT(resEntry.topLevelArraySize <= refEntry.topLevelArraySize);
+	DE_ASSERT(resBlockPtr.lastUnsizedArraySize <= refBlockPtr.lastUnsizedArraySize);
+	DE_ASSERT(resEntry.type == refEntry.type);
+
+	deUint8* const			resBasePtr			= (deUint8*)resBlockPtr.ptr + resEntry.offset;
+	const deUint8* const	refBasePtr			= (const deUint8*)refBlockPtr.ptr + refEntry.offset;
+	const glu::DataType		scalarType			= glu::getDataTypeScalarType(refEntry.type);
+	const int				scalarSize			= glu::getDataTypeScalarSize(resEntry.type);
+	const bool				isMatrix			= glu::isDataTypeMatrix(resEntry.type);
+	const int				compSize			= sizeof(deUint32);
+	const int				maxPrints			= 3;
+	int						numFailed			= 0;
+
+	const int				resArraySize		= resEntry.arraySize == 0 ? resBlockPtr.lastUnsizedArraySize : resEntry.arraySize;
+	const int				resArrayStride		= resEntry.arrayStride;
+	const int				resTopLevelSize		= resEntry.topLevelArraySize == 0 ? resBlockPtr.lastUnsizedArraySize : resEntry.topLevelArraySize;
+	const int				resTopLevelStride	= resEntry.topLevelArrayStride;
+	const int				refArraySize		= refEntry.arraySize == 0 ? refBlockPtr.lastUnsizedArraySize : refEntry.arraySize;
+	const int				refArrayStride		= refEntry.arrayStride;
+	const int				refTopLevelSize		= refEntry.topLevelArraySize == 0 ? refBlockPtr.lastUnsizedArraySize : refEntry.topLevelArraySize;
+	const int				refTopLevelStride	= refEntry.topLevelArrayStride;
+
+	DE_ASSERT(resArraySize <= refArraySize && resTopLevelSize <= refTopLevelSize);
+	DE_UNREF(refArraySize && refTopLevelSize);
+
+	for (int topElemNdx = 0; topElemNdx < resTopLevelSize; topElemNdx++)
+	{
+		deUint8* const			resTopPtr	= resBasePtr + topElemNdx*resTopLevelStride;
+		const deUint8* const	refTopPtr	= refBasePtr + topElemNdx*refTopLevelStride;
+
+		for (int elementNdx = 0; elementNdx < resArraySize; elementNdx++)
+		{
+			deUint8* const			resElemPtr	= resTopPtr + elementNdx*resArrayStride;
+			const deUint8* const	refElemPtr	= refTopPtr + elementNdx*refArrayStride;
+
+			if (isMatrix)
+			{
+				const int	numRows	= glu::getDataTypeMatrixNumRows(resEntry.type);
+				const int	numCols	= glu::getDataTypeMatrixNumColumns(resEntry.type);
+				bool		isOk	= true;
+
+				for (int colNdx = 0; colNdx < numCols; colNdx++)
+				{
+					for (int rowNdx = 0; rowNdx < numRows; rowNdx++)
+					{
+						deUint8*		resCompPtr	= resElemPtr + (resEntry.isRowMajor ? rowNdx*resEntry.matrixStride + colNdx*compSize
+																						: colNdx*resEntry.matrixStride + rowNdx*compSize);
+						const deUint8*	refCompPtr	= refElemPtr + (refEntry.isRowMajor ? rowNdx*refEntry.matrixStride + colNdx*compSize
+																						: colNdx*refEntry.matrixStride + rowNdx*compSize);
+
+						DE_ASSERT((deIntptr)(refCompPtr + compSize) - (deIntptr)refBlockPtr.ptr <= (deIntptr)refBlockPtr.size);
+						DE_ASSERT((deIntptr)(resCompPtr + compSize) - (deIntptr)resBlockPtr.ptr <= (deIntptr)resBlockPtr.size);
+
+						isOk = isOk && compareComponents(scalarType, resCompPtr, refCompPtr, 1);
+					}
+				}
+
+				if (!isOk)
+				{
+					numFailed += 1;
+					if (numFailed < maxPrints)
+					{
+						std::ostringstream expected, got;
+						generateImmMatrixSrc(expected, refEntry.type, refEntry.matrixStride, refEntry.isRowMajor, refElemPtr);
+						generateImmMatrixSrc(got, resEntry.type, resEntry.matrixStride, resEntry.isRowMajor, resElemPtr);
+						log << TestLog::Message << "ERROR: mismatch in " << refEntry.name << ", top-level ndx " << topElemNdx << ", bottom-level ndx " << elementNdx << ":\n"
+												<< "  expected " << expected.str() << "\n"
+												<< "  got " << got.str()
+							<< TestLog::EndMessage;
+					}
+				}
+			}
+			else
+			{
+				DE_ASSERT((deIntptr)(refElemPtr + scalarSize*compSize) - (deIntptr)refBlockPtr.ptr <= (deIntptr)refBlockPtr.size);
+				DE_ASSERT((deIntptr)(resElemPtr + scalarSize*compSize) - (deIntptr)resBlockPtr.ptr <= (deIntptr)resBlockPtr.size);
+
+				const bool isOk = compareComponents(scalarType, resElemPtr, refElemPtr, scalarSize);
+
+				if (!isOk)
+				{
+					numFailed += 1;
+					if (numFailed < maxPrints)
+					{
+						std::ostringstream expected, got;
+						generateImmScalarVectorSrc(expected, refEntry.type, refElemPtr);
+						generateImmScalarVectorSrc(got, resEntry.type, resElemPtr);
+						log << TestLog::Message << "ERROR: mismatch in " << refEntry.name << ", top-level ndx " << topElemNdx << ", bottom-level ndx " << elementNdx << ":\n"
+												<< "  expected " << expected.str() << "\n"
+												<< "  got " << got.str()
+							<< TestLog::EndMessage;
+					}
+				}
+			}
+		}
+	}
+
+	if (numFailed >= maxPrints)
+		log << TestLog::Message << "... (" << numFailed << " failures for " << refEntry.name << " in total)" << TestLog::EndMessage;
+
+	return numFailed == 0;
+}
+
+bool compareData (tcu::TestLog& log, const BufferLayout& refLayout, const vector<BlockDataPtr>& refBlockPointers, const BufferLayout& resLayout, const vector<BlockDataPtr>& resBlockPointers)
+{
+	const int	numBlocks	= (int)refLayout.blocks.size();
+	bool		allOk		= true;
+
+	for (int refBlockNdx = 0; refBlockNdx < numBlocks; refBlockNdx++)
+	{
+		const BlockLayoutEntry&		refBlock	= refLayout.blocks[refBlockNdx];
+		const BlockDataPtr&			refBlockPtr	= refBlockPointers[refBlockNdx];
+		int							resBlockNdx	= resLayout.getBlockIndex(refBlock.name.c_str());
+
+		if (resBlockNdx >= 0)
+		{
+			DE_ASSERT(de::inBounds(resBlockNdx, 0, (int)resBlockPointers.size()));
+
+			const BlockDataPtr& resBlockPtr = resBlockPointers[resBlockNdx];
+
+			for (vector<int>::const_iterator refVarNdxIter = refBlock.activeVarIndices.begin(); refVarNdxIter != refBlock.activeVarIndices.end(); refVarNdxIter++)
+			{
+				const BufferVarLayoutEntry&	refEntry	= refLayout.bufferVars[*refVarNdxIter];
+				int							resVarNdx	= resLayout.getVariableIndex(refEntry.name.c_str());
+
+				if (resVarNdx >= 0)
+				{
+					const BufferVarLayoutEntry& resEntry = resLayout.bufferVars[resVarNdx];
+					allOk = compareBufferVarData(log, refEntry, refBlockPtr, resEntry, resBlockPtr) && allOk;
+				}
+			}
+		}
+	}
+
+	return allOk;
+}
+
+string getBlockAPIName (const BufferBlock& block, int instanceNdx)
+{
+	DE_ASSERT(block.isArray() || instanceNdx == 0);
+	return block.getBlockName() + (block.isArray() ? ("[" + de::toString(instanceNdx) + "]") : string());
+}
+
+// \note Some implementations don't report block members in the order they are declared.
+//		 For checking whether the size has to be adjusted by the actual size of a top-level
+//		 array, we only need to know a) whether there is an unsized top-level array, and
+//		 b) what the stride of that array is.
+
+static bool hasUnsizedArray (const BufferLayout& layout, const BlockLayoutEntry& entry)
+{
+	for (vector<int>::const_iterator varNdx = entry.activeVarIndices.begin(); varNdx != entry.activeVarIndices.end(); ++varNdx)
+	{
+		if (isUnsizedArray(layout.bufferVars[*varNdx]))
+			return true;
+	}
+
+	return false;
+}
+
+static int getUnsizedArrayStride (const BufferLayout& layout, const BlockLayoutEntry& entry)
+{
+	for (vector<int>::const_iterator varNdx = entry.activeVarIndices.begin(); varNdx != entry.activeVarIndices.end(); ++varNdx)
+	{
+		const BufferVarLayoutEntry& varEntry = layout.bufferVars[*varNdx];
+
+		if (varEntry.arraySize == 0)
+			return varEntry.arrayStride;
+		else if (varEntry.topLevelArraySize == 0)
+			return varEntry.topLevelArrayStride;
+	}
+
+	return 0;
+}
+
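+// Computes the required size of each block: the block's base layout size plus, for blocks
+// ending in an unsized array, the chosen array length times the unsized array stride.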
+vector<int> computeBufferSizes (const ShaderInterface& interface, const BufferLayout& layout)
+{
+	vector<int> sizes(layout.blocks.size());
+
+	for (int declNdx = 0; declNdx < interface.getNumBlocks(); declNdx++)
+	{
+		const BufferBlock&	block			= interface.getBlock(declNdx);
+		const bool			isArray			= block.isArray();
+		const int			numInstances	= isArray ? block.getArraySize() : 1;
+
+		for (int instanceNdx = 0; instanceNdx < numInstances; instanceNdx++)
+		{
+			const string	apiName		= getBlockAPIName(block, instanceNdx);
+			const int		blockNdx	= layout.getBlockIndex(apiName);
+
+			if (blockNdx >= 0)
+			{
+				const BlockLayoutEntry&		blockLayout		= layout.blocks[blockNdx];
+				const int					baseSize		= blockLayout.size;
+				const bool					isLastUnsized	= hasUnsizedArray(layout, blockLayout);
+				const int					lastArraySize	= isLastUnsized ? block.getLastUnsizedArraySize(instanceNdx) : 0;
+				const int					stride			= isLastUnsized ? getUnsizedArrayStride(layout, blockLayout) : 0;
+
+				sizes[blockNdx] = baseSize + lastArraySize*stride;
+			}
+		}
+	}
+
+	return sizes;
+}
+
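+// Wraps a raw buffer region as a BlockDataPtr; for blocks with an unsized array the actual
+// array length is derived from the space left after the statically sized part of the block.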
+BlockDataPtr getBlockDataPtr (const BufferLayout& layout, const BlockLayoutEntry& blockLayout, void* ptr, int bufferSize)
+{
+	const bool	isLastUnsized	= hasUnsizedArray(layout, blockLayout);
+	const int	baseSize		= blockLayout.size;
+
+	if (isLastUnsized)
+	{
+		const int		lastArrayStride	= getUnsizedArrayStride(layout, blockLayout);
+		const int		lastArraySize	= (bufferSize-baseSize) / (lastArrayStride ? lastArrayStride : 1);
+
+		DE_ASSERT(baseSize + lastArraySize*lastArrayStride == bufferSize);
+
+		return BlockDataPtr(ptr, bufferSize, lastArraySize);
+	}
+	else
+		return BlockDataPtr(ptr, bufferSize, 0);
+}
+
+struct Buffer
+{
+	deUint32				buffer;
+	int						size;
+
+	Buffer (deUint32 buffer_, int size_) : buffer(buffer_), size(size_) {}
+	Buffer (void) : buffer(0), size(0) {}
+};
+
+struct BlockLocation
+{
+	int						index;
+	int						offset;
+	int						size;
+
+	BlockLocation (int index_, int offset_, int size_) : index(index_), offset(offset_), size(size_) {}
+	BlockLocation (void) : index(0), offset(0), size(0) {}
+};
+
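+// Allocates reference data as a single contiguous allocation and sets up per-block pointers into it.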
+void initRefDataStorage (const ShaderInterface& interface, const BufferLayout& layout, RefDataStorage& storage)
+{
+	DE_ASSERT(storage.data.empty() && storage.pointers.empty());
+
+	const vector<int>	bufferSizes = computeBufferSizes(interface, layout);
+	int					totalSize	= 0;
+
+	for (vector<int>::const_iterator sizeIter = bufferSizes.begin(); sizeIter != bufferSizes.end(); ++sizeIter)
+		totalSize += *sizeIter;
+
+	storage.data.resize(totalSize);
+
+	// Pointers for each block.
+	{
+		deUint8*	basePtr		= storage.data.empty() ? DE_NULL : &storage.data[0];
+		int			curOffset	= 0;
+
+		DE_ASSERT(bufferSizes.size() == layout.blocks.size());
+		DE_ASSERT(totalSize == 0 || basePtr);
+
+		storage.pointers.resize(layout.blocks.size());
+
+		for (int blockNdx = 0; blockNdx < (int)layout.blocks.size(); blockNdx++)
+		{
+			const BlockLayoutEntry&	blockLayout		= layout.blocks[blockNdx];
+			const int				bufferSize		= bufferSizes[blockNdx];
+
+			storage.pointers[blockNdx] = getBlockDataPtr(layout, blockLayout, basePtr + curOffset, bufferSize);
+
+			curOffset += bufferSize;
+		}
+	}
+}
+
+
+vector<BlockDataPtr> blockLocationsToPtrs (const BufferLayout& layout, const vector<BlockLocation>& blockLocations, const vector<void*>& bufPtrs)
+{
+	vector<BlockDataPtr> blockPtrs(blockLocations.size());
+
+	DE_ASSERT(layout.blocks.size() == blockLocations.size());
+
+	for (int blockNdx = 0; blockNdx < (int)layout.blocks.size(); blockNdx++)
+	{
+		const BlockLayoutEntry&	blockLayout		= layout.blocks[blockNdx];
+		const BlockLocation&	location		= blockLocations[blockNdx];
+
+		blockPtrs[blockNdx] = getBlockDataPtr(layout, blockLayout, (deUint8*)bufPtrs[location.index] + location.offset, location.size);
+	}
+
+	return blockPtrs;
+}
+
+} // anonymous (utilities)
+
+de::MovePtr<vk::Allocation> allocateAndBindMemory (Context& context, vk::VkBuffer buffer, vk::MemoryRequirement memReqs)
+{
+	const vk::DeviceInterface&		vkd		= context.getDeviceInterface();
+	const vk::VkMemoryRequirements	bufReqs	= vk::getBufferMemoryRequirements(vkd, context.getDevice(), buffer);
+	de::MovePtr<vk::Allocation>		memory	= context.getDefaultAllocator().allocate(bufReqs, memReqs);
+
+	vkd.bindBufferMemory(context.getDevice(), buffer, memory->getMemory(), memory->getOffset());
+
+	return memory;
+}
+
+vk::Move<vk::VkBuffer> createBuffer (Context& context, vk::VkDeviceSize bufferSize, vk::VkBufferUsageFlags usageFlags)
+{
+	const vk::VkDevice			vkDevice			= context.getDevice();
+	const vk::DeviceInterface&	vk					= context.getDeviceInterface();
+	const deUint32			queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+
+	const vk::VkBufferCreateInfo	bufferInfo		=
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,	// VkStructureType		sType;
+		DE_NULL,									// const void*			pNext;
+		0u,											// VkBufferCreateFlags	flags;
+		bufferSize,									// VkDeviceSize			size;
+		usageFlags,									// VkBufferUsageFlags	usage;
+		vk::VK_SHARING_MODE_EXCLUSIVE,				// VkSharingMode		sharingMode;
+		1u,											// deUint32				queueFamilyCount;
+		&queueFamilyIndex							// const deUint32*		pQueueFamilyIndices;
+	};
+
+	return vk::createBuffer(vk, vkDevice, &bufferInfo);
+}
+
+// SSBOLayoutCaseInstance
+
+class SSBOLayoutCaseInstance : public TestInstance
+{
+public:
+								SSBOLayoutCaseInstance	(Context&					context,
+														SSBOLayoutCase::BufferMode	bufferMode,
+														const ShaderInterface&		interface,
+														const BufferLayout&			refLayout,
+														const RefDataStorage&		initialData,
+														const RefDataStorage&		writeData);
+	virtual						~SSBOLayoutCaseInstance	(void);
+	virtual tcu::TestStatus		iterate						(void);
+
+private:
+	SSBOLayoutCase::BufferMode	m_bufferMode;
+	const ShaderInterface&		m_interface;
+	const BufferLayout&			m_refLayout;
+	const RefDataStorage&		m_initialData;	// Initial data stored in buffer.
+	const RefDataStorage&		m_writeData;	// Data written by compute shader.
+
+
+	typedef de::SharedPtr<vk::Unique<vk::VkBuffer> >	VkBufferSp;
+	typedef de::SharedPtr<vk::Allocation>				AllocationSp;
+
+	std::vector<VkBufferSp>		m_uniformBuffers;
+	std::vector<AllocationSp>	m_uniformAllocs;
+};
+
+SSBOLayoutCaseInstance::SSBOLayoutCaseInstance (Context&					context,
+												SSBOLayoutCase::BufferMode	bufferMode,
+												const ShaderInterface&		interface,
+												const BufferLayout&			refLayout,
+												const RefDataStorage&		initialData,
+												const RefDataStorage&		writeData)
+	: TestInstance	(context)
+	, m_bufferMode	(bufferMode)
+	, m_interface	(interface)
+	, m_refLayout	(refLayout)
+	, m_initialData	(initialData)
+	, m_writeData	(writeData)
+{
+}
+
+SSBOLayoutCaseInstance::~SSBOLayoutCaseInstance (void)
+{
+}
+
+tcu::TestStatus SSBOLayoutCaseInstance::iterate (void)
+{
+	// todo: add compute stage availability check
+	const vk::DeviceInterface&	vk					= m_context.getDeviceInterface();
+	const vk::VkDevice			device				= m_context.getDevice();
+	const vk::VkQueue			queue				= m_context.getUniversalQueue();
+	const deUint32				queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+
+	// Create descriptor set
+	const deUint32 acBufferSize = 1024;
+	vk::Move<vk::VkBuffer> acBuffer (createBuffer(m_context, acBufferSize, vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT));
+	de::UniquePtr<vk::Allocation> acBufferAlloc (allocateAndBindMemory(m_context, *acBuffer, vk::MemoryRequirement::HostVisible));
+
+	deMemset(acBufferAlloc->getHostPtr(), 0, acBufferSize);
+	flushMappedMemoryRange(vk, device, acBufferAlloc->getMemory(), acBufferAlloc->getOffset(), acBufferSize);
+
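+	// Descriptor set layout: binding 0 is the pass counter buffer, followed by one storage
+	// buffer binding per block instance, matching the bindings emitted by generateComputeShader().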
+	vk::DescriptorSetLayoutBuilder setLayoutBuilder;
+	vk::DescriptorPoolBuilder poolBuilder;
+
+	setLayoutBuilder
+		.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+
+	const int numBlocks	= (int)m_refLayout.blocks.size();
+	for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+	{
+		setLayoutBuilder
+			.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, vk::VK_SHADER_STAGE_COMPUTE_BIT);
+	}
+
+	poolBuilder
+		.addType(vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, (deUint32)(1 + numBlocks));
+
+	const vk::Unique<vk::VkDescriptorSetLayout> descriptorSetLayout(setLayoutBuilder.build(vk, device));
+	const vk::Unique<vk::VkDescriptorPool> descriptorPool(poolBuilder.build(vk, device, vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
+
+	const vk::VkDescriptorSetAllocateInfo allocInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+		DE_NULL,
+		*descriptorPool,
+		1u,
+		&descriptorSetLayout.get(),
+	};
+
+	const vk::Unique<vk::VkDescriptorSet> descriptorSet(allocateDescriptorSet(vk, device, &allocInfo));
+	const vk::VkDescriptorBufferInfo descriptorInfo = makeDescriptorBufferInfo(*acBuffer, 0ull, acBufferSize);
+
+	vk::DescriptorSetUpdateBuilder setUpdateBuilder;
+	std::vector<vk::VkDescriptorBufferInfo>	descriptors(numBlocks);
+
+	setUpdateBuilder
+		.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(0u), vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &descriptorInfo);
+
+	vector<BlockDataPtr>  mappedBlockPtrs;
+
+	// Upload base buffers
+	{
+		const std::vector<int>			bufferSizes		= computeBufferSizes(m_interface, m_refLayout);
+		std::vector<void*>				mapPtrs;
+		std::vector<BlockLocation>		blockLocations	(numBlocks);
+
+		DE_ASSERT(bufferSizes.size() == m_refLayout.blocks.size());
+
+		if (m_bufferMode == SSBOLayoutCase::BUFFERMODE_PER_BLOCK)
+		{
+			mapPtrs.resize(numBlocks);
+			for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+			{
+				const deUint32 bufferSize = bufferSizes[blockNdx];
+				DE_ASSERT(bufferSize > 0);
+
+				blockLocations[blockNdx] = BlockLocation(blockNdx, 0, bufferSize);
+
+				vk::Move<vk::VkBuffer>				buffer		= createBuffer(m_context, bufferSize, vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
+				de::MovePtr<vk::Allocation>			alloc		= allocateAndBindMemory(m_context, *buffer, vk::MemoryRequirement::HostVisible);
+
+				descriptors[blockNdx] = makeDescriptorBufferInfo(*buffer, 0ull, bufferSize);
+
+				mapPtrs[blockNdx] = alloc->getHostPtr();
+
+				m_uniformBuffers.push_back(VkBufferSp(new vk::Unique<vk::VkBuffer>(buffer)));
+				m_uniformAllocs.push_back(AllocationSp(alloc.release()));
+
+				setUpdateBuilder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(blockNdx + 1),
+											vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &descriptors[blockNdx]);
+			}
+		}
+		else
+		{
+			DE_ASSERT(m_bufferMode == SSBOLayoutCase::BUFFERMODE_SINGLE);
+
+			vk::VkPhysicalDeviceProperties properties;
+			m_context.getInstanceInterface().getPhysicalDeviceProperties(m_context.getPhysicalDevice(), &properties);
+			const int	bindingAlignment	= (int)properties.limits.minStorageBufferOffsetAlignment;
+			int			curOffset			= 0;
+			for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+			{
+				const int bufferSize = bufferSizes[blockNdx];
+				DE_ASSERT(bufferSize > 0);
+
+				if (bindingAlignment > 0)
+					curOffset = deRoundUp32(curOffset, bindingAlignment);
+
+				blockLocations[blockNdx] = BlockLocation(0, curOffset, bufferSize);
+				curOffset += bufferSize;
+			}
+
+			const int						totalBufferSize = curOffset;
+			vk::Move<vk::VkBuffer>			buffer			= createBuffer(m_context, totalBufferSize, vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
+			de::MovePtr<vk::Allocation>		alloc			= allocateAndBindMemory(m_context, *buffer, vk::MemoryRequirement::HostVisible);
+
+			mapPtrs.push_back(alloc->getHostPtr());
+
+			for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+			{
+				const deUint32						bufferSize	= bufferSizes[blockNdx];
+				const deUint32						offset		= blockLocations[blockNdx].offset;
+
+				descriptors[blockNdx] = makeDescriptorBufferInfo(*buffer, offset, bufferSize);
+
+				setUpdateBuilder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(blockNdx + 1),
+										vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &descriptors[blockNdx]);
+			}
+
+			m_uniformBuffers.push_back(VkBufferSp(new vk::Unique<vk::VkBuffer>(buffer)));
+			m_uniformAllocs.push_back(AllocationSp(alloc.release()));
+		}
+
+		// Copy the initial data to the storage buffers
+		{
+			mappedBlockPtrs = blockLocationsToPtrs(m_refLayout, blockLocations, mapPtrs);
+			copyData(m_refLayout, mappedBlockPtrs, m_refLayout, m_initialData.pointers);
+
+			if (m_bufferMode == SSBOLayoutCase::BUFFERMODE_PER_BLOCK)
+			{
+				DE_ASSERT(m_uniformAllocs.size() == bufferSizes.size());
+				for (size_t allocNdx = 0; allocNdx < m_uniformAllocs.size(); allocNdx++)
+				{
+					const int size = bufferSizes[allocNdx];
+					vk::Allocation* alloc = m_uniformAllocs[allocNdx].get();
+					flushMappedMemoryRange(vk, device, alloc->getMemory(), alloc->getOffset(), size);
+				}
+			}
+			else
+			{
+				DE_ASSERT(m_bufferMode == SSBOLayoutCase::BUFFERMODE_SINGLE);
+				DE_ASSERT(m_uniformAllocs.size() == 1);
+				int totalSize = 0;
+				for (size_t bufferNdx = 0; bufferNdx < bufferSizes.size(); bufferNdx++)
+				{
+					totalSize += bufferSizes[bufferNdx];
+				}
+
+				DE_ASSERT(totalSize > 0);
+				vk::Allocation* alloc = m_uniformAllocs[0].get();
+				flushMappedMemoryRange(vk, device, alloc->getMemory(), alloc->getOffset(), totalSize);
+			}
+		}
+	}
+
+	setUpdateBuilder.update(vk, device);
+
+	const vk::VkPipelineLayoutCreateInfo pipelineLayoutParams =
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,	// VkStructureType				sType;
+		DE_NULL,											// const void*					pNext;
+		(vk::VkPipelineLayoutCreateFlags)0,					// VkPipelineLayoutCreateFlags	flags;
+		1u,													// deUint32						setLayoutCount;
+		&*descriptorSetLayout,								// const VkDescriptorSetLayout*	pSetLayouts;
+		0u,													// deUint32						pushConstantRangeCount;
+		DE_NULL,											// const VkPushConstantRange*	pPushConstantRanges;
+	};
+	vk::Move<vk::VkPipelineLayout> pipelineLayout(createPipelineLayout(vk, device, &pipelineLayoutParams));
+
+	vk::Move<vk::VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("compute"), 0));
+	const vk::VkPipelineShaderStageCreateInfo pipelineShaderStageParams =
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,// VkStructureType				sType;
+		DE_NULL,												// const void*					pNext;
+		(vk::VkPipelineShaderStageCreateFlags)0,				// VkPipelineShaderStageCreateFlags	flags;
+		vk::VK_SHADER_STAGE_COMPUTE_BIT,						// VkShaderStageFlagBits			stage;
+		*shaderModule,											// VkShaderModule					module;
+		"main",													// const char*						pName;
+		DE_NULL,												// const VkSpecializationInfo*	pSpecializationInfo;
+	};
+	const vk::VkComputePipelineCreateInfo pipelineCreateInfo =
+	{
+		vk::VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,	// VkStructureType					sType;
+		DE_NULL,											// const void*						pNext;
+		0,													// VkPipelineCreateFlags			flags;
+		pipelineShaderStageParams,							// VkPipelineShaderStageCreateInfo	stage;
+		*pipelineLayout,									// VkPipelineLayout					layout;
+		DE_NULL,											// VkPipeline						basePipelineHandle;
+		0,													// deInt32							basePipelineIndex;
+	};
+	vk::Move<vk::VkPipeline> pipeline(createComputePipeline(vk, device, DE_NULL, &pipelineCreateInfo));
+
+	const vk::VkCommandPoolCreateInfo cmdPoolParams =
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,			// VkStructureType		sType;
+		DE_NULL,												// const void*			pNext;
+		vk::VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,	// VkCommandPoolCreateFlags	flags;
+		queueFamilyIndex,										// deUint32				queueFamilyIndex;
+	};
+	vk::Move<vk::VkCommandPool> cmdPool (createCommandPool(vk, device, &cmdPoolParams));
+
+	const vk::VkCommandBufferAllocateInfo cmdBufParams =
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// VkStructureType			sType;
+		DE_NULL,											// const void*				pNext;
+		*cmdPool,											// VkCommandPool			commandPool;
+		vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// VkCommandBufferLevel		level;
+		1u,													// deUint32					commandBufferCount;
+	};
+	vk::Move<vk::VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, &cmdBufParams));
+
+	const vk::VkCommandBufferBeginInfo cmdBufBeginParams =
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	//	VkStructureType				sType;
+		DE_NULL,											//	const void*					pNext;
+		0u,													//	VkCommandBufferUsageFlags				flags;
+		(const vk::VkCommandBufferInheritanceInfo*)DE_NULL,	//	const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+	};
+	VK_CHECK(vk.beginCommandBuffer(*cmdBuffer, &cmdBufBeginParams));
+
+	vk.cmdBindPipeline(*cmdBuffer, vk::VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
+	vk.cmdBindDescriptorSets(*cmdBuffer, vk::VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
+
+	vk.cmdDispatch(*cmdBuffer, 1, 1, 1);
+
+	VK_CHECK(vk.endCommandBuffer(*cmdBuffer));
+
+	const vk::VkFenceCreateInfo	fenceParams =
+	{
+		vk::VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+		DE_NULL,									// const void*			pNext;
+		0u,											// VkFenceCreateFlags	flags;
+	};
+	vk::Move<vk::VkFence> fence (createFence(vk, device, &fenceParams));
+
+	const vk::VkSubmitInfo  submitInfo  =
+	{
+		vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
+		DE_NULL,
+		0u,
+		(const vk::VkSemaphore*)DE_NULL,
+		(const vk::VkPipelineStageFlags*)DE_NULL,
+		1u,
+		&cmdBuffer.get(),
+		0u,
+		(const vk::VkSemaphore*)DE_NULL,
+	};
+
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+	VK_CHECK(vk.waitForFences(device, 1u, &fence.get(), DE_TRUE, ~0ull));
+
+	// Read back ac_numPassed data
+	bool counterOk;
+	{
+		const int refCount = 1;
+		int resCount = 0;
+
+		resCount = *(const int*)((const deUint8*)acBufferAlloc->getHostPtr());
+
+		counterOk = (refCount == resCount);
+		if (!counterOk)
+		{
+			m_context.getTestContext().getLog() << TestLog::Message << "Error: ac_numPassed = " << resCount << ", expected " << refCount << TestLog::EndMessage;
+		}
+	}
+
+	// Validate result
+	const bool compareOk = compareData(m_context.getTestContext().getLog(), m_refLayout, m_writeData.pointers, m_refLayout, mappedBlockPtrs);
+
+	if (compareOk && counterOk)
+		return tcu::TestStatus::pass("Result comparison and counter values are OK");
+	else if (!compareOk && counterOk)
+		return tcu::TestStatus::fail("Result comparison failed");
+	else if (compareOk && !counterOk)
+		return tcu::TestStatus::fail("Counter value incorrect");
+	else
+		return tcu::TestStatus::fail("Result comparison and counter values are incorrect");
+}
+
+// SSBOLayoutCase.
+
+SSBOLayoutCase::SSBOLayoutCase (tcu::TestContext& testCtx, const char* name, const char* description, BufferMode bufferMode)
+	: TestCase		(testCtx, name, description)
+	, m_bufferMode	(bufferMode)
+{
+}
+
+SSBOLayoutCase::~SSBOLayoutCase (void)
+{
+}
+
+void SSBOLayoutCase::initPrograms (vk::SourceCollections& programCollection) const
+{
+	DE_ASSERT(!m_computeShaderSrc.empty());
+
+	programCollection.glslSources.add("compute") << glu::ComputeSource(m_computeShaderSrc);
+}
+
+TestInstance* SSBOLayoutCase::createInstance (Context& context) const
+{
+	return new SSBOLayoutCaseInstance(context, m_bufferMode, m_interface, m_refLayout, m_initialData, m_writeData);
+}
+
+void SSBOLayoutCase::init (void)
+{
+	computeReferenceLayout	(m_refLayout, m_interface);
+	initRefDataStorage		(m_interface, m_refLayout, m_initialData);
+	initRefDataStorage		(m_interface, m_refLayout, m_writeData);
+	generateValues			(m_refLayout, m_initialData.pointers, deStringHash(getName()) ^ 0xad2f7214);
+	generateValues			(m_refLayout, m_writeData.pointers, deStringHash(getName()) ^ 0x25ca4e7);
+	copyNonWrittenData		(m_interface, m_refLayout, m_initialData.pointers, m_writeData.pointers);
+
+	m_computeShaderSrc = generateComputeShader(m_interface, m_refLayout, m_initialData.pointers, m_writeData.pointers);
+}
+
+} // ssbo
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutCase.hpp b/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutCase.hpp
new file mode 100644
index 0000000..bbf7e0a
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutCase.hpp
@@ -0,0 +1,255 @@
+#ifndef _VKTSSBOLAYOUTCASE_HPP
+#define _VKTSSBOLAYOUTCASE_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief SSBO layout tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+#include "tcuDefs.hpp"
+#include "gluShaderUtil.hpp"
+#include "gluVarType.hpp"
+
+namespace vkt
+{
+namespace ssbo
+{
+
+enum BufferVarFlags
+{
+	LAYOUT_STD140		= (1<<0),
+	LAYOUT_STD430		= (1<<1),
+	LAYOUT_ROW_MAJOR	= (1<<2),
+	LAYOUT_COLUMN_MAJOR	= (1<<3),	//!< \note Lack of both flags means column-major matrix.
+	LAYOUT_MASK			= LAYOUT_STD430|LAYOUT_STD140|LAYOUT_ROW_MAJOR|LAYOUT_COLUMN_MAJOR,
+
+	// \todo [2013-10-14 pyry] Investigate adding these.
+/*	QUALIFIER_COHERENT	= (1<<4),
+	QUALIFIER_VOLATILE	= (1<<5),
+	QUALIFIER_RESTRICT	= (1<<6),
+	QUALIFIER_READONLY	= (1<<7),
+	QUALIFIER_WRITEONLY	= (1<<8),*/
+
+	ACCESS_READ			= (1<<9),	//!< Buffer variable is read in the shader.
+	ACCESS_WRITE		= (1<<10),	//!< Buffer variable is written in the shader.
+};
+
+class BufferVar
+{
+public:
+						BufferVar		(const char* name, const glu::VarType& type, deUint32 flags);
+
+	const char*			getName			(void) const { return m_name.c_str();	}
+	const glu::VarType&	getType			(void) const { return m_type;			}
+	deUint32			getFlags		(void) const { return m_flags;			}
+
+private:
+	std::string			m_name;
+	glu::VarType		m_type;
+	deUint32			m_flags;
+};
+
+class BufferBlock
+{
+public:
+	typedef std::vector<BufferVar>::iterator		iterator;
+	typedef std::vector<BufferVar>::const_iterator	const_iterator;
+
+							BufferBlock				(const char* blockName);
+
+	const char*				getBlockName			(void) const { return m_blockName.c_str();		}
+	const char*				getInstanceName			(void) const { return m_instanceName.empty() ? DE_NULL : m_instanceName.c_str();	}
+	bool					isArray					(void) const { return m_arraySize > 0;			}
+	int						getArraySize			(void) const { return m_arraySize;				}
+	deUint32				getFlags				(void) const { return m_flags;					}
+
+	void					setInstanceName			(const char* name)			{ m_instanceName = name;			}
+	void					setFlags				(deUint32 flags)			{ m_flags = flags;					}
+	void					addMember				(const BufferVar& var)		{ m_variables.push_back(var);		}
+	void					setArraySize			(int arraySize);
+
+	int						getLastUnsizedArraySize	(int instanceNdx) const		{ return m_lastUnsizedArraySizes[instanceNdx];	}
+	void					setLastUnsizedArraySize	(int instanceNdx, int size)	{ m_lastUnsizedArraySizes[instanceNdx] = size;	}
+
+	inline iterator			begin					(void)			{ return m_variables.begin();	}
+	inline const_iterator	begin					(void) const	{ return m_variables.begin();	}
+	inline iterator			end						(void)			{ return m_variables.end();		}
+	inline const_iterator	end						(void) const	{ return m_variables.end();		}
+
+private:
+	std::string				m_blockName;
+	std::string				m_instanceName;
+	std::vector<BufferVar>	m_variables;
+	int						m_arraySize;				//!< Array size or 0 if not interface block array.
+	std::vector<int>		m_lastUnsizedArraySizes;	//!< Sizes of last unsized array element, can be different per instance.
+	deUint32				m_flags;
+};
+
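+// Owns the named structs and buffer blocks that make up the shader interface under test.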
+class ShaderInterface
+{
+public:
+									ShaderInterface			(void);
+									~ShaderInterface		(void);
+
+	glu::StructType&				allocStruct				(const char* name);
+	const glu::StructType*			findStruct				(const char* name) const;
+	void							getNamedStructs			(std::vector<const glu::StructType*>& structs) const;
+
+	BufferBlock&					allocBlock				(const char* name);
+
+	int								getNumBlocks			(void) const	{ return (int)m_bufferBlocks.size();	}
+	const BufferBlock&				getBlock				(int ndx) const	{ return *m_bufferBlocks[ndx];			}
+
+private:
+									ShaderInterface			(const ShaderInterface&);
+	ShaderInterface&				operator=				(const ShaderInterface&);
+
+	std::vector<glu::StructType*>	m_structs;
+	std::vector<BufferBlock*>		m_bufferBlocks;
+};
+
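+// Reference layout entry for a single buffer variable: offset, array/matrix strides,
+// top-level array properties and matrix orientation.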
+struct BufferVarLayoutEntry
+{
+	BufferVarLayoutEntry (void)
+		: type					(glu::TYPE_LAST)
+		, blockNdx				(-1)
+		, offset				(-1)
+		, arraySize				(-1)
+		, arrayStride			(-1)
+		, matrixStride			(-1)
+		, topLevelArraySize		(-1)
+		, topLevelArrayStride	(-1)
+		, isRowMajor			(false)
+	{
+	}
+
+	std::string			name;
+	glu::DataType		type;
+	int					blockNdx;
+	int					offset;
+	int					arraySize;
+	int					arrayStride;
+	int					matrixStride;
+	int					topLevelArraySize;
+	int					topLevelArrayStride;
+	bool				isRowMajor;
+};
+
+struct BlockLayoutEntry
+{
+	BlockLayoutEntry (void)
+		: size(0)
+	{
+	}
+
+	std::string			name;
+	int					size;
+	std::vector<int>	activeVarIndices;
+};
+
+class BufferLayout
+{
+public:
+	std::vector<BlockLayoutEntry>		blocks;
+	std::vector<BufferVarLayoutEntry>	bufferVars;
+
+	int									getVariableIndex		(const std::string& name) const;
+	int									getBlockIndex			(const std::string& name) const;
+};
+
+// BlockDataPtr
+
+struct BlockDataPtr
+{
+	void*		ptr;
+	int			size;						//!< Redundant, for debugging purposes.
+	int			lastUnsizedArraySize;
+
+	BlockDataPtr (void* ptr_, int size_, int lastUnsizedArraySize_)
+		: ptr					(ptr_)
+		, size					(size_)
+		, lastUnsizedArraySize	(lastUnsizedArraySize_)
+	{
+	}
+
+	BlockDataPtr (void)
+		: ptr					(DE_NULL)
+		, size					(0)
+		, lastUnsizedArraySize	(0)
+	{
+	}
+};
+
+struct RefDataStorage
+{
+	std::vector<deUint8>		data;
+	std::vector<BlockDataPtr>	pointers;
+};
+
+class SSBOLayoutCase : public vkt::TestCase
+{
+public:
+	enum BufferMode
+	{
+		BUFFERMODE_SINGLE = 0,	//!< Single buffer shared between buffer blocks.
+		BUFFERMODE_PER_BLOCK,	//!< Per-block buffers.
+
+		BUFFERMODE_LAST
+	};
+
+								SSBOLayoutCase				(tcu::TestContext& testCtx, const char* name, const char* description, BufferMode bufferMode);
+	virtual						~SSBOLayoutCase				(void);
+
+	virtual void				initPrograms				(vk::SourceCollections& programCollection) const;
+	virtual TestInstance*		createInstance				(Context& context) const;
+
+protected:
+	void						init						(void);
+
+	BufferMode					m_bufferMode;
+	ShaderInterface				m_interface;
+
+private:
+								SSBOLayoutCase				(const SSBOLayoutCase&);
+	SSBOLayoutCase&				operator=					(const SSBOLayoutCase&);
+
+	BufferLayout				m_refLayout;
+	RefDataStorage				m_initialData;	// Initial data stored in buffer.
+	RefDataStorage				m_writeData;	// Data written by compute shader.
+	std::string					m_computeShaderSrc;
+};
+
+} // ssbo
+} // vkt
+
+#endif // _VKTSSBOLAYOUTCASE_HPP
diff --git a/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutTests.cpp b/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutTests.cpp
new file mode 100644
index 0000000..6a5e93f
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutTests.cpp
@@ -0,0 +1,1329 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief SSBO layout tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktSSBOLayoutTests.hpp"
+#include "vktSSBOLayoutCase.hpp"
+
+#include "deUniquePtr.hpp"
+#include "tcuCommandLine.hpp"
+#include "deRandom.hpp"
+#include "deStringUtil.hpp"
+#include "deString.h"
+
+namespace vkt
+{
+namespace ssbo
+{
+namespace
+{
+
+using std::string;
+using std::vector;
+using glu::VarType;
+using glu::StructType;
+
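+// Feature bits controlling which GLSL constructs the random case generator may use.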
+enum FeatureBits
+{
+	FEATURE_VECTORS				= (1<<0),
+	FEATURE_MATRICES			= (1<<1),
+	FEATURE_ARRAYS				= (1<<2),
+	FEATURE_STRUCTS				= (1<<3),
+	FEATURE_NESTED_STRUCTS		= (1<<4),
+	FEATURE_INSTANCE_ARRAYS		= (1<<5),
+	FEATURE_UNUSED_VARS			= (1<<6),
+	FEATURE_UNUSED_MEMBERS		= (1<<7),
+	FEATURE_STD140_LAYOUT		= (1<<8),
+	FEATURE_STD430_LAYOUT		= (1<<9),
+	FEATURE_MATRIX_LAYOUT		= (1<<10),	//!< Matrix layout flags.
+	FEATURE_UNSIZED_ARRAYS		= (1<<11),
+	FEATURE_ARRAYS_OF_ARRAYS	= (1<<12)
+};
+
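+// Generates a randomized shader interface (blocks, instance arrays, member types) from the
+// given feature mask and seed; layout computation and verification are handled by the
+// SSBOLayoutCase base class.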
+class RandomSSBOLayoutCase : public SSBOLayoutCase
+{
+public:
+
+							RandomSSBOLayoutCase		(tcu::TestContext& testCtx, const char* name, const char* description, BufferMode bufferMode, deUint32 features, deUint32 seed);
+
+private:
+	void					generateBlock				(de::Random& rnd, deUint32 layoutFlags);
+	void					generateBufferVar			(de::Random& rnd, BufferBlock& block, bool isLastMember);
+	glu::VarType			generateType				(de::Random& rnd, int typeDepth, bool arrayOk, bool unusedArrayOk);
+
+	deUint32				m_features;
+	int						m_maxBlocks;
+	int						m_maxInstances;
+	int						m_maxArrayLength;
+	int						m_maxStructDepth;
+	int						m_maxBlockMembers;
+	int						m_maxStructMembers;
+	deUint32				m_seed;
+
+	int						m_blockNdx;
+	int						m_bufferVarNdx;
+	int						m_structNdx;
+};
+
+RandomSSBOLayoutCase::RandomSSBOLayoutCase (tcu::TestContext& testCtx, const char* name, const char* description, BufferMode bufferMode, deUint32 features, deUint32 seed)
+	: SSBOLayoutCase		(testCtx, name, description, bufferMode)
+	, m_features			(features)
+	, m_maxBlocks			(4)
+	, m_maxInstances		((features & FEATURE_INSTANCE_ARRAYS)	? 3 : 0)
+	, m_maxArrayLength		((features & FEATURE_ARRAYS)			? 8 : 1)
+	, m_maxStructDepth		((features & FEATURE_STRUCTS)			? 2 : 0)
+	, m_maxBlockMembers		(5)
+	, m_maxStructMembers	(4)
+	, m_seed				(seed)
+	, m_blockNdx			(1)
+	, m_bufferVarNdx		(1)
+	, m_structNdx			(1)
+{
+	de::Random rnd(m_seed);
+
+	const int numBlocks = rnd.getInt(1, m_maxBlocks);
+
+	for (int ndx = 0; ndx < numBlocks; ndx++)
+		generateBlock(rnd, 0);
+
+	init();
+}
+
+void RandomSSBOLayoutCase::generateBlock (de::Random& rnd, deUint32 layoutFlags)
+{
+	DE_ASSERT(m_blockNdx <= 'z' - 'a');
+
+	const float		instanceArrayWeight	= 0.3f;
+	BufferBlock&	block				= m_interface.allocBlock((string("Block") + (char)('A' + m_blockNdx)).c_str());
+	int				numInstances		= (m_maxInstances > 0 && rnd.getFloat() < instanceArrayWeight) ? rnd.getInt(0, m_maxInstances) : 0;
+	int				numVars				= rnd.getInt(1, m_maxBlockMembers);
+
+	if (numInstances > 0)
+		block.setArraySize(numInstances);
+
+	if (numInstances > 0 || rnd.getBool())
+		block.setInstanceName((string("block") + (char)('A' + m_blockNdx)).c_str());
+
+	// Layout flag candidates.
+	vector<deUint32> layoutFlagCandidates;
+	layoutFlagCandidates.push_back(0);
+	if (m_features & FEATURE_STD140_LAYOUT)
+		layoutFlagCandidates.push_back(LAYOUT_STD140);
+
+	layoutFlags |= rnd.choose<deUint32>(layoutFlagCandidates.begin(), layoutFlagCandidates.end());
+
+	if (m_features & FEATURE_MATRIX_LAYOUT)
+	{
+		static const deUint32 matrixCandidates[] = { 0, LAYOUT_ROW_MAJOR, LAYOUT_COLUMN_MAJOR };
+		layoutFlags |= rnd.choose<deUint32>(&matrixCandidates[0], &matrixCandidates[DE_LENGTH_OF_ARRAY(matrixCandidates)]);
+	}
+
+	block.setFlags(layoutFlags);
+
+	for (int ndx = 0; ndx < numVars; ndx++)
+		generateBufferVar(rnd, block, (ndx+1 == numVars));
+
+	if (numVars > 0)
+	{
+		const BufferVar&	lastVar			= *(block.end()-1);
+		const glu::VarType&	lastType		= lastVar.getType();
+		const bool			isUnsizedArr	= lastType.isArrayType() && (lastType.getArraySize() == glu::VarType::UNSIZED_ARRAY);
+
+		if (isUnsizedArr)
+		{
+			for (int instanceNdx = 0; instanceNdx < (numInstances ? numInstances : 1); instanceNdx++)
+			{
+				const int arrSize = rnd.getInt(1, m_maxArrayLength);
+				block.setLastUnsizedArraySize(instanceNdx, arrSize);
+			}
+		}
+	}
+
+	m_blockNdx += 1;
+}
+
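+// Builds a short name from a 1-based index using the given alphabet range,
+// e.g. genName('a', 'z', 1) == "a" and genName('a', 'z', 27) == "aa".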
+static std::string genName (char first, char last, int ndx)
+{
+	std::string	str			= "";
+	int			alphabetLen	= last - first + 1;
+
+	while (ndx > alphabetLen)
+	{
+		str.insert(str.begin(), (char)(first + ((ndx-1)%alphabetLen)));
+		ndx = ((ndx-1) / alphabetLen);
+	}
+
+	str.insert(str.begin(), (char)(first + (ndx%(alphabetLen+1)) - 1));
+
+	return str;
+}
+
+void RandomSSBOLayoutCase::generateBufferVar (de::Random& rnd, BufferBlock& block, bool isLastMember)
+{
+	const float			readWeight			= 0.7f;
+	const float			writeWeight			= 0.7f;
+	const float			accessWeight		= 0.85f;
+	const bool			unusedOk			= (m_features & FEATURE_UNUSED_VARS) != 0;
+	const std::string	name				= genName('a', 'z', m_bufferVarNdx);
+	const glu::VarType	type				= generateType(rnd, 0, true, isLastMember && (m_features & FEATURE_UNSIZED_ARRAYS));
+	const bool			access				= !unusedOk || (rnd.getFloat() < accessWeight);
+	const bool			read				= access ? (rnd.getFloat() < readWeight) : false;
+	const bool			write				= access ? (!read || (rnd.getFloat() < writeWeight)) : false;
+	const deUint32		flags				= (read ? ACCESS_READ : 0) | (write ? ACCESS_WRITE : 0);
+
+	block.addMember(BufferVar(name.c_str(), type, flags));
+
+	m_bufferVarNdx += 1;
+}
+
+glu::VarType RandomSSBOLayoutCase::generateType (de::Random& rnd, int typeDepth, bool arrayOk, bool unsizedArrayOk)
+{
+	const float	structWeight		= 0.1f;
+	const float	arrayWeight			= 0.1f;
+	const float	unsizedArrayWeight	= 0.8f;
+
+	DE_ASSERT(arrayOk || !unsizedArrayOk);
+
+	if (unsizedArrayOk && (rnd.getFloat() < unsizedArrayWeight))
+	{
+		const bool			childArrayOk	= (m_features & FEATURE_ARRAYS_OF_ARRAYS) != 0;
+		const glu::VarType	elementType		= generateType(rnd, typeDepth, childArrayOk, false);
+		return glu::VarType(elementType, glu::VarType::UNSIZED_ARRAY);
+	}
+	else if (typeDepth < m_maxStructDepth && rnd.getFloat() < structWeight)
+	{
+		vector<glu::VarType>	memberTypes;
+		int						numMembers = rnd.getInt(1, m_maxStructMembers);
+
+		// Generate members first so nested struct declarations are in correct order.
+		for (int ndx = 0; ndx < numMembers; ndx++)
+			memberTypes.push_back(generateType(rnd, typeDepth+1, true, false));
+
+		glu::StructType& structType = m_interface.allocStruct((string("s") + genName('A', 'Z', m_structNdx)).c_str());
+		m_structNdx += 1;
+
+		DE_ASSERT(numMembers <= 'Z' - 'A');
+		for (int ndx = 0; ndx < numMembers; ndx++)
+		{
+			structType.addMember((string("m") + (char)('A' + ndx)).c_str(), memberTypes[ndx]);
+		}
+
+		return glu::VarType(&structType);
+	}
+	else if (m_maxArrayLength > 0 && arrayOk && rnd.getFloat() < arrayWeight)
+	{
+		const int			arrayLength		= rnd.getInt(1, m_maxArrayLength);
+		const bool			childArrayOk	= (m_features & FEATURE_ARRAYS_OF_ARRAYS) != 0;
+		const glu::VarType	elementType		= generateType(rnd, typeDepth, childArrayOk, false);
+
+		return glu::VarType(elementType, arrayLength);
+	}
+	else
+	{
+		vector<glu::DataType> typeCandidates;
+
+		typeCandidates.push_back(glu::TYPE_FLOAT);
+		typeCandidates.push_back(glu::TYPE_INT);
+		typeCandidates.push_back(glu::TYPE_UINT);
+		typeCandidates.push_back(glu::TYPE_BOOL);
+
+		if (m_features & FEATURE_VECTORS)
+		{
+			typeCandidates.push_back(glu::TYPE_FLOAT_VEC2);
+			typeCandidates.push_back(glu::TYPE_FLOAT_VEC3);
+			typeCandidates.push_back(glu::TYPE_FLOAT_VEC4);
+			typeCandidates.push_back(glu::TYPE_INT_VEC2);
+			typeCandidates.push_back(glu::TYPE_INT_VEC3);
+			typeCandidates.push_back(glu::TYPE_INT_VEC4);
+			typeCandidates.push_back(glu::TYPE_UINT_VEC2);
+			typeCandidates.push_back(glu::TYPE_UINT_VEC3);
+			typeCandidates.push_back(glu::TYPE_UINT_VEC4);
+			typeCandidates.push_back(glu::TYPE_BOOL_VEC2);
+			typeCandidates.push_back(glu::TYPE_BOOL_VEC3);
+			typeCandidates.push_back(glu::TYPE_BOOL_VEC4);
+		}
+
+		if (m_features & FEATURE_MATRICES)
+		{
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT2);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT2X3);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT3X2);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT3);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT3X4);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT4X2);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT4X3);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT4);
+		}
+
+		glu::DataType	type		= rnd.choose<glu::DataType>(typeCandidates.begin(), typeCandidates.end());
+		glu::Precision	precision;
+
+		if (!glu::isDataTypeBoolOrBVec(type))
+		{
+			// Precision.
+			static const glu::Precision precisionCandidates[] = { glu::PRECISION_LOWP, glu::PRECISION_MEDIUMP, glu::PRECISION_HIGHP };
+			precision = rnd.choose<glu::Precision>(&precisionCandidates[0], &precisionCandidates[DE_LENGTH_OF_ARRAY(precisionCandidates)]);
+		}
+		else
+			precision = glu::PRECISION_LAST;
+
+		return glu::VarType(type, precision);
+	}
+}
+
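+// Block with a single read/write member of the given type, optionally instanced as a block array.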
+class BlockBasicTypeCase : public SSBOLayoutCase
+{
+public:
+	BlockBasicTypeCase (tcu::TestContext& testCtx, const char* name, const char* description, const VarType& type, deUint32 layoutFlags, int numInstances)
+		: SSBOLayoutCase(testCtx, name, description, BUFFERMODE_PER_BLOCK)
+	{
+		BufferBlock& block = m_interface.allocBlock("Block");
+		block.addMember(BufferVar("var", type, ACCESS_READ|ACCESS_WRITE));
+		block.setFlags(layoutFlags);
+
+		if (numInstances > 0)
+		{
+			block.setArraySize(numInstances);
+			block.setInstanceName("block");
+		}
+
+		init();
+	}
+};
+
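+// Block whose only member is an unsized array; the size used at runtime is fixed up front
+// via setLastUnsizedArraySize().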
+class BlockBasicUnsizedArrayCase : public SSBOLayoutCase
+{
+public:
+	BlockBasicUnsizedArrayCase (tcu::TestContext& testCtx, const char* name, const char* description, const VarType& elementType, int arraySize, deUint32 layoutFlags)
+		: SSBOLayoutCase(testCtx, name, description, BUFFERMODE_PER_BLOCK)
+	{
+		BufferBlock& block = m_interface.allocBlock("Block");
+		block.addMember(BufferVar("var", VarType(elementType, VarType::UNSIZED_ARRAY), ACCESS_READ|ACCESS_WRITE));
+		block.setFlags(layoutFlags);
+
+		block.setLastUnsizedArraySize(0, arraySize);
+
+		init();
+	}
+};
+
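+// Adds a child group containing 'numCases' random cases; seeds are offset by the
+// command line base seed so runs can be varied without recompiling.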
+static void createRandomCaseGroup (tcu::TestCaseGroup* parentGroup, tcu::TestContext& testCtx, const char* groupName, const char* description, SSBOLayoutCase::BufferMode bufferMode, deUint32 features, int numCases, deUint32 baseSeed)
+{
+	tcu::TestCaseGroup* group = new tcu::TestCaseGroup(testCtx, groupName, description);
+	parentGroup->addChild(group);
+
+	baseSeed += (deUint32)testCtx.getCommandLine().getBaseSeed();
+
+	for (int ndx = 0; ndx < numCases; ndx++)
+		group->addChild(new RandomSSBOLayoutCase(testCtx, de::toString(ndx).c_str(), "", bufferMode, features, (deUint32)ndx+baseSeed));
+}
+
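+// The following cases exercise fixed struct and nested/unsized struct array layouts;
+// each builds its shader interface in the constructor and then calls init().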
+class BlockSingleStructCase : public SSBOLayoutCase
+{
+public:
+	BlockSingleStructCase (tcu::TestContext& testCtx, const char* name, const char* description, deUint32 layoutFlags, BufferMode bufferMode, int numInstances)
+		: SSBOLayoutCase	(testCtx, name, description, bufferMode)
+		, m_layoutFlags		(layoutFlags)
+		, m_numInstances	(numInstances)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_INT_VEC3, glu::PRECISION_HIGHP)); // \todo [pyry] First member is unused.
+		typeS.addMember("b", VarType(VarType(glu::TYPE_FLOAT_MAT3, glu::PRECISION_MEDIUMP), 4));
+		typeS.addMember("c", VarType(glu::TYPE_FLOAT_VEC4, glu::PRECISION_HIGHP));
+
+		BufferBlock& block = m_interface.allocBlock("Block");
+		block.addMember(BufferVar("s", VarType(&typeS), ACCESS_READ|ACCESS_WRITE));
+		block.setFlags(m_layoutFlags);
+
+		if (m_numInstances > 0)
+		{
+			block.setInstanceName("block");
+			block.setArraySize(m_numInstances);
+		}
+
+		init();
+	}
+
+private:
+	deUint32	m_layoutFlags;
+	int			m_numInstances;
+};
+
+class BlockSingleStructArrayCase : public SSBOLayoutCase
+{
+public:
+	BlockSingleStructArrayCase (tcu::TestContext& testCtx, const char* name, const char* description, deUint32 layoutFlags, BufferMode bufferMode, int numInstances)
+		: SSBOLayoutCase	(testCtx, name, description, bufferMode)
+		, m_layoutFlags		(layoutFlags)
+		, m_numInstances	(numInstances)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_INT_VEC3, glu::PRECISION_HIGHP)); // \todo [pyry] UNUSED
+		typeS.addMember("b", VarType(VarType(glu::TYPE_FLOAT_MAT3, glu::PRECISION_MEDIUMP), 4));
+		typeS.addMember("c", VarType(glu::TYPE_FLOAT_VEC4, glu::PRECISION_HIGHP));
+
+		BufferBlock& block = m_interface.allocBlock("Block");
+		block.addMember(BufferVar("u", VarType(glu::TYPE_UINT, glu::PRECISION_LOWP), 0 /* no access */));
+		block.addMember(BufferVar("s", VarType(VarType(&typeS), 3), ACCESS_READ|ACCESS_WRITE));
+		block.addMember(BufferVar("v", VarType(glu::TYPE_FLOAT_VEC4, glu::PRECISION_MEDIUMP), ACCESS_WRITE));
+		block.setFlags(m_layoutFlags);
+
+		if (m_numInstances > 0)
+		{
+			block.setInstanceName("block");
+			block.setArraySize(m_numInstances);
+		}
+
+		init();
+	}
+
+private:
+	deUint32	m_layoutFlags;
+	int			m_numInstances;
+};
+
+class BlockSingleNestedStructCase : public SSBOLayoutCase
+{
+public:
+	BlockSingleNestedStructCase (tcu::TestContext& testCtx, const char* name, const char* description, deUint32 layoutFlags, BufferMode bufferMode, int numInstances)
+		: SSBOLayoutCase	(testCtx, name, description, bufferMode)
+		, m_layoutFlags		(layoutFlags)
+		, m_numInstances	(numInstances)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_INT_VEC3, glu::PRECISION_HIGHP));
+		typeS.addMember("b", VarType(VarType(glu::TYPE_FLOAT_MAT3, glu::PRECISION_MEDIUMP), 4));
+		typeS.addMember("c", VarType(glu::TYPE_FLOAT_VEC4, glu::PRECISION_HIGHP)); // \todo [pyry] UNUSED
+
+		StructType& typeT = m_interface.allocStruct("T");
+		typeT.addMember("a", VarType(glu::TYPE_FLOAT_MAT3, glu::PRECISION_MEDIUMP));
+		typeT.addMember("b", VarType(&typeS));
+
+		BufferBlock& block = m_interface.allocBlock("Block");
+		block.addMember(BufferVar("s", VarType(&typeS), ACCESS_READ));
+		block.addMember(BufferVar("v", VarType(glu::TYPE_FLOAT_VEC2, glu::PRECISION_LOWP), 0 /* no access */));
+		block.addMember(BufferVar("t", VarType(&typeT), ACCESS_READ|ACCESS_WRITE));
+		block.addMember(BufferVar("u", VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP), ACCESS_WRITE));
+		block.setFlags(m_layoutFlags);
+
+		if (m_numInstances > 0)
+		{
+			block.setInstanceName("block");
+			block.setArraySize(m_numInstances);
+		}
+
+		init();
+	}
+
+private:
+	deUint32	m_layoutFlags;
+	int			m_numInstances;
+};
+
+class BlockSingleNestedStructArrayCase : public SSBOLayoutCase
+{
+public:
+	BlockSingleNestedStructArrayCase (tcu::TestContext& testCtx, const char* name, const char* description, deUint32 layoutFlags, BufferMode bufferMode, int numInstances)
+		: SSBOLayoutCase	(testCtx, name, description, bufferMode)
+		, m_layoutFlags		(layoutFlags)
+		, m_numInstances	(numInstances)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_INT_VEC3, glu::PRECISION_HIGHP));
+		typeS.addMember("b", VarType(VarType(glu::TYPE_INT_VEC2, glu::PRECISION_MEDIUMP), 4));
+		typeS.addMember("c", VarType(glu::TYPE_FLOAT_VEC4, glu::PRECISION_HIGHP)); // \todo [pyry] UNUSED
+
+		StructType& typeT = m_interface.allocStruct("T");
+		typeT.addMember("a", VarType(glu::TYPE_FLOAT_MAT3, glu::PRECISION_MEDIUMP));
+		typeT.addMember("b", VarType(VarType(&typeS), 3));
+
+		BufferBlock& block = m_interface.allocBlock("Block");
+		block.addMember(BufferVar("s", VarType(&typeS), ACCESS_WRITE));
+		block.addMember(BufferVar("v", VarType(glu::TYPE_FLOAT_VEC2, glu::PRECISION_LOWP), 0 /* no access */));
+		block.addMember(BufferVar("t", VarType(VarType(&typeT), 2), ACCESS_READ));
+		block.addMember(BufferVar("u", VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP), ACCESS_READ|ACCESS_WRITE));
+		block.setFlags(m_layoutFlags);
+
+		if (m_numInstances > 0)
+		{
+			block.setInstanceName("block");
+			block.setArraySize(m_numInstances);
+		}
+
+		init();
+	}
+
+private:
+	deUint32	m_layoutFlags;
+	int			m_numInstances;
+};
+
+class BlockUnsizedStructArrayCase : public SSBOLayoutCase
+{
+public:
+	BlockUnsizedStructArrayCase (tcu::TestContext& testCtx, const char* name, const char* description, deUint32 layoutFlags, BufferMode bufferMode, int numInstances)
+		: SSBOLayoutCase	(testCtx, name, description, bufferMode)
+		, m_layoutFlags		(layoutFlags)
+		, m_numInstances	(numInstances)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_UINT_VEC2, glu::PRECISION_HIGHP)); // \todo [pyry] UNUSED
+		typeS.addMember("b", VarType(VarType(glu::TYPE_FLOAT_MAT2X4, glu::PRECISION_MEDIUMP), 4));
+		typeS.addMember("c", VarType(glu::TYPE_FLOAT_VEC3, glu::PRECISION_HIGHP));
+
+		BufferBlock& block = m_interface.allocBlock("Block");
+		block.addMember(BufferVar("u", VarType(glu::TYPE_FLOAT_VEC2, glu::PRECISION_LOWP), 0 /* no access */));
+		block.addMember(BufferVar("v", VarType(glu::TYPE_UINT, glu::PRECISION_MEDIUMP), ACCESS_WRITE));
+		block.addMember(BufferVar("s", VarType(VarType(&typeS), VarType::UNSIZED_ARRAY), ACCESS_READ|ACCESS_WRITE));
+		block.setFlags(m_layoutFlags);
+
+		if (m_numInstances > 0)
+		{
+			block.setInstanceName("block");
+			block.setArraySize(m_numInstances);
+		}
+
+		{
+			de::Random rnd(246);
+			for (int ndx = 0; ndx < (m_numInstances ? m_numInstances : 1); ndx++)
+			{
+				const int lastArrayLen = rnd.getInt(1, 5);
+				block.setLastUnsizedArraySize(ndx, lastArrayLen);
+			}
+		}
+
+		init();
+	}
+
+private:
+	deUint32	m_layoutFlags;
+	int			m_numInstances;
+};
+
+class Block2LevelUnsizedStructArrayCase : public SSBOLayoutCase
+{
+public:
+	Block2LevelUnsizedStructArrayCase (tcu::TestContext& testCtx, const char* name, const char* description, deUint32 layoutFlags, BufferMode bufferMode, int numInstances)
+		: SSBOLayoutCase	(testCtx, name, description, bufferMode)
+		, m_layoutFlags		(layoutFlags)
+		, m_numInstances	(numInstances)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_INT_VEC3, glu::PRECISION_HIGHP));
+		typeS.addMember("c", VarType(glu::TYPE_FLOAT_VEC4, glu::PRECISION_HIGHP));
+
+		BufferBlock& block = m_interface.allocBlock("Block");
+		block.addMember(BufferVar("u", VarType(glu::TYPE_UINT, glu::PRECISION_LOWP), 0 /* no access */));
+		block.addMember(BufferVar("v", VarType(glu::TYPE_FLOAT_VEC4, glu::PRECISION_MEDIUMP), ACCESS_WRITE));
+		block.addMember(BufferVar("s", VarType(VarType(VarType(&typeS), 2), VarType::UNSIZED_ARRAY), ACCESS_READ|ACCESS_WRITE));
+		block.setFlags(m_layoutFlags);
+
+		if (m_numInstances > 0)
+		{
+			block.setInstanceName("block");
+			block.setArraySize(m_numInstances);
+		}
+
+		{
+			de::Random rnd(2344);
+			for (int ndx = 0; ndx < (m_numInstances ? m_numInstances : 1); ndx++)
+			{
+				const int lastArrayLen = rnd.getInt(1, 5);
+				block.setLastUnsizedArraySize(ndx, lastArrayLen);
+			}
+		}
+
+		init();
+	}
+
+private:
+	deUint32	m_layoutFlags;
+	int			m_numInstances;
+};
+
+class BlockUnsizedNestedStructArrayCase : public SSBOLayoutCase
+{
+public:
+	BlockUnsizedNestedStructArrayCase (tcu::TestContext& testCtx, const char* name, const char* description, deUint32 layoutFlags, BufferMode bufferMode, int numInstances)
+		: SSBOLayoutCase	(testCtx, name, description, bufferMode)
+		, m_layoutFlags		(layoutFlags)
+		, m_numInstances	(numInstances)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_UINT_VEC3, glu::PRECISION_HIGHP));
+		typeS.addMember("b", VarType(VarType(glu::TYPE_FLOAT_VEC2, glu::PRECISION_MEDIUMP), 4));
+		typeS.addMember("c", VarType(glu::TYPE_FLOAT_VEC4, glu::PRECISION_HIGHP)); // \todo [pyry] UNUSED
+
+		StructType& typeT = m_interface.allocStruct("T");
+		typeT.addMember("a", VarType(glu::TYPE_FLOAT_MAT4X3, glu::PRECISION_MEDIUMP));
+		typeT.addMember("b", VarType(VarType(&typeS), 3));
+		typeT.addMember("c", VarType(glu::TYPE_INT, glu::PRECISION_HIGHP));
+
+		BufferBlock& block = m_interface.allocBlock("Block");
+		block.addMember(BufferVar("s", VarType(&typeS), ACCESS_WRITE));
+		block.addMember(BufferVar("v", VarType(glu::TYPE_FLOAT_VEC2, glu::PRECISION_LOWP), 0 /* no access */));
+		block.addMember(BufferVar("u", VarType(glu::TYPE_UINT, glu::PRECISION_HIGHP), ACCESS_READ|ACCESS_WRITE));
+		block.addMember(BufferVar("t", VarType(VarType(&typeT), VarType::UNSIZED_ARRAY), ACCESS_READ));
+		block.setFlags(m_layoutFlags);
+
+		if (m_numInstances > 0)
+		{
+			block.setInstanceName("block");
+			block.setArraySize(m_numInstances);
+		}
+
+		{
+			de::Random rnd(7921);
+			for (int ndx = 0; ndx < (m_numInstances ? m_numInstances : 1); ndx++)
+			{
+				const int lastArrayLen = rnd.getInt(1, 5);
+				block.setLastUnsizedArraySize(ndx, lastArrayLen);
+			}
+		}
+
+		init();
+	}
+
+private:
+	deUint32	m_layoutFlags;
+	int			m_numInstances;
+};
+
+class BlockMultiBasicTypesCase : public SSBOLayoutCase
+{
+public:
+	BlockMultiBasicTypesCase (tcu::TestContext& testCtx, const char* name, const char* description, deUint32 flagsA, deUint32 flagsB, BufferMode bufferMode, int numInstances)
+		: SSBOLayoutCase	(testCtx, name, description, bufferMode)
+		, m_flagsA			(flagsA)
+		, m_flagsB			(flagsB)
+		, m_numInstances	(numInstances)
+	{
+		BufferBlock& blockA = m_interface.allocBlock("BlockA");
+		blockA.addMember(BufferVar("a", VarType(glu::TYPE_FLOAT, glu::PRECISION_HIGHP), ACCESS_READ|ACCESS_WRITE));
+		blockA.addMember(BufferVar("b", VarType(glu::TYPE_UINT_VEC3, glu::PRECISION_LOWP), 0 /* no access */));
+		blockA.addMember(BufferVar("c", VarType(glu::TYPE_FLOAT_MAT2, glu::PRECISION_MEDIUMP), ACCESS_READ));
+		blockA.setInstanceName("blockA");
+		blockA.setFlags(m_flagsA);
+
+		BufferBlock& blockB = m_interface.allocBlock("BlockB");
+		blockB.addMember(BufferVar("a", VarType(glu::TYPE_FLOAT_MAT3, glu::PRECISION_MEDIUMP), ACCESS_WRITE));
+		blockB.addMember(BufferVar("b", VarType(glu::TYPE_INT_VEC2, glu::PRECISION_LOWP), ACCESS_READ));
+		blockB.addMember(BufferVar("c", VarType(glu::TYPE_FLOAT_VEC4, glu::PRECISION_HIGHP), 0 /* no access */));
+		blockB.addMember(BufferVar("d", VarType(glu::TYPE_BOOL, glu::PRECISION_LAST), ACCESS_READ|ACCESS_WRITE));
+		blockB.setInstanceName("blockB");
+		blockB.setFlags(m_flagsB);
+
+		if (m_numInstances > 0)
+		{
+			blockA.setArraySize(m_numInstances);
+			blockB.setArraySize(m_numInstances);
+		}
+
+		init();
+	}
+
+private:
+	deUint32	m_flagsA;
+	deUint32	m_flagsB;
+	int			m_numInstances;
+};
+
+class BlockMultiNestedStructCase : public SSBOLayoutCase
+{
+public:
+	BlockMultiNestedStructCase (tcu::TestContext& testCtx, const char* name, const char* description, deUint32 flagsA, deUint32 flagsB, BufferMode bufferMode, int numInstances)
+		: SSBOLayoutCase	(testCtx, name, description, bufferMode)
+		, m_flagsA			(flagsA)
+		, m_flagsB			(flagsB)
+		, m_numInstances	(numInstances)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_FLOAT_MAT3, glu::PRECISION_LOWP));
+		typeS.addMember("b", VarType(VarType(glu::TYPE_INT_VEC2, glu::PRECISION_MEDIUMP), 4));
+		typeS.addMember("c", VarType(glu::TYPE_FLOAT_VEC4, glu::PRECISION_HIGHP));
+
+		StructType& typeT = m_interface.allocStruct("T");
+		typeT.addMember("a", VarType(glu::TYPE_UINT, glu::PRECISION_MEDIUMP)); // \todo [pyry] UNUSED
+		typeT.addMember("b", VarType(&typeS));
+		typeT.addMember("c", VarType(glu::TYPE_BOOL_VEC4, glu::PRECISION_LAST));
+
+		BufferBlock& blockA = m_interface.allocBlock("BlockA");
+		blockA.addMember(BufferVar("a", VarType(glu::TYPE_FLOAT, glu::PRECISION_HIGHP), ACCESS_READ|ACCESS_WRITE));
+		blockA.addMember(BufferVar("b", VarType(&typeS), ACCESS_WRITE));
+		blockA.addMember(BufferVar("c", VarType(glu::TYPE_UINT_VEC3, glu::PRECISION_LOWP), 0 /* no access */));
+		blockA.setInstanceName("blockA");
+		blockA.setFlags(m_flagsA);
+
+		BufferBlock& blockB = m_interface.allocBlock("BlockB");
+		blockB.addMember(BufferVar("a", VarType(glu::TYPE_FLOAT_MAT2, glu::PRECISION_MEDIUMP), ACCESS_WRITE));
+		blockB.addMember(BufferVar("b", VarType(&typeT), ACCESS_READ|ACCESS_WRITE));
+		blockB.addMember(BufferVar("c", VarType(glu::TYPE_BOOL_VEC4, glu::PRECISION_LAST), 0 /* no access */));
+		blockB.addMember(BufferVar("d", VarType(glu::TYPE_BOOL, glu::PRECISION_LAST), ACCESS_READ|ACCESS_WRITE));
+		blockB.setInstanceName("blockB");
+		blockB.setFlags(m_flagsB);
+
+		if (m_numInstances > 0)
+		{
+			blockA.setArraySize(m_numInstances);
+			blockB.setArraySize(m_numInstances);
+		}
+
+		init();
+	}
+
+private:
+	deUint32	m_flagsA;
+	deUint32	m_flagsB;
+	int			m_numInstances;
+};
+
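+// Top-level group for the layout cases; registered as "layout" under the "ssbo" group
+// created in createTests() below.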
+class SSBOLayoutTests : public tcu::TestCaseGroup
+{
+public:
+							SSBOLayoutTests		(tcu::TestContext& testCtx);
+							~SSBOLayoutTests	(void);
+
+	void					init				(void);
+
+private:
+							SSBOLayoutTests		(const SSBOLayoutTests& other);
+	SSBOLayoutTests&		operator=			(const SSBOLayoutTests& other);
+};
+
+
+SSBOLayoutTests::SSBOLayoutTests (tcu::TestContext& testCtx)
+	: TestCaseGroup(testCtx, "layout", "SSBO Layout Tests")
+{
+}
+
+SSBOLayoutTests::~SSBOLayoutTests (void)
+{
+}
+
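+// Builds all fixed and random case groups; most groups iterate over basic types,
+// layout qualifiers (std140/std430) and matrix orders.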
+void SSBOLayoutTests::init (void)
+{
+	static const glu::DataType basicTypes[] =
+	{
+		glu::TYPE_FLOAT,
+		glu::TYPE_FLOAT_VEC2,
+		glu::TYPE_FLOAT_VEC3,
+		glu::TYPE_FLOAT_VEC4,
+		glu::TYPE_INT,
+		glu::TYPE_INT_VEC2,
+		glu::TYPE_INT_VEC3,
+		glu::TYPE_INT_VEC4,
+		glu::TYPE_UINT,
+		glu::TYPE_UINT_VEC2,
+		glu::TYPE_UINT_VEC3,
+		glu::TYPE_UINT_VEC4,
+		glu::TYPE_BOOL,
+		glu::TYPE_BOOL_VEC2,
+		glu::TYPE_BOOL_VEC3,
+		glu::TYPE_BOOL_VEC4,
+		glu::TYPE_FLOAT_MAT2,
+		glu::TYPE_FLOAT_MAT3,
+		glu::TYPE_FLOAT_MAT4,
+		glu::TYPE_FLOAT_MAT2X3,
+		glu::TYPE_FLOAT_MAT2X4,
+		glu::TYPE_FLOAT_MAT3X2,
+		glu::TYPE_FLOAT_MAT3X4,
+		glu::TYPE_FLOAT_MAT4X2,
+		glu::TYPE_FLOAT_MAT4X3
+	};
+
+	static const struct
+	{
+		const char*		name;
+		deUint32		flags;
+	} layoutFlags[] =
+	{
+		{ "std140",	LAYOUT_STD140 },
+		{ "std430",	LAYOUT_STD430 },
+	};
+
+	static const struct
+	{
+		const char*		name;
+		deUint32		flags;
+	} matrixFlags[] =
+	{
+		{ "row_major",		LAYOUT_ROW_MAJOR	},
+		{ "column_major",	LAYOUT_COLUMN_MAJOR }
+	};
+
+	static const struct
+	{
+		const char*						name;
+		SSBOLayoutCase::BufferMode		mode;
+	} bufferModes[] =
+	{
+		{ "per_block_buffer",	SSBOLayoutCase::BUFFERMODE_PER_BLOCK },
+		{ "single_buffer",		SSBOLayoutCase::BUFFERMODE_SINGLE	}
+	};
+
+	// ssbo.layout.single_basic_type
+	{
+		tcu::TestCaseGroup* singleBasicTypeGroup = new tcu::TestCaseGroup(m_testCtx, "single_basic_type", "Single basic variable in single buffer");
+		addChild(singleBasicTypeGroup);
+
+		for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+		{
+			tcu::TestCaseGroup* layoutGroup = new tcu::TestCaseGroup(m_testCtx, layoutFlags[layoutFlagNdx].name, "");
+			singleBasicTypeGroup->addChild(layoutGroup);
+
+			for (int basicTypeNdx = 0; basicTypeNdx < DE_LENGTH_OF_ARRAY(basicTypes); basicTypeNdx++)
+			{
+				glu::DataType	type		= basicTypes[basicTypeNdx];
+				const char*		typeName	= glu::getDataTypeName(type);
+
+				if (glu::isDataTypeBoolOrBVec(type))
+					layoutGroup->addChild(new BlockBasicTypeCase(m_testCtx, typeName, "", VarType(type, glu::PRECISION_LAST), layoutFlags[layoutFlagNdx].flags, 0));
+				else
+				{
+					for (int precNdx = 0; precNdx < glu::PRECISION_LAST; precNdx++)
+					{
+						const glu::Precision	precision	= glu::Precision(precNdx);
+						const string			caseName	= string(glu::getPrecisionName(precision)) + "_" + typeName;
+
+						layoutGroup->addChild(new BlockBasicTypeCase(m_testCtx, caseName.c_str(), "", VarType(type, precision), layoutFlags[layoutFlagNdx].flags, 0));
+					}
+				}
+
+				if (glu::isDataTypeMatrix(type))
+				{
+					for (int matFlagNdx = 0; matFlagNdx < DE_LENGTH_OF_ARRAY(matrixFlags); matFlagNdx++)
+					{
+						for (int precNdx = 0; precNdx < glu::PRECISION_LAST; precNdx++)
+						{
+							const glu::Precision	precision	= glu::Precision(precNdx);
+							const string			caseName	= string(matrixFlags[matFlagNdx].name) + "_" + string(glu::getPrecisionName(precision)) + "_" + typeName;
+
+							layoutGroup->addChild(new BlockBasicTypeCase(m_testCtx, caseName.c_str(), "", glu::VarType(type, precision), layoutFlags[layoutFlagNdx].flags|matrixFlags[matFlagNdx].flags, 0));
+						}
+					}
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.single_basic_array
+	{
+		tcu::TestCaseGroup* singleBasicArrayGroup = new tcu::TestCaseGroup(m_testCtx, "single_basic_array", "Single basic array variable in single buffer");
+		addChild(singleBasicArrayGroup);
+
+		for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+		{
+			tcu::TestCaseGroup* layoutGroup = new tcu::TestCaseGroup(m_testCtx, layoutFlags[layoutFlagNdx].name, "");
+			singleBasicArrayGroup->addChild(layoutGroup);
+
+			for (int basicTypeNdx = 0; basicTypeNdx < DE_LENGTH_OF_ARRAY(basicTypes); basicTypeNdx++)
+			{
+				glu::DataType	type		= basicTypes[basicTypeNdx];
+				const char*		typeName	= glu::getDataTypeName(type);
+				const int		arraySize	= 3;
+
+				layoutGroup->addChild(new BlockBasicTypeCase(m_testCtx, typeName, "",
+															 VarType(VarType(type, glu::isDataTypeBoolOrBVec(type) ? glu::PRECISION_LAST : glu::PRECISION_HIGHP), arraySize),
+															 layoutFlags[layoutFlagNdx].flags, 0));
+
+				if (glu::isDataTypeMatrix(type))
+				{
+					for (int matFlagNdx = 0; matFlagNdx < DE_LENGTH_OF_ARRAY(matrixFlags); matFlagNdx++)
+						layoutGroup->addChild(new BlockBasicTypeCase(m_testCtx, (string(matrixFlags[matFlagNdx].name) + "_" + typeName).c_str(), "",
+																	 VarType(VarType(type, glu::PRECISION_HIGHP), arraySize),
+																	 layoutFlags[layoutFlagNdx].flags|matrixFlags[matFlagNdx].flags, 0));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.basic_unsized_array
+	{
+		tcu::TestCaseGroup* basicUnsizedArray = new tcu::TestCaseGroup(m_testCtx, "basic_unsized_array", "Basic unsized array tests");
+		addChild(basicUnsizedArray);
+
+		for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+		{
+			tcu::TestCaseGroup* layoutGroup = new tcu::TestCaseGroup(m_testCtx, layoutFlags[layoutFlagNdx].name, "");
+			basicUnsizedArray->addChild(layoutGroup);
+
+			for (int basicTypeNdx = 0; basicTypeNdx < DE_LENGTH_OF_ARRAY(basicTypes); basicTypeNdx++)
+			{
+				glu::DataType	type		= basicTypes[basicTypeNdx];
+				const char*		typeName	= glu::getDataTypeName(type);
+				const int		arraySize	= 19;
+
+				layoutGroup->addChild(new BlockBasicUnsizedArrayCase(m_testCtx, typeName, "",
+																	 VarType(type, glu::isDataTypeBoolOrBVec(type) ? glu::PRECISION_LAST : glu::PRECISION_HIGHP),
+																	 arraySize, layoutFlags[layoutFlagNdx].flags));
+
+				if (glu::isDataTypeMatrix(type))
+				{
+					for (int matFlagNdx = 0; matFlagNdx < DE_LENGTH_OF_ARRAY(matrixFlags); matFlagNdx++)
+						layoutGroup->addChild(new BlockBasicUnsizedArrayCase(m_testCtx, (string(matrixFlags[matFlagNdx].name) + "_" + typeName).c_str(), "",
+																			 VarType(type, glu::PRECISION_HIGHP), arraySize,
+																			 layoutFlags[layoutFlagNdx].flags|matrixFlags[matFlagNdx].flags));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.2_level_array
+	{
+		tcu::TestCaseGroup* nestedArrayGroup = new tcu::TestCaseGroup(m_testCtx, "2_level_array", "2-level nested array");
+		addChild(nestedArrayGroup);
+
+		for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+		{
+			tcu::TestCaseGroup* layoutGroup = new tcu::TestCaseGroup(m_testCtx, layoutFlags[layoutFlagNdx].name, "");
+			nestedArrayGroup->addChild(layoutGroup);
+
+			for (int basicTypeNdx = 0; basicTypeNdx < DE_LENGTH_OF_ARRAY(basicTypes); basicTypeNdx++)
+			{
+				glu::DataType	type		= basicTypes[basicTypeNdx];
+				const char*		typeName	= glu::getDataTypeName(type);
+				const int		childSize	= 3;
+				const int		parentSize	= 4;
+				const VarType	childType	(VarType(type, glu::isDataTypeBoolOrBVec(type) ? glu::PRECISION_LAST : glu::PRECISION_HIGHP), childSize);
+				const VarType	fullType	(childType, parentSize);
+
+				layoutGroup->addChild(new BlockBasicTypeCase(m_testCtx, typeName, "", fullType, layoutFlags[layoutFlagNdx].flags, 0));
+
+				if (glu::isDataTypeMatrix(type))
+				{
+					for (int matFlagNdx = 0; matFlagNdx < DE_LENGTH_OF_ARRAY(matrixFlags); matFlagNdx++)
+						layoutGroup->addChild(new BlockBasicTypeCase(m_testCtx, (string(matrixFlags[matFlagNdx].name) + "_" + typeName).c_str(), "",
+																	 fullType,
+																	 layoutFlags[layoutFlagNdx].flags|matrixFlags[matFlagNdx].flags, 0));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.3_level_array
+	{
+		tcu::TestCaseGroup* nestedArrayGroup = new tcu::TestCaseGroup(m_testCtx, "3_level_array", "3-level nested array");
+		addChild(nestedArrayGroup);
+
+		for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+		{
+			tcu::TestCaseGroup* layoutGroup = new tcu::TestCaseGroup(m_testCtx, layoutFlags[layoutFlagNdx].name, "");
+			nestedArrayGroup->addChild(layoutGroup);
+
+			for (int basicTypeNdx = 0; basicTypeNdx < DE_LENGTH_OF_ARRAY(basicTypes); basicTypeNdx++)
+			{
+				glu::DataType	type		= basicTypes[basicTypeNdx];
+				const char*		typeName	= glu::getDataTypeName(type);
+				const int		childSize0	= 3;
+				const int		childSize1	= 2;
+				const int		parentSize	= 4;
+				const VarType	childType0	(VarType(type, glu::isDataTypeBoolOrBVec(type) ? glu::PRECISION_LAST : glu::PRECISION_HIGHP), childSize0);
+				const VarType	childType1	(childType0, childSize1);
+				const VarType	fullType	(childType1, parentSize);
+
+				layoutGroup->addChild(new BlockBasicTypeCase(m_testCtx, typeName, "", fullType, layoutFlags[layoutFlagNdx].flags, 0));
+
+				if (glu::isDataTypeMatrix(type))
+				{
+					for (int matFlagNdx = 0; matFlagNdx < DE_LENGTH_OF_ARRAY(matrixFlags); matFlagNdx++)
+						layoutGroup->addChild(new BlockBasicTypeCase(m_testCtx, (string(matrixFlags[matFlagNdx].name) + "_" + typeName).c_str(), "",
+																	 fullType,
+																	 layoutFlags[layoutFlagNdx].flags|matrixFlags[matFlagNdx].flags, 0));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.3_level_unsized_array
+	{
+		tcu::TestCaseGroup* nestedArrayGroup = new tcu::TestCaseGroup(m_testCtx, "3_level_unsized_array", "3-level nested array, top-level array unsized");
+		addChild(nestedArrayGroup);
+
+		for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+		{
+			tcu::TestCaseGroup* layoutGroup = new tcu::TestCaseGroup(m_testCtx, layoutFlags[layoutFlagNdx].name, "");
+			nestedArrayGroup->addChild(layoutGroup);
+
+			for (int basicTypeNdx = 0; basicTypeNdx < DE_LENGTH_OF_ARRAY(basicTypes); basicTypeNdx++)
+			{
+				glu::DataType	type		= basicTypes[basicTypeNdx];
+				const char*		typeName	= glu::getDataTypeName(type);
+				const int		childSize0	= 2;
+				const int		childSize1	= 4;
+				const int		parentSize	= 3;
+				const VarType	childType0	(VarType(type, glu::isDataTypeBoolOrBVec(type) ? glu::PRECISION_LAST : glu::PRECISION_HIGHP), childSize0);
+				const VarType	childType1	(childType0, childSize1);
+
+				layoutGroup->addChild(new BlockBasicUnsizedArrayCase(m_testCtx, typeName, "", childType1, parentSize, layoutFlags[layoutFlagNdx].flags));
+
+				if (glu::isDataTypeMatrix(type))
+				{
+					for (int matFlagNdx = 0; matFlagNdx < DE_LENGTH_OF_ARRAY(matrixFlags); matFlagNdx++)
+						layoutGroup->addChild(new BlockBasicUnsizedArrayCase(m_testCtx, (string(matrixFlags[matFlagNdx].name) + "_" + typeName).c_str(), "",
+																			 childType1, parentSize,
+																			 layoutFlags[layoutFlagNdx].flags|matrixFlags[matFlagNdx].flags));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.single_struct
+	{
+		tcu::TestCaseGroup* singleStructGroup = new tcu::TestCaseGroup(m_testCtx, "single_struct", "Single struct in shader storage block");
+		addChild(singleStructGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			singleStructGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					const deUint32	caseFlags	= layoutFlags[layoutFlagNdx].flags;
+					string			caseName	= layoutFlags[layoutFlagNdx].name;
+
+					if (bufferModes[modeNdx].mode == SSBOLayoutCase::BUFFERMODE_SINGLE && isArray == 0)
+						continue; // Doesn't make sense to add this variant.
+
+					if (isArray)
+						caseName += "_instance_array";
+
+					modeGroup->addChild(new BlockSingleStructCase(m_testCtx, caseName.c_str(), "", caseFlags, bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.single_struct_array
+	{
+		tcu::TestCaseGroup* singleStructArrayGroup = new tcu::TestCaseGroup(m_testCtx, "single_struct_array", "Struct array in one shader storage block");
+		addChild(singleStructArrayGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			singleStructArrayGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (bufferModes[modeNdx].mode == SSBOLayoutCase::BUFFERMODE_SINGLE && isArray == 0)
+						continue; // Doesn't make sense to add this variant.
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new BlockSingleStructArrayCase(m_testCtx, baseName.c_str(),	"", baseFlags, bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.single_nested_struct
+	{
+		tcu::TestCaseGroup* singleNestedStructGroup = new tcu::TestCaseGroup(m_testCtx, "single_nested_struct", "Nested struct in one shader storage block");
+		addChild(singleNestedStructGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			singleNestedStructGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (bufferModes[modeNdx].mode == SSBOLayoutCase::BUFFERMODE_SINGLE && isArray == 0)
+						continue; // Doesn't make sense to add this variant.
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new BlockSingleNestedStructCase(m_testCtx, baseName.c_str(), "", baseFlags, bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.single_nested_struct_array
+	{
+		tcu::TestCaseGroup* singleNestedStructArrayGroup = new tcu::TestCaseGroup(m_testCtx, "single_nested_struct_array", "Nested struct array in one shader storage block");
+		addChild(singleNestedStructArrayGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			singleNestedStructArrayGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (bufferModes[modeNdx].mode == SSBOLayoutCase::BUFFERMODE_SINGLE && isArray == 0)
+						continue; // Doesn't make sense to add this variant.
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new BlockSingleNestedStructArrayCase(m_testCtx, baseName.c_str(), "", baseFlags, bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.unsized_struct_array
+	{
+		tcu::TestCaseGroup* singleStructArrayGroup = new tcu::TestCaseGroup(m_testCtx, "unsized_struct_array", "Unsized struct array in one shader storage block");
+		addChild(singleStructArrayGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			singleStructArrayGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (bufferModes[modeNdx].mode == SSBOLayoutCase::BUFFERMODE_SINGLE && isArray == 0)
+						continue; // Doesn't make sense to add this variant.
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new BlockUnsizedStructArrayCase(m_testCtx, baseName.c_str(),	"", baseFlags, bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.2_level_unsized_struct_array
+	{
+		tcu::TestCaseGroup* structArrayGroup = new tcu::TestCaseGroup(m_testCtx, "2_level_unsized_struct_array", "Unsized 2-level struct array in one shader storage block");
+		addChild(structArrayGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			structArrayGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (bufferModes[modeNdx].mode == SSBOLayoutCase::BUFFERMODE_SINGLE && isArray == 0)
+						continue; // Doesn't make sense to add this variant.
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new Block2LevelUnsizedStructArrayCase(m_testCtx, baseName.c_str(),	"", baseFlags, bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.unsized_nested_struct_array
+	{
+		tcu::TestCaseGroup* singleNestedStructArrayGroup = new tcu::TestCaseGroup(m_testCtx, "unsized_nested_struct_array", "Unsized, nested struct array in one shader storage block");
+		addChild(singleNestedStructArrayGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			singleNestedStructArrayGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (bufferModes[modeNdx].mode == SSBOLayoutCase::BUFFERMODE_SINGLE && isArray == 0)
+						continue; // Doesn't make sense to add this variant.
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new BlockUnsizedNestedStructArrayCase(m_testCtx, baseName.c_str(), "", baseFlags, bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.instance_array_basic_type
+	{
+		tcu::TestCaseGroup* instanceArrayBasicTypeGroup = new tcu::TestCaseGroup(m_testCtx, "instance_array_basic_type", "Single basic variable in instance array");
+		addChild(instanceArrayBasicTypeGroup);
+
+		for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+		{
+			tcu::TestCaseGroup* layoutGroup = new tcu::TestCaseGroup(m_testCtx, layoutFlags[layoutFlagNdx].name, "");
+			instanceArrayBasicTypeGroup->addChild(layoutGroup);
+
+			for (int basicTypeNdx = 0; basicTypeNdx < DE_LENGTH_OF_ARRAY(basicTypes); basicTypeNdx++)
+			{
+				glu::DataType	type			= basicTypes[basicTypeNdx];
+				const char*		typeName		= glu::getDataTypeName(type);
+				const int		numInstances	= 3;
+
+				layoutGroup->addChild(new BlockBasicTypeCase(m_testCtx, typeName, "",
+															 VarType(type, glu::isDataTypeBoolOrBVec(type) ? glu::PRECISION_LAST : glu::PRECISION_HIGHP),
+															 layoutFlags[layoutFlagNdx].flags, numInstances));
+
+				if (glu::isDataTypeMatrix(type))
+				{
+					for (int matFlagNdx = 0; matFlagNdx < DE_LENGTH_OF_ARRAY(matrixFlags); matFlagNdx++)
+						layoutGroup->addChild(new BlockBasicTypeCase(m_testCtx, (string(matrixFlags[matFlagNdx].name) + "_" + typeName).c_str(), "",
+																	 VarType(type, glu::PRECISION_HIGHP), layoutFlags[layoutFlagNdx].flags|matrixFlags[matFlagNdx].flags,
+																	 numInstances));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.multi_basic_types
+	{
+		tcu::TestCaseGroup* multiBasicTypesGroup = new tcu::TestCaseGroup(m_testCtx, "multi_basic_types", "Multiple buffers with basic types");
+		addChild(multiBasicTypesGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			multiBasicTypesGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new BlockMultiBasicTypesCase(m_testCtx, baseName.c_str(), "", baseFlags, baseFlags, bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.multi_nested_struct
+	{
+		tcu::TestCaseGroup* multiNestedStructGroup = new tcu::TestCaseGroup(m_testCtx, "multi_nested_struct", "Multiple buffers with nested structs");
+		addChild(multiNestedStructGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			multiNestedStructGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new BlockMultiNestedStructCase(m_testCtx, baseName.c_str(), "", baseFlags, baseFlags, bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ssbo.layout.random
+	{
+		const deUint32	allLayouts		= FEATURE_STD140_LAYOUT;
+		const deUint32	allBasicTypes	= FEATURE_VECTORS|FEATURE_MATRICES;
+		const deUint32	unused			= FEATURE_UNUSED_MEMBERS|FEATURE_UNUSED_VARS;
+		const deUint32	unsized			= FEATURE_UNSIZED_ARRAYS;
+		const deUint32	matFlags		= FEATURE_MATRIX_LAYOUT;
+
+		tcu::TestCaseGroup* randomGroup = new tcu::TestCaseGroup(m_testCtx, "random", "Random SSBO layout cases");
+		addChild(randomGroup);
+
+		// Basic types.
+		createRandomCaseGroup(randomGroup, m_testCtx, "scalar_types",		"Scalar types only, per-block buffers",				SSBOLayoutCase::BUFFERMODE_PER_BLOCK,	allLayouts|unused,																			25, 0);
+		createRandomCaseGroup(randomGroup, m_testCtx, "vector_types",		"Scalar and vector types only, per-block buffers",	SSBOLayoutCase::BUFFERMODE_PER_BLOCK,	allLayouts|unused|FEATURE_VECTORS,															25, 25);
+		createRandomCaseGroup(randomGroup, m_testCtx, "basic_types",		"All basic types, per-block buffers",				SSBOLayoutCase::BUFFERMODE_PER_BLOCK,	allLayouts|unused|allBasicTypes|matFlags,													25, 50);
+		createRandomCaseGroup(randomGroup, m_testCtx, "basic_arrays",		"Arrays, per-block buffers",						SSBOLayoutCase::BUFFERMODE_PER_BLOCK,	allLayouts|unused|allBasicTypes|matFlags|FEATURE_ARRAYS,									25, 50);
+		createRandomCaseGroup(randomGroup, m_testCtx, "unsized_arrays",		"Unsized arrays, per-block buffers",				SSBOLayoutCase::BUFFERMODE_PER_BLOCK,	allLayouts|unused|allBasicTypes|matFlags|unsized|FEATURE_ARRAYS,							25, 50);
+		createRandomCaseGroup(randomGroup, m_testCtx, "arrays_of_arrays",	"Arrays of arrays, per-block buffers",				SSBOLayoutCase::BUFFERMODE_PER_BLOCK,	allLayouts|unused|allBasicTypes|matFlags|unsized|FEATURE_ARRAYS|FEATURE_ARRAYS_OF_ARRAYS,	25, 950);
+
+		createRandomCaseGroup(randomGroup, m_testCtx, "basic_instance_arrays",					"Basic instance arrays, per-block buffers",				SSBOLayoutCase::BUFFERMODE_PER_BLOCK,	allLayouts|unused|allBasicTypes|matFlags|unsized|FEATURE_INSTANCE_ARRAYS,															25, 75);
+		createRandomCaseGroup(randomGroup, m_testCtx, "nested_structs",							"Nested structs, per-block buffers",					SSBOLayoutCase::BUFFERMODE_PER_BLOCK,	allLayouts|unused|allBasicTypes|matFlags|unsized|FEATURE_STRUCTS,																	25, 100);
+		createRandomCaseGroup(randomGroup, m_testCtx, "nested_structs_arrays",					"Nested structs, arrays, per-block buffers",			SSBOLayoutCase::BUFFERMODE_PER_BLOCK,	allLayouts|unused|allBasicTypes|matFlags|unsized|FEATURE_STRUCTS|FEATURE_ARRAYS|FEATURE_ARRAYS_OF_ARRAYS,							25, 150);
+		createRandomCaseGroup(randomGroup, m_testCtx, "nested_structs_instance_arrays",			"Nested structs, instance arrays, per-block buffers",	SSBOLayoutCase::BUFFERMODE_PER_BLOCK,	allLayouts|unused|allBasicTypes|matFlags|unsized|FEATURE_STRUCTS|FEATURE_INSTANCE_ARRAYS,											25, 125);
+		createRandomCaseGroup(randomGroup, m_testCtx, "nested_structs_arrays_instance_arrays",	"Nested structs, instance arrays, per-block buffers",	SSBOLayoutCase::BUFFERMODE_PER_BLOCK,	allLayouts|unused|allBasicTypes|matFlags|unsized|FEATURE_STRUCTS|FEATURE_ARRAYS|FEATURE_ARRAYS_OF_ARRAYS|FEATURE_INSTANCE_ARRAYS,	25, 175);
+
+		createRandomCaseGroup(randomGroup, m_testCtx, "all_per_block_buffers",	"All random features, per-block buffers",	SSBOLayoutCase::BUFFERMODE_PER_BLOCK,	~0u,	50, 200);
+		createRandomCaseGroup(randomGroup, m_testCtx, "all_shared_buffer",		"All random features, shared buffer",		SSBOLayoutCase::BUFFERMODE_SINGLE,		~0u,	50, 250);
+	}
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup> ssboTestGroup (new tcu::TestCaseGroup(testCtx, "ssbo", "Shader Storage Buffer Object Tests"));
+
+	ssboTestGroup->addChild(new SSBOLayoutTests(testCtx));
+
+	return ssboTestGroup.release();
+}
+
+} // ssbo
+} // vkt
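
Note: the ssbo::createTests factory above only builds the group; hooking it into the top-level Vulkan test package is assumed to look roughly like the sketch below (illustrative only, the actual registration lives in the package sources):

	// Hypothetical registration inside the package's child-creation code:
	addChild(ssbo::createTests(m_testCtx));
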
diff --git a/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutTests.hpp b/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutTests.hpp
new file mode 100644
index 0000000..fc6d701
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/ssbo/vktSSBOLayoutTests.hpp
@@ -0,0 +1,50 @@
+#ifndef _VKTSSBOLAYOUTTESTS_HPP
+#define _VKTSSBOLAYOUTTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief SSBO layout tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace ssbo
+{
+
+tcu::TestCaseGroup*		createTests		(tcu::TestContext& testCtx);
+
+} // ssbo
+} // vkt
+
+#endif // _VKTSSBOLAYOUTTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/ubo/CMakeLists.txt b/external/vulkancts/modules/vulkan/ubo/CMakeLists.txt
new file mode 100644
index 0000000..7976f6d
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/ubo/CMakeLists.txt
@@ -0,0 +1,21 @@
+include_directories(
+	..
+)
+
+set(DEQP_VK_UBO_SRCS
+	vktUniformBlockCase.cpp
+	vktUniformBlockCase.hpp
+	vktRandomUniformBlockCase.cpp
+	vktRandomUniformBlockCase.hpp
+	vktUniformBlockTests.cpp
+	vktUniformBlockTests.hpp
+)
+
+set(DEQP_VK_UBO_LIBS
+	deqp-vk-common
+	tcutil
+	vkutil
+)
+
+add_library(deqp-vk-ubo STATIC ${DEQP_VK_UBO_SRCS})
+target_link_libraries(deqp-vk-ubo ${DEQP_VK_UBO_LIBS})
diff --git a/external/vulkancts/modules/vulkan/ubo/vktRandomUniformBlockCase.cpp b/external/vulkancts/modules/vulkan/ubo/vktRandomUniformBlockCase.cpp
new file mode 100644
index 0000000..ca61ca7
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/ubo/vktRandomUniformBlockCase.cpp
@@ -0,0 +1,251 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Random uniform block layout case.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktRandomUniformBlockCase.hpp"
+#include "deRandom.hpp"
+
+namespace vkt
+{
+namespace ubo
+{
+
+namespace
+{
+
+static std::string genName (char first, char last, int ndx)
+{
+	std::string	str			= "";
+	int			alphabetLen	= last - first + 1;
+
+	while (ndx > alphabetLen)
+	{
+		str.insert(str.begin(), (char)(first + ((ndx - 1) % alphabetLen)));
+		ndx = (ndx - 1) / alphabetLen;
+	}
+
+	str.insert(str.begin(), (char)(first + (ndx % (alphabetLen + 1)) - 1));
+
+	return str;
+}
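
genName enumerates member names in a bijective base-26 style; a few sample values, assuming first='a' and last='z':

	// genName('a', 'z', 1)  == "a"
	// genName('a', 'z', 26) == "z"
	// genName('a', 'z', 27) == "aa"
	// genName('a', 'z', 28) == "ab"
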
+
+} // anonymous
+
+RandomUniformBlockCase::RandomUniformBlockCase (tcu::TestContext&	testCtx,
+												const std::string&	name,
+												const std::string&	description,
+												BufferMode			bufferMode,
+												deUint32			features,
+												deUint32			seed)
+	: UniformBlockCase		(testCtx, name, description, bufferMode)
+	, m_features			(features)
+	, m_maxVertexBlocks		((features & FEATURE_VERTEX_BLOCKS)		? 4 : 0)
+	, m_maxFragmentBlocks	((features & FEATURE_FRAGMENT_BLOCKS)	? 4 : 0)
+	, m_maxSharedBlocks		((features & FEATURE_SHARED_BLOCKS)		? 4 : 0)
+	, m_maxInstances		((features & FEATURE_INSTANCE_ARRAYS)	? 3 : 0)
+	, m_maxArrayLength		((features & FEATURE_ARRAYS)			? 8 : 0)
+	, m_maxStructDepth		((features & FEATURE_STRUCTS)			? 2 : 0)
+	, m_maxBlockMembers		(5)
+	, m_maxStructMembers	(4)
+	, m_seed				(seed)
+	, m_blockNdx			(1)
+	, m_uniformNdx			(1)
+	, m_structNdx			(1)
+{
+	de::Random rnd(m_seed);
+
+	int numShared		= m_maxSharedBlocks				> 0	? rnd.getInt(1, m_maxSharedBlocks)				: 0;
+	int numVtxBlocks	= m_maxVertexBlocks-numShared	> 0	? rnd.getInt(1, m_maxVertexBlocks - numShared)	: 0;
+	int	numFragBlocks	= m_maxFragmentBlocks-numShared	> 0 ? rnd.getInt(1, m_maxFragmentBlocks - numShared): 0;
+
+	for (int ndx = 0; ndx < numShared; ndx++)
+		generateBlock(rnd, DECLARE_VERTEX | DECLARE_FRAGMENT);
+
+	for (int ndx = 0; ndx < numVtxBlocks; ndx++)
+		generateBlock(rnd, DECLARE_VERTEX);
+
+	for (int ndx = 0; ndx < numFragBlocks; ndx++)
+		generateBlock(rnd, DECLARE_FRAGMENT);
+
+	init();
+}
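
A minimal sketch of constructing such a case when populating a test group; the flag combination and seed are illustrative, and UniformBlockCase::BUFFERMODE_PER_BLOCK is assumed to be declared in vktUniformBlockCase.hpp as in the GL uniform block tests:

	// Hypothetical: random case with vectors, std140 layout and blocks in both stages.
	group->addChild(new RandomUniformBlockCase(testCtx, "example", "",
											   UniformBlockCase::BUFFERMODE_PER_BLOCK,
											   FEATURE_VECTORS|FEATURE_STD140_LAYOUT|FEATURE_VERTEX_BLOCKS|FEATURE_FRAGMENT_BLOCKS,
											   123u /* seed */));
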
+
+void RandomUniformBlockCase::generateBlock (de::Random& rnd, deUint32 layoutFlags)
+{
+	DE_ASSERT(m_blockNdx <= 'z' - 'a');
+
+	const float		instanceArrayWeight	= 0.3f;
+	UniformBlock&	block				= m_interface.allocBlock(std::string("Block") + (char)('A' + m_blockNdx));
+	int				numInstances		= (m_maxInstances > 0 && rnd.getFloat() < instanceArrayWeight) ? rnd.getInt(0, m_maxInstances) : 0;
+	int				numUniforms			= rnd.getInt(1, m_maxBlockMembers);
+
+	if (numInstances > 0)
+		block.setArraySize(numInstances);
+
+	if (numInstances > 0 || rnd.getBool())
+		block.setInstanceName(std::string("block") + (char)('A' + m_blockNdx));
+
+	// Layout flag candidates.
+	std::vector<deUint32> layoutFlagCandidates;
+	layoutFlagCandidates.push_back(0);
+
+	if (m_features & FEATURE_STD140_LAYOUT)
+		layoutFlagCandidates.push_back(LAYOUT_STD140);
+
+	layoutFlags |= rnd.choose<deUint32>(layoutFlagCandidates.begin(), layoutFlagCandidates.end());
+
+	if (m_features & FEATURE_MATRIX_LAYOUT)
+	{
+		static const deUint32 matrixCandidates[] = { 0, LAYOUT_ROW_MAJOR, LAYOUT_COLUMN_MAJOR };
+		layoutFlags |= rnd.choose<deUint32>(&matrixCandidates[0], &matrixCandidates[DE_LENGTH_OF_ARRAY(matrixCandidates)]);
+	}
+
+	block.setFlags(layoutFlags);
+
+	for (int ndx = 0; ndx < numUniforms; ndx++)
+		generateUniform(rnd, block);
+
+	m_blockNdx += 1;
+}
+
+void RandomUniformBlockCase::generateUniform (de::Random& rnd, UniformBlock& block)
+{
+	const float		unusedVtxWeight		= 0.15f;
+	const float		unusedFragWeight	= 0.15f;
+	bool			unusedOk			= (m_features & FEATURE_UNUSED_UNIFORMS) != 0;
+	deUint32		flags				= 0;
+	std::string		name				= genName('a', 'z', m_uniformNdx);
+	VarType			type				= generateType(rnd, 0, true);
+
+	flags |= (unusedOk && rnd.getFloat() < unusedVtxWeight)		? UNUSED_VERTEX		: 0;
+	flags |= (unusedOk && rnd.getFloat() < unusedFragWeight)	? UNUSED_FRAGMENT	: 0;
+
+	block.addUniform(Uniform(name, type, flags));
+
+	m_uniformNdx += 1;
+}
+
+VarType RandomUniformBlockCase::generateType (de::Random& rnd, int typeDepth, bool arrayOk)
+{
+	const float structWeight	= 0.1f;
+	const float arrayWeight		= 0.1f;
+
+	if (typeDepth < m_maxStructDepth && rnd.getFloat() < structWeight)
+	{
+		const float				unusedVtxWeight		= 0.15f;
+		const float				unusedFragWeight	= 0.15f;
+		bool					unusedOk			= (m_features & FEATURE_UNUSED_MEMBERS) != 0;
+		std::vector<VarType>	memberTypes;
+		int						numMembers = rnd.getInt(1, m_maxStructMembers);
+
+		// Generate members first so nested struct declarations are in correct order.
+		for (int ndx = 0; ndx < numMembers; ndx++)
+			memberTypes.push_back(generateType(rnd, typeDepth+1, true));
+
+		StructType& structType = m_interface.allocStruct(std::string("s") + genName('A', 'Z', m_structNdx));
+		m_structNdx += 1;
+
+		DE_ASSERT(numMembers <= 'Z' - 'A');
+		for (int ndx = 0; ndx < numMembers; ndx++)
+		{
+			deUint32 flags = 0;
+
+			flags |= (unusedOk && rnd.getFloat() < unusedVtxWeight)		? UNUSED_VERTEX		: 0;
+			flags |= (unusedOk && rnd.getFloat() < unusedFragWeight)	? UNUSED_FRAGMENT	: 0;
+
+			structType.addMember(std::string("m") + (char)('A' + ndx), memberTypes[ndx], flags);
+		}
+
+		return VarType(&structType);
+	}
+	else if (m_maxArrayLength > 0 && arrayOk && rnd.getFloat() < arrayWeight)
+	{
+		const bool	arraysOfArraysOk	= (m_features & FEATURE_ARRAYS_OF_ARRAYS) != 0;
+		const int	arrayLength			= rnd.getInt(1, m_maxArrayLength);
+		VarType		elementType			= generateType(rnd, typeDepth, arraysOfArraysOk);
+		return VarType(elementType, arrayLength);
+	}
+	else
+	{
+		std::vector<glu::DataType> typeCandidates;
+
+		typeCandidates.push_back(glu::TYPE_FLOAT);
+		typeCandidates.push_back(glu::TYPE_INT);
+		typeCandidates.push_back(glu::TYPE_UINT);
+		typeCandidates.push_back(glu::TYPE_BOOL);
+
+		if (m_features & FEATURE_VECTORS)
+		{
+			typeCandidates.push_back(glu::TYPE_FLOAT_VEC2);
+			typeCandidates.push_back(glu::TYPE_FLOAT_VEC3);
+			typeCandidates.push_back(glu::TYPE_FLOAT_VEC4);
+			typeCandidates.push_back(glu::TYPE_INT_VEC2);
+			typeCandidates.push_back(glu::TYPE_INT_VEC3);
+			typeCandidates.push_back(glu::TYPE_INT_VEC4);
+			typeCandidates.push_back(glu::TYPE_UINT_VEC2);
+			typeCandidates.push_back(glu::TYPE_UINT_VEC3);
+			typeCandidates.push_back(glu::TYPE_UINT_VEC4);
+			typeCandidates.push_back(glu::TYPE_BOOL_VEC2);
+			typeCandidates.push_back(glu::TYPE_BOOL_VEC3);
+			typeCandidates.push_back(glu::TYPE_BOOL_VEC4);
+		}
+
+		if (m_features & FEATURE_MATRICES)
+		{
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT2);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT2X3);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT3X2);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT3);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT3X4);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT4X2);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT4X3);
+			typeCandidates.push_back(glu::TYPE_FLOAT_MAT4);
+		}
+
+		glu::DataType	type	= rnd.choose<glu::DataType>(typeCandidates.begin(), typeCandidates.end());
+		deUint32		flags	= 0;
+
+		if (!glu::isDataTypeBoolOrBVec(type))
+		{
+			// Precision.
+			static const deUint32 precisionCandidates[] = { PRECISION_LOW, PRECISION_MEDIUM, PRECISION_HIGH };
+			flags |= rnd.choose<deUint32>(&precisionCandidates[0], &precisionCandidates[DE_LENGTH_OF_ARRAY(precisionCandidates)]);
+		}
+
+		return VarType(type, flags);
+	}
+}
+
+} // ubo
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/ubo/vktRandomUniformBlockCase.hpp b/external/vulkancts/modules/vulkan/ubo/vktRandomUniformBlockCase.hpp
new file mode 100644
index 0000000..bc424b4
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/ubo/vktRandomUniformBlockCase.hpp
@@ -0,0 +1,104 @@
+#ifndef _VKTRANDOMUNIFORMBLOCKCASE_HPP
+#define _VKTRANDOMUNIFORMBLOCKCASE_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Random uniform block layout case.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktUniformBlockCase.hpp"
+
+namespace de
+{
+class Random;
+} // de
+
+namespace vkt
+{
+namespace ubo
+{
+
+enum FeatureBits
+{
+	FEATURE_VECTORS				= (1<<0),
+	FEATURE_MATRICES			= (1<<1),
+	FEATURE_ARRAYS				= (1<<2),
+	FEATURE_STRUCTS				= (1<<3),
+	FEATURE_NESTED_STRUCTS		= (1<<4),
+	FEATURE_INSTANCE_ARRAYS		= (1<<5),
+	FEATURE_VERTEX_BLOCKS		= (1<<6),
+	FEATURE_FRAGMENT_BLOCKS		= (1<<7),
+	FEATURE_SHARED_BLOCKS		= (1<<8),
+	FEATURE_UNUSED_UNIFORMS		= (1<<9),
+	FEATURE_UNUSED_MEMBERS		= (1<<10),
+	FEATURE_PACKED_LAYOUT		= (1<<12),
+	FEATURE_SHARED_LAYOUT		= (1<<13),
+	FEATURE_STD140_LAYOUT		= (1<<14),
+	FEATURE_MATRIX_LAYOUT		= (1<<15),	//!< Matrix layout flags.
+	FEATURE_ARRAYS_OF_ARRAYS	= (1<<16)
+};
+
+class RandomUniformBlockCase : public UniformBlockCase
+{
+public:
+							RandomUniformBlockCase		(tcu::TestContext&		testCtx,
+														 const std::string&		name,
+														 const std::string&		description,
+														 BufferMode				bufferMode,
+														 deUint32				features,
+														 deUint32				seed);
+
+private:
+	void					generateBlock				(de::Random& rnd, deUint32 layoutFlags);
+	void					generateUniform				(de::Random& rnd, UniformBlock& block);
+	VarType					generateType				(de::Random& rnd, int typeDepth, bool arrayOk);
+
+	const deUint32			m_features;
+	const int				m_maxVertexBlocks;
+	const int				m_maxFragmentBlocks;
+	const int				m_maxSharedBlocks;
+	const int				m_maxInstances;
+	const int				m_maxArrayLength;
+	const int				m_maxStructDepth;
+	const int				m_maxBlockMembers;
+	const int				m_maxStructMembers;
+	const deUint32			m_seed;
+
+	int						m_blockNdx;
+	int						m_uniformNdx;
+	int						m_structNdx;
+};
+
+} // ubo
+} // vkt
+
+#endif // _VKTRANDOMUNIFORMBLOCKCASE_HPP
diff --git a/external/vulkancts/modules/vulkan/ubo/vktUniformBlockCase.cpp b/external/vulkancts/modules/vulkan/ubo/vktUniformBlockCase.cpp
new file mode 100644
index 0000000..889d353
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/ubo/vktUniformBlockCase.cpp
@@ -0,0 +1,2023 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Uniform block case.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktUniformBlockCase.hpp"
+
+#include "vkPrograms.hpp"
+
+#include "gluVarType.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuSurface.hpp"
+#include "deRandom.hpp"
+#include "deStringUtil.hpp"
+
+#include "tcuTextureUtil.hpp"
+#include "deSharedPtr.hpp"
+
+#include "vkMemUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkTypeUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkBuilderUtil.hpp"
+
+#include <map>
+#include <set>
+
+namespace vkt
+{
+namespace ubo
+{
+
+using namespace vk;
+
+// VarType implementation.
+
+VarType::VarType (void)
+	: m_type	(TYPE_LAST)
+	, m_flags	(0)
+{
+}
+
+VarType::VarType (const VarType& other)
+	: m_type	(TYPE_LAST)
+	, m_flags	(0)
+{
+	*this = other;
+}
+
+VarType::VarType (glu::DataType basicType, deUint32 flags)
+	: m_type	(TYPE_BASIC)
+	, m_flags	(flags)
+{
+	m_data.basicType = basicType;
+}
+
+VarType::VarType (const VarType& elementType, int arraySize)
+	: m_type	(TYPE_ARRAY)
+	, m_flags	(0)
+{
+	m_data.array.size			= arraySize;
+	m_data.array.elementType	= new VarType(elementType);
+}
+
+VarType::VarType (const StructType* structPtr)
+	: m_type	(TYPE_STRUCT)
+	, m_flags	(0)
+{
+	m_data.structPtr = structPtr;
+}
+
+VarType::~VarType (void)
+{
+	if (m_type == TYPE_ARRAY)
+		delete m_data.array.elementType;
+}
+
+VarType& VarType::operator= (const VarType& other)
+{
+	if (this == &other)
+		return *this; // Self-assignment.
+
+	if (m_type == TYPE_ARRAY)
+		delete m_data.array.elementType;
+
+	m_type	= other.m_type;
+	m_flags	= other.m_flags;
+	m_data	= Data();
+
+	if (m_type == TYPE_ARRAY)
+	{
+		m_data.array.elementType	= new VarType(*other.m_data.array.elementType);
+		m_data.array.size			= other.m_data.array.size;
+	}
+	else
+		m_data = other.m_data;
+
+	return *this;
+}
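
As an example of composing these constructors, a highp vec4[3] member can be expressed as a nested VarType (PRECISION_HIGH is one of the precision flags used later in this file):

	VarType(VarType(glu::TYPE_FLOAT_VEC4, PRECISION_HIGH), 3)	// highp vec4[3]
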
+
+// StructType implementation.
+
+void StructType::addMember (const std::string& name, const VarType& type, deUint32 flags)
+{
+	m_members.push_back(StructMember(name, type, flags));
+}
+
+// Uniform implementation.
+
+Uniform::Uniform (const std::string& name, const VarType& type, deUint32 flags)
+	: m_name	(name)
+	, m_type	(type)
+	, m_flags	(flags)
+{
+}
+
+// UniformBlock implementation.
+
+UniformBlock::UniformBlock (const std::string& blockName)
+	: m_blockName	(blockName)
+	, m_arraySize	(0)
+	, m_flags		(0)
+{
+}
+
+std::ostream& operator<< (std::ostream& stream, const BlockLayoutEntry& entry)
+{
+	stream << entry.name << " { name = " << entry.name
+		   << ", size = " << entry.size
+		   << ", activeUniformIndices = [";
+
+	for (std::vector<int>::const_iterator i = entry.activeUniformIndices.begin(); i != entry.activeUniformIndices.end(); i++)
+	{
+		if (i != entry.activeUniformIndices.begin())
+			stream << ", ";
+		stream << *i;
+	}
+
+	stream << "] }";
+	return stream;
+}
+
+std::ostream& operator<< (std::ostream& stream, const UniformLayoutEntry& entry)
+{
+	stream << entry.name << " { type = " << glu::getDataTypeName(entry.type)
+		   << ", size = " << entry.size
+		   << ", blockNdx = " << entry.blockNdx
+		   << ", offset = " << entry.offset
+		   << ", arrayStride = " << entry.arrayStride
+		   << ", matrixStride = " << entry.matrixStride
+		   << ", isRowMajor = " << (entry.isRowMajor ? "true" : "false")
+		   << " }";
+	return stream;
+}
+
+int UniformLayout::getUniformIndex (const std::string& name) const
+{
+	for (int ndx = 0; ndx < (int)uniforms.size(); ndx++)
+	{
+		if (uniforms[ndx].name == name)
+			return ndx;
+	}
+
+	return -1;
+}
+
+int UniformLayout::getBlockIndex (const std::string& name) const
+{
+	for (int ndx = 0; ndx < (int)blocks.size(); ndx++)
+	{
+		if (blocks[ndx].name == name)
+			return ndx;
+	}
+
+	return -1;
+}
+
+// ShaderInterface implementation.
+
+ShaderInterface::ShaderInterface (void)
+{
+}
+
+ShaderInterface::~ShaderInterface (void)
+{
+}
+
+StructType& ShaderInterface::allocStruct (const std::string& name)
+{
+	m_structs.push_back(StructTypeSP(new StructType(name)));
+	return *m_structs.back();
+}
+
+struct StructNameEquals
+{
+	std::string name;
+
+	StructNameEquals (const std::string& name_) : name(name_) {}
+
+	bool operator() (const StructTypeSP type) const
+	{
+		return type->hasTypeName() && name == type->getTypeName();
+	}
+};
+
+void ShaderInterface::getNamedStructs (std::vector<const StructType*>& structs) const
+{
+	for (std::vector<StructTypeSP>::const_iterator i = m_structs.begin(); i != m_structs.end(); i++)
+	{
+		if ((*i)->hasTypeName())
+			structs.push_back((*i).get());
+	}
+}
+
+UniformBlock& ShaderInterface::allocBlock (const std::string& name)
+{
+	m_uniformBlocks.push_back(UniformBlockSP(new UniformBlock(name)));
+	return *m_uniformBlocks.back();
+}
+
+namespace // Utilities
+{
+
+struct PrecisionFlagsFmt
+{
+	deUint32 flags;
+	PrecisionFlagsFmt (deUint32 flags_) : flags(flags_) {}
+};
+
+std::ostream& operator<< (std::ostream& str, const PrecisionFlagsFmt& fmt)
+{
+	// Precision.
+	DE_ASSERT(dePop32(fmt.flags & (PRECISION_LOW|PRECISION_MEDIUM|PRECISION_HIGH)) <= 1);
+	str << (fmt.flags & PRECISION_LOW		? "lowp"	:
+			fmt.flags & PRECISION_MEDIUM	? "mediump"	:
+			fmt.flags & PRECISION_HIGH		? "highp"	: "");
+	return str;
+}
+
+struct LayoutFlagsFmt
+{
+	deUint32 flags;
+	LayoutFlagsFmt (deUint32 flags_) : flags(flags_) {}
+};
+
+std::ostream& operator<< (std::ostream& str, const LayoutFlagsFmt& fmt)
+{
+	static const struct
+	{
+		deUint32	bit;
+		const char*	token;
+	} bitDesc[] =
+	{
+		{ LAYOUT_STD140,		"std140"		},
+		{ LAYOUT_ROW_MAJOR,		"row_major"		},
+		{ LAYOUT_COLUMN_MAJOR,	"column_major"	}
+	};
+
+	deUint32 remBits = fmt.flags;
+	for (int descNdx = 0; descNdx < DE_LENGTH_OF_ARRAY(bitDesc); descNdx++)
+	{
+		if (remBits & bitDesc[descNdx].bit)
+		{
+			if (remBits != fmt.flags)
+				str << ", ";
+			str << bitDesc[descNdx].token;
+			remBits &= ~bitDesc[descNdx].bit;
+		}
+	}
+	DE_ASSERT(remBits == 0);
+	return str;
+}
+
+// Layout computation.
+
+int getDataTypeByteSize (glu::DataType type)
+{
+	return glu::getDataTypeScalarSize(type)*(int)sizeof(deUint32);
+}
+
+int getDataTypeByteAlignment (glu::DataType type)
+{
+	switch (type)
+	{
+		case glu::TYPE_FLOAT:
+		case glu::TYPE_INT:
+		case glu::TYPE_UINT:
+		case glu::TYPE_BOOL:		return 1*(int)sizeof(deUint32);
+
+		case glu::TYPE_FLOAT_VEC2:
+		case glu::TYPE_INT_VEC2:
+		case glu::TYPE_UINT_VEC2:
+		case glu::TYPE_BOOL_VEC2:	return 2*(int)sizeof(deUint32);
+
+		case glu::TYPE_FLOAT_VEC3:
+		case glu::TYPE_INT_VEC3:
+		case glu::TYPE_UINT_VEC3:
+		case glu::TYPE_BOOL_VEC3:	// Fall-through to vec4
+
+		case glu::TYPE_FLOAT_VEC4:
+		case glu::TYPE_INT_VEC4:
+		case glu::TYPE_UINT_VEC4:
+		case glu::TYPE_BOOL_VEC4:	return 4*(int)sizeof(deUint32);
+
+		default:
+			DE_ASSERT(false);
+			return 0;
+	}
+}
+
+deInt32 getminUniformBufferOffsetAlignment (Context &ctx)
+{
+	VkPhysicalDeviceProperties properties;
+	ctx.getInstanceInterface().getPhysicalDeviceProperties(ctx.getPhysicalDevice(), &properties);
+	VkDeviceSize align = properties.limits.minUniformBufferOffsetAlignment;
+	DE_ASSERT(align == (VkDeviceSize)(deInt32)align);
+	return (deInt32)align;
+}
+
+int getDataTypeArrayStride (glu::DataType type)
+{
+	DE_ASSERT(!glu::isDataTypeMatrix(type));
+
+	const int baseStride	= getDataTypeByteSize(type);
+	const int vec4Alignment	= (int)sizeof(deUint32)*4;
+
+	DE_ASSERT(baseStride <= vec4Alignment);
+	return de::max(baseStride, vec4Alignment); // std140 rule 4: the array stride of a scalar/vector type is rounded up to the base alignment of a vec4.
+}
+
+static inline int deRoundUp32 (int a, int b)
+{
+	int d = a/b;
+	return d*b == a ? a : (d+1)*b;
+}
+
+int computeStd140BaseAlignment (const VarType& type)
+{
+	const int vec4Alignment = (int)sizeof(deUint32)*4;
+
+	if (type.isBasicType())
+	{
+		glu::DataType basicType = type.getBasicType();
+
+		if (glu::isDataTypeMatrix(basicType))
+		{
+			bool	isRowMajor	= !!(type.getFlags() & LAYOUT_ROW_MAJOR);
+			int		vecSize		= isRowMajor ? glu::getDataTypeMatrixNumColumns(basicType)
+											 : glu::getDataTypeMatrixNumRows(basicType);
+
+			return getDataTypeArrayStride(glu::getDataTypeFloatVec(vecSize));
+		}
+		else
+			return getDataTypeByteAlignment(basicType);
+	}
+	else if (type.isArrayType())
+	{
+		int elemAlignment = computeStd140BaseAlignment(type.getElementType());
+
+		// Round up to alignment of vec4
+		return deRoundUp32(elemAlignment, vec4Alignment);
+	}
+	else
+	{
+		DE_ASSERT(type.isStructType());
+
+		int maxBaseAlignment = 0;
+
+		for (StructType::ConstIterator memberIter = type.getStruct().begin(); memberIter != type.getStruct().end(); memberIter++)
+			maxBaseAlignment = de::max(maxBaseAlignment, computeStd140BaseAlignment(memberIter->getType()));
+
+		return deRoundUp32(maxBaseAlignment, vec4Alignment);
+	}
+}
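
A few base alignments this function yields, following directly from the code above (std140 rules):

	// float                 -> 4 bytes
	// vec3 (and bvec3 etc.) -> 16 bytes (shares the vec4 case)
	// mat3, column-major    -> 16 bytes (alignment of a vec3 column, rounded to vec4 stride)
	// float[N]              -> 16 bytes (element alignment rounded up to vec4)
	// struct                -> largest member alignment, rounded up to 16 bytes
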
+
+inline deUint32 mergeLayoutFlags (deUint32 prevFlags, deUint32 newFlags)
+{
+	const deUint32	packingMask		= LAYOUT_STD140;
+	const deUint32	matrixMask		= LAYOUT_ROW_MAJOR|LAYOUT_COLUMN_MAJOR;
+
+	deUint32 mergedFlags = 0;
+
+	mergedFlags |= ((newFlags & packingMask)	? newFlags : prevFlags) & packingMask;
+	mergedFlags |= ((newFlags & matrixMask)		? newFlags : prevFlags) & matrixMask;
+
+	return mergedFlags;
+}
+
+void computeStd140Layout (UniformLayout& layout, int& curOffset, int curBlockNdx, const std::string& curPrefix, const VarType& type, deUint32 layoutFlags)
+{
+	int baseAlignment = computeStd140BaseAlignment(type);
+
+	curOffset = deAlign32(curOffset, baseAlignment);
+
+	if (type.isBasicType())
+	{
+		glu::DataType		basicType	= type.getBasicType();
+		UniformLayoutEntry	entry;
+
+		entry.name			= curPrefix;
+		entry.type			= basicType;
+		entry.size			= 1;
+		entry.arrayStride	= 0;
+		entry.matrixStride	= 0;
+		entry.blockNdx		= curBlockNdx;
+
+		if (glu::isDataTypeMatrix(basicType))
+		{
+			// Array of vectors as specified in rules 5 & 7.
+			bool	isRowMajor	= !!(layoutFlags & LAYOUT_ROW_MAJOR);
+			int		vecSize		= isRowMajor ? glu::getDataTypeMatrixNumColumns(basicType)
+											 : glu::getDataTypeMatrixNumRows(basicType);
+			int		numVecs		= isRowMajor ? glu::getDataTypeMatrixNumRows(basicType)
+											 : glu::getDataTypeMatrixNumColumns(basicType);
+			int		stride		= getDataTypeArrayStride(glu::getDataTypeFloatVec(vecSize));
+
+			entry.offset		= curOffset;
+			entry.matrixStride	= stride;
+			entry.isRowMajor	= isRowMajor;
+
+			curOffset += numVecs*stride;
+		}
+		else
+		{
+			// Scalar or vector.
+			entry.offset = curOffset;
+
+			curOffset += getDataTypeByteSize(basicType);
+		}
+
+		layout.uniforms.push_back(entry);
+	}
+	else if (type.isArrayType())
+	{
+		const VarType&	elemType	= type.getElementType();
+
+		if (elemType.isBasicType() && !glu::isDataTypeMatrix(elemType.getBasicType()))
+		{
+			// Array of scalars or vectors.
+			glu::DataType		elemBasicType	= elemType.getBasicType();
+			UniformLayoutEntry	entry;
+			int					stride			= getDataTypeArrayStride(elemBasicType);
+
+			entry.name			= curPrefix + "[0]"; // Array uniforms are always postfixed with [0]
+			entry.type			= elemBasicType;
+			entry.blockNdx		= curBlockNdx;
+			entry.offset		= curOffset;
+			entry.size			= type.getArraySize();
+			entry.arrayStride	= stride;
+			entry.matrixStride	= 0;
+
+			curOffset += stride*type.getArraySize();
+
+			layout.uniforms.push_back(entry);
+		}
+		else if (elemType.isBasicType() && glu::isDataTypeMatrix(elemType.getBasicType()))
+		{
+			// Array of matrices.
+			glu::DataType		elemBasicType	= elemType.getBasicType();
+			bool				isRowMajor		= !!(layoutFlags & LAYOUT_ROW_MAJOR);
+			int					vecSize			= isRowMajor ? glu::getDataTypeMatrixNumColumns(elemBasicType)
+															 : glu::getDataTypeMatrixNumRows(elemBasicType);
+			int					numVecs			= isRowMajor ? glu::getDataTypeMatrixNumRows(elemBasicType)
+															 : glu::getDataTypeMatrixNumColumns(elemBasicType);
+			int					stride			= getDataTypeArrayStride(glu::getDataTypeFloatVec(vecSize));
+			UniformLayoutEntry	entry;
+
+			entry.name			= curPrefix + "[0]"; // Array uniforms are always postfixed with [0]
+			entry.type			= elemBasicType;
+			entry.blockNdx		= curBlockNdx;
+			entry.offset		= curOffset;
+			entry.size			= type.getArraySize();
+			entry.arrayStride	= stride*numVecs;
+			entry.matrixStride	= stride;
+			entry.isRowMajor	= isRowMajor;
+
+			curOffset += numVecs*type.getArraySize()*stride;
+
+			layout.uniforms.push_back(entry);
+		}
+		else
+		{
+			DE_ASSERT(elemType.isStructType() || elemType.isArrayType());
+
+			for (int elemNdx = 0; elemNdx < type.getArraySize(); elemNdx++)
+				computeStd140Layout(layout, curOffset, curBlockNdx, curPrefix + "[" + de::toString(elemNdx) + "]", type.getElementType(), layoutFlags);
+		}
+	}
+	else
+	{
+		DE_ASSERT(type.isStructType());
+
+		for (StructType::ConstIterator memberIter = type.getStruct().begin(); memberIter != type.getStruct().end(); memberIter++)
+			computeStd140Layout(layout, curOffset, curBlockNdx, curPrefix + "." + memberIter->getName(), memberIter->getType(), layoutFlags);
+
+		curOffset = deAlign32(curOffset, baseAlignment);
+	}
+}
+
+void computeStd140Layout (UniformLayout& layout, const ShaderInterface& interface)
+{
+	int numUniformBlocks = interface.getNumUniformBlocks();
+
+	for (int blockNdx = 0; blockNdx < numUniformBlocks; blockNdx++)
+	{
+		const UniformBlock&	block			= interface.getUniformBlock(blockNdx);
+		bool				hasInstanceName	= block.hasInstanceName();
+		std::string			blockPrefix		= hasInstanceName ? (block.getBlockName() + ".") : "";
+		int					curOffset		= 0;
+		int					activeBlockNdx	= (int)layout.blocks.size();
+		int					firstUniformNdx	= (int)layout.uniforms.size();
+
+		for (UniformBlock::ConstIterator uniformIter = block.begin(); uniformIter != block.end(); uniformIter++)
+		{
+			const Uniform& uniform = *uniformIter;
+			computeStd140Layout(layout, curOffset, activeBlockNdx, blockPrefix + uniform.getName(), uniform.getType(), mergeLayoutFlags(block.getFlags(), uniform.getFlags()));
+		}
+
+		int	uniformIndicesEnd	= (int)layout.uniforms.size();
+		int	blockSize			= curOffset;
+		int	numInstances		= block.isArray() ? block.getArraySize() : 1;
+
+		// Create block layout entries for each instance.
+		for (int instanceNdx = 0; instanceNdx < numInstances; instanceNdx++)
+		{
+			// Allocate entry for instance.
+			layout.blocks.push_back(BlockLayoutEntry());
+			BlockLayoutEntry& blockEntry = layout.blocks.back();
+
+			blockEntry.name = block.getBlockName();
+			blockEntry.size = blockSize;
+			blockEntry.bindingNdx = blockNdx;
+			blockEntry.instanceNdx = instanceNdx;
+
+			// Compute active uniform set for block.
+			for (int uniformNdx = firstUniformNdx; uniformNdx < uniformIndicesEnd; uniformNdx++)
+				blockEntry.activeUniformIndices.push_back(uniformNdx);
+
+			if (block.isArray())
+				blockEntry.name += "[" + de::toString(instanceNdx) + "]";
+		}
+	}
+}
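
As a worked example of the layout computation above, a block declared as

	layout(std140) uniform Block { mediump float a; mediump vec3 b; };

yields uniform entries "a" at offset 0 and "b" at offset 16 (aligned up from 4 to the vec3/vec4 alignment), and a BlockLayoutEntry of size 28 bytes (16 + 12), which is what the recursion computes for this interface.
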
+
+// Value generator.
+
+void generateValue (const UniformLayoutEntry& entry, void* basePtr, de::Random& rnd)
+{
+	glu::DataType	scalarType		= glu::getDataTypeScalarType(entry.type);
+	int				scalarSize		= glu::getDataTypeScalarSize(entry.type);
+	bool			isMatrix		= glu::isDataTypeMatrix(entry.type);
+	int				numVecs			= isMatrix ? (entry.isRowMajor ? glu::getDataTypeMatrixNumRows(entry.type) : glu::getDataTypeMatrixNumColumns(entry.type)) : 1;
+	int				vecSize			= scalarSize / numVecs;
+	bool			isArray			= entry.size > 1;
+	const int		compSize		= sizeof(deUint32);
+
+	DE_ASSERT(scalarSize%numVecs == 0);
+
+	for (int elemNdx = 0; elemNdx < entry.size; elemNdx++)
+	{
+		deUint8* elemPtr = (deUint8*)basePtr + entry.offset + (isArray ? elemNdx*entry.arrayStride : 0);
+
+		for (int vecNdx = 0; vecNdx < numVecs; vecNdx++)
+		{
+			deUint8* vecPtr = elemPtr + (isMatrix ? vecNdx*entry.matrixStride : 0);
+
+			for (int compNdx = 0; compNdx < vecSize; compNdx++)
+			{
+				deUint8* compPtr = vecPtr + compSize*compNdx;
+
+				switch (scalarType)
+				{
+					case glu::TYPE_FLOAT:	*((float*)compPtr)		= (float)rnd.getInt(-9, 9);						break;
+					case glu::TYPE_INT:		*((int*)compPtr)		= rnd.getInt(-9, 9);							break;
+					case glu::TYPE_UINT:	*((deUint32*)compPtr)	= (deUint32)rnd.getInt(0, 9);					break;
+					// \note Random bit pattern is used for true values. Spec states that all non-zero values are
+					//       interpreted as true but some implementations fail this.
+					case glu::TYPE_BOOL:	*((deUint32*)compPtr)	= rnd.getBool() ? rnd.getUint32()|1u : 0u;		break;
+					default:
+						DE_ASSERT(false);
+				}
+			}
+		}
+	}
+}
+
+void generateValues (const UniformLayout& layout, const std::map<int, void*>& blockPointers, deUint32 seed)
+{
+	de::Random	rnd			(seed);
+	int			numBlocks	= (int)layout.blocks.size();
+
+	for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+	{
+		void*	basePtr		= blockPointers.find(blockNdx)->second;
+		int		numEntries	= (int)layout.blocks[blockNdx].activeUniformIndices.size();
+
+		for (int entryNdx = 0; entryNdx < numEntries; entryNdx++)
+		{
+			const UniformLayoutEntry& entry = layout.uniforms[layout.blocks[blockNdx].activeUniformIndices[entryNdx]];
+			generateValue(entry, basePtr, rnd);
+		}
+	}
+}
+
+// Shader generator.
+
+const char* getCompareFuncForType (glu::DataType type)
+{
+	switch (type)
+	{
+		case glu::TYPE_FLOAT:			return "mediump float compare_float    (highp float a, highp float b)  { return abs(a - b) < 0.05 ? 1.0 : 0.0; }\n";
+		case glu::TYPE_FLOAT_VEC2:		return "mediump float compare_vec2     (highp vec2 a, highp vec2 b)    { return compare_float(a.x, b.x)*compare_float(a.y, b.y); }\n";
+		case glu::TYPE_FLOAT_VEC3:		return "mediump float compare_vec3     (highp vec3 a, highp vec3 b)    { return compare_float(a.x, b.x)*compare_float(a.y, b.y)*compare_float(a.z, b.z); }\n";
+		case glu::TYPE_FLOAT_VEC4:		return "mediump float compare_vec4     (highp vec4 a, highp vec4 b)    { return compare_float(a.x, b.x)*compare_float(a.y, b.y)*compare_float(a.z, b.z)*compare_float(a.w, b.w); }\n";
+		case glu::TYPE_FLOAT_MAT2:		return "mediump float compare_mat2     (highp mat2 a, highp mat2 b)    { return compare_vec2(a[0], b[0])*compare_vec2(a[1], b[1]); }\n";
+		case glu::TYPE_FLOAT_MAT2X3:	return "mediump float compare_mat2x3   (highp mat2x3 a, highp mat2x3 b){ return compare_vec3(a[0], b[0])*compare_vec3(a[1], b[1]); }\n";
+		case glu::TYPE_FLOAT_MAT2X4:	return "mediump float compare_mat2x4   (highp mat2x4 a, highp mat2x4 b){ return compare_vec4(a[0], b[0])*compare_vec4(a[1], b[1]); }\n";
+		case glu::TYPE_FLOAT_MAT3X2:	return "mediump float compare_mat3x2   (highp mat3x2 a, highp mat3x2 b){ return compare_vec2(a[0], b[0])*compare_vec2(a[1], b[1])*compare_vec2(a[2], b[2]); }\n";
+		case glu::TYPE_FLOAT_MAT3:		return "mediump float compare_mat3     (highp mat3 a, highp mat3 b)    { return compare_vec3(a[0], b[0])*compare_vec3(a[1], b[1])*compare_vec3(a[2], b[2]); }\n";
+		case glu::TYPE_FLOAT_MAT3X4:	return "mediump float compare_mat3x4   (highp mat3x4 a, highp mat3x4 b){ return compare_vec4(a[0], b[0])*compare_vec4(a[1], b[1])*compare_vec4(a[2], b[2]); }\n";
+		case glu::TYPE_FLOAT_MAT4X2:	return "mediump float compare_mat4x2   (highp mat4x2 a, highp mat4x2 b){ return compare_vec2(a[0], b[0])*compare_vec2(a[1], b[1])*compare_vec2(a[2], b[2])*compare_vec2(a[3], b[3]); }\n";
+		case glu::TYPE_FLOAT_MAT4X3:	return "mediump float compare_mat4x3   (highp mat4x3 a, highp mat4x3 b){ return compare_vec3(a[0], b[0])*compare_vec3(a[1], b[1])*compare_vec3(a[2], b[2])*compare_vec3(a[3], b[3]); }\n";
+		case glu::TYPE_FLOAT_MAT4:		return "mediump float compare_mat4     (highp mat4 a, highp mat4 b)    { return compare_vec4(a[0], b[0])*compare_vec4(a[1], b[1])*compare_vec4(a[2], b[2])*compare_vec4(a[3], b[3]); }\n";
+		case glu::TYPE_INT:				return "mediump float compare_int      (highp int a, highp int b)      { return a == b ? 1.0 : 0.0; }\n";
+		case glu::TYPE_INT_VEC2:		return "mediump float compare_ivec2    (highp ivec2 a, highp ivec2 b)  { return a == b ? 1.0 : 0.0; }\n";
+		case glu::TYPE_INT_VEC3:		return "mediump float compare_ivec3    (highp ivec3 a, highp ivec3 b)  { return a == b ? 1.0 : 0.0; }\n";
+		case glu::TYPE_INT_VEC4:		return "mediump float compare_ivec4    (highp ivec4 a, highp ivec4 b)  { return a == b ? 1.0 : 0.0; }\n";
+		case glu::TYPE_UINT:			return "mediump float compare_uint     (highp uint a, highp uint b)    { return a == b ? 1.0 : 0.0; }\n";
+		case glu::TYPE_UINT_VEC2:		return "mediump float compare_uvec2    (highp uvec2 a, highp uvec2 b)  { return a == b ? 1.0 : 0.0; }\n";
+		case glu::TYPE_UINT_VEC3:		return "mediump float compare_uvec3    (highp uvec3 a, highp uvec3 b)  { return a == b ? 1.0 : 0.0; }\n";
+		case glu::TYPE_UINT_VEC4:		return "mediump float compare_uvec4    (highp uvec4 a, highp uvec4 b)  { return a == b ? 1.0 : 0.0; }\n";
+		case glu::TYPE_BOOL:			return "mediump float compare_bool     (bool a, bool b)                { return a == b ? 1.0 : 0.0; }\n";
+		case glu::TYPE_BOOL_VEC2:		return "mediump float compare_bvec2    (bvec2 a, bvec2 b)              { return a == b ? 1.0 : 0.0; }\n";
+		case glu::TYPE_BOOL_VEC3:		return "mediump float compare_bvec3    (bvec3 a, bvec3 b)              { return a == b ? 1.0 : 0.0; }\n";
+		case glu::TYPE_BOOL_VEC4:		return "mediump float compare_bvec4    (bvec4 a, bvec4 b)              { return a == b ? 1.0 : 0.0; }\n";
+		default:
+			DE_ASSERT(false);
+			return DE_NULL;
+	}
+}
+
+void getCompareDependencies (std::set<glu::DataType>& compareFuncs, glu::DataType basicType)
+{
+	switch (basicType)
+	{
+		case glu::TYPE_FLOAT_VEC2:
+		case glu::TYPE_FLOAT_VEC3:
+		case glu::TYPE_FLOAT_VEC4:
+			compareFuncs.insert(glu::TYPE_FLOAT);
+			compareFuncs.insert(basicType);
+			break;
+
+		case glu::TYPE_FLOAT_MAT2:
+		case glu::TYPE_FLOAT_MAT2X3:
+		case glu::TYPE_FLOAT_MAT2X4:
+		case glu::TYPE_FLOAT_MAT3X2:
+		case glu::TYPE_FLOAT_MAT3:
+		case glu::TYPE_FLOAT_MAT3X4:
+		case glu::TYPE_FLOAT_MAT4X2:
+		case glu::TYPE_FLOAT_MAT4X3:
+		case glu::TYPE_FLOAT_MAT4:
+			compareFuncs.insert(glu::TYPE_FLOAT);
+			compareFuncs.insert(glu::getDataTypeFloatVec(glu::getDataTypeMatrixNumRows(basicType)));
+			compareFuncs.insert(basicType);
+			break;
+
+		default:
+			compareFuncs.insert(basicType);
+			break;
+	}
+}
+
+void collectUniqueBasicTypes (std::set<glu::DataType>& basicTypes, const VarType& type)
+{
+	if (type.isStructType())
+	{
+		for (StructType::ConstIterator iter = type.getStruct().begin(); iter != type.getStruct().end(); ++iter)
+			collectUniqueBasicTypes(basicTypes, iter->getType());
+	}
+	else if (type.isArrayType())
+		collectUniqueBasicTypes(basicTypes, type.getElementType());
+	else
+	{
+		DE_ASSERT(type.isBasicType());
+		basicTypes.insert(type.getBasicType());
+	}
+}
+
+void collectUniqueBasicTypes (std::set<glu::DataType>& basicTypes, const UniformBlock& uniformBlock)
+{
+	for (UniformBlock::ConstIterator iter = uniformBlock.begin(); iter != uniformBlock.end(); ++iter)
+		collectUniqueBasicTypes(basicTypes, iter->getType());
+}
+
+void collectUniqueBasicTypes (std::set<glu::DataType>& basicTypes, const ShaderInterface& interface)
+{
+	for (int ndx = 0; ndx < interface.getNumUniformBlocks(); ++ndx)
+		collectUniqueBasicTypes(basicTypes, interface.getUniformBlock(ndx));
+}
+
+void generateCompareFuncs (std::ostream& str, const ShaderInterface& interface)
+{
+	std::set<glu::DataType> types;
+	std::set<glu::DataType> compareFuncs;
+
+	// Collect unique basic types
+	collectUniqueBasicTypes(types, interface);
+
+	// Set of compare functions required
+	for (std::set<glu::DataType>::const_iterator iter = types.begin(); iter != types.end(); ++iter)
+	{
+		getCompareDependencies(compareFuncs, *iter);
+	}
+
+	for (int type = 0; type < glu::TYPE_LAST; ++type)
+	{
+		if (compareFuncs.find(glu::DataType(type)) != compareFuncs.end())
+			str << getCompareFuncForType(glu::DataType(type));
+	}
+}
+
+struct Indent
+{
+	int level;
+	Indent (int level_) : level(level_) {}
+};
+
+std::ostream& operator<< (std::ostream& str, const Indent& indent)
+{
+	for (int i = 0; i < indent.level; i++)
+		str << "\t";
+	return str;
+}
+
+void		generateDeclaration			(std::ostringstream& src, const VarType& type, const std::string& name, int indentLevel, deUint32 unusedHints);
+void		generateDeclaration			(std::ostringstream& src, const Uniform& uniform, int indentLevel);
+void		generateDeclaration			(std::ostringstream& src, const StructType& structType, int indentLevel);
+
+void		generateLocalDeclaration	(std::ostringstream& src, const StructType& structType, int indentLevel);
+void		generateFullDeclaration		(std::ostringstream& src, const StructType& structType, int indentLevel);
+
+void generateDeclaration (std::ostringstream& src, const StructType& structType, int indentLevel)
+{
+	DE_ASSERT(structType.hasTypeName());
+	generateFullDeclaration(src, structType, indentLevel);
+	src << ";\n";
+}
+
+void generateFullDeclaration (std::ostringstream& src, const StructType& structType, int indentLevel)
+{
+	src << "struct";
+	if (structType.hasTypeName())
+		src << " " << structType.getTypeName();
+	src << "\n" << Indent(indentLevel) << "{\n";
+
+	for (StructType::ConstIterator memberIter = structType.begin(); memberIter != structType.end(); memberIter++)
+	{
+		src << Indent(indentLevel + 1);
+		generateDeclaration(src, memberIter->getType(), memberIter->getName(), indentLevel + 1, memberIter->getFlags() & UNUSED_BOTH);
+	}
+
+	src << Indent(indentLevel) << "}";
+}
+
+void generateLocalDeclaration (std::ostringstream& src, const StructType& structType, int /* indentLevel */)
+{
+	src << structType.getTypeName();
+}
+
+void generateDeclaration (std::ostringstream& src, const VarType& type, const std::string& name, int indentLevel, deUint32 unusedHints)
+{
+	deUint32 flags = type.getFlags();
+
+	if ((flags & LAYOUT_MASK) != 0)
+		src << "layout(" << LayoutFlagsFmt(flags & LAYOUT_MASK) << ") ";
+
+	if ((flags & PRECISION_MASK) != 0)
+		src << PrecisionFlagsFmt(flags & PRECISION_MASK) << " ";
+
+	if (type.isBasicType())
+		src << glu::getDataTypeName(type.getBasicType()) << " " << name;
+	else if (type.isArrayType())
+	{
+		std::vector<int>	arraySizes;
+		const VarType*		curType		= &type;
+		while (curType->isArrayType())
+		{
+			arraySizes.push_back(curType->getArraySize());
+			curType = &curType->getElementType();
+		}
+
+		if (curType->isBasicType())
+		{
+			if ((curType->getFlags() & PRECISION_MASK) != 0)
+				src << PrecisionFlagsFmt(curType->getFlags() & PRECISION_MASK) << " ";
+			src << glu::getDataTypeName(curType->getBasicType());
+		}
+		else
+		{
+			DE_ASSERT(curType->isStructType());
+			generateLocalDeclaration(src, curType->getStruct(), indentLevel+1);
+		}
+
+		src << " " << name;
+
+		for (std::vector<int>::const_iterator sizeIter = arraySizes.begin(); sizeIter != arraySizes.end(); sizeIter++)
+			src << "[" << *sizeIter << "]";
+	}
+	else
+	{
+		generateLocalDeclaration(src, type.getStruct(), indentLevel+1);
+		src << " " << name;
+	}
+
+	src << ";";
+
+	// Print out unused hints.
+	if (unusedHints != 0)
+		src << " // unused in " << (unusedHints == UNUSED_BOTH		? "both shaders"	:
+									unusedHints == UNUSED_VERTEX	? "vertex shader"	:
+									unusedHints == UNUSED_FRAGMENT	? "fragment shader" : "???");
+
+	src << "\n";
+}
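
For instance, a three-element highp vec4 array member that is flagged unused in the fragment shader comes out of this path as:

	highp vec4 v[3]; // unused in fragment shader
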
+
+void generateDeclaration (std::ostringstream& src, const Uniform& uniform, int indentLevel)
+{
+	if ((uniform.getFlags() & LAYOUT_MASK) != 0)
+		src << "layout(" << LayoutFlagsFmt(uniform.getFlags() & LAYOUT_MASK) << ") ";
+
+	generateDeclaration(src, uniform.getType(), uniform.getName(), indentLevel, uniform.getFlags() & UNUSED_BOTH);
+}
+
+void generateDeclaration (std::ostringstream& src, int blockNdx, const UniformBlock& block)
+{
+	src << "layout(set = 0, binding = " << blockNdx;
+	if ((block.getFlags() & LAYOUT_MASK) != 0)
+		src << ", " << LayoutFlagsFmt(block.getFlags() & LAYOUT_MASK);
+	src << ") ";
+
+	src << "uniform " << block.getBlockName();
+	src << "\n{\n";
+
+	for (UniformBlock::ConstIterator uniformIter = block.begin(); uniformIter != block.end(); uniformIter++)
+	{
+		src << Indent(1);
+		generateDeclaration(src, *uniformIter, 1 /* indent level */);
+	}
+
+	src << "}";
+
+	if (block.hasInstanceName())
+	{
+		src << " " << block.getInstanceName();
+		if (block.isArray())
+			src << "[" << block.getArraySize() << "]";
+	}
+	else
+		DE_ASSERT(!block.isArray());
+
+	src << ";\n";
+}
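
For reference, the first block (binding 0) with an std140 layout flag, an instance name and a single mediump vec4 member is emitted roughly as:

	layout(set = 0, binding = 0, std140) uniform BlockA
	{
		mediump vec4 a;
	} blockA;
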
+
+void generateValueSrc (std::ostringstream& src, const UniformLayoutEntry& entry, const void* basePtr, int elementNdx)
+{
+	glu::DataType	scalarType		= glu::getDataTypeScalarType(entry.type);
+	int				scalarSize		= glu::getDataTypeScalarSize(entry.type);
+	bool			isArray			= entry.size > 1;
+	const deUint8*	elemPtr			= (const deUint8*)basePtr + entry.offset + (isArray ? elementNdx * entry.arrayStride : 0);
+	const int		compSize		= sizeof(deUint32);
+
+	if (scalarSize > 1)
+		src << glu::getDataTypeName(entry.type) << "(";
+
+	if (glu::isDataTypeMatrix(entry.type))
+	{
+		int	numRows	= glu::getDataTypeMatrixNumRows(entry.type);
+		int	numCols	= glu::getDataTypeMatrixNumColumns(entry.type);
+
+		DE_ASSERT(scalarType == glu::TYPE_FLOAT);
+
+		// Constructed in column-wise order.
+		for (int colNdx = 0; colNdx < numCols; colNdx++)
+		{
+			for (int rowNdx = 0; rowNdx < numRows; rowNdx++)
+			{
+				const deUint8*	compPtr	= elemPtr + (entry.isRowMajor ? (rowNdx * entry.matrixStride + colNdx * compSize)
+																	  : (colNdx * entry.matrixStride + rowNdx * compSize));
+
+				if (colNdx > 0 || rowNdx > 0)
+					src << ", ";
+
+				src << de::floatToString(*((const float*)compPtr), 1);
+			}
+		}
+	}
+	else
+	{
+		for (int scalarNdx = 0; scalarNdx < scalarSize; scalarNdx++)
+		{
+			const deUint8* compPtr = elemPtr + scalarNdx * compSize;
+
+			if (scalarNdx > 0)
+				src << ", ";
+
+			switch (scalarType)
+			{
+				case glu::TYPE_FLOAT:	src << de::floatToString(*((const float*)compPtr), 1);			break;
+				case glu::TYPE_INT:		src << *((const int*)compPtr);									break;
+				case glu::TYPE_UINT:	src << *((const deUint32*)compPtr) << "u";						break;
+				case glu::TYPE_BOOL:	src << (*((const deUint32*)compPtr) != 0u ? "true" : "false");	break;
+				default:
+					DE_ASSERT(false);
+			}
+		}
+	}
+
+	if (scalarSize > 1)
+		src << ")";
+}
+
+void generateCompareSrc (std::ostringstream&	src,
+						 const char*			resultVar,
+						 const VarType&			type,
+						 const std::string&		srcName,
+						 const std::string&		apiName,
+						 const UniformLayout&	layout,
+						 const void*			basePtr,
+						 deUint32				unusedMask)
+{
+	if (type.isBasicType() || (type.isArrayType() && type.getElementType().isBasicType()))
+	{
+		// Basic type or array of basic types.
+		bool						isArray			= type.isArrayType();
+		glu::DataType				elementType		= isArray ? type.getElementType().getBasicType() : type.getBasicType();
+		const char*					typeName		= glu::getDataTypeName(elementType);
+		std::string					fullApiName		= std::string(apiName) + (isArray ? "[0]" : ""); // Arrays are always postfixed with [0]
+		int							uniformNdx		= layout.getUniformIndex(fullApiName);
+		const UniformLayoutEntry&	entry			= layout.uniforms[uniformNdx];
+
+		if (isArray)
+		{
+			for (int elemNdx = 0; elemNdx < type.getArraySize(); elemNdx++)
+			{
+				src << "\tresult *= compare_" << typeName << "(" << srcName << "[" << elemNdx << "], ";
+				generateValueSrc(src, entry, basePtr, elemNdx);
+				src << ");\n";
+			}
+		}
+		else
+		{
+			src << "\tresult *= compare_" << typeName << "(" << srcName << ", ";
+			generateValueSrc(src, entry, basePtr, 0);
+			src << ");\n";
+		}
+	}
+	else if (type.isArrayType())
+	{
+		const VarType& elementType = type.getElementType();
+
+		for (int elementNdx = 0; elementNdx < type.getArraySize(); elementNdx++)
+		{
+			std::string op = std::string("[") + de::toString(elementNdx) + "]";
+			std::string elementSrcName = std::string(srcName) + op;
+			std::string elementApiName = std::string(apiName) + op;
+			generateCompareSrc(src, resultVar, elementType, elementSrcName, elementApiName, layout, basePtr, unusedMask);
+		}
+	}
+	else
+	{
+		DE_ASSERT(type.isStructType());
+
+		for (StructType::ConstIterator memberIter = type.getStruct().begin(); memberIter != type.getStruct().end(); memberIter++)
+		{
+			if (memberIter->getFlags() & unusedMask)
+				continue; // Skip member.
+
+			std::string op = std::string(".") + memberIter->getName();
+			std::string memberSrcName = std::string(srcName) + op;
+			std::string memberApiName = std::string(apiName) + op;
+			generateCompareSrc(src, resultVar, memberIter->getType(), memberSrcName, memberApiName, layout, basePtr, unusedMask);
+		}
+	}
+}
+
+void generateCompareSrc (std::ostringstream& src, const char* resultVar, const ShaderInterface& interface, const UniformLayout& layout, const std::map<int, void*>& blockPointers, bool isVertex)
+{
+	deUint32 unusedMask = isVertex ? UNUSED_VERTEX : UNUSED_FRAGMENT;
+
+	for (int blockNdx = 0; blockNdx < interface.getNumUniformBlocks(); blockNdx++)
+	{
+		const UniformBlock& block = interface.getUniformBlock(blockNdx);
+
+		if ((block.getFlags() & (isVertex ? DECLARE_VERTEX : DECLARE_FRAGMENT)) == 0)
+			continue; // Skip.
+
+		bool			hasInstanceName	= block.hasInstanceName();
+		bool			isArray			= block.isArray();
+		int				numInstances	= isArray ? block.getArraySize() : 1;
+		std::string		apiPrefix		= hasInstanceName ? block.getBlockName() + "." : std::string("");
+
+		DE_ASSERT(!isArray || hasInstanceName);
+
+		for (int instanceNdx = 0; instanceNdx < numInstances; instanceNdx++)
+		{
+			std::string		instancePostfix		= isArray ? std::string("[") + de::toString(instanceNdx) + "]" : std::string("");
+			std::string		blockInstanceName	= block.getBlockName() + instancePostfix;
+			std::string		srcPrefix			= hasInstanceName ? block.getInstanceName() + instancePostfix + "." : std::string("");
+			int				activeBlockNdx		= layout.getBlockIndex(blockInstanceName);
+			void*			basePtr				= blockPointers.find(activeBlockNdx)->second;
+
+			for (UniformBlock::ConstIterator uniformIter = block.begin(); uniformIter != block.end(); uniformIter++)
+			{
+				const Uniform& uniform = *uniformIter;
+
+				if (uniform.getFlags() & unusedMask)
+					continue; // Don't read from that uniform.
+
+				std::string srcName = srcPrefix + uniform.getName();
+				std::string apiName = apiPrefix + uniform.getName();
+				generateCompareSrc(src, resultVar, uniform.getType(), srcName, apiName, layout, basePtr, unusedMask);
+			}
+		}
+	}
+}
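
The net effect is a chain of multiplications into the result variable; for a single vec4 uniform in an instance-named block the emitted comparison looks like this (values are illustrative):

	result *= compare_vec4(blockA.a, vec4(1.0, -2.0, 3.0, -4.0));
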
+
+std::string generateVertexShader (const ShaderInterface& interface, const UniformLayout& layout, const std::map<int, void*>& blockPointers)
+{
+	std::ostringstream src;
+	src << "#version 450\n";
+
+	src << "layout(location = 0) in highp vec4 a_position;\n";
+	src << "layout(location = 0) out mediump float v_vtxResult;\n";
+	src << "\n";
+
+	std::vector<const StructType*> namedStructs;
+	interface.getNamedStructs(namedStructs);
+	for (std::vector<const StructType*>::const_iterator structIter = namedStructs.begin(); structIter != namedStructs.end(); structIter++)
+		generateDeclaration(src, **structIter, 0);
+
+	for (int blockNdx = 0; blockNdx < interface.getNumUniformBlocks(); blockNdx++)
+	{
+		const UniformBlock& block = interface.getUniformBlock(blockNdx);
+		if (block.getFlags() & DECLARE_VERTEX)
+			generateDeclaration(src, blockNdx, block);
+	}
+
+	// Comparison utilities.
+	src << "\n";
+	generateCompareFuncs(src, interface);
+
+	src << "\n"
+		   "void main (void)\n"
+		   "{\n"
+		   "	gl_Position = a_position;\n"
+		   "	mediump float result = 1.0;\n";
+
+	// Value compare.
+	generateCompareSrc(src, "result", interface, layout, blockPointers, true);
+
+	src << "	v_vtxResult = result;\n"
+		   "}\n";
+
+	return src.str();
+}
+
+std::string generateFragmentShader (const ShaderInterface& interface, const UniformLayout& layout, const std::map<int, void*>& blockPointers)
+{
+	std::ostringstream src;
+	src << "#version 450\n";
+
+	src << "layout(location = 0) in mediump float v_vtxResult;\n";
+	src << "layout(location = 0) out mediump vec4 dEQP_FragColor;\n";
+	src << "\n";
+
+	std::vector<const StructType*> namedStructs;
+	interface.getNamedStructs(namedStructs);
+	for (std::vector<const StructType*>::const_iterator structIter = namedStructs.begin(); structIter != namedStructs.end(); structIter++)
+		generateDeclaration(src, **structIter, 0);
+
+	for (int blockNdx = 0; blockNdx < interface.getNumUniformBlocks(); blockNdx++)
+	{
+		const UniformBlock& block = interface.getUniformBlock(blockNdx);
+		if (block.getFlags() & DECLARE_FRAGMENT)
+			generateDeclaration(src, blockNdx, block);
+	}
+
+	// Comparison utilities.
+	src << "\n";
+	generateCompareFuncs(src, interface);
+
+	src << "\n"
+		   "void main (void)\n"
+		   "{\n"
+		   "	mediump float result = 1.0;\n";
+
+	// Value compare.
+	generateCompareSrc(src, "result", interface, layout, blockPointers, false);
+
+	src << "	dEQP_FragColor = vec4(1.0, v_vtxResult, result, 1.0);\n"
+		   "}\n";
+
+	return src.str();
+}
+
+Move<VkBuffer> createBuffer (Context& context, VkDeviceSize bufferSize, vk::VkBufferUsageFlags usageFlags)
+{
+	const VkDevice				vkDevice			= context.getDevice();
+	const DeviceInterface&		vk					= context.getDeviceInterface();
+	const deUint32				queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+
+	const VkBufferCreateInfo	bufferInfo			=
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,	// VkStructureType		sType;
+		DE_NULL,								// const void*			pNext;
+		0u,										// VkBufferCreateFlags	flags;
+		bufferSize,								// VkDeviceSize			size;
+		usageFlags,								// VkBufferUsageFlags	usage;
+		VK_SHARING_MODE_EXCLUSIVE,				// VkSharingMode		sharingMode;
+		1u,										// deUint32				queueFamilyIndexCount;
+		&queueFamilyIndex						// const deUint32*		pQueueFamilyIndices;
+	};
+
+	return vk::createBuffer(vk, vkDevice, &bufferInfo);
+}
+
+Move<vk::VkImage> createImage2D (Context& context, deUint32 width, deUint32 height, vk::VkFormat format, vk::VkImageTiling tiling, vk::VkImageUsageFlags usageFlags)
+{
+	const deUint32				queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+	const vk::VkImageCreateInfo	params				=
+	{
+		vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,	// VkStructureType			sType
+		DE_NULL,									// const void*				pNext
+		0u,											// VkImageCreateFlags		flags
+		vk::VK_IMAGE_TYPE_2D,						// VkImageType				imageType
+		format,										// VkFormat					format
+		{ width, height, 1u },						// VkExtent3D				extent
+		1u,											// deUint32					mipLevels
+		1u,											// deUint32					arrayLayers
+		VK_SAMPLE_COUNT_1_BIT,						// VkSampleCountFlagBits	samples
+		tiling,										// VkImageTiling			tiling
+		usageFlags,									// VkImageUsageFlags		usage
+		vk::VK_SHARING_MODE_EXCLUSIVE,				// VkSharingMode			sharingMode
+		1u,											// deUint32					queueFamilyIndexCount
+		&queueFamilyIndex,							// const deUint32*			pQueueFamilyIndices
+		vk::VK_IMAGE_LAYOUT_UNDEFINED,				// VkImageLayout			initialLayout
+	};
+
+	return vk::createImage(context.getDeviceInterface(), context.getDevice(), &params);
+}
+
+de::MovePtr<vk::Allocation> allocateAndBindMemory (Context& context, vk::VkBuffer buffer, vk::MemoryRequirement memReqs)
+{
+	const vk::DeviceInterface&		vkd		= context.getDeviceInterface();
+	const vk::VkMemoryRequirements	bufReqs	= vk::getBufferMemoryRequirements(vkd, context.getDevice(), buffer);
+	de::MovePtr<vk::Allocation>		memory	= context.getDefaultAllocator().allocate(bufReqs, memReqs);
+
+	vkd.bindBufferMemory(context.getDevice(), buffer, memory->getMemory(), memory->getOffset());
+
+	return memory;
+}
+
+de::MovePtr<vk::Allocation> allocateAndBindMemory (Context& context, vk::VkImage image, vk::MemoryRequirement memReqs)
+{
+	const vk::DeviceInterface&		vkd		= context.getDeviceInterface();
+	const vk::VkMemoryRequirements	imgReqs	= vk::getImageMemoryRequirements(vkd, context.getDevice(), image);
+	de::MovePtr<vk::Allocation>		memory	= context.getDefaultAllocator().allocate(imgReqs, memReqs);
+
+	vkd.bindImageMemory(context.getDevice(), image, memory->getMemory(), memory->getOffset());
+
+	return memory;
+}
+
+Move<vk::VkImageView> createAttachmentView (Context& context, vk::VkImage image, vk::VkFormat format)
+{
+	const vk::VkImageViewCreateInfo params =
+	{
+		vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,		// sType
+		DE_NULL,											// pNext
+		0u,													// flags
+		image,												// image
+		vk::VK_IMAGE_VIEW_TYPE_2D,							// viewType
+		format,												// format
+		vk::makeComponentMappingRGBA(),						// components
+		{ vk::VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u },	// subresourceRange
+	};
+
+	return vk::createImageView(context.getDeviceInterface(), context.getDevice(), &params);
+}
+
+Move<vk::VkPipelineLayout> createPipelineLayout (Context& context, vk::VkDescriptorSetLayout descriptorSetLayout)
+{
+	const vk::VkPipelineLayoutCreateInfo params =
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,	// sType
+		DE_NULL,											// pNext
+		0u,													// flags
+		1u,													// setLayoutCount
+		&descriptorSetLayout,								// pSetLayouts
+		0u,													// pushConstantRangeCount
+		DE_NULL,											// pPushConstantRanges
+	};
+
+	return vk::createPipelineLayout(context.getDeviceInterface(), context.getDevice(), &params);
+}
+
+Move<vk::VkCommandPool> createCmdPool (Context& context)
+{
+	const deUint32					queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+	const vk::VkCommandPoolCreateInfo	params				=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,			// sType
+		DE_NULL,												// pNext
+		vk::VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,	// flags
+		queueFamilyIndex,										// queueFamilyIndex
+	};
+
+	return vk::createCommandPool(context.getDeviceInterface(), context.getDevice(), &params);
+}
+
+Move<vk::VkCommandBuffer> createCmdBuffer (Context& context, vk::VkCommandPool cmdPool)
+{
+	const vk::VkCommandBufferAllocateInfo params =
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// sType
+		DE_NULL,											// pNext
+		cmdPool,											// commandPool
+		vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// level
+		1u,													// deUint32				commandBufferCount;
+	};
+
+	return vk::allocateCommandBuffer(context.getDeviceInterface(), context.getDevice(), &params);
+}
+
+
+// UniformBlockCaseInstance
+
+class UniformBlockCaseInstance : public vkt::TestInstance
+{
+public:
+									UniformBlockCaseInstance	(Context&						context,
+																 UniformBlockCase::BufferMode	bufferMode,
+																 const UniformLayout&			layout,
+																 const std::map<int, void*>&	blockPointers);
+	virtual							~UniformBlockCaseInstance	(void);
+	virtual tcu::TestStatus			iterate						(void);
+
+private:
+	enum
+	{
+		RENDER_WIDTH = 100,
+		RENDER_HEIGHT = 100,
+	};
+
+	vk::Move<VkRenderPass>			createRenderPass			(vk::VkFormat format) const;
+	vk::Move<VkFramebuffer>			createFramebuffer			(vk::VkRenderPass renderPass, vk::VkImageView colorImageView) const;
+	vk::Move<VkDescriptorSetLayout>	createDescriptorSetLayout	(void) const;
+	vk::Move<VkDescriptorPool>		createDescriptorPool		(void) const;
+	vk::Move<VkPipeline>			createPipeline				(vk::VkShaderModule vtxShaderModule, vk::VkShaderModule fragShaderModule, vk::VkPipelineLayout pipelineLayout, vk::VkRenderPass renderPass) const;
+
+	vk::VkDescriptorBufferInfo		addUniformData				(deUint32 size, const void* dataPtr);
+
+	UniformBlockCase::BufferMode	m_bufferMode;
+	const UniformLayout&			m_layout;
+	const std::map<int, void*>&		m_blockPointers;
+
+	typedef de::SharedPtr<vk::Unique<vk::VkBuffer> >	VkBufferSp;
+	typedef de::SharedPtr<vk::Allocation>				AllocationSp;
+
+	std::vector<VkBufferSp>			m_uniformBuffers;
+	std::vector<AllocationSp>		m_uniformAllocs;
+};
+
+UniformBlockCaseInstance::UniformBlockCaseInstance (Context&						ctx,
+													UniformBlockCase::BufferMode	bufferMode,
+													const UniformLayout&			layout,
+													const std::map<int, void*>&		blockPointers)
+	: vkt::TestInstance (ctx)
+	, m_bufferMode		(bufferMode)
+	, m_layout			(layout)
+	, m_blockPointers	(blockPointers)
+{
+}
+
+UniformBlockCaseInstance::~UniformBlockCaseInstance (void)
+{
+}
+
+tcu::TestStatus UniformBlockCaseInstance::iterate (void)
+{
+	const vk::DeviceInterface&		vk					= m_context.getDeviceInterface();
+	const vk::VkDevice				device				= m_context.getDevice();
+	const vk::VkQueue				queue				= m_context.getUniversalQueue();
+	const deUint32					queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
+
+	const float positions[] =
+	{
+		-1.0f, -1.0f, 0.0f, 1.0f,
+		-1.0f, +1.0f, 0.0f, 1.0f,
+		+1.0f, -1.0f, 0.0f, 1.0f,
+		+1.0f, +1.0f, 0.0f, 1.0f
+	};
+
+	const deUint32 indices[] = { 0, 1, 2, 2, 1, 3 };
+
+	vk::Unique<VkBuffer>				positionsBuffer		(createBuffer(m_context, sizeof(positions), vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT));
+	de::UniquePtr<Allocation>			positionsAlloc		(allocateAndBindMemory(m_context, *positionsBuffer, MemoryRequirement::HostVisible));
+	vk::Unique<VkBuffer>				indicesBuffer		(createBuffer(m_context, sizeof(indices), vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT|vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT));
+	de::UniquePtr<Allocation>			indicesAlloc		(allocateAndBindMemory(m_context, *indicesBuffer, MemoryRequirement::HostVisible));
+
+	int minUniformBufferOffsetAlignment = getminUniformBufferOffsetAlignment(m_context);
+
+	// Upload vertex attribute data
+	{
+		deMemcpy(positionsAlloc->getHostPtr(), positions, sizeof(positions));
+		flushMappedMemoryRange(vk, device, positionsAlloc->getMemory(), positionsAlloc->getOffset(), sizeof(positions));
+
+		deMemcpy(indicesAlloc->getHostPtr(), indices, sizeof(indices));
+		flushMappedMemoryRange(vk, device, indicesAlloc->getMemory(), indicesAlloc->getOffset(), sizeof(indices));
+	}
+
+	vk::Unique<VkImage>					colorImage			(createImage2D(m_context,
+																			RENDER_WIDTH,
+																			RENDER_HEIGHT,
+																			vk::VK_FORMAT_R8G8B8A8_UNORM,
+																			vk::VK_IMAGE_TILING_OPTIMAL,
+																			vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT));
+	de::UniquePtr<Allocation>			colorImageAlloc		(allocateAndBindMemory(m_context, *colorImage, MemoryRequirement::Any));
+	vk::Unique<VkImageView>				colorImageView		(createAttachmentView(m_context, *colorImage, vk::VK_FORMAT_R8G8B8A8_UNORM));
+
+	vk::Unique<VkDescriptorSetLayout>	descriptorSetLayout	(createDescriptorSetLayout());
+	vk::Unique<VkDescriptorPool>		descriptorPool		(createDescriptorPool());
+
+	const VkDescriptorSetAllocateInfo	descriptorSetAllocateInfo =
+	{
+		VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,		// VkStructureType				sType;
+		DE_NULL,											// const void*					pNext;
+		*descriptorPool,									// VkDescriptorPool				descriptorPool;
+		1u,													// deUint32						descriptorSetCount;
+		&descriptorSetLayout.get()							// const VkDescriptorSetLayout*	pSetLayouts;
+	};
+
+	vk::Unique<VkDescriptorSet>			descriptorSet(vk::allocateDescriptorSet(vk, device, &descriptorSetAllocateInfo));
+	int									numBlocks = (int)m_layout.blocks.size();
+	std::vector<vk::VkDescriptorBufferInfo>	descriptors(numBlocks);
+
+	// Upload uniform data
+	{
+		vk::DescriptorSetUpdateBuilder	descriptorSetUpdateBuilder;
+
+		if (m_bufferMode == UniformBlockCase::BUFFERMODE_PER_BLOCK)
+		{
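+			// BUFFERMODE_PER_BLOCK: allocate a separate uniform buffer for each block instance.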
+			for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+			{
+				const BlockLayoutEntry& block = m_layout.blocks[blockNdx];
+				const void*	srcPtr = m_blockPointers.find(blockNdx)->second;
+
+				descriptors[blockNdx] = addUniformData(block.size, srcPtr);
+				descriptorSetUpdateBuilder.writeSingle(*descriptorSet, vk::DescriptorSetUpdateBuilder::Location::bindingArrayElement(block.bindingNdx, block.instanceNdx),
+														VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, &descriptors[blockNdx]);
+			}
+		}
+		else
+		{
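+			// BUFFERMODE_SINGLE: pack all blocks into a single uniform buffer, aligning each
+			// block offset to the device's minimum uniform buffer offset alignment.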
+			int currentOffset = 0;
+			std::map<int, int> offsets;
+			for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+			{
+				if (minUniformBufferOffsetAlignment > 0)
+					currentOffset = deAlign32(currentOffset, minUniformBufferOffsetAlignment);
+				offsets[blockNdx] = currentOffset;
+				currentOffset += m_layout.blocks[blockNdx].size;
+			}
+
+			deUint32 totalSize = currentOffset;
+
+			// Make a copy of the data that satisfies the device's min uniform buffer alignment
+			std::vector<deUint8> data;
+			data.resize(totalSize);
+			for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+			{
+				deMemcpy(&data[offsets[blockNdx]], m_blockPointers.find(blockNdx)->second, m_layout.blocks[blockNdx].size);
+			}
+
+			vk::VkBuffer buffer = addUniformData(totalSize, &data[0]).buffer;
+
+			for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+			{
+				const BlockLayoutEntry& block = m_layout.blocks[blockNdx];
+				deUint32 size = block.size;
+
+				const VkDescriptorBufferInfo	descriptor =
+				{
+					buffer,							// VkBuffer		buffer;
+					(deUint32)offsets[blockNdx],	// VkDeviceSize	offset;
+					size,							// VkDeviceSize	range;
+				};
+
+				descriptors[blockNdx] = descriptor;
+				descriptorSetUpdateBuilder.writeSingle(*descriptorSet,
+														vk::DescriptorSetUpdateBuilder::Location::bindingArrayElement(block.bindingNdx, block.instanceNdx),
+														VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
+														&descriptors[blockNdx]);
+			}
+		}
+
+		descriptorSetUpdateBuilder.update(vk, device);
+	}
+
+	vk::Unique<VkRenderPass>			renderPass			(createRenderPass(vk::VK_FORMAT_R8G8B8A8_UNORM));
+	vk::Unique<VkFramebuffer>			framebuffer			(createFramebuffer(*renderPass, *colorImageView));
+	vk::Unique<VkPipelineLayout>		pipelineLayout		(createPipelineLayout(m_context, *descriptorSetLayout));
+
+	vk::Unique<VkShaderModule>			vtxShaderModule		(vk::createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
+	vk::Unique<VkShaderModule>			fragShaderModule	(vk::createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
+	vk::Unique<VkPipeline>				pipeline			(createPipeline(*vtxShaderModule, *fragShaderModule, *pipelineLayout, *renderPass));
+	vk::Unique<VkCommandPool>			cmdPool				(createCmdPool(m_context));
+	vk::Unique<VkCommandBuffer>			cmdBuffer			(createCmdBuffer(m_context, *cmdPool));
+	vk::Unique<VkBuffer>				readImageBuffer		(createBuffer(m_context, (vk::VkDeviceSize)(RENDER_WIDTH * RENDER_HEIGHT * 4), vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT));
+	de::UniquePtr<Allocation>			readImageAlloc		(allocateAndBindMemory(m_context, *readImageBuffer, vk::MemoryRequirement::HostVisible));
+
+	// Record command buffer
+	const vk::VkCommandBufferBeginInfo beginInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// VkStructureType					sType;
+		DE_NULL,											// const void*						pNext;
+		0u,													// VkCommandBufferUsageFlags		flags;
+		(const vk::VkCommandBufferInheritanceInfo*)DE_NULL,	// const VkCommandBufferInheritanceInfo*	pInheritanceInfo;
+	};
+	VK_CHECK(vk.beginCommandBuffer(*cmdBuffer, &beginInfo));
+
+	const vk::VkClearValue clearValue = vk::makeClearValueColorF32(0.125f, 0.25f, 0.75f, 1.0f);
+	const vk::VkRenderPassBeginInfo passBeginInfo	=
+	{
+		vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,	// VkStructureType		sType;
+		DE_NULL,										// const void*			pNext;
+		*renderPass,									// VkRenderPass			renderPass;
+		*framebuffer,									// VkFramebuffer		framebuffer;
+		{ { 0, 0 }, { RENDER_WIDTH, RENDER_HEIGHT } },	// VkRect2D				renderArea;
+		1u,												// deUint32				clearValueCount;
+		&clearValue,									// const VkClearValue*	pClearValues;
+	};
+
+	vk.cmdBeginRenderPass(*cmdBuffer, &passBeginInfo, vk::VK_SUBPASS_CONTENTS_INLINE);
+
+	vk.cmdBindPipeline(*cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
+	vk.cmdBindDescriptorSets(*cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &*descriptorSet, 0u, DE_NULL);
+
+	const vk::VkDeviceSize offsets[] = { 0u };
+	vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &*positionsBuffer, offsets);
+	vk.cmdBindIndexBuffer(*cmdBuffer, *indicesBuffer, (vk::VkDeviceSize)0, vk::VK_INDEX_TYPE_UINT32);
+
+	vk.cmdDrawIndexed(*cmdBuffer, DE_LENGTH_OF_ARRAY(indices), 1u, 0u, 0u, 0u);
+	vk.cmdEndRenderPass(*cmdBuffer);
+
+	// Add render finish barrier
+	{
+		const vk::VkImageMemoryBarrier  renderFinishBarrier =
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// VkStructureType			sType;
+			DE_NULL,										// const void*				pNext
+			vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,		// VkAccessFlags			srcAccessMask;
+			vk::VK_ACCESS_TRANSFER_READ_BIT,				// VkAccessFlags			dstAccessMask;
+			vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	// VkImageLayout			oldLayout;
+			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,		// VkImageLayout			newLayout;
+			queueFamilyIndex,								// deUint32					srcQueueFamilyIndex;
+			queueFamilyIndex,								// deUint32					dstQueueFamilyIndex;
+			*colorImage,									// VkImage					image;
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,			// VkImageAspectFlags	aspectMask;
+				0u,										// deUint32				baseMipLevel;
+				1u,										// deUint32				levelCount;
+				0u,										// deUint32				baseArrayLayer;
+				1u,										// deUint32				layerCount;
+			}												// VkImageSubresourceRange	subresourceRange
+		};
+
+		vk.cmdPipelineBarrier(*cmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0,
+							  0, (const vk::VkMemoryBarrier*)DE_NULL,
+							  0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+							  1, &renderFinishBarrier);
+	}
+
+	// Add Image->Buffer copy command
+	{
+		const vk::VkBufferImageCopy copyParams =
+		{
+			(vk::VkDeviceSize)0u,					// VkDeviceSize				bufferOffset;
+			(deUint32)RENDER_WIDTH,					// deUint32					bufferRowLength;
+			(deUint32)RENDER_HEIGHT,				// deUint32					bufferImageHeight;
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,	// VkImageAspectFlags	aspectMask;
+				0u,								// deUint32				mipLevel;
+				0u,								// deUint32				baseArrayLayer;
+				1u,								// deUint32				layerCount;
+			},										// VkImageSubresourceLayers	imageSubresource;
+			{ 0u, 0u, 0u },							// VkOffset3D				imageOffset;
+			{ RENDER_WIDTH, RENDER_HEIGHT, 1u }		// VkExtent3D				imageExtent;
+		};
+
+		vk.cmdCopyImageToBuffer(*cmdBuffer, *colorImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *readImageBuffer, 1u, &copyParams);
+	}
+
+	// Add copy finish barrier
+	{
+		const vk::VkBufferMemoryBarrier copyFinishBarrier	=
+		{
+			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,		// VkStructureType		sType;
+			DE_NULL,											// const void*			pNext;
+			VK_ACCESS_TRANSFER_WRITE_BIT,						// VkAccessFlags		srcAccessMask;
+			VK_ACCESS_HOST_READ_BIT,							// VkAccessFlags		dstAccessMask;
+			queueFamilyIndex,									// deUint32				srcQueueFamilyIndex;
+			queueFamilyIndex,									// deUint32				dstQueueFamilyIndex;
+			*readImageBuffer,									// VkBuffer				buffer;
+			0u,													// VkDeviceSize			offset;
+			(vk::VkDeviceSize)(RENDER_WIDTH * RENDER_HEIGHT * 4)	// VkDeviceSize			size;
+		};
+
+		vk.cmdPipelineBarrier(*cmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0,
+							  0, (const vk::VkMemoryBarrier*)DE_NULL,
+							  1, &copyFinishBarrier,
+							  0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+	}
+
+	VK_CHECK(vk.endCommandBuffer(*cmdBuffer));
+
+	// Submit the command buffer
+	{
+		const vk::VkFenceCreateInfo fenceParams =
+		{
+			vk::VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// VkStructureType		sType;
+			DE_NULL,									// const void*			pNext;
+			0u,											// VkFenceCreateFlags	flags;
+		};
+		const Unique<vk::VkFence> fence(vk::createFence(vk, device, &fenceParams));
+
+		const VkSubmitInfo			submitInfo	=
+		{
+			VK_STRUCTURE_TYPE_SUBMIT_INFO,	// VkStructureType			sType;
+			DE_NULL,						// const void*				pNext;
+			0u,								// deUint32					waitSemaphoreCount;
+			DE_NULL,						// const VkSemaphore*		pWaitSemaphores;
+			(const VkPipelineStageFlags*)DE_NULL,	// const VkPipelineStageFlags*	pWaitDstStageMask;
+			1u,								// deUint32					commandBufferCount;
+			&cmdBuffer.get(),				// const VkCommandBuffer*	pCommandBuffers;
+			0u,								// deUint32					signalSemaphoreCount;
+			DE_NULL							// const VkSemaphore*		pSignalSemaphores;
+		};
+
+		VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, *fence));
+		VK_CHECK(vk.waitForFences(device, 1u, &fence.get(), DE_TRUE, ~0ull));
+	}
+
+	// Read back the results
+	tcu::Surface surface(RENDER_WIDTH, RENDER_HEIGHT);
+	{
+		const tcu::TextureFormat textureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8);
+		const tcu::ConstPixelBufferAccess imgAccess(textureFormat, RENDER_WIDTH, RENDER_HEIGHT, 1, readImageAlloc->getHostPtr());
+		const vk::VkDeviceSize bufferSize = RENDER_WIDTH * RENDER_HEIGHT * 4;
+		invalidateMappedMemoryRange(vk, device, readImageAlloc->getMemory(), readImageAlloc->getOffset(), bufferSize);
+
+		tcu::copy(surface.getAccess(), imgAccess);
+	}
+
+	// Check if the result image is all white
+	tcu::RGBA white(tcu::RGBA::white());
+	int numFailedPixels = 0;
+
+	for (int y = 0; y < surface.getHeight(); y++)
+	{
+		for (int x = 0; x < surface.getWidth(); x++)
+		{
+			if (surface.getPixel(x, y) != white)
+				numFailedPixels += 1;
+		}
+	}
+
+	if (numFailedPixels > 0)
+	{
+		tcu::TestLog& log = m_context.getTestContext().getLog();
+		log << tcu::TestLog::Image("Image", "Rendered image", surface);
+		log << tcu::TestLog::Message << "Image comparison failed, got " << numFailedPixels << " non-white pixels" << tcu::TestLog::EndMessage;
+
+		for (size_t blockNdx = 0; blockNdx < m_layout.blocks.size(); blockNdx++)
+		{
+			const BlockLayoutEntry& block = m_layout.blocks[blockNdx];
+			log << tcu::TestLog::Message << "Block index: " << blockNdx << " infos: " << block << tcu::TestLog::EndMessage;
+		}
+
+		for (size_t uniformNdx = 0; uniformNdx < m_layout.uniforms.size(); uniformNdx++)
+		{
+			log << tcu::TestLog::Message << "Uniform index: " << uniformNdx << " infos: " << m_layout.uniforms[uniformNdx] << tcu::TestLog::EndMessage;
+		}
+
+		return tcu::TestStatus::fail("Detected non-white pixels");
+	}
+	else
+		return tcu::TestStatus::pass("Full white image ok");
+}
+
+vk::VkDescriptorBufferInfo UniformBlockCaseInstance::addUniformData (deUint32 size, const void* dataPtr)
+{
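+	// Create a host-visible uniform buffer, copy the block data into it and keep the buffer
+	// and its allocation alive for the lifetime of the test instance.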
+	const VkDevice					vkDevice			= m_context.getDevice();
+	const DeviceInterface&			vk					= m_context.getDeviceInterface();
+
+	Move<VkBuffer>					buffer	= createBuffer(m_context, size, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
+	de::MovePtr<Allocation>			alloc	= allocateAndBindMemory(m_context, *buffer, vk::MemoryRequirement::HostVisible);
+
+	deMemcpy(alloc->getHostPtr(), dataPtr, size);
+	flushMappedMemoryRange(vk, vkDevice, alloc->getMemory(), alloc->getOffset(), size);
+
+	const VkDescriptorBufferInfo			descriptor			=
+	{
+		*buffer,				// VkBuffer		buffer;
+		0u,						// VkDeviceSize	offset;
+		size,					// VkDeviceSize	range;
+	};
+
+	m_uniformBuffers.push_back(VkBufferSp(new vk::Unique<vk::VkBuffer>(buffer)));
+	m_uniformAllocs.push_back(AllocationSp(alloc.release()));
+
+	return descriptor;
+}
+
+vk::Move<VkRenderPass> UniformBlockCaseInstance::createRenderPass (vk::VkFormat format) const
+{
+	const VkDevice					vkDevice				= m_context.getDevice();
+	const DeviceInterface&			vk						= m_context.getDeviceInterface();
+
+	const VkAttachmentDescription	attachmentDescription	=
+	{
+		0u,												// VkAttachmentDescriptionFlags	flags;
+		format,											// VkFormat						format;
+		VK_SAMPLE_COUNT_1_BIT,							// VkSampleCountFlagBits		samples;
+		VK_ATTACHMENT_LOAD_OP_CLEAR,					// VkAttachmentLoadOp			loadOp;
+		VK_ATTACHMENT_STORE_OP_STORE,					// VkAttachmentStoreOp			storeOp;
+		VK_ATTACHMENT_LOAD_OP_DONT_CARE,				// VkAttachmentLoadOp			stencilLoadOp;
+		VK_ATTACHMENT_STORE_OP_DONT_CARE,				// VkAttachmentStoreOp			stencilStoreOp;
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,		// VkImageLayout				initialLayout;
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,		// VkImageLayout				finalLayout;
+	};
+
+	const VkAttachmentReference		attachmentReference		=
+	{
+		0u,											// deUint32			attachment;
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL	// VkImageLayout	layout;
+	};
+
+
+	const VkSubpassDescription		subpassDescription		=
+	{
+		0u,												// VkSubpassDescriptionFlags	flags;
+		VK_PIPELINE_BIND_POINT_GRAPHICS,				// VkPipelineBindPoint			pipelineBindPoint;
+		0u,												// deUint32						inputAttachmentCount;
+		DE_NULL,										// const VkAttachmentReference*	pInputAttachments;
+		1u,												// deUint32						colorAttachmentCount;
+		&attachmentReference,							// const VkAttachmentReference*	pColorAttachments;
+		DE_NULL,										// const VkAttachmentReference*	pResolveAttachments;
+		DE_NULL,										// const VkAttachmentReference*	pDepthStencilAttachment;
+		0u,												// deUint32						preserveAttachmentCount;
+		DE_NULL											// const VkAttachmentReference*	pPreserveAttachments;
+	};
+
+	const VkRenderPassCreateInfo	renderPassParams		=
+	{
+		VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,		// VkStructureType					sType;
+		DE_NULL,										// const void*						pNext;
+		0u,												// VkRenderPassCreateFlags			flags;
+		1u,												// deUint32							attachmentCount;
+		&attachmentDescription,							// const VkAttachmentDescription*	pAttachments;
+		1u,												// deUint32							subpassCount;
+		&subpassDescription,							// const VkSubpassDescription*		pSubpasses;
+		0u,												// deUint32							dependencyCount;
+		DE_NULL											// const VkSubpassDependency*		pDependencies;
+	};
+
+	return vk::createRenderPass(vk, vkDevice, &renderPassParams);
+}
+
+vk::Move<VkFramebuffer> UniformBlockCaseInstance::createFramebuffer (vk::VkRenderPass renderPass, vk::VkImageView colorImageView) const
+{
+	const VkDevice					vkDevice			= m_context.getDevice();
+	const DeviceInterface&			vk					= m_context.getDeviceInterface();
+
+	const VkFramebufferCreateInfo	framebufferParams	=
+	{
+		VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,		// VkStructureType			sType;
+		DE_NULL,										// const void*				pNext;
+		0u,												// VkFramebufferCreateFlags	flags;
+		renderPass,										// VkRenderPass				renderPass;
+		1u,												// deUint32					attachmentCount;
+		&colorImageView,								// const VkImageView*		pAttachments;
+		RENDER_WIDTH,									// deUint32					width;
+		RENDER_HEIGHT,									// deUint32					height;
+		1u												// deUint32					layers;
+	};
+
+	return vk::createFramebuffer(vk, vkDevice, &framebufferParams);
+}
+
+vk::Move<VkDescriptorSetLayout> UniformBlockCaseInstance::createDescriptorSetLayout (void) const
+{
+	int numBlocks = (int)m_layout.blocks.size();
+	int lastBindingNdx = -1;
+	std::vector<int> lengths;
+
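+	// Blocks sharing a binding index are instances of the same block array; count consecutive
+	// entries per binding so that array bindings can be declared with the correct size.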
+	for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+	{
+		const BlockLayoutEntry& block = m_layout.blocks[blockNdx];
+
+		if (block.bindingNdx == lastBindingNdx)
+		{
+			lengths.back()++;
+		}
+		else
+		{
+			lengths.push_back(1);
+			lastBindingNdx = block.bindingNdx;
+		}
+	}
+
+	vk::DescriptorSetLayoutBuilder layoutBuilder;
+	for (size_t i = 0; i < lengths.size(); i++)
+	{
+		if (lengths[i] > 0)
+		{
+			layoutBuilder.addArrayBinding(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, lengths[i], vk::VK_SHADER_STAGE_ALL);
+		}
+		else
+		{
+			layoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, vk::VK_SHADER_STAGE_ALL);
+		}
+	}
+
+	return layoutBuilder.build(m_context.getDeviceInterface(), m_context.getDevice());
+}
+
+vk::Move<VkDescriptorPool> UniformBlockCaseInstance::createDescriptorPool (void) const
+{
+	vk::DescriptorPoolBuilder poolBuilder;
+
+	return poolBuilder
+		.addType(VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, (int)m_layout.blocks.size())
+		.build(m_context.getDeviceInterface(), m_context.getDevice(), VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+}
+
+vk::Move<VkPipeline> UniformBlockCaseInstance::createPipeline (vk::VkShaderModule vtxShaderModule, vk::VkShaderModule fragShaderModule, vk::VkPipelineLayout pipelineLayout, vk::VkRenderPass renderPass) const
+{
+	const VkDevice									vkDevice				= m_context.getDevice();
+	const DeviceInterface&							vk						= m_context.getDeviceInterface();
+
+	const VkVertexInputBindingDescription			vertexBinding			=
+	{
+		0,									// deUint32					binding;
+		(deUint32)sizeof(float) * 4,		// deUint32				stride;
+		VK_VERTEX_INPUT_RATE_VERTEX			// VkVertexInputRate	inputRate;
+	};
+
+	const VkVertexInputAttributeDescription			vertexAttribute			=
+	{
+		0,									// deUint32		location;
+		0,									// deUint32		binding;
+		VK_FORMAT_R32G32B32A32_SFLOAT,		// VkFormat		format;
+		0u									// deUint32		offset;
+	};
+
+	const VkPipelineShaderStageCreateInfo			shaderStages[2]	=
+	{
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// VkStructureType					sType;
+			DE_NULL,												// const void*						pNext;
+			0u,														// VkPipelineShaderStageCreateFlags	flags;
+			VK_SHADER_STAGE_VERTEX_BIT,								// VkShaderStageFlagBits			stage;
+			vtxShaderModule,										// VkShaderModule					module;
+			"main",													// const char*						pName;
+			DE_NULL													// const VkSpecializationInfo*		pSpecializationInfo;
+		},
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// VkStructureType					sType;
+			DE_NULL,												// const void*						pNext;
+			0u,														// VkPipelineShaderStageCreateFlags flags;
+			VK_SHADER_STAGE_FRAGMENT_BIT,							// VkShaderStageFlagBits			stage;
+			fragShaderModule,										// VkShaderModule					module;
+			"main",													// const char*						pName;
+			DE_NULL													// const VkSpecializationInfo*		pSpecializationInfo;
+		}
+	};
+
+	const VkPipelineVertexInputStateCreateInfo		vertexInputStateParams		=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,	// VkStructureType							sType;
+		DE_NULL,													// const void*								pNext;
+		0u,															// VkPipelineVertexInputStateCreateFlags	flags;
+		1u,															// deUint32									vertexBindingDescriptionCount;
+		&vertexBinding,												// const VkVertexInputBindingDescription*	pVertexBindingDescriptions;
+		1u,															// deUint32									vertexAttributeDescriptionCount;
+		&vertexAttribute,											// const VkVertexInputAttributeDescription*	pVertexAttributeDescriptions;
+	};
+
+	const VkPipelineInputAssemblyStateCreateInfo	inputAssemblyStateParams	=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,// VkStructureType							sType;
+		DE_NULL,													// const void*								pNext;
+		0u,															// VkPipelineInputAssemblyStateCreateFlags	flags;
+		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,						// VkPrimitiveTopology						topology;
+		false														// VkBool32									primitiveRestartEnable;
+	};
+
+	const VkViewport								viewport					=
+	{
+		0.0f,					// float	x;
+		0.0f,					// float	y;
+		(float)RENDER_WIDTH,	// float	width;
+		(float)RENDER_HEIGHT,	// float	height;
+		0.0f,					// float	minDepth;
+		1.0f					// float	maxDepth;
+	};
+
+
+	const VkRect2D									scissor						=
+	{
+		{
+			0u,				// deUint32	x;
+			0u,				// deUint32	y;
+		},						// VkOffset2D	offset;
+		{
+			RENDER_WIDTH,	// deUint32	width;
+			RENDER_HEIGHT,	// deUint32	height;
+		},						// VkExtent2D	extent;
+	};
+
+	const VkPipelineViewportStateCreateInfo			viewportStateParams			=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,		// VkStructureType						sType;
+		DE_NULL,													// const void*							pNext;
+		0u,															// VkPipelineViewportStateCreateFlags	flags;
+		1u,															// deUint32								viewportCount;
+		&viewport,													// const VkViewport*					pViewports;
+		1u,															// deUint32								scissorCount;
+		&scissor,													// const VkRect2D*						pScissors;
+	};
+
+	const VkPipelineRasterizationStateCreateInfo	rasterStateParams			=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO, // VkStructureType							sType;
+		DE_NULL,													// const void*								pNext;
+		0u,															// VkPipelineRasterizationStateCreateFlags	flags;
+		false,														// VkBool32									depthClampEnable;
+		false,														// VkBool32									rasterizerDiscardEnable;
+		VK_POLYGON_MODE_FILL,										// VkPolygonMode							polygonMode;
+		VK_CULL_MODE_NONE,											// VkCullModeFlags							cullMode;
+		VK_FRONT_FACE_COUNTER_CLOCKWISE,							// VkFrontFace								frontFace;
+		false,														// VkBool32									depthBiasEnable;
+		0.0f,														// float									depthBiasConstantFactor;
+		0.0f,														// float									depthBiasClamp;
+		0.0f,														// float									depthBiasSlopeFactor;
+		1.0f,														// float									lineWidth;
+	};
+
+	const VkPipelineMultisampleStateCreateInfo		multisampleStateParams =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,	// VkStructureType							sType;
+		DE_NULL,													// const void*								pNext;
+		0u,															// VkPipelineMultisampleStateCreateFlags	flags;
+		VK_SAMPLE_COUNT_1_BIT,										// VkSampleCountFlagBits					rasterizationSamples;
+		VK_FALSE,													// VkBool32									sampleShadingEnable;
+		0.0f,														// float									minSampleShading;
+		DE_NULL,													// const VkSampleMask*						pSampleMask;
+		VK_FALSE,													// VkBool32									alphaToCoverageEnable;
+		VK_FALSE													// VkBool32									alphaToOneEnable;
+	};
+
+	const VkPipelineColorBlendAttachmentState		colorBlendAttachmentState	=
+	{
+		false,																		// VkBool32			blendEnable;
+		VK_BLEND_FACTOR_ONE,														// VkBlendFactor			srcColorBlendFactor;
+		VK_BLEND_FACTOR_ZERO,														// VkBlendFactor			dstColorBlendFactor;
+		VK_BLEND_OP_ADD,															// VkBlendOp				colorBlendOp;
+		VK_BLEND_FACTOR_ONE,														// VkBlendFactor			srcAlphaBlendFactor;
+		VK_BLEND_FACTOR_ZERO,														// VkBlendFactor			dstAlphaBlendFactor;
+		VK_BLEND_OP_ADD,															// VkBlendOp				alphaBlendOp;
+		VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT |						// VkColorComponentFlags	colorWriteMask;
+		VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT
+	};
+
+	const VkPipelineColorBlendStateCreateInfo		colorBlendStateParams		=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,	// VkStructureType								sType;
+		DE_NULL,													// const void*									pNext;
+		0u,															// VkPipelineColorBlendStateCreateFlags			flags;
+		false,														// VkBool32										logicOpEnable;
+		VK_LOGIC_OP_COPY,											// VkLogicOp									logicOp;
+		1u,															// deUint32										attachmentCount;
+		&colorBlendAttachmentState,									// const VkPipelineColorBlendAttachmentState*	pAttachments;
+		{ 0.0f, 0.0f, 0.0f, 0.0f },									// float										blendConstants[4];
+	};
+
+	const VkPipelineDynamicStateCreateInfo			dynamicStateInfo			=
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,	// VkStructureType						sType;
+		DE_NULL,												// const void*							pNext;
+		0u,														// VkPipelineDynamicStateCreateFlags	flags;
+		0u,														// deUint32								dynamicStateCount;
+		DE_NULL													// const VkDynamicState*				pDynamicStates;
+	};
+
+	const VkGraphicsPipelineCreateInfo				graphicsPipelineParams		=
+	{
+		VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,	// VkStructureType									sType;
+		DE_NULL,											// const void*										pNext;
+		0u,													// VkPipelineCreateFlags							flags;
+		2u,													// deUint32											stageCount;
+		shaderStages,										// const VkPipelineShaderStageCreateInfo*			pStages;
+		&vertexInputStateParams,							// const VkPipelineVertexInputStateCreateInfo*		pVertexInputState;
+		&inputAssemblyStateParams,							// const VkPipelineInputAssemblyStateCreateInfo*	pInputAssemblyState;
+		DE_NULL,											// const VkPipelineTessellationStateCreateInfo*		pTessellationState;
+		&viewportStateParams,								// const VkPipelineViewportStateCreateInfo*			pViewportState;
+		&rasterStateParams,									// const VkPipelineRasterizationStateCreateInfo*	pRasterizationState;
+		&multisampleStateParams,							// const VkPipelineMultisampleStateCreateInfo*		pMultisampleState;
+		DE_NULL,											// const VkPipelineDepthStencilStateCreateInfo*		pDepthStencilState;
+		&colorBlendStateParams,								// const VkPipelineColorBlendStateCreateInfo*		pColorBlendState;
+		&dynamicStateInfo,									// const VkPipelineDynamicStateCreateInfo*			pDynamicState;
+		pipelineLayout,										// VkPipelineLayout									layout;
+		renderPass,											// VkRenderPass										renderPass;
+		0u,													// deUint32											subpass;
+		0u,													// VkPipeline										basePipelineHandle;
+		0u													// deInt32											basePipelineIndex;
+	};
+
+	return vk::createGraphicsPipeline(vk, vkDevice, DE_NULL, &graphicsPipelineParams);
+}
+
+} // anonymous (utilities)
+
+// UniformBlockCase.
+
+UniformBlockCase::UniformBlockCase (tcu::TestContext& testCtx, const std::string& name, const std::string& description, BufferMode bufferMode)
+	: TestCase		(testCtx, name, description)
+	, m_bufferMode	(bufferMode)
+{
+}
+
+UniformBlockCase::~UniformBlockCase (void)
+{
+}
+
+void UniformBlockCase::initPrograms (vk::SourceCollections& programCollection) const
+{
+	DE_ASSERT(!m_vertShaderSource.empty());
+	DE_ASSERT(!m_fragShaderSource.empty());
+
+	programCollection.glslSources.add("vert") << glu::VertexSource(m_vertShaderSource);
+	programCollection.glslSources.add("frag") << glu::FragmentSource(m_fragShaderSource);
+}
+
+TestInstance* UniformBlockCase::createInstance (Context& context) const
+{
+	return new UniformBlockCaseInstance(context, m_bufferMode, m_uniformLayout, m_blockPointers);
+}
+
+void UniformBlockCase::init (void)
+{
+	// Compute reference layout.
+	computeStd140Layout(m_uniformLayout, m_interface);
+
+	// Assign storage for reference values.
+	{
+		int totalSize = 0;
+		for (std::vector<BlockLayoutEntry>::const_iterator blockIter = m_uniformLayout.blocks.begin(); blockIter != m_uniformLayout.blocks.end(); blockIter++)
+			totalSize += blockIter->size;
+		m_data.resize(totalSize);
+
+		// Pointers for each block.
+		int curOffset = 0;
+		for (int blockNdx = 0; blockNdx < (int)m_uniformLayout.blocks.size(); blockNdx++)
+		{
+			m_blockPointers[blockNdx] = &m_data[0] + curOffset;
+			curOffset += m_uniformLayout.blocks[blockNdx].size;
+		}
+	}
+
+	// Generate values.
+	generateValues(m_uniformLayout, m_blockPointers, 1 /* seed */);
+
+	// Generate shaders.
+	m_vertShaderSource = generateVertexShader(m_interface, m_uniformLayout, m_blockPointers);
+	m_fragShaderSource = generateFragmentShader(m_interface, m_uniformLayout, m_blockPointers);
+}
+
+} // ubo
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/ubo/vktUniformBlockCase.hpp b/external/vulkancts/modules/vulkan/ubo/vktUniformBlockCase.hpp
new file mode 100644
index 0000000..cb883c1
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/ubo/vktUniformBlockCase.hpp
@@ -0,0 +1,332 @@
+#ifndef _VKTUNIFORMBLOCKCASE_HPP
+#define _VKTUNIFORMBLOCKCASE_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Uniform block tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "deSharedPtr.hpp"
+#include "vktTestCase.hpp"
+#include "tcuDefs.hpp"
+#include "gluShaderUtil.hpp"
+
+#include <map>
+
+namespace vkt
+{
+namespace ubo
+{
+
+// Uniform block details.
+
+enum UniformFlags
+{
+	PRECISION_LOW		= (1<<0),
+	PRECISION_MEDIUM	= (1<<1),
+	PRECISION_HIGH		= (1<<2),
+	PRECISION_MASK		= PRECISION_LOW|PRECISION_MEDIUM|PRECISION_HIGH,
+
+	LAYOUT_SHARED		= (1<<3),
+	LAYOUT_PACKED		= (1<<4),
+	LAYOUT_STD140		= (1<<5),
+	LAYOUT_ROW_MAJOR	= (1<<6),
+	LAYOUT_COLUMN_MAJOR	= (1<<7),	//!< \note Lack of both flags means column-major matrix.
+	LAYOUT_MASK			= LAYOUT_SHARED|LAYOUT_PACKED|LAYOUT_STD140|LAYOUT_ROW_MAJOR|LAYOUT_COLUMN_MAJOR,
+
+	DECLARE_VERTEX		= (1<<8),
+	DECLARE_FRAGMENT	= (1<<9),
+	DECLARE_BOTH		= DECLARE_VERTEX|DECLARE_FRAGMENT,
+
+	UNUSED_VERTEX		= (1<<10),	//!< Uniform or struct member is not read in vertex shader.
+	UNUSED_FRAGMENT		= (1<<11),	//!< Uniform or struct member is not read in fragment shader.
+	UNUSED_BOTH			= UNUSED_VERTEX|UNUSED_FRAGMENT
+};
+
+class StructType;
+
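+//! Variable type: a basic type, an array of VarTypes, or a reference to a named StructType.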
+class VarType
+{
+public:
+						VarType			(void);
+						VarType			(const VarType& other);
+						VarType			(glu::DataType basicType, deUint32 flags);
+						VarType			(const VarType& elementType, int arraySize);
+	explicit			VarType			(const StructType* structPtr);
+						~VarType		(void);
+
+	bool				isBasicType		(void) const	{ return m_type == TYPE_BASIC;	}
+	bool				isArrayType		(void) const	{ return m_type == TYPE_ARRAY;	}
+	bool				isStructType	(void) const	{ return m_type == TYPE_STRUCT;	}
+
+	deUint32			getFlags		(void) const	{ return m_flags;					}
+	glu::DataType		getBasicType	(void) const	{ return m_data.basicType;			}
+
+	const VarType&		getElementType	(void) const	{ return *m_data.array.elementType;	}
+	int					getArraySize	(void) const	{ return m_data.array.size;			}
+
+	const StructType&	getStruct		(void) const	{ return *m_data.structPtr;			}
+
+	VarType&			operator=		(const VarType& other);
+
+private:
+	enum Type
+	{
+		TYPE_BASIC,
+		TYPE_ARRAY,
+		TYPE_STRUCT,
+
+		TYPE_LAST
+	};
+
+	Type				m_type;
+	deUint32			m_flags;
+	union Data
+	{
+		glu::DataType		basicType;
+		struct
+		{
+			VarType*		elementType;
+			int				size;
+		} array;
+		const StructType*	structPtr;
+
+		Data (void)
+		{
+			array.elementType	= DE_NULL;
+			array.size			= 0;
+		};
+	} m_data;
+};
+
+class StructMember
+{
+public:
+						StructMember	(const std::string& name, const VarType& type, deUint32 flags)
+							: m_name(name), m_type(type), m_flags(flags)
+						{}
+						StructMember	(void)
+							: m_flags(0)
+						{}
+
+	const std::string&	getName			(void) const { return m_name;	}
+	const VarType&		getType			(void) const { return m_type;	}
+	deUint32			getFlags		(void) const { return m_flags;	}
+
+private:
+	std::string			m_name;
+	VarType				m_type;
+	deUint32			m_flags;
+};
+
+class StructType
+{
+public:
+	typedef std::vector<StructMember>::iterator			Iterator;
+	typedef std::vector<StructMember>::const_iterator	ConstIterator;
+
+								StructType		(const std::string& typeName) : m_typeName(typeName) {}
+								~StructType		(void) {}
+
+	const std::string&			getTypeName		(void) const	{ return m_typeName;			}
+	bool						hasTypeName		(void) const	{ return !m_typeName.empty();	}
+
+	inline Iterator				begin			(void)			{ return m_members.begin();		}
+	inline ConstIterator		begin			(void) const	{ return m_members.begin();		}
+	inline Iterator				end				(void)			{ return m_members.end();		}
+	inline ConstIterator		end				(void) const	{ return m_members.end();		}
+
+	void						addMember		(const std::string& name, const VarType& type, deUint32 flags = 0);
+
+private:
+	std::string					m_typeName;
+	std::vector<StructMember>	m_members;
+};
+
+class Uniform
+{
+public:
+						Uniform			(const std::string& name, const VarType& type, deUint32 flags = 0);
+
+	const std::string&	getName			(void) const { return m_name;	}
+	const VarType&		getType			(void) const { return m_type;	}
+	deUint32			getFlags		(void) const { return m_flags;	}
+
+private:
+	std::string			m_name;
+	VarType				m_type;
+	deUint32			m_flags;
+};
+
+class UniformBlock
+{
+public:
+	typedef std::vector<Uniform>::iterator			Iterator;
+	typedef std::vector<Uniform>::const_iterator	ConstIterator;
+
+							UniformBlock		(const std::string& blockName);
+
+	const std::string&		getBlockName		(void) const { return m_blockName;		}
+	const std::string&		getInstanceName		(void) const { return m_instanceName;	}
+	bool					hasInstanceName		(void) const { return !m_instanceName.empty();	}
+	bool					isArray				(void) const { return m_arraySize > 0;			}
+	int						getArraySize		(void) const { return m_arraySize;				}
+	deUint32				getFlags			(void) const { return m_flags;					}
+
+	void					setInstanceName		(const std::string& name)	{ m_instanceName = name;			}
+	void					setFlags			(deUint32 flags)			{ m_flags = flags;					}
+	void					setArraySize		(int arraySize)				{ m_arraySize = arraySize;			}
+	void					addUniform			(const Uniform& uniform)	{ m_uniforms.push_back(uniform);	}
+
+	inline Iterator			begin				(void)			{ return m_uniforms.begin();	}
+	inline ConstIterator	begin				(void) const	{ return m_uniforms.begin();	}
+	inline Iterator			end					(void)			{ return m_uniforms.end();		}
+	inline ConstIterator	end					(void) const	{ return m_uniforms.end();		}
+
+private:
+	std::string				m_blockName;
+	std::string				m_instanceName;
+	std::vector<Uniform>	m_uniforms;
+	int						m_arraySize;	//!< Array size or 0 if not interface block array.
+	deUint32				m_flags;
+};
+
+typedef de::SharedPtr<StructType>	StructTypeSP;
+typedef de::SharedPtr<UniformBlock>	UniformBlockSP;
+
+class ShaderInterface
+{
+public:
+								ShaderInterface			(void);
+								~ShaderInterface		(void);
+
+	StructType&					allocStruct				(const std::string& name);
+	void						getNamedStructs			(std::vector<const StructType*>& structs) const;
+
+	UniformBlock&				allocBlock				(const std::string& name);
+
+	int							getNumUniformBlocks		(void) const	{ return (int)m_uniformBlocks.size();	}
+	const UniformBlock&			getUniformBlock			(int ndx) const	{ return *m_uniformBlocks[ndx];			}
+
+private:
+	std::vector<StructTypeSP>		m_structs;
+	std::vector<UniformBlockSP>		m_uniformBlocks;
+};
+
+struct BlockLayoutEntry
+{
+	BlockLayoutEntry (void)
+		: size(0)
+	{
+	}
+
+	std::string			name;
+	int					size;
+	std::vector<int>	activeUniformIndices;
+	int					bindingNdx;
+	int					instanceNdx;
+};
+
+struct UniformLayoutEntry
+{
+	UniformLayoutEntry (void)
+		: type			(glu::TYPE_LAST)
+		, size			(0)
+		, blockNdx		(-1)
+		, offset		(-1)
+		, arrayStride	(-1)
+		, matrixStride	(-1)
+		, isRowMajor	(false)
+		, instanceNdx	(0)
+	{
+	}
+
+	std::string			name;
+	glu::DataType		type;
+	int					size;
+	int					blockNdx;
+	int					offset;
+	int					arrayStride;
+	int					matrixStride;
+	bool				isRowMajor;
+	int					instanceNdx;
+};
+
+class UniformLayout
+{
+public:
+	std::vector<BlockLayoutEntry>		blocks;
+	std::vector<UniformLayoutEntry>		uniforms;
+
+	int									getUniformIndex			(const std::string& name) const;
+	int									getBlockIndex			(const std::string& name) const;
+};
+
+class UniformBlockCase : public vkt::TestCase
+{
+public:
+	enum BufferMode
+	{
+		BUFFERMODE_SINGLE = 0,	//!< Single buffer shared between uniform blocks.
+		BUFFERMODE_PER_BLOCK,	//!< Per-block buffers.
+
+		BUFFERMODE_LAST
+	};
+
+								UniformBlockCase			(tcu::TestContext&	testCtx,
+															 const std::string&	name,
+															 const std::string&	description,
+															 BufferMode			bufferMode);
+								~UniformBlockCase			(void);
+
+	virtual	void				initPrograms				(vk::SourceCollections& programCollection) const;
+	virtual TestInstance*		createInstance				(Context& context) const;
+
+protected:
+	void						init						(void);
+
+	BufferMode					m_bufferMode;
+	ShaderInterface				m_interface;
+
+private:
+	std::string					m_vertShaderSource;
+	std::string					m_fragShaderSource;
+
+	std::vector<deUint8>		m_data;				//!< Data.
+	std::map<int, void*>		m_blockPointers;	//!< Reference block pointers.
+	UniformLayout				m_uniformLayout;	//!< std140 layout.
+};
+
+} // ubo
+} // vkt
+
+#endif // _VKTUNIFORMBLOCKCASE_HPP
diff --git a/external/vulkancts/modules/vulkan/ubo/vktUniformBlockTests.cpp b/external/vulkancts/modules/vulkan/ubo/vktUniformBlockTests.cpp
new file mode 100644
index 0000000..5eb2029
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/ubo/vktUniformBlockTests.cpp
@@ -0,0 +1,820 @@
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Uniform block tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktUniformBlockTests.hpp"
+
+#include "vktUniformBlockCase.hpp"
+#include "vktRandomUniformBlockCase.hpp"
+
+#include "tcuCommandLine.hpp"
+#include "deStringUtil.hpp"
+
+namespace vkt
+{
+namespace ubo
+{
+
+namespace
+{
+
+class BlockBasicTypeCase : public UniformBlockCase
+{
+public:
+	BlockBasicTypeCase (tcu::TestContext& testCtx, const std::string& name, const std::string& description, const VarType& type, deUint32 layoutFlags, int numInstances)
+		: UniformBlockCase(testCtx, name, description, BUFFERMODE_PER_BLOCK)
+	{
+		UniformBlock& block = m_interface.allocBlock("Block");
+		block.addUniform(Uniform("var", type, 0));
+		block.setFlags(layoutFlags);
+
+		if (numInstances > 0)
+		{
+			block.setArraySize(numInstances);
+			block.setInstanceName("block");
+		}
+
+		init();
+	}
+};
+
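+// Adds vertex-only, fragment-only and combined (both stages) variants of a basic type case.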
+static void createBlockBasicTypeCases (tcu::TestCaseGroup* group, tcu::TestContext& testCtx, const std::string& name, const VarType& type, deUint32 layoutFlags, int numInstances = 0)
+{
+	group->addChild(new BlockBasicTypeCase(testCtx, name + "_vertex",	"", type, layoutFlags|DECLARE_VERTEX,					numInstances));
+	group->addChild(new BlockBasicTypeCase(testCtx, name + "_fragment",	"", type, layoutFlags|DECLARE_FRAGMENT,					numInstances));
+	group->addChild(new BlockBasicTypeCase(testCtx, name + "_both",	"",		type, layoutFlags|DECLARE_VERTEX|DECLARE_FRAGMENT,	numInstances));
+}
+
+class BlockSingleStructCase : public UniformBlockCase
+{
+public:
+	BlockSingleStructCase (tcu::TestContext& testCtx, const std::string& name, const std::string& description, deUint32 layoutFlags, BufferMode bufferMode, int numInstances)
+		: UniformBlockCase	(testCtx, name, description, bufferMode)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_INT_VEC3, PRECISION_HIGH), UNUSED_BOTH); // First member is unused.
+		typeS.addMember("b", VarType(VarType(glu::TYPE_FLOAT_MAT3, PRECISION_MEDIUM), 4));
+		typeS.addMember("c", VarType(glu::TYPE_FLOAT_VEC4, PRECISION_HIGH));
+
+		UniformBlock& block = m_interface.allocBlock("Block");
+		block.addUniform(Uniform("s", VarType(&typeS), 0));
+		block.setFlags(layoutFlags);
+
+		if (numInstances > 0)
+		{
+			block.setInstanceName("block");
+			block.setArraySize(numInstances);
+		}
+
+		init();
+	}
+};
+
+class BlockSingleStructArrayCase : public UniformBlockCase
+{
+public:
+	BlockSingleStructArrayCase (tcu::TestContext& testCtx, const std::string& name, const std::string& description, deUint32 layoutFlags, BufferMode bufferMode, int numInstances)
+		: UniformBlockCase	(testCtx, name, description, bufferMode)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_INT_VEC3, PRECISION_HIGH), UNUSED_BOTH);
+		typeS.addMember("b", VarType(VarType(glu::TYPE_FLOAT_MAT3, PRECISION_MEDIUM), 4));
+		typeS.addMember("c", VarType(glu::TYPE_FLOAT_VEC4, PRECISION_HIGH));
+
+		UniformBlock& block = m_interface.allocBlock("Block");
+		block.addUniform(Uniform("u", VarType(glu::TYPE_UINT, PRECISION_LOW)));
+		block.addUniform(Uniform("s", VarType(VarType(&typeS), 3)));
+		block.addUniform(Uniform("v", VarType(glu::TYPE_FLOAT_VEC4, PRECISION_MEDIUM)));
+		block.setFlags(layoutFlags);
+
+		if (numInstances > 0)
+		{
+			block.setInstanceName("block");
+			block.setArraySize(numInstances);
+		}
+
+		init();
+	}
+};
+
+class BlockSingleNestedStructCase : public UniformBlockCase
+{
+public:
+	BlockSingleNestedStructCase (tcu::TestContext& testCtx, const std::string& name, const std::string& description, deUint32 layoutFlags, BufferMode bufferMode, int numInstances)
+		: UniformBlockCase	(testCtx, name, description, bufferMode)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_INT_VEC3, PRECISION_HIGH));
+		typeS.addMember("b", VarType(VarType(glu::TYPE_FLOAT_MAT3, PRECISION_MEDIUM), 4));
+		typeS.addMember("c", VarType(glu::TYPE_FLOAT_VEC4, PRECISION_HIGH), UNUSED_BOTH);
+
+		StructType& typeT = m_interface.allocStruct("T");
+		typeT.addMember("a", VarType(glu::TYPE_FLOAT_MAT3, PRECISION_MEDIUM));
+		typeT.addMember("b", VarType(&typeS));
+
+		UniformBlock& block = m_interface.allocBlock("Block");
+		block.addUniform(Uniform("s", VarType(&typeS), 0));
+		block.addUniform(Uniform("v", VarType(glu::TYPE_FLOAT_VEC2, PRECISION_LOW), UNUSED_BOTH));
+		block.addUniform(Uniform("t", VarType(&typeT), 0));
+		block.addUniform(Uniform("u", VarType(glu::TYPE_UINT, PRECISION_HIGH), 0));
+		block.setFlags(layoutFlags);
+
+		if (numInstances > 0)
+		{
+			block.setInstanceName("block");
+			block.setArraySize(numInstances);
+		}
+
+		init();
+	}
+};
+
+class BlockSingleNestedStructArrayCase : public UniformBlockCase
+{
+public:
+	BlockSingleNestedStructArrayCase (tcu::TestContext& testCtx, const std::string& name, const std::string& description, deUint32 layoutFlags, BufferMode bufferMode, int numInstances)
+		: UniformBlockCase	(testCtx, name, description, bufferMode)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_INT_VEC3, PRECISION_HIGH));
+		typeS.addMember("b", VarType(VarType(glu::TYPE_INT_VEC2, PRECISION_MEDIUM), 4));
+		typeS.addMember("c", VarType(glu::TYPE_FLOAT_VEC4, PRECISION_HIGH), UNUSED_BOTH);
+
+		StructType& typeT = m_interface.allocStruct("T");
+		typeT.addMember("a", VarType(glu::TYPE_FLOAT_MAT3, PRECISION_MEDIUM));
+		typeT.addMember("b", VarType(VarType(&typeS), 3));
+
+		UniformBlock& block = m_interface.allocBlock("Block");
+		block.addUniform(Uniform("s", VarType(&typeS), 0));
+		block.addUniform(Uniform("v", VarType(glu::TYPE_FLOAT_VEC2, PRECISION_LOW), UNUSED_BOTH));
+		block.addUniform(Uniform("t", VarType(VarType(&typeT), 2), 0));
+		block.addUniform(Uniform("u", VarType(glu::TYPE_UINT, PRECISION_HIGH), 0));
+		block.setFlags(layoutFlags);
+
+		if (numInstances > 0)
+		{
+			block.setInstanceName("block");
+			block.setArraySize(numInstances);
+		}
+
+		init();
+	}
+};
+
+class BlockMultiBasicTypesCase : public UniformBlockCase
+{
+public:
+	BlockMultiBasicTypesCase (tcu::TestContext& testCtx, const std::string& name, const std::string& description, deUint32 flagsA, deUint32 flagsB, BufferMode bufferMode, int numInstances)
+		: UniformBlockCase	(testCtx, name, description, bufferMode)
+	{
+		UniformBlock& blockA = m_interface.allocBlock("BlockA");
+		blockA.addUniform(Uniform("a", VarType(glu::TYPE_FLOAT, PRECISION_HIGH)));
+		blockA.addUniform(Uniform("b", VarType(glu::TYPE_UINT_VEC3, PRECISION_LOW), UNUSED_BOTH));
+		blockA.addUniform(Uniform("c", VarType(glu::TYPE_FLOAT_MAT2, PRECISION_MEDIUM)));
+		blockA.setInstanceName("blockA");
+		blockA.setFlags(flagsA);
+
+		UniformBlock& blockB = m_interface.allocBlock("BlockB");
+		blockB.addUniform(Uniform("a", VarType(glu::TYPE_FLOAT_MAT3, PRECISION_MEDIUM)));
+		blockB.addUniform(Uniform("b", VarType(glu::TYPE_INT_VEC2, PRECISION_LOW)));
+		blockB.addUniform(Uniform("c", VarType(glu::TYPE_FLOAT_VEC4, PRECISION_HIGH), UNUSED_BOTH));
+		blockB.addUniform(Uniform("d", VarType(glu::TYPE_BOOL, 0)));
+		blockB.setInstanceName("blockB");
+		blockB.setFlags(flagsB);
+
+		if (numInstances > 0)
+		{
+			blockA.setArraySize(numInstances);
+			blockB.setArraySize(numInstances);
+		}
+
+		init();
+	}
+};
+
+class BlockMultiNestedStructCase : public UniformBlockCase
+{
+public:
+	BlockMultiNestedStructCase (tcu::TestContext& testCtx, const std::string& name, const std::string& description, deUint32 flagsA, deUint32 flagsB, BufferMode bufferMode, int numInstances)
+		: UniformBlockCase	(testCtx, name, description, bufferMode)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_FLOAT_MAT3, PRECISION_LOW));
+		typeS.addMember("b", VarType(VarType(glu::TYPE_INT_VEC2, PRECISION_MEDIUM), 4));
+		typeS.addMember("c", VarType(glu::TYPE_FLOAT_VEC4, PRECISION_HIGH));
+
+		StructType& typeT = m_interface.allocStruct("T");
+		typeT.addMember("a", VarType(glu::TYPE_UINT, PRECISION_MEDIUM), UNUSED_BOTH);
+		typeT.addMember("b", VarType(&typeS));
+		typeT.addMember("c", VarType(glu::TYPE_BOOL_VEC4, 0));
+
+		UniformBlock& blockA = m_interface.allocBlock("BlockA");
+		blockA.addUniform(Uniform("a", VarType(glu::TYPE_FLOAT, PRECISION_HIGH)));
+		blockA.addUniform(Uniform("b", VarType(&typeS)));
+		blockA.addUniform(Uniform("c", VarType(glu::TYPE_UINT_VEC3, PRECISION_LOW), UNUSED_BOTH));
+		blockA.setInstanceName("blockA");
+		blockA.setFlags(flagsA);
+
+		UniformBlock& blockB = m_interface.allocBlock("BlockB");
+		blockB.addUniform(Uniform("a", VarType(glu::TYPE_FLOAT_MAT2, PRECISION_MEDIUM)));
+		blockB.addUniform(Uniform("b", VarType(&typeT)));
+		blockB.addUniform(Uniform("c", VarType(glu::TYPE_BOOL_VEC4, 0), UNUSED_BOTH));
+		blockB.addUniform(Uniform("d", VarType(glu::TYPE_BOOL, 0)));
+		blockB.setInstanceName("blockB");
+		blockB.setFlags(flagsB);
+
+		if (numInstances > 0)
+		{
+			blockA.setArraySize(numInstances);
+			blockB.setArraySize(numInstances);
+		}
+
+		init();
+	}
+};
+
+class Block2LevelStructArrayCase : public UniformBlockCase
+{
+public:
+	Block2LevelStructArrayCase (tcu::TestContext& testCtx, const std::string& name, const std::string& description, deUint32 layoutFlags, BufferMode bufferMode, int numInstances)
+		: UniformBlockCase	(testCtx, name, description, bufferMode)
+		, m_layoutFlags		(layoutFlags)
+		, m_numInstances	(numInstances)
+	{
+		StructType& typeS = m_interface.allocStruct("S");
+		typeS.addMember("a", VarType(glu::TYPE_UINT_VEC3, PRECISION_HIGH), UNUSED_BOTH);
+		typeS.addMember("b", VarType(VarType(glu::TYPE_FLOAT_MAT2, PRECISION_MEDIUM), 4));
+		typeS.addMember("c", VarType(glu::TYPE_UINT, PRECISION_LOW));
+
+		UniformBlock& block = m_interface.allocBlock("Block");
+		block.addUniform(Uniform("u", VarType(glu::TYPE_INT, PRECISION_MEDIUM)));
+		block.addUniform(Uniform("s", VarType(VarType(VarType(&typeS), 3), 2)));
+		block.addUniform(Uniform("v", VarType(glu::TYPE_FLOAT_VEC2, PRECISION_MEDIUM)));
+		block.setFlags(m_layoutFlags);
+
+		if (m_numInstances > 0)
+		{
+			block.setInstanceName("block");
+			block.setArraySize(m_numInstances);
+		}
+
+		init();
+	}
+
+private:
+	deUint32	m_layoutFlags;
+	int			m_numInstances;
+};
+
+void createRandomCaseGroup (tcu::TestCaseGroup* parentGroup, tcu::TestContext& testCtx, const char* groupName, const char* description, UniformBlockCase::BufferMode bufferMode, deUint32 features, int numCases, deUint32 baseSeed)
+{
+	tcu::TestCaseGroup* group = new tcu::TestCaseGroup(testCtx, groupName, description);
+	parentGroup->addChild(group);
+
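+	// Offset the per-group seed with the command-line base seed so random case generation can be varied between runs.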
+	baseSeed += (deUint32)testCtx.getCommandLine().getBaseSeed();
+
+	for (int ndx = 0; ndx < numCases; ndx++)
+		group->addChild(new RandomUniformBlockCase(testCtx, de::toString(ndx), "", bufferMode, features, (deUint32)ndx + baseSeed));
+}
+
+// UniformBlockTests
+
+class UniformBlockTests : public tcu::TestCaseGroup
+{
+public:
+							UniformBlockTests		(tcu::TestContext& testCtx);
+							~UniformBlockTests		(void);
+
+	void					init					(void);
+
+private:
+							UniformBlockTests		(const UniformBlockTests& other);
+	UniformBlockTests&		operator=				(const UniformBlockTests& other);
+};
+
+UniformBlockTests::UniformBlockTests (tcu::TestContext& testCtx)
+	: TestCaseGroup(testCtx, "ubo", "Uniform Block tests")
+{
+}
+
+UniformBlockTests::~UniformBlockTests (void)
+{
+}
+
+void UniformBlockTests::init (void)
+{
+	static const glu::DataType basicTypes[] =
+	{
+		glu::TYPE_FLOAT,
+		glu::TYPE_FLOAT_VEC2,
+		glu::TYPE_FLOAT_VEC3,
+		glu::TYPE_FLOAT_VEC4,
+		glu::TYPE_INT,
+		glu::TYPE_INT_VEC2,
+		glu::TYPE_INT_VEC3,
+		glu::TYPE_INT_VEC4,
+		glu::TYPE_UINT,
+		glu::TYPE_UINT_VEC2,
+		glu::TYPE_UINT_VEC3,
+		glu::TYPE_UINT_VEC4,
+		glu::TYPE_BOOL,
+		glu::TYPE_BOOL_VEC2,
+		glu::TYPE_BOOL_VEC3,
+		glu::TYPE_BOOL_VEC4,
+		glu::TYPE_FLOAT_MAT2,
+		glu::TYPE_FLOAT_MAT3,
+		glu::TYPE_FLOAT_MAT4,
+		glu::TYPE_FLOAT_MAT2X3,
+		glu::TYPE_FLOAT_MAT2X4,
+		glu::TYPE_FLOAT_MAT3X2,
+		glu::TYPE_FLOAT_MAT3X4,
+		glu::TYPE_FLOAT_MAT4X2,
+		glu::TYPE_FLOAT_MAT4X3
+	};
+
+	static const struct
+	{
+		const std::string	name;
+		deUint32			flags;
+	} precisionFlags[] =
+	{
+		// TODO: remove PRECISION_LOW, since both PRECISION_LOW and PRECISION_MEDIUM mean relaxed precision?
+		{ "lowp",		PRECISION_LOW		},
+		{ "mediump",	PRECISION_MEDIUM	},
+		{ "highp",		PRECISION_HIGH		}
+	};
+
+	static const struct
+	{
+		const char*			name;
+		deUint32			flags;
+	} layoutFlags[] =
+	{
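+		// Only std140 is tested; Vulkan GLSL does not support shared/packed uniform block layouts.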
+		{ "std140",		LAYOUT_STD140	}
+	};
+
+	static const struct
+	{
+		const std::string	name;
+		deUint32			flags;
+	} matrixFlags[] =
+	{
+		{ "row_major",		LAYOUT_ROW_MAJOR	},
+		{ "column_major",	LAYOUT_COLUMN_MAJOR }
+	};
+
+	static const struct
+	{
+		const char*							name;
+		UniformBlockCase::BufferMode		mode;
+	} bufferModes[] =
+	{
+		{ "per_block_buffer",	UniformBlockCase::BUFFERMODE_PER_BLOCK },
+		{ "single_buffer",		UniformBlockCase::BUFFERMODE_SINGLE	}
+	};
+
+	// ubo.2_level_array
+	{
+		tcu::TestCaseGroup* nestedArrayGroup = new tcu::TestCaseGroup(m_testCtx, "2_level_array", "2-level basic array variable in single buffer");
+		addChild(nestedArrayGroup);
+
+		for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+		{
+			tcu::TestCaseGroup* layoutGroup = new tcu::TestCaseGroup(m_testCtx, layoutFlags[layoutFlagNdx].name, "");
+			nestedArrayGroup->addChild(layoutGroup);
+
+			for (int basicTypeNdx = 0; basicTypeNdx < DE_LENGTH_OF_ARRAY(basicTypes); basicTypeNdx++)
+			{
+				const glu::DataType	type		= basicTypes[basicTypeNdx];
+				const char*			typeName	= glu::getDataTypeName(type);
+				const int			childSize	= 4;
+				const int			parentSize	= 3;
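+				// Boolean types take no precision qualifier, hence precision flag 0 for bool/bvec.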
+				const VarType		childType	(VarType(type, glu::isDataTypeBoolOrBVec(type) ? 0 : PRECISION_HIGH), childSize);
+				const VarType		parentType	(childType, parentSize);
+
+				createBlockBasicTypeCases(layoutGroup, m_testCtx, typeName, parentType, layoutFlags[layoutFlagNdx].flags);
+
+				if (glu::isDataTypeMatrix(type))
+				{
+					for (int matFlagNdx = 0; matFlagNdx < DE_LENGTH_OF_ARRAY(matrixFlags); matFlagNdx++)
+						createBlockBasicTypeCases(layoutGroup, m_testCtx, (std::string(matrixFlags[matFlagNdx].name) + "_" + typeName),
+												  parentType, layoutFlags[layoutFlagNdx].flags|matrixFlags[matFlagNdx].flags);
+				}
+			}
+		}
+	}
+
+	// ubo.3_level_array
+	{
+		tcu::TestCaseGroup* nestedArrayGroup = new tcu::TestCaseGroup(m_testCtx, "3_level_array", "3-level basic array variable in single buffer");
+		addChild(nestedArrayGroup);
+
+		for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+		{
+			tcu::TestCaseGroup* layoutGroup = new tcu::TestCaseGroup(m_testCtx, layoutFlags[layoutFlagNdx].name, "");
+			nestedArrayGroup->addChild(layoutGroup);
+
+			for (int basicTypeNdx = 0; basicTypeNdx < DE_LENGTH_OF_ARRAY(basicTypes); basicTypeNdx++)
+			{
+				const glu::DataType	type		= basicTypes[basicTypeNdx];
+				const char*			typeName	= glu::getDataTypeName(type);
+				const int			childSize0	= 2;
+				const int			childSize1	= 4;
+				const int			parentSize	= 3;
+				const VarType		childType0	(VarType(type, glu::isDataTypeBoolOrBVec(type) ? 0 : PRECISION_HIGH), childSize0);
+				const VarType		childType1	(childType0, childSize1);
+				const VarType		parentType	(childType1, parentSize);
+
+				createBlockBasicTypeCases(layoutGroup, m_testCtx, typeName, parentType, layoutFlags[layoutFlagNdx].flags);
+
+				if (glu::isDataTypeMatrix(type))
+				{
+					for (int matFlagNdx = 0; matFlagNdx < DE_LENGTH_OF_ARRAY(matrixFlags); matFlagNdx++)
+						createBlockBasicTypeCases(layoutGroup, m_testCtx, (std::string(matrixFlags[matFlagNdx].name) + "_" + typeName),
+												  parentType, layoutFlags[layoutFlagNdx].flags|matrixFlags[matFlagNdx].flags);
+				}
+			}
+		}
+	}
+
+	// ubo.2_level_struct_array
+	{
+		tcu::TestCaseGroup* structArrayArrayGroup = new tcu::TestCaseGroup(m_testCtx, "2_level_struct_array", "2-level struct array in one uniform block");
+		addChild(structArrayArrayGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			structArrayArrayGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (bufferModes[modeNdx].mode == UniformBlockCase::BUFFERMODE_SINGLE && isArray == 0)
+						continue; // Doesn't make sense to add this variant.
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new Block2LevelStructArrayCase(m_testCtx, (baseName + "_vertex"),	"", baseFlags|DECLARE_VERTEX,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new Block2LevelStructArrayCase(m_testCtx, (baseName + "_fragment"),	"", baseFlags|DECLARE_FRAGMENT,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new Block2LevelStructArrayCase(m_testCtx, (baseName + "_both"),	"",		baseFlags|DECLARE_VERTEX|DECLARE_FRAGMENT,	bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ubo.single_basic_type
+	{
+		tcu::TestCaseGroup* singleBasicTypeGroup = new tcu::TestCaseGroup(m_testCtx, "single_basic_type", "Single basic variable in single buffer");
+		addChild(singleBasicTypeGroup);
+
+		for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+		{
+			tcu::TestCaseGroup* layoutGroup = new tcu::TestCaseGroup(m_testCtx, layoutFlags[layoutFlagNdx].name, "");
+			singleBasicTypeGroup->addChild(layoutGroup);
+
+			for (int basicTypeNdx = 0; basicTypeNdx < DE_LENGTH_OF_ARRAY(basicTypes); basicTypeNdx++)
+			{
+				glu::DataType	type		= basicTypes[basicTypeNdx];
+				const char*		typeName	= glu::getDataTypeName(type);
+
+				if (glu::isDataTypeBoolOrBVec(type))
+					createBlockBasicTypeCases(layoutGroup, m_testCtx, typeName, VarType(type, 0), layoutFlags[layoutFlagNdx].flags);
+				else
+				{
+					for (int precNdx = 0; precNdx < DE_LENGTH_OF_ARRAY(precisionFlags); precNdx++)
+						createBlockBasicTypeCases(layoutGroup, m_testCtx, precisionFlags[precNdx].name + "_" + typeName,
+												  VarType(type, precisionFlags[precNdx].flags), layoutFlags[layoutFlagNdx].flags);
+				}
+
+				if (glu::isDataTypeMatrix(type))
+				{
+					for (int matFlagNdx = 0; matFlagNdx < DE_LENGTH_OF_ARRAY(matrixFlags); matFlagNdx++)
+					{
+						for (int precNdx = 0; precNdx < DE_LENGTH_OF_ARRAY(precisionFlags); precNdx++)
+							createBlockBasicTypeCases(layoutGroup, m_testCtx, matrixFlags[matFlagNdx].name + "_" + precisionFlags[precNdx].name + "_" + typeName,
+													  VarType(type, precisionFlags[precNdx].flags), layoutFlags[layoutFlagNdx].flags|matrixFlags[matFlagNdx].flags);
+					}
+				}
+			}
+		}
+	}
+
+	// ubo.single_basic_array
+	{
+		tcu::TestCaseGroup* singleBasicArrayGroup = new tcu::TestCaseGroup(m_testCtx, "single_basic_array", "Single basic array variable in single buffer");
+		addChild(singleBasicArrayGroup);
+
+		for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+		{
+			tcu::TestCaseGroup* layoutGroup = new tcu::TestCaseGroup(m_testCtx, layoutFlags[layoutFlagNdx].name, "");
+			singleBasicArrayGroup->addChild(layoutGroup);
+
+			for (int basicTypeNdx = 0; basicTypeNdx < DE_LENGTH_OF_ARRAY(basicTypes); basicTypeNdx++)
+			{
+				glu::DataType	type		= basicTypes[basicTypeNdx];
+				const char*		typeName	= glu::getDataTypeName(type);
+				const int		arraySize	= 3;
+
+				createBlockBasicTypeCases(layoutGroup, m_testCtx, typeName,
+										  VarType(VarType(type, glu::isDataTypeBoolOrBVec(type) ? 0 : PRECISION_HIGH), arraySize),
+										  layoutFlags[layoutFlagNdx].flags);
+
+				if (glu::isDataTypeMatrix(type))
+				{
+					for (int matFlagNdx = 0; matFlagNdx < DE_LENGTH_OF_ARRAY(matrixFlags); matFlagNdx++)
+						createBlockBasicTypeCases(layoutGroup, m_testCtx, matrixFlags[matFlagNdx].name + "_" + typeName,
+												  VarType(VarType(type, PRECISION_HIGH), arraySize),
+												  layoutFlags[layoutFlagNdx].flags|matrixFlags[matFlagNdx].flags);
+				}
+			}
+		}
+	}
+
+	// ubo.single_struct
+	{
+		tcu::TestCaseGroup* singleStructGroup = new tcu::TestCaseGroup(m_testCtx, "single_struct", "Single struct in uniform block");
+		addChild(singleStructGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			singleStructGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (bufferModes[modeNdx].mode == UniformBlockCase::BUFFERMODE_SINGLE && isArray == 0)
+						continue; // Doesn't make sense to add this variant.
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new BlockSingleStructCase(m_testCtx, baseName + "_vertex",		"", baseFlags|DECLARE_VERTEX,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockSingleStructCase(m_testCtx, baseName + "_fragment",	"", baseFlags|DECLARE_FRAGMENT,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockSingleStructCase(m_testCtx, baseName + "_both",	"",		baseFlags|DECLARE_VERTEX|DECLARE_FRAGMENT,	bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ubo.single_struct_array
+	{
+		tcu::TestCaseGroup* singleStructArrayGroup = new tcu::TestCaseGroup(m_testCtx, "single_struct_array", "Struct array in one uniform block");
+		addChild(singleStructArrayGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			singleStructArrayGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (bufferModes[modeNdx].mode == UniformBlockCase::BUFFERMODE_SINGLE && isArray == 0)
+						continue; // Doesn't make sense to add this variant.
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new BlockSingleStructArrayCase(m_testCtx, baseName + "_vertex",		"", baseFlags|DECLARE_VERTEX,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockSingleStructArrayCase(m_testCtx, baseName + "_fragment",	"", baseFlags|DECLARE_FRAGMENT,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockSingleStructArrayCase(m_testCtx, baseName + "_both",	"",		baseFlags|DECLARE_VERTEX|DECLARE_FRAGMENT,	bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ubo.single_nested_struct
+	{
+		tcu::TestCaseGroup* singleNestedStructGroup = new tcu::TestCaseGroup(m_testCtx, "single_nested_struct", "Nested struct in one uniform block");
+		addChild(singleNestedStructGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			singleNestedStructGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (bufferModes[modeNdx].mode == UniformBlockCase::BUFFERMODE_SINGLE && isArray == 0)
+						continue; // Doesn't make sense to add this variant.
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new BlockSingleNestedStructCase(m_testCtx, baseName + "_vertex",	"", baseFlags|DECLARE_VERTEX,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockSingleNestedStructCase(m_testCtx, baseName + "_fragment",	"", baseFlags|DECLARE_FRAGMENT,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockSingleNestedStructCase(m_testCtx, baseName + "_both",	"",		baseFlags|DECLARE_VERTEX|DECLARE_FRAGMENT,	bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ubo.single_nested_struct_array
+	{
+		tcu::TestCaseGroup* singleNestedStructArrayGroup = new tcu::TestCaseGroup(m_testCtx, "single_nested_struct_array", "Nested struct array in one uniform block");
+		addChild(singleNestedStructArrayGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			singleNestedStructArrayGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (bufferModes[modeNdx].mode == UniformBlockCase::BUFFERMODE_SINGLE && isArray == 0)
+						continue; // Doesn't make sense to add this variant.
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new BlockSingleNestedStructArrayCase(m_testCtx, baseName + "_vertex",	"", baseFlags|DECLARE_VERTEX,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockSingleNestedStructArrayCase(m_testCtx, baseName + "_fragment",	"", baseFlags|DECLARE_FRAGMENT,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockSingleNestedStructArrayCase(m_testCtx, baseName + "_both",	"",		baseFlags|DECLARE_VERTEX|DECLARE_FRAGMENT,	bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ubo.instance_array_basic_type
+	{
+		tcu::TestCaseGroup* instanceArrayBasicTypeGroup = new tcu::TestCaseGroup(m_testCtx, "instance_array_basic_type", "Single basic variable in instance array");
+		addChild(instanceArrayBasicTypeGroup);
+
+		for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+		{
+			tcu::TestCaseGroup* layoutGroup = new tcu::TestCaseGroup(m_testCtx, layoutFlags[layoutFlagNdx].name, "");
+			instanceArrayBasicTypeGroup->addChild(layoutGroup);
+
+			for (int basicTypeNdx = 0; basicTypeNdx < DE_LENGTH_OF_ARRAY(basicTypes); basicTypeNdx++)
+			{
+				glu::DataType	type			= basicTypes[basicTypeNdx];
+				const char*		typeName		= glu::getDataTypeName(type);
+				const int		numInstances	= 3;
+
+				createBlockBasicTypeCases(layoutGroup, m_testCtx, typeName,
+										  VarType(type, glu::isDataTypeBoolOrBVec(type) ? 0 : PRECISION_HIGH),
+										  layoutFlags[layoutFlagNdx].flags, numInstances);
+
+				if (glu::isDataTypeMatrix(type))
+				{
+					for (int matFlagNdx = 0; matFlagNdx < DE_LENGTH_OF_ARRAY(matrixFlags); matFlagNdx++)
+						createBlockBasicTypeCases(layoutGroup, m_testCtx, matrixFlags[matFlagNdx].name + "_" + typeName,
+												  VarType(type, PRECISION_HIGH), layoutFlags[layoutFlagNdx].flags|matrixFlags[matFlagNdx].flags,
+												  numInstances);
+				}
+			}
+		}
+	}
+
+	// ubo.multi_basic_types
+	{
+		tcu::TestCaseGroup* multiBasicTypesGroup = new tcu::TestCaseGroup(m_testCtx, "multi_basic_types", "Multiple buffers with basic types");
+		addChild(multiBasicTypesGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			multiBasicTypesGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new BlockMultiBasicTypesCase(m_testCtx, baseName + "_vertex",	"", baseFlags|DECLARE_VERTEX,					baseFlags|DECLARE_VERTEX,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockMultiBasicTypesCase(m_testCtx, baseName + "_fragment",	"", baseFlags|DECLARE_FRAGMENT,					baseFlags|DECLARE_FRAGMENT,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockMultiBasicTypesCase(m_testCtx, baseName + "_both",	"", baseFlags|DECLARE_VERTEX|DECLARE_FRAGMENT,		baseFlags|DECLARE_VERTEX|DECLARE_FRAGMENT,	bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockMultiBasicTypesCase(m_testCtx, baseName + "_mixed",	"", baseFlags|DECLARE_VERTEX,					baseFlags|DECLARE_FRAGMENT,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ubo.multi_nested_struct
+	{
+		tcu::TestCaseGroup* multiNestedStructGroup = new tcu::TestCaseGroup(m_testCtx, "multi_nested_struct", "Multiple buffers with nested structs");
+		addChild(multiNestedStructGroup);
+
+		for (int modeNdx = 0; modeNdx < DE_LENGTH_OF_ARRAY(bufferModes); modeNdx++)
+		{
+			tcu::TestCaseGroup* modeGroup = new tcu::TestCaseGroup(m_testCtx, bufferModes[modeNdx].name, "");
+			multiNestedStructGroup->addChild(modeGroup);
+
+			for (int layoutFlagNdx = 0; layoutFlagNdx < DE_LENGTH_OF_ARRAY(layoutFlags); layoutFlagNdx++)
+			{
+				for (int isArray = 0; isArray < 2; isArray++)
+				{
+					std::string	baseName	= layoutFlags[layoutFlagNdx].name;
+					deUint32	baseFlags	= layoutFlags[layoutFlagNdx].flags;
+
+					if (isArray)
+						baseName += "_instance_array";
+
+					modeGroup->addChild(new BlockMultiNestedStructCase(m_testCtx, baseName + "_vertex",		"", baseFlags|DECLARE_VERTEX,					baseFlags|DECLARE_VERTEX,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockMultiNestedStructCase(m_testCtx, baseName + "_fragment",	"", baseFlags|DECLARE_FRAGMENT,					baseFlags|DECLARE_FRAGMENT,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockMultiNestedStructCase(m_testCtx, baseName + "_both",	"",		baseFlags|DECLARE_VERTEX|DECLARE_FRAGMENT,	baseFlags|DECLARE_VERTEX|DECLARE_FRAGMENT,	bufferModes[modeNdx].mode, isArray ? 3 : 0));
+					modeGroup->addChild(new BlockMultiNestedStructCase(m_testCtx, baseName + "_mixed",		"", baseFlags|DECLARE_VERTEX,					baseFlags|DECLARE_FRAGMENT,					bufferModes[modeNdx].mode, isArray ? 3 : 0));
+				}
+			}
+		}
+	}
+
+	// ubo.random
+	{
+		const deUint32	allShaders		= FEATURE_VERTEX_BLOCKS|FEATURE_FRAGMENT_BLOCKS|FEATURE_SHARED_BLOCKS;
+		const deUint32	allLayouts		= FEATURE_STD140_LAYOUT;
+		const deUint32	allBasicTypes	= FEATURE_VECTORS|FEATURE_MATRICES;
+		const deUint32	unused			= FEATURE_UNUSED_MEMBERS|FEATURE_UNUSED_UNIFORMS;
+		const deUint32	matFlags		= FEATURE_MATRIX_LAYOUT;
+		const deUint32	allFeatures		= ~FEATURE_ARRAYS_OF_ARRAYS;
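+		// allFeatures enables every random feature except arrays of arrays.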
+
+		tcu::TestCaseGroup* randomGroup = new tcu::TestCaseGroup(m_testCtx, "random", "Random Uniform Block cases");
+		addChild(randomGroup);
+
+		// Basic types.
+		createRandomCaseGroup(randomGroup, m_testCtx, "scalar_types",	"Scalar types only, per-block buffers",				UniformBlockCase::BUFFERMODE_PER_BLOCK,	allShaders|allLayouts|unused,										25, 0);
+		createRandomCaseGroup(randomGroup, m_testCtx, "vector_types",	"Scalar and vector types only, per-block buffers",	UniformBlockCase::BUFFERMODE_PER_BLOCK,	allShaders|allLayouts|unused|FEATURE_VECTORS,						25, 25);
+		createRandomCaseGroup(randomGroup, m_testCtx, "basic_types",	"All basic types, per-block buffers",				UniformBlockCase::BUFFERMODE_PER_BLOCK, allShaders|allLayouts|unused|allBasicTypes|matFlags,				25, 50);
+		createRandomCaseGroup(randomGroup, m_testCtx, "basic_arrays",	"Arrays, per-block buffers",						UniformBlockCase::BUFFERMODE_PER_BLOCK,	allShaders|allLayouts|unused|allBasicTypes|matFlags|FEATURE_ARRAYS,	25, 50);
+
+		createRandomCaseGroup(randomGroup, m_testCtx, "basic_instance_arrays",					"Basic instance arrays, per-block buffers",				UniformBlockCase::BUFFERMODE_PER_BLOCK,	allShaders|allLayouts|unused|allBasicTypes|matFlags|FEATURE_INSTANCE_ARRAYS,								25, 75);
+		createRandomCaseGroup(randomGroup, m_testCtx, "nested_structs",							"Nested structs, per-block buffers",					UniformBlockCase::BUFFERMODE_PER_BLOCK,	allShaders|allLayouts|unused|allBasicTypes|matFlags|FEATURE_STRUCTS,										25, 100);
+		createRandomCaseGroup(randomGroup, m_testCtx, "nested_structs_arrays",					"Nested structs, arrays, per-block buffers",			UniformBlockCase::BUFFERMODE_PER_BLOCK,	allShaders|allLayouts|unused|allBasicTypes|matFlags|FEATURE_STRUCTS|FEATURE_ARRAYS,							25, 150);
+		createRandomCaseGroup(randomGroup, m_testCtx, "nested_structs_instance_arrays",			"Nested structs, instance arrays, per-block buffers",	UniformBlockCase::BUFFERMODE_PER_BLOCK,	allShaders|allLayouts|unused|allBasicTypes|matFlags|FEATURE_STRUCTS|FEATURE_INSTANCE_ARRAYS,				25, 125);
+		createRandomCaseGroup(randomGroup, m_testCtx, "nested_structs_arrays_instance_arrays",	"Nested structs, instance arrays, per-block buffers",	UniformBlockCase::BUFFERMODE_PER_BLOCK,	allShaders|allLayouts|unused|allBasicTypes|matFlags|FEATURE_STRUCTS|FEATURE_ARRAYS|FEATURE_INSTANCE_ARRAYS,	25, 175);
+
+		createRandomCaseGroup(randomGroup, m_testCtx, "all_per_block_buffers",	"All random features, per-block buffers",	UniformBlockCase::BUFFERMODE_PER_BLOCK,	allFeatures,	50, 200);
+		createRandomCaseGroup(randomGroup, m_testCtx, "all_shared_buffer",		"All random features, shared buffer",		UniformBlockCase::BUFFERMODE_SINGLE,	allFeatures,	50, 250);
+	}
+}
+
+} // anonymous
+
+tcu::TestCaseGroup*	createTests	(tcu::TestContext& testCtx)
+{
+	return new UniformBlockTests(testCtx);
+}
+
+} // ubo
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/ubo/vktUniformBlockTests.hpp b/external/vulkancts/modules/vulkan/ubo/vktUniformBlockTests.hpp
new file mode 100644
index 0000000..d971426
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/ubo/vktUniformBlockTests.hpp
@@ -0,0 +1,51 @@
+#ifndef _VKTUNIFORMBLOCKTESTS_HPP
+#define _VKTUNIFORMBLOCKTESTS_HPP
+/*------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 The Khronos Group Inc.
+ * Copyright (c) 2015 Samsung Electronics Co., Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by Khronos,
+ * at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Uniform block tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+namespace ubo
+{
+
+tcu::TestCaseGroup*		createTests		(tcu::TestContext& testCtx);
+
+} // ubo
+} // vkt
+
+#endif // _VKTUNIFORMBLOCKTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/vktBuildPrograms.cpp b/external/vulkancts/modules/vulkan/vktBuildPrograms.cpp
new file mode 100644
index 0000000..edf1f8d
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktBuildPrograms.cpp
@@ -0,0 +1,285 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Utility for pre-compiling source programs to SPIR-V
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuCommandLine.hpp"
+#include "tcuPlatform.hpp"
+#include "tcuResource.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuTestHierarchyIterator.hpp"
+#include "deUniquePtr.hpp"
+#include "vkPrograms.hpp"
+#include "vkBinaryRegistry.hpp"
+#include "vktTestCase.hpp"
+#include "vktTestPackage.hpp"
+#include "deUniquePtr.hpp"
+#include "deCommandLine.hpp"
+
+#include <iostream>
+
+using std::vector;
+using std::string;
+using de::UniquePtr;
+using de::MovePtr;
+
+namespace vkt
+{
+
+tcu::TestPackageRoot* createRoot (tcu::TestContext& testCtx)
+{
+	vector<tcu::TestNode*>	children;
+	children.push_back(new TestPackage(testCtx));
+	return new tcu::TestPackageRoot(testCtx, children);
+}
+
+enum BuildMode
+{
+	BUILDMODE_BUILD = 0,
+	BUILDMODE_VERIFY,
+
+	BUILDMODE_LAST
+};
+
+struct BuildStats
+{
+	int		numSucceeded;
+	int		numFailed;
+
+	BuildStats (void)
+		: numSucceeded	(0)
+		, numFailed		(0)
+	{
+	}
+};
+
+namespace // anonymous
+{
+
+vk::ProgramBinary* compileProgram (const glu::ProgramSources& source, glu::ShaderProgramInfo* buildInfo)
+{
+	return vk::buildProgram(source, vk::PROGRAM_FORMAT_SPIRV, buildInfo);
+}
+
+vk::ProgramBinary* compileProgram (const vk::SpirVAsmSource& source, vk::SpirVProgramInfo* buildInfo)
+{
+	return vk::assembleProgram(source, buildInfo);
+}
+
+void writeVerboseLogs (const glu::ShaderProgramInfo& buildInfo)
+{
+	for (size_t shaderNdx = 0; shaderNdx < buildInfo.shaders.size(); shaderNdx++)
+	{
+		const glu::ShaderInfo&	shaderInfo	= buildInfo.shaders[shaderNdx];
+		const char* const		shaderName	= getShaderTypeName(shaderInfo.type);
+
+		tcu::print("%s source:\n---\n%s\n---\n", shaderName, shaderInfo.source.c_str());
+		tcu::print("%s compile log:\n---\n%s\n---\n", shaderName, shaderInfo.infoLog.c_str());
+	}
+}
+
+void writeVerboseLogs (const vk::SpirVProgramInfo& buildInfo)
+{
+	tcu::print("source:\n---\n%s\n---\n", buildInfo.source->program.str().c_str());
+	tcu::print("compile log:\n---\n%s\n---\n", buildInfo.infoLog.c_str());
+}
+
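+// Compile or assemble a single program; in build mode the resulting binary is stored to the
+// registry, in verify mode it is compared byte-for-byte against the previously stored binary.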
+template <typename InfoType, typename IteratorType>
+void buildProgram (const std::string&			casePath,
+				   bool							printLogs,
+				   IteratorType					iter,
+				   BuildMode					mode,
+				   BuildStats*					stats,
+				   vk::BinaryRegistryReader*	reader,
+				   vk::BinaryRegistryWriter*	writer)
+{
+	InfoType							buildInfo;
+	try
+	{
+		const vk::ProgramIdentifier			progId		(casePath, iter.getName());
+		const UniquePtr<vk::ProgramBinary>	binary		(compileProgram(iter.getProgram(), &buildInfo));
+
+		if (mode == BUILDMODE_BUILD)
+			writer->storeProgram(progId, *binary);
+		else
+		{
+			DE_ASSERT(mode == BUILDMODE_VERIFY);
+
+			const UniquePtr<vk::ProgramBinary>	storedBinary	(reader->loadProgram(progId));
+
+			if (binary->getSize() != storedBinary->getSize())
+				throw tcu::Exception("Binary size doesn't match");
+
+			if (deMemCmp(binary->getBinary(), storedBinary->getBinary(), binary->getSize()))
+				throw tcu::Exception("Binary contents don't match");
+		}
+
+		tcu::print("  OK: %s\n", iter.getName().c_str());
+		stats->numSucceeded += 1;
+	}
+	catch (const std::exception& e)
+	{
+		tcu::print("  ERROR: %s: %s\n", iter.getName().c_str(), e.what());
+		if (printLogs)
+		{
+			writeVerboseLogs(buildInfo);
+		}
+		stats->numFailed += 1;
+	}
+}
+
+} // anonymous
+
+BuildStats buildPrograms (tcu::TestContext& testCtx, const std::string& dstPath, BuildMode mode, bool verbose)
+{
+	const UniquePtr<tcu::TestPackageRoot>	root		(createRoot(testCtx));
+	tcu::DefaultHierarchyInflater			inflater	(testCtx);
+	tcu::TestHierarchyIterator				iterator	(*root, inflater, testCtx.getCommandLine());
+	const tcu::DirArchive					srcArchive	(dstPath.c_str());
+	UniquePtr<vk::BinaryRegistryWriter>		writer		(mode == BUILDMODE_BUILD	? new vk::BinaryRegistryWriter(dstPath)			: DE_NULL);
+	UniquePtr<vk::BinaryRegistryReader>		reader		(mode == BUILDMODE_VERIFY	? new vk::BinaryRegistryReader(srcArchive, "")	: DE_NULL);
+	BuildStats								stats;
+	const bool								printLogs	= verbose;
+
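+	// Iterate the full test hierarchy and compile every program declared by executable test cases.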
+	while (iterator.getState() != tcu::TestHierarchyIterator::STATE_FINISHED)
+	{
+		if (iterator.getState() == tcu::TestHierarchyIterator::STATE_ENTER_NODE &&
+			tcu::isTestNodeTypeExecutable(iterator.getNode()->getNodeType()))
+		{
+			const TestCase* const		testCase	= dynamic_cast<TestCase*>(iterator.getNode());
+			const string				casePath	= iterator.getNodePath();
+			vk::SourceCollections		progs;
+
+			tcu::print("%s\n", casePath.c_str());
+
+			testCase->initPrograms(progs);
+
+			for (vk::GlslSourceCollection::Iterator progIter = progs.glslSources.begin(); progIter != progs.glslSources.end(); ++progIter)
+			{
+				buildProgram<glu::ShaderProgramInfo, vk::GlslSourceCollection::Iterator>(casePath, printLogs, progIter, mode, &stats, reader.get(), writer.get());
+			}
+
+			for (vk::SpirVAsmCollection::Iterator progIter = progs.spirvAsmSources.begin(); progIter != progs.spirvAsmSources.end(); ++progIter)
+			{
+				buildProgram<vk::SpirVProgramInfo, vk::SpirVAsmCollection::Iterator>(casePath, printLogs, progIter, mode, &stats, reader.get(), writer.get());
+			}
+		}
+
+		iterator.next();
+	}
+
+	if (mode == BUILDMODE_BUILD)
+		writer->writeIndex();
+
+	return stats;
+}
+
+} // vkt
+
+namespace opt
+{
+
+DE_DECLARE_COMMAND_LINE_OPT(DstPath,	std::string);
+DE_DECLARE_COMMAND_LINE_OPT(Mode,		vkt::BuildMode);
+DE_DECLARE_COMMAND_LINE_OPT(Verbose,	bool);
+DE_DECLARE_COMMAND_LINE_OPT(Cases,		std::string);
+
+} // opt
+
+void registerOptions (de::cmdline::Parser& parser)
+{
+	using de::cmdline::Option;
+	using de::cmdline::NamedValue;
+
+	static const NamedValue<vkt::BuildMode> s_modes[] =
+	{
+		{ "build",	vkt::BUILDMODE_BUILD	},
+		{ "verify",	vkt::BUILDMODE_VERIFY	}
+	};
+
+	parser << Option<opt::DstPath>	("d", "dst-path",	"Destination path",	"out")
+		   << Option<opt::Mode>		("m", "mode",		"Build mode",		s_modes,	"build")
+		   << Option<opt::Verbose>	("v", "verbose",	"Verbose output")
+		   << Option<opt::Cases>	("n", "deqp-case",	"Case path filter (works as in test binaries)");
+}
+
+int main (int argc, const char* argv[])
+{
+	de::cmdline::CommandLine	cmdLine;
+	tcu::CommandLine			deqpCmdLine;
+
+	{
+		de::cmdline::Parser		parser;
+		registerOptions(parser);
+		if (!parser.parse(argc, argv, &cmdLine, std::cerr))
+		{
+			parser.help(std::cout);
+			return -1;
+		}
+	}
+
+	{
+		vector<const char*> deqpArgv;
+
+		deqpArgv.push_back("unused");
+
+		if (cmdLine.hasOption<opt::Cases>())
+		{
+			deqpArgv.push_back("--deqp-case");
+			deqpArgv.push_back(cmdLine.getOption<opt::Cases>().c_str());
+		}
+
+		if (!deqpCmdLine.parse((int)deqpArgv.size(), &deqpArgv[0]))
+			return -1;
+	}
+
+	try
+	{
+		tcu::DirArchive			archive			(".");
+		tcu::TestLog			log				(deqpCmdLine.getLogFileName(), deqpCmdLine.getLogFlags());
+		tcu::Platform			platform;
+		tcu::TestContext		testCtx			(platform, archive, log, deqpCmdLine, DE_NULL);
+
+		const vkt::BuildStats	stats			= vkt::buildPrograms(testCtx,
+																	 cmdLine.getOption<opt::DstPath>(),
+																	 cmdLine.getOption<opt::Mode>(),
+																	 cmdLine.getOption<opt::Verbose>());
+
+		tcu::print("DONE: %d passed, %d failed\n", stats.numSucceeded, stats.numFailed);
+
+		return stats.numFailed == 0 ? 0 : -1;
+	}
+	catch (const std::exception& e)
+	{
+		tcu::die("%s", e.what());
+	}
+}
diff --git a/external/vulkancts/modules/vulkan/vktInfoTests.cpp b/external/vulkancts/modules/vulkan/vktInfoTests.cpp
new file mode 100644
index 0000000..da125ae
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktInfoTests.cpp
@@ -0,0 +1,176 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2016 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Build, device and platform info tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktInfoTests.hpp"
+#include "vktTestCaseUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkApiVersion.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuFormatUtil.hpp"
+#include "tcuCommandLine.hpp"
+#include "tcuPlatform.hpp"
+#include "deStringUtil.hpp"
+
+namespace vkt
+{
+
+namespace
+{
+
+using tcu::TestLog;
+using std::string;
+
+std::string getOsName (int os)
+{
+	switch (os)
+	{
+		case DE_OS_VANILLA:		return "DE_OS_VANILLA";
+		case DE_OS_WIN32:		return "DE_OS_WIN32";
+		case DE_OS_UNIX:		return "DE_OS_UNIX";
+		case DE_OS_WINCE:		return "DE_OS_WINCE";
+		case DE_OS_OSX:			return "DE_OS_OSX";
+		case DE_OS_ANDROID:		return "DE_OS_ANDROID";
+		case DE_OS_SYMBIAN:		return "DE_OS_SYMBIAN";
+		case DE_OS_IOS:			return "DE_OS_IOS";
+		default:
+			return de::toString(os);
+	}
+}
+
+std::string getCompilerName (int compiler)
+{
+	switch (compiler)
+	{
+		case DE_COMPILER_VANILLA:	return "DE_COMPILER_VANILLA";
+		case DE_COMPILER_MSC:		return "DE_COMPILER_MSC";
+		case DE_COMPILER_GCC:		return "DE_COMPILER_GCC";
+		case DE_COMPILER_CLANG:		return "DE_COMPILER_CLANG";
+		default:
+			return de::toString(compiler);
+	}
+}
+
+std::string getCpuName (int cpu)
+{
+	switch (cpu)
+	{
+		case DE_CPU_VANILLA:	return "DE_CPU_VANILLA";
+		case DE_CPU_ARM:		return "DE_CPU_ARM";
+		case DE_CPU_X86:		return "DE_CPU_X86";
+		case DE_CPU_X86_64:		return "DE_CPU_X86_64";
+		case DE_CPU_ARM_64:		return "DE_CPU_ARM_64";
+		case DE_CPU_MIPS:		return "DE_CPU_MIPS";
+		case DE_CPU_MIPS_64:	return "DE_CPU_MIPS_64";
+		default:
+			return de::toString(cpu);
+	}
+}
+
+std::string getEndiannessName (int endianness)
+{
+	switch (endianness)
+	{
+		case DE_BIG_ENDIAN:		return "DE_BIG_ENDIAN";
+		case DE_LITTLE_ENDIAN:	return "DE_LITTLE_ENDIAN";
+		default:
+			return de::toString(endianness);
+	}
+}
+
+tcu::TestStatus logBuildInfo (Context& context)
+{
+#if defined(DE_DEBUG)
+	const bool	isDebug	= true;
+#else
+	const bool	isDebug	= false;
+#endif
+
+	context.getTestContext().getLog()
+		<< TestLog::Message
+		<< "DE_OS: " << getOsName(DE_OS) << "\n"
+		<< "DE_CPU: " << getCpuName(DE_CPU) << "\n"
+		<< "DE_PTR_SIZE: " << DE_PTR_SIZE << "\n"
+		<< "DE_ENDIANNESS: " << getEndiannessName(DE_ENDIANNESS) << "\n"
+		<< "DE_COMPILER: " << getCompilerName(DE_COMPILER) << "\n"
+		<< "DE_DEBUG: " << (isDebug ? "true" : "false") << "\n"
+		<< TestLog::EndMessage;
+
+	return tcu::TestStatus::pass("Not validated");
+}
+
+tcu::TestStatus logDeviceInfo (Context& context)
+{
+	TestLog&								log			= context.getTestContext().getLog();
+	const vk::VkPhysicalDeviceProperties&	properties	= context.getDeviceProperties();
+
+	log << TestLog::Message
+		<< "Using --deqp-vk-device-id="
+		<< context.getTestContext().getCommandLine().getVKDeviceId()
+		<< TestLog::EndMessage;
+
+	log << TestLog::Message
+		<< "apiVersion: " << vk::unpackVersion(properties.apiVersion) << "\n"
+		<< "driverVersion: " << tcu::toHex(properties.driverVersion) << "\n"
+		<< "deviceName: " << (const char*)properties.deviceName << "\n"
+		<< "vendorID: " << tcu::toHex(properties.vendorID) << "\n"
+		<< "deviceID: " << tcu::toHex(properties.deviceID) << "\n"
+		<< TestLog::EndMessage;
+
+	return tcu::TestStatus::pass("Not validated");
+}
+
+tcu::TestStatus logPlatformInfo (Context& context)
+{
+	std::ostringstream details;
+
+	context.getTestContext().getPlatform().getVulkanPlatform().describePlatform(details);
+
+	context.getTestContext().getLog()
+		<< TestLog::Message
+		<< details.str()
+		<< TestLog::EndMessage;
+
+	return tcu::TestStatus::pass("Not validated");
+}
+
+} // anonymous
+
+void createInfoTests (tcu::TestCaseGroup* testGroup)
+{
+	addFunctionCase(testGroup, "build",		"Build Info",		logBuildInfo);
+	addFunctionCase(testGroup, "device",	"Device Info",		logDeviceInfo);
+	addFunctionCase(testGroup, "platform",	"Platform Info",	logPlatformInfo);
+}
+
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/vktInfoTests.hpp b/external/vulkancts/modules/vulkan/vktInfoTests.hpp
new file mode 100644
index 0000000..1a76ea9
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktInfoTests.hpp
@@ -0,0 +1,47 @@
+#ifndef _VKTINFOTESTS_HPP
+#define _VKTINFOTESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2016 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Build, device and platform info tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+
+void	createInfoTests		(tcu::TestCaseGroup* testGroup);
+
+} // vkt
+
+#endif // _VKTINFOTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/vktRenderPassTests.cpp b/external/vulkancts/modules/vulkan/vktRenderPassTests.cpp
new file mode 100644
index 0000000..9ebe800
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktRenderPassTests.cpp
@@ -0,0 +1,4612 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief RenderPass tests
+ *//*--------------------------------------------------------------------*/
+
+#include "vktRenderPassTests.hpp"
+
+#include "vktTestCaseUtil.hpp"
+
+#include "vkDefs.hpp"
+#include "vkDeviceUtil.hpp"
+#include "vkImageUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPlatform.hpp"
+#include "vkPrograms.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkStrUtil.hpp"
+#include "vkTypeUtil.hpp"
+
+#include "tcuTestLog.hpp"
+#include "tcuResultCollector.hpp"
+#include "tcuFormatUtil.hpp"
+#include "tcuTextureUtil.hpp"
+#include "tcuFloat.hpp"
+#include "tcuMaybe.hpp"
+#include "tcuVectorUtil.hpp"
+
+#include "deUniquePtr.hpp"
+#include "deSharedPtr.hpp"
+#include "deStringUtil.hpp"
+#include "deSTLUtil.hpp"
+#include "deRandom.hpp"
+
+#include <limits>
+
+using namespace vk;
+
+using tcu::Maybe;
+using tcu::nothing;
+using tcu::just;
+using tcu::TestLog;
+using tcu::Vec2;
+using tcu::IVec2;
+using tcu::UVec2;
+using tcu::IVec4;
+using tcu::UVec4;
+using tcu::Vec4;
+using tcu::BVec4;
+using tcu::ConstPixelBufferAccess;
+using tcu::PixelBufferAccess;
+
+using de::UniquePtr;
+
+using std::vector;
+using std::string;
+
+namespace vkt
+{
+namespace
+{
+enum
+{
+	STENCIL_VALUE = 84u,
+	// Limit integer values to those exactly representable as floats
+	MAX_INTEGER_VALUE = ((1u<<22u)-1u)
+};
+
+// Utility functions using flattened structs
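+// Each wrapper takes the create-info members as individual parameters and fills in sType/pNext itself.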
+Move<VkFence> createFence (const DeviceInterface& vk, VkDevice device, VkFenceCreateFlags flags)
+{
+	const VkFenceCreateInfo pCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+		DE_NULL,
+
+		flags
+	};
+	return createFence(vk, device, &pCreateInfo);
+}
+
+Move<VkFramebuffer> createFramebuffer (const DeviceInterface&	vk,
+									   VkDevice					device,
+									   VkFramebufferCreateFlags	pCreateInfo_flags,
+									   VkRenderPass				pCreateInfo_renderPass,
+									   deUint32					pCreateInfo_attachmentCount,
+									   const VkImageView*		pCreateInfo_pAttachments,
+									   deUint32					pCreateInfo_width,
+									   deUint32					pCreateInfo_height,
+									   deUint32					pCreateInfo_layers)
+{
+	const VkFramebufferCreateInfo pCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
+		DE_NULL,
+		pCreateInfo_flags,
+		pCreateInfo_renderPass,
+		pCreateInfo_attachmentCount,
+		pCreateInfo_pAttachments,
+		pCreateInfo_width,
+		pCreateInfo_height,
+		pCreateInfo_layers,
+	};
+	return createFramebuffer(vk, device, &pCreateInfo);
+}
+
+Move<VkImage> createImage (const DeviceInterface&	vk,
+						   VkDevice					device,
+						   VkImageCreateFlags		pCreateInfo_flags,
+						   VkImageType				pCreateInfo_imageType,
+						   VkFormat					pCreateInfo_format,
+						   VkExtent3D				pCreateInfo_extent,
+						   deUint32					pCreateInfo_mipLevels,
+						   deUint32					pCreateInfo_arrayLayers,
+						   VkSampleCountFlagBits	pCreateInfo_samples,
+						   VkImageTiling			pCreateInfo_tiling,
+						   VkImageUsageFlags		pCreateInfo_usage,
+						   VkSharingMode			pCreateInfo_sharingMode,
+						   deUint32					pCreateInfo_queueFamilyCount,
+						   const deUint32*			pCreateInfo_pQueueFamilyIndices,
+						   VkImageLayout			pCreateInfo_initialLayout)
+{
+	const VkImageCreateInfo pCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+		DE_NULL,
+		pCreateInfo_flags,
+		pCreateInfo_imageType,
+		pCreateInfo_format,
+		pCreateInfo_extent,
+		pCreateInfo_mipLevels,
+		pCreateInfo_arrayLayers,
+		pCreateInfo_samples,
+		pCreateInfo_tiling,
+		pCreateInfo_usage,
+		pCreateInfo_sharingMode,
+		pCreateInfo_queueFamilyCount,
+		pCreateInfo_pQueueFamilyIndices,
+		pCreateInfo_initialLayout
+	};
+	return createImage(vk, device, &pCreateInfo);
+}
+
+void bindBufferMemory (const DeviceInterface& vk, VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset)
+{
+	VK_CHECK(vk.bindBufferMemory(device, buffer, mem, memOffset));
+}
+
+void bindImageMemory (const DeviceInterface& vk, VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset)
+{
+	VK_CHECK(vk.bindImageMemory(device, image, mem, memOffset));
+}
+
+Move<VkImageView> createImageView (const DeviceInterface&	vk,
+									VkDevice				device,
+									VkImageViewCreateFlags	pCreateInfo_flags,
+									VkImage					pCreateInfo_image,
+									VkImageViewType			pCreateInfo_viewType,
+									VkFormat				pCreateInfo_format,
+									VkComponentMapping		pCreateInfo_components,
+									VkImageSubresourceRange	pCreateInfo_subresourceRange)
+{
+	const VkImageViewCreateInfo pCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+		DE_NULL,
+		pCreateInfo_flags,
+		pCreateInfo_image,
+		pCreateInfo_viewType,
+		pCreateInfo_format,
+		pCreateInfo_components,
+		pCreateInfo_subresourceRange,
+	};
+	return createImageView(vk, device, &pCreateInfo);
+}
+
+Move<VkBuffer> createBuffer (const DeviceInterface&	vk,
+							 VkDevice				device,
+							 VkBufferCreateFlags	pCreateInfo_flags,
+							 VkDeviceSize			pCreateInfo_size,
+							 VkBufferUsageFlags		pCreateInfo_usage,
+							 VkSharingMode			pCreateInfo_sharingMode,
+							 deUint32				pCreateInfo_queueFamilyCount,
+							 const deUint32*		pCreateInfo_pQueueFamilyIndices)
+{
+	const VkBufferCreateInfo pCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+		DE_NULL,
+		pCreateInfo_flags,
+		pCreateInfo_size,
+		pCreateInfo_usage,
+		pCreateInfo_sharingMode,
+		pCreateInfo_queueFamilyCount,
+		pCreateInfo_pQueueFamilyIndices,
+	};
+	return createBuffer(vk, device, &pCreateInfo);
+}
+
+Move<VkCommandPool> createCommandPool (const DeviceInterface&	vk,
+									   VkDevice					device,
+									   VkCommandPoolCreateFlags	pCreateInfo_flags,
+									   deUint32					pCreateInfo_queueFamilyIndex)
+{
+	const VkCommandPoolCreateInfo pCreateInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+		DE_NULL,
+		pCreateInfo_flags,
+		pCreateInfo_queueFamilyIndex,
+	};
+	return createCommandPool(vk, device, &pCreateInfo);
+}
+
+void cmdBeginRenderPass (const DeviceInterface&	vk,
+						 VkCommandBuffer		cmdBuffer,
+						 VkRenderPass			pRenderPassBegin_renderPass,
+						 VkFramebuffer			pRenderPassBegin_framebuffer,
+						 VkRect2D				pRenderPassBegin_renderArea,
+						 deUint32				pRenderPassBegin_clearValueCount,
+						 const VkClearValue*	pRenderPassBegin_pAttachmentClearValues,
+						 VkSubpassContents		contents)
+{
+	const VkRenderPassBeginInfo pRenderPassBegin =
+	{
+		VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+		DE_NULL,
+		pRenderPassBegin_renderPass,
+		pRenderPassBegin_framebuffer,
+		pRenderPassBegin_renderArea,
+		pRenderPassBegin_clearValueCount,
+		pRenderPassBegin_pAttachmentClearValues,
+	};
+	vk.cmdBeginRenderPass(cmdBuffer, &pRenderPassBegin, contents);
+}
+
+Move<VkCommandBuffer> allocateCommandBuffer (const DeviceInterface&	vk,
+											 VkDevice				device,
+											 VkCommandPool			pCreateInfo_commandPool,
+											 VkCommandBufferLevel	pCreateInfo_level)
+{
+	const VkCommandBufferAllocateInfo pAllocateInfo =
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+		DE_NULL,
+		pCreateInfo_commandPool,
+		pCreateInfo_level,
+		1u,												// commandBufferCount
+	};
+	return allocateCommandBuffer(vk, device, &pAllocateInfo);
+}
+
+void beginCommandBuffer (const DeviceInterface&			vk,
+						 VkCommandBuffer				cmdBuffer,
+						 VkCommandBufferUsageFlags		pBeginInfo_flags,
+						 VkRenderPass					pInheritanceInfo_renderPass,
+						 deUint32						pInheritanceInfo_subpass,
+						 VkFramebuffer					pInheritanceInfo_framebuffer,
+						 VkBool32						pInheritanceInfo_occlusionQueryEnable,
+						 VkQueryControlFlags			pInheritanceInfo_queryFlags,
+						 VkQueryPipelineStatisticFlags	pInheritanceInfo_pipelineStatistics)
+{
+	const VkCommandBufferInheritanceInfo	pInheritanceInfo	=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+		DE_NULL,
+		pInheritanceInfo_renderPass,
+		pInheritanceInfo_subpass,
+		pInheritanceInfo_framebuffer,
+		pInheritanceInfo_occlusionQueryEnable,
+		pInheritanceInfo_queryFlags,
+		pInheritanceInfo_pipelineStatistics,
+	};
+	const VkCommandBufferBeginInfo			pBeginInfo			=
+	{
+		VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+		DE_NULL,
+		pBeginInfo_flags,
+		&pInheritanceInfo,
+	};
+	VK_CHECK(vk.beginCommandBuffer(cmdBuffer, &pBeginInfo));
+}
+
+void endCommandBuffer (const DeviceInterface& vk, VkCommandBuffer cmdBuffer)
+{
+	VK_CHECK(vk.endCommandBuffer(cmdBuffer));
+}
+
+void queueSubmit (const DeviceInterface& vk, VkQueue queue, deUint32 cmdBufferCount, const VkCommandBuffer* pCmdBuffers, VkFence fence)
+{
+	const VkSubmitInfo submitInfo =
+	{
+		VK_STRUCTURE_TYPE_SUBMIT_INFO,
+		DE_NULL,
+		0u,								// waitSemaphoreCount
+		(const VkSemaphore*)DE_NULL,	// pWaitSemaphores
+		(const VkPipelineStageFlags*)DE_NULL,
+		cmdBufferCount,					// commandBufferCount
+		pCmdBuffers,
+		0u,								// signalSemaphoreCount
+		(const VkSemaphore*)DE_NULL,	// pSignalSemaphores
+	};
+	VK_CHECK(vk.queueSubmit(queue, 1u, &submitInfo, fence));
+}
+
+void waitForFences (const DeviceInterface& vk, VkDevice device, deUint32 fenceCount, const VkFence* pFences, VkBool32 waitAll, deUint64 timeout)
+{
+	VK_CHECK(vk.waitForFences(device, fenceCount, pFences, waitAll, timeout));
+}
+
+VkImageAspectFlags getImageAspectFlags (VkFormat vkFormat)
+{
+	const tcu::TextureFormat format = mapVkFormat(vkFormat);
+
+	DE_STATIC_ASSERT(tcu::TextureFormat::CHANNELORDER_LAST == 21);
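+	// The assert above guards the switch below: re-check the aspect mapping if new channel orders are added.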
+
+	switch (format.order)
+	{
+		case tcu::TextureFormat::DS:
+			return VK_IMAGE_ASPECT_STENCIL_BIT | VK_IMAGE_ASPECT_DEPTH_BIT;
+
+		case tcu::TextureFormat::D:
+			return VK_IMAGE_ASPECT_DEPTH_BIT;
+
+		case tcu::TextureFormat::S:
+			return VK_IMAGE_ASPECT_STENCIL_BIT;
+
+		default:
+			return VK_IMAGE_ASPECT_COLOR_BIT;
+	}
+}
+
+VkAccessFlags getAllMemoryReadFlags (void)
+{
+	return VK_ACCESS_TRANSFER_READ_BIT
+		   | VK_ACCESS_UNIFORM_READ_BIT
+		   | VK_ACCESS_HOST_READ_BIT
+		   | VK_ACCESS_INDEX_READ_BIT
+		   | VK_ACCESS_SHADER_READ_BIT
+		   | VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT
+		   | VK_ACCESS_INDIRECT_COMMAND_READ_BIT
+		   | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT
+		   | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT
+		   | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
+}
+
+VkAccessFlags getAllMemoryWriteFlags (void)
+{
+	return VK_ACCESS_TRANSFER_WRITE_BIT
+		   | VK_ACCESS_HOST_WRITE_BIT
+		   | VK_ACCESS_SHADER_WRITE_BIT
+		   | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
+		   | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+}
+
+VkPipelineStageFlags getAllPipelineStageFlags (void)
+{
+	return VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT
+		   | VK_PIPELINE_STAGE_TRANSFER_BIT
+		   | VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT
+		   | VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT
+		   | VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT
+		   | VK_PIPELINE_STAGE_VERTEX_INPUT_BIT
+		   | VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
+		   | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
+		   | VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT
+		   | VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
+		   | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT
+		   | VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
+		   | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
+}
+
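+// Value classes mirroring the Vulkan render pass creation structures
+// (attachment references and descriptions, subpasses, dependencies). Unlike
+// the pointer-based Vk* structs built from them later, these own their data,
+// so a whole RenderPass description can be stored, logged and inspected.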
+class AttachmentReference
+{
+public:
+					AttachmentReference		(deUint32		attachment,
+											 VkImageLayout	layout)
+		: m_attachment	(attachment)
+		, m_layout		(layout)
+	{
+	}
+
+	deUint32		getAttachment			(void) const { return m_attachment;	}
+	VkImageLayout	getImageLayout			(void) const { return m_layout;		}
+
+private:
+	deUint32		m_attachment;
+	VkImageLayout	m_layout;
+};
+
+class Subpass
+{
+public:
+										Subpass						(VkPipelineBindPoint				pipelineBindPoint,
+																	 VkSubpassDescriptionFlags			flags,
+																	 const vector<AttachmentReference>&	inputAttachments,
+																	 const vector<AttachmentReference>&	colorAttachments,
+																	 const vector<AttachmentReference>&	resolveAttachments,
+																	 AttachmentReference				depthStencilAttachment,
+																	 const vector<AttachmentReference>&	preserveAttachments)
+		: m_pipelineBindPoint		(pipelineBindPoint)
+		, m_flags					(flags)
+		, m_inputAttachments		(inputAttachments)
+		, m_colorAttachments		(colorAttachments)
+		, m_resolveAttachments		(resolveAttachments)
+		, m_depthStencilAttachment	(depthStencilAttachment)
+		, m_preserveAttachments		(preserveAttachments)
+	{
+	}
+
+	VkPipelineBindPoint					getPipelineBindPoint		(void) const { return m_pipelineBindPoint;		}
+	VkSubpassDescriptionFlags			getFlags					(void) const { return m_flags;					}
+	const vector<AttachmentReference>&	getInputAttachments			(void) const { return m_inputAttachments;		}
+	const vector<AttachmentReference>&	getColorAttachments			(void) const { return m_colorAttachments;		}
+	const vector<AttachmentReference>&	getResolveAttachments		(void) const { return m_resolveAttachments;		}
+	const AttachmentReference&			getDepthStencilAttachment	(void) const { return m_depthStencilAttachment;	}
+	const vector<AttachmentReference>&	getPreserveAttachments		(void) const { return m_preserveAttachments;	}
+
+private:
+	VkPipelineBindPoint					m_pipelineBindPoint;
+	VkSubpassDescriptionFlags			m_flags;
+
+	vector<AttachmentReference>			m_inputAttachments;
+	vector<AttachmentReference>			m_colorAttachments;
+	vector<AttachmentReference>			m_resolveAttachments;
+	AttachmentReference					m_depthStencilAttachment;
+
+	vector<AttachmentReference>			m_preserveAttachments;
+};
+
+class SubpassDependency
+{
+public:
+							SubpassDependency	(deUint32				srcPass,
+												 deUint32				dstPass,
+
+												 VkPipelineStageFlags	srcStageMask,
+												 VkPipelineStageFlags	dstStageMask,
+
+												 VkAccessFlags			outputMask,
+												 VkAccessFlags			inputMask,
+
+												 VkDependencyFlags		flags)
+		: m_srcPass			(srcPass)
+		, m_dstPass			(dstPass)
+
+		, m_srcStageMask	(srcStageMask)
+		, m_dstStageMask	(dstStageMask)
+
+		, m_outputMask		(outputMask)
+		, m_inputMask		(inputMask)
+		, m_flags			(flags)
+	{
+	}
+
+	deUint32				getSrcPass			(void) const { return m_srcPass;		}
+	deUint32				getDstPass			(void) const { return m_dstPass;		}
+
+	VkPipelineStageFlags	getSrcStageMask		(void) const { return m_srcStageMask;	}
+	VkPipelineStageFlags	getDstStageMask		(void) const { return m_dstStageMask;	}
+
+	VkAccessFlags			getOutputMask		(void) const { return m_outputMask;		}
+	VkAccessFlags			getInputMask		(void) const { return m_inputMask;		}
+
+	VkDependencyFlags		getFlags			(void) const { return m_flags;		}
+
+private:
+	deUint32				m_srcPass;
+	deUint32				m_dstPass;
+
+	VkPipelineStageFlags	m_srcStageMask;
+	VkPipelineStageFlags	m_dstStageMask;
+
+	VkAccessFlags			m_outputMask;
+	VkAccessFlags			m_inputMask;
+	VkDependencyFlags		m_flags;
+};
+
+class Attachment
+{
+public:
+						Attachment			(VkFormat				format,
+											 VkSampleCountFlagBits	samples,
+
+											 VkAttachmentLoadOp		loadOp,
+											 VkAttachmentStoreOp	storeOp,
+
+											 VkAttachmentLoadOp		stencilLoadOp,
+											 VkAttachmentStoreOp	stencilStoreOp,
+
+											 VkImageLayout			initialLayout,
+											 VkImageLayout			finalLayout)
+		: m_format			(format)
+		, m_samples			(samples)
+
+		, m_loadOp			(loadOp)
+		, m_storeOp			(storeOp)
+
+		, m_stencilLoadOp	(stencilLoadOp)
+		, m_stencilStoreOp	(stencilStoreOp)
+
+		, m_initialLayout	(initialLayout)
+		, m_finalLayout		(finalLayout)
+	{
+	}
+
+	VkFormat				getFormat			(void) const { return m_format;			}
+	VkSampleCountFlagBits	getSamples			(void) const { return m_samples;		}
+
+	VkAttachmentLoadOp		getLoadOp			(void) const { return m_loadOp;			}
+	VkAttachmentStoreOp		getStoreOp			(void) const { return m_storeOp;		}
+
+	VkAttachmentLoadOp		getStencilLoadOp	(void) const { return m_stencilLoadOp;	}
+	VkAttachmentStoreOp		getStencilStoreOp	(void) const { return m_stencilStoreOp;	}
+
+	VkImageLayout			getInitialLayout	(void) const { return m_initialLayout;	}
+	VkImageLayout			getFinalLayout		(void) const { return m_finalLayout;	}
+
+private:
+	VkFormat				m_format;
+	VkSampleCountFlagBits	m_samples;
+
+	VkAttachmentLoadOp		m_loadOp;
+	VkAttachmentStoreOp		m_storeOp;
+
+	VkAttachmentLoadOp		m_stencilLoadOp;
+	VkAttachmentStoreOp		m_stencilStoreOp;
+
+	VkImageLayout			m_initialLayout;
+	VkImageLayout			m_finalLayout;
+};
+
+class RenderPass
+{
+public:
+										RenderPass		(const vector<Attachment>&			attachments,
+														 const vector<Subpass>&				subpasses,
+														 const vector<SubpassDependency>&	dependencies)
+		: m_attachments		(attachments)
+		, m_subpasses		(subpasses)
+		, m_dependencies	(dependencies)
+	{
+	}
+
+	const vector<Attachment>&			getAttachments	(void) const { return m_attachments;	}
+	const vector<Subpass>&				getSubpasses	(void) const { return m_subpasses;		}
+	const vector<SubpassDependency>&	getDependencies	(void) const { return m_dependencies;	}
+
+private:
+	const vector<Attachment>			m_attachments;
+	const vector<Subpass>				m_subpasses;
+	const vector<SubpassDependency>		m_dependencies;
+};
+
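+// Complete description of one test case: the render pass to create plus how
+// to exercise it (clears and/or draws), whether subpass contents are recorded
+// inline or in secondary command buffers, the attachment memory kind, the
+// render area and a seed for the random clear/draw values. For example (with
+// illustrative values, not taken from this change):
+//   TestConfig(renderPass,
+//              TestConfig::RENDERTYPES_CLEAR|TestConfig::RENDERTYPES_DRAW,
+//              TestConfig::COMMANDBUFFERTYPES_INLINE,
+//              TestConfig::IMAGEMEMORY_STRICT,
+//              UVec2(64, 64), UVec2(0, 0), UVec2(64, 64), 42u);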
+struct TestConfig
+{
+	enum RenderTypes
+	{
+		RENDERTYPES_NONE	= 0,
+		RENDERTYPES_CLEAR	= (1<<1),
+		RENDERTYPES_DRAW	= (1<<2)
+	};
+
+	enum CommandBufferTypes
+	{
+		COMMANDBUFFERTYPES_INLINE		= (1<<0),
+		COMMANDBUFFERTYPES_SECONDARY	= (1<<1)
+	};
+
+	enum ImageMemory
+	{
+		IMAGEMEMORY_STRICT		= (1<<0),
+		IMAGEMEMORY_LAZY		= (1<<1)
+	};
+
+	TestConfig (const RenderPass&	renderPass_,
+				RenderTypes			renderTypes_,
+				CommandBufferTypes	commandBufferTypes_,
+				ImageMemory			imageMemory_,
+				const UVec2&		targetSize_,
+				const UVec2&		renderPos_,
+				const UVec2&		renderSize_,
+				deUint32			seed_)
+		: renderPass			(renderPass_)
+		, renderTypes			(renderTypes_)
+		, commandBufferTypes	(commandBufferTypes_)
+		, imageMemory			(imageMemory_)
+		, targetSize			(targetSize_)
+		, renderPos				(renderPos_)
+		, renderSize			(renderSize_)
+		, seed					(seed_)
+	{
+	}
+
+	RenderPass			renderPass;
+	RenderTypes			renderTypes;
+	CommandBufferTypes	commandBufferTypes;
+	ImageMemory			imageMemory;
+	UVec2				targetSize;
+	UVec2				renderPos;
+	UVec2				renderSize;
+	deUint32			seed;
+};
+
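+// RenderTypes, CommandBufferTypes and ImageMemory are used as bitfields, so
+// provide OR operators that preserve the enum types.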
+TestConfig::RenderTypes operator| (TestConfig::RenderTypes a, TestConfig::RenderTypes b)
+{
+	return (TestConfig::RenderTypes)(((deUint32)a) | ((deUint32)b));
+}
+
+TestConfig::CommandBufferTypes operator| (TestConfig::CommandBufferTypes a, TestConfig::CommandBufferTypes b)
+{
+	return (TestConfig::CommandBufferTypes)(((deUint32)a) | ((deUint32)b));
+}
+
+TestConfig::ImageMemory operator| (TestConfig::ImageMemory a, TestConfig::ImageMemory b)
+{
+	return (TestConfig::ImageMemory)(((deUint32)a) | ((deUint32)b));
+}
+
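+// Writes a human-readable description of the render pass (attachments,
+// subpasses and their attachment references, dependencies) to the test log.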
+void logRenderPassInfo (TestLog&			log,
+						const RenderPass&	renderPass)
+{
+	const tcu::ScopedLogSection section (log, "RenderPass", "RenderPass");
+
+	{
+		const tcu::ScopedLogSection	attachmentsSection	(log, "Attachments", "Attachments");
+		const vector<Attachment>&	attachments			= renderPass.getAttachments();
+
+		for (size_t attachmentNdx = 0; attachmentNdx < attachments.size(); attachmentNdx++)
+		{
+			const tcu::ScopedLogSection	attachmentSection	(log, "Attachment" + de::toString(attachmentNdx), "Attachment " + de::toString(attachmentNdx));
+			const Attachment&			attachment			= attachments[attachmentNdx];
+
+			log << TestLog::Message << "Format: " << attachment.getFormat() << TestLog::EndMessage;
+			log << TestLog::Message << "Samples: " << attachment.getSamples() << TestLog::EndMessage;
+
+			log << TestLog::Message << "LoadOp: " << attachment.getLoadOp() << TestLog::EndMessage;
+			log << TestLog::Message << "StoreOp: " << attachment.getStoreOp() << TestLog::EndMessage;
+
+			log << TestLog::Message << "StencilLoadOp: " << attachment.getStencilLoadOp() << TestLog::EndMessage;
+			log << TestLog::Message << "StencilStoreOp: " << attachment.getStencilStoreOp() << TestLog::EndMessage;
+
+			log << TestLog::Message << "InitialLayout: " << attachment.getInitialLayout() << TestLog::EndMessage;
+			log << TestLog::Message << "FinalLayout: " << attachment.getFinalLayout() << TestLog::EndMessage;
+		}
+	}
+
+	{
+		const tcu::ScopedLogSection	subpassesSection	(log, "Subpasses", "Subpasses");
+		const vector<Subpass>&		subpasses			= renderPass.getSubpasses();
+
+		for (size_t subpassNdx = 0; subpassNdx < subpasses.size(); subpassNdx++)
+		{
+			const tcu::ScopedLogSection			subpassSection		(log, "Subpass" + de::toString(subpassNdx), "Subpass " + de::toString(subpassNdx));
+			const Subpass&						subpass				= subpasses[subpassNdx];
+
+			const vector<AttachmentReference>&	inputAttachments	= subpass.getInputAttachments();
+			const vector<AttachmentReference>&	colorAttachments	= subpass.getColorAttachments();
+			const vector<AttachmentReference>&	resolveAttachments	= subpass.getResolveAttachments();
+			const vector<AttachmentReference>&	preserveAttachments	= subpass.getPreserveAttachments();
+
+			if (!inputAttachments.empty())
+			{
+				const tcu::ScopedLogSection		inputAttachmentsSection	(log, "Inputs", "Inputs");
+
+				for (size_t inputNdx = 0; inputNdx < inputAttachments.size(); inputNdx++)
+				{
+					const tcu::ScopedLogSection		inputAttachmentSection	(log, "Input" + de::toString(inputNdx), "Input " + de::toString(inputNdx));
+					const AttachmentReference&		inputAttachment			= inputAttachments[inputNdx];
+
+					log << TestLog::Message << "Attachment: " << inputAttachment.getAttachment() << TestLog::EndMessage;
+					log << TestLog::Message << "Layout: " << inputAttachment.getImageLayout() << TestLog::EndMessage;
+				}
+			}
+
+			if (subpass.getDepthStencilAttachment().getAttachment() != VK_ATTACHMENT_UNUSED)
+			{
+				const tcu::ScopedLogSection		depthStencilAttachmentSection	(log, "DepthStencil", "DepthStencil");
+				const AttachmentReference&		depthStencilAttachment			= subpass.getDepthStencilAttachment();
+
+				log << TestLog::Message << "Attachment: " << depthStencilAttachment.getAttachment() << TestLog::EndMessage;
+				log << TestLog::Message << "Layout: " << depthStencilAttachment.getImageLayout() << TestLog::EndMessage;
+			}
+
+			if (!colorAttachments.empty())
+			{
+				const tcu::ScopedLogSection		colorAttachmentsSection	(log, "Colors", "Colors");
+
+				for (size_t colorNdx = 0; colorNdx < colorAttachments.size(); colorNdx++)
+				{
+					const tcu::ScopedLogSection		colorAttachmentSection	(log, "Color" + de::toString(colorNdx), "Color " + de::toString(colorNdx));
+					const AttachmentReference&		colorAttachment			= colorAttachments[colorNdx];
+
+					log << TestLog::Message << "Attachment: " << colorAttachment.getAttachment() << TestLog::EndMessage;
+					log << TestLog::Message << "Layout: " << colorAttachment.getImageLayout() << TestLog::EndMessage;
+				}
+			}
+
+			if (!resolveAttachments.empty())
+			{
+				const tcu::ScopedLogSection		resolveAttachmentsSection	(log, "Resolves", "Resolves");
+
+				for (size_t resolveNdx = 0; resolveNdx < resolveAttachments.size(); resolveNdx++)
+				{
+					const tcu::ScopedLogSection		resolveAttachmentSection	(log, "Resolve" + de::toString(resolveNdx), "Resolve " + de::toString(resolveNdx));
+					const AttachmentReference&		resolveAttachment			= resolveAttachments[resolveNdx];
+
+					log << TestLog::Message << "Attachment: " << resolveAttachment.getAttachment() << TestLog::EndMessage;
+					log << TestLog::Message << "Layout: " << resolveAttachment.getImageLayout() << TestLog::EndMessage;
+				}
+			}
+
+			if (!preserveAttachments.empty())
+			{
+				const tcu::ScopedLogSection		preserveAttachmentsSection	(log, "Preserves", "Preserves");
+
+				for (size_t preserveNdx = 0; preserveNdx < preserveAttachments.size(); preserveNdx++)
+				{
+					const tcu::ScopedLogSection		preserveAttachmentSection	(log, "Preserve" + de::toString(preserveNdx), "Preserve " + de::toString(preserveNdx));
+					const AttachmentReference&		preserveAttachment			= preserveAttachments[preserveNdx];
+
+					log << TestLog::Message << "Attachment: " << preserveAttachment.getAttachment() << TestLog::EndMessage;
+					log << TestLog::Message << "Layout: " << preserveAttachment.getImageLayout() << TestLog::EndMessage;
+				}
+			}
+		}
+	}
+
+	if (!renderPass.getDependencies().empty())
+	{
+		const tcu::ScopedLogSection	dependenciesSection	(log, "Dependencies", "Dependencies");
+
+		for (size_t depNdx = 0; depNdx < renderPass.getDependencies().size(); depNdx++)
+		{
+			const tcu::ScopedLogSection	dependencySection	(log, "Dependency" + de::toString(depNdx), "Dependency " + de::toString(depNdx));
+			const SubpassDependency&	dep					= renderPass.getDependencies()[depNdx];
+
+			log << TestLog::Message << "Source: " << dep.getSrcPass() << TestLog::EndMessage;
+			log << TestLog::Message << "Destination: " << dep.getDstPass() << TestLog::EndMessage;
+
+			log << TestLog::Message << "Source Stage Mask: " << dep.getSrcStageMask() << TestLog::EndMessage;
+			log << TestLog::Message << "Destination Stage Mask: " << dep.getDstStageMask() << TestLog::EndMessage;
+
+			log << TestLog::Message << "Input Mask: " << dep.getInputMask() << TestLog::EndMessage;
+			log << TestLog::Message << "Output Mask: " << dep.getOutputMask() << TestLog::EndMessage;
+			log << TestLog::Message << "Dependency Flags: " << getDependencyFlagsStr(dep.getFlags()) << TestLog::EndMessage;
+		}
+	}
+}
+
+std::string clearColorToString (VkFormat vkFormat, VkClearColorValue value)
+{
+	const tcu::TextureFormat		format			= mapVkFormat(vkFormat);
+	const tcu::TextureChannelClass	channelClass	= tcu::getTextureChannelClass(format.type);
+	const tcu::BVec4				channelMask		= tcu::getTextureFormatChannelMask(format);
+
+	std::ostringstream				stream;
+
+	stream << "(";
+
+	switch (channelClass)
+	{
+		case tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER:
+			for (int i = 0; i < 4; i++)
+			{
+				if (i > 0)
+					stream << ", ";
+
+				if (channelMask[i])
+					stream << value.int32[i];
+				else
+					stream << "Undef";
+			}
+			break;
+
+		case tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER:
+			for (int i = 0; i < 4; i++)
+			{
+				if (i > 0)
+					stream << ", ";
+
+				if (channelMask[i])
+					stream << value.uint32[i];
+				else
+					stream << "Undef";
+			}
+			break;
+
+		case tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT:
+		case tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT:
+		case tcu::TEXTURECHANNELCLASS_FLOATING_POINT:
+			for (int i = 0; i < 4; i++)
+			{
+				if (i > 0)
+					stream << ", ";
+
+				if (channelMask[i])
+					stream << value.float32[i];
+				else
+					stream << "Undef";
+			}
+			break;
+
+		default:
+			DE_FATAL("Unknown channel class");
+	}
+
+	stream << ")";
+
+	return stream.str();
+}
+
+std::string clearValueToString (VkFormat vkFormat, VkClearValue value)
+{
+	const tcu::TextureFormat	format	= mapVkFormat(vkFormat);
+
+	if (tcu::hasStencilComponent(format.order) || tcu::hasDepthComponent(format.order))
+	{
+		std::ostringstream stream;
+
+		stream << "(";
+
+		if (tcu::hasStencilComponent(format.order))
+			stream << "stencil: " << value.depthStencil.stencil;
+
+		if (tcu::hasStencilComponent(format.order) && tcu::hasDepthComponent(format.order))
+			stream << ", ";
+
+		if (tcu::hasDepthComponent(format.order))
+			stream << "depth: " << value.depthStencil.depth;
+
+		stream << ")";
+
+		return stream.str();
+	}
+	else
+		return clearColorToString(vkFormat, value.color);
+}
+
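+// Picks a random clear color within the representable range of the format.
+// Channels the format does not have are filled with sentinel values (NaN,
+// minimum int32 or maximum uint32) so accidental use of them is easy to spot.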
+VkClearColorValue randomColorClearValue (const Attachment& attachment, de::Random& rng)
+{
+	const float						clearNan		= tcu::Float32::nan().asFloat();
+	const tcu::TextureFormat		format			= mapVkFormat(attachment.getFormat());
+	const tcu::TextureChannelClass	channelClass	= tcu::getTextureChannelClass(format.type);
+	const tcu::BVec4				channelMask		= tcu::getTextureFormatChannelMask(format);
+	VkClearColorValue				clearColor;
+
+	switch (channelClass)
+	{
+		case tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER:
+		{
+			const tcu::IVec4 valueMin = tcu::getFormatMinIntValue(format);
+			const tcu::IVec4 valueMax = tcu::getFormatMaxIntValue(format);
+
+			for (int ndx = 0; ndx < 4; ndx++)
+			{
+				if (!channelMask[ndx])
+					clearColor.int32[ndx] = std::numeric_limits<deInt32>::min();
+				else
+					clearColor.int32[ndx] = rng.getInt(valueMin[ndx], valueMax[ndx]);
+			}
+			break;
+		}
+
+		case tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER:
+		{
+			const UVec4 valueMax = tcu::getFormatMaxUintValue(format);
+
+			for (int ndx = 0; ndx < 4; ndx++)
+			{
+				if (!channelMask[ndx])
+					clearColor.uint32[ndx] = std::numeric_limits<deUint32>::max();
+				else
+					clearColor.uint32[ndx] = rng.getUint32() % valueMax[ndx];
+			}
+			break;
+		}
+
+		case tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT:
+		case tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT:
+		case tcu::TEXTURECHANNELCLASS_FLOATING_POINT:
+		{
+			const tcu::TextureFormatInfo	formatInfo		= tcu::getTextureFormatInfo(format);
+
+			for (int ndx = 0; ndx < 4; ndx++)
+			{
+				if (!channelMask[ndx])
+					clearColor.float32[ndx] = clearNan;
+				else
+					clearColor.float32[ndx] = formatInfo.valueMin[ndx] + rng.getFloat() * (formatInfo.valueMax[ndx] - formatInfo.valueMin[ndx]);
+			}
+			break;
+		}
+
+		default:
+			DE_FATAL("Unknown channel class");
+	}
+
+	return clearColor;
+}
+
+VkAttachmentDescription createAttachmentDescription (const Attachment& attachment)
+{
+	const VkAttachmentDescription attachmentDescription =
+	{
+		0,											// flags
+
+		attachment.getFormat(),						// format
+		attachment.getSamples(),					// samples
+
+		attachment.getLoadOp(),						// loadOp
+		attachment.getStoreOp(),					// storeOp
+
+		attachment.getStencilLoadOp(),				// stencilLoadOp
+		attachment.getStencilStoreOp(),				// stencilStoreOp
+
+		attachment.getInitialLayout(),				// initialLayout
+		attachment.getFinalLayout(),				// finalLayout
+	};
+
+	return attachmentDescription;
+}
+
+VkAttachmentReference createAttachmentReference (const AttachmentReference& referenceInfo)
+{
+	const VkAttachmentReference reference =
+	{
+		referenceInfo.getAttachment(),	// attachment;
+		referenceInfo.getImageLayout()	// layout;
+	};
+
+	return reference;
+}
+
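+// Builds a VkSubpassDescription from a Subpass. The description only stores
+// pointers, so the VkAttachmentReference arrays are appended to caller-owned
+// vectors (attachmentReferenceLists: input, color, resolve, depth/stencil in
+// that order) to keep them alive until the render pass has been created.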
+VkSubpassDescription createSubpassDescription (const Subpass&					subpass,
+											   vector<VkAttachmentReference>*	attachmentReferenceLists,
+											   vector<deUint32>*				preserveAttachmentReferences)
+{
+	vector<VkAttachmentReference>&	inputAttachmentReferences			= attachmentReferenceLists[0];
+	vector<VkAttachmentReference>&	colorAttachmentReferences			= attachmentReferenceLists[1];
+	vector<VkAttachmentReference>&	resolveAttachmentReferences			= attachmentReferenceLists[2];
+	vector<VkAttachmentReference>&	depthStencilAttachmentReferences	= attachmentReferenceLists[3];
+
+	for (size_t attachmentNdx = 0; attachmentNdx < subpass.getColorAttachments().size(); attachmentNdx++)
+		colorAttachmentReferences.push_back(createAttachmentReference(subpass.getColorAttachments()[attachmentNdx]));
+
+	for (size_t attachmentNdx = 0; attachmentNdx < subpass.getInputAttachments().size(); attachmentNdx++)
+		inputAttachmentReferences.push_back(createAttachmentReference(subpass.getInputAttachments()[attachmentNdx]));
+
+	for (size_t attachmentNdx = 0; attachmentNdx < subpass.getResolveAttachments().size(); attachmentNdx++)
+		resolveAttachmentReferences.push_back(createAttachmentReference(subpass.getResolveAttachments()[attachmentNdx]));
+
+	depthStencilAttachmentReferences.push_back(createAttachmentReference(subpass.getDepthStencilAttachment()));
+
+	for (size_t attachmentNdx = 0; attachmentNdx < subpass.getPreserveAttachments().size(); attachmentNdx++)
+		preserveAttachmentReferences->push_back(subpass.getPreserveAttachments()[attachmentNdx].getAttachment());
+
+	DE_ASSERT(resolveAttachmentReferences.empty() || colorAttachmentReferences.size() == resolveAttachmentReferences.size());
+
+	{
+		const VkSubpassDescription subpassDescription =
+		{
+			subpass.getFlags(),																		// flags;
+			subpass.getPipelineBindPoint(),															// pipelineBindPoint;
+
+			(deUint32)inputAttachmentReferences.size(),												// inputCount;
+			inputAttachmentReferences.empty() ? DE_NULL : &inputAttachmentReferences[0],			// inputAttachments;
+
+			(deUint32)colorAttachmentReferences.size(),												// colorCount;
+			colorAttachmentReferences.empty() ? DE_NULL :  &colorAttachmentReferences[0],			// colorAttachments;
+			resolveAttachmentReferences.empty() ? DE_NULL : &resolveAttachmentReferences[0],		// resolveAttachments;
+
+			&depthStencilAttachmentReferences[0],													// pDepthStencilAttachment;
+			(deUint32)preserveAttachmentReferences->size(),											// preserveCount;
+			preserveAttachmentReferences->empty() ? DE_NULL : &(*preserveAttachmentReferences)[0]	// preserveAttachments;
+		};
+
+		return subpassDescription;
+	}
+}
+
+VkSubpassDependency createSubpassDependency	(const SubpassDependency& dependencyInfo)
+{
+	const VkSubpassDependency dependency =
+	{
+		dependencyInfo.getSrcPass(),			// srcSubpass;
+		dependencyInfo.getDstPass(),			// destSubpass;
+
+		dependencyInfo.getSrcStageMask(),		// srcStageMask;
+		dependencyInfo.getDstStageMask(),		// destStageMask;
+
+		dependencyInfo.getOutputMask(),			// outputMask;
+		dependencyInfo.getInputMask(),			// inputMask;
+
+		dependencyInfo.getFlags()				// dependencyFlags;
+	};
+
+	return dependency;
+}
+
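+// Translates a RenderPass description into a VkRenderPass. Each subpass gets
+// its own group of four attachment reference lists so the pointers stored in
+// the VkSubpassDescriptions remain valid until vkCreateRenderPass is called.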
+Move<VkRenderPass> createRenderPass (const DeviceInterface&	vk,
+									 VkDevice				device,
+									 const RenderPass&		renderPassInfo)
+{
+	const size_t							perSubpassAttachmentReferenceLists = 4;
+	vector<VkAttachmentDescription>			attachments;
+	vector<VkSubpassDescription>			subpasses;
+	vector<VkSubpassDependency>				dependencies;
+	vector<vector<VkAttachmentReference> >	attachmentReferenceLists(renderPassInfo.getSubpasses().size() * perSubpassAttachmentReferenceLists);
+	vector<vector<deUint32> >				preserveAttachments(renderPassInfo.getSubpasses().size());
+
+	for (size_t attachmentNdx = 0; attachmentNdx < renderPassInfo.getAttachments().size(); attachmentNdx++)
+		attachments.push_back(createAttachmentDescription(renderPassInfo.getAttachments()[attachmentNdx]));
+
+	for (size_t subpassNdx = 0; subpassNdx < renderPassInfo.getSubpasses().size(); subpassNdx++)
+		subpasses.push_back(createSubpassDescription(renderPassInfo.getSubpasses()[subpassNdx], &(attachmentReferenceLists[subpassNdx * perSubpassAttachmentReferenceLists]), &preserveAttachments[subpassNdx]));
+
+	for (size_t depNdx = 0; depNdx < renderPassInfo.getDependencies().size(); depNdx++)
+		dependencies.push_back(createSubpassDependency(renderPassInfo.getDependencies()[depNdx]));
+
+	{
+		const VkRenderPassCreateInfo	createInfo	=
+		{
+			VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+			DE_NULL,
+			(VkRenderPassCreateFlags)0u,
+			(deUint32)attachments.size(),
+			(attachments.empty() ? DE_NULL : &attachments[0]),
+			(deUint32)subpasses.size(),
+			(subpasses.empty() ? DE_NULL : &subpasses[0]),
+			(deUint32)dependencies.size(),
+			(dependencies.empty() ? DE_NULL : &dependencies[0])
+		};
+
+		return createRenderPass(vk, device, &createInfo);
+	}
+}
+
+Move<VkFramebuffer> createFramebuffer (const DeviceInterface&		vk,
+									   VkDevice						device,
+									   VkRenderPass					renderPass,
+									   const UVec2&					size,
+									   const vector<VkImageView>&	attachments)
+{
+	return createFramebuffer(vk, device, 0u, renderPass, (deUint32)attachments.size(), attachments.empty() ? DE_NULL : &attachments[0], size.x(), size.y(), 1u);
+}
+
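+// Creates a single-mip 2D image for an attachment; the color or depth/stencil
+// attachment usage bit is added automatically based on the format.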
+Move<VkImage> createAttachmentImage (const DeviceInterface&	vk,
+									 VkDevice				device,
+									 deUint32				queueIndex,
+									 const UVec2&			size,
+									 VkFormat				format,
+									 VkSampleCountFlagBits	samples,
+									 VkImageUsageFlags		usageFlags,
+									 VkImageLayout			layout)
+{
+	const VkExtent3D size_					= { size.x(), size.y(), 1u };
+	VkImageUsageFlags targetUsageFlags		= 0;
+	const tcu::TextureFormat textureFormat	= mapVkFormat(format);
+
+	if (tcu::hasDepthComponent(textureFormat.order) || tcu::hasStencilComponent(textureFormat.order))
+	{
+		targetUsageFlags |= vk::VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
+	}
+	else
+	{
+		targetUsageFlags |= vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
+	}
+
+	return createImage(vk, device,
+					   (VkImageCreateFlags)0,
+					   VK_IMAGE_TYPE_2D,
+					   format,
+					   size_,
+					   1u /* mipLevels */,
+					   1u /* arraySize */,
+					   samples,
+					   VK_IMAGE_TILING_OPTIMAL,
+					   usageFlags | targetUsageFlags,
+					   VK_SHARING_MODE_EXCLUSIVE,
+					   1,
+					   &queueIndex,
+					   layout);
+}
+
+de::MovePtr<Allocation> createImageMemory (const DeviceInterface&	vk,
+										   VkDevice					device,
+										   Allocator&				allocator,
+										   VkImage					image,
+										   bool						lazy)
+{
+	de::MovePtr<Allocation> allocation (allocator.allocate(getImageMemoryRequirements(vk, device, image), lazy ? MemoryRequirement::LazilyAllocated : MemoryRequirement::Any));
+	bindImageMemory(vk, device, image, allocation->getMemory(), allocation->getOffset());
+	return allocation;
+}
+
+Move<VkImageView> createImageAttachmentView (const DeviceInterface&	vk,
+											 VkDevice				device,
+											 VkImage				image,
+											 VkFormat				format,
+											 VkImageAspectFlags		aspect)
+{
+	const VkImageSubresourceRange range =
+	{
+		aspect,
+		0,
+		1,
+		0,
+		1
+	};
+
+	return createImageView(vk, device, 0u, image, VK_IMAGE_VIEW_TYPE_2D, format, makeComponentMappingRGBA(), range);
+}
+
+VkClearValue randomClearValue (const Attachment& attachment, de::Random& rng)
+{
+	const float					clearNan	= tcu::Float32::nan().asFloat();
+	const tcu::TextureFormat	format		= mapVkFormat(attachment.getFormat());
+
+	if (tcu::hasStencilComponent(format.order) || tcu::hasDepthComponent(format.order))
+	{
+		VkClearValue clearValue;
+
+		clearValue.depthStencil.depth	= clearNan;
+		clearValue.depthStencil.stencil	= 255;
+
+		if (tcu::hasStencilComponent(format.order))
+			clearValue.depthStencil.stencil	= rng.getInt(0, 255);
+
+		if (tcu::hasDepthComponent(format.order))
+			clearValue.depthStencil.depth	= rng.getFloat();
+
+		return clearValue;
+	}
+	else
+	{
+		VkClearValue clearValue;
+
+		clearValue.color = randomColorClearValue(attachment, rng);
+
+		return clearValue;
+	}
+}
+
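+// Owns everything needed for one attachment: image, memory, image view and,
+// for non-lazy attachments, host-visible readback buffers for verification.
+// Combined depth/stencil formats get two buffers (depth in the primary one,
+// stencil in the secondary one); lazily allocated attachments get none.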
+class AttachmentResources
+{
+public:
+	AttachmentResources (const DeviceInterface&		vk,
+						 VkDevice					device,
+						 Allocator&					allocator,
+						 deUint32					queueIndex,
+						 const UVec2&				size,
+						 const Attachment&			attachmentInfo,
+						 bool						lazy)
+		: m_image			(createAttachmentImage(vk, device, queueIndex, size, attachmentInfo.getFormat(), attachmentInfo.getSamples(), lazy ? VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT : VK_IMAGE_USAGE_TRANSFER_SRC_BIT, VK_IMAGE_LAYOUT_UNDEFINED))
+		, m_imageMemory		(createImageMemory(vk, device, allocator, *m_image, lazy))
+		, m_attachmentView	(createImageAttachmentView(vk, device, *m_image, attachmentInfo.getFormat(), getImageAspectFlags(attachmentInfo.getFormat())))
+	{
+		if (!lazy)
+		{
+			const tcu::TextureFormat format = mapVkFormat(attachmentInfo.getFormat());
+
+			if (tcu::hasDepthComponent(format.order) && tcu::hasStencilComponent(format.order))
+			{
+				const tcu::TextureFormat	depthFormat		= getDepthCopyFormat(attachmentInfo.getFormat());
+				const tcu::TextureFormat	stencilFormat	= getStencilCopyFormat(attachmentInfo.getFormat());
+
+				m_bufferSize			= size.x() * size.y() * depthFormat.getPixelSize();
+				m_secondaryBufferSize	= size.x() * size.y() * stencilFormat.getPixelSize();
+
+				m_buffer				= createBuffer(vk, device, 0, m_bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT, VK_SHARING_MODE_EXCLUSIVE, 1, &queueIndex);
+				m_bufferMemory			= allocator.allocate(getBufferMemoryRequirements(vk, device, *m_buffer), MemoryRequirement::HostVisible);
+
+				bindBufferMemory(vk, device, *m_buffer, m_bufferMemory->getMemory(), m_bufferMemory->getOffset());
+
+				m_secondaryBuffer		= createBuffer(vk, device, 0, m_secondaryBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT, VK_SHARING_MODE_EXCLUSIVE, 1, &queueIndex);
+				m_secondaryBufferMemory	= allocator.allocate(getBufferMemoryRequirements(vk, device, *m_secondaryBuffer), MemoryRequirement::HostVisible);
+
+				bindBufferMemory(vk, device, *m_secondaryBuffer, m_secondaryBufferMemory->getMemory(), m_secondaryBufferMemory->getOffset());
+			}
+			else
+			{
+				m_bufferSize	= size.x() * size.y() * format.getPixelSize();
+
+				m_buffer		= createBuffer(vk, device, 0, m_bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT, VK_SHARING_MODE_EXCLUSIVE, 1, &queueIndex);
+				m_bufferMemory	= allocator.allocate(getBufferMemoryRequirements(vk, device, *m_buffer), MemoryRequirement::HostVisible);
+
+				bindBufferMemory(vk, device, *m_buffer, m_bufferMemory->getMemory(), m_bufferMemory->getOffset());
+			}
+		}
+	}
+
+	~AttachmentResources (void)
+	{
+	}
+
+	VkImageView getAttachmentView (void) const
+	{
+		return *m_attachmentView;
+	}
+
+	VkImage getImage (void) const
+	{
+		return *m_image;
+	}
+
+	VkBuffer getBuffer (void) const
+	{
+		DE_ASSERT(*m_buffer != DE_NULL);
+		return *m_buffer;
+	}
+
+	VkDeviceSize getBufferSize (void) const
+	{
+		DE_ASSERT(*m_buffer != DE_NULL);
+		return m_bufferSize;
+	}
+
+	const Allocation& getResultMemory (void) const
+	{
+		DE_ASSERT(m_bufferMemory);
+		return *m_bufferMemory;
+	}
+
+	VkBuffer getSecondaryBuffer (void) const
+	{
+		DE_ASSERT(*m_secondaryBuffer != DE_NULL);
+		return *m_secondaryBuffer;
+	}
+
+	VkDeviceSize getSecondaryBufferSize (void) const
+	{
+		DE_ASSERT(*m_secondaryBuffer != DE_NULL);
+		return m_secondaryBufferSize;
+	}
+
+	const Allocation& getSecondaryResultMemory (void) const
+	{
+		DE_ASSERT(m_secondaryBufferMemory);
+		return *m_secondaryBufferMemory;
+	}
+
+private:
+	const Unique<VkImage>			m_image;
+	const UniquePtr<Allocation>		m_imageMemory;
+	const Unique<VkImageView>		m_attachmentView;
+
+	Move<VkBuffer>					m_buffer;
+	VkDeviceSize					m_bufferSize;
+	de::MovePtr<Allocation>			m_bufferMemory;
+
+	Move<VkBuffer>					m_secondaryBuffer;
+	VkDeviceSize					m_secondaryBufferSize;
+	de::MovePtr<Allocation>			m_secondaryBufferMemory;
+};
+
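+// Copies data into a host-visible allocation and flushes the mapped range so
+// the writes are visible to the device.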
+void uploadBufferData (const DeviceInterface&	vk,
+					   VkDevice					device,
+					   const Allocation&		memory,
+					   size_t					size,
+					   const void*				data)
+{
+	const VkMappedMemoryRange range =
+	{
+		VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,	// sType;
+		DE_NULL,								// pNext;
+		memory.getMemory(),						// mem;
+		memory.getOffset(),						// offset;
+		(VkDeviceSize)size						// size;
+	};
+	void* const ptr = memory.getHostPtr();
+
+	deMemcpy(ptr, data, size);
+	VK_CHECK(vk.flushMappedMemoryRanges(device, 1, &range));
+}
+
+VkImageAspectFlagBits getPrimaryImageAspect (tcu::TextureFormat::ChannelOrder order)
+{
+	DE_STATIC_ASSERT(tcu::TextureFormat::CHANNELORDER_LAST == 21);
+
+	switch (order)
+	{
+		case tcu::TextureFormat::D:
+		case tcu::TextureFormat::DS:
+			return VK_IMAGE_ASPECT_DEPTH_BIT;
+
+		case tcu::TextureFormat::S:
+			return VK_IMAGE_ASPECT_STENCIL_BIT;
+
+		default:
+			return VK_IMAGE_ASPECT_COLOR_BIT;
+	}
+}
+
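+// Axis-aligned quad between corners posA and posB, stored as two triangles
+// (six vertices); used as vertex data for the draw subpasses.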
+class RenderQuad
+{
+public:
+	RenderQuad (const Vec4& posA, const Vec4& posB)
+		: m_vertices(6)
+	{
+		m_vertices[0] = posA;
+		m_vertices[1] = Vec4(posA[0], posB[1], posA[2], posA[3]);
+		m_vertices[2] = posB;
+
+		m_vertices[3] = posB;
+		m_vertices[4] = Vec4(posB[0], posA[1], posB[2], posA[3]);
+		m_vertices[5] = posA;
+	}
+
+	const Vec4&		getCornerA			(void) const
+	{
+		return m_vertices[0];
+	}
+
+	const Vec4&		getCornerB			(void) const
+	{
+		return m_vertices[2];
+	}
+
+	const void*		getVertexPointer	(void) const
+	{
+		return &m_vertices[0];
+	}
+
+	size_t			getVertexDataSize	(void) const
+	{
+		return sizeof(Vec4) * m_vertices.size();
+	}
+
+private:
+	vector<Vec4>	m_vertices;
+};
+
+class ColorClear
+{
+public:
+	ColorClear	(const UVec2&				offset,
+				 const UVec2&				size,
+				 const VkClearColorValue&	color)
+		: m_offset	(offset)
+		, m_size	(size)
+		, m_color	(color)
+	{
+	}
+
+	const UVec2&				getOffset		(void) const { return m_offset;		}
+	const UVec2&				getSize			(void) const { return m_size;		}
+	const VkClearColorValue&	getColor		(void) const { return m_color;		}
+
+private:
+	UVec2				m_offset;
+	UVec2				m_size;
+	VkClearColorValue	m_color;
+};
+
+class DepthStencilClear
+{
+public:
+	DepthStencilClear	(const UVec2&				offset,
+						 const UVec2&				size,
+						 float						depth,
+						 deUint32					stencil)
+		: m_offset	(offset)
+		, m_size	(size)
+		, m_depth	(depth)
+		, m_stencil	(stencil)
+	{
+	}
+
+	const UVec2&		getOffset		(void) const { return m_offset;		}
+	const UVec2&		getSize			(void) const { return m_size;		}
+	float				getDepth		(void) const { return m_depth;		}
+	deUint32			getStencil		(void) const { return m_stencil;	}
+
+private:
+	UVec2				m_offset;
+	UVec2				m_size;
+
+	float				m_depth;
+	deUint32			m_stencil;
+};
+
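+// Per-subpass rendering parameters: viewport, the optional quad to draw, the
+// color and depth/stencil clears, and copies of the subpass attachment
+// references and descriptions so pipelines and clear commands can be built
+// without referring back to the RenderPass object.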
+class SubpassRenderInfo
+{
+public:
+	SubpassRenderInfo	(const RenderPass&					renderPass,
+						 deUint32							subpassIndex,
+
+						 bool								isSecondary_,
+
+						 const UVec2&						viewportOffset,
+						 const UVec2&						viewportSize,
+
+						 const Maybe<RenderQuad>&			renderQuad,
+						 const vector<ColorClear>&			colorClears,
+						 const Maybe<DepthStencilClear>&	depthStencilClear)
+		: m_viewportOffset		(viewportOffset)
+		, m_viewportSize		(viewportSize)
+		, m_subpassIndex		(subpassIndex)
+		, m_isSecondary			(isSecondary_)
+		, m_flags				(renderPass.getSubpasses()[subpassIndex].getFlags())
+		, m_renderQuad			(renderQuad)
+		, m_colorClears			(colorClears)
+		, m_depthStencilClear	(depthStencilClear)
+		, m_colorAttachments	(renderPass.getSubpasses()[subpassIndex].getColorAttachments())
+	{
+		for (deUint32 attachmentNdx = 0; attachmentNdx < (deUint32)m_colorAttachments.size(); attachmentNdx++)
+			m_colorAttachmentInfo.push_back(renderPass.getAttachments()[m_colorAttachments[attachmentNdx].getAttachment()]);
+
+		if (renderPass.getSubpasses()[subpassIndex].getDepthStencilAttachment().getAttachment() != VK_ATTACHMENT_UNUSED)
+		{
+			m_depthStencilAttachment		= tcu::just(renderPass.getSubpasses()[subpassIndex].getDepthStencilAttachment());
+			m_depthStencilAttachmentInfo	= tcu::just(renderPass.getAttachments()[renderPass.getSubpasses()[subpassIndex].getDepthStencilAttachment().getAttachment()]);
+		}
+	}
+
+	const UVec2&						getViewportOffset				(void) const { return m_viewportOffset;		}
+	const UVec2&						getViewportSize					(void) const { return m_viewportSize;		}
+
+	deUint32							getSubpassIndex					(void) const { return m_subpassIndex;		}
+	bool								isSecondary						(void) const { return m_isSecondary;		}
+
+	const Maybe<RenderQuad>&			getRenderQuad					(void) const { return m_renderQuad;			}
+	const vector<ColorClear>&			getColorClears					(void) const { return m_colorClears;		}
+	const Maybe<DepthStencilClear>&		getDepthStencilClear			(void) const { return m_depthStencilClear;	}
+
+	deUint32							getColorAttachmentCount			(void) const { return (deUint32)m_colorAttachments.size(); }
+	VkImageLayout						getColorAttachmentLayout		(deUint32 attachmentNdx) const { return m_colorAttachments[attachmentNdx].getImageLayout(); }
+	deUint32							getColorAttachmentIndex			(deUint32 attachmentNdx) const { return m_colorAttachments[attachmentNdx].getAttachment(); }
+	const Attachment&					getColorAttachment				(deUint32 attachmentNdx) const { return m_colorAttachmentInfo[attachmentNdx]; }
+	Maybe<VkImageLayout>				getDepthStencilAttachmentLayout	(void) const { return m_depthStencilAttachment ? tcu::just(m_depthStencilAttachment->getImageLayout()) : tcu::nothing<VkImageLayout>(); }
+	Maybe<deUint32>						getDepthStencilAttachmentIndex	(void) const { return m_depthStencilAttachment ? tcu::just(m_depthStencilAttachment->getAttachment()) : tcu::nothing<deUint32>(); };
+	const Maybe<Attachment>&			getDepthStencilAttachment		(void) const { return m_depthStencilAttachmentInfo; }
+	VkSubpassDescriptionFlags			getSubpassFlags					(void) const { return m_flags; }
+private:
+	UVec2								m_viewportOffset;
+	UVec2								m_viewportSize;
+
+	deUint32							m_subpassIndex;
+	bool								m_isSecondary;
+	VkSubpassDescriptionFlags			m_flags;
+
+	Maybe<RenderQuad>					m_renderQuad;
+	vector<ColorClear>					m_colorClears;
+	Maybe<DepthStencilClear>			m_depthStencilClear;
+
+	vector<AttachmentReference>			m_colorAttachments;
+	vector<Attachment>					m_colorAttachmentInfo;
+
+	Maybe<AttachmentReference>			m_depthStencilAttachment;
+	Maybe<Attachment>					m_depthStencilAttachmentInfo;
+};
+
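+// Creates the graphics pipeline used to draw the subpass quad. The sample
+// count is taken from the subpass attachments (which must all agree), depth
+// and stencil tests use VK_COMPARE_OP_ALWAYS so the quad always writes, and
+// blending is disabled.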
+Move<VkPipeline> createSubpassPipeline (const DeviceInterface&		vk,
+										VkDevice					device,
+										VkRenderPass				renderPass,
+										VkShaderModule				vertexShaderModule,
+										VkShaderModule				fragmentShaderModule,
+										VkPipelineLayout			pipelineLayout,
+										const SubpassRenderInfo&	renderInfo)
+{
+	const VkSpecializationInfo emptyShaderSpecializations =
+	{
+		0u,			// mapEntryCount
+		DE_NULL,	// pMap
+		0u,			// dataSize
+		DE_NULL,	// pData
+	};
+
+	Maybe<VkSampleCountFlagBits>				rasterSamples;
+	vector<VkPipelineColorBlendAttachmentState>	attachmentBlendStates;
+
+	for (deUint32 attachmentNdx = 0; attachmentNdx < renderInfo.getColorAttachmentCount(); attachmentNdx++)
+	{
+		const Attachment&			attachment		= renderInfo.getColorAttachment(attachmentNdx);
+
+		DE_ASSERT(!rasterSamples || *rasterSamples == attachment.getSamples());
+
+		rasterSamples = attachment.getSamples();
+
+		{
+			const VkPipelineColorBlendAttachmentState	attachmentBlendState =
+			{
+				VK_FALSE,																								// blendEnable
+				VK_BLEND_FACTOR_SRC_ALPHA,																				// srcBlendColor
+				VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,																	// destBlendColor
+				VK_BLEND_OP_ADD,																						// blendOpColor
+				VK_BLEND_FACTOR_ONE,																					// srcBlendAlpha
+				VK_BLEND_FACTOR_ONE,																					// destBlendAlpha
+				VK_BLEND_OP_ADD,																						// blendOpAlpha
+				VK_COLOR_COMPONENT_R_BIT|VK_COLOR_COMPONENT_G_BIT|VK_COLOR_COMPONENT_B_BIT|VK_COLOR_COMPONENT_A_BIT,	// channelWriteMask
+			};
+
+			attachmentBlendStates.push_back(attachmentBlendState);
+		}
+	}
+
+	if (renderInfo.getDepthStencilAttachment())
+	{
+		const Attachment& attachment = *renderInfo.getDepthStencilAttachment();
+
+		DE_ASSERT(!rasterSamples || *rasterSamples == attachment.getSamples());
+		rasterSamples = attachment.getSamples();
+	}
+
+	// If there are no attachments, use a single sample count
+	if (!rasterSamples)
+		rasterSamples = VK_SAMPLE_COUNT_1_BIT;
+
+	const VkPipelineShaderStageCreateInfo shaderStages[2] =
+	{
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// sType
+			DE_NULL,												// pNext
+			(VkPipelineShaderStageCreateFlags)0u,
+			VK_SHADER_STAGE_VERTEX_BIT,								// stage
+			vertexShaderModule,										// shader
+			"main",
+			&emptyShaderSpecializations
+		},
+		{
+			VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// sType
+			DE_NULL,												// pNext
+			(VkPipelineShaderStageCreateFlags)0u,
+			VK_SHADER_STAGE_FRAGMENT_BIT,							// stage
+			fragmentShaderModule,									// shader
+			"main",
+			&emptyShaderSpecializations
+		}
+	};
+	const VkVertexInputBindingDescription vertexBinding =
+	{
+		0u,															// binding
+		(deUint32)sizeof(tcu::Vec4),								// strideInBytes
+		VK_VERTEX_INPUT_RATE_VERTEX,								// stepRate
+	};
+	const VkVertexInputAttributeDescription vertexAttrib =
+	{
+		0u,															// location
+		0u,															// binding
+		VK_FORMAT_R32G32B32A32_SFLOAT,								// format
+		0u,															// offsetInBytes
+	};
+	const VkPipelineVertexInputStateCreateInfo vertexInputState =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,	//	sType
+		DE_NULL,													//	pNext
+		(VkPipelineVertexInputStateCreateFlags)0u,
+		1u,															//	bindingCount
+		&vertexBinding,												//	pVertexBindingDescriptions
+		1u,															//	attributeCount
+		&vertexAttrib,												//	pVertexAttributeDescriptions
+	};
+	const VkPipelineInputAssemblyStateCreateInfo inputAssemblyState =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// sType
+		DE_NULL,														// pNext
+		(VkPipelineInputAssemblyStateCreateFlags)0u,
+		VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,							// topology
+		VK_FALSE,														// primitiveRestartEnable
+	};
+	const VkViewport viewport =
+	{
+		(float)renderInfo.getViewportOffset().x(),	(float)renderInfo.getViewportOffset().y(),
+		(float)renderInfo.getViewportSize().x(),	(float)renderInfo.getViewportSize().y(),
+		0.0f, 1.0f
+	};
+	const VkRect2D scissor =
+	{
+		{ (deInt32)renderInfo.getViewportOffset().x(),	(deInt32)renderInfo.getViewportOffset().y() },
+		{ renderInfo.getViewportSize().x(),				renderInfo.getViewportSize().y() }
+	};
+	const VkPipelineViewportStateCreateInfo viewportState =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+		DE_NULL,
+		(VkPipelineViewportStateCreateFlags)0u,
+		1u,
+		&viewport,
+		1u,
+		&scissor
+	};
+	const VkPipelineRasterizationStateCreateInfo rasterState =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// sType
+		DE_NULL,														// pNext
+		(VkPipelineRasterizationStateCreateFlags)0u,
+		VK_TRUE,														// depthClipEnable
+		VK_FALSE,														// rasterizerDiscardEnable
+		VK_POLYGON_MODE_FILL,											// fillMode
+		VK_CULL_MODE_NONE,												// cullMode
+		VK_FRONT_FACE_COUNTER_CLOCKWISE,								// frontFace
+		VK_FALSE,														// depthBiasEnable
+		0.0f,															// depthBias
+		0.0f,															// depthBiasClamp
+		0.0f,															// slopeScaledDepthBias
+		1.0f															// lineWidth
+	};
+	const VkPipelineMultisampleStateCreateInfo multisampleState =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,		// sType
+		DE_NULL,														// pNext
+		(VkPipelineMultisampleStateCreateFlags)0u,
+		*rasterSamples,													// rasterSamples
+		VK_FALSE,														// sampleShadingEnable
+		0.0f,															// minSampleShading
+		DE_NULL,														// pSampleMask
+		VK_FALSE,														// alphaToCoverageEnable
+		VK_FALSE,														// alphaToOneEnable
+	};
+	const VkPipelineDepthStencilStateCreateInfo depthStencilState =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,	// sType
+		DE_NULL,													// pNext
+		(VkPipelineDepthStencilStateCreateFlags)0u,
+		VK_TRUE,													// depthTestEnable
+		VK_TRUE,													// depthWriteEnable
+		VK_COMPARE_OP_ALWAYS,										// depthCompareOp
+		VK_FALSE,													// depthBoundsEnable
+		VK_TRUE,													// stencilTestEnable
+		{
+			VK_STENCIL_OP_REPLACE,									// stencilFailOp
+			VK_STENCIL_OP_REPLACE,									// stencilPassOp
+			VK_STENCIL_OP_REPLACE,									// stencilDepthFailOp
+			VK_COMPARE_OP_ALWAYS,									// stencilCompareOp
+			~0u,													// stencilCompareMask
+			~0u,													// stencilWriteMask
+			STENCIL_VALUE											// stencilReference
+		},															// front
+		{
+			VK_STENCIL_OP_REPLACE,									// stencilFailOp
+			VK_STENCIL_OP_REPLACE,									// stencilPassOp
+			VK_STENCIL_OP_REPLACE,									// stencilDepthFailOp
+			VK_COMPARE_OP_ALWAYS,									// stencilCompareOp
+			~0u,													// stencilCompareMask
+			~0u,													// stencilWriteMask
+			STENCIL_VALUE											// stencilReference
+		},															// back
+
+		-1.0f,														// minDepthBounds;
+		1.0f														// maxDepthBounds;
+	};
+	const VkPipelineColorBlendStateCreateInfo blendState =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,			// sType
+		DE_NULL,															// pNext
+		(VkPipelineColorBlendStateCreateFlags)0u,
+		VK_FALSE,															// logicOpEnable
+		VK_LOGIC_OP_COPY,													// logicOp
+		(deUint32)attachmentBlendStates.size(),								// attachmentCount
+		attachmentBlendStates.empty() ? DE_NULL : &attachmentBlendStates[0],// pAttachments
+		{ 0.0f, 0.0f, 0.0f, 0.0f }											// blendConst
+	};
+	const VkPipelineDynamicStateCreateInfo dynamicState =
+	{
+		VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
+		DE_NULL,
+		(VkPipelineDynamicStateCreateFlags)0u,
+		0,
+		DE_NULL
+	};
+	const VkGraphicsPipelineCreateInfo createInfo =
+	{
+		VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,		// sType
+		DE_NULL,												// pNext
+		(VkPipelineCreateFlags)0u,
+
+		2,														// stageCount
+		shaderStages,											// pStages
+
+		&vertexInputState,										// pVertexInputState
+		&inputAssemblyState,									// pInputAssemblyState
+		DE_NULL,												// pTessellationState
+		&viewportState,											// pViewportState
+		&rasterState,											// pRasterState
+		&multisampleState,										// pMultisampleState
+		&depthStencilState,										// pDepthStencilState
+		&blendState,											// pColorBlendState
+		&dynamicState,											// pDynamicState
+		pipelineLayout,											// layout
+
+		renderPass,												// renderPass
+		renderInfo.getSubpassIndex(),							// subpass
+		DE_NULL,												// basePipelineHandle
+		0u														// basePipelineIndex
+	};
+
+	return createGraphicsPipeline(vk, device, DE_NULL, &createInfo);
+}
+
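+// Holds the per-subpass GPU objects (shaders, pipeline layout, pipeline,
+// vertex buffer) and records the subpass clear/draw commands either inline
+// into the primary command buffer or into a prerecorded secondary command
+// buffer, depending on the test configuration.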
+class SubpassRenderer
+{
+public:
+	SubpassRenderer (Context&					context,
+					 const DeviceInterface&		vk,
+					 VkDevice					device,
+					 Allocator&					allocator,
+					 VkRenderPass				renderPass,
+					 VkFramebuffer				framebuffer,
+					 VkCommandPool				commandBufferPool,
+					 deUint32					queueFamilyIndex,
+					 const SubpassRenderInfo&	renderInfo)
+		: m_renderInfo	(renderInfo)
+	{
+		const deUint32 subpassIndex = renderInfo.getSubpassIndex();
+
+		if (renderInfo.getRenderQuad())
+		{
+			const RenderQuad&					renderQuad				= *renderInfo.getRenderQuad();
+			const VkPipelineLayoutCreateInfo	pipelineLayoutParams	=
+			{
+				VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,	// sType;
+				DE_NULL,										// pNext;
+				(vk::VkPipelineLayoutCreateFlags)0,
+				0u,												// descriptorSetCount;
+				DE_NULL,										// pSetLayouts;
+				0u,												// pushConstantRangeCount;
+				DE_NULL,										// pPushConstantRanges;
+			};
+
+			m_vertexShaderModule	= createShaderModule(vk, device, context.getBinaryCollection().get(de::toString(subpassIndex) + "-vert"), 0u);
+			m_fragmentShaderModule	= createShaderModule(vk, device, context.getBinaryCollection().get(de::toString(subpassIndex) + "-frag"), 0u);
+			m_pipelineLayout		= createPipelineLayout(vk, device, &pipelineLayoutParams);
+			m_pipeline				= createSubpassPipeline(vk, device, renderPass, *m_vertexShaderModule, *m_fragmentShaderModule, *m_pipelineLayout, m_renderInfo);
+
+			m_vertexBuffer			= createBuffer(vk, device, 0u, (VkDeviceSize)renderQuad.getVertexDataSize(), VK_BUFFER_USAGE_VERTEX_BUFFER_BIT, VK_SHARING_MODE_EXCLUSIVE, 1u, &queueFamilyIndex);
+			m_vertexBufferMemory	= allocator.allocate(getBufferMemoryRequirements(vk, device, *m_vertexBuffer), MemoryRequirement::HostVisible);
+
+			bindBufferMemory(vk, device, *m_vertexBuffer, m_vertexBufferMemory->getMemory(), m_vertexBufferMemory->getOffset());
+			uploadBufferData(vk, device, *m_vertexBufferMemory, renderQuad.getVertexDataSize(), renderQuad.getVertexPointer());
+		}
+
+		if (renderInfo.isSecondary())
+		{
+			m_commandBuffer = allocateCommandBuffer(vk, device, commandBufferPool, VK_COMMAND_BUFFER_LEVEL_SECONDARY);
+
+			beginCommandBuffer(vk, *m_commandBuffer, vk::VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT, renderPass, subpassIndex, framebuffer, VK_FALSE, (VkQueryControlFlags)0, (VkQueryPipelineStatisticFlags)0);
+			pushRenderCommands(vk, *m_commandBuffer);
+			endCommandBuffer(vk, *m_commandBuffer);
+		}
+	}
+
+	bool isSecondary (void) const
+	{
+		return m_commandBuffer;
+	}
+
+	VkCommandBuffer getCommandBuffer (void) const
+	{
+		DE_ASSERT(isSecondary());
+		return *m_commandBuffer;
+	}
+
+	void pushRenderCommands (const DeviceInterface&		vk,
+							 VkCommandBuffer			commandBuffer)
+	{
+		if (!m_renderInfo.getColorClears().empty())
+		{
+			const vector<ColorClear>&	colorClears	(m_renderInfo.getColorClears());
+
+			for (deUint32 attachmentNdx = 0; attachmentNdx < m_renderInfo.getColorAttachmentCount(); attachmentNdx++)
+			{
+				const ColorClear&		colorClear	= colorClears[attachmentNdx];
+				const VkClearAttachment	attachment	=
+				{
+					VK_IMAGE_ASPECT_COLOR_BIT,
+					attachmentNdx,
+					makeClearValue(colorClear.getColor()),
+				};
+				const VkClearRect		rect		=
+				{
+					{
+						{ (deInt32)colorClear.getOffset().x(),	(deInt32)colorClear.getOffset().y()	},
+						{ colorClear.getSize().x(),				colorClear.getSize().y()			}
+					},					// rect
+					0u,					// baseArrayLayer
+					1u,					// layerCount
+				};
+
+				vk.cmdClearAttachments(commandBuffer, 1u, &attachment, 1u, &rect);
+			}
+		}
+
+		if (m_renderInfo.getDepthStencilClear())
+		{
+			const DepthStencilClear&		depthStencilClear	= *m_renderInfo.getDepthStencilClear();
+			const deUint32					attachmentNdx		= m_renderInfo.getColorAttachmentCount();
+			tcu::TextureFormat				format				= mapVkFormat(m_renderInfo.getDepthStencilAttachment()->getFormat());
+			const VkClearAttachment			attachment			=
+			{
+				(VkImageAspectFlags)((hasDepthComponent(format.order) ? VK_IMAGE_ASPECT_DEPTH_BIT : 0)
+					| (hasStencilComponent(format.order) ? VK_IMAGE_ASPECT_STENCIL_BIT : 0)),
+				attachmentNdx,
+				makeClearValueDepthStencil(depthStencilClear.getDepth(), depthStencilClear.getStencil())
+			};
+			const VkClearRect				rect				=
+			{
+				{
+					{ (deInt32)depthStencilClear.getOffset().x(),	(deInt32)depthStencilClear.getOffset().y()	},
+					{ depthStencilClear.getSize().x(),				depthStencilClear.getSize().y()				}
+				},							// rect
+				0u,							// baseArrayLayer
+				1u,							// layerCount
+			};
+
+			vk.cmdClearAttachments(commandBuffer, 1u, &attachment, 1u, &rect);
+		}
+
+		if (m_renderInfo.getRenderQuad())
+		{
+			const VkDeviceSize	offset			= 0;
+			const VkBuffer		vertexBuffer	= *m_vertexBuffer;
+
+			vk.cmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+			vk.cmdBindVertexBuffers(commandBuffer, 0u, 1u, &vertexBuffer, &offset);
+			vk.cmdDraw(commandBuffer, 6u, 1u, 0u, 0u);
+		}
+	}
+
+private:
+	const SubpassRenderInfo				m_renderInfo;
+	Move<VkCommandBuffer>				m_commandBuffer;
+	Move<VkPipeline>					m_pipeline;
+	Move<VkPipelineLayout>				m_pipelineLayout;
+
+	Move<VkShaderModule>				m_vertexShaderModule;
+
+	Move<VkShaderModule>				m_fragmentShaderModule;
+
+	Move<VkBuffer>						m_vertexBuffer;
+	de::MovePtr<Allocation>				m_vertexBufferMemory;
+};
+
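+// Records the commands that give attachments their initial contents: images
+// with clear values are transitioned to TRANSFER_DST_OPTIMAL and cleared with
+// vkCmdClearColorImage/vkCmdClearDepthStencilImage, then every attachment is
+// transitioned to the initial layout declared in its description.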
+void pushImageInitializationCommands (const DeviceInterface&								vk,
+									  VkCommandBuffer										commandBuffer,
+									  const vector<Attachment>&								attachmentInfo,
+									  const vector<de::SharedPtr<AttachmentResources> >&	attachmentResources,
+									  deUint32												queueIndex,
+									  const vector<Maybe<VkClearValue> >&					clearValues)
+{
+	{
+		vector<VkImageMemoryBarrier>	initializeLayouts;
+
+		for (size_t attachmentNdx = 0; attachmentNdx < attachmentInfo.size(); attachmentNdx++)
+		{
+			if (!clearValues[attachmentNdx])
+				continue;
+
+			const VkImageMemoryBarrier barrier =
+			{
+				VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,								// sType;
+				DE_NULL,															// pNext;
+
+				getAllMemoryWriteFlags(),											// srcAccessMask
+				getAllMemoryReadFlags(),											// dstAccessMask
+
+				VK_IMAGE_LAYOUT_UNDEFINED,											// oldLayout
+				VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,								// newLayout;
+
+				queueIndex,															// srcQueueFamilyIndex;
+				queueIndex,															// destQueueFamilyIndex;
+
+				attachmentResources[attachmentNdx]->getImage(),						// image;
+				{																	// subresourceRange;
+					getImageAspectFlags(attachmentInfo[attachmentNdx].getFormat()),	// aspect;
+					0,																// baseMipLevel;
+					1,																// mipLevels;
+					0,																// baseArraySlice;
+					1																// arraySize;
+				}
+			};
+
+			initializeLayouts.push_back(barrier);
+		}
+
+		if (!initializeLayouts.empty())
+			vk.cmdPipelineBarrier(commandBuffer, (VkPipelineStageFlags)0, (VkPipelineStageFlags)0, (VkDependencyFlags)0,
+								  0, (const VkMemoryBarrier*)DE_NULL,
+								  0, (const VkBufferMemoryBarrier*)DE_NULL,
+								  (deUint32)initializeLayouts.size(), &initializeLayouts[0]);
+	}
+
+	for (size_t attachmentNdx = 0; attachmentNdx < attachmentInfo.size(); attachmentNdx++)
+	{
+		if (!clearValues[attachmentNdx])
+			continue;
+
+		const tcu::TextureFormat format = mapVkFormat(attachmentInfo[attachmentNdx].getFormat());
+
+		if (hasStencilComponent(format.order) || hasDepthComponent(format.order))
+		{
+			const float						clearNan		= tcu::Float32::nan().asFloat();
+			const float						clearDepth		= hasDepthComponent(format.order) ? clearValues[attachmentNdx]->depthStencil.depth : clearNan;
+			const deUint32					clearStencil	= hasStencilComponent(format.order) ? clearValues[attachmentNdx]->depthStencil.stencil : ~0u;
+			const VkClearDepthStencilValue	depthStencil	=
+			{
+				clearDepth,
+				clearStencil
+			};
+			const VkImageSubresourceRange range =
+			{
+				(VkImageAspectFlags)((hasDepthComponent(format.order) ? VK_IMAGE_ASPECT_DEPTH_BIT : 0)
+									 | (hasStencilComponent(format.order) ? VK_IMAGE_ASPECT_STENCIL_BIT : 0)),
+				0,
+				1,
+				0,
+				1
+			};
+
+			vk.cmdClearDepthStencilImage(commandBuffer, attachmentResources[attachmentNdx]->getImage(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &depthStencil, 1, &range);
+		}
+		else
+		{
+			const VkImageSubresourceRange	range		=
+			{
+				VK_IMAGE_ASPECT_COLOR_BIT,				// aspectMask;
+				0,										// baseMipLevel;
+				1,										// mipLevels;
+				0,										// baseArrayLayer;
+				1										// layerCount;
+			};
+			const VkClearColorValue			clearColor	= clearValues[attachmentNdx]->color;
+
+			vk.cmdClearColorImage(commandBuffer, attachmentResources[attachmentNdx]->getImage(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &clearColor, 1, &range);
+		}
+	}
+
+	{
+		vector<VkImageMemoryBarrier>	renderPassLayouts;
+
+		for (size_t attachmentNdx = 0; attachmentNdx < attachmentInfo.size(); attachmentNdx++)
+		{
+			const VkImageMemoryBarrier barrier =
+			{
+				VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,								// sType;
+				DE_NULL,															// pNext;
+
+				getAllMemoryWriteFlags(),											// srcAccessMask
+				getAllMemoryReadFlags(),											// dstAccessMask
+
+				clearValues[attachmentNdx] ?
+					VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
+					: VK_IMAGE_LAYOUT_UNDEFINED,									// oldLayout
+
+				attachmentInfo[attachmentNdx].getInitialLayout(),					// newLayout;
+
+				queueIndex,															// srcQueueFamilyIndex;
+				queueIndex,															// destQueueFamilyIndex;
+
+				attachmentResources[attachmentNdx]->getImage(),						// image;
+				{																	// subresourceRange;
+					getImageAspectFlags(attachmentInfo[attachmentNdx].getFormat()),	// aspect;
+					0,																// baseMipLevel;
+					1,																// mipLevels;
+					0,																// baseArraySlice;
+					1																// arraySize;
+				}
+			};
+
+			renderPassLayouts.push_back(barrier);
+		}
+
+		if (!renderPassLayouts.empty())
+			vk.cmdPipelineBarrier(commandBuffer, (VkPipelineStageFlags)0, (VkPipelineStageFlags)0, (VkDependencyFlags)0,
+								  0, (const VkMemoryBarrier*)DE_NULL,
+								  0, (const VkBufferMemoryBarrier*)DE_NULL,
+								  (deUint32)renderPassLayouts.size(), &renderPassLayouts[0]);
+	}
+}
+
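+// Records the render pass: begins it with the given clear values, steps through each subpass (inline or via a secondary command buffer) and ends it.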
+void pushRenderPassCommands (const DeviceInterface&								vk,
+							 VkCommandBuffer									commandBuffer,
+							 VkRenderPass										renderPass,
+							 VkFramebuffer										framebuffer,
+							 const vector<de::SharedPtr<SubpassRenderer> >&		subpassRenderers,
+							 const UVec2&										renderPos,
+							 const UVec2&										renderSize,
+							 const vector<Maybe<VkClearValue> >&				renderPassClearValues,
+							 TestConfig::RenderTypes							render)
+{
+	const float				clearNan				= tcu::Float32::nan().asFloat();
+	vector<VkClearValue>	attachmentClearValues;
+
+	for (size_t attachmentNdx = 0; attachmentNdx < renderPassClearValues.size(); attachmentNdx++)
+	{
+		if (renderPassClearValues[attachmentNdx])
+			attachmentClearValues.push_back(*renderPassClearValues[attachmentNdx]);
+		else
+			attachmentClearValues.push_back(makeClearValueColorF32(clearNan, clearNan, clearNan, clearNan));
+	}
+
+	{
+		const VkRect2D renderArea =
+		{
+			{ (deInt32)renderPos.x(),	(deInt32)renderPos.y()	},
+			{ renderSize.x(),			renderSize.y()			}
+		};
+
+		for (size_t subpassNdx = 0; subpassNdx < subpassRenderers.size(); subpassNdx++)
+		{
+			const VkSubpassContents	contents = subpassRenderers[subpassNdx]->isSecondary() ? VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS : VK_SUBPASS_CONTENTS_INLINE;
+
+			if (subpassNdx == 0)
+				cmdBeginRenderPass(vk, commandBuffer, renderPass, framebuffer, renderArea, (deUint32)attachmentClearValues.size(), attachmentClearValues.empty() ? DE_NULL : &attachmentClearValues[0], contents);
+			else
+				vk.cmdNextSubpass(commandBuffer, contents);
+
+			if (render)
+			{
+				if (contents == VK_SUBPASS_CONTENTS_INLINE)
+				{
+					subpassRenderers[subpassNdx]->pushRenderCommands(vk, commandBuffer);
+				}
+				else if (contents == VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS)
+				{
+					const VkCommandBuffer cmd = subpassRenderers[subpassNdx]->getCommandBuffer();
+					vk.cmdExecuteCommands(commandBuffer, 1, &cmd);
+				}
+				else
+					DE_FATAL("Invalid contents");
+			}
+		}
+
+		vk.cmdEndRenderPass(commandBuffer);
+	}
+}
+
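+// Copies each non-lazy attachment into its readback buffer: transition the images to TRANSFER_SRC_OPTIMAL, record the copies (stencil goes into the secondary buffer for combined depth/stencil formats) and make the buffer contents visible with a final barrier.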
+void pushReadImagesToBuffers (const DeviceInterface&								vk,
+							  VkCommandBuffer										commandBuffer,
+							  deUint32												queueIndex,
+
+							  const vector<de::SharedPtr<AttachmentResources> >&	attachmentResources,
+							  const vector<Attachment>&								attachmentInfo,
+							  const vector<bool>&									isLazy,
+
+							  const UVec2&											targetSize)
+{
+	{
+		vector<VkImageMemoryBarrier>	imageBarriers;
+
+		for (size_t attachmentNdx = 0; attachmentNdx < attachmentInfo.size(); attachmentNdx++)
+		{
+			if (isLazy[attachmentNdx])
+				continue;
+
+			const VkImageMemoryBarrier barrier =
+			{
+				VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,				// sType
+				DE_NULL,											// pNext
+
+				getAllMemoryWriteFlags(),							// srcAccessMask
+				getAllMemoryReadFlags(),							// dstAccessMask
+
+				attachmentInfo[attachmentNdx].getFinalLayout(),		// oldLayout
+				VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,				// newLayout
+
+				queueIndex,											// srcQueueFamilyIndex
+				queueIndex,											// destQueueFamilyIndex
+
+				attachmentResources[attachmentNdx]->getImage(),		// image
+				{													// subresourceRange
+					getImageAspectFlags(attachmentInfo[attachmentNdx].getFormat()),	// aspect;
+					0,										// baseMipLevel
+					1,										// mipLevels
+					0,										// baseArraySlice
+					1										// arraySize
+				}
+			};
+
+			imageBarriers.push_back(barrier);
+		}
+
+		if (!imageBarriers.empty())
+			vk.cmdPipelineBarrier(commandBuffer,
+								  getAllPipelineStageFlags(),
+								  getAllPipelineStageFlags(),
+								  (VkDependencyFlags)0,
+								  0, (const VkMemoryBarrier*)DE_NULL,
+								  0, (const VkBufferMemoryBarrier*)DE_NULL,
+								  (deUint32)imageBarriers.size(), &imageBarriers[0]);
+	}
+
+	for (size_t attachmentNdx = 0; attachmentNdx < attachmentInfo.size(); attachmentNdx++)
+	{
+		if (isLazy[attachmentNdx])
+			continue;
+
+		const tcu::TextureFormat::ChannelOrder	order	= mapVkFormat(attachmentInfo[attachmentNdx].getFormat()).order;
+		const VkBufferImageCopy					rect	=
+		{
+			0, // bufferOffset
+			0, // bufferRowLength
+			0, // bufferImageHeight
+			{							// imageSubresource
+				getPrimaryImageAspect(mapVkFormat(attachmentInfo[attachmentNdx].getFormat()).order),	// aspect
+				0,						// mipLevel
+				0,						// arraySlice
+				1						// arraySize
+			},
+			{ 0, 0, 0 },				// imageOffset
+			{ targetSize.x(), targetSize.y(), 1u }		// imageExtent
+		};
+
+		vk.cmdCopyImageToBuffer(commandBuffer, attachmentResources[attachmentNdx]->getImage(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, attachmentResources[attachmentNdx]->getBuffer(), 1, &rect);
+
+		if (tcu::TextureFormat::DS == order)
+		{
+			const VkBufferImageCopy stencilRect =
+			{
+				0, // bufferOffset
+				0, // bufferRowLength
+				0, // bufferImageHeight
+				{									// imageSubresource
+					VK_IMAGE_ASPECT_STENCIL_BIT,	// aspect
+					0,								// mipLevel
+					0,								// arraySlice
+					1						// arraySize
+				},
+				{ 0, 0, 0 },				// imageOffset
+				{ targetSize.x(), targetSize.y(), 1u }		// imageExtent
+			};
+
+			vk.cmdCopyImageToBuffer(commandBuffer, attachmentResources[attachmentNdx]->getImage(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, attachmentResources[attachmentNdx]->getSecondaryBuffer(), 1, &stencilRect);
+		}
+	}
+
+	{
+		vector<VkBufferMemoryBarrier>	bufferBarriers;
+
+		for (size_t attachmentNdx = 0; attachmentNdx < attachmentInfo.size(); attachmentNdx++)
+		{
+			if (isLazy[attachmentNdx])
+				continue;
+
+			const tcu::TextureFormat::ChannelOrder	order			= mapVkFormat(attachmentInfo[attachmentNdx].getFormat()).order;
+			const VkBufferMemoryBarrier				bufferBarrier	=
+			{
+				VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+				DE_NULL,
+
+				getAllMemoryWriteFlags(),
+				getAllMemoryReadFlags(),
+
+				queueIndex,
+				queueIndex,
+
+				attachmentResources[attachmentNdx]->getBuffer(),
+				0,
+				attachmentResources[attachmentNdx]->getBufferSize()
+			};
+
+			bufferBarriers.push_back(bufferBarrier);
+
+			if (tcu::TextureFormat::DS == order)
+			{
+				const VkBufferMemoryBarrier secondaryBufferBarrier =
+				{
+					VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+					DE_NULL,
+
+					getAllMemoryWriteFlags(),
+					getAllMemoryReadFlags(),
+
+					queueIndex,
+					queueIndex,
+
+					attachmentResources[attachmentNdx]->getSecondaryBuffer(),
+					0,
+					attachmentResources[attachmentNdx]->getSecondaryBufferSize()
+				};
+
+				bufferBarriers.push_back(secondaryBufferBarrier);
+			}
+		}
+
+		if (!bufferBarriers.empty())
+			vk.cmdPipelineBarrier(commandBuffer,
+								  getAllPipelineStageFlags(),
+								  getAllPipelineStageFlags(),
+								  (VkDependencyFlags)0,
+								  0, (const VkMemoryBarrier*)DE_NULL,
+								  (deUint32)bufferBarriers.size(), &bufferBarriers[0],
+								  0, (const VkImageMemoryBarrier*)DE_NULL);
+	}
+}
+
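+// Clears a reference image to the given VkClearValue, taking the depth/stencil or color path based on the format and converting the clear color for sRGB and integer formats.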
+void clear (const PixelBufferAccess& access, const VkClearValue& value)
+{
+	const tcu::TextureFormat&	format	= access.getFormat();
+
+	if (tcu::hasDepthComponent(format.order) || tcu::hasStencilComponent(format.order))
+	{
+		if (tcu::hasDepthComponent(format.order))
+			tcu::clearDepth(access, value.depthStencil.depth);
+
+		if (tcu::hasStencilComponent(format.order))
+			tcu::clearStencil(access, value.depthStencil.stencil);
+	}
+	else
+	{
+		if (tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_FLOATING_POINT
+				|| tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT
+				|| tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT)
+		{
+			const tcu::Vec4		color	(value.color.float32[0],
+										 value.color.float32[1],
+										 value.color.float32[2],
+										 value.color.float32[3]);
+
+			if (tcu::isSRGB(format))
+				tcu::clear(access, tcu::linearToSRGB(color));
+			else
+				tcu::clear(access, color);
+		}
+		else if (tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)
+		{
+			const tcu::UVec4	color	(value.color.uint32[0],
+										 value.color.uint32[1],
+										 value.color.uint32[2],
+										 value.color.uint32[3]);
+
+			tcu::clear(access, color);
+		}
+		else if (tcu::getTextureChannelClass(format.type) == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER)
+		{
+			const tcu::IVec4	color	(value.color.int32[0],
+										 value.color.int32[1],
+										 value.color.int32[2],
+										 value.color.int32[3]);
+
+			tcu::clear(access, color);
+		}
+		else
+			DE_FATAL("Unknown channel class");
+	}
+}
+
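+// Maps a pixel position inside the quad [posA, posB] to normalized (u, v) coordinates clamped to [0, 1], plus the derived u*v and (u+v)/2 channels used by the gradient checks.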
+Vec4 computeUvs (const IVec2& posA, const IVec2& posB, const IVec2& pos)
+{
+	const float u = de::clamp((float)(pos.x() - posA.x()) / (float)(posB.x() - posA.x()), 0.0f, 1.0f);
+	const float v = de::clamp((float)(pos.y() - posA.y()) / (float)(posB.y() - posA.y()), 0.0f, 1.0f);
+
+	return Vec4(u, v, u * v, (u + v) / 2.0f);
+}
+
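+// Builds the CPU-side reference images: applies image and render pass clears, load ops, per-subpass clears and the gradient quad, and fills undefined regions with a grid pattern.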
+void renderReferenceImages (vector<tcu::TextureLevel>&			referenceAttachments,
+							const RenderPass&					renderPassInfo,
+							const UVec2&						targetSize,
+							const vector<Maybe<VkClearValue> >&	imageClearValues,
+							const vector<Maybe<VkClearValue> >&	renderPassClearValues,
+							const vector<SubpassRenderInfo>&	subpassRenderInfo,
+							const UVec2&						renderPos,
+							const UVec2&						renderSize)
+{
+	const vector<Subpass>&	subpasses		= renderPassInfo.getSubpasses();
+	vector<bool>			attachmentUsed	(renderPassInfo.getAttachments().size(), false);
+
+	referenceAttachments.resize(renderPassInfo.getAttachments().size());
+
+	for (size_t attachmentNdx = 0; attachmentNdx < renderPassInfo.getAttachments().size(); attachmentNdx++)
+	{
+		const Attachment				attachment					= renderPassInfo.getAttachments()[attachmentNdx];
+		const tcu::TextureFormat		format						= mapVkFormat(attachment.getFormat());
+		const tcu::TextureFormatInfo	textureInfo					= tcu::getTextureFormatInfo(format);
+		tcu::TextureLevel&				reference					= referenceAttachments[attachmentNdx];
+		const bool						isDepthOrStencilAttachment	= hasDepthComponent(format.order) || hasStencilComponent(format.order);
+
+		reference = tcu::TextureLevel(format, targetSize.x(), targetSize.y());
+
+		if (imageClearValues[attachmentNdx])
+			clear(reference.getAccess(), *imageClearValues[attachmentNdx]);
+		else
+		{
+			// Fill with grid if image contents are undefined before renderpass
+			if (isDepthOrStencilAttachment)
+			{
+				if (tcu::hasDepthComponent(format.order))
+					tcu::fillWithGrid(tcu::getEffectiveDepthStencilAccess(reference.getAccess(), tcu::Sampler::MODE_DEPTH), 2, textureInfo.valueMin, textureInfo.valueMax);
+
+				if (tcu::hasStencilComponent(format.order))
+					tcu::fillWithGrid(tcu::getEffectiveDepthStencilAccess(reference.getAccess(), tcu::Sampler::MODE_STENCIL), 2, textureInfo.valueMin, textureInfo.valueMax);
+			}
+			else
+				tcu::fillWithGrid(reference.getAccess(), 2, textureInfo.valueMin, textureInfo.valueMax);
+		}
+	}
+
+	for (size_t subpassNdx = 0; subpassNdx < subpasses.size(); subpassNdx++)
+	{
+		const Subpass&						subpass				= subpasses[subpassNdx];
+		const SubpassRenderInfo&			renderInfo			= subpassRenderInfo[subpassNdx];
+		const vector<AttachmentReference>&	colorAttachments	= subpass.getColorAttachments();
+
+		// Apply load op if attachment was used for the first time
+		for (size_t attachmentNdx = 0; attachmentNdx < colorAttachments.size(); attachmentNdx++)
+		{
+			const deUint32 attachmentIndex = colorAttachments[attachmentNdx].getAttachment();
+
+			if (!attachmentUsed[attachmentIndex])
+			{
+				const Attachment&	attachment	= renderPassInfo.getAttachments()[attachmentIndex];
+				tcu::TextureLevel&	reference	= referenceAttachments[attachmentIndex];
+
+				DE_ASSERT(!tcu::hasDepthComponent(reference.getFormat().order));
+				DE_ASSERT(!tcu::hasStencilComponent(reference.getFormat().order));
+
+				if (attachment.getLoadOp() == VK_ATTACHMENT_LOAD_OP_CLEAR)
+					clear(tcu::getSubregion(reference.getAccess(), renderPos.x(), renderPos.y(), renderSize.x(), renderSize.y()), *renderPassClearValues[attachmentIndex]);
+				else if (attachment.getLoadOp() == VK_ATTACHMENT_LOAD_OP_DONT_CARE)
+				{
+					const tcu::TextureFormatInfo textureInfo = tcu::getTextureFormatInfo(reference.getFormat());
+
+					tcu::fillWithGrid(tcu::getSubregion(reference.getAccess(), renderPos.x(), renderPos.y(), renderSize.x(), renderSize.y()), 2, textureInfo.valueMin, textureInfo.valueMax);
+				}
+
+				attachmentUsed[attachmentIndex] = true;
+			}
+		}
+
+		// Apply load op to depth/stencil attachment if it was used for the first time
+		if (subpass.getDepthStencilAttachment().getAttachment() != VK_ATTACHMENT_UNUSED && !attachmentUsed[subpass.getDepthStencilAttachment().getAttachment()])
+		{
+			const deUint32 attachmentIndex = subpass.getDepthStencilAttachment().getAttachment();
+
+			// Apply load op if attachment was used for the first time
+			if (!attachmentUsed[attachmentIndex])
+			{
+				const Attachment&	attachment	= renderPassInfo.getAttachments()[attachmentIndex];
+				tcu::TextureLevel&	reference		= referenceAttachments[attachmentIndex];
+
+				if (tcu::hasDepthComponent(reference.getFormat().order))
+				{
+					if (attachment.getLoadOp() == VK_ATTACHMENT_LOAD_OP_CLEAR)
+						clear(tcu::getSubregion(tcu::getEffectiveDepthStencilAccess(reference.getAccess(), tcu::Sampler::MODE_DEPTH), renderPos.x(), renderPos.y(), renderSize.x(), renderSize.y()), *renderPassClearValues[attachmentIndex]);
+					else if (attachment.getLoadOp() == VK_ATTACHMENT_LOAD_OP_DONT_CARE)
+					{
+						const tcu::TextureFormatInfo textureInfo = tcu::getTextureFormatInfo(reference.getFormat());
+
+						tcu::fillWithGrid(tcu::getSubregion(tcu::getEffectiveDepthStencilAccess(reference.getAccess(), tcu::Sampler::MODE_DEPTH), renderPos.x(), renderPos.y(), renderSize.x(), renderSize.y()), 2, textureInfo.valueMin, textureInfo.valueMax);
+					}
+				}
+
+				if (tcu::hasStencilComponent(reference.getFormat().order))
+				{
+					if (attachment.getStencilLoadOp() == VK_ATTACHMENT_LOAD_OP_CLEAR)
+						clear(tcu::getSubregion(tcu::getEffectiveDepthStencilAccess(reference.getAccess(), tcu::Sampler::MODE_STENCIL), renderPos.x(), renderPos.y(), renderSize.x(), renderSize.y()), *renderPassClearValues[attachmentIndex]);
+					else if (attachment.getStencilLoadOp() == VK_ATTACHMENT_LOAD_OP_DONT_CARE)
+					{
+						const tcu::TextureFormatInfo textureInfo = tcu::getTextureFormatInfo(reference.getFormat());
+
+						tcu::fillWithGrid(tcu::getSubregion(tcu::getEffectiveDepthStencilAccess(reference.getAccess(), tcu::Sampler::MODE_STENCIL), renderPos.x(), renderPos.y(), renderSize.x(), renderSize.y()), 2, textureInfo.valueMin, textureInfo.valueMax);
+					}
+				}
+			}
+
+			attachmentUsed[attachmentIndex] = true;
+		}
+
+		for (size_t colorClearNdx = 0; colorClearNdx < renderInfo.getColorClears().size(); colorClearNdx++)
+		{
+			const ColorClear&	colorClear	= renderInfo.getColorClears()[colorClearNdx];
+			const UVec2			offset		= colorClear.getOffset();
+			const UVec2			size		= colorClear.getSize();
+			tcu::TextureLevel&	reference	= referenceAttachments[subpass.getColorAttachments()[colorClearNdx].getAttachment()];
+			VkClearValue		value;
+
+			value.color = colorClear.getColor();
+
+			clear(tcu::getSubregion(reference.getAccess(), offset.x(), offset.y(), 0, size.x(), size.y(), 1), value);
+		}
+
+		if (renderInfo.getDepthStencilClear())
+		{
+			const DepthStencilClear&	dsClear		= *renderInfo.getDepthStencilClear();
+			const UVec2					offset		= dsClear.getOffset();
+			const UVec2					size		= dsClear.getSize();
+			tcu::TextureLevel&			reference	= referenceAttachments[subpass.getDepthStencilAttachment().getAttachment()];
+
+			if (tcu::hasDepthComponent(reference.getFormat().order))
+				clearDepth(tcu::getSubregion(reference.getAccess(), offset.x(), offset.y(), 0, size.x(), size.y(), 1), dsClear.getDepth());
+
+			if (tcu::hasStencilComponent(reference.getFormat().order))
+				clearStencil(tcu::getSubregion(reference.getAccess(), offset.x(), offset.y(), 0, size.x(), size.y(), 1), dsClear.getStencil());
+		}
+
+		if (renderInfo.getRenderQuad())
+		{
+			const RenderQuad&	renderQuad	= *renderInfo.getRenderQuad();
+			const Vec4			posA		= renderQuad.getCornerA();
+			const Vec4			posB		= renderQuad.getCornerB();
+			const Vec2			origin		= Vec2((float)renderInfo.getViewportOffset().x(), (float)renderInfo.getViewportOffset().y()) + Vec2((float)renderInfo.getViewportSize().x(), (float)renderInfo.getViewportSize().y()) / Vec2(2.0f);
+			const Vec2			p			= Vec2((float)renderInfo.getViewportSize().x(), (float)renderInfo.getViewportSize().y()) / Vec2(2.0f);
+			const IVec2			posAI		((deInt32)(origin.x() + (p.x() * posA.x())),
+											 (deInt32)(origin.y() + (p.y() * posA.y())));
+			const IVec2			posBI		((deInt32)(origin.x() + (p.x() * posB.x())),
+											 (deInt32)(origin.y() + (p.y() * posB.y())));
+
+			for (size_t attachmentRefNdx = 0; attachmentRefNdx < subpass.getColorAttachments().size(); attachmentRefNdx++)
+			{
+				const Attachment				attachment			= renderPassInfo.getAttachments()[subpass.getColorAttachments()[attachmentRefNdx].getAttachment()];
+				const tcu::TextureFormatInfo	textureInfo			= tcu::getTextureFormatInfo(mapVkFormat(attachment.getFormat()));
+				tcu::TextureLevel&				referenceTexture	= referenceAttachments[subpass.getColorAttachments()[attachmentRefNdx].getAttachment()];
+				const bool						srgb				= tcu::isSRGB(referenceTexture.getFormat());
+				const PixelBufferAccess	reference			= referenceTexture.getAccess();
+				const float						clampMin			= (float)(-MAX_INTEGER_VALUE);
+				const float						clampMax			= (float)(MAX_INTEGER_VALUE);
+				const Vec4						valueMax			(de::clamp(textureInfo.valueMax[0], clampMin, clampMax),
+																	 de::clamp(textureInfo.valueMax[1], clampMin, clampMax),
+																	 de::clamp(textureInfo.valueMax[2], clampMin, clampMax),
+																	 de::clamp(textureInfo.valueMax[3], clampMin, clampMax));
+
+				const Vec4						valueMin			(de::clamp(textureInfo.valueMin[0], clampMin, clampMax),
+																	 de::clamp(textureInfo.valueMin[1], clampMin, clampMax),
+																	 de::clamp(textureInfo.valueMin[2], clampMin, clampMax),
+																	 de::clamp(textureInfo.valueMin[3], clampMin, clampMax));
+
+				DE_ASSERT(posAI.x() < posBI.x());
+				DE_ASSERT(posAI.y() < posBI.y());
+
+				for (int y = posAI.y(); y <= (int)posBI.y(); y++)
+				for (int x = posAI.x(); x <= (int)posBI.x(); x++)
+				{
+					const Vec4	uvs		= computeUvs(posAI, posBI, IVec2(x, y));
+					const Vec4	color	= valueMax * uvs + valueMin * (Vec4(1.0f) - uvs);
+
+					if (srgb)
+						reference.setPixel(tcu::linearToSRGB(color), x, y);
+					else
+						reference.setPixel(color, x, y);
+				}
+			}
+
+			if (subpass.getDepthStencilAttachment().getAttachment() != VK_ATTACHMENT_UNUSED)
+			{
+				tcu::TextureLevel&				referenceTexture	= referenceAttachments[subpass.getDepthStencilAttachment().getAttachment()];
+				const PixelBufferAccess	reference			= referenceTexture.getAccess();
+
+				DE_ASSERT(posAI.x() < posBI.x());
+				DE_ASSERT(posAI.y() < posBI.y());
+
+				for (int y = posAI.y(); y <= (int)posBI.y(); y++)
+				for (int x = posAI.x(); x <= (int)posBI.x(); x++)
+				{
+					const Vec4 uvs = computeUvs(posAI, posBI, IVec2(x, y));
+
+					if (tcu::hasDepthComponent(reference.getFormat().order))
+						reference.setPixDepth(uvs.x(), x, y);
+
+					if (tcu::hasStencilComponent(reference.getFormat().order))
+						reference.setPixStencil(STENCIL_VALUE, x, y);
+				}
+			}
+		}
+	}
+
+	// Mark all attachments that were used but not stored as undefined
+	for (size_t attachmentNdx = 0; attachmentNdx < renderPassInfo.getAttachments().size(); attachmentNdx++)
+	{
+		const Attachment				attachment	= renderPassInfo.getAttachments()[attachmentNdx];
+		const tcu::TextureFormat		format		= mapVkFormat(attachment.getFormat());
+		const tcu::TextureFormatInfo	textureInfo	= tcu::getTextureFormatInfo(format);
+		tcu::TextureLevel&				reference	= referenceAttachments[attachmentNdx];
+
+		if (attachmentUsed[attachmentNdx] && renderPassInfo.getAttachments()[attachmentNdx].getStoreOp() == VK_ATTACHMENT_STORE_OP_DONT_CARE)
+			tcu::fillWithGrid(tcu::getSubregion(reference.getAccess(), renderPos.x(), renderPos.y(), renderSize.x(), renderSize.y()), 2, textureInfo.valueMin, textureInfo.valueMax);
+	}
+}
+
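+// Returns the color attachment slot within the subpass that references the given attachment index, or nothing if the subpass does not use it as a color attachment.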
+Maybe<deUint32> findColorAttachment (const Subpass&				subpass,
+									 deUint32					attachmentIndex)
+{
+	for (size_t colorAttachmentNdx = 0; colorAttachmentNdx < subpass.getColorAttachments().size(); colorAttachmentNdx++)
+	{
+		if (subpass.getColorAttachments()[colorAttachmentNdx].getAttachment() == attachmentIndex)
+			return tcu::just((deUint32)colorAttachmentNdx);
+	}
+
+	return tcu::nothing<deUint32>();
+}
+
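+// Distance between two floats measured in steps of their bit patterns; values of opposite sign are treated as the sum of their magnitudes.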
+int calcFloatDiff (float a, float b)
+{
+	const deUint32		au		= tcu::Float32(a).bits();
+	const deUint32		bu		= tcu::Float32(b).bits();
+
+	const bool			asign	= (au & (0x1u << 31u)) != 0u;
+	const bool			bsign	= (bu & (0x1u << 31u)) != 0u;
+
+	const deUint32		avalue	= (au & ((0x1u << 31u) - 1u));
+	const deUint32		bvalue	= (bu & ((0x1u << 31u) - 1u));
+
+	if (asign != bsign)
+		return avalue + bvalue + 1u;
+	else if (avalue < bvalue)
+		return bvalue - avalue;
+	else
+		return avalue - bvalue;
+}
+
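+// Compares a depth value against the expected clear depth using a threshold derived from the format's bit depth (fixed point) or mantissa precision (floating point).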
+bool comparePixelToDepthClearValue (const ConstPixelBufferAccess&	access,
+									int								x,
+									int								y,
+									float							ref)
+{
+	const tcu::TextureFormat		format			= tcu::getEffectiveDepthStencilTextureFormat(access.getFormat(), tcu::Sampler::MODE_DEPTH);
+	const tcu::TextureChannelClass	channelClass	= tcu::getTextureChannelClass(format.type);
+
+	switch (channelClass)
+	{
+		case tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT:
+		case tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT:
+		{
+			const int	bitDepth	= tcu::getTextureFormatBitDepth(format).x();
+			const float	depth		= access.getPixDepth(x, y);
+			const float	threshold	= 2.0f / (float)((1 << bitDepth) - 1);
+
+			return deFloatAbs(depth - ref) <= threshold;
+		}
+
+		case tcu::TEXTURECHANNELCLASS_FLOATING_POINT:
+		{
+			const float	depth			= access.getPixDepth(x, y);
+			const int	mantissaBits	= tcu::getTextureFormatMantissaBitDepth(format).x();
+			const int	threshold		= 10 * 1 << (23 - mantissaBits);
+
+			DE_ASSERT(mantissaBits <= 23);
+
+			return calcFloatDiff(depth, ref) <= threshold;
+		}
+
+		default:
+			DE_FATAL("Invalid channel class");
+			return false;
+	}
+}
+
+bool comparePixelToStencilClearValue (const ConstPixelBufferAccess&	access,
+									  int							x,
+									  int							y,
+									  deUint32						ref)
+{
+	const deUint32 stencil = access.getPixStencil(x, y);
+
+	return stencil == ref;
+}
+
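+// Compares a pixel against the expected clear color with a per-channel threshold chosen from the format's channel class, ignoring channels the format does not store.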
+bool comparePixelToColorClearValue (const ConstPixelBufferAccess&	access,
+									int								x,
+									int								y,
+									const VkClearColorValue&		ref)
+{
+	const tcu::TextureFormat		format			= access.getFormat();
+	const tcu::TextureChannelClass	channelClass	= tcu::getTextureChannelClass(format.type);
+	const BVec4						channelMask		= tcu::getTextureFormatChannelMask(format);
+
+	switch (channelClass)
+	{
+		case tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT:
+		case tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT:
+		{
+			const IVec4	bitDepth	(tcu::getTextureFormatBitDepth(format));
+			const Vec4	resColor	(access.getPixel(x, y));
+			const Vec4	refColor	(ref.float32[0],
+									 ref.float32[1],
+									 ref.float32[2],
+									 ref.float32[3]);
+			const Vec4	threshold	(bitDepth[0] > 0 ? 20.0f / (float)((1 << bitDepth[0]) - 1) : 1.0f,
+									 bitDepth[1] > 0 ? 20.0f / (float)((1 << bitDepth[1]) - 1) : 1.0f,
+									 bitDepth[2] > 0 ? 20.0f / (float)((1 << bitDepth[2]) - 1) : 1.0f,
+									 bitDepth[3] > 0 ? 20.0f / (float)((1 << bitDepth[3]) - 1) : 1.0f);
+
+			if (tcu::isSRGB(access.getFormat()))
+				return !(tcu::anyNotEqual(tcu::logicalAnd(lessThanEqual(tcu::absDiff(resColor, tcu::linearToSRGB(refColor)), threshold), channelMask), channelMask));
+			else
+				return !(tcu::anyNotEqual(tcu::logicalAnd(lessThanEqual(tcu::absDiff(resColor, refColor), threshold), channelMask), channelMask));
+		}
+
+		case tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER:
+		{
+			const UVec4	resColor	(access.getPixelUint(x, y));
+			const UVec4	refColor	(ref.uint32[0],
+									 ref.uint32[1],
+									 ref.uint32[2],
+									 ref.uint32[3]);
+			const UVec4	threshold	(1);
+
+			return !(tcu::anyNotEqual(tcu::logicalAnd(lessThanEqual(tcu::absDiff(resColor, refColor), threshold), channelMask), channelMask));
+		}
+
+		case tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER:
+		{
+			const IVec4	resColor	(access.getPixelInt(x, y));
+			const IVec4	refColor	(ref.int32[0],
+									 ref.int32[1],
+									 ref.int32[2],
+									 ref.int32[3]);
+			const IVec4	threshold	(1);
+
+			return !(tcu::anyNotEqual(tcu::logicalAnd(lessThanEqual(tcu::absDiff(resColor, refColor), threshold), channelMask), channelMask));
+		}
+
+		case tcu::TEXTURECHANNELCLASS_FLOATING_POINT:
+		{
+			const Vec4	resColor		(access.getPixel(x, y));
+			const Vec4	refColor		(ref.float32[0],
+										 ref.float32[1],
+										 ref.float32[2],
+										 ref.float32[3]);
+			const IVec4	mantissaBits	(tcu::getTextureFormatMantissaBitDepth(format));
+			const IVec4	threshold		(10 * IVec4(1) << (23 - mantissaBits));
+
+			DE_ASSERT(tcu::allEqual(greaterThanEqual(threshold, IVec4(0)), BVec4(true)));
+
+			for (int ndx = 0; ndx < 4; ndx++)
+			{
+				if (calcFloatDiff(resColor[ndx], refColor[ndx]) > threshold[ndx] && channelMask[ndx])
+					return false;
+			}
+
+			return true;
+		}
+
+		default:
+			DE_FATAL("Invalid channel class");
+			return false;
+	}
+}
+
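+// Per-pixel verification state for color, depth and stencil, packed into one byte with two bits per component.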
+class PixelStatus
+{
+public:
+	enum Status
+	{
+		STATUS_UNDEFINED	= 0,
+		STATUS_OK			= 1,
+		STATUS_FAIL			= 2,
+
+		STATUS_LAST
+	};
+
+			PixelStatus			(Status color, Status depth, Status stencil)
+				: m_status	((deUint8)((color << COLOR_OFFSET)
+					| (depth << DEPTH_OFFSET)
+					| (stencil << STENCIL_OFFSET)))
+	{
+	}
+
+	Status	getColorStatus		(void) const { return (Status)((m_status & COLOR_MASK) >> COLOR_OFFSET); }
+	Status	getDepthStatus		(void) const { return (Status)((m_status & DEPTH_MASK) >> DEPTH_OFFSET); }
+	Status	getStencilStatus	(void) const { return (Status)((m_status & STENCIL_MASK) >> STENCIL_OFFSET); }
+
+	void	setColorStatus		(Status status)
+	{
+		DE_ASSERT(getColorStatus() == STATUS_UNDEFINED);
+		m_status |= (deUint8)(status << COLOR_OFFSET);
+	}
+
+	void	setDepthStatus		(Status status)
+	{
+		DE_ASSERT(getDepthStatus() == STATUS_UNDEFINED);
+		m_status |= (deUint8)(status << DEPTH_OFFSET);
+	}
+
+	void	setStencilStatus	(Status status)
+	{
+		DE_ASSERT(getStencilStatus() == STATUS_UNDEFINED);
+		m_status |= (deUint8)(status << STENCIL_OFFSET);
+	}
+
+private:
+	enum
+	{
+		COLOR_OFFSET	= 0,
+		DEPTH_OFFSET	= 2,
+		STENCIL_OFFSET	= 4,
+
+		COLOR_MASK		= (3<<COLOR_OFFSET),
+		DEPTH_MASK		= (3<<DEPTH_OFFSET),
+		STENCIL_MASK	= (3<<STENCIL_OFFSET),
+	};
+	deUint8	m_status;
+};
+
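+// Verifies the depth gradient written by the render quad; pixels within one pixel of the quad edge only get a soft check and are left undefined on mismatch.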
+void checkDepthRenderQuad (const ConstPixelBufferAccess&	result,
+						   const IVec2&						posA,
+						   const IVec2&						posB,
+						   vector<PixelStatus>&				status)
+{
+	for (int y = posA.y(); y <= posB.y(); y++)
+	for (int x = posA.x(); x <= posB.x(); x++)
+	{
+		PixelStatus& pixelStatus = status[x + y * result.getWidth()];
+
+		if (pixelStatus.getDepthStatus() == PixelStatus::STATUS_UNDEFINED)
+		{
+			const Vec4	minUvs		= computeUvs(posA, posB, IVec2(x-1, y-1));
+			const Vec4	maxUvs		= computeUvs(posA, posB, IVec2(x+1, y+1));
+			const bool	softCheck	= std::abs(x - posA.x()) <= 1 || std::abs(x - posB.x()) <= 1
+									|| std::abs(y - posA.y()) <= 1 || std::abs(y - posB.y()) <= 1;
+			const float	resDepth	= result.getPixDepth(x, y);
+
+			if (resDepth >= minUvs.x() && resDepth <= maxUvs.x())
+				pixelStatus.setDepthStatus(PixelStatus::STATUS_OK);
+			else if (!softCheck)
+				pixelStatus.setDepthStatus(PixelStatus::STATUS_FAIL);
+		}
+	}
+}
+
+void checkStencilRenderQuad (const ConstPixelBufferAccess&		result,
+							 const IVec2&						posA,
+							 const IVec2&						posB,
+							 vector<PixelStatus>&				status)
+{
+	for (int y = posA.y(); y <= posB.y(); y++)
+	for (int x = posA.x(); x <= posB.x(); x++)
+	{
+		PixelStatus& pixelStatus = status[x + y * result.getWidth()];
+
+		if (pixelStatus.getStencilStatus() == PixelStatus::STATUS_UNDEFINED)
+		{
+			const bool	softCheck	= std::abs(x - posA.x()) <= 1 || std::abs(x - posB.x()) <= 1
+									|| std::abs(y - posA.y()) <= 1 || std::abs(y - posB.y()) <= 1;
+
+			if (result.getPixStencil(x, y) == STENCIL_VALUE)
+				pixelStatus.setStencilStatus(PixelStatus::STATUS_OK);
+			else if (!softCheck)
+				pixelStatus.setStencilStatus(PixelStatus::STATUS_FAIL);
+		}
+	}
+}
+
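+// Verifies the color gradient written by the render quad, widening the expected range by one pixel of UV interpolation and a quantization threshold; edge pixels only get a soft check.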
+void checkColorRenderQuad (const ConstPixelBufferAccess&	result,
+						   const IVec2&						posA,
+						   const IVec2&						posB,
+						   vector<PixelStatus>&				status)
+{
+	const tcu::TextureFormat&		format				= result.getFormat();
+	const bool						srgb				= tcu::isSRGB(format);
+	const tcu::TextureChannelClass	channelClass		= tcu::getTextureChannelClass(format.type);
+	const tcu::TextureFormatInfo	textureInfo			= tcu::getTextureFormatInfo(format);
+	const float						clampMin			= (float)(-MAX_INTEGER_VALUE);
+	const float						clampMax			= (float)(MAX_INTEGER_VALUE);
+	const Vec4						valueMax			(de::clamp(textureInfo.valueMax[0], clampMin, clampMax),
+														 de::clamp(textureInfo.valueMax[1], clampMin, clampMax),
+														 de::clamp(textureInfo.valueMax[2], clampMin, clampMax),
+														 de::clamp(textureInfo.valueMax[3], clampMin, clampMax));
+
+	const Vec4						valueMin			(de::clamp(textureInfo.valueMin[0], clampMin, clampMax),
+														 de::clamp(textureInfo.valueMin[1], clampMin, clampMax),
+														 de::clamp(textureInfo.valueMin[2], clampMin, clampMax),
+														 de::clamp(textureInfo.valueMin[3], clampMin, clampMax));
+	const BVec4						channelMask			= tcu::getTextureFormatChannelMask(format);
+
+	IVec4						formatBitDepths = tcu::getTextureFormatBitDepth(format);
+	Vec4						threshold = Vec4(1.0f) / Vec4((float)(1 << formatBitDepths.x()),
+																(float)(1 << formatBitDepths.y()),
+																(float)(1 << formatBitDepths.z()),
+																(float)(1 << formatBitDepths.w()));
+
+	switch (channelClass)
+	{
+		case tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT:
+		case tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT:
+		case tcu::TEXTURECHANNELCLASS_FLOATING_POINT:
+		{
+			for (int y = posA.y(); y <= posB.y(); y++)
+			for (int x = posA.x(); x <= posB.x(); x++)
+			{
+				PixelStatus& pixelStatus = status[x + y * result.getWidth()];
+
+				if (pixelStatus.getColorStatus() == PixelStatus::STATUS_UNDEFINED)
+				{
+					const Vec4	minDiff		= Vec4(1.0f) / (IVec4(1) << tcu::getTextureFormatMantissaBitDepth(format)).cast<float>();
+					const Vec4	minUvs		= computeUvs(posA, posB, IVec2(x-1, y-1));
+					const Vec4	maxUvs		= computeUvs(posA, posB, IVec2(x+1, y+1));
+					const bool	softCheck	= std::abs(x - posA.x()) <= 1 || std::abs(x - posB.x()) <= 1
+											|| std::abs(y - posA.y()) <= 1 || std::abs(y - posB.y()) <= 1;
+
+					const Vec4	resColor	(result.getPixel(x, y));
+
+					Vec4	minRefColor	= srgb ? tcu::linearToSRGB(valueMax * minUvs + valueMin * (Vec4(1.0f) - minUvs))
+											 : valueMax * minUvs + valueMin * (Vec4(1.0f) - minUvs) - threshold;
+					Vec4	maxRefColor	= srgb ? tcu::linearToSRGB(valueMax * maxUvs + valueMin * (Vec4(1.0f) - maxUvs))
+											 : valueMax * maxUvs + valueMin * (Vec4(1.0f) - maxUvs) + threshold;
+
+					// Take into account rounding and quantization
+					if (channelClass == tcu::TEXTURECHANNELCLASS_FLOATING_POINT)
+					{
+						minRefColor = tcu::min(minRefColor * (Vec4(1.0f) - minDiff), minRefColor * (Vec4(1.0f) + minDiff));
+						maxRefColor = tcu::max(maxRefColor * (Vec4(1.0f) - minDiff), maxRefColor * (Vec4(1.0f) + minDiff));
+					}
+					else
+					{
+						minRefColor = minRefColor - minDiff;
+						maxRefColor = maxRefColor + minDiff;
+					}
+
+					DE_ASSERT(minRefColor[0] <= maxRefColor[0]);
+					DE_ASSERT(minRefColor[1] <= maxRefColor[1]);
+					DE_ASSERT(minRefColor[2] <= maxRefColor[2]);
+					DE_ASSERT(minRefColor[3] <= maxRefColor[3]);
+
+					if (tcu::anyNotEqual(tcu::logicalAnd(
+											tcu::logicalAnd(greaterThanEqual(resColor, minRefColor),
+															lessThanEqual(resColor, maxRefColor)),
+											channelMask), channelMask))
+					{
+						if (!softCheck)
+							pixelStatus.setColorStatus(PixelStatus::STATUS_FAIL);
+					}
+					else
+						pixelStatus.setColorStatus(PixelStatus::STATUS_OK);
+				}
+			}
+
+			break;
+		}
+
+		case tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER:
+		{
+			for (int y = posA.y(); y <= posB.y(); y++)
+			for (int x = posA.x(); x <= posB.x(); x++)
+			{
+				PixelStatus& pixelStatus = status[x + y * result.getWidth()];
+
+				if (pixelStatus.getColorStatus() == PixelStatus::STATUS_UNDEFINED)
+				{
+					const Vec4	minUvs			= computeUvs(posA, posB, IVec2(x-1, y-1));
+					const Vec4	maxUvs			= computeUvs(posA, posB, IVec2(x+1, y+1));
+					const bool	softCheck		= std::abs(x - posA.x()) <= 1 || std::abs(x - posB.x()) <= 1
+												|| std::abs(y - posA.y()) <= 1 || std::abs(y - posB.y()) <= 1;
+
+					const UVec4	resColor		(result.getPixelUint(x, y));
+
+					const Vec4	minRefColorF	= valueMax * minUvs + valueMin * (Vec4(1.0f) - minUvs);
+					const Vec4	maxRefColorF	= valueMax * maxUvs + valueMin * (Vec4(1.0f) - maxUvs);
+
+					const UVec4	minRefColor		(minRefColorF.asUint());
+					const UVec4	maxRefColor		(maxRefColorF.asUint());
+
+					DE_ASSERT(minRefColor[0] <= maxRefColor[0]);
+					DE_ASSERT(minRefColor[1] <= maxRefColor[1]);
+					DE_ASSERT(minRefColor[2] <= maxRefColor[2]);
+					DE_ASSERT(minRefColor[3] <= maxRefColor[3]);
+
+					if (tcu::anyNotEqual(tcu::logicalAnd(
+											tcu::logicalAnd(greaterThanEqual(resColor, minRefColor),
+															lessThanEqual(resColor, maxRefColor)),
+											channelMask), channelMask))
+					{
+						if (!softCheck)
+							pixelStatus.setColorStatus(PixelStatus::STATUS_FAIL);
+					}
+					else
+						pixelStatus.setColorStatus(PixelStatus::STATUS_OK);
+				}
+			}
+
+			break;
+		}
+
+		case tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER:
+		{
+			for (int y = posA.y(); y <= posB.y(); y++)
+			for (int x = posA.x(); x <= posB.x(); x++)
+			{
+				PixelStatus& pixelStatus = status[x + y * result.getWidth()];
+
+				if (pixelStatus.getColorStatus() == PixelStatus::STATUS_UNDEFINED)
+				{
+					const Vec4	minUvs			= computeUvs(posA, posB, IVec2(x-1, y-1));
+					const Vec4	maxUvs			= computeUvs(posA, posB, IVec2(x+1, y+1));
+					const bool	softCheck		= std::abs(x - posA.x()) <= 1 || std::abs(x - posB.x()) <= 1
+												|| std::abs(y - posA.y()) <= 1 || std::abs(y - posB.y()) <= 1;
+
+					const IVec4	resColor		(result.getPixelInt(x, y));
+
+					const Vec4	minRefColorF	= valueMax * minUvs + valueMin * (Vec4(1.0f) - minUvs);
+					const Vec4	maxRefColorF	= valueMax * maxUvs + valueMin * (Vec4(1.0f) - maxUvs);
+
+					const IVec4	minRefColor		(minRefColorF.asInt());
+					const IVec4	maxRefColor		(maxRefColorF.asInt());
+
+					DE_ASSERT(minRefColor[0] <= maxRefColor[0]);
+					DE_ASSERT(minRefColor[1] <= maxRefColor[1]);
+					DE_ASSERT(minRefColor[2] <= maxRefColor[2]);
+					DE_ASSERT(minRefColor[3] <= maxRefColor[3]);
+
+					if (tcu::anyNotEqual(tcu::logicalAnd(
+											tcu::logicalAnd(greaterThanEqual(resColor, minRefColor),
+															lessThanEqual(resColor, maxRefColor)),
+											channelMask), channelMask))
+					{
+						if (!softCheck)
+							pixelStatus.setColorStatus(PixelStatus::STATUS_FAIL);
+					}
+					else
+						pixelStatus.setColorStatus(PixelStatus::STATUS_OK);
+				}
+			}
+
+			break;
+		}
+
+		default:
+			DE_FATAL("Invalid channel class");
+	}
+}
+
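+// Marks every still-undefined pixel in the cleared region as OK or FAIL depending on whether it matches the clear color.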
+void checkColorClear (const ConstPixelBufferAccess&	result,
+					  const UVec2&					offset,
+					  const UVec2&					size,
+					  vector<PixelStatus>&			status,
+					  const VkClearColorValue&		color)
+{
+	DE_ASSERT(offset.x() + size.x() <= (deUint32)result.getWidth());
+	DE_ASSERT(offset.y() + size.y() <= (deUint32)result.getHeight());
+
+	DE_ASSERT(result.getWidth() * result.getHeight() == (int)status.size());
+
+	for (int y = offset.y(); y < (int)(offset.y() + size.y()); y++)
+	for (int x = offset.x(); x < (int)(offset.x() + size.x()); x++)
+	{
+		PixelStatus& pixelStatus = status[x + y * result.getWidth()];
+
+		DE_ASSERT(x + y * result.getWidth() < (int)status.size());
+
+		if (pixelStatus.getColorStatus() == PixelStatus::STATUS_UNDEFINED)
+		{
+			if (comparePixelToColorClearValue(result, x, y, color))
+				pixelStatus.setColorStatus(PixelStatus::STATUS_OK);
+			else
+				pixelStatus.setColorStatus(PixelStatus::STATUS_FAIL);
+		}
+	}
+}
+
+void checkDepthClear (const ConstPixelBufferAccess&	result,
+					  const UVec2&					offset,
+					  const UVec2&					size,
+					  vector<PixelStatus>&			status,
+					  float							depth)
+{
+	for (int y = offset.y(); y < (int)(offset.y() + size.y()); y++)
+	for (int x = offset.x(); x < (int)(offset.x() + size.x()); x++)
+	{
+		PixelStatus&	pixelStatus	= status[x + y * result.getWidth()];
+
+		if (pixelStatus.getDepthStatus() == PixelStatus::STATUS_UNDEFINED)
+		{
+			if (comparePixelToDepthClearValue(result, x, y, depth))
+				pixelStatus.setDepthStatus(PixelStatus::STATUS_OK);
+			else
+				pixelStatus.setDepthStatus(PixelStatus::STATUS_FAIL);
+		}
+	}
+}
+
+void checkStencilClear (const ConstPixelBufferAccess&	result,
+						const UVec2&					offset,
+						const UVec2&					size,
+						vector<PixelStatus>&			status,
+						deUint32						stencil)
+{
+	for (int y = offset.y(); y < (int)(offset.y() + size.y()); y++)
+	for (int x = offset.x(); x < (int)(offset.x() + size.x()); x++)
+	{
+		PixelStatus&	pixelStatus	= status[x + y * result.getWidth()];
+
+		if (pixelStatus.getStencilStatus() == PixelStatus::STATUS_UNDEFINED)
+		{
+			if (comparePixelToStencilClearValue(result, x, y, stencil))
+				pixelStatus.setStencilStatus(PixelStatus::STATUS_OK);
+			else
+				pixelStatus.setStencilStatus(PixelStatus::STATUS_FAIL);
+		}
+	}
+}
+
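+// Verifies a single attachment against the subpass renders and clears, walking subpasses in reverse order; failing pixels are painted into errorImage and the function returns false if any check fails.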
+bool verifyAttachment (const ConstPixelBufferAccess&		result,
+					   const Maybe<ConstPixelBufferAccess>&	secondaryResult,
+					   const RenderPass&					renderPassInfo,
+					   const Maybe<VkClearValue>&			renderPassClearValue,
+					   const Maybe<VkClearValue>&			imageClearValue,
+					   const vector<Subpass>&				subpasses,
+					   const vector<SubpassRenderInfo>&		subpassRenderInfo,
+					   const PixelBufferAccess&				errorImage,
+					   deUint32								attachmentIndex,
+					   const UVec2&							renderPos,
+					   const UVec2&							renderSize)
+{
+	const tcu::TextureFormat&		format				= result.getFormat();
+	const bool						hasDepth			= tcu::hasDepthComponent(format.order);
+	const bool						hasStencil			= tcu::hasStencilComponent(format.order);
+	const bool						isColorFormat		= !hasDepth && !hasStencil;
+	const PixelStatus				initialStatus		(isColorFormat ? PixelStatus::STATUS_UNDEFINED : PixelStatus::STATUS_OK,
+														 hasDepth ? PixelStatus::STATUS_UNDEFINED : PixelStatus::STATUS_OK,
+														 hasStencil ? PixelStatus::STATUS_UNDEFINED : PixelStatus::STATUS_OK);
+
+	bool							attachmentIsUsed	= false;
+	vector<PixelStatus>				status				(result.getWidth() * result.getHeight(), initialStatus);
+	tcu::clear(errorImage, Vec4(0.0f, 1.0f, 0.0f, 1.0f));
+
+	// Check if attachment is used
+	for (int subpassNdx = 0; subpassNdx < (int)subpasses.size(); subpassNdx++)
+	{
+		const Subpass&			subpass			= subpasses[subpassNdx];
+		const Maybe<deUint32>	attachmentNdx	= findColorAttachment(subpass, attachmentIndex);
+
+		if (attachmentNdx || subpass.getDepthStencilAttachment().getAttachment() == attachmentIndex)
+			attachmentIsUsed = true;
+	}
+
+	// Set all pixels that have undefined values to OK
+	if (attachmentIsUsed && (((isColorFormat || hasDepth) && renderPassInfo.getAttachments()[attachmentIndex].getStoreOp() == VK_ATTACHMENT_STORE_OP_DONT_CARE)
+							|| (hasStencil && renderPassInfo.getAttachments()[attachmentIndex].getStencilStoreOp() == VK_ATTACHMENT_STORE_OP_DONT_CARE)))
+	{
+		for(int y = renderPos.y(); y < (int)(renderPos.y() + renderSize.y()); y++)
+		for(int x = renderPos.x(); x < (int)(renderPos.x() + renderSize.x()); x++)
+		{
+			PixelStatus& pixelStatus = status[x + y * result.getWidth()];
+
+			if (isColorFormat && renderPassInfo.getAttachments()[attachmentIndex].getStoreOp() == VK_ATTACHMENT_STORE_OP_DONT_CARE)
+				pixelStatus.setColorStatus(PixelStatus::STATUS_OK);
+			else
+			{
+				if (hasDepth && renderPassInfo.getAttachments()[attachmentIndex].getStoreOp() == VK_ATTACHMENT_STORE_OP_DONT_CARE)
+					pixelStatus.setDepthStatus(PixelStatus::STATUS_OK);
+
+				if (hasStencil && renderPassInfo.getAttachments()[attachmentIndex].getStencilStoreOp() == VK_ATTACHMENT_STORE_OP_DONT_CARE)
+					pixelStatus.setStencilStatus(PixelStatus::STATUS_OK);
+			}
+		}
+	}
+
+	// Check renderpass rendering results
+	if (renderPassInfo.getAttachments()[attachmentIndex].getStoreOp() == VK_ATTACHMENT_STORE_OP_STORE
+		|| (hasStencil && renderPassInfo.getAttachments()[attachmentIndex].getStencilStoreOp() == VK_ATTACHMENT_STORE_OP_STORE))
+	{
+		// Check subpass rendering results
+		for (int subpassNdx = (int)subpasses.size() - 1; subpassNdx >= 0; subpassNdx--)
+		{
+			const Subpass&				subpass			= subpasses[subpassNdx];
+			const SubpassRenderInfo&	renderInfo		= subpassRenderInfo[subpassNdx];
+			const Maybe<deUint32>		attachmentNdx	= findColorAttachment(subpass, attachmentIndex);
+
+			// Check rendered quad
+			if (renderInfo.getRenderQuad() && (attachmentNdx || subpass.getDepthStencilAttachment().getAttachment() == attachmentIndex))
+			{
+				const RenderQuad&	renderQuad	= *renderInfo.getRenderQuad();
+				const Vec4			posA		= renderQuad.getCornerA();
+				const Vec4			posB		= renderQuad.getCornerB();
+				const Vec2			origin		= Vec2((float)renderInfo.getViewportOffset().x(), (float)renderInfo.getViewportOffset().y()) + Vec2((float)renderInfo.getViewportSize().x(), (float)renderInfo.getViewportSize().y()) / Vec2(2.0f);
+				const Vec2			p			= Vec2((float)renderInfo.getViewportSize().x(), (float)renderInfo.getViewportSize().y()) / Vec2(2.0f);
+				const IVec2			posAI		((deInt32)(origin.x() + (p.x() * posA.x())),
+												 (deInt32)(origin.y() + (p.y() * posA.y())));
+				const IVec2			posBI		((deInt32)(origin.x() + (p.x() * posB.x())),
+												 (deInt32)(origin.y() + (p.y() * posB.y())));
+
+				if (isColorFormat)
+					checkColorRenderQuad(result, posAI, posBI, status);
+				else
+				{
+					if (hasDepth)
+						checkDepthRenderQuad(result, posAI, posBI, status);
+
+					if (hasDepth && hasStencil)
+						checkStencilRenderQuad(*secondaryResult, posAI, posBI, status);
+					else if (hasStencil)
+						checkStencilRenderQuad(result, posAI, posBI, status);
+				}
+			}
+
+			// Check color attachment clears
+			if (attachmentNdx && !renderInfo.getColorClears().empty())
+			{
+				const ColorClear& clear = renderInfo.getColorClears()[*attachmentNdx];
+
+				checkColorClear(result, clear.getOffset(), clear.getSize(), status, clear.getColor());
+			}
+
+			// Check depth/stencil attachment clears
+			if (subpass.getDepthStencilAttachment().getAttachment() == attachmentIndex && renderInfo.getDepthStencilClear())
+			{
+				const DepthStencilClear clear = *renderInfo.getDepthStencilClear();
+
+				if (hasDepth)
+					checkDepthClear(result, clear.getOffset(), clear.getSize(), status, clear.getDepth());
+
+				if (hasDepth && hasStencil)
+					checkStencilClear(*secondaryResult, clear.getOffset(), clear.getSize(), status, clear.getStencil());
+				else if (hasStencil)
+					checkStencilClear(result, clear.getOffset(), clear.getSize(), status, clear.getStencil());
+			}
+		}
+
+		// Check renderpass clear results
+		if (attachmentIsUsed && renderPassClearValue)
+		{
+			if (isColorFormat)
+			{
+				if (renderPassInfo.getAttachments()[attachmentIndex].getLoadOp() == VK_ATTACHMENT_LOAD_OP_CLEAR)
+					checkColorClear(result, renderPos, renderSize, status, renderPassClearValue->color);
+			}
+			else
+			{
+				if (hasDepth && renderPassInfo.getAttachments()[attachmentIndex].getLoadOp() == VK_ATTACHMENT_LOAD_OP_CLEAR)
+					checkDepthClear(result, renderPos, renderSize, status, renderPassClearValue->depthStencil.depth);
+
+				if (hasDepth && hasStencil && renderPassInfo.getAttachments()[attachmentIndex].getStencilLoadOp() == VK_ATTACHMENT_LOAD_OP_CLEAR)
+					checkStencilClear(*secondaryResult, renderPos, renderSize, status, renderPassClearValue->depthStencil.stencil);
+				else if (hasStencil && renderPassInfo.getAttachments()[attachmentIndex].getStencilLoadOp() == VK_ATTACHMENT_LOAD_OP_CLEAR)
+					checkStencilClear(result, renderPos, renderSize, status, renderPassClearValue->depthStencil.stencil);
+			}
+		}
+	}
+
+	// Set all pixels that have undefined values after renderpass to OK
+	if (attachmentIsUsed && (((isColorFormat || hasDepth) && renderPassInfo.getAttachments()[attachmentIndex].getLoadOp() == VK_ATTACHMENT_LOAD_OP_DONT_CARE)
+							|| (hasStencil && renderPassInfo.getAttachments()[attachmentIndex].getStencilLoadOp() == VK_ATTACHMENT_LOAD_OP_DONT_CARE)))
+	{
+		for(int y = renderPos.y(); y < (int)(renderPos.y() + renderSize.y()); y++)
+		for(int x = renderPos.x(); x < (int)(renderPos.x() + renderSize.x()); x++)
+		{
+			PixelStatus& pixelStatus = status[x + y * result.getWidth()];
+
+			if (pixelStatus.getColorStatus() == PixelStatus::STATUS_UNDEFINED
+				&& isColorFormat && renderPassInfo.getAttachments()[attachmentIndex].getLoadOp() == VK_ATTACHMENT_LOAD_OP_DONT_CARE)
+				pixelStatus.setColorStatus(PixelStatus::STATUS_OK);
+			else
+			{
+				if (pixelStatus.getDepthStatus() == PixelStatus::STATUS_UNDEFINED
+					&& hasDepth && renderPassInfo.getAttachments()[attachmentIndex].getLoadOp() == VK_ATTACHMENT_LOAD_OP_DONT_CARE)
+					pixelStatus.setDepthStatus(PixelStatus::STATUS_OK);
+
+				if (pixelStatus.getStencilStatus() == PixelStatus::STATUS_UNDEFINED
+					&& hasStencil && renderPassInfo.getAttachments()[attachmentIndex].getStencilLoadOp() == VK_ATTACHMENT_LOAD_OP_DONT_CARE)
+					pixelStatus.setStencilStatus(PixelStatus::STATUS_OK);
+			}
+		}
+	}
+
+	if (imageClearValue)
+	{
+		if (isColorFormat)
+			checkColorClear(result, UVec2(0, 0), UVec2(result.getWidth(), result.getHeight()), status, imageClearValue->color);
+		else
+		{
+			if (hasDepth)
+				checkDepthClear(result, UVec2(0, 0), UVec2(result.getWidth(), result.getHeight()), status, imageClearValue->depthStencil.depth);
+
+			if (hasDepth && hasStencil)
+				checkStencilClear(*secondaryResult, UVec2(0, 0), UVec2(secondaryResult->getWidth(), secondaryResult->getHeight()), status, imageClearValue->depthStencil.stencil);
+			else if (hasStencil)
+				checkStencilClear(result, UVec2(0, 0), UVec2(result.getWidth(), result.getHeight()), status, imageClearValue->depthStencil.stencil);
+		}
+	}
+
+	{
+		bool isOk = true;
+
+		for(int y = 0; y < result.getHeight(); y++)
+		for(int x = 0; x < result.getWidth(); x++)
+		{
+			const PixelStatus& pixelStatus = status[x + y * result.getWidth()];
+
+			if (isColorFormat)
+			{
+				if (pixelStatus.getColorStatus() != PixelStatus::STATUS_OK)
+				{
+					if (pixelStatus.getColorStatus() == PixelStatus::STATUS_UNDEFINED)
+						errorImage.setPixel(Vec4(1.0f, 1.0f, 0.0f, 1.0f), x, y);
+					else if (pixelStatus.getColorStatus() == PixelStatus::STATUS_FAIL)
+						errorImage.setPixel(Vec4(1.0f, 0.0f, 0.0f, 1.0f), x, y);
+
+					isOk = false;
+				}
+			}
+			else
+			{
+				if (hasDepth && pixelStatus.getDepthStatus() != PixelStatus::STATUS_OK)
+				{
+					errorImage.setPixel(Vec4(1.0f, 0.0f, 0.0f, 1.0f), x, y);
+					isOk = false;
+				}
+
+				if (hasStencil && pixelStatus.getStencilStatus() != PixelStatus::STATUS_OK)
+				{
+					errorImage.setPixel(Vec4(1.0f, 0.0f, 0.0f, 1.0f), x, y);
+					isOk = false;
+				}
+			}
+		}
+
+		return isOk;
+	}
+}
+
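+// Reads back every non-lazy attachment, logs the result and reference images, and runs verifyAttachment on attachments that are stored.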
+bool logAndVerifyImages (TestLog&											log,
+						 const DeviceInterface&								vk,
+						 VkDevice											device,
+						 const vector<de::SharedPtr<AttachmentResources> >&	attachmentResources,
+						 const vector<bool>&								attachmentIsLazy,
+						 const RenderPass&									renderPassInfo,
+						 const vector<Maybe<VkClearValue> >&				renderPassClearValues,
+						 const vector<Maybe<VkClearValue> >&				imageClearValues,
+						 const vector<SubpassRenderInfo>&					subpassRenderInfo,
+						 const UVec2&										targetSize,
+						 const TestConfig&									config)
+{
+	vector<tcu::TextureLevel>	referenceAttachments;
+	bool						isOk					= true;
+
+	log << TestLog::Message << "Reference images fill undefined pixels with grid pattern." << TestLog::EndMessage;
+
+	renderReferenceImages(referenceAttachments, renderPassInfo, targetSize, imageClearValues, renderPassClearValues, subpassRenderInfo, config.renderPos, config.renderSize);
+
+	for (size_t attachmentNdx = 0; attachmentNdx < renderPassInfo.getAttachments().size(); attachmentNdx++)
+	{
+		if (!attachmentIsLazy[attachmentNdx])
+		{
+			const Attachment			attachment		= renderPassInfo.getAttachments()[attachmentNdx];
+			const tcu::TextureFormat	format			= mapVkFormat(attachment.getFormat());
+
+			if (tcu::hasDepthComponent(format.order) && tcu::hasStencilComponent(format.order))
+			{
+				const tcu::TextureFormat	depthFormat		= getDepthCopyFormat(attachment.getFormat());
+				const VkDeviceSize			depthBufferSize	= targetSize.x() * targetSize.y() * depthFormat.getPixelSize();
+				void* const					depthPtr		= attachmentResources[attachmentNdx]->getResultMemory().getHostPtr();
+
+				const tcu::TextureFormat	stencilFormat		= getStencilCopyFormat(attachment.getFormat());
+				const VkDeviceSize			stencilBufferSize	= targetSize.x() * targetSize.y() * stencilFormat.getPixelSize();
+				void* const					stencilPtr			= attachmentResources[attachmentNdx]->getSecondaryResultMemory().getHostPtr();
+
+				const VkMappedMemoryRange	ranges[] =
+				{
+					{
+						VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,								// sType;
+						DE_NULL,															// pNext;
+						attachmentResources[attachmentNdx]->getResultMemory().getMemory(),	// mem;
+						attachmentResources[attachmentNdx]->getResultMemory().getOffset(),	// offset;
+						depthBufferSize														// size;
+					},
+					{
+						VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,										// sType;
+						DE_NULL,																	// pNext;
+						attachmentResources[attachmentNdx]->getSecondaryResultMemory().getMemory(),	// mem;
+						attachmentResources[attachmentNdx]->getSecondaryResultMemory().getOffset(),	// offset;
+						stencilBufferSize															// size;
+					}
+				};
+				VK_CHECK(vk.invalidateMappedMemoryRanges(device, 2u, ranges));
+
+				{
+					const ConstPixelBufferAccess	depthAccess		(depthFormat, targetSize.x(), targetSize.y(), 1, depthPtr);
+					const ConstPixelBufferAccess	stencilAccess	(stencilFormat, targetSize.x(), targetSize.y(), 1, stencilPtr);
+					tcu::TextureLevel				errorImage		(tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8), targetSize.x(), targetSize.y());
+
+					log << TestLog::Image("Attachment" + de::toString(attachmentNdx) + "Depth", "Attachment " + de::toString(attachmentNdx) + " Depth", depthAccess);
+					log << TestLog::Image("Attachment" + de::toString(attachmentNdx) + "Stencil", "Attachment " + de::toString(attachmentNdx) + " Stencil", stencilAccess);
+
+					log << TestLog::Image("AttachmentReference" + de::toString(attachmentNdx), "Attachment reference " + de::toString(attachmentNdx), referenceAttachments[attachmentNdx].getAccess());
+
+					if ((renderPassInfo.getAttachments()[attachmentNdx].getStoreOp() == VK_ATTACHMENT_STORE_OP_STORE || renderPassInfo.getAttachments()[attachmentNdx].getStencilStoreOp() == VK_ATTACHMENT_STORE_OP_STORE)
+						&& !verifyAttachment(depthAccess, tcu::just(stencilAccess), renderPassInfo, renderPassClearValues[attachmentNdx], imageClearValues[attachmentNdx], renderPassInfo.getSubpasses(), subpassRenderInfo, errorImage.getAccess(), (deUint32)attachmentNdx, config.renderPos, config.renderSize))
+					{
+						log << TestLog::Image("AttachmentError" + de::toString(attachmentNdx), "Attachment Error " + de::toString(attachmentNdx), errorImage.getAccess());
+						isOk = false;
+					}
+				}
+			}
+			else
+			{
+				const VkDeviceSize			bufferSize	= targetSize.x() * targetSize.y() * format.getPixelSize();
+				void* const					ptr			= attachmentResources[attachmentNdx]->getResultMemory().getHostPtr();
+
+				const VkMappedMemoryRange	range	=
+				{
+					VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,								// sType;
+					DE_NULL,															// pNext;
+					attachmentResources[attachmentNdx]->getResultMemory().getMemory(),	// mem;
+					attachmentResources[attachmentNdx]->getResultMemory().getOffset(),	// offset;
+					bufferSize															// size;
+				};
+				VK_CHECK(vk.invalidateMappedMemoryRanges(device, 1u, &range));
+
+				{
+					const ConstPixelBufferAccess	access		(format, targetSize.x(), targetSize.y(), 1, ptr);
+					tcu::TextureLevel				errorImage	(tcu::TextureFormat(tcu::TextureFormat::RGBA, tcu::TextureFormat::UNORM_INT8), targetSize.x(), targetSize.y());
+
+					log << TestLog::Image("Attachment" + de::toString(attachmentNdx), "Attachment " + de::toString(attachmentNdx), access);
+					log << TestLog::Image("AttachmentReference" + de::toString(attachmentNdx), "Attachment reference " + de::toString(attachmentNdx), referenceAttachments[attachmentNdx].getAccess());
+
+					if ((renderPassInfo.getAttachments()[attachmentNdx].getStoreOp() == VK_ATTACHMENT_STORE_OP_STORE || renderPassInfo.getAttachments()[attachmentNdx].getStencilStoreOp() == VK_ATTACHMENT_STORE_OP_STORE)
+						&& !verifyAttachment(access, tcu::nothing<ConstPixelBufferAccess>(), renderPassInfo, renderPassClearValues[attachmentNdx], imageClearValues[attachmentNdx], renderPassInfo.getSubpasses(), subpassRenderInfo, errorImage.getAccess(), (deUint32)attachmentNdx, config.renderPos, config.renderSize))
+					{
+						log << TestLog::Image("AttachmentError" + de::toString(attachmentNdx), "Attachment Error " + de::toString(attachmentNdx), errorImage.getAccess());
+						isOk = false;
+					}
+				}
+			}
+		}
+	}
+
+	return isOk;
+}
+
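+// Selects the GLSL fragment output type matching the attachment format's channel class.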
+std::string getAttachmentType (VkFormat vkFormat)
+{
+	const tcu::TextureFormat		format			= mapVkFormat(vkFormat);
+	const tcu::TextureChannelClass	channelClass	= tcu::getTextureChannelClass(format.type);
+
+	switch (channelClass)
+	{
+		case tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER:
+			return "ivec4";
+
+		case tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER:
+			return "uvec4";
+
+		case tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT:
+		case tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT:
+		case tcu::TEXTURECHANNELCLASS_FLOATING_POINT:
+			return "vec4";
+
+		default:
+			DE_FATAL("Unknown channel class");
+			return "";
+	}
+}
+
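+// Generates the per-subpass vertex and fragment shaders that write the gradient expected by the reference renderer and the render quad checks.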
+void createTestShaders (SourceCollections& dst, TestConfig config)
+{
+	if (config.renderTypes & TestConfig::RENDERTYPES_DRAW)
+	{
+		const vector<Subpass>&	subpasses	= config.renderPass.getSubpasses();
+
+		for (size_t subpassNdx = 0; subpassNdx < subpasses.size(); subpassNdx++)
+		{
+			const Subpass&		subpass		= subpasses[subpassNdx];
+			std::ostringstream	vertexShader;
+			std::ostringstream	fragmentShader;
+
+			vertexShader << "#version 310 es\n"
+						 << "layout(location = 0) in highp vec4 a_position;\n"
+						 << "layout(location = 0) out highp vec2 v_color;\n"
+						 << "void main (void) {\n"
+						 << "\thighp float a = 0.5 + a_position.x;\n"
+						 << "\thighp float b = 0.5 + a_position.y;\n"
+						 << "\tv_color = vec2(a, b);\n"
+						 << "\tgl_Position = a_position;\n"
+						 << "}\n";
+
+			fragmentShader << "#version 310 es\n"
+						   << "layout(location = 0) in highp vec2 v_color;\n";
+
+			for (size_t attachmentNdx = 0; attachmentNdx < subpass.getColorAttachments().size(); attachmentNdx++)
+			{
+				const std::string attachmentType = getAttachmentType(config.renderPass.getAttachments()[subpass.getColorAttachments()[attachmentNdx].getAttachment()].getFormat());
+				fragmentShader << "layout(location = " << attachmentNdx << ") out highp " << attachmentType << " o_color" << attachmentNdx << ";\n";
+			}
+
+			fragmentShader	<< "void main (void) {\n"
+							<< "\thighp vec4 scale = vec4(v_color.x, v_color.y, v_color.x * v_color.y, (v_color.x + v_color.y) / 2.0);\n";
+
+			for (size_t attachmentNdx = 0; attachmentNdx < subpass.getColorAttachments().size(); attachmentNdx++)
+			{
+				const tcu::TextureFormat		format			= mapVkFormat(config.renderPass.getAttachments()[subpass.getColorAttachments()[attachmentNdx].getAttachment()].getFormat());
+				const tcu::TextureFormatInfo	formatInfo		= tcu::getTextureFormatInfo(format);
+				const float						clampMin		= (float)(-MAX_INTEGER_VALUE);
+				const float						clampMax		= (float)(MAX_INTEGER_VALUE);
+				const Vec4						valueMax		(de::clamp(formatInfo.valueMax[0], clampMin, clampMax),
+																 de::clamp(formatInfo.valueMax[1], clampMin, clampMax),
+																 de::clamp(formatInfo.valueMax[2], clampMin, clampMax),
+																 de::clamp(formatInfo.valueMax[3], clampMin, clampMax));
+
+				const Vec4						valueMin		(de::clamp(formatInfo.valueMin[0], clampMin, clampMax),
+																 de::clamp(formatInfo.valueMin[1], clampMin, clampMax),
+																 de::clamp(formatInfo.valueMin[2], clampMin, clampMax),
+																 de::clamp(formatInfo.valueMin[3], clampMin, clampMax));
+				const std::string				attachmentType	= getAttachmentType(config.renderPass.getAttachments()[subpass.getColorAttachments()[attachmentNdx].getAttachment()].getFormat());
+
+				fragmentShader << "\to_color" << attachmentNdx << " = " << attachmentType << "(vec4" << valueMin << " + vec4" << (valueMax - valueMin)  << " * scale);\n";
+			}
+
+			fragmentShader << "}\n";
+
+			dst.glslSources.add(de::toString(subpassNdx) + "-vert") << glu::VertexSource(vertexShader.str());
+			dst.glslSources.add(de::toString(subpassNdx) + "-frag") << glu::FragmentSource(fragmentShader.str());
+		}
+	}
+}
+
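+// Mark an attachment as lazily allocated only if its contents are never loaded or stored; with mixed image memory modes, alternate lazy and strict allocations for eligible attachments.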
+void initializeAttachmentIsLazy (vector<bool>& attachmentIsLazy, const vector<Attachment>& attachments, TestConfig::ImageMemory imageMemory)
+{
+	bool lastAttachmentWasLazy = false;
+
+	for (size_t attachmentNdx = 0; attachmentNdx < attachments.size(); attachmentNdx++)
+	{
+		if (attachments[attachmentNdx].getLoadOp() != VK_ATTACHMENT_LOAD_OP_LOAD
+			&& attachments[attachmentNdx].getStoreOp() != VK_ATTACHMENT_STORE_OP_STORE
+			&& attachments[attachmentNdx].getStencilLoadOp() != VK_ATTACHMENT_LOAD_OP_LOAD
+			&& attachments[attachmentNdx].getStencilStoreOp() != VK_ATTACHMENT_STORE_OP_STORE)
+		{
+			if (imageMemory == TestConfig::IMAGEMEMORY_LAZY || (imageMemory & TestConfig::IMAGEMEMORY_LAZY && !lastAttachmentWasLazy))
+			{
+				attachmentIsLazy.push_back(true);
+				lastAttachmentWasLazy = true;
+			}
+			else if (imageMemory & TestConfig::IMAGEMEMORY_STRICT)
+			{
+				attachmentIsLazy.push_back(false);
+				lastAttachmentWasLazy = false;
+			}
+			else
+				DE_FATAL("Unknown imageMemory");
+		}
+		else
+			attachmentIsLazy.push_back(false);
+	}
+}
+
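+// Decide per subpass whether commands are recorded into a secondary command buffer; with mixed command buffer modes, alternate between secondary and inline.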
+void initializeSubpassIsSecondary (vector<bool>& subpassIsSecondary, const vector<Subpass>& subpasses, TestConfig::CommandBufferTypes commandBuffer)
+{
+	bool lastSubpassWasSecondary = false;
+
+	for (size_t subpassNdx = 0; subpassNdx < subpasses.size(); subpassNdx++)
+	{
+		if (commandBuffer == TestConfig::COMMANDBUFFERTYPES_SECONDARY || (commandBuffer & TestConfig::COMMANDBUFFERTYPES_SECONDARY && !lastSubpassWasSecondary))
+		{
+			subpassIsSecondary.push_back(true);
+			lastSubpassWasSecondary = true;
+		}
+		else if (commandBuffer & TestConfig::COMMANDBUFFERTYPES_INLINE)
+		{
+			subpassIsSecondary.push_back(false);
+			lastSubpassWasSecondary = false;
+		}
+		else
+			DE_FATAL("Unknown commandBuffer");
+	}
+}
+
+void initializeImageClearValues (de::Random& rng, vector<Maybe<VkClearValue> >& clearValues, const vector<Attachment>& attachments, const vector<bool>& isLazy)
+{
+	for (size_t attachmentNdx = 0; attachmentNdx < attachments.size(); attachmentNdx++)
+	{
+		if (!isLazy[attachmentNdx])
+			clearValues.push_back(just(randomClearValue(attachments[attachmentNdx], rng)));
+		else
+			clearValues.push_back(nothing<VkClearValue>());
+	}
+}
+
+void initializeRenderPassClearValues (de::Random& rng, vector<Maybe<VkClearValue> >& clearValues, const vector<Attachment>& attachments)
+{
+	for (size_t attachmentNdx = 0; attachmentNdx < attachments.size(); attachmentNdx++)
+	{
+		if (attachments[attachmentNdx].getLoadOp() == VK_ATTACHMENT_LOAD_OP_CLEAR
+			|| attachments[attachmentNdx].getStencilLoadOp() == VK_ATTACHMENT_LOAD_OP_CLEAR)
+		{
+			clearValues.push_back(just(randomClearValue(attachments[attachmentNdx], rng)));
+		}
+		else
+			clearValues.push_back(nothing<VkClearValue>());
+	}
+}
+
+void initializeSubpassClearValues (de::Random& rng, vector<vector<VkClearColorValue> >& clearValues, const RenderPass& renderPass)
+{
+	clearValues.resize(renderPass.getSubpasses().size());
+
+	for (size_t subpassNdx = 0; subpassNdx < renderPass.getSubpasses().size(); subpassNdx++)
+	{
+		const Subpass&						subpass				= renderPass.getSubpasses()[subpassNdx];
+		const vector<AttachmentReference>&	colorAttachments	= subpass.getColorAttachments();
+
+		clearValues[subpassNdx].resize(colorAttachments.size());
+
+		for (size_t attachmentRefNdx = 0; attachmentRefNdx < colorAttachments.size(); attachmentRefNdx++)
+		{
+			const AttachmentReference&	attachmentRef	= colorAttachments[attachmentRefNdx];
+			const Attachment&			attachment		= renderPass.getAttachments()[attachmentRef.getAttachment()];
+
+			clearValues[subpassNdx][attachmentRefNdx] = randomColorClearValue(attachment, rng);
+		}
+	}
+}
+
+void logSubpassRenderInfo (TestLog&					log,
+						   const SubpassRenderInfo&	info)
+{
+	log << TestLog::Message << "Viewport, offset: " << info.getViewportOffset() << ", size: " << info.getViewportSize() << TestLog::EndMessage;
+
+	if (info.isSecondary())
+		log << TestLog::Message << "Subpass uses secondary command buffers" << TestLog::EndMessage;
+	else
+		log << TestLog::Message << "Subpass uses inlined commands" << TestLog::EndMessage;
+
+	for (deUint32 attachmentNdx = 0; attachmentNdx < info.getColorClears().size(); attachmentNdx++)
+	{
+		const ColorClear&	colorClear	= info.getColorClears()[attachmentNdx];
+
+		log << TestLog::Message << "Clearing color attachment " << attachmentNdx
+			<< ". Offset: " << colorClear.getOffset()
+			<< ", Size: " << colorClear.getSize()
+			<< ", Color: " << clearColorToString(info.getColorAttachment(attachmentNdx).getFormat(), colorClear.getColor()) << TestLog::EndMessage;
+	}
+
+	if (info.getDepthStencilClear())
+	{
+		const DepthStencilClear&	depthStencilClear	= *info.getDepthStencilClear();
+
+		log << TestLog::Message << "Clearing depth stencil attachment"
+			<< ". Offset: " << depthStencilClear.getOffset()
+			<< ", Size: " << depthStencilClear.getSize()
+			<< ", Depth: " << depthStencilClear.getDepth()
+			<< ", Stencil: " << depthStencilClear.getStencil() << TestLog::EndMessage;
+	}
+
+	if (info.getRenderQuad())
+	{
+		const RenderQuad&	renderQuad	= *info.getRenderQuad();
+
+		log << TestLog::Message << "Rendering gradient quad to " << renderQuad.getCornerA() << " -> " << renderQuad.getCornerB() << TestLog::EndMessage;
+	}
+}
+
+void logTestCaseInfo (TestLog&									log,
+					  const TestConfig&							config,
+					  const vector<bool>&						attachmentIsLazy,
+					  const vector<Maybe<VkClearValue> >&		imageClearValues,
+					  const vector<Maybe<VkClearValue> >&		renderPassClearValues,
+					  const vector<SubpassRenderInfo>&			subpassRenderInfo)
+{
+	const RenderPass&	renderPass	= config.renderPass;
+
+	logRenderPassInfo(log, renderPass);
+
+	DE_ASSERT(attachmentIsLazy.size() == renderPass.getAttachments().size());
+	DE_ASSERT(imageClearValues.size() == renderPass.getAttachments().size());
+	DE_ASSERT(renderPassClearValues.size() == renderPass.getAttachments().size());
+
+	log << TestLog::Message << "TargetSize: " << config.targetSize << TestLog::EndMessage;
+	log << TestLog::Message << "Render area, Offset: " << config.renderPos << ", Size: " << config.renderSize << TestLog::EndMessage;
+
+	for (size_t attachmentNdx = 0; attachmentNdx < attachmentIsLazy.size(); attachmentNdx++)
+	{
+		const tcu::ScopedLogSection	section	(log, "Attachment" + de::toString(attachmentNdx), "Attachment " + de::toString(attachmentNdx));
+
+		if (attachmentIsLazy[attachmentNdx])
+			log << TestLog::Message << "Is lazy." << TestLog::EndMessage;
+
+		if (imageClearValues[attachmentNdx])
+			log << TestLog::Message << "Image is cleared to " << clearValueToString(renderPass.getAttachments()[attachmentNdx].getFormat(), *imageClearValues[attachmentNdx]) << " before rendering." << TestLog::EndMessage;
+
+		if (renderPass.getAttachments()[attachmentNdx].getLoadOp() == VK_ATTACHMENT_LOAD_OP_CLEAR && renderPassClearValues[attachmentNdx])
+			log << TestLog::Message << "Attachment is cleared to " << clearValueToString(renderPass.getAttachments()[attachmentNdx].getFormat(), *renderPassClearValues[attachmentNdx]) << " in the beginning of the render pass." << TestLog::EndMessage;
+	}
+
+	for (size_t subpassNdx = 0; subpassNdx < renderPass.getSubpasses().size(); subpassNdx++)
+	{
+		const tcu::ScopedLogSection section (log, "Subpass" + de::toString(subpassNdx), "Subpass " + de::toString(subpassNdx));
+
+		logSubpassRenderInfo(log, subpassRenderInfo[subpassNdx]);
+	}
+}
+
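+// Build per-subpass render info: each subpass gets a viewport covering two thirds of the render area, offset in a 2x2 pattern, plus optional per-attachment clears and a draw quad.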
+void initializeSubpassRenderInfo (vector<SubpassRenderInfo>& renderInfos, de::Random& rng, const RenderPass& renderPass, const TestConfig& config)
+{
+	const TestConfig::CommandBufferTypes	commandBuffer			= config.commandBufferTypes;
+	const vector<Subpass>&					subpasses				= renderPass.getSubpasses();
+	bool									lastSubpassWasSecondary	= false;
+
+	for (deUint32 subpassNdx = 0; subpassNdx < (deUint32)subpasses.size(); subpassNdx++)
+	{
+		const Subpass&				subpass				= subpasses[subpassNdx];
+		const bool					subpassIsSecondary	= commandBuffer == TestConfig::COMMANDBUFFERTYPES_SECONDARY
+														|| (commandBuffer & TestConfig::COMMANDBUFFERTYPES_SECONDARY && !lastSubpassWasSecondary);
+		const UVec2					viewportSize		((config.renderSize * UVec2(2)) / UVec2(3));
+		const UVec2					viewportOffset		(config.renderPos.x() + (subpassNdx % 2) * (config.renderSize.x() / 3),
+														 config.renderPos.y() + ((subpassNdx / 2) % 2) * (config.renderSize.y() / 3));
+
+		vector<ColorClear>			colorClears;
+		Maybe<DepthStencilClear>	depthStencilClear;
+		Maybe<RenderQuad>			renderQuad;
+
+		lastSubpassWasSecondary		= subpassIsSecondary;
+
+		if (config.renderTypes & TestConfig::RENDERTYPES_CLEAR)
+		{
+			const vector<AttachmentReference>&	colorAttachments	= subpass.getColorAttachments();
+
+			for (size_t attachmentRefNdx = 0; attachmentRefNdx < colorAttachments.size(); attachmentRefNdx++)
+			{
+				const AttachmentReference&	attachmentRef	= colorAttachments[attachmentRefNdx];
+				const Attachment&			attachment		= renderPass.getAttachments()[attachmentRef.getAttachment()];
+				const UVec2					size			((viewportSize * UVec2(2)) / UVec2(3));
+				const UVec2					offset			(viewportOffset.x() + ((deUint32)attachmentRefNdx % 2u) * (viewportSize.x() / 3u),
+															 viewportOffset.y() + (((deUint32)attachmentRefNdx / 2u) % 2u) * (viewportSize.y() / 3u));
+				const VkClearColorValue		color			= randomColorClearValue(attachment, rng);
+
+				colorClears.push_back(ColorClear(offset, size, color));
+			}
+
+			if (subpass.getDepthStencilAttachment().getAttachment() != VK_ATTACHMENT_UNUSED)
+			{
+				const Attachment&	attachment		= renderPass.getAttachments()[subpass.getDepthStencilAttachment().getAttachment()];
+				const UVec2			size			((viewportSize * UVec2(2)) / UVec2(3));
+				const UVec2			offset			(viewportOffset.x() + ((deUint32)colorAttachments.size() % 2u) * (viewportSize.x() / 3u),
+													 viewportOffset.y() + (((deUint32)colorAttachments.size() / 2u) % 2u) * (viewportSize.y() / 3u));
+				const VkClearValue	value			= randomClearValue(attachment, rng);
+
+				depthStencilClear = tcu::just(DepthStencilClear(offset, size, value.depthStencil.depth, value.depthStencil.stencil));
+			}
+		}
+
+		if (config.renderTypes & TestConfig::RENDERTYPES_DRAW)
+			renderQuad = tcu::just(RenderQuad(tcu::Vec4(-0.5f, -0.5f, 0.0f, 1.0f), tcu::Vec4(0.5f, 0.5f, 1.0f, 1.0f)));
+
+		renderInfos.push_back(SubpassRenderInfo(renderPass, subpassNdx, subpassIsSecondary, viewportOffset, viewportSize, renderQuad, colorClears, depthStencilClear));
+	}
+}
+
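+// Check that every attachment format supports the required optimal-tiling usage (color or depth/stencil attachment) and throw NotSupportedError otherwise.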
+void checkTextureFormatSupport (TestLog&					log,
+								const InstanceInterface&	vk,
+								VkPhysicalDevice			device,
+								const vector<Attachment>&	attachments)
+{
+	bool supported = true;
+
+	for (size_t attachmentNdx = 0; attachmentNdx < attachments.size(); attachmentNdx++)
+	{
+		const Attachment&			attachment					= attachments[attachmentNdx];
+		const tcu::TextureFormat	format						= mapVkFormat(attachment.getFormat());
+		const bool					isDepthOrStencilAttachment	= hasDepthComponent(format.order) || hasStencilComponent(format.order);
+		const VkFormatFeatureFlags	flags						= isDepthOrStencilAttachment ? VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT : VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;
+		VkFormatProperties			properties;
+
+		vk.getPhysicalDeviceFormatProperties(device, attachment.getFormat(), &properties);
+
+		if ((properties.optimalTilingFeatures & flags) != flags)
+		{
+			supported = false;
+			log << TestLog::Message << "Format: " << attachment.getFormat() << " not supported as " << (isDepthOrStencilAttachment ? "depth stencil attachment" : "color attachment") << TestLog::EndMessage;
+		}
+	}
+
+	if (!supported)
+		TCU_THROW(NotSupportedError, "Format not supported");
+}
+
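+// Main test body: set up attachment resources, record image initialization, render pass and readback command buffers, submit them and verify the resulting images.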
+tcu::TestStatus renderPassTest (Context& context, TestConfig config)
+{
+	const UVec2							targetSize			= config.targetSize;
+	const UVec2							renderPos			= config.renderPos;
+	const UVec2							renderSize			= config.renderSize;
+	const RenderPass&					renderPassInfo		= config.renderPass;
+
+	TestLog&							log					= context.getTestContext().getLog();
+	de::Random							rng					(config.seed);
+
+	vector<bool>						attachmentIsLazy;
+	vector<Maybe<VkClearValue> >		imageClearValues;
+	vector<Maybe<VkClearValue> >		renderPassClearValues;
+
+	vector<bool>						subpassIsSecondary;
+	vector<SubpassRenderInfo>			subpassRenderInfo;
+	vector<vector<VkClearColorValue> >	subpassColorClearValues;
+
+	initializeAttachmentIsLazy(attachmentIsLazy, renderPassInfo.getAttachments(), config.imageMemory);
+	initializeImageClearValues(rng, imageClearValues, renderPassInfo.getAttachments(), attachmentIsLazy);
+	initializeRenderPassClearValues(rng, renderPassClearValues, renderPassInfo.getAttachments());
+
+	initializeSubpassIsSecondary(subpassIsSecondary, renderPassInfo.getSubpasses(), config.commandBufferTypes);
+	initializeSubpassClearValues(rng, subpassColorClearValues, renderPassInfo);
+	initializeSubpassRenderInfo(subpassRenderInfo, rng, renderPassInfo, config);
+
+	logTestCaseInfo(log, config, attachmentIsLazy, imageClearValues, renderPassClearValues, subpassRenderInfo);
+
+	checkTextureFormatSupport(log, context.getInstanceInterface(), context.getPhysicalDevice(), config.renderPass.getAttachments());
+
+	{
+		const vk::VkPhysicalDeviceProperties properties = vk::getPhysicalDeviceProperties(context.getInstanceInterface(), context.getPhysicalDevice());
+
+		log << TestLog::Message << "Max color attachments: " << properties.limits.maxColorAttachments << TestLog::EndMessage;
+
+		for (size_t subpassNdx = 0; subpassNdx < renderPassInfo.getSubpasses().size(); subpassNdx++)
+		{
+			if (renderPassInfo.getSubpasses()[subpassNdx].getColorAttachments().size() > (size_t)properties.limits.maxColorAttachments)
+				TCU_THROW(NotSupportedError, "Subpass uses more than maxColorAttachments.");
+		}
+	}
+
+	{
+		const VkDevice								device								= context.getDevice();
+		const DeviceInterface&						vk									= context.getDeviceInterface();
+		const VkQueue								queue								= context.getUniversalQueue();
+		const deUint32								queueIndex							= context.getUniversalQueueFamilyIndex();
+		Allocator&									allocator							= context.getDefaultAllocator();
+
+		const Unique<VkRenderPass>					renderPass							(createRenderPass(vk, device, renderPassInfo));
+		const Unique<VkCommandPool>					commandBufferPool					(createCommandPool(vk, device, queueIndex, 0));
+		const Unique<VkCommandBuffer>				initializeImagesCommandBuffer		(allocateCommandBuffer(vk, device, *commandBufferPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+		const Unique<VkCommandBuffer>				renderCommandBuffer					(allocateCommandBuffer(vk, device, *commandBufferPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+		const Unique<VkCommandBuffer>				readImagesToBuffersCommandBuffer	(allocateCommandBuffer(vk, device, *commandBufferPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
+
+		vector<de::SharedPtr<AttachmentResources> >	attachmentResources;
+		vector<de::SharedPtr<SubpassRenderer> >		subpassRenderers;
+		vector<VkImageView>							attachmentViews;
+
+		for (size_t attachmentNdx = 0; attachmentNdx < renderPassInfo.getAttachments().size(); attachmentNdx++)
+		{
+			const Attachment&	attachmentInfo	= renderPassInfo.getAttachments()[attachmentNdx];
+
+			attachmentResources.push_back(de::SharedPtr<AttachmentResources>(new AttachmentResources(vk, device, allocator, queueIndex, targetSize, attachmentInfo, attachmentIsLazy[attachmentNdx])));
+			attachmentViews.push_back(attachmentResources[attachmentNdx]->getAttachmentView());
+		}
+
+		beginCommandBuffer(vk, *initializeImagesCommandBuffer, (VkCommandBufferUsageFlags)0, DE_NULL, 0, DE_NULL, VK_FALSE, (VkQueryControlFlags)0, (VkQueryPipelineStatisticFlags)0);
+		pushImageInitializationCommands(vk, *initializeImagesCommandBuffer, renderPassInfo.getAttachments(), attachmentResources, queueIndex, imageClearValues);
+		endCommandBuffer(vk, *initializeImagesCommandBuffer);
+
+		{
+			const Unique<VkFramebuffer> framebuffer (createFramebuffer(vk, device, *renderPass, targetSize, attachmentViews));
+
+			for (size_t subpassNdx = 0; subpassNdx < renderPassInfo.getSubpasses().size(); subpassNdx++)
+				subpassRenderers.push_back(de::SharedPtr<SubpassRenderer>(new SubpassRenderer(context, vk, device, allocator, *renderPass, *framebuffer, *commandBufferPool, queueIndex, subpassRenderInfo[subpassNdx])));
+
+			beginCommandBuffer(vk, *renderCommandBuffer, (VkCommandBufferUsageFlags)0, DE_NULL, 0, DE_NULL, VK_FALSE, (VkQueryControlFlags)0, (VkQueryPipelineStatisticFlags)0);
+			pushRenderPassCommands(vk, *renderCommandBuffer, *renderPass, *framebuffer, subpassRenderers, renderPos, renderSize, renderPassClearValues, config.renderTypes);
+			endCommandBuffer(vk, *renderCommandBuffer);
+
+			beginCommandBuffer(vk, *readImagesToBuffersCommandBuffer, (VkCommandBufferUsageFlags)0, DE_NULL, 0, DE_NULL, VK_FALSE, (VkQueryControlFlags)0, (VkQueryPipelineStatisticFlags)0);
+			pushReadImagesToBuffers(vk, *readImagesToBuffersCommandBuffer, queueIndex, attachmentResources, renderPassInfo.getAttachments(), attachmentIsLazy, targetSize);
+			endCommandBuffer(vk, *readImagesToBuffersCommandBuffer);
+			{
+				const VkCommandBuffer commandBuffers[] =
+				{
+					*initializeImagesCommandBuffer,
+					*renderCommandBuffer,
+					*readImagesToBuffersCommandBuffer
+				};
+				const Unique<VkFence>	fence		(createFence(vk, device, 0u));
+
+				queueSubmit(vk, queue, DE_LENGTH_OF_ARRAY(commandBuffers), commandBuffers, *fence);
+				waitForFences(vk, device, 1, &fence.get(), VK_TRUE, ~0ull);
+			}
+		}
+
+		if (logAndVerifyImages(log, vk, device, attachmentResources, attachmentIsLazy, renderPassInfo, renderPassClearValues, imageClearValues, subpassRenderInfo, targetSize, config))
+			return tcu::TestStatus::pass("Pass");
+		else
+			return tcu::TestStatus::fail("Result verification failed");
+	}
+}
+
+static const VkFormat s_coreColorFormats[] =
+{
+	VK_FORMAT_R5G6B5_UNORM_PACK16,
+	VK_FORMAT_R8_UNORM,
+	VK_FORMAT_R8_SNORM,
+	VK_FORMAT_R8_UINT,
+	VK_FORMAT_R8_SINT,
+	VK_FORMAT_R8G8_UNORM,
+	VK_FORMAT_R8G8_SNORM,
+	VK_FORMAT_R8G8_UINT,
+	VK_FORMAT_R8G8_SINT,
+	VK_FORMAT_R8G8B8A8_UNORM,
+	VK_FORMAT_R8G8B8A8_SNORM,
+	VK_FORMAT_R8G8B8A8_UINT,
+	VK_FORMAT_R8G8B8A8_SINT,
+	VK_FORMAT_R8G8B8A8_SRGB,
+	VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+	VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+	VK_FORMAT_A8B8G8R8_UINT_PACK32,
+	VK_FORMAT_A8B8G8R8_SINT_PACK32,
+	VK_FORMAT_A8B8G8R8_SRGB_PACK32,
+	VK_FORMAT_B8G8R8A8_UNORM,
+	VK_FORMAT_B8G8R8A8_SRGB,
+	VK_FORMAT_A2R10G10B10_UNORM_PACK32,
+	VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+	VK_FORMAT_A2B10G10R10_UINT_PACK32,
+	VK_FORMAT_R16_UNORM,
+	VK_FORMAT_R16_SNORM,
+	VK_FORMAT_R16_UINT,
+	VK_FORMAT_R16_SINT,
+	VK_FORMAT_R16_SFLOAT,
+	VK_FORMAT_R16G16_UNORM,
+	VK_FORMAT_R16G16_SNORM,
+	VK_FORMAT_R16G16_UINT,
+	VK_FORMAT_R16G16_SINT,
+	VK_FORMAT_R16G16_SFLOAT,
+	VK_FORMAT_R16G16B16A16_UNORM,
+	VK_FORMAT_R16G16B16A16_SNORM,
+	VK_FORMAT_R16G16B16A16_UINT,
+	VK_FORMAT_R16G16B16A16_SINT,
+	VK_FORMAT_R16G16B16A16_SFLOAT,
+	VK_FORMAT_R32_UINT,
+	VK_FORMAT_R32_SINT,
+	VK_FORMAT_R32_SFLOAT,
+	VK_FORMAT_R32G32_UINT,
+	VK_FORMAT_R32G32_SINT,
+	VK_FORMAT_R32G32_SFLOAT,
+	VK_FORMAT_R32G32B32A32_UINT,
+	VK_FORMAT_R32G32B32A32_SINT,
+	VK_FORMAT_R32G32B32A32_SFLOAT
+};
+
+static const VkFormat s_coreDepthStencilFormats[] =
+{
+	VK_FORMAT_D16_UNORM,
+
+	VK_FORMAT_X8_D24_UNORM_PACK32,
+	VK_FORMAT_D32_SFLOAT,
+
+	VK_FORMAT_D24_UNORM_S8_UINT,
+	VK_FORMAT_D32_SFLOAT_S8_UINT
+};
+
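+// Single-subpass render passes with randomized attachment counts, formats, load/store ops and image layouts.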
+de::MovePtr<tcu::TestCaseGroup> createAttachmentTestCaseGroup (tcu::TestContext& testCtx)
+{
+	const deUint32 attachmentCounts[] = { 1, 3, 4, 8 };
+	const VkAttachmentLoadOp loadOps[] =
+	{
+		VK_ATTACHMENT_LOAD_OP_LOAD,
+		VK_ATTACHMENT_LOAD_OP_CLEAR,
+		VK_ATTACHMENT_LOAD_OP_DONT_CARE
+	};
+
+	const VkAttachmentStoreOp storeOps[] =
+	{
+		VK_ATTACHMENT_STORE_OP_STORE,
+		VK_ATTACHMENT_STORE_OP_DONT_CARE
+	};
+
+	const VkImageLayout initialAndFinalColorLayouts[] =
+	{
+		VK_IMAGE_LAYOUT_GENERAL,
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+		VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+		VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+		VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
+	};
+
+	const VkImageLayout initialAndFinalDepthStencilLayouts[] =
+	{
+		VK_IMAGE_LAYOUT_GENERAL,
+		VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+		VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+		VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+		VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+		VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
+	};
+
+	const VkImageLayout subpassLayouts[] =
+	{
+		VK_IMAGE_LAYOUT_GENERAL,
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
+	};
+
+	const VkImageLayout depthStencilLayouts[] =
+	{
+		VK_IMAGE_LAYOUT_GENERAL,
+		VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
+	};
+
+	const TestConfig::RenderTypes renderCommands[] =
+	{
+		TestConfig::RENDERTYPES_NONE,
+		TestConfig::RENDERTYPES_CLEAR,
+		TestConfig::RENDERTYPES_DRAW,
+		TestConfig::RENDERTYPES_CLEAR|TestConfig::RENDERTYPES_DRAW,
+	};
+
+	const TestConfig::CommandBufferTypes commandBuffers[] =
+	{
+		TestConfig::COMMANDBUFFERTYPES_INLINE,
+		TestConfig::COMMANDBUFFERTYPES_SECONDARY,
+		TestConfig::COMMANDBUFFERTYPES_INLINE|TestConfig::COMMANDBUFFERTYPES_SECONDARY
+	};
+
+	const TestConfig::ImageMemory imageMemories[] =
+	{
+		TestConfig::IMAGEMEMORY_STRICT,
+		TestConfig::IMAGEMEMORY_LAZY,
+		TestConfig::IMAGEMEMORY_STRICT|TestConfig::IMAGEMEMORY_LAZY
+	};
+
+	const UVec2 targetSizes[] =
+	{
+		UVec2(64, 64),
+		UVec2(63, 65)
+	};
+
+	const UVec2 renderPositions[] =
+	{
+		UVec2(0, 0),
+		UVec2(3, 17)
+	};
+
+	const UVec2 renderSizes[] =
+	{
+		UVec2(32, 32),
+		UVec2(60, 47)
+	};
+
+	de::Random rng (1433774382u);
+	de::MovePtr<tcu::TestCaseGroup> group (new tcu::TestCaseGroup(testCtx, "attachment", "Attachment format and count tests with load and store ops and image layouts"));
+
+	for (size_t attachmentCountNdx = 0; attachmentCountNdx < DE_LENGTH_OF_ARRAY(attachmentCounts); attachmentCountNdx++)
+	{
+		const deUint32					attachmentCount			= attachmentCounts[attachmentCountNdx];
+		const deUint32					testCaseCount			= (attachmentCount == 1 ? 100 : 200);
+		de::MovePtr<tcu::TestCaseGroup>	attachmentCountGroup	(new tcu::TestCaseGroup(testCtx, de::toString(attachmentCount).c_str(), de::toString(attachmentCount).c_str()));
+
+		for (size_t testCaseNdx = 0; testCaseNdx < testCaseCount; testCaseNdx++)
+		{
+			const bool					useDepthStencil		= rng.getBool();
+			VkImageLayout				depthStencilLayout	= VK_IMAGE_LAYOUT_GENERAL;
+			vector<Attachment>			attachments;
+			vector<AttachmentReference>	colorAttachmentReferences;
+
+			for (size_t attachmentNdx = 0; attachmentNdx < attachmentCount; attachmentNdx++)
+			{
+				const VkSampleCountFlagBits	sampleCount		= VK_SAMPLE_COUNT_1_BIT;
+				const VkFormat				format			= rng.choose<VkFormat>(DE_ARRAY_BEGIN(s_coreColorFormats), DE_ARRAY_END(s_coreColorFormats));
+				const VkAttachmentLoadOp	loadOp			= rng.choose<VkAttachmentLoadOp>(DE_ARRAY_BEGIN(loadOps), DE_ARRAY_END(loadOps));
+				const VkAttachmentStoreOp	storeOp			= rng.choose<VkAttachmentStoreOp>(DE_ARRAY_BEGIN(storeOps), DE_ARRAY_END(storeOps));
+
+				const VkImageLayout			initialLayout	= rng.choose<VkImageLayout>(DE_ARRAY_BEGIN(initialAndFinalColorLayouts), DE_ARRAY_END(initialAndFinalColorLayouts));
+				const VkImageLayout			finalizeLayout	= rng.choose<VkImageLayout>(DE_ARRAY_BEGIN(initialAndFinalColorLayouts), DE_ARRAY_END(initialAndFinalColorLayouts));
+				const VkImageLayout			subpassLayout	= rng.choose<VkImageLayout>(DE_ARRAY_BEGIN(subpassLayouts), DE_ARRAY_END(subpassLayouts));
+
+				const VkAttachmentLoadOp	stencilLoadOp	= rng.choose<VkAttachmentLoadOp>(DE_ARRAY_BEGIN(loadOps), DE_ARRAY_END(loadOps));
+				const VkAttachmentStoreOp	stencilStoreOp	= rng.choose<VkAttachmentStoreOp>(DE_ARRAY_BEGIN(storeOps), DE_ARRAY_END(storeOps));
+
+				attachments.push_back(Attachment(format, sampleCount, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalizeLayout));
+				colorAttachmentReferences.push_back(AttachmentReference((deUint32)attachmentNdx, subpassLayout));
+			}
+
+			if (useDepthStencil)
+			{
+				const VkSampleCountFlagBits	sampleCount			= VK_SAMPLE_COUNT_1_BIT;
+				const VkFormat				format				= rng.choose<VkFormat>(DE_ARRAY_BEGIN(s_coreDepthStencilFormats), DE_ARRAY_END(s_coreDepthStencilFormats));
+				const VkAttachmentLoadOp	loadOp				= rng.choose<VkAttachmentLoadOp>(DE_ARRAY_BEGIN(loadOps), DE_ARRAY_END(loadOps));
+				const VkAttachmentStoreOp	storeOp				= rng.choose<VkAttachmentStoreOp>(DE_ARRAY_BEGIN(storeOps), DE_ARRAY_END(storeOps));
+
+				const VkImageLayout			initialLayout		= rng.choose<VkImageLayout>(DE_ARRAY_BEGIN(initialAndFinalDepthStencilLayouts), DE_ARRAY_END(initialAndFinalDepthStencilLayouts));
+				const VkImageLayout			finalizeLayout		= rng.choose<VkImageLayout>(DE_ARRAY_BEGIN(initialAndFinalDepthStencilLayouts), DE_ARRAY_END(initialAndFinalDepthStencilLayouts));
+
+				const VkAttachmentLoadOp	stencilLoadOp		= rng.choose<VkAttachmentLoadOp>(DE_ARRAY_BEGIN(loadOps), DE_ARRAY_END(loadOps));
+				const VkAttachmentStoreOp	stencilStoreOp		= rng.choose<VkAttachmentStoreOp>(DE_ARRAY_BEGIN(storeOps), DE_ARRAY_END(storeOps));
+
+				depthStencilLayout = rng.choose<VkImageLayout>(DE_ARRAY_BEGIN(depthStencilLayouts), DE_ARRAY_END(depthStencilLayouts));
+				attachments.push_back(Attachment(format, sampleCount, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalizeLayout));
+			}
+
+			{
+				const TestConfig::RenderTypes			render			= rng.choose<TestConfig::RenderTypes>(DE_ARRAY_BEGIN(renderCommands), DE_ARRAY_END(renderCommands));
+				const TestConfig::CommandBufferTypes	commandBuffer	= rng.choose<TestConfig::CommandBufferTypes>(DE_ARRAY_BEGIN(commandBuffers), DE_ARRAY_END(commandBuffers));
+				const TestConfig::ImageMemory			imageMemory		= rng.choose<TestConfig::ImageMemory>(DE_ARRAY_BEGIN(imageMemories), DE_ARRAY_END(imageMemories));
+				const vector<Subpass>					subpasses		(1, Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS, 0u, vector<AttachmentReference>(), colorAttachmentReferences, vector<AttachmentReference>(), AttachmentReference((useDepthStencil ? (deUint32)(attachments.size() - 1) : VK_ATTACHMENT_UNUSED), depthStencilLayout), vector<AttachmentReference>()));
+				const vector<SubpassDependency>			deps;
+
+				const string							testCaseName	= de::toString(attachmentCountNdx * testCaseCount + testCaseNdx);
+				const RenderPass						renderPass		(attachments, subpasses, deps);
+				const UVec2								targetSize		= rng.choose<UVec2>(DE_ARRAY_BEGIN(targetSizes), DE_ARRAY_END(targetSizes));
+				const UVec2								renderPos		= rng.choose<UVec2>(DE_ARRAY_BEGIN(renderPositions), DE_ARRAY_END(renderPositions));
+				const UVec2								renderSize		= rng.choose<UVec2>(DE_ARRAY_BEGIN(renderSizes), DE_ARRAY_END(renderSizes));
+
+				addFunctionCaseWithPrograms<TestConfig>(attachmentCountGroup.get(), testCaseName.c_str(), testCaseName.c_str(), createTestShaders, renderPassTest, TestConfig(renderPass, render, commandBuffer, imageMemory, targetSize, renderPos, renderSize, 1293809));
+			}
+		}
+
+		group->addChild(attachmentCountGroup.release());
+	}
+
+	return group;
+}
+
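+// Multi-subpass render passes where the set of color attachments used by consecutive subpasses grows, shrinks, rolls, or grows and then shrinks.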
+de::MovePtr<tcu::TestCaseGroup> createAttachmentAllocationTestGroup (tcu::TestContext& testCtx)
+{
+	const deUint32 attachmentCounts[] = { 4, 8 };
+	const VkAttachmentLoadOp loadOps[] =
+	{
+		VK_ATTACHMENT_LOAD_OP_LOAD,
+		VK_ATTACHMENT_LOAD_OP_CLEAR,
+		VK_ATTACHMENT_LOAD_OP_DONT_CARE
+	};
+
+	const VkAttachmentStoreOp storeOps[] =
+	{
+		VK_ATTACHMENT_STORE_OP_STORE,
+		VK_ATTACHMENT_STORE_OP_DONT_CARE
+	};
+
+	const VkImageLayout initialAndFinalColorLayouts[] =
+	{
+		VK_IMAGE_LAYOUT_GENERAL,
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+		VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+		VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+		VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
+	};
+
+	const VkImageLayout subpassLayouts[] =
+	{
+		VK_IMAGE_LAYOUT_GENERAL,
+		VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+	};
+
+	enum AllocationType
+	{
+		// Each pass uses one more attachment than the previous one
+		ALLOCATIONTYPE_GROW,
+		// Each pass uses one less attachment than the previous one
+		ALLOCATIONTYPE_SHRINK,
+		// Each pass drops one attachment and picks up a new one
+		ALLOCATIONTYPE_ROLL,
+		// Start by growing and end by shrinking
+		ALLOCATIONTYPE_GROW_SHRINK
+	};
+
+	const AllocationType allocationTypes[] =
+	{
+		ALLOCATIONTYPE_GROW,
+		ALLOCATIONTYPE_SHRINK,
+		ALLOCATIONTYPE_ROLL,
+		ALLOCATIONTYPE_GROW_SHRINK
+	};
+
+	const char* const allocationTypeStr[] =
+	{
+		"grow",
+		"shrink",
+		"roll",
+		"grow_shrink"
+	};
+
+	const TestConfig::RenderTypes renderCommands[] =
+	{
+		TestConfig::RENDERTYPES_NONE,
+		TestConfig::RENDERTYPES_CLEAR,
+		TestConfig::RENDERTYPES_DRAW,
+		TestConfig::RENDERTYPES_CLEAR|TestConfig::RENDERTYPES_DRAW,
+	};
+
+	const TestConfig::CommandBufferTypes commandBuffers[] =
+	{
+		TestConfig::COMMANDBUFFERTYPES_INLINE,
+		TestConfig::COMMANDBUFFERTYPES_SECONDARY,
+		TestConfig::COMMANDBUFFERTYPES_INLINE|TestConfig::COMMANDBUFFERTYPES_SECONDARY
+	};
+
+	const TestConfig::ImageMemory imageMemories[] =
+	{
+		TestConfig::IMAGEMEMORY_STRICT,
+		TestConfig::IMAGEMEMORY_LAZY,
+		TestConfig::IMAGEMEMORY_STRICT|TestConfig::IMAGEMEMORY_LAZY
+	};
+
+	const UVec2 targetSizes[] =
+	{
+		UVec2(64, 64),
+		UVec2(63, 65)
+	};
+
+	const UVec2 renderPositions[] =
+	{
+		UVec2(0, 0),
+		UVec2(3, 17)
+	};
+
+	const UVec2 renderSizes[] =
+	{
+		UVec2(32, 32),
+		UVec2(60, 47)
+	};
+
+	de::MovePtr<tcu::TestCaseGroup>	group	(new tcu::TestCaseGroup(testCtx, "attachment_allocation", "Attachment allocation tests"));
+	de::Random						rng		(3700649827u);
+
+	for (size_t allocationTypeNdx = 0; allocationTypeNdx < DE_LENGTH_OF_ARRAY(allocationTypes); allocationTypeNdx++)
+	{
+		const AllocationType			allocationType		= allocationTypes[allocationTypeNdx];
+		const size_t					testCaseCount		= 100;
+		de::MovePtr<tcu::TestCaseGroup>	allocationTypeGroup	(new tcu::TestCaseGroup(testCtx, allocationTypeStr[allocationTypeNdx], allocationTypeStr[allocationTypeNdx]));
+
+		for (size_t testCaseNdx = 0; testCaseNdx < testCaseCount; testCaseNdx++)
+		{
+			const deUint32		attachmentCount	= rng.choose<deUint32>(DE_ARRAY_BEGIN(attachmentCounts), DE_ARRAY_END(attachmentCounts));
+			vector<Attachment>	attachments;
+			vector<Subpass>		subpasses;
+
+			for (size_t attachmentNdx = 0; attachmentNdx < attachmentCount; attachmentNdx++)
+			{
+				const VkSampleCountFlagBits	sampleCount		= VK_SAMPLE_COUNT_1_BIT;
+				const VkFormat				format			= rng.choose<VkFormat>(DE_ARRAY_BEGIN(s_coreColorFormats), DE_ARRAY_END(s_coreColorFormats));
+				const VkAttachmentLoadOp	loadOp			= rng.choose<VkAttachmentLoadOp>(DE_ARRAY_BEGIN(loadOps), DE_ARRAY_END(loadOps));
+				const VkAttachmentStoreOp	storeOp			= rng.choose<VkAttachmentStoreOp>(DE_ARRAY_BEGIN(storeOps), DE_ARRAY_END(storeOps));
+
+				const VkImageLayout			initialLayout	= rng.choose<VkImageLayout>(DE_ARRAY_BEGIN(initialAndFinalColorLayouts), DE_ARRAY_END(initialAndFinalColorLayouts));
+				const VkImageLayout			finalizeLayout	= rng.choose<VkImageLayout>(DE_ARRAY_BEGIN(initialAndFinalColorLayouts), DE_ARRAY_END(initialAndFinalColorLayouts));
+
+				const VkAttachmentLoadOp	stencilLoadOp	= rng.choose<VkAttachmentLoadOp>(DE_ARRAY_BEGIN(loadOps), DE_ARRAY_END(loadOps));
+				const VkAttachmentStoreOp	stencilStoreOp	= rng.choose<VkAttachmentStoreOp>(DE_ARRAY_BEGIN(storeOps), DE_ARRAY_END(storeOps));
+
+				attachments.push_back(Attachment(format, sampleCount, loadOp, storeOp, stencilLoadOp, stencilStoreOp, initialLayout, finalizeLayout));
+			}
+
+			if (allocationType == ALLOCATIONTYPE_GROW)
+			{
+				for (size_t subpassNdx = 0; subpassNdx < attachmentCount; subpassNdx++)
+				{
+					vector<AttachmentReference>	colorAttachmentReferences;
+
+					for (size_t attachmentNdx = 0; attachmentNdx < subpassNdx + 1; attachmentNdx++)
+					{
+						const VkImageLayout subpassLayout = rng.choose<VkImageLayout>(DE_ARRAY_BEGIN(subpassLayouts), DE_ARRAY_END(subpassLayouts));
+
+						colorAttachmentReferences.push_back(AttachmentReference((deUint32)attachmentNdx, subpassLayout));
+					}
+
+					subpasses.push_back(Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS, 0u, vector<AttachmentReference>(), colorAttachmentReferences, vector<AttachmentReference>(), AttachmentReference(VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_GENERAL), vector<AttachmentReference>()));
+				}
+			}
+			else if (allocationType == ALLOCATIONTYPE_SHRINK)
+			{
+				for (size_t subpassNdx = 0; subpassNdx < attachmentCount; subpassNdx++)
+				{
+					vector<AttachmentReference>	colorAttachmentReferences;
+
+					for (size_t attachmentNdx = 0; attachmentNdx < (attachmentCount - subpassNdx); attachmentNdx++)
+					{
+						const VkImageLayout subpassLayout = rng.choose<VkImageLayout>(DE_ARRAY_BEGIN(subpassLayouts), DE_ARRAY_END(subpassLayouts));
+
+						colorAttachmentReferences.push_back(AttachmentReference((deUint32)attachmentNdx, subpassLayout));
+					}
+
+					subpasses.push_back(Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS, 0u, vector<AttachmentReference>(), colorAttachmentReferences, vector<AttachmentReference>(), AttachmentReference(VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_GENERAL), vector<AttachmentReference>()));
+				}
+			}
+			else if (allocationType == ALLOCATIONTYPE_ROLL)
+			{
+				for (size_t subpassNdx = 0; subpassNdx < attachmentCount / 2; subpassNdx++)
+				{
+					vector<AttachmentReference>	colorAttachmentReferences;
+
+					for (size_t attachmentNdx = 0; attachmentNdx < attachmentCount / 2; attachmentNdx++)
+					{
+						const VkImageLayout subpassLayout = rng.choose<VkImageLayout>(DE_ARRAY_BEGIN(subpassLayouts), DE_ARRAY_END(subpassLayouts));
+
+						colorAttachmentReferences.push_back(AttachmentReference((deUint32)(subpassNdx + attachmentNdx), subpassLayout));
+					}
+
+					subpasses.push_back(Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS, 0u, vector<AttachmentReference>(), colorAttachmentReferences, vector<AttachmentReference>(), AttachmentReference(VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_GENERAL), vector<AttachmentReference>()));
+				}
+			}
+			else if (allocationType == ALLOCATIONTYPE_GROW_SHRINK)
+			{
+				for (size_t subpassNdx = 0; subpassNdx < attachmentCount; subpassNdx++)
+				{
+					vector<AttachmentReference>	colorAttachmentReferences;
+
+					for (size_t attachmentNdx = 0; attachmentNdx < subpassNdx + 1; attachmentNdx++)
+					{
+						const VkImageLayout subpassLayout = rng.choose<VkImageLayout>(DE_ARRAY_BEGIN(subpassLayouts), DE_ARRAY_END(subpassLayouts));
+
+						colorAttachmentReferences.push_back(AttachmentReference((deUint32)attachmentNdx, subpassLayout));
+					}
+
+					subpasses.push_back(Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS, 0u, vector<AttachmentReference>(), colorAttachmentReferences, vector<AttachmentReference>(), AttachmentReference(VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_GENERAL), vector<AttachmentReference>()));
+				}
+
+				for (size_t subpassNdx = 0; subpassNdx < attachmentCount; subpassNdx++)
+				{
+					vector<AttachmentReference>	colorAttachmentReferences;
+
+					for (size_t attachmentNdx = 0; attachmentNdx < (attachmentCount - subpassNdx); attachmentNdx++)
+					{
+						const VkImageLayout subpassLayout = rng.choose<VkImageLayout>(DE_ARRAY_BEGIN(subpassLayouts), DE_ARRAY_END(subpassLayouts));
+
+						colorAttachmentReferences.push_back(AttachmentReference((deUint32)attachmentNdx, subpassLayout));
+					}
+
+					subpasses.push_back(Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS, 0u, vector<AttachmentReference>(), colorAttachmentReferences, vector<AttachmentReference>(), AttachmentReference(VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_GENERAL), vector<AttachmentReference>()));
+				}
+			}
+			else
+				DE_FATAL("Unknown allocation type");
+
+			{
+				const TestConfig::RenderTypes			render			= rng.choose<TestConfig::RenderTypes>(DE_ARRAY_BEGIN(renderCommands), DE_ARRAY_END(renderCommands));
+				const TestConfig::CommandBufferTypes	commandBuffer	= rng.choose<TestConfig::CommandBufferTypes>(DE_ARRAY_BEGIN(commandBuffers), DE_ARRAY_END(commandBuffers));
+				const TestConfig::ImageMemory			imageMemory		= rng.choose<TestConfig::ImageMemory>(DE_ARRAY_BEGIN(imageMemories), DE_ARRAY_END(imageMemories));
+
+				const string							testCaseName	= de::toString(testCaseNdx);
+				const UVec2								targetSize		= rng.choose<UVec2>(DE_ARRAY_BEGIN(targetSizes), DE_ARRAY_END(targetSizes));
+				const UVec2								renderPos		= rng.choose<UVec2>(DE_ARRAY_BEGIN(renderPositions), DE_ARRAY_END(renderPositions));
+				const UVec2								renderSize		= rng.choose<UVec2>(DE_ARRAY_BEGIN(renderSizes), DE_ARRAY_END(renderSizes));
+
+				vector<SubpassDependency>				deps;
+
+				for (size_t subpassNdx = 0; subpassNdx < subpasses.size() - 1; subpassNdx++)
+				{
+					const bool byRegion				= rng.getBool();
+					deps.push_back(SubpassDependency((deUint32)subpassNdx, (deUint32)subpassNdx + 1,
+													 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
+														| VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
+														| VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
+														| VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+
+													 VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
+														| VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
+														| VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
+														| VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+
+													 VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+													 VK_ACCESS_COLOR_ATTACHMENT_READ_BIT, // \todo [pyry] Correct?
+
+													 byRegion ? (VkBool32)VK_TRUE : (VkBool32)VK_FALSE));
+				}
+
+				const RenderPass					renderPass		(attachments, subpasses, deps);
+
+				addFunctionCaseWithPrograms<TestConfig>(allocationTypeGroup.get(), testCaseName.c_str(), testCaseName.c_str(), createTestShaders, renderPassTest, TestConfig(renderPass, render, commandBuffer, imageMemory, targetSize, renderPos, renderSize, 80329));
+			}
+		}
+
+		group->addChild(allocationTypeGroup.release());
+	}
+
+	return group;
+}
+
+de::MovePtr<tcu::TestCaseGroup> createSimpleTestGroup (tcu::TestContext& testCtx)
+{
+	const UVec2						targetSize	(64, 64);
+	const UVec2						renderPos	(0, 0);
+	const UVec2						renderSize	(64, 64);
+	de::MovePtr<tcu::TestCaseGroup>	group		(new tcu::TestCaseGroup(testCtx, "simple", "Simple basic render pass tests"));
+
+	// color
+	{
+		const RenderPass	renderPass	(vector<Attachment>(1, Attachment(VK_FORMAT_R8G8B8A8_UNORM,
+																		  VK_SAMPLE_COUNT_1_BIT,
+																		  VK_ATTACHMENT_LOAD_OP_CLEAR,
+																		  VK_ATTACHMENT_STORE_OP_STORE,
+																		  VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+																		  VK_ATTACHMENT_STORE_OP_DONT_CARE,
+																		  VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+																		  VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)),
+										 vector<Subpass>(1, Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS,
+																	0u,
+																	vector<AttachmentReference>(),
+																	vector<AttachmentReference>(1, AttachmentReference(0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)),
+																	vector<AttachmentReference>(),
+																	AttachmentReference(VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_GENERAL),
+																	vector<AttachmentReference>())),
+										 vector<SubpassDependency>());
+
+		addFunctionCaseWithPrograms<TestConfig>(group.get(), "color", "Single color attachment case.", createTestShaders, renderPassTest, TestConfig(renderPass, TestConfig::RENDERTYPES_DRAW, TestConfig::COMMANDBUFFERTYPES_INLINE, TestConfig::IMAGEMEMORY_STRICT, targetSize, renderPos, renderSize, 90239));
+	}
+
+	// depth
+	{
+		const RenderPass	renderPass	(vector<Attachment>(1, Attachment(VK_FORMAT_X8_D24_UNORM_PACK32,
+																		  VK_SAMPLE_COUNT_1_BIT,
+																		  VK_ATTACHMENT_LOAD_OP_CLEAR,
+																		  VK_ATTACHMENT_STORE_OP_STORE,
+																		  VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+																		  VK_ATTACHMENT_STORE_OP_DONT_CARE,
+																		  VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+																		  VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)),
+										 vector<Subpass>(1, Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS,
+																	0u,
+																	vector<AttachmentReference>(),
+																	vector<AttachmentReference>(),
+																	vector<AttachmentReference>(),
+																	AttachmentReference(0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL),
+																	vector<AttachmentReference>())),
+										 vector<SubpassDependency>());
+
+		addFunctionCaseWithPrograms<TestConfig>(group.get(), "depth", "Single depth attachment case.", createTestShaders, renderPassTest, TestConfig(renderPass, TestConfig::RENDERTYPES_DRAW, TestConfig::COMMANDBUFFERTYPES_INLINE, TestConfig::IMAGEMEMORY_STRICT, targetSize, renderPos, renderSize, 90239));
+	}
+
+	// stencil
+	{
+		const RenderPass	renderPass	(vector<Attachment>(1, Attachment(VK_FORMAT_S8_UINT,
+																		  VK_SAMPLE_COUNT_1_BIT,
+																		  VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+																		  VK_ATTACHMENT_STORE_OP_DONT_CARE,
+																		  VK_ATTACHMENT_LOAD_OP_CLEAR,
+																		  VK_ATTACHMENT_STORE_OP_STORE,
+																		  VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+																		  VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)),
+										 vector<Subpass>(1, Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS,
+																	0u,
+																	vector<AttachmentReference>(),
+																	vector<AttachmentReference>(),
+																	vector<AttachmentReference>(),
+																	AttachmentReference(0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL),
+																	vector<AttachmentReference>())),
+										 vector<SubpassDependency>());
+
+		addFunctionCaseWithPrograms<TestConfig>(group.get(), "stencil", "Single stencil attachment case.", createTestShaders, renderPassTest, TestConfig(renderPass, TestConfig::RENDERTYPES_DRAW, TestConfig::COMMANDBUFFERTYPES_INLINE, TestConfig::IMAGEMEMORY_STRICT, targetSize, renderPos, renderSize, 90239));
+	}
+
+	// depth_stencil
+	{
+		const RenderPass	renderPass	(vector<Attachment>(1, Attachment(VK_FORMAT_D24_UNORM_S8_UINT,
+																		  VK_SAMPLE_COUNT_1_BIT,
+																		  VK_ATTACHMENT_LOAD_OP_CLEAR,
+																		  VK_ATTACHMENT_STORE_OP_STORE,
+																		  VK_ATTACHMENT_LOAD_OP_CLEAR,
+																		  VK_ATTACHMENT_STORE_OP_STORE,
+																		  VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+																		  VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)),
+										 vector<Subpass>(1, Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS,
+																	0u,
+																	vector<AttachmentReference>(),
+																	vector<AttachmentReference>(),
+																	vector<AttachmentReference>(),
+																	AttachmentReference(0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL),
+																	vector<AttachmentReference>())),
+										 vector<SubpassDependency>());
+
+		addFunctionCaseWithPrograms<TestConfig>(group.get(), "depth_stencil", "Single depth stencil attachment case.", createTestShaders, renderPassTest, TestConfig(renderPass, TestConfig::RENDERTYPES_DRAW, TestConfig::COMMANDBUFFERTYPES_INLINE, TestConfig::IMAGEMEMORY_STRICT, targetSize, renderPos, renderSize, 90239));
+	}
+
+	// color_depth
+	{
+		const Attachment	attachments[] =
+		{
+			Attachment(VK_FORMAT_R8G8B8A8_UNORM,
+					   VK_SAMPLE_COUNT_1_BIT,
+					   VK_ATTACHMENT_LOAD_OP_CLEAR,
+					   VK_ATTACHMENT_STORE_OP_STORE,
+					   VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+					   VK_ATTACHMENT_STORE_OP_DONT_CARE,
+					   VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+					   VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL),
+			Attachment(VK_FORMAT_X8_D24_UNORM_PACK32,
+					   VK_SAMPLE_COUNT_1_BIT,
+					   VK_ATTACHMENT_LOAD_OP_CLEAR,
+					   VK_ATTACHMENT_STORE_OP_STORE,
+					   VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+					   VK_ATTACHMENT_STORE_OP_DONT_CARE,
+					   VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+					   VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL),
+		};
+
+		const RenderPass	renderPass	(vector<Attachment>(DE_ARRAY_BEGIN(attachments), DE_ARRAY_END(attachments)),
+										 vector<Subpass>(1, Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS,
+																	0u,
+																	vector<AttachmentReference>(),
+																	vector<AttachmentReference>(1, AttachmentReference(0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)),
+																	vector<AttachmentReference>(),
+																	AttachmentReference(1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL),
+																	vector<AttachmentReference>())),
+										 vector<SubpassDependency>());
+
+		addFunctionCaseWithPrograms<TestConfig>(group.get(), "color_depth", "Color and depth attachment case.", createTestShaders, renderPassTest, TestConfig(renderPass, TestConfig::RENDERTYPES_DRAW, TestConfig::COMMANDBUFFERTYPES_INLINE, TestConfig::IMAGEMEMORY_STRICT, targetSize, renderPos, renderSize, 90239));
+	}
+
+	// color_stencil
+	{
+		const Attachment	attachments[] =
+		{
+			Attachment(VK_FORMAT_R8G8B8A8_UNORM,
+					   VK_SAMPLE_COUNT_1_BIT,
+					   VK_ATTACHMENT_LOAD_OP_CLEAR,
+					   VK_ATTACHMENT_STORE_OP_STORE,
+					   VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+					   VK_ATTACHMENT_STORE_OP_DONT_CARE,
+					   VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+					   VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL),
+			Attachment(VK_FORMAT_S8_UINT,
+					   VK_SAMPLE_COUNT_1_BIT,
+					   VK_ATTACHMENT_LOAD_OP_CLEAR,
+					   VK_ATTACHMENT_STORE_OP_STORE,
+					   VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+					   VK_ATTACHMENT_STORE_OP_DONT_CARE,
+					   VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+					   VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL),
+		};
+
+		const RenderPass	renderPass	(vector<Attachment>(DE_ARRAY_BEGIN(attachments), DE_ARRAY_END(attachments)),
+										 vector<Subpass>(1, Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS,
+																	0u,
+																	vector<AttachmentReference>(),
+																	vector<AttachmentReference>(1, AttachmentReference(0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)),
+																	vector<AttachmentReference>(),
+																	AttachmentReference(1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL),
+																	vector<AttachmentReference>())),
+										 vector<SubpassDependency>());
+
+
+		addFunctionCaseWithPrograms<TestConfig>(group.get(), "color_stencil", "Color and stencil attachment case.", createTestShaders, renderPassTest, TestConfig(renderPass, TestConfig::RENDERTYPES_DRAW, TestConfig::COMMANDBUFFERTYPES_INLINE, TestConfig::IMAGEMEMORY_STRICT, targetSize, renderPos, renderSize, 90239));
+	}
+
+	// color_depth_stencil
+	{
+		const Attachment	attachments[] =
+		{
+			Attachment(VK_FORMAT_R8G8B8A8_UNORM,
+					   VK_SAMPLE_COUNT_1_BIT,
+					   VK_ATTACHMENT_LOAD_OP_CLEAR,
+					   VK_ATTACHMENT_STORE_OP_STORE,
+					   VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+					   VK_ATTACHMENT_STORE_OP_DONT_CARE,
+					   VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+					   VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL),
+			Attachment(VK_FORMAT_D24_UNORM_S8_UINT,
+					   VK_SAMPLE_COUNT_1_BIT,
+					   VK_ATTACHMENT_LOAD_OP_CLEAR,
+					   VK_ATTACHMENT_STORE_OP_STORE,
+					   VK_ATTACHMENT_LOAD_OP_CLEAR,
+					   VK_ATTACHMENT_STORE_OP_STORE,
+					   VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+					   VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL),
+		};
+
+		const RenderPass	renderPass	(vector<Attachment>(DE_ARRAY_BEGIN(attachments), DE_ARRAY_END(attachments)),
+										 vector<Subpass>(1, Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS,
+																	0u,
+																	vector<AttachmentReference>(),
+																	vector<AttachmentReference>(1, AttachmentReference(0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)),
+																	vector<AttachmentReference>(),
+																	AttachmentReference(1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL),
+																	vector<AttachmentReference>())),
+										 vector<SubpassDependency>());
+
+		addFunctionCaseWithPrograms<TestConfig>(group.get(), "color_depth_stencil", "Color, depth and stencil attachment case.", createTestShaders, renderPassTest, TestConfig(renderPass, TestConfig::RENDERTYPES_DRAW, TestConfig::COMMANDBUFFERTYPES_INLINE, TestConfig::IMAGEMEMORY_STRICT, targetSize, renderPos, renderSize, 90239));
+	}
+
+	return group;
+}
+
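+// Convert e.g. VK_FORMAT_R8G8B8A8_UNORM to the lower-case test group name "r8g8b8a8_unorm".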
+std::string formatToName (VkFormat format)
+{
+	const std::string	formatStr	= de::toString(format);
+	const std::string	prefix		= "VK_FORMAT_";
+
+	DE_ASSERT(formatStr.substr(0, prefix.length()) == prefix);
+
+	return de::toLower(formatStr.substr(prefix.length()));
+}
+
+de::MovePtr<tcu::TestCaseGroup> createFormatTestGroup (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group	(new tcu::TestCaseGroup(testCtx, "formats", "Tests for different image formats."));
+
+	const UVec2 targetSize	(64, 64);
+	const UVec2 renderPos	(0, 0);
+	const UVec2 renderSize	(64, 64);
+
+	const struct
+	{
+		const char* const			str;
+		const VkAttachmentLoadOp	op;
+	} loadOps[] =
+	{
+		{ "clear",		VK_ATTACHMENT_LOAD_OP_CLEAR		},
+		{ "load",		VK_ATTACHMENT_LOAD_OP_LOAD		},
+		{ "dont_care",	VK_ATTACHMENT_LOAD_OP_DONT_CARE	}
+	};
+
+	const struct
+	{
+		 const char* const				str;
+		 const TestConfig::RenderTypes	types;
+	} renderTypes[] =
+	{
+		{ "clear",		TestConfig::RENDERTYPES_CLEAR								},
+		{ "draw",		TestConfig::RENDERTYPES_DRAW								},
+		{ "clear_draw",	TestConfig::RENDERTYPES_CLEAR|TestConfig::RENDERTYPES_DRAW	}
+	};
+
+	// Color formats
+	for (size_t formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(s_coreColorFormats); formatNdx++)
+	{
+		const VkFormat					format		= s_coreColorFormats[formatNdx];
+		de::MovePtr<tcu::TestCaseGroup>	formatGroup	(new tcu::TestCaseGroup(testCtx, formatToName(format).c_str(), de::toString(format).c_str()));
+
+		for (size_t loadOpNdx = 0; loadOpNdx < DE_LENGTH_OF_ARRAY(loadOps); loadOpNdx++)
+		{
+			const VkAttachmentLoadOp		loadOp	= loadOps[loadOpNdx].op;
+			de::MovePtr<tcu::TestCaseGroup>	loadOpGroup	(new tcu::TestCaseGroup(testCtx, loadOps[loadOpNdx].str, loadOps[loadOpNdx].str));
+
+			for (size_t renderTypeNdx = 0; renderTypeNdx < DE_LENGTH_OF_ARRAY(renderTypes); renderTypeNdx++)
+			{
+				const RenderPass	renderPass	(vector<Attachment>(1, Attachment(format,
+																				  VK_SAMPLE_COUNT_1_BIT,
+																				  loadOp,
+																				  VK_ATTACHMENT_STORE_OP_STORE,
+																				  VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+																				  VK_ATTACHMENT_STORE_OP_DONT_CARE,
+																				  VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+																				  VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)),
+												 vector<Subpass>(1, Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS,
+																			0u,
+																			vector<AttachmentReference>(),
+																			vector<AttachmentReference>(1, AttachmentReference(0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)),
+																			vector<AttachmentReference>(),
+																			AttachmentReference(VK_ATTACHMENT_UNUSED, VK_IMAGE_LAYOUT_GENERAL),
+																			vector<AttachmentReference>())),
+												 vector<SubpassDependency>());
+
+				addFunctionCaseWithPrograms<TestConfig>(loadOpGroup.get(), renderTypes[renderTypeNdx].str, renderTypes[renderTypeNdx].str, createTestShaders, renderPassTest, TestConfig(renderPass, renderTypes[renderTypeNdx].types, TestConfig::COMMANDBUFFERTYPES_INLINE, TestConfig::IMAGEMEMORY_STRICT, targetSize, renderPos, renderSize, 90239));
+			}
+
+			formatGroup->addChild(loadOpGroup.release());
+		}
+
+		group->addChild(formatGroup.release());
+	}
+
+	// Depth stencil formats
+	for (size_t formatNdx = 0; formatNdx < DE_LENGTH_OF_ARRAY(s_coreDepthStencilFormats); formatNdx++)
+	{
+		const VkFormat					format		= s_coreDepthStencilFormats[formatNdx];
+		de::MovePtr<tcu::TestCaseGroup>	formatGroup	(new tcu::TestCaseGroup(testCtx, formatToName(format).c_str(), de::toString(format).c_str()));
+
+		for (size_t loadOpNdx = 0; loadOpNdx < DE_LENGTH_OF_ARRAY(loadOps); loadOpNdx++)
+		{
+			const VkAttachmentLoadOp		loadOp	= loadOps[loadOpNdx].op;
+			de::MovePtr<tcu::TestCaseGroup>	loadOpGroup	(new tcu::TestCaseGroup(testCtx, loadOps[loadOpNdx].str, loadOps[loadOpNdx].str));
+
+			for (size_t renderTypeNdx = 0; renderTypeNdx < DE_LENGTH_OF_ARRAY(renderTypes); renderTypeNdx++)
+			{
+				const RenderPass	renderPass	(vector<Attachment>(1, Attachment(format,
+																				  VK_SAMPLE_COUNT_1_BIT,
+																				  VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+																				  VK_ATTACHMENT_STORE_OP_DONT_CARE,
+																				  loadOp,
+																				  VK_ATTACHMENT_STORE_OP_STORE,
+																				  VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+																				  VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)),
+												 vector<Subpass>(1, Subpass(VK_PIPELINE_BIND_POINT_GRAPHICS,
+																			0u,
+																			vector<AttachmentReference>(),
+																			vector<AttachmentReference>(),
+																			vector<AttachmentReference>(),
+																			AttachmentReference(0, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL),
+																			vector<AttachmentReference>())),
+												 vector<SubpassDependency>());
+
+				addFunctionCaseWithPrograms<TestConfig>(loadOpGroup.get(), renderTypes[renderTypeNdx].str, renderTypes[renderTypeNdx].str, createTestShaders, renderPassTest, TestConfig(renderPass, renderTypes[renderTypeNdx].types, TestConfig::COMMANDBUFFERTYPES_INLINE, TestConfig::IMAGEMEMORY_STRICT, targetSize, renderPos, renderSize, 90239));
+			}
+
+			formatGroup->addChild(loadOpGroup.release());
+		}
+
+		group->addChild(formatGroup.release());
+	}
+
+	return group;
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createRenderPassTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	renderpassTests	(new tcu::TestCaseGroup(testCtx, "renderpass", "RenderPass Tests"));
+
+	renderpassTests->addChild(createSimpleTestGroup(testCtx).release());
+	renderpassTests->addChild(createFormatTestGroup(testCtx).release());
+	renderpassTests->addChild(createAttachmentTestCaseGroup(testCtx).release());
+	renderpassTests->addChild(createAttachmentAllocationTestGroup(testCtx).release());
+
+	return renderpassTests.release();
+}
+
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/vktRenderPassTests.hpp b/external/vulkancts/modules/vulkan/vktRenderPassTests.hpp
new file mode 100644
index 0000000..efebfa0
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktRenderPassTests.hpp
@@ -0,0 +1,47 @@
+#ifndef _VKTRENDERPASSTESTS_HPP
+#define _VKTRENDERPASSTESTS_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief RenderPass tests
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+
+tcu::TestCaseGroup*	createRenderPassTests	(tcu::TestContext& testCtx);
+
+} // vkt
+
+#endif // _VKTRENDERPASSTESTS_HPP
diff --git a/external/vulkancts/modules/vulkan/vktShaderLibrary.cpp b/external/vulkancts/modules/vulkan/vktShaderLibrary.cpp
new file mode 100644
index 0000000..bfb26bf
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktShaderLibrary.cpp
@@ -0,0 +1,1873 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief ShaderLibrary Vulkan implementation
+ *//*--------------------------------------------------------------------*/
+
+#include "vktShaderLibrary.hpp"
+#include "vktTestCase.hpp"
+
+#include "vkPrograms.hpp"
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkBuilderUtil.hpp"
+#include "vkTypeUtil.hpp"
+
+#include "gluShaderLibrary.hpp"
+#include "gluShaderUtil.hpp"
+
+#include "tcuStringTemplate.hpp"
+#include "tcuTexture.hpp"
+#include "tcuTestLog.hpp"
+#include "tcuVector.hpp"
+#include "tcuVectorUtil.hpp"
+
+#include "deStringUtil.hpp"
+#include "deArrayUtil.hpp"
+#include "deMemory.h"
+
+#include <sstream>
+#include <map>
+
+namespace vkt
+{
+
+using std::string;
+using std::vector;
+using std::map;
+using std::pair;
+using std::ostringstream;
+
+using de::MovePtr;
+using de::UniquePtr;
+
+using glu::ShaderType;
+using glu::ProgramSources;
+using glu::DataType;
+
+using glu::sl::ShaderCaseSpecification;
+using glu::sl::ProgramSpecializationParams;
+using glu::sl::RequiredExtension;
+using glu::sl::Value;
+using glu::sl::ValueBlock;
+
+using tcu::TestStatus;
+using tcu::StringTemplate;
+using tcu::Vec2;
+using tcu::ConstPixelBufferAccess;
+using tcu::TextureFormat;
+using tcu::TestLog;
+
+using vk::SourceCollections;
+using vk::Move;
+using vk::Unique;
+
+namespace
+{
+
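+// Uniform buffer bindings used by the generated shaders: the "Reference" block
+// holds the expected output values and the "Uniforms" block holds the
+// case-specified uniforms (see declareReferenceBlock and declareUniforms below).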
+enum
+{
+	REFERENCE_UNIFORM_BINDING	= 0,
+	USER_UNIFORM_BINDING		= 1
+};
+
+string getShaderName (ShaderType shaderType, size_t progNdx)
+{
+	ostringstream str;
+	str << glu::getShaderTypeName(shaderType);
+	if (progNdx > 0)
+		str << "_" << progNdx;
+	return str.str();
+}
+
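+// Emits a std140 uniform block declaration. E.g. a hypothetical single float
+// value "u_scale" in set 0 at binding 1 would produce roughly:
+//
+//   layout(binding = 1, std140) uniform Uniforms
+//   {
+//       float u_scale;
+//   };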
+void genUniformBlock (ostringstream& out, const string& blockName, const string& instanceName, int setNdx, int bindingNdx, const vector<Value>& uniforms)
+{
+	out << "layout(";
+
+	if (setNdx != 0)
+		out << "set = " << setNdx << ", ";
+
+	out << "binding = " << bindingNdx << ", std140) uniform " << blockName << "\n"
+		<< "{\n";
+
+	for (vector<Value>::const_iterator val = uniforms.begin(); val != uniforms.end(); ++val)
+		out << "\t" << glu::declare(val->type, val->name, 1) << ";\n";
+
+	out << "}";
+
+	if (!instanceName.empty())
+		out << " " << instanceName;
+
+	out << ";\n";
+}
+
+void declareReferenceBlock (ostringstream& out, const ValueBlock& valueBlock)
+{
+	if (!valueBlock.outputs.empty())
+		genUniformBlock(out, "Reference", "ref", 0, REFERENCE_UNIFORM_BINDING, valueBlock.outputs);
+}
+
+void declareUniforms (ostringstream& out, const ValueBlock& valueBlock)
+{
+	if (!valueBlock.uniforms.empty())
+		genUniformBlock(out, "Uniforms", "", 0, USER_UNIFORM_BINDING, valueBlock.uniforms);
+}
+
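+// Bool-typed values cannot be used directly as vertex inputs or flat varyings,
+// so they are transported across stage interfaces as same-sized integer vectors
+// and converted back with a constructor in the receiving stage.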
+DataType getTransportType (DataType valueType)
+{
+	if (isDataTypeBoolOrBVec(valueType))
+		return glu::getDataTypeIntVec(getDataTypeScalarSize(valueType));
+	else
+		return valueType;
+}
+
+int getNumTransportLocations (DataType valueType)
+{
+	return isDataTypeMatrix(valueType) ? getDataTypeMatrixNumColumns(valueType) : 1;
+}
+
+// This function builds a matching vertex shader for a 'both' case, when
+// the fragment shader is being tested.
+// We need to build attributes and varyings for each 'input'.
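+//
+// E.g. a hypothetical bvec2 input "in0" yields roughly:
+//
+//   layout(location = 1) in ivec2 a_in0;
+//   layout(location = 0) flat out ivec2 v_in0;
+//   ...
+//   v_in0 = a_in0;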
+string genVertexShader (const ShaderCaseSpecification& spec)
+{
+	ostringstream	res;
+	int				curInputLoc		= 0;
+	int				curOutputLoc	= 0;
+
+	res << glu::getGLSLVersionDeclaration(spec.targetVersion) << "\n";
+
+	// Declarations (position + attribute/varying for each input).
+	res << "precision highp float;\n";
+	res << "precision highp int;\n";
+	res << "\n";
+	res << "layout(location = 0) in highp vec4 dEQP_Position;\n";
+	curInputLoc += 1;
+
+	for (size_t ndx = 0; ndx < spec.values.inputs.size(); ndx++)
+	{
+		const Value&		val					= spec.values.inputs[ndx];
+		const DataType		valueType			= val.type.getBasicType();
+		const DataType		transportType		= getTransportType(valueType);
+		const char* const	transportTypeStr	= getDataTypeName(transportType);
+		const int			numLocs				= getNumTransportLocations(valueType);
+
+		res << "layout(location = " << curInputLoc << ") in " << transportTypeStr << " a_" << val.name << ";\n";
+		res << "layout(location = " << curOutputLoc << ") flat out " << transportTypeStr << " " << (transportType != valueType ? "v_" : "") << val.name << ";\n";
+
+		curInputLoc		+= numLocs;
+		curOutputLoc	+= numLocs;
+	}
+	res << "\n";
+
+	// Main function.
+	// - gl_Position = dEQP_Position;
+	// - for each input: write attribute directly to varying
+	res << "void main()\n";
+	res << "{\n";
+	res << "	gl_Position = dEQP_Position;\n";
+	for (size_t ndx = 0; ndx < spec.values.inputs.size(); ndx++)
+	{
+		const Value&	val		= spec.values.inputs[ndx];
+		const string&	name	= val.name;
+
+		res << "	" << (getTransportType(val.type.getBasicType()) != val.type.getBasicType() ? "v_" : "")
+			<< name << " = a_" << name << ";\n";
+	}
+
+	res << "}\n";
+	return res.str();
+}
+
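+// Builds the comparison between the shader outputs and the reference block.
+// E.g. for a hypothetical float output "out0" and int output "out1" it emits
+// roughly:
+//
+//   bool RES = isOk(out0, ref.out0, 0.05);
+//   RES = RES && isOk(out1, ref.out1);
+//   dEQP_FragColor = vec4(RES, RES, RES, 1.0);
+//
+// If checkVarName is non-null, only that output is compared.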
+void genCompareOp (ostringstream& output, const char* dstVec4Var, const ValueBlock& valueBlock, const char* checkVarName)
+{
+	bool isFirstOutput = true;
+
+	for (size_t ndx = 0; ndx < valueBlock.outputs.size(); ndx++)
+	{
+		const Value&	val		= valueBlock.outputs[ndx];
+
+		// Check if we're only interested in one variable (then skip if not the right one).
+		if (checkVarName && val.name != checkVarName)
+			continue;
+
+		// Prefix.
+		if (isFirstOutput)
+		{
+			output << "bool RES = ";
+			isFirstOutput = false;
+		}
+		else
+			output << "RES = RES && ";
+
+		// Generate actual comparison.
+		if (getDataTypeScalarType(val.type.getBasicType()) == glu::TYPE_FLOAT)
+			output << "isOk(" << val.name << ", ref." << val.name << ", 0.05);\n";
+		else
+			output << "isOk(" << val.name << ", ref." << val.name << ");\n";
+	}
+
+	if (isFirstOutput)
+		output << dstVec4Var << " = vec4(1.0);\n";
+	else
+		output << dstVec4Var << " = vec4(RES, RES, RES, 1.0);\n";
+}
+
+string genFragmentShader (const ShaderCaseSpecification& spec)
+{
+	ostringstream	shader;
+	ostringstream	setup;
+	int				curInLoc	= 0;
+
+	shader << glu::getGLSLVersionDeclaration(spec.targetVersion) << "\n";
+
+	shader << "precision highp float;\n";
+	shader << "precision highp int;\n";
+	shader << "\n";
+
+	shader << "layout(location = 0) out mediump vec4 dEQP_FragColor;\n";
+	shader << "\n";
+
+	genCompareFunctions(shader, spec.values, false);
+	shader << "\n";
+
+	// Declarations (varying, reference for each output).
+	for (size_t ndx = 0; ndx < spec.values.outputs.size(); ndx++)
+	{
+		const Value&		val					= spec.values.outputs[ndx];
+		const DataType		valueType			= val.type.getBasicType();
+		const char*	const	valueTypeStr		= getDataTypeName(valueType);
+		const DataType		transportType		= getTransportType(valueType);
+		const char* const	transportTypeStr	= getDataTypeName(transportType);
+		const int			numLocs				= getNumTransportLocations(valueType);
+
+		shader << "layout(location = " << curInLoc << ") flat in " << transportTypeStr << " " << (valueType != transportType ? "v_" : "") << val.name << ";\n";
+
+		if (valueType != transportType)
+			setup << "	" << valueTypeStr << " " << val.name << " = " << valueTypeStr << "(v_" << val.name << ");\n";
+
+		curInLoc += numLocs;
+	}
+
+	declareReferenceBlock(shader, spec.values);
+
+	shader << "\n";
+	shader << "void main()\n";
+	shader << "{\n";
+
+	shader << setup.str();
+
+	shader << "	";
+	genCompareOp(shader, "dEQP_FragColor", spec.values, DE_NULL);
+
+	shader << "}\n";
+	return shader.str();
+}
+
+// Specialize a shader for the vertex shader test case.
+string specializeVertexShader (const ShaderCaseSpecification& spec, const string& src)
+{
+	ostringstream		decl;
+	ostringstream		setup;
+	ostringstream		output;
+	int					curInputLoc		= 0;
+	int					curOutputLoc	= 0;
+
+	// generated from "both" case
+	DE_ASSERT(spec.caseType == glu::sl::CASETYPE_VERTEX_ONLY);
+
+	// Output (write out position).
+	output << "gl_Position = dEQP_Position;\n";
+
+	// Declarations (position + attribute for each input, varying for each output).
+	decl << "layout(location = 0) in highp vec4 dEQP_Position;\n";
+	curInputLoc += 1;
+
+	for (size_t ndx = 0; ndx < spec.values.inputs.size(); ndx++)
+	{
+		const Value&		val					= spec.values.inputs[ndx];
+		const DataType		valueType			= val.type.getBasicType();
+		const char*	const	valueTypeStr		= getDataTypeName(valueType);
+		const DataType		transportType		= getTransportType(valueType);
+		const char* const	transportTypeStr	= getDataTypeName(transportType);
+		const int			numLocs				= getNumTransportLocations(valueType);
+
+		decl << "layout(location = " << curInputLoc << ") in ";
+
+		curInputLoc += numLocs;
+
+		if (valueType == transportType)
+			decl << transportTypeStr << " " << val.name << ";\n";
+		else
+		{
+			decl << transportTypeStr << " a_" << val.name << ";\n";
+			setup << valueTypeStr << " " << val.name << " = " << valueTypeStr << "(a_" << val.name << ");\n";
+		}
+	}
+
+	declareUniforms(decl, spec.values);
+
+	for (size_t ndx = 0; ndx < spec.values.outputs.size(); ndx++)
+	{
+		const Value&		val					= spec.values.outputs[ndx];
+		const DataType		valueType			= val.type.getBasicType();
+		const char*	const	valueTypeStr		= getDataTypeName(valueType);
+		const DataType		transportType		= getTransportType(valueType);
+		const char* const	transportTypeStr	= getDataTypeName(transportType);
+		const int			numLocs				= getNumTransportLocations(valueType);
+
+		decl << "layout(location = " << curOutputLoc << ") flat out ";
+
+		curOutputLoc += numLocs;
+
+		if (valueType == transportType)
+			decl << transportTypeStr << " " << val.name << ";\n";
+		else
+		{
+			decl << transportTypeStr << " v_" << val.name << ";\n";
+			decl << valueTypeStr << " " << val.name << ";\n";
+
+			output << "v_" << val.name << " = " << transportTypeStr << "(" << val.name << ");\n";
+		}
+	}
+
+	// Shader specialization.
+	map<string, string> params;
+	params.insert(pair<string, string>("DECLARATIONS", decl.str()));
+	params.insert(pair<string, string>("SETUP", setup.str()));
+	params.insert(pair<string, string>("OUTPUT", output.str()));
+	params.insert(pair<string, string>("POSITION_FRAG_COLOR", "gl_Position"));
+
+	StringTemplate	tmpl	(src);
+	const string	baseSrc	= tmpl.specialize(params);
+	const string	withExt	= injectExtensionRequirements(baseSrc, spec.programs[0].requiredExtensions, glu::SHADERTYPE_VERTEX);
+
+	return withExt;
+}
+
+// Specialize a shader for the fragment shader test case.
+string specializeFragmentShader (const ShaderCaseSpecification& spec, const string& src)
+{
+	ostringstream		decl;
+	ostringstream		setup;
+	ostringstream		output;
+	int					curInputLoc	= 0;
+
+	// generated from "both" case
+	DE_ASSERT(spec.caseType == glu::sl::CASETYPE_FRAGMENT_ONLY);
+
+	genCompareFunctions(decl, spec.values, false);
+	genCompareOp(output, "dEQP_FragColor", spec.values, DE_NULL);
+
+	decl << "layout(location = 0) out mediump vec4 dEQP_FragColor;\n";
+
+	for (size_t ndx = 0; ndx < spec.values.inputs.size(); ndx++)
+	{
+		const Value&		val					= spec.values.inputs[ndx];
+		const DataType		valueType			= val.type.getBasicType();
+		const char*	const	valueTypeStr		= getDataTypeName(valueType);
+		const DataType		transportType		= getTransportType(valueType);
+		const char* const	transportTypeStr	= getDataTypeName(transportType);
+		const int			numLocs				= getNumTransportLocations(valueType);
+
+		decl << "layout(location = " << curInputLoc << ") flat in ";
+
+		curInputLoc += numLocs;
+
+		if (valueType == transportType)
+			decl << transportTypeStr << " " << val.name << ";\n";
+		else
+		{
+			decl << transportTypeStr << " v_" << val.name << ";\n";
+			setup << valueTypeStr << " " << val.name << " = " << valueTypeStr << "(v_" << val.name << ");\n";
+		}
+	}
+
+	declareUniforms(decl, spec.values);
+	declareReferenceBlock(decl, spec.values);
+
+	for (size_t ndx = 0; ndx < spec.values.outputs.size(); ndx++)
+	{
+		const Value&		val				= spec.values.outputs[ndx];
+		const DataType		basicType		= val.type.getBasicType();
+		const char* const	refTypeStr		= getDataTypeName(basicType);
+
+		decl << refTypeStr << " " << val.name << ";\n";
+	}
+
+	// Shader specialization.
+	map<string, string> params;
+	params.insert(pair<string, string>("DECLARATIONS", decl.str()));
+	params.insert(pair<string, string>("SETUP", setup.str()));
+	params.insert(pair<string, string>("OUTPUT", output.str()));
+	params.insert(pair<string, string>("POSITION_FRAG_COLOR", "dEQP_FragColor"));
+
+	StringTemplate	tmpl	(src);
+	const string	baseSrc	= tmpl.specialize(params);
+	const string	withExt	= injectExtensionRequirements(baseSrc, spec.programs[0].requiredExtensions, glu::SHADERTYPE_FRAGMENT);
+
+	return withExt;
+}
+
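+// The generators below produce the template parameters (e.g. ${VERTEX_DECLARATIONS},
+// ${FRAGMENT_OUTPUT}) that specializeShaderSources() substitutes into the
+// case-provided GLSL sources.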
+map<string, string> generateVertexSpecialization (const ProgramSpecializationParams& specParams)
+{
+	ostringstream			decl;
+	ostringstream			setup;
+	map<string, string>		params;
+	int						curInputLoc		= 0;
+
+	decl << "layout(location = 0) in highp vec4 dEQP_Position;\n";
+	curInputLoc += 1;
+
+	for (size_t ndx = 0; ndx < specParams.caseSpec.values.inputs.size(); ndx++)
+	{
+		const Value&		val					= specParams.caseSpec.values.inputs[ndx];
+		const DataType		valueType			= val.type.getBasicType();
+		const char*	const	valueTypeStr		= getDataTypeName(valueType);
+		const DataType		transportType		= getTransportType(valueType);
+		const char* const	transportTypeStr	= getDataTypeName(transportType);
+		const int			numLocs				= getNumTransportLocations(valueType);
+
+		decl << "layout(location = " << curInputLoc << ") in ";
+
+		curInputLoc += numLocs;
+
+		if (valueType == transportType)
+			decl << transportTypeStr << " " << val.name << ";\n";
+		else
+		{
+			decl << transportTypeStr << " a_" << val.name << ";\n";
+			setup << valueTypeStr << " " << val.name << " = " << valueTypeStr << "(a_" << val.name << ");\n";
+		}
+	}
+
+	declareUniforms(decl, specParams.caseSpec.values);
+
+	params.insert(pair<string, string>("VERTEX_DECLARATIONS",	decl.str()));
+	params.insert(pair<string, string>("VERTEX_SETUP",			setup.str()));
+	params.insert(pair<string, string>("VERTEX_OUTPUT",			string("gl_Position = dEQP_Position;\n")));
+
+	return params;
+}
+
+map<string, string> generateFragmentSpecialization (const ProgramSpecializationParams& specParams)
+{
+	ostringstream		decl;
+	ostringstream		output;
+	map<string, string>	params;
+
+	genCompareFunctions(decl, specParams.caseSpec.values, false);
+	genCompareOp(output, "dEQP_FragColor", specParams.caseSpec.values, DE_NULL);
+
+	decl << "layout(location = 0) out mediump vec4 dEQP_FragColor;\n";
+
+	for (size_t ndx = 0; ndx < specParams.caseSpec.values.outputs.size(); ndx++)
+	{
+		const Value&		val			= specParams.caseSpec.values.outputs[ndx];
+		const char*	const	refTypeStr	= getDataTypeName(val.type.getBasicType());
+
+		decl << refTypeStr << " " << val.name << ";\n";
+	}
+
+	declareReferenceBlock(decl, specParams.caseSpec.values);
+	declareUniforms(decl, specParams.caseSpec.values);
+
+	params.insert(pair<string, string>("FRAGMENT_DECLARATIONS",	decl.str()));
+	params.insert(pair<string, string>("FRAGMENT_OUTPUT",		output.str()));
+	params.insert(pair<string, string>("FRAG_COLOR",			"dEQP_FragColor"));
+
+	return params;
+}
+
+map<string, string> generateGeometrySpecialization (const ProgramSpecializationParams& specParams)
+{
+	ostringstream		decl;
+	map<string, string>	params;
+
+	decl << "layout (triangles) in;\n";
+	decl << "layout (triangle_strip, max_vertices=3) out;\n";
+	decl << "\n";
+
+	declareUniforms(decl, specParams.caseSpec.values);
+
+	params.insert(pair<string, string>("GEOMETRY_DECLARATIONS",		decl.str()));
+
+	return params;
+}
+
+map<string, string> generateTessControlSpecialization (const ProgramSpecializationParams& specParams)
+{
+	ostringstream		decl;
+	ostringstream		output;
+	map<string, string>	params;
+
+	decl << "layout (vertices=3) out;\n";
+	decl << "\n";
+
+	declareUniforms(decl, specParams.caseSpec.values);
+
+	output <<	"gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
+				"gl_TessLevelInner[0] = 2.0;\n"
+				"gl_TessLevelInner[1] = 2.0;\n"
+				"gl_TessLevelOuter[0] = 2.0;\n"
+				"gl_TessLevelOuter[1] = 2.0;\n"
+				"gl_TessLevelOuter[2] = 2.0;\n"
+				"gl_TessLevelOuter[3] = 2.0;";
+
+	params.insert(pair<string, string>("TESSELLATION_CONTROL_DECLARATIONS",	decl.str()));
+	params.insert(pair<string, string>("TESSELLATION_CONTROL_OUTPUT",		output.str()));
+	params.insert(pair<string, string>("GL_MAX_PATCH_VERTICES",				de::toString(specParams.maxPatchVertices)));
+
+	return params;
+}
+
+map<string, string> generateTessEvalSpecialization (const ProgramSpecializationParams& specParams)
+{
+	ostringstream		decl;
+	ostringstream		output;
+	map<string, string>	params;
+
+	decl << "layout (triangles) in;\n";
+	decl << "\n";
+
+	declareUniforms(decl, specParams.caseSpec.values);
+
+	output <<	"gl_Position = gl_TessCoord[0] * gl_in[0].gl_Position + gl_TessCoord[1] * gl_in[1].gl_Position + gl_TessCoord[2] * gl_in[2].gl_Position;\n";
+
+	params.insert(pair<string, string>("TESSELLATION_EVALUATION_DECLARATIONS",	decl.str()));
+	params.insert(pair<string, string>("TESSELLATION_EVALUATION_OUTPUT",		output.str()));
+	params.insert(pair<string, string>("GL_MAX_PATCH_VERTICES",					de::toString(specParams.maxPatchVertices)));
+
+	return params;
+}
+
+void specializeShaderSources (ProgramSources&						dst,
+							  const ProgramSources&					src,
+							  const ProgramSpecializationParams&	specParams,
+							  glu::ShaderType						shaderType,
+							  map<string, string>					(*specializationGenerator) (const ProgramSpecializationParams& specParams))
+{
+	if (!src.sources[shaderType].empty())
+	{
+		const map<string, string>	tmplParams	= specializationGenerator(specParams);
+
+		for (size_t ndx = 0; ndx < src.sources[shaderType].size(); ++ndx)
+		{
+			const StringTemplate	tmpl			(src.sources[shaderType][ndx]);
+			const string			baseGLSLCode	= tmpl.specialize(tmplParams);
+			const string			sourceWithExts	= injectExtensionRequirements(baseGLSLCode, specParams.requiredExtensions, shaderType);
+
+			dst << glu::ShaderSource(shaderType, sourceWithExts);
+		}
+	}
+}
+
+void specializeProgramSources (glu::ProgramSources&					dst,
+							   const glu::ProgramSources&			src,
+							   const ProgramSpecializationParams&	specParams)
+{
+	specializeShaderSources(dst, src, specParams, glu::SHADERTYPE_VERTEX,					generateVertexSpecialization);
+	specializeShaderSources(dst, src, specParams, glu::SHADERTYPE_FRAGMENT,					generateFragmentSpecialization);
+	specializeShaderSources(dst, src, specParams, glu::SHADERTYPE_GEOMETRY,					generateGeometrySpecialization);
+	specializeShaderSources(dst, src, specParams, glu::SHADERTYPE_TESSELLATION_CONTROL,		generateTessControlSpecialization);
+	specializeShaderSources(dst, src, specParams, glu::SHADERTYPE_TESSELLATION_EVALUATION,	generateTessEvalSpecialization);
+
+	dst << glu::ProgramSeparable(src.separable);
+}
+
+struct ValueBufferLayout
+{
+	struct Entry
+	{
+		int		offset;
+		int		vecStride;	//! Applies to matrices only
+
+		Entry (void) : offset(0), vecStride(0) {}
+		Entry (int offset_, int vecStride_) : offset(offset_), vecStride(vecStride_) {}
+	};
+
+	vector<Entry>	entries;
+	int				size;
+
+	ValueBufferLayout (void) : size(0) {}
+};
+
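+// Computes a std140-style layout for the given values: each value is aligned to
+// its vector size in 32-bit words, with vec3s and matrix columns rounded up to
+// 16 bytes. Only the basic scalar/vector/matrix types used by the shader library
+// cases are handled. This layout is used for the uniform and reference buffers.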
+ValueBufferLayout computeStd140Layout (const vector<Value>& values)
+{
+	ValueBufferLayout layout;
+
+	layout.entries.resize(values.size());
+
+	for (size_t ndx = 0; ndx < values.size(); ++ndx)
+	{
+		const DataType	basicType	= values[ndx].type.getBasicType();
+		const bool		isMatrix	= isDataTypeMatrix(basicType);
+		const int		numVecs		= isMatrix ? getDataTypeMatrixNumColumns(basicType) : 1;
+		const DataType	vecType		= isMatrix ? glu::getDataTypeFloatVec(getDataTypeMatrixNumRows(basicType)) : basicType;
+		const int		vecSize		= getDataTypeScalarSize(vecType);
+		const int		alignment	= ((isMatrix || vecSize == 3) ? 4 : vecSize)*int(sizeof(deUint32));
+
+		layout.size			= deAlign32(layout.size, alignment);
+		layout.entries[ndx] = ValueBufferLayout::Entry(layout.size, alignment);
+		layout.size			+= alignment*(numVecs-1) + vecSize*int(sizeof(deUint32));
+	}
+
+	return layout;
+}
+
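+// As above, but with tighter std430-style alignment (matrix column stride is not
+// rounded up to 16 bytes); used for laying out the vertex input value buffer.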
+ValueBufferLayout computeStd430Layout (const vector<Value>& values)
+{
+	ValueBufferLayout layout;
+
+	layout.entries.resize(values.size());
+
+	for (size_t ndx = 0; ndx < values.size(); ++ndx)
+	{
+		const DataType	basicType	= values[ndx].type.getBasicType();
+		const int		numVecs		= isDataTypeMatrix(basicType) ? getDataTypeMatrixNumColumns(basicType) : 1;
+		const DataType	vecType		= isDataTypeMatrix(basicType) ? glu::getDataTypeFloatVec(getDataTypeMatrixNumRows(basicType)) : basicType;
+		const int		vecSize		= getDataTypeScalarSize(vecType);
+		const int		alignment	= (vecSize == 3 ? 4 : vecSize)*int(sizeof(deUint32));
+
+		layout.size			= deAlign32(layout.size, alignment);
+		layout.entries[ndx] = ValueBufferLayout::Entry(layout.size, alignment);
+		layout.size			+= alignment*(numVecs-1) + vecSize*int(sizeof(deUint32));
+	}
+
+	return layout;
+}
+
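+// Writes element arrayNdx of a value into dst at the position given by the
+// layout entry. Booleans are expanded to 32-bit integers (0 or ~0u) to match the
+// integer transport convention used by the generated shaders.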
+void copyToLayout (void* dst, const ValueBufferLayout::Entry& entryLayout, const Value& value, int arrayNdx)
+{
+	const DataType	basicType	= value.type.getBasicType();
+	const int		scalarSize	= getDataTypeScalarSize(basicType);
+	const int		numVecs		= isDataTypeMatrix(basicType) ? getDataTypeMatrixNumColumns(basicType) : 1;
+	const int		numComps	= isDataTypeMatrix(basicType) ? getDataTypeMatrixNumRows(basicType) : scalarSize;
+
+	DE_ASSERT(size_t((arrayNdx+1)*scalarSize) <= value.elements.size());
+
+	if (isDataTypeBoolOrBVec(basicType))
+	{
+		for (int vecNdx = 0; vecNdx < numVecs; vecNdx++)
+		{
+			for (int compNdx = 0; compNdx < numComps; compNdx++)
+			{
+				const deUint32 data = value.elements[arrayNdx*scalarSize + vecNdx*numComps + compNdx].bool32 ? ~0u : 0u;
+
+				deMemcpy((deUint8*)dst + entryLayout.offset + vecNdx*entryLayout.vecStride + compNdx * sizeof(deUint32),
+						 &data,
+						 sizeof(deUint32));
+			}
+		}
+	}
+	else
+	{
+		for (int vecNdx = 0; vecNdx < numVecs; vecNdx++)
+			deMemcpy((deUint8*)dst + entryLayout.offset + vecNdx*entryLayout.vecStride,
+					 &value.elements[arrayNdx*scalarSize + vecNdx*numComps],
+					 numComps*sizeof(deUint32));
+	}
+}
+
+void copyToLayout (void* dst, const ValueBufferLayout& layout, const vector<Value>& values, int arrayNdx)
+{
+	DE_ASSERT(layout.entries.size() == values.size());
+
+	for (size_t ndx = 0; ndx < values.size(); ndx++)
+		copyToLayout(dst, layout.entries[ndx], values[ndx], arrayNdx);
+}
+
+deUint32 getShaderStages (const ShaderCaseSpecification& spec)
+{
+	if (spec.caseType == glu::sl::CASETYPE_COMPLETE)
+	{
+		deUint32	stages	= 0u;
+
+		for (size_t progNdx = 0; progNdx < spec.programs.size(); progNdx++)
+		{
+			for (int shaderType = 0; shaderType < glu::SHADERTYPE_LAST; shaderType++)
+			{
+				if (!spec.programs[progNdx].sources.sources[shaderType].empty())
+					stages |= (1u << shaderType);
+			}
+		}
+
+		return stages;
+	}
+	else
+		return (1u << glu::SHADERTYPE_VERTEX) | (1u << glu::SHADERTYPE_FRAGMENT);
+}
+
+class PipelineProgram
+{
+public:
+								PipelineProgram		(Context& context, const ShaderCaseSpecification& spec);
+
+	deUint32					getStages			(void) const					{ return m_stages;							}
+
+	bool						hasShader			(glu::ShaderType type) const	{ return (m_stages & (1u << type)) != 0;	}
+	vk::VkShaderModule			getShader			(glu::ShaderType type) const	{ return *m_shaderModules[type];			}
+
+private:
+	const deUint32				m_stages;
+	Move<vk::VkShaderModule>	m_shaderModules[glu::SHADERTYPE_LAST];
+};
+
+PipelineProgram::PipelineProgram (Context& context, const ShaderCaseSpecification& spec)
+	: m_stages(getShaderStages(spec))
+{
+	// \note Currently only a single source program is supported as the framework lacks SPIR-V linking capability
+	TCU_CHECK_INTERNAL(spec.programs.size() == 1);
+
+	for (int shaderType = 0; shaderType < glu::SHADERTYPE_LAST; shaderType++)
+	{
+		if ((m_stages & (1u << shaderType)) != 0)
+		{
+			m_shaderModules[shaderType]	= vk::createShaderModule(context.getDeviceInterface(), context.getDevice(),
+																 context.getBinaryCollection().get(getShaderName((glu::ShaderType)shaderType, 0)), 0u);
+		}
+	}
+}
+
+vector<vk::VkPipelineShaderStageCreateInfo> getPipelineShaderStageCreateInfo (const PipelineProgram& program)
+{
+	vector<vk::VkPipelineShaderStageCreateInfo>	infos;
+
+	for (int shaderType = 0; shaderType < glu::SHADERTYPE_LAST; shaderType++)
+	{
+		if (program.hasShader((glu::ShaderType)shaderType))
+		{
+			const vk::VkPipelineShaderStageCreateInfo info =
+			{
+				vk::VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,	// sType
+				DE_NULL,													// pNext
+				(vk::VkPipelineShaderStageCreateFlags)0,
+				vk::getVkShaderStage((glu::ShaderType)shaderType),			// stage
+				program.getShader((glu::ShaderType)shaderType),				// module
+				"main",
+				DE_NULL,													// pSpecializationInfo
+			};
+
+			infos.push_back(info);
+		}
+	}
+
+	return infos;
+}
+
+Move<vk::VkBuffer> createBuffer (Context& context, vk::VkDeviceSize size, vk::VkBufferUsageFlags usageFlags)
+{
+	const deUint32					queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+	const vk::VkBufferCreateInfo	params				=
+	{
+		vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,	// sType
+		DE_NULL,									// pNext
+		0u,											// flags
+		size,										// size
+		usageFlags,									// usage
+		vk::VK_SHARING_MODE_EXCLUSIVE,				// sharingMode
+		1u,											// queueFamilyCount
+		&queueFamilyIndex,							// pQueueFamilyIndices
+	};
+
+	return vk::createBuffer(context.getDeviceInterface(), context.getDevice(), &params);
+}
+
+Move<vk::VkImage> createImage2D (Context& context, deUint32 width, deUint32 height, vk::VkFormat format, vk::VkImageTiling tiling, vk::VkImageUsageFlags usageFlags)
+{
+	const deUint32					queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+	const vk::VkImageCreateInfo		params				=
+	{
+		vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,	// sType
+		DE_NULL,									// pNext
+		0u,											// flags
+		vk::VK_IMAGE_TYPE_2D,						// imageType
+		format,										// format
+		{ width, height, 1u },						// extent
+		1u,											// mipLevels
+		1u,											// arraySize
+		vk::VK_SAMPLE_COUNT_1_BIT,					// samples
+		tiling,										// tiling
+		usageFlags,									// usage
+		vk::VK_SHARING_MODE_EXCLUSIVE,				// sharingMode
+		1u,											// queueFamilyCount
+		&queueFamilyIndex,							// pQueueFamilyIndices
+		vk::VK_IMAGE_LAYOUT_UNDEFINED,				// initialLayout
+	};
+
+	return vk::createImage(context.getDeviceInterface(), context.getDevice(), &params);
+}
+
+Move<vk::VkImageView> createAttachmentView (Context& context, vk::VkImage image, vk::VkFormat format)
+{
+	const vk::VkImageViewCreateInfo	params				=
+	{
+		vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,		// sType
+		DE_NULL,											// pNext
+		0u,													// flags
+		image,												// image
+		vk::VK_IMAGE_VIEW_TYPE_2D,							// viewType
+		format,												// format
+		vk::makeComponentMappingRGBA(),						// channels
+		{
+			vk::VK_IMAGE_ASPECT_COLOR_BIT,						// aspectMask
+			0u,													// baseMipLevel
+			1u,													// mipLevels
+			0u,													// baseArrayLayer
+			1u,													// arraySize
+		},													// subresourceRange
+	};
+
+	return vk::createImageView(context.getDeviceInterface(), context.getDevice(), &params);
+}
+
+Move<vk::VkRenderPass> createRenderPass (Context& context, vk::VkFormat colorAttFormat)
+{
+	const vk::VkAttachmentDescription	colorAttDesc		=
+	{
+		0u,														// flags
+		colorAttFormat,											// format
+		vk::VK_SAMPLE_COUNT_1_BIT,								// samples
+		vk::VK_ATTACHMENT_LOAD_OP_CLEAR,						// loadOp
+		vk::VK_ATTACHMENT_STORE_OP_STORE,						// storeOp
+		vk::VK_ATTACHMENT_LOAD_OP_DONT_CARE,					// stencilLoadOp
+		vk::VK_ATTACHMENT_STORE_OP_DONT_CARE,					// stencilStoreOp
+		vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// initialLayout
+		vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// finalLayout
+	};
+	const vk::VkAttachmentReference		colorAttRef			=
+	{
+		0u,														// attachment
+		vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,			// layout
+	};
+	const vk::VkAttachmentReference		dsAttRef			=
+	{
+		vk::VK_NO_ATTACHMENT,									// attachment
+		vk::VK_IMAGE_LAYOUT_GENERAL,							// layout
+	};
+	const vk::VkSubpassDescription		subpassDesc			=
+	{
+		(vk::VkSubpassDescriptionFlags)0,
+		vk::VK_PIPELINE_BIND_POINT_GRAPHICS,					// pipelineBindPoint
+		0u,														// inputCount
+		DE_NULL,												// pInputAttachments
+		1u,														// colorCount
+		&colorAttRef,											// pColorAttachments
+		DE_NULL,												// pResolveAttachments
+		&dsAttRef,												// depthStencilAttachment
+		0u,														// preserveCount
+		DE_NULL,												// pPreserveAttachments
+	};
+	const vk::VkRenderPassCreateInfo	renderPassParams	=
+	{
+		vk::VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,			// sType
+		DE_NULL,												// pNext
+		(vk::VkRenderPassCreateFlags)0,
+		1u,														// attachmentCount
+		&colorAttDesc,											// pAttachments
+		1u,														// subpassCount
+		&subpassDesc,											// pSubpasses
+		0u,														// dependencyCount
+		DE_NULL,												// pDependencies
+	};
+
+	return vk::createRenderPass(context.getDeviceInterface(), context.getDevice(), &renderPassParams);
+}
+
+vk::VkShaderStageFlags getVkStageFlags (deUint32 stages)
+{
+	vk::VkShaderStageFlags	vkStages	= 0u;
+
+	for (int shaderType = 0; shaderType < glu::SHADERTYPE_LAST; shaderType++)
+	{
+		if ((stages & (1u << shaderType)) != 0)
+			vkStages |= vk::getVkShaderStage((glu::ShaderType)shaderType);
+	}
+
+	return vkStages;
+}
+
+Move<vk::VkDescriptorSetLayout> createDescriptorSetLayout (Context& context, deUint32 shaderStages)
+{
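+	// Binding 0 holds the reference (expected output) values and is read only by
+	// the generated comparison code in the fragment shader; binding 1 holds the
+	// user-specified uniforms and is visible to all active shader stages.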
+	DE_STATIC_ASSERT(REFERENCE_UNIFORM_BINDING	== 0);
+	DE_STATIC_ASSERT(USER_UNIFORM_BINDING		== 1);
+
+	return vk::DescriptorSetLayoutBuilder()
+				.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, vk::VK_SHADER_STAGE_FRAGMENT_BIT)
+				.addSingleBinding(vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, getVkStageFlags(shaderStages))
+				.build(context.getDeviceInterface(), context.getDevice());
+}
+
+Move<vk::VkPipelineLayout> createPipelineLayout (Context& context, vk::VkDescriptorSetLayout descriptorSetLayout)
+{
+	const vk::VkPipelineLayoutCreateInfo	params	=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,	// sType
+		DE_NULL,											// pNext
+		(vk::VkPipelineLayoutCreateFlags)0,
+		1u,													// descriptorSetCount
+		&descriptorSetLayout,								// pSetLayouts
+		0u,													// pushConstantRangeCount
+		DE_NULL,											// pPushConstantRanges
+	};
+
+	return vk::createPipelineLayout(context.getDeviceInterface(), context.getDevice(), &params);
+}
+
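+// Maps a scalar type and vector width to the vertex input format used to fetch
+// input values. Booleans are fetched as 32-bit unsigned integers, matching the
+// 0 / ~0u encoding written by copyToLayout().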
+vk::VkFormat getVecFormat (DataType scalarType, int scalarSize)
+{
+	switch (scalarType)
+	{
+		case glu::TYPE_FLOAT:
+		{
+			const vk::VkFormat vecFmts[] =
+			{
+				vk::VK_FORMAT_R32_SFLOAT,
+				vk::VK_FORMAT_R32G32_SFLOAT,
+				vk::VK_FORMAT_R32G32B32_SFLOAT,
+				vk::VK_FORMAT_R32G32B32A32_SFLOAT,
+			};
+			return de::getSizedArrayElement<4>(vecFmts, scalarSize-1);
+		}
+
+		case glu::TYPE_INT:
+		{
+			const vk::VkFormat vecFmts[] =
+			{
+				vk::VK_FORMAT_R32_SINT,
+				vk::VK_FORMAT_R32G32_SINT,
+				vk::VK_FORMAT_R32G32B32_SINT,
+				vk::VK_FORMAT_R32G32B32A32_SINT,
+			};
+			return de::getSizedArrayElement<4>(vecFmts, scalarSize-1);
+		}
+
+		case glu::TYPE_UINT:
+		{
+			const vk::VkFormat vecFmts[] =
+			{
+				vk::VK_FORMAT_R32_UINT,
+				vk::VK_FORMAT_R32G32_UINT,
+				vk::VK_FORMAT_R32G32B32_UINT,
+				vk::VK_FORMAT_R32G32B32A32_UINT,
+			};
+			return de::getSizedArrayElement<4>(vecFmts, scalarSize-1);
+		}
+
+		case glu::TYPE_BOOL:
+		{
+			const vk::VkFormat vecFmts[] =
+			{
+				vk::VK_FORMAT_R32_UINT,
+				vk::VK_FORMAT_R32G32_UINT,
+				vk::VK_FORMAT_R32G32B32_UINT,
+				vk::VK_FORMAT_R32G32B32A32_UINT,
+			};
+			return de::getSizedArrayElement<4>(vecFmts, scalarSize-1);
+		}
+
+		default:
+			DE_FATAL("Unknown scalar type");
+			return vk::VK_FORMAT_R8G8B8A8_UINT;
+	}
+}
+
+vector<vk::VkVertexInputAttributeDescription> getVertexAttributeDescriptions (const vector<Value>& inputValues, const ValueBufferLayout& layout)
+{
+	vector<vk::VkVertexInputAttributeDescription>	attribs;
+
+	// Position
+	{
+		const vk::VkVertexInputAttributeDescription	posDesc	=
+		{
+			0u,								// location
+			0u,								// binding
+			vk::VK_FORMAT_R32G32_SFLOAT,	// format
+			0u,								// offset
+		};
+
+		attribs.push_back(posDesc);
+	}
+
+	// Input values
+	for (size_t inputNdx = 0; inputNdx < inputValues.size(); inputNdx++)
+	{
+		const Value&					input		= inputValues[inputNdx];
+		const ValueBufferLayout::Entry&	layoutEntry	= layout.entries[inputNdx];
+		const DataType					basicType	= input.type.getBasicType();
+		const int						numVecs		= isDataTypeMatrix(basicType)
+													? getDataTypeMatrixNumColumns(basicType)
+													: 1;
+		const int						vecSize		= isDataTypeMatrix(basicType)
+													? getDataTypeMatrixNumRows(basicType)
+													: getDataTypeScalarSize(basicType);
+		const DataType					scalarType	= getDataTypeScalarType(basicType);
+		const vk::VkFormat				vecFmt		= getVecFormat(scalarType, vecSize);
+
+		for (int vecNdx = 0; vecNdx < numVecs; vecNdx++)
+		{
+			const deUint32								curLoc	= (deUint32)attribs.size();
+			const deUint32								offset	= (deUint32)(layoutEntry.offset + layoutEntry.vecStride*vecNdx);
+			const vk::VkVertexInputAttributeDescription	desc	=
+			{
+				curLoc,		// location
+				1u,			// binding
+				vecFmt,		// format
+				offset,		// offset
+			};
+
+			attribs.push_back(desc);
+		}
+	}
+
+	return attribs;
+}
+
+Move<vk::VkPipeline> createPipeline (Context&					context,
+									 const vector<Value>&		inputValues,
+									 const ValueBufferLayout&	inputLayout,
+									 const PipelineProgram&		program,
+									 vk::VkRenderPass			renderPass,
+									 vk::VkPipelineLayout		pipelineLayout,
+									 tcu::UVec2					renderSize)
+{
+	const vector<vk::VkPipelineShaderStageCreateInfo>	shaderStageParams		(getPipelineShaderStageCreateInfo(program));
+	const vector<vk::VkVertexInputAttributeDescription>	vertexAttribParams		(getVertexAttributeDescriptions(inputValues, inputLayout));
+	const vk::VkPipelineDepthStencilStateCreateInfo		depthStencilParams		=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,		// sType
+		DE_NULL,															// pNext
+		(vk::VkPipelineDepthStencilStateCreateFlags)0,
+		vk::VK_FALSE,														// depthTestEnable
+		vk::VK_FALSE,														// depthWriteEnable
+		vk::VK_COMPARE_OP_ALWAYS,											// depthCompareOp
+		vk::VK_FALSE,														// depthBoundsTestEnable
+		vk::VK_FALSE,														// stencilTestEnable
+		{
+			vk::VK_STENCIL_OP_KEEP,												// stencilFailOp;
+			vk::VK_STENCIL_OP_KEEP,												// stencilPassOp;
+			vk::VK_STENCIL_OP_KEEP,												// stencilDepthFailOp;
+			vk::VK_COMPARE_OP_ALWAYS,											// stencilCompareOp;
+			0u,																	// stencilCompareMask
+			0u,																	// stencilWriteMask
+			0u,																	// stencilReference
+		},																	// front;
+		{
+			vk::VK_STENCIL_OP_KEEP,												// stencilFailOp;
+			vk::VK_STENCIL_OP_KEEP,												// stencilPassOp;
+			vk::VK_STENCIL_OP_KEEP,												// stencilDepthFailOp;
+			vk::VK_COMPARE_OP_ALWAYS,											// stencilCompareOp;
+			0u,																	// stencilCompareMask
+			0u,																	// stencilWriteMask
+			0u,																	// stencilReference
+		},																	// back;
+		-1.0f,																// minDepthBounds
+		+1.0f,																// maxDepthBounds
+	};
+	const vk::VkViewport								viewport0				=
+	{
+		0.0f,																// originX
+		0.0f,																// originY
+		(float)renderSize.x(),												// width
+		(float)renderSize.y(),												// height
+		0.0f,																// minDepth
+		1.0f,																// maxDepth
+	};
+	const vk::VkRect2D									scissor0				=
+	{
+		{ 0u, 0u },															// offset
+		{ renderSize.x(), renderSize.y() }									// extent
+	};
+	const vk::VkPipelineViewportStateCreateInfo			viewportParams			=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,			// sType
+		DE_NULL,															// pNext
+		(vk::VkPipelineViewportStateCreateFlags)0,
+		1u,																	// viewportCount
+		&viewport0,															// pViewports
+		1u,																	// scissorCount
+		&scissor0,															// pScissors
+	};
+	const vk::VkPipelineMultisampleStateCreateInfo		multisampleParams		=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,		// sType
+		DE_NULL,															// pNext
+		(vk::VkPipelineMultisampleStateCreateFlags)0,
+		vk::VK_SAMPLE_COUNT_1_BIT,											// rasterSamples
+		DE_FALSE,															// sampleShadingEnable
+		0.0f,																// minSampleShading
+		DE_NULL,															// pSampleMask
+		vk::VK_FALSE,														// alphaToCoverageEnable
+		vk::VK_FALSE,														// alphaToOneEnable
+	};
+	const vk::VkPipelineRasterizationStateCreateInfo	rasterParams			=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,		// sType
+		DE_NULL,															// pNext
+		(vk::VkPipelineRasterizationStateCreateFlags)0,
+		DE_TRUE,															// depthClipEnable
+		DE_FALSE,															// rasterizerDiscardEnable
+		vk::VK_POLYGON_MODE_FILL,											// fillMode
+		vk::VK_CULL_MODE_NONE,												// cullMode;
+		vk::VK_FRONT_FACE_COUNTER_CLOCKWISE,								// frontFace;
+		vk::VK_FALSE,														// depthBiasEnable
+		0.0f,																// depthBiasConstantFactor
+		0.0f,																// depthBiasClamp
+		0.0f,																// depthBiasSlopeFactor
+		1.0f,																// lineWidth
+	};
+	const vk::VkPipelineInputAssemblyStateCreateInfo	inputAssemblyParams		=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,	// sType
+		DE_NULL,															// pNext
+		(vk::VkPipelineInputAssemblyStateCreateFlags)0,
+		vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,							// topology
+		DE_FALSE,															// primitiveRestartEnable
+	};
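+	// Binding 0 supplies the quad positions per-vertex; binding 1 supplies the
+	// case input values with zero stride and per-instance step rate, so every
+	// vertex of the single drawn instance reads the same set of values.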
+	const vk::VkVertexInputBindingDescription			vertexBindings[]		=
+	{
+		{
+			0u,																	// binding
+			(deUint32)sizeof(tcu::Vec2),										// stride
+			vk::VK_VERTEX_INPUT_RATE_VERTEX,									// stepRate
+		},
+		{
+			1u,																	// binding
+			0u,																	// stride
+			vk::VK_VERTEX_INPUT_RATE_INSTANCE,									// stepRate
+		},
+	};
+	const vk::VkPipelineVertexInputStateCreateInfo		vertexInputStateParams	=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,		// sType
+		DE_NULL,															// pNext
+		(vk::VkPipelineVertexInputStateCreateFlags)0,
+		DE_LENGTH_OF_ARRAY(vertexBindings),									// bindingCount
+		vertexBindings,														// pVertexBindingDescriptions
+		(deUint32)vertexAttribParams.size(),								// attributeCount
+		&vertexAttribParams[0],												// pVertexAttributeDescriptions
+	};
+	const vk::VkColorComponentFlags						allCompMask				= vk::VK_COLOR_COMPONENT_R_BIT
+																				| vk::VK_COLOR_COMPONENT_G_BIT
+																				| vk::VK_COLOR_COMPONENT_B_BIT
+																				| vk::VK_COLOR_COMPONENT_A_BIT;
+	const vk::VkPipelineColorBlendAttachmentState		attBlendParams			=
+	{
+		vk::VK_FALSE,														// blendEnable
+		vk::VK_BLEND_FACTOR_ONE,											// srcBlendColor
+		vk::VK_BLEND_FACTOR_ZERO,											// destBlendColor
+		vk::VK_BLEND_OP_ADD,												// blendOpColor
+		vk::VK_BLEND_FACTOR_ONE,											// srcBlendAlpha
+		vk::VK_BLEND_FACTOR_ZERO,											// destBlendAlpha
+		vk::VK_BLEND_OP_ADD,												// blendOpAlpha
+		allCompMask,														// componentWriteMask
+	};
+	const vk::VkPipelineColorBlendStateCreateInfo		blendParams				=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,		// sType
+		DE_NULL,															// pNext
+		(vk::VkPipelineColorBlendStateCreateFlags)0,
+		vk::VK_FALSE,														// logicOpEnable
+		vk::VK_LOGIC_OP_COPY,												// logicOp
+		1u,																	// attachmentCount
+		&attBlendParams,													// pAttachments
+		{ 0.0f, 0.0f, 0.0f, 0.0f },											// blendConstants
+	};
+	const vk::VkPipelineDynamicStateCreateInfo			dynStateParams			=
+	{
+		vk::VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,			// sType
+		DE_NULL,															// pNext
+		(vk::VkPipelineDynamicStateCreateFlags)0,
+		0u,																	// dynamicStateCount
+		DE_NULL,															// pDynamicStates
+	};
+	const vk::VkGraphicsPipelineCreateInfo				pipelineParams			=
+	{
+		vk::VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,				// sType
+		DE_NULL,															// pNext
+		0u,																	// flags
+		(deUint32)shaderStageParams.size(),									// stageCount
+		&shaderStageParams[0],												// pStages
+		&vertexInputStateParams,											// pVertexInputState
+		&inputAssemblyParams,												// pInputAssemblyState
+		DE_NULL,															// pTessellationState
+		&viewportParams,													// pViewportState
+		&rasterParams,														// pRasterState
+		&multisampleParams,													// pMultisampleState
+		&depthStencilParams,												// pDepthStencilState
+		&blendParams,														// pColorBlendState
+		&dynStateParams,													// pDynamicState
+		pipelineLayout,														// layout
+		renderPass,															// renderPass
+		0u,																	// subpass
+		DE_NULL,															// basePipelineHandle
+		0u,																	// basePipelineIndex
+	};
+
+	return vk::createGraphicsPipeline(context.getDeviceInterface(), context.getDevice(), DE_NULL, &pipelineParams);
+}
+
+Move<vk::VkFramebuffer> createFramebuffer (Context& context, vk::VkRenderPass renderPass, vk::VkImageView colorAttView, int width, int height)
+{
+	const vk::VkFramebufferCreateInfo	framebufferParams	=
+	{
+		vk::VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,	// sType
+		DE_NULL,										// pNext
+		(vk::VkFramebufferCreateFlags)0,
+		renderPass,										// renderPass
+		1u,												// attachmentCount
+		&colorAttView,									// pAttachments
+		(deUint32)width,								// width
+		(deUint32)height,								// height
+		1u,												// layers
+	};
+
+	return vk::createFramebuffer(context.getDeviceInterface(), context.getDevice(), &framebufferParams);
+}
+
+Move<vk::VkCommandPool> createCommandPool (Context& context)
+{
+	const deUint32						queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+	const vk::VkCommandPoolCreateInfo	params				=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,		// sType
+		DE_NULL,											// pNext
+		(vk::VkCommandPoolCreateFlags)0,
+		queueFamilyIndex,									// queueFamilyIndex
+	};
+
+	return vk::createCommandPool(context.getDeviceInterface(), context.getDevice(), &params);
+}
+
+Move<vk::VkDescriptorPool> createDescriptorPool (Context& context)
+{
+	return vk::DescriptorPoolBuilder()
+				.addType(vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 2u)
+				.build(context.getDeviceInterface(), context.getDevice(), vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
+}
+
+Move<vk::VkDescriptorSet> allocateDescriptorSet (Context& context, vk::VkDescriptorPool descriptorPool, vk::VkDescriptorSetLayout setLayout)
+{
+	const vk::VkDescriptorSetAllocateInfo	params	=
+	{
+		vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+		DE_NULL,
+		descriptorPool,
+		1u,
+		&setLayout
+	};
+
+	return vk::allocateDescriptorSet(context.getDeviceInterface(), context.getDevice(), &params);
+}
+
+Move<vk::VkCommandBuffer> allocateCommandBuffer (Context& context, vk::VkCommandPool cmdPool)
+{
+	const vk::VkCommandBufferAllocateInfo	params	=
+	{
+		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,	// sType
+		DE_NULL,											// pNext
+		cmdPool,											// commandPool
+		vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY,				// level
+		1u,													// bufferCount
+	};
+
+	return vk::allocateCommandBuffer(context.getDeviceInterface(), context.getDevice(), &params);
+}
+
+MovePtr<vk::Allocation> allocateAndBindMemory (Context& context, vk::VkBuffer buffer, vk::MemoryRequirement memReqs)
+{
+	const vk::DeviceInterface&		vkd		= context.getDeviceInterface();
+	const vk::VkMemoryRequirements	bufReqs	= vk::getBufferMemoryRequirements(vkd, context.getDevice(), buffer);
+	MovePtr<vk::Allocation>			memory	= context.getDefaultAllocator().allocate(bufReqs, memReqs);
+
+	vkd.bindBufferMemory(context.getDevice(), buffer, memory->getMemory(), memory->getOffset());
+
+	return memory;
+}
+
+MovePtr<vk::Allocation> allocateAndBindMemory (Context& context, vk::VkImage image, vk::MemoryRequirement memReqs)
+{
+	const vk::DeviceInterface&		vkd		= context.getDeviceInterface();
+	const vk::VkMemoryRequirements	imgReqs	= vk::getImageMemoryRequirements(vkd, context.getDevice(), image);
+	MovePtr<vk::Allocation>			memory	= context.getDefaultAllocator().allocate(imgReqs, memReqs);
+
+	vkd.bindImageMemory(context.getDevice(), image, memory->getMemory(), memory->getOffset());
+
+	return memory;
+}
+
+void writeValuesToMem (Context& context, const vk::Allocation& dst, const ValueBufferLayout& layout, const vector<Value>& values, int arrayNdx)
+{
+	copyToLayout(dst.getHostPtr(), layout, values, arrayNdx);
+
+	// \note Buffers are not allocated with coherency / uncached requirement so we need to manually flush CPU write caches
+	flushMappedMemoryRange(context.getDeviceInterface(), context.getDevice(), dst.getMemory(), dst.getOffset(), (vk::VkDeviceSize)layout.size);
+}
+
+class ShaderCaseInstance : public TestInstance
+{
+public:
+													ShaderCaseInstance		(Context& context, const ShaderCaseSpecification& spec);
+													~ShaderCaseInstance		(void);
+
+	TestStatus										iterate					(void);
+
+private:
+	enum
+	{
+		RENDER_WIDTH		= 64,
+		RENDER_HEIGHT		= 64,
+
+		POSITIONS_OFFSET	= 0,
+		POSITIONS_SIZE		= (int)sizeof(Vec2)*4,
+
+		INDICES_OFFSET		= POSITIONS_SIZE,
+		INDICES_SIZE		= (int)sizeof(deUint16)*6,
+
+		TOTAL_POS_NDX_SIZE	= POSITIONS_SIZE+INDICES_SIZE
+	};
+
+	const ShaderCaseSpecification&					m_spec;
+
+	const Unique<vk::VkBuffer>						m_posNdxBuffer;
+	const UniquePtr<vk::Allocation>					m_posNdxMem;
+
+	const ValueBufferLayout							m_inputLayout;
+	const Unique<vk::VkBuffer>						m_inputBuffer;			// Input values (attributes). Can be NULL if no inputs present
+	const UniquePtr<vk::Allocation>					m_inputMem;				// Input memory, can be NULL if no input buffer exists
+
+	const ValueBufferLayout							m_referenceLayout;
+	const Unique<vk::VkBuffer>						m_referenceBuffer;		// Output (reference) values. Can be NULL if no outputs present
+	const UniquePtr<vk::Allocation>					m_referenceMem;			// Output (reference) memory, can be NULL if no reference buffer exists
+
+	const ValueBufferLayout							m_uniformLayout;
+	const Unique<vk::VkBuffer>						m_uniformBuffer;		// Uniform values. Can be NULL if no uniforms present
+	const UniquePtr<vk::Allocation>					m_uniformMem;			// Uniform memory, can be NULL if no uniform buffer exists
+
+	const Unique<vk::VkBuffer>						m_readImageBuffer;
+	const UniquePtr<vk::Allocation>					m_readImageMem;
+
+	const Unique<vk::VkImage>						m_rtImage;
+	const UniquePtr<vk::Allocation>					m_rtMem;
+	const Unique<vk::VkImageView>					m_rtView;
+
+	const Unique<vk::VkRenderPass>					m_renderPass;
+	const Unique<vk::VkFramebuffer>					m_framebuffer;
+	const PipelineProgram							m_program;
+	const Unique<vk::VkDescriptorSetLayout>			m_descriptorSetLayout;
+	const Unique<vk::VkPipelineLayout>				m_pipelineLayout;
+	const Unique<vk::VkPipeline>					m_pipeline;
+
+	const Unique<vk::VkDescriptorPool>				m_descriptorPool;
+	const Unique<vk::VkDescriptorSet>				m_descriptorSet;
+
+	const Unique<vk::VkCommandPool>					m_cmdPool;
+	const Unique<vk::VkCommandBuffer>				m_cmdBuffer;
+
+	int												m_subCaseNdx;
+};
+
+ShaderCaseInstance::ShaderCaseInstance (Context& context, const ShaderCaseSpecification& spec)
+	: TestInstance			(context)
+	, m_spec				(spec)
+
+	, m_posNdxBuffer		(createBuffer(context, (vk::VkDeviceSize)TOTAL_POS_NDX_SIZE, vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT|vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT))
+	, m_posNdxMem			(allocateAndBindMemory(context, *m_posNdxBuffer, vk::MemoryRequirement::HostVisible))
+
+	, m_inputLayout			(computeStd430Layout(spec.values.inputs))
+	, m_inputBuffer			(m_inputLayout.size > 0 ? createBuffer(context, (vk::VkDeviceSize)m_inputLayout.size, vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT) : Move<vk::VkBuffer>())
+	, m_inputMem			(m_inputLayout.size > 0 ? allocateAndBindMemory(context, *m_inputBuffer, vk::MemoryRequirement::HostVisible) : MovePtr<vk::Allocation>())
+
+	, m_referenceLayout		(computeStd140Layout(spec.values.outputs))
+	, m_referenceBuffer		(m_referenceLayout.size > 0 ? createBuffer(context, (vk::VkDeviceSize)m_referenceLayout.size, vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) : Move<vk::VkBuffer>())
+	, m_referenceMem		(m_referenceLayout.size > 0 ? allocateAndBindMemory(context, *m_referenceBuffer, vk::MemoryRequirement::HostVisible) : MovePtr<vk::Allocation>())
+
+	, m_uniformLayout		(computeStd140Layout(spec.values.uniforms))
+	, m_uniformBuffer		(m_uniformLayout.size > 0 ? createBuffer(context, (vk::VkDeviceSize)m_uniformLayout.size, vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) : Move<vk::VkBuffer>())
+	, m_uniformMem			(m_uniformLayout.size > 0 ? allocateAndBindMemory(context, *m_uniformBuffer, vk::MemoryRequirement::HostVisible) : MovePtr<vk::Allocation>())
+
+	, m_readImageBuffer		(createBuffer(context, (vk::VkDeviceSize)(RENDER_WIDTH*RENDER_HEIGHT*4), vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT))
+	, m_readImageMem		(allocateAndBindMemory(context, *m_readImageBuffer, vk::MemoryRequirement::HostVisible))
+
+	, m_rtImage				(createImage2D(context, RENDER_WIDTH, RENDER_HEIGHT, vk::VK_FORMAT_R8G8B8A8_UNORM, vk::VK_IMAGE_TILING_OPTIMAL, vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT))
+	, m_rtMem				(allocateAndBindMemory(context, *m_rtImage, vk::MemoryRequirement::Any))
+	, m_rtView				(createAttachmentView(context, *m_rtImage, vk::VK_FORMAT_R8G8B8A8_UNORM))
+
+	, m_renderPass			(createRenderPass(context, vk::VK_FORMAT_R8G8B8A8_UNORM))
+	, m_framebuffer			(createFramebuffer(context, *m_renderPass, *m_rtView, RENDER_WIDTH, RENDER_HEIGHT))
+	, m_program				(context, spec)
+	, m_descriptorSetLayout	(createDescriptorSetLayout(context, m_program.getStages()))
+	, m_pipelineLayout		(createPipelineLayout(context, *m_descriptorSetLayout))
+	, m_pipeline			(createPipeline(context, spec.values.inputs, m_inputLayout, m_program, *m_renderPass, *m_pipelineLayout, tcu::UVec2(RENDER_WIDTH, RENDER_HEIGHT)))
+
+	, m_descriptorPool		(createDescriptorPool(context))
+	, m_descriptorSet		(allocateDescriptorSet(context, *m_descriptorPool, *m_descriptorSetLayout))
+
+	, m_cmdPool				(createCommandPool(context))
+	, m_cmdBuffer			(allocateCommandBuffer(context, *m_cmdPool))
+
+	, m_subCaseNdx			(0)
+{
+	const vk::DeviceInterface&	vkd					= context.getDeviceInterface();
+	const deUint32				queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
+
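+	// Upload the full-viewport quad (two triangles) shared by all shader cases.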
+	{
+		const Vec2			s_positions[]	=
+		{
+			Vec2(-1.0f, -1.0f),
+			Vec2(-1.0f, +1.0f),
+			Vec2(+1.0f, -1.0f),
+			Vec2(+1.0f, +1.0f)
+		};
+		const deUint16		s_indices[]		=
+		{
+			0, 1, 2,
+			1, 3, 2
+		};
+
+		DE_STATIC_ASSERT(sizeof(s_positions) == POSITIONS_SIZE);
+		DE_STATIC_ASSERT(sizeof(s_indices) == INDICES_SIZE);
+
+		deMemcpy((deUint8*)m_posNdxMem->getHostPtr() + POSITIONS_OFFSET,	&s_positions[0],	sizeof(s_positions));
+		deMemcpy((deUint8*)m_posNdxMem->getHostPtr() + INDICES_OFFSET,		&s_indices[0],		sizeof(s_indices));
+
+		flushMappedMemoryRange(m_context.getDeviceInterface(), context.getDevice(), m_posNdxMem->getMemory(), m_posNdxMem->getOffset(), sizeof(s_positions)+sizeof(s_indices));
+	}
+
+	if (!m_spec.values.uniforms.empty())
+	{
+		const vk::VkDescriptorBufferInfo	bufInfo	=
+		{
+			*m_uniformBuffer,
+			(vk::VkDeviceSize)0,	// offset
+			(vk::VkDeviceSize)m_uniformLayout.size
+		};
+
+		vk::DescriptorSetUpdateBuilder()
+			.writeSingle(*m_descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(USER_UNIFORM_BINDING),
+						 vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, &bufInfo)
+			.update(vkd, m_context.getDevice());
+	}
+
+	if (!m_spec.values.outputs.empty())
+	{
+		const vk::VkDescriptorBufferInfo	bufInfo	=
+		{
+			*m_referenceBuffer,
+			(vk::VkDeviceSize)0,	// offset
+			(vk::VkDeviceSize)m_referenceLayout.size
+		};
+
+		vk::DescriptorSetUpdateBuilder()
+			.writeSingle(*m_descriptorSet, vk::DescriptorSetUpdateBuilder::Location::binding(REFERENCE_UNIFORM_BINDING),
+						 vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, &bufInfo)
+			.update(vkd, m_context.getDevice());
+	}
+
+	// Record command buffer
+
+	{
+		const vk::VkCommandBufferBeginInfo beginInfo	=
+		{
+			vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,	// sType
+			DE_NULL,											// pNext
+			0u,													// flags
+			(const vk::VkCommandBufferInheritanceInfo*)DE_NULL,
+		};
+
+		VK_CHECK(vkd.beginCommandBuffer(*m_cmdBuffer, &beginInfo));
+	}
+
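+	// Make host writes to vertex and uniform data visible to the GPU and transition the color target for rendering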
+	{
+		const vk::VkMemoryBarrier		vertFlushBarrier	=
+		{
+			vk::VK_STRUCTURE_TYPE_MEMORY_BARRIER,													// sType
+			DE_NULL,																				// pNext
+			vk::VK_ACCESS_HOST_WRITE_BIT,															// srcAccessMask
+			vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT|vk::VK_ACCESS_UNIFORM_READ_BIT,					// dstAccessMask
+		};
+		const vk::VkImageMemoryBarrier	colorAttBarrier		=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// sType
+			DE_NULL,										// pNext
+			0u,												// srcAccessMask
+			vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,		// dstAccessMask
+			vk::VK_IMAGE_LAYOUT_UNDEFINED,					// oldLayout
+			vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	// newLayout
+			queueFamilyIndex,								// srcQueueFamilyIndex
+			queueFamilyIndex,								// dstQueueFamilyIndex
+			*m_rtImage,										// image
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,					// aspectMask
+				0u,												// baseMipLevel
+				1u,												// levelCount
+				0u,												// baseArrayLayer
+				1u,												// layerCount
+			}												// subresourceRange
+		};
+
+		vkd.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, (vk::VkDependencyFlags)0,
+							   1, &vertFlushBarrier,
+							   0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+							   1, &colorAttBarrier);
+	}
+
+	{
+		const vk::VkClearValue			clearValue		= vk::makeClearValueColorF32(0.125f, 0.25f, 0.75f, 1.0f);
+		const vk::VkRenderPassBeginInfo	passBeginInfo	=
+		{
+			vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,	// sType
+			DE_NULL,										// pNext
+			*m_renderPass,									// renderPass
+			*m_framebuffer,									// framebuffer
+			{ { 0, 0 }, { RENDER_WIDTH, RENDER_HEIGHT } },	// renderArea
+			1u,												// clearValueCount
+			&clearValue,									// pClearValues
+		};
+
+		vkd.cmdBeginRenderPass(*m_cmdBuffer, &passBeginInfo, vk::VK_SUBPASS_CONTENTS_INLINE);
+	}
+
+	vkd.cmdBindPipeline			(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
+	vkd.cmdBindDescriptorSets	(*m_cmdBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &*m_descriptorSet, 0u, DE_NULL);
+
+	{
+		const vk::VkBuffer		buffers[]	= { *m_posNdxBuffer, *m_inputBuffer };
+		const vk::VkDeviceSize	offsets[]	= { POSITIONS_OFFSET, 0u };
+		const deUint32			numBuffers	= buffers[1] != 0 ? 2u : 1u;
+		vkd.cmdBindVertexBuffers(*m_cmdBuffer, 0u, numBuffers, buffers, offsets);
+	}
+
+	vkd.cmdBindIndexBuffer	(*m_cmdBuffer, *m_posNdxBuffer, (vk::VkDeviceSize)INDICES_OFFSET, vk::VK_INDEX_TYPE_UINT16);
+	vkd.cmdDrawIndexed		(*m_cmdBuffer, 6u, 1u, 0u, 0u, 0u);
+	vkd.cmdEndRenderPass	(*m_cmdBuffer);
+
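+	// Wait for rendering to complete and transition the color target for the readback copy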
+	{
+		const vk::VkImageMemoryBarrier	renderFinishBarrier	=
+		{
+			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,		// sType
+			DE_NULL,										// pNext
+			vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,		// srcAccessMask
+			vk::VK_ACCESS_TRANSFER_READ_BIT,				// dstAccessMask
+			vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,	// oldLayout
+			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,		// newLayout
+			queueFamilyIndex,								// srcQueueFamilyIndex
+			queueFamilyIndex,								// dstQueueFamilyIndex
+			*m_rtImage,										// image
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,					// aspectMask
+				0u,												// baseMipLevel
+				1u,												// levelCount
+				0u,												// baseArrayLayer
+				1u,												// layerCount
+			}												// subresourceRange
+		};
+
+		vkd.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0,
+							   0, (const vk::VkMemoryBarrier*)DE_NULL,
+							   0, (const vk::VkBufferMemoryBarrier*)DE_NULL,
+							   1, &renderFinishBarrier);
+	}
+
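+	// Copy the rendered image into the host-visible readback buffer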
+	{
+		const vk::VkBufferImageCopy	copyParams	=
+		{
+			(vk::VkDeviceSize)0u,					// bufferOffset
+			(deUint32)RENDER_WIDTH,					// bufferRowLength
+			(deUint32)RENDER_HEIGHT,				// bufferImageHeight
+			{
+				vk::VK_IMAGE_ASPECT_COLOR_BIT,			// aspectMask
+				0u,										// mipLevel
+				0u,										// baseArrayLayer
+				1u,										// layerCount
+			},										// imageSubresource
+			{ 0u, 0u, 0u },							// imageOffset
+			{ RENDER_WIDTH, RENDER_HEIGHT, 1u }		// imageExtent
+		};
+
+		vkd.cmdCopyImageToBuffer(*m_cmdBuffer, *m_rtImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *m_readImageBuffer, 1u, &copyParams);
+	}
+
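+	// Make the copied pixels visible to host reads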
+	{
+		const vk::VkBufferMemoryBarrier	copyFinishBarrier	=
+		{
+			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,		// sType
+			DE_NULL,											// pNext
+			vk::VK_ACCESS_TRANSFER_WRITE_BIT,					// srcAccessMask
+			vk::VK_ACCESS_HOST_READ_BIT,						// dstAccessMask
+			queueFamilyIndex,									// srcQueueFamilyIndex
+			queueFamilyIndex,									// dstQueueFamilyIndex
+			*m_readImageBuffer,									// buffer
+			0u,													// offset
+			(vk::VkDeviceSize)(RENDER_WIDTH*RENDER_HEIGHT*4)	// size
+		};
+
+		vkd.cmdPipelineBarrier(*m_cmdBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0,
+							   0, (const vk::VkMemoryBarrier*)DE_NULL,
+							   1, &copyFinishBarrier,
+							   0, (const vk::VkImageMemoryBarrier*)DE_NULL);
+	}
+
+	VK_CHECK(vkd.endCommandBuffer(*m_cmdBuffer));
+}
+
+ShaderCaseInstance::~ShaderCaseInstance (void)
+{
+}
+
+int getNumSubCases (const ValueBlock& values)
+{
+	if (!values.outputs.empty())
+		return int(values.outputs[0].elements.size() / values.outputs[0].type.getScalarSize());
+	else
+		return 1; // Always run at least one iteration even if no output values are specified
+}
+
+bool checkResultImage (const ConstPixelBufferAccess& result)
+{
+	const tcu::IVec4	refPix	(255, 255, 255, 255);
+
+	for (int y = 0; y < result.getHeight(); y++)
+	{
+		for (int x = 0; x < result.getWidth(); x++)
+		{
+			const tcu::IVec4	resPix	= result.getPixelInt(x, y);
+
+			if (boolAny(notEqual(resPix, refPix)))
+				return false;
+		}
+	}
+
+	return true;
+}
+
+TestStatus ShaderCaseInstance::iterate (void)
+{
+	const vk::DeviceInterface&	vkd		= m_context.getDeviceInterface();
+	const vk::VkDevice			device	= m_context.getDevice();
+	const vk::VkQueue			queue	= m_context.getUniversalQueue();
+
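+	// Upload input, reference output and uniform values for the current sub-case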
+	if (!m_spec.values.inputs.empty())
+		writeValuesToMem(m_context, *m_inputMem, m_inputLayout, m_spec.values.inputs, m_subCaseNdx);
+
+	if (!m_spec.values.outputs.empty())
+		writeValuesToMem(m_context, *m_referenceMem, m_referenceLayout, m_spec.values.outputs, m_subCaseNdx);
+
+	if (!m_spec.values.uniforms.empty())
+		writeValuesToMem(m_context, *m_uniformMem, m_uniformLayout, m_spec.values.uniforms, m_subCaseNdx);
+
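+	// Submit the pre-recorded command buffer and wait for completion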
+	{
+		const vk::VkSubmitInfo		submitInfo	=
+		{
+			vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,			// sType
+			DE_NULL,									// pNext
+			0u,											// waitSemaphoreCount
+			(const vk::VkSemaphore*)0,					// pWaitSemaphores
+			(const vk::VkPipelineStageFlags*)DE_NULL,	// pWaitDstStageMask
+			1u,											// commandBufferCount
+			&m_cmdBuffer.get(),							// pCommandBuffers
+			0u,											// signalSemaphoreCount
+			(const vk::VkSemaphore*)0,					// pSignalSemaphores
+		};
+		const vk::VkFenceCreateInfo	fenceParams	=
+		{
+			vk::VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,	// sType
+			DE_NULL,									// pNext
+			0u,											// flags
+		};
+		const Unique<vk::VkFence>	fence		(vk::createFence(vkd, device, &fenceParams));
+
+		VK_CHECK(vkd.queueSubmit	(queue, 1u, &submitInfo, *fence));
+		VK_CHECK(vkd.waitForFences	(device, 1u, &fence.get(), DE_TRUE, ~0ull));
+	}
+
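+	// Check the readback image: every pixel must match the expected white result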
+	{
+		const ConstPixelBufferAccess	imgAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), RENDER_WIDTH, RENDER_HEIGHT, 1, m_readImageMem->getHostPtr());
+
+		invalidateMappedMemoryRange(vkd, device, m_readImageMem->getMemory(), m_readImageMem->getOffset(), (vk::VkDeviceSize)(RENDER_WIDTH*RENDER_HEIGHT*4));
+
+		if (!checkResultImage(imgAccess))
+		{
+			TestLog&	log		= m_context.getTestContext().getLog();
+
+			log << TestLog::Message << "ERROR: Got non-white pixels on sub-case " << m_subCaseNdx << TestLog::EndMessage
+				<< TestLog::Image("Result", "Result", imgAccess);
+
+			dumpValues(log, m_spec.values, m_subCaseNdx);
+
+			return TestStatus::fail(string("Got invalid pixels at sub-case ") + de::toString(m_subCaseNdx));
+		}
+	}
+
+	if (++m_subCaseNdx < getNumSubCases(m_spec.values))
+		return TestStatus::incomplete();
+	else
+		return TestStatus::pass("All sub-cases passed");
+}
+
+class ShaderCase : public TestCase
+{
+public:
+									ShaderCase		(tcu::TestContext& testCtx, const string& name, const string& description, const ShaderCaseSpecification& spec);
+
+	void							initPrograms	(SourceCollections& programCollection) const;
+	TestInstance*					createInstance	(Context& context) const;
+
+private:
+	const ShaderCaseSpecification	m_spec;
+};
+
+ShaderCase::ShaderCase (tcu::TestContext& testCtx, const string& name, const string& description, const ShaderCaseSpecification& spec)
+	: TestCase	(testCtx, name, description)
+	, m_spec	(spec)
+{
+}
+
+void ShaderCase::initPrograms (SourceCollections& sourceCollection) const
+{
+	vector<ProgramSources>	specializedSources	(m_spec.programs.size());
+
+	DE_ASSERT(isValid(m_spec));
+
+	if (m_spec.expectResult != glu::sl::EXPECT_PASS)
+		TCU_THROW(InternalError, "Only EXPECT_PASS is supported");
+
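+	// Vertex-only and fragment-only cases get an auto-generated counterpart stage; complete cases specialize all supplied stages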
+	if (m_spec.caseType == glu::sl::CASETYPE_VERTEX_ONLY)
+	{
+		DE_ASSERT(m_spec.programs.size() == 1 && m_spec.programs[0].sources.sources[glu::SHADERTYPE_VERTEX].size() == 1);
+		specializedSources[0] << glu::VertexSource(specializeVertexShader(m_spec, m_spec.programs[0].sources.sources[glu::SHADERTYPE_VERTEX][0]))
+							  << glu::FragmentSource(genFragmentShader(m_spec));
+	}
+	else if (m_spec.caseType == glu::sl::CASETYPE_FRAGMENT_ONLY)
+	{
+		DE_ASSERT(m_spec.programs.size() == 1 && m_spec.programs[0].sources.sources[glu::SHADERTYPE_FRAGMENT].size() == 1);
+		specializedSources[0] << glu::VertexSource(genVertexShader(m_spec))
+							  << glu::FragmentSource(specializeFragmentShader(m_spec, m_spec.programs[0].sources.sources[glu::SHADERTYPE_FRAGMENT][0]));
+	}
+	else
+	{
+		DE_ASSERT(m_spec.caseType == glu::sl::CASETYPE_COMPLETE);
+
+		const int	maxPatchVertices	= 4; // \todo [2015-08-05 pyry] Query
+
+		for (size_t progNdx = 0; progNdx < m_spec.programs.size(); progNdx++)
+		{
+			const ProgramSpecializationParams	progSpecParams	(m_spec, m_spec.programs[progNdx].requiredExtensions, maxPatchVertices);
+
+			specializeProgramSources(specializedSources[progNdx], m_spec.programs[progNdx].sources, progSpecParams);
+		}
+	}
+
+	for (size_t progNdx = 0; progNdx < specializedSources.size(); progNdx++)
+	{
+		for (int shaderType = 0; shaderType < glu::SHADERTYPE_LAST; shaderType++)
+		{
+			if (!specializedSources[progNdx].sources[shaderType].empty())
+			{
+				glu::ProgramSources& curSrc	= sourceCollection.glslSources.add(getShaderName((glu::ShaderType)shaderType, progNdx));
+				curSrc.sources[shaderType] = specializedSources[progNdx].sources[shaderType];
+			}
+		}
+	}
+}
+
+TestInstance* ShaderCase::createInstance (Context& context) const
+{
+	return new ShaderCaseInstance(context, m_spec);
+}
+
+class ShaderCaseFactory : public glu::sl::ShaderCaseFactory
+{
+public:
+	ShaderCaseFactory (tcu::TestContext& testCtx)
+		: m_testCtx(testCtx)
+	{
+	}
+
+	tcu::TestCaseGroup* createGroup (const string& name, const string& description, const vector<tcu::TestNode*>& children)
+	{
+		return new tcu::TestCaseGroup(m_testCtx, name.c_str(), description.c_str(), children);
+	}
+
+	tcu::TestCase* createCase (const string& name, const string& description, const ShaderCaseSpecification& spec)
+	{
+		return new ShaderCase(m_testCtx, name, description, spec);
+	}
+
+private:
+	tcu::TestContext&	m_testCtx;
+};
+
+class ShaderLibraryGroup : public tcu::TestCaseGroup
+{
+public:
+	ShaderLibraryGroup (tcu::TestContext& testCtx, const string& name, const string& description, const string& filename)
+		 : tcu::TestCaseGroup	(testCtx, name.c_str(), description.c_str())
+		 , m_filename			(filename)
+	{
+	}
+
+	void init (void)
+	{
+		ShaderCaseFactory				caseFactory	(m_testCtx);
+		const vector<tcu::TestNode*>	children	= glu::sl::parseFile(m_testCtx.getArchive(), m_filename, &caseFactory);
+
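+		// addChild takes ownership; if it throws, delete the remaining unowned children before re-throwing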
+		for (size_t ndx = 0; ndx < children.size(); ndx++)
+		{
+			try
+			{
+				addChild(children[ndx]);
+			}
+			catch (...)
+			{
+				for (; ndx < children.size(); ndx++)
+					delete children[ndx];
+				throw;
+			}
+		}
+	}
+
+private:
+	const string	m_filename;
+};
+
+} // anonymous
+
+MovePtr<tcu::TestCaseGroup> createShaderLibraryGroup (tcu::TestContext& testCtx, const string& name, const string& description, const string& filename)
+{
+	return MovePtr<tcu::TestCaseGroup>(new ShaderLibraryGroup(testCtx, name, description, filename));
+}
+
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/vktShaderLibrary.hpp b/external/vulkancts/modules/vulkan/vktShaderLibrary.hpp
new file mode 100644
index 0000000..17542a4
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktShaderLibrary.hpp
@@ -0,0 +1,48 @@
+#ifndef _VKTSHADERLIBRARY_HPP
+#define _VKTSHADERLIBRARY_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief ShaderLibrary Vulkan implementation
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+#include "deUniquePtr.hpp"
+
+namespace vkt
+{
+
+de::MovePtr<tcu::TestCaseGroup>		createShaderLibraryGroup	(tcu::TestContext& testCtx, const std::string& name, const std::string& description, const std::string& filename);
+
+} // vkt
+
+#endif // _VKTSHADERLIBRARY_HPP
diff --git a/external/vulkancts/modules/vulkan/vktTestCase.cpp b/external/vulkancts/modules/vulkan/vktTestCase.cpp
new file mode 100644
index 0000000..657d640
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktTestCase.cpp
@@ -0,0 +1,195 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan test case base classes
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCase.hpp"
+
+#include "vkRef.hpp"
+#include "vkRefUtil.hpp"
+#include "vkQueryUtil.hpp"
+#include "vkDeviceUtil.hpp"
+#include "vkMemUtil.hpp"
+#include "vkPlatform.hpp"
+
+#include "deMemory.h"
+
+namespace vkt
+{
+
+// Default device utilities
+
+using std::vector;
+using namespace vk;
+
+static deUint32 findQueueFamilyIndexWithCaps (const InstanceInterface& vkInstance, VkPhysicalDevice physicalDevice, VkQueueFlags requiredCaps)
+{
+	const vector<VkQueueFamilyProperties>	queueProps	= getPhysicalDeviceQueueFamilyProperties(vkInstance, physicalDevice);
+
+	for (size_t queueNdx = 0; queueNdx < queueProps.size(); queueNdx++)
+	{
+		if ((queueProps[queueNdx].queueFlags & requiredCaps) == requiredCaps)
+			return (deUint32)queueNdx;
+	}
+
+	TCU_THROW(NotSupportedError, "No matching queue found");
+}
+
+Move<VkDevice> createDefaultDevice (const InstanceInterface& vki, VkPhysicalDevice physicalDevice, deUint32 queueIndex, const VkPhysicalDeviceFeatures& enabledFeatures)
+{
+	VkDeviceQueueCreateInfo		queueInfo;
+	VkDeviceCreateInfo			deviceInfo;
+	const float					queuePriority	= 1.0f;
+
+	deMemset(&queueInfo,	0, sizeof(queueInfo));
+	deMemset(&deviceInfo,	0, sizeof(deviceInfo));
+
+	queueInfo.sType							= VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
+	queueInfo.pNext							= DE_NULL;
+	queueInfo.flags							= (VkDeviceQueueCreateFlags)0u;
+	queueInfo.queueFamilyIndex				= queueIndex;
+	queueInfo.queueCount					= 1u;
+	queueInfo.pQueuePriorities				= &queuePriority;
+
+	deviceInfo.sType						= VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
+	deviceInfo.pNext						= DE_NULL;
+	deviceInfo.queueCreateInfoCount			= 1u;
+	deviceInfo.pQueueCreateInfos			= &queueInfo;
+	deviceInfo.enabledExtensionCount		= 0u;
+	deviceInfo.ppEnabledExtensionNames		= DE_NULL;
+	deviceInfo.enabledLayerCount			= 0u;
+	deviceInfo.ppEnabledLayerNames			= DE_NULL;
+	deviceInfo.pEnabledFeatures				= &enabledFeatures;
+
+	return createDevice(vki, physicalDevice, &deviceInfo);
+}
+
+class DefaultDevice
+{
+public:
+										DefaultDevice					(const PlatformInterface& vkPlatform, const tcu::CommandLine& cmdLine);
+										~DefaultDevice					(void);
+
+	VkInstance							getInstance						(void) const	{ return *m_instance;					}
+	const InstanceInterface&			getInstanceInterface			(void) const	{ return m_instanceInterface;			}
+
+	VkPhysicalDevice					getPhysicalDevice				(void) const	{ return m_physicalDevice;				}
+	const VkPhysicalDeviceFeatures&		getDeviceFeatures				(void) const	{ return m_deviceFeatures;				}
+	VkDevice							getDevice						(void) const	{ return *m_device;						}
+	const DeviceInterface&				getDeviceInterface				(void) const	{ return m_deviceInterface;				}
+	const VkPhysicalDeviceProperties&	getDeviceProperties				(void) const	{ return m_deviceProperties;			}
+
+	deUint32							getUniversalQueueFamilyIndex	(void) const	{ return m_universalQueueFamilyIndex;	}
+	VkQueue								getUniversalQueue				(void) const;
+
+private:
+	const Unique<VkInstance>			m_instance;
+	const InstanceDriver				m_instanceInterface;
+
+	const VkPhysicalDevice				m_physicalDevice;
+
+	const deUint32						m_universalQueueFamilyIndex;
+	const VkPhysicalDeviceFeatures		m_deviceFeatures;
+	const VkPhysicalDeviceProperties	m_deviceProperties;
+
+	const Unique<VkDevice>				m_device;
+	const DeviceDriver					m_deviceInterface;
+};
+
+DefaultDevice::DefaultDevice (const PlatformInterface& vkPlatform, const tcu::CommandLine& cmdLine)
+	: m_instance					(createDefaultInstance(vkPlatform))
+	, m_instanceInterface			(vkPlatform, *m_instance)
+	, m_physicalDevice				(chooseDevice(m_instanceInterface, *m_instance, cmdLine))
+	, m_universalQueueFamilyIndex	(findQueueFamilyIndexWithCaps(m_instanceInterface, m_physicalDevice, VK_QUEUE_GRAPHICS_BIT|VK_QUEUE_COMPUTE_BIT))
+	, m_deviceFeatures				(getPhysicalDeviceFeatures(m_instanceInterface, m_physicalDevice)) // \note All supported features are enabled
+	, m_deviceProperties			(getPhysicalDeviceProperties(m_instanceInterface, m_physicalDevice))
+	, m_device						(createDefaultDevice(m_instanceInterface, m_physicalDevice, m_universalQueueFamilyIndex, m_deviceFeatures))
+	, m_deviceInterface				(m_instanceInterface, *m_device)
+{
+}
+
+DefaultDevice::~DefaultDevice (void)
+{
+}
+
+VkQueue DefaultDevice::getUniversalQueue (void) const
+{
+	VkQueue	queue	= 0;
+	m_deviceInterface.getDeviceQueue(*m_device, m_universalQueueFamilyIndex, 0, &queue);
+	return queue;
+}
+
+// Allocator utilities
+
+vk::Allocator* createAllocator (DefaultDevice* device)
+{
+	const VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(device->getInstanceInterface(), device->getPhysicalDevice());
+
+	// \todo [2015-07-24 jarkko] support allocator selection/configuration from command line (or compile time)
+	return new SimpleAllocator(device->getDeviceInterface(), device->getDevice(), memoryProperties);
+}
+
+// Context
+
+Context::Context (tcu::TestContext&							testCtx,
+				  const vk::PlatformInterface&				platformInterface,
+				  vk::ProgramCollection<vk::ProgramBinary>&	progCollection)
+	: m_testCtx				(testCtx)
+	, m_platformInterface	(platformInterface)
+	, m_progCollection		(progCollection)
+	, m_device				(new DefaultDevice(m_platformInterface, testCtx.getCommandLine()))
+	, m_allocator			(createAllocator(m_device.get()))
+{
+}
+
+Context::~Context (void)
+{
+}
+
+vk::VkInstance						Context::getInstance					(void) const { return m_device->getInstance();					}
+const vk::InstanceInterface&		Context::getInstanceInterface			(void) const { return m_device->getInstanceInterface();			}
+vk::VkPhysicalDevice				Context::getPhysicalDevice				(void) const { return m_device->getPhysicalDevice();			}
+const vk::VkPhysicalDeviceFeatures&	Context::getDeviceFeatures				(void) const { return m_device->getDeviceFeatures();			}
+const vk::VkPhysicalDeviceProperties&	Context::getDeviceProperties		(void) const { return m_device->getDeviceProperties();			}
+vk::VkDevice						Context::getDevice						(void) const { return m_device->getDevice();					}
+const vk::DeviceInterface&			Context::getDeviceInterface				(void) const { return m_device->getDeviceInterface();			}
+deUint32							Context::getUniversalQueueFamilyIndex	(void) const { return m_device->getUniversalQueueFamilyIndex();	}
+vk::VkQueue							Context::getUniversalQueue				(void) const { return m_device->getUniversalQueue();			}
+vk::Allocator&						Context::getDefaultAllocator			(void) const { return *m_allocator;								}
+
+// TestCase
+
+void TestCase::initPrograms (SourceCollections&) const
+{
+}
+
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/vktTestCase.hpp b/external/vulkancts/modules/vulkan/vktTestCase.hpp
new file mode 100644
index 0000000..be5bf56
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktTestCase.hpp
@@ -0,0 +1,143 @@
+#ifndef _VKTTESTCASE_HPP
+#define _VKTTESTCASE_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan test case base classes
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+#include "vkDefs.hpp"
+#include "deUniquePtr.hpp"
+
+namespace glu
+{
+struct ProgramSources;
+}
+
+namespace vk
+{
+class PlatformInterface;
+class ProgramBinary;
+template<typename Program> class ProgramCollection;
+class Allocator;
+struct SourceCollections;
+}
+
+namespace vkt
+{
+
+class DefaultDevice;
+
+class Context
+{
+public:
+												Context							(tcu::TestContext&							testCtx,
+																				 const vk::PlatformInterface&				platformInterface,
+																				 vk::ProgramCollection<vk::ProgramBinary>&	progCollection);
+												~Context						(void);
+
+	tcu::TestContext&							getTestContext					(void) const { return m_testCtx;			}
+	const vk::PlatformInterface&				getPlatformInterface			(void) const { return m_platformInterface;	}
+	vk::ProgramCollection<vk::ProgramBinary>&	getBinaryCollection				(void) const { return m_progCollection;		}
+
+	// Default instance & device, selected with --deqp-vk-device-id=N
+	vk::VkInstance								getInstance						(void) const;
+	const vk::InstanceInterface&				getInstanceInterface			(void) const;
+	vk::VkPhysicalDevice						getPhysicalDevice				(void) const;
+	const vk::VkPhysicalDeviceFeatures&			getDeviceFeatures				(void) const;
+	const vk::VkPhysicalDeviceProperties&		getDeviceProperties				(void) const;
+	vk::VkDevice								getDevice						(void) const;
+	const vk::DeviceInterface&					getDeviceInterface				(void) const;
+	deUint32									getUniversalQueueFamilyIndex	(void) const;
+	vk::VkQueue									getUniversalQueue				(void) const;
+
+	vk::Allocator&								getDefaultAllocator				(void) const;
+
+protected:
+	tcu::TestContext&							m_testCtx;
+	const vk::PlatformInterface&				m_platformInterface;
+	vk::ProgramCollection<vk::ProgramBinary>&	m_progCollection;
+
+	const de::UniquePtr<DefaultDevice>			m_device;
+	const de::UniquePtr<vk::Allocator>			m_allocator;
+
+private:
+												Context							(const Context&); // Not allowed
+	Context&									operator=						(const Context&); // Not allowed
+};
+
+class TestInstance;
+
+class TestCase : public tcu::TestCase
+{
+public:
+							TestCase		(tcu::TestContext& testCtx, const std::string& name, const std::string& description);
+							TestCase		(tcu::TestContext& testCtx, tcu::TestNodeType type, const std::string& name, const std::string& description);
+	virtual					~TestCase		(void) {}
+
+	virtual void			initPrograms	(vk::SourceCollections& programCollection) const;
+	virtual TestInstance*	createInstance	(Context& context) const = 0;
+
+	IterateResult			iterate			(void) { DE_ASSERT(false); return STOP; } // Deprecated in this module
+};
+
+class TestInstance
+{
+public:
+								TestInstance	(Context& context) : m_context(context) {}
+	virtual						~TestInstance	(void) {}
+
+	virtual tcu::TestStatus		iterate			(void) = 0;
+
+protected:
+	Context&					m_context;
+
+private:
+								TestInstance	(const TestInstance&);
+	TestInstance&				operator=		(const TestInstance&);
+};
+
+inline TestCase::TestCase (tcu::TestContext& testCtx, const std::string& name, const std::string& description)
+	: tcu::TestCase(testCtx, name.c_str(), description.c_str())
+{
+}
+
+inline TestCase::TestCase (tcu::TestContext& testCtx, tcu::TestNodeType type, const std::string& name, const std::string& description)
+	: tcu::TestCase(testCtx, type, name.c_str(), description.c_str())
+{
+}
+
+} // vkt
+
+#endif // _VKTTESTCASE_HPP
diff --git a/external/vulkancts/modules/vulkan/vktTestCaseUtil.cpp b/external/vulkancts/modules/vulkan/vktTestCaseUtil.cpp
new file mode 100644
index 0000000..7d63796
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktTestCaseUtil.cpp
@@ -0,0 +1,37 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief TestCase utilities
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestCaseUtil.hpp"
+
+DE_EMPTY_CPP_FILE
diff --git a/external/vulkancts/modules/vulkan/vktTestCaseUtil.hpp b/external/vulkancts/modules/vulkan/vktTestCaseUtil.hpp
new file mode 100644
index 0000000..1fa09d3
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktTestCaseUtil.hpp
@@ -0,0 +1,257 @@
+#ifndef _VKTTESTCASEUTIL_HPP
+#define _VKTTESTCASEUTIL_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief TestCase utilities
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+
+template<typename Arg0>
+struct NoPrograms1
+{
+	void	init	(vk::SourceCollections&, Arg0) const {}
+};
+
+template<typename Instance, typename Arg0, typename Programs = NoPrograms1<Arg0> >
+class InstanceFactory1 : public TestCase
+{
+public:
+					InstanceFactory1	(tcu::TestContext& testCtx, tcu::TestNodeType type, const std::string& name, const std::string& desc, const Arg0& arg0)
+						: TestCase	(testCtx, type, name, desc)
+						, m_progs	()
+						, m_arg0	(arg0)
+					{}
+
+					InstanceFactory1	(tcu::TestContext& testCtx, tcu::TestNodeType type, const std::string& name, const std::string& desc, const Programs& progs, const Arg0& arg0)
+						: TestCase	(testCtx, type, name, desc)
+						, m_progs	(progs)
+						, m_arg0	(arg0)
+					{}
+
+	void			initPrograms		(vk::SourceCollections& dst) const { m_progs.init(dst, m_arg0); }
+	TestInstance*	createInstance		(Context& context) const { return new Instance(context, m_arg0); }
+
+private:
+	const Programs	m_progs;
+	const Arg0		m_arg0;
+};
+
+class FunctionInstance0 : public TestInstance
+{
+public:
+	typedef tcu::TestStatus	(*Function)	(Context& context);
+
+					FunctionInstance0	(Context& context, Function function)
+						: TestInstance	(context)
+						, m_function	(function)
+					{}
+
+	tcu::TestStatus	iterate				(void) { return m_function(m_context); }
+
+private:
+	const Function	m_function;
+};
+
+template<typename Arg0>
+class FunctionInstance1 : public TestInstance
+{
+public:
+	typedef tcu::TestStatus	(*Function)	(Context& context, Arg0 arg0);
+
+	struct Args
+	{
+		Args (Function func_, Arg0 arg0_) : func(func_), arg0(arg0_) {}
+
+		Function	func;
+		Arg0		arg0;
+	};
+
+					FunctionInstance1	(Context& context, const Args& args)
+						: TestInstance	(context)
+						, m_args		(args)
+					{}
+
+	tcu::TestStatus	iterate				(void) { return m_args.func(m_context, m_args.arg0); }
+
+private:
+	const Args		m_args;
+};
+
+class FunctionPrograms0
+{
+public:
+	typedef void	(*Function)		(vk::SourceCollections& dst);
+
+					FunctionPrograms0	(Function func)
+						: m_func(func)
+					{}
+
+	void			init			(vk::SourceCollections& dst, FunctionInstance0::Function) const { m_func(dst); }
+
+private:
+	const Function	m_func;
+};
+
+template<typename Arg0>
+class FunctionPrograms1
+{
+public:
+	typedef void	(*Function)		(vk::SourceCollections& dst, Arg0 arg0);
+
+					FunctionPrograms1	(Function func)
+						: m_func(func)
+					{}
+
+	void			init			(vk::SourceCollections& dst, const typename FunctionInstance1<Arg0>::Args& args) const { m_func(dst, args.arg0); }
+
+private:
+	const Function	m_func;
+};
+
+// createFunctionCase
+
+inline TestCase* createFunctionCase (tcu::TestContext&				testCtx,
+									 tcu::TestNodeType				type,
+									 const std::string&				name,
+									 const std::string&				desc,
+									 FunctionInstance0::Function	testFunction)
+{
+	return new InstanceFactory1<FunctionInstance0, FunctionInstance0::Function>(testCtx, type, name, desc, testFunction);
+}
+
+inline TestCase* createFunctionCaseWithPrograms (tcu::TestContext&				testCtx,
+												 tcu::TestNodeType				type,
+												 const std::string&				name,
+												 const std::string&				desc,
+												 FunctionPrograms0::Function	initPrograms,
+												 FunctionInstance0::Function	testFunction)
+{
+	return new InstanceFactory1<FunctionInstance0, FunctionInstance0::Function, FunctionPrograms0>(
+		testCtx, type, name, desc, FunctionPrograms0(initPrograms), testFunction);
+}
+
+template<typename Arg0>
+TestCase* createFunctionCase (tcu::TestContext&								testCtx,
+							  tcu::TestNodeType								type,
+							  const std::string&							name,
+							  const std::string&							desc,
+							  typename FunctionInstance1<Arg0>::Function	testFunction,
+							  Arg0											arg0)
+{
+	return new InstanceFactory1<FunctionInstance1<Arg0>, typename FunctionInstance1<Arg0>::Args>(
+		testCtx, type, name, desc, typename FunctionInstance1<Arg0>::Args(testFunction, arg0));
+}
+
+template<typename Arg0>
+TestCase* createFunctionCaseWithPrograms (tcu::TestContext&								testCtx,
+										  tcu::TestNodeType								type,
+										  const std::string&							name,
+										  const std::string&							desc,
+										  typename FunctionPrograms1<Arg0>::Function	initPrograms,
+										  typename FunctionInstance1<Arg0>::Function	testFunction,
+										  Arg0											arg0)
+{
+	return new InstanceFactory1<FunctionInstance1<Arg0>, typename FunctionInstance1<Arg0>::Args, FunctionPrograms1<Arg0> >(
+		testCtx, type, name, desc, FunctionPrograms1<Arg0>(initPrograms), typename FunctionInstance1<Arg0>::Args(testFunction, arg0));
+}
+
+// addFunctionCase
+
+inline void addFunctionCase (tcu::TestCaseGroup*			group,
+							 const std::string&				name,
+							 const std::string&				desc,
+							 FunctionInstance0::Function	testFunc)
+{
+	group->addChild(createFunctionCase(group->getTestContext(), tcu::NODETYPE_SELF_VALIDATE, name, desc, testFunc));
+}
+
+inline void addFunctionCaseWithPrograms (tcu::TestCaseGroup*			group,
+										 const std::string&				name,
+										 const std::string&				desc,
+										 FunctionPrograms0::Function	initPrograms,
+										 FunctionInstance0::Function	testFunc)
+{
+	group->addChild(createFunctionCaseWithPrograms(group->getTestContext(), tcu::NODETYPE_SELF_VALIDATE, name, desc, initPrograms, testFunc));
+}
+
+template<typename Arg0>
+void addFunctionCase (tcu::TestCaseGroup*							group,
+					  const std::string&							name,
+					  const std::string&							desc,
+					  typename FunctionInstance1<Arg0>::Function	testFunc,
+					  Arg0											arg0)
+{
+	group->addChild(createFunctionCase<Arg0>(group->getTestContext(), tcu::NODETYPE_SELF_VALIDATE, name, desc, testFunc, arg0));
+}
+
+template<typename Arg0>
+void addFunctionCase (tcu::TestCaseGroup*							group,
+					  tcu::TestNodeType								type,
+					  const std::string&							name,
+					  const std::string&							desc,
+					  typename FunctionInstance1<Arg0>::Function	testFunc,
+					  Arg0											arg0)
+{
+	group->addChild(createFunctionCase<Arg0>(group->getTestContext(), type, name, desc, testFunc, arg0));
+}
+
+template<typename Arg0>
+void addFunctionCaseWithPrograms (tcu::TestCaseGroup*							group,
+								  const std::string&							name,
+								  const std::string&							desc,
+								  typename FunctionPrograms1<Arg0>::Function	initPrograms,
+								  typename FunctionInstance1<Arg0>::Function	testFunc,
+								  Arg0											arg0)
+{
+	group->addChild(createFunctionCaseWithPrograms<Arg0>(group->getTestContext(), tcu::NODETYPE_SELF_VALIDATE, name, desc, initPrograms, testFunc, arg0));
+}
+
+template<typename Arg0>
+void addFunctionCaseWithPrograms (tcu::TestCaseGroup*							group,
+								  tcu::TestNodeType								type,
+								  const std::string&							name,
+								  const std::string&							desc,
+								  typename FunctionPrograms1<Arg0>::Function	initPrograms,
+								  typename FunctionInstance1<Arg0>::Function	testFunc,
+								  Arg0											arg0)
+{
+	group->addChild(createFunctionCaseWithPrograms<Arg0>(group->getTestContext(), type, name, desc, initPrograms, testFunc, arg0));
+}
+
+} // vkt
+
+#endif // _VKTTESTCASEUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/vktTestGroupUtil.cpp b/external/vulkancts/modules/vulkan/vktTestGroupUtil.cpp
new file mode 100644
index 0000000..bd94406
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktTestGroupUtil.cpp
@@ -0,0 +1,58 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2016 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief TestCaseGroup utilities
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestGroupUtil.hpp"
+
+namespace vkt
+{
+
+TestGroupHelper0::TestGroupHelper0 (tcu::TestContext&	testCtx,
+									const std::string&	name,
+									const std::string&	description,
+									CreateChildrenFunc	createChildren)
+	: tcu::TestCaseGroup	(testCtx, name.c_str(), description.c_str())
+	, m_createChildren		(createChildren)
+{
+}
+
+TestGroupHelper0::~TestGroupHelper0 (void)
+{
+}
+
+void TestGroupHelper0::init (void)
+{
+	m_createChildren(this);
+}
+
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/vktTestGroupUtil.hpp b/external/vulkancts/modules/vulkan/vktTestGroupUtil.hpp
new file mode 100644
index 0000000..f3acf93
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktTestGroupUtil.hpp
@@ -0,0 +1,103 @@
+#ifndef _VKTTESTGROUPUTIL_HPP
+#define _VKTTESTGROUPUTIL_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2016 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief TestCaseGroup utilities
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace vkt
+{
+
+class TestGroupHelper0 : public tcu::TestCaseGroup
+{
+public:
+	typedef void (*CreateChildrenFunc) (tcu::TestCaseGroup* testGroup);
+
+								TestGroupHelper0	(tcu::TestContext&		testCtx,
+													 const std::string&		name,
+													 const std::string&		description,
+													 CreateChildrenFunc		createChildren);
+								~TestGroupHelper0	(void);
+
+	void						init				(void);
+
+private:
+	const CreateChildrenFunc	m_createChildren;
+};
+
+template<typename Arg0>
+class TestGroupHelper1 : public tcu::TestCaseGroup
+{
+public:
+	typedef void (*CreateChildrenFunc) (tcu::TestCaseGroup* testGroup, Arg0 arg0);
+
+								TestGroupHelper1	(tcu::TestContext&		testCtx,
+													 const std::string&		name,
+													 const std::string&		description,
+													 CreateChildrenFunc		createChildren,
+													 const Arg0&			arg0)
+									: tcu::TestCaseGroup	(testCtx, name.c_str(), description.c_str())
+									, m_createChildren		(createChildren)
+									, m_arg0				(arg0)
+								{}
+
+	void						init				(void) { m_createChildren(this, m_arg0); }
+
+private:
+	const CreateChildrenFunc	m_createChildren;
+	const Arg0					m_arg0;
+};
+
+inline tcu::TestCaseGroup* createTestGroup (tcu::TestContext&						testCtx,
+											const std::string&						name,
+											const std::string&						description,
+											TestGroupHelper0::CreateChildrenFunc	createChildren)
+{
+	return new TestGroupHelper0(testCtx, name, description, createChildren);
+}
+
+template<typename Arg0>
+tcu::TestCaseGroup* createTestGroup (tcu::TestContext&										testCtx,
+									 const std::string&										name,
+									 const std::string&										description,
+									 typename TestGroupHelper1<Arg0>::CreateChildrenFunc	createChildren,
+									 Arg0													arg0)
+{
+	return new TestGroupHelper1<Arg0>(testCtx, name, description, createChildren, arg0);
+}
+
+} // vkt
+
+#endif // _VKTTESTGROUPUTIL_HPP
diff --git a/external/vulkancts/modules/vulkan/vktTestPackage.cpp b/external/vulkancts/modules/vulkan/vktTestPackage.cpp
new file mode 100644
index 0000000..4d669fe
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktTestPackage.cpp
@@ -0,0 +1,331 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Test Package
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestPackage.hpp"
+
+#include "tcuPlatform.hpp"
+#include "tcuTestCase.hpp"
+#include "tcuTestLog.hpp"
+
+#include "vkPlatform.hpp"
+#include "vkPrograms.hpp"
+#include "vkBinaryRegistry.hpp"
+#include "vkGlslToSpirV.hpp"
+#include "vkSpirVAsm.hpp"
+
+#include "deUniquePtr.hpp"
+
+#include "vktTestGroupUtil.hpp"
+#include "vktApiTests.hpp"
+#include "vktPipelineTests.hpp"
+#include "vktBindingModelTests.hpp"
+#include "vktSpvAsmTests.hpp"
+#include "vktShaderLibrary.hpp"
+#include "vktRenderPassTests.hpp"
+#include "vktMemoryTests.hpp"
+#include "vktShaderRenderDiscardTests.hpp"
+#include "vktShaderRenderIndexingTests.hpp"
+#include "vktShaderRenderLoopTests.hpp"
+#include "vktShaderRenderMatrixTests.hpp"
+#include "vktShaderRenderOperatorTests.hpp"
+#include "vktShaderRenderReturnTests.hpp"
+#include "vktShaderRenderStructTests.hpp"
+#include "vktShaderRenderSwitchTests.hpp"
+#include "vktShaderBuiltinTests.hpp"
+#include "vktOpaqueTypeIndexingTests.hpp"
+#include "vktUniformBlockTests.hpp"
+#include "vktDynamicStateTests.hpp"
+#include "vktSSBOLayoutTests.hpp"
+#include "vktQueryPoolTests.hpp"
+#include "vktDrawTests.hpp"
+#include "vktComputeTests.hpp"
+#include "vktImageTests.hpp"
+#include "vktInfoTests.hpp"
+
+#include <vector>
+#include <sstream>
+
+namespace // compilation
+{
+
+vk::ProgramBinary* compileProgram (const glu::ProgramSources& source, glu::ShaderProgramInfo* buildInfo)
+{
+	return vk::buildProgram(source, vk::PROGRAM_FORMAT_SPIRV, buildInfo);
+}
+
+vk::ProgramBinary* compileProgram (const vk::SpirVAsmSource& source, vk::SpirVProgramInfo* buildInfo)
+{
+	return vk::assembleProgram(source, buildInfo);
+}
+
+template <typename InfoType, typename IteratorType>
+vk::ProgramBinary* buildProgram (const std::string&					casePath,
+								 IteratorType						iter,
+								 const vk::BinaryRegistryReader&	prebuiltBinRegistry,
+								 tcu::TestLog&						log,
+								 vk::BinaryCollection*				progCollection)
+{
+	const vk::ProgramIdentifier		progId		(casePath, iter.getName());
+	const tcu::ScopedLogSection		progSection	(log, iter.getName(), "Program: " + iter.getName());
+	de::MovePtr<vk::ProgramBinary>	binProg;
+	InfoType						buildInfo;
+
+	try
+	{
+		binProg	= de::MovePtr<vk::ProgramBinary>(compileProgram(iter.getProgram(), &buildInfo));
+		log << buildInfo;
+	}
+	catch (const tcu::NotSupportedError& err)
+	{
+		// Try to load from cache
+		log << err << tcu::TestLog::Message << "Building from source not supported, loading stored binary instead" << tcu::TestLog::EndMessage;
+
+		binProg = de::MovePtr<vk::ProgramBinary>(prebuiltBinRegistry.loadProgram(progId));
+
+		log << iter.getProgram();
+	}
+	catch (const tcu::Exception&)
+	{
+		// Build failed for other reason
+		log << buildInfo;
+		throw;
+	}
+
+	TCU_CHECK_INTERNAL(binProg);
+
+	{
+		vk::ProgramBinary* const	returnBinary	= binProg.get();
+
+		progCollection->add(progId.programName, binProg);
+
+		return returnBinary;
+	}
+}
+
+} // anonymous(compilation)
+
+namespace vkt
+{
+
+using std::vector;
+using de::UniquePtr;
+using de::MovePtr;
+using tcu::TestLog;
+
+// TestCaseExecutor
+
+class TestCaseExecutor : public tcu::TestCaseExecutor
+{
+public:
+											TestCaseExecutor	(tcu::TestContext& testCtx);
+											~TestCaseExecutor	(void);
+
+	virtual void							init				(tcu::TestCase* testCase, const std::string& path);
+	virtual void							deinit				(tcu::TestCase* testCase);
+
+	virtual tcu::TestNode::IterateResult	iterate				(tcu::TestCase* testCase);
+
+private:
+	vk::BinaryCollection					m_progCollection;
+	vk::BinaryRegistryReader				m_prebuiltBinRegistry;
+
+	de::UniquePtr<vk::Library>				m_library;
+	Context									m_context;
+
+	TestInstance*							m_instance;			//!< Current test case instance
+};
+
+static MovePtr<vk::Library> createLibrary (tcu::TestContext& testCtx)
+{
+	return MovePtr<vk::Library>(testCtx.getPlatform().getVulkanPlatform().createLibrary());
+}
+
+TestCaseExecutor::TestCaseExecutor (tcu::TestContext& testCtx)
+	: m_prebuiltBinRegistry	(testCtx.getArchive(), "vulkan/prebuilt")
+	, m_library				(createLibrary(testCtx))
+	, m_context				(testCtx, m_library->getPlatformInterface(), m_progCollection)
+	, m_instance			(DE_NULL)
+{
+}
+
+TestCaseExecutor::~TestCaseExecutor (void)
+{
+	delete m_instance;
+}
+
+void TestCaseExecutor::init (tcu::TestCase* testCase, const std::string& casePath)
+{
+	const TestCase*			vktCase		= dynamic_cast<TestCase*>(testCase);
+	tcu::TestLog&			log			= m_context.getTestContext().getLog();
+	vk::SourceCollections	sourceProgs;
+
+	DE_UNREF(casePath); // \todo [2015-03-13 pyry] Use this to identify ProgramCollection storage path
+
+	if (!vktCase)
+		TCU_THROW(InternalError, "Test node not an instance of vkt::TestCase");
+
+	m_progCollection.clear();
+	vktCase->initPrograms(sourceProgs);
+
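+	// Build each GLSL program to SPIR-V (or fall back to a prebuilt binary) and log the disassembly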
+	for (vk::GlslSourceCollection::Iterator progIter = sourceProgs.glslSources.begin(); progIter != sourceProgs.glslSources.end(); ++progIter)
+	{
+		vk::ProgramBinary* binProg = buildProgram<glu::ShaderProgramInfo, vk::GlslSourceCollection::Iterator>(casePath, progIter, m_prebuiltBinRegistry, log, &m_progCollection);
+
+		try
+		{
+			std::ostringstream disasm;
+
+			vk::disassembleSpirV(binProg->getSize(), binProg->getBinary(), &disasm);
+
+			log << TestLog::KernelSource(disasm.str());
+		}
+		catch (const tcu::NotSupportedError& err)
+		{
+			log << err;
+		}
+	}
+
+	for (vk::SpirVAsmCollection::Iterator asmIterator = sourceProgs.spirvAsmSources.begin(); asmIterator != sourceProgs.spirvAsmSources.end(); ++asmIterator)
+	{
+		buildProgram<vk::SpirVProgramInfo, vk::SpirVAsmCollection::Iterator>(casePath, asmIterator, m_prebuiltBinRegistry, log, &m_progCollection);
+	}
+
+	DE_ASSERT(!m_instance);
+	m_instance = vktCase->createInstance(m_context);
+}
+
+void TestCaseExecutor::deinit (tcu::TestCase*)
+{
+	delete m_instance;
+	m_instance = DE_NULL;
+}
+
+tcu::TestNode::IterateResult TestCaseExecutor::iterate (tcu::TestCase*)
+{
+	DE_ASSERT(m_instance);
+
+	const tcu::TestStatus	result	= m_instance->iterate();
+
+	if (result.isComplete())
+	{
+		// Vulkan tests shouldn't set result directly
+		DE_ASSERT(m_context.getTestContext().getTestResult() == QP_TEST_RESULT_LAST);
+		m_context.getTestContext().setTestResult(result.getCode(), result.getDescription().c_str());
+		return tcu::TestNode::STOP;
+	}
+	else
+		return tcu::TestNode::CONTINUE;
+}
+
+// GLSL shader tests
+
+void createGlslTests (tcu::TestCaseGroup* glslTests)
+{
+	tcu::TestContext&	testCtx		= glslTests->getTestContext();
+
+	// ShaderLibrary-based tests
+	static const struct
+	{
+		const char*		name;
+		const char*		description;
+	} s_es310Tests[] =
+	{
+		{ "arrays",						"Arrays"					},
+		{ "conditionals",				"Conditional statements"	},
+		{ "constant_expressions",		"Constant expressions"		},
+		{ "constants",					"Constants"					},
+		{ "conversions",				"Type conversions"			},
+		{ "functions",					"Functions"					},
+		{ "linkage",					"Linking"					},
+		{ "scoping",					"Scoping"					},
+		{ "swizzles",					"Swizzles"					},
+	};
+
+	for (int ndx = 0; ndx < DE_LENGTH_OF_ARRAY(s_es310Tests); ndx++)
+		glslTests->addChild(createShaderLibraryGroup(testCtx,
+													 s_es310Tests[ndx].name,
+													 s_es310Tests[ndx].description,
+													 std::string("vulkan/glsl/es310/") + s_es310Tests[ndx].name + ".test").release());
+
+	// ShaderRenderCase-based tests
+	glslTests->addChild(sr::createDiscardTests	(testCtx));
+	glslTests->addChild(sr::createIndexingTests	(testCtx));
+	glslTests->addChild(sr::createLoopTests		(testCtx));
+	glslTests->addChild(sr::createMatrixTests	(testCtx));
+	glslTests->addChild(sr::createOperatorTests	(testCtx));
+	glslTests->addChild(sr::createReturnTests	(testCtx));
+	glslTests->addChild(sr::createStructTests	(testCtx));
+	glslTests->addChild(sr::createSwitchTests	(testCtx));
+
+	// ShaderExecutor-based tests
+	glslTests->addChild(shaderexecutor::createBuiltinTests				(testCtx));
+	glslTests->addChild(shaderexecutor::createOpaqueTypeIndexingTests	(testCtx));
+}
+
+// TestPackage
+
+TestPackage::TestPackage (tcu::TestContext& testCtx)
+	: tcu::TestPackage(testCtx, "dEQP-VK", "dEQP Vulkan Tests")
+{
+}
+
+TestPackage::~TestPackage (void)
+{
+}
+
+tcu::TestCaseExecutor* TestPackage::createExecutor (void) const
+{
+	return new TestCaseExecutor(m_testCtx);
+}
+
+void TestPackage::init (void)
+{
+	addChild(createTestGroup			(m_testCtx, "info", "Build and Device Info Tests", createInfoTests));
+	addChild(api::createTests			(m_testCtx));
+	addChild(pipeline::createTests		(m_testCtx));
+	addChild(BindingModel::createTests	(m_testCtx));
+	addChild(SpirVAssembly::createTests	(m_testCtx));
+	addChild(createTestGroup			(m_testCtx, "glsl", "GLSL shader execution tests", createGlslTests));
+	addChild(createRenderPassTests		(m_testCtx));
+	addChild(memory::createTests		(m_testCtx));
+	addChild(ubo::createTests			(m_testCtx));
+	addChild(DynamicState::createTests	(m_testCtx));
+	addChild(ssbo::createTests			(m_testCtx));
+	addChild(QueryPool::createTests		(m_testCtx));
+	addChild(Draw::createTests			(m_testCtx));
+	addChild(compute::createTests		(m_testCtx));
+	addChild(image::createTests			(m_testCtx));
+}
+
+} // vkt
diff --git a/external/vulkancts/modules/vulkan/vktTestPackage.hpp b/external/vulkancts/modules/vulkan/vktTestPackage.hpp
new file mode 100644
index 0000000..f50ab91
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktTestPackage.hpp
@@ -0,0 +1,57 @@
+#ifndef _VKTTESTPACKAGE_HPP
+#define _VKTTESTPACKAGE_HPP
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Test Package
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestPackage.hpp"
+#include "tcuResource.hpp"
+#include "vktTestCase.hpp"
+
+namespace vkt
+{
+
+class TestPackage : public tcu::TestPackage
+{
+public:
+								TestPackage			(tcu::TestContext& testCtx);
+	virtual						~TestPackage		(void);
+
+	virtual void				init				(void);
+	tcu::TestCaseExecutor*		createExecutor		(void) const;
+};
+
+} // vkt
+
+#endif // _VKTTESTPACKAGE_HPP
diff --git a/external/vulkancts/modules/vulkan/vktTestPackageEntry.cpp b/external/vulkancts/modules/vulkan/vktTestPackageEntry.cpp
new file mode 100644
index 0000000..6b377e7
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vktTestPackageEntry.cpp
@@ -0,0 +1,44 @@
+/*-------------------------------------------------------------------------
+ * Vulkan Conformance Tests
+ * ------------------------
+ *
+ * Copyright (c) 2015 Google Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and/or associated documentation files (the
+ * "Materials"), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice(s) and this permission notice shall be
+ * included in all copies or substantial portions of the Materials.
+ *
+ * The Materials are Confidential Information as defined by the
+ * Khronos Membership Agreement until designated non-confidential by
+ * Khronos, at which point this condition clause shall be removed.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan Test Package Entry Point.
+ *//*--------------------------------------------------------------------*/
+
+#include "vktTestPackage.hpp"
+
+// Register the test package with the test executor.
+
+static tcu::TestPackage* createTestPackage (tcu::TestContext& testCtx)
+{
+	return new vkt::TestPackage(testCtx);
+}
+
+tcu::TestPackageDescriptor g_vktPackageDescriptor("dEQP-VK", createTestPackage);
diff --git a/external/vulkancts/modules/vulkan/vulkan.cmake b/external/vulkancts/modules/vulkan/vulkan.cmake
new file mode 100644
index 0000000..2b785b0
--- /dev/null
+++ b/external/vulkancts/modules/vulkan/vulkan.cmake
@@ -0,0 +1 @@
+add_subdirectory(vulkan)
diff --git a/external/vulkancts/mustpass/1.0.0/.gitignore b/external/vulkancts/mustpass/1.0.0/.gitignore
new file mode 100644
index 0000000..0f7a745
--- /dev/null
+++ b/external/vulkancts/mustpass/1.0.0/.gitignore
@@ -0,0 +1,2 @@
+com.drawelements.deqp.vk.xml
+mustpass.xml
diff --git a/external/vulkancts/mustpass/1.0.0/src/excluded-tests.txt b/external/vulkancts/mustpass/1.0.0/src/excluded-tests.txt
new file mode 100644
index 0000000..7b31b48
--- /dev/null
+++ b/external/vulkancts/mustpass/1.0.0/src/excluded-tests.txt
@@ -0,0 +1,20 @@
+# Issue 218: Excluded built-in function precision tests
+dEQP-VK.glsl.builtin.precision.clamp.highp_compute.*
+dEQP-VK.glsl.builtin.precision.cos.mediump_*
+dEQP-VK.glsl.builtin.precision.div.highp_compute.*
+dEQP-VK.glsl.builtin.precision.max.highp_compute.*
+dEQP-VK.glsl.builtin.precision.min.highp_compute.*
+dEQP-VK.glsl.builtin.precision.mod.highp_compute.*
+dEQP-VK.glsl.builtin.precision.reflect.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.sin.mediump_compute.*
+dEQP-VK.glsl.builtin.precision.tan.mediump_compute.*
+
+# Issue 242: Excluded blend tests where propagated errors may be large
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
diff --git a/external/vulkancts/mustpass/1.0.0/src/master.txt b/external/vulkancts/mustpass/1.0.0/src/master.txt
new file mode 100644
index 0000000..6db63aa
--- /dev/null
+++ b/external/vulkancts/mustpass/1.0.0/src/master.txt
@@ -0,0 +1 @@
+dEQP-VK.*
diff --git a/external/vulkancts/mustpass/1.0.0/src/test-issues.txt b/external/vulkancts/mustpass/1.0.0/src/test-issues.txt
new file mode 100644
index 0000000..a0a3563
--- /dev/null
+++ b/external/vulkancts/mustpass/1.0.0/src/test-issues.txt
@@ -0,0 +1,21 @@
+# Issue 217: Built-in function precision test issues
+dEQP-VK.glsl.builtin.precision.dot.highp_compute.*
+dEQP-VK.glsl.builtin.precision.smoothstep.mediump_compute.*
+dEQP-VK.glsl.builtin.precision.smoothstep.highp_compute.*
+dEQP-VK.glsl.builtin.precision.atan2.mediump_compute.*
+dEQP-VK.glsl.builtin.precision.atan2.highp_compute.*
+dEQP-VK.glsl.builtin.precision.acosh.highp_compute.*
+dEQP-VK.glsl.builtin.precision.atanh.highp_compute.*
+dEQP-VK.glsl.builtin.precision.atanh.mediump_compute.*
+dEQP-VK.glsl.builtin.precision.reflect.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.reflect.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.acos.mediump_compute.*
+dEQP-VK.glsl.builtin.precision.atan.mediump_compute.*
+dEQP-VK.glsl.builtin.precision.inverse.highp_compute.mat2
+dEQP-VK.glsl.builtin.precision.refract.*
+
+# Issue 233: Negative zero flat varying issue
+dEQP-VK.glsl.builtin.function.common.frexp.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.frexp.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.frexp.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.frexp.vec4_highp_vertex
diff --git a/external/vulkancts/mustpass/1.0.0/vk-default.txt b/external/vulkancts/mustpass/1.0.0/vk-default.txt
new file mode 100644
index 0000000..bee57b9
--- /dev/null
+++ b/external/vulkancts/mustpass/1.0.0/vk-default.txt
@@ -0,0 +1,80319 @@
+dEQP-VK.info.build
+dEQP-VK.info.device
+dEQP-VK.info.platform
+dEQP-VK.api.smoke.create_sampler
+dEQP-VK.api.smoke.create_shader
+dEQP-VK.api.smoke.triangle
+dEQP-VK.api.smoke.asm_triangle
+dEQP-VK.api.info.instance.physical_devices
+dEQP-VK.api.info.instance.layers
+dEQP-VK.api.info.instance.extensions
+dEQP-VK.api.info.device.features
+dEQP-VK.api.info.device.properties
+dEQP-VK.api.info.device.queue_family_properties
+dEQP-VK.api.info.device.memory_properties
+dEQP-VK.api.info.device.layers
+dEQP-VK.api.info.device.extensions
+dEQP-VK.api.info.format_properties.r4g4_unorm_pack8
+dEQP-VK.api.info.format_properties.r4g4b4a4_unorm_pack16
+dEQP-VK.api.info.format_properties.b4g4r4a4_unorm_pack16
+dEQP-VK.api.info.format_properties.r5g6b5_unorm_pack16
+dEQP-VK.api.info.format_properties.b5g6r5_unorm_pack16
+dEQP-VK.api.info.format_properties.r5g5b5a1_unorm_pack16
+dEQP-VK.api.info.format_properties.b5g5r5a1_unorm_pack16
+dEQP-VK.api.info.format_properties.a1r5g5b5_unorm_pack16
+dEQP-VK.api.info.format_properties.r8_unorm
+dEQP-VK.api.info.format_properties.r8_snorm
+dEQP-VK.api.info.format_properties.r8_uscaled
+dEQP-VK.api.info.format_properties.r8_sscaled
+dEQP-VK.api.info.format_properties.r8_uint
+dEQP-VK.api.info.format_properties.r8_sint
+dEQP-VK.api.info.format_properties.r8_srgb
+dEQP-VK.api.info.format_properties.r8g8_unorm
+dEQP-VK.api.info.format_properties.r8g8_snorm
+dEQP-VK.api.info.format_properties.r8g8_uscaled
+dEQP-VK.api.info.format_properties.r8g8_sscaled
+dEQP-VK.api.info.format_properties.r8g8_uint
+dEQP-VK.api.info.format_properties.r8g8_sint
+dEQP-VK.api.info.format_properties.r8g8_srgb
+dEQP-VK.api.info.format_properties.r8g8b8_unorm
+dEQP-VK.api.info.format_properties.r8g8b8_snorm
+dEQP-VK.api.info.format_properties.r8g8b8_uscaled
+dEQP-VK.api.info.format_properties.r8g8b8_sscaled
+dEQP-VK.api.info.format_properties.r8g8b8_uint
+dEQP-VK.api.info.format_properties.r8g8b8_sint
+dEQP-VK.api.info.format_properties.r8g8b8_srgb
+dEQP-VK.api.info.format_properties.b8g8r8_unorm
+dEQP-VK.api.info.format_properties.b8g8r8_snorm
+dEQP-VK.api.info.format_properties.b8g8r8_uscaled
+dEQP-VK.api.info.format_properties.b8g8r8_sscaled
+dEQP-VK.api.info.format_properties.b8g8r8_uint
+dEQP-VK.api.info.format_properties.b8g8r8_sint
+dEQP-VK.api.info.format_properties.b8g8r8_srgb
+dEQP-VK.api.info.format_properties.r8g8b8a8_unorm
+dEQP-VK.api.info.format_properties.r8g8b8a8_snorm
+dEQP-VK.api.info.format_properties.r8g8b8a8_uscaled
+dEQP-VK.api.info.format_properties.r8g8b8a8_sscaled
+dEQP-VK.api.info.format_properties.r8g8b8a8_uint
+dEQP-VK.api.info.format_properties.r8g8b8a8_sint
+dEQP-VK.api.info.format_properties.r8g8b8a8_srgb
+dEQP-VK.api.info.format_properties.b8g8r8a8_unorm
+dEQP-VK.api.info.format_properties.b8g8r8a8_snorm
+dEQP-VK.api.info.format_properties.b8g8r8a8_uscaled
+dEQP-VK.api.info.format_properties.b8g8r8a8_sscaled
+dEQP-VK.api.info.format_properties.b8g8r8a8_uint
+dEQP-VK.api.info.format_properties.b8g8r8a8_sint
+dEQP-VK.api.info.format_properties.b8g8r8a8_srgb
+dEQP-VK.api.info.format_properties.a8b8g8r8_unorm_pack32
+dEQP-VK.api.info.format_properties.a8b8g8r8_snorm_pack32
+dEQP-VK.api.info.format_properties.a8b8g8r8_uscaled_pack32
+dEQP-VK.api.info.format_properties.a8b8g8r8_sscaled_pack32
+dEQP-VK.api.info.format_properties.a8b8g8r8_uint_pack32
+dEQP-VK.api.info.format_properties.a8b8g8r8_sint_pack32
+dEQP-VK.api.info.format_properties.a8b8g8r8_srgb_pack32
+dEQP-VK.api.info.format_properties.a2r10g10b10_unorm_pack32
+dEQP-VK.api.info.format_properties.a2r10g10b10_snorm_pack32
+dEQP-VK.api.info.format_properties.a2r10g10b10_uscaled_pack32
+dEQP-VK.api.info.format_properties.a2r10g10b10_sscaled_pack32
+dEQP-VK.api.info.format_properties.a2r10g10b10_uint_pack32
+dEQP-VK.api.info.format_properties.a2r10g10b10_sint_pack32
+dEQP-VK.api.info.format_properties.a2b10g10r10_unorm_pack32
+dEQP-VK.api.info.format_properties.a2b10g10r10_snorm_pack32
+dEQP-VK.api.info.format_properties.a2b10g10r10_uscaled_pack32
+dEQP-VK.api.info.format_properties.a2b10g10r10_sscaled_pack32
+dEQP-VK.api.info.format_properties.a2b10g10r10_uint_pack32
+dEQP-VK.api.info.format_properties.a2b10g10r10_sint_pack32
+dEQP-VK.api.info.format_properties.r16_unorm
+dEQP-VK.api.info.format_properties.r16_snorm
+dEQP-VK.api.info.format_properties.r16_uscaled
+dEQP-VK.api.info.format_properties.r16_sscaled
+dEQP-VK.api.info.format_properties.r16_uint
+dEQP-VK.api.info.format_properties.r16_sint
+dEQP-VK.api.info.format_properties.r16_sfloat
+dEQP-VK.api.info.format_properties.r16g16_unorm
+dEQP-VK.api.info.format_properties.r16g16_snorm
+dEQP-VK.api.info.format_properties.r16g16_uscaled
+dEQP-VK.api.info.format_properties.r16g16_sscaled
+dEQP-VK.api.info.format_properties.r16g16_uint
+dEQP-VK.api.info.format_properties.r16g16_sint
+dEQP-VK.api.info.format_properties.r16g16_sfloat
+dEQP-VK.api.info.format_properties.r16g16b16_unorm
+dEQP-VK.api.info.format_properties.r16g16b16_snorm
+dEQP-VK.api.info.format_properties.r16g16b16_uscaled
+dEQP-VK.api.info.format_properties.r16g16b16_sscaled
+dEQP-VK.api.info.format_properties.r16g16b16_uint
+dEQP-VK.api.info.format_properties.r16g16b16_sint
+dEQP-VK.api.info.format_properties.r16g16b16_sfloat
+dEQP-VK.api.info.format_properties.r16g16b16a16_unorm
+dEQP-VK.api.info.format_properties.r16g16b16a16_snorm
+dEQP-VK.api.info.format_properties.r16g16b16a16_uscaled
+dEQP-VK.api.info.format_properties.r16g16b16a16_sscaled
+dEQP-VK.api.info.format_properties.r16g16b16a16_uint
+dEQP-VK.api.info.format_properties.r16g16b16a16_sint
+dEQP-VK.api.info.format_properties.r16g16b16a16_sfloat
+dEQP-VK.api.info.format_properties.r32_uint
+dEQP-VK.api.info.format_properties.r32_sint
+dEQP-VK.api.info.format_properties.r32_sfloat
+dEQP-VK.api.info.format_properties.r32g32_uint
+dEQP-VK.api.info.format_properties.r32g32_sint
+dEQP-VK.api.info.format_properties.r32g32_sfloat
+dEQP-VK.api.info.format_properties.r32g32b32_uint
+dEQP-VK.api.info.format_properties.r32g32b32_sint
+dEQP-VK.api.info.format_properties.r32g32b32_sfloat
+dEQP-VK.api.info.format_properties.r32g32b32a32_uint
+dEQP-VK.api.info.format_properties.r32g32b32a32_sint
+dEQP-VK.api.info.format_properties.r32g32b32a32_sfloat
+dEQP-VK.api.info.format_properties.r64_uint
+dEQP-VK.api.info.format_properties.r64_sint
+dEQP-VK.api.info.format_properties.r64_sfloat
+dEQP-VK.api.info.format_properties.r64g64_uint
+dEQP-VK.api.info.format_properties.r64g64_sint
+dEQP-VK.api.info.format_properties.r64g64_sfloat
+dEQP-VK.api.info.format_properties.r64g64b64_uint
+dEQP-VK.api.info.format_properties.r64g64b64_sint
+dEQP-VK.api.info.format_properties.r64g64b64_sfloat
+dEQP-VK.api.info.format_properties.r64g64b64a64_uint
+dEQP-VK.api.info.format_properties.r64g64b64a64_sint
+dEQP-VK.api.info.format_properties.r64g64b64a64_sfloat
+dEQP-VK.api.info.format_properties.b10g11r11_ufloat_pack32
+dEQP-VK.api.info.format_properties.e5b9g9r9_ufloat_pack32
+dEQP-VK.api.info.format_properties.d16_unorm
+dEQP-VK.api.info.format_properties.x8_d24_unorm_pack32
+dEQP-VK.api.info.format_properties.d32_sfloat
+dEQP-VK.api.info.format_properties.s8_uint
+dEQP-VK.api.info.format_properties.d16_unorm_s8_uint
+dEQP-VK.api.info.format_properties.d24_unorm_s8_uint
+dEQP-VK.api.info.format_properties.d32_sfloat_s8_uint
+dEQP-VK.api.info.format_properties.bc1_rgb_unorm_block
+dEQP-VK.api.info.format_properties.bc1_rgb_srgb_block
+dEQP-VK.api.info.format_properties.bc1_rgba_unorm_block
+dEQP-VK.api.info.format_properties.bc1_rgba_srgb_block
+dEQP-VK.api.info.format_properties.bc2_unorm_block
+dEQP-VK.api.info.format_properties.bc2_srgb_block
+dEQP-VK.api.info.format_properties.bc3_unorm_block
+dEQP-VK.api.info.format_properties.bc3_srgb_block
+dEQP-VK.api.info.format_properties.bc4_unorm_block
+dEQP-VK.api.info.format_properties.bc4_snorm_block
+dEQP-VK.api.info.format_properties.bc5_unorm_block
+dEQP-VK.api.info.format_properties.bc5_snorm_block
+dEQP-VK.api.info.format_properties.bc6h_ufloat_block
+dEQP-VK.api.info.format_properties.bc6h_sfloat_block
+dEQP-VK.api.info.format_properties.bc7_unorm_block
+dEQP-VK.api.info.format_properties.bc7_srgb_block
+dEQP-VK.api.info.format_properties.etc2_r8g8b8_unorm_block
+dEQP-VK.api.info.format_properties.etc2_r8g8b8_srgb_block
+dEQP-VK.api.info.format_properties.etc2_r8g8b8a1_unorm_block
+dEQP-VK.api.info.format_properties.etc2_r8g8b8a1_srgb_block
+dEQP-VK.api.info.format_properties.etc2_r8g8b8a8_unorm_block
+dEQP-VK.api.info.format_properties.etc2_r8g8b8a8_srgb_block
+dEQP-VK.api.info.format_properties.eac_r11_unorm_block
+dEQP-VK.api.info.format_properties.eac_r11_snorm_block
+dEQP-VK.api.info.format_properties.eac_r11g11_unorm_block
+dEQP-VK.api.info.format_properties.eac_r11g11_snorm_block
+dEQP-VK.api.info.format_properties.astc_4x4_unorm_block
+dEQP-VK.api.info.format_properties.astc_4x4_srgb_block
+dEQP-VK.api.info.format_properties.astc_5x4_unorm_block
+dEQP-VK.api.info.format_properties.astc_5x4_srgb_block
+dEQP-VK.api.info.format_properties.astc_5x5_unorm_block
+dEQP-VK.api.info.format_properties.astc_5x5_srgb_block
+dEQP-VK.api.info.format_properties.astc_6x5_unorm_block
+dEQP-VK.api.info.format_properties.astc_6x5_srgb_block
+dEQP-VK.api.info.format_properties.astc_6x6_unorm_block
+dEQP-VK.api.info.format_properties.astc_6x6_srgb_block
+dEQP-VK.api.info.format_properties.astc_8x5_unorm_block
+dEQP-VK.api.info.format_properties.astc_8x5_srgb_block
+dEQP-VK.api.info.format_properties.astc_8x6_unorm_block
+dEQP-VK.api.info.format_properties.astc_8x6_srgb_block
+dEQP-VK.api.info.format_properties.astc_8x8_unorm_block
+dEQP-VK.api.info.format_properties.astc_8x8_srgb_block
+dEQP-VK.api.info.format_properties.astc_10x5_unorm_block
+dEQP-VK.api.info.format_properties.astc_10x5_srgb_block
+dEQP-VK.api.info.format_properties.astc_10x6_unorm_block
+dEQP-VK.api.info.format_properties.astc_10x6_srgb_block
+dEQP-VK.api.info.format_properties.astc_10x8_unorm_block
+dEQP-VK.api.info.format_properties.astc_10x8_srgb_block
+dEQP-VK.api.info.format_properties.astc_10x10_unorm_block
+dEQP-VK.api.info.format_properties.astc_10x10_srgb_block
+dEQP-VK.api.info.format_properties.astc_12x10_unorm_block
+dEQP-VK.api.info.format_properties.astc_12x10_srgb_block
+dEQP-VK.api.info.format_properties.astc_12x12_unorm_block
+dEQP-VK.api.info.format_properties.astc_12x12_srgb_block
+dEQP-VK.api.info.format_properties.depth_stencil
+dEQP-VK.api.info.format_properties.compressed_formats
+dEQP-VK.api.info.image_format_properties.1d.optimal.r4g4_unorm_pack8
+dEQP-VK.api.info.image_format_properties.1d.optimal.r4g4b4a4_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.optimal.b4g4r4a4_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.optimal.r5g6b5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.optimal.b5g6r5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.optimal.r5g5b5a1_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.optimal.b5g5r5a1_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.optimal.a1r5g5b5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8_unorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8_snorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8_uscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8_sscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8_srgb
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8_unorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8_snorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8_uscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8_sscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8_srgb
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8_unorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8_snorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8_uscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8_sscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8_srgb
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8_unorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8_snorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8_uscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8_sscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8_srgb
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8a8_unorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8a8_snorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8a8_uscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8a8_sscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8a8_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8a8_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r8g8b8a8_srgb
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8a8_unorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8a8_snorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8a8_uscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8a8_sscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8a8_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8a8_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.b8g8r8a8_srgb
+dEQP-VK.api.info.image_format_properties.1d.optimal.a8b8g8r8_unorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a8b8g8r8_snorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a8b8g8r8_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a8b8g8r8_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a8b8g8r8_uint_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a8b8g8r8_sint_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a8b8g8r8_srgb_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a2r10g10b10_unorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a2r10g10b10_snorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a2r10g10b10_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a2r10g10b10_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a2r10g10b10_uint_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a2r10g10b10_sint_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a2b10g10r10_unorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a2b10g10r10_snorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a2b10g10r10_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a2b10g10r10_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a2b10g10r10_uint_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.a2b10g10r10_sint_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16_unorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16_snorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16_uscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16_sscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16_sfloat
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16_unorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16_snorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16_uscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16_sscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16_sfloat
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16_unorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16_snorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16_uscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16_sscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16_sfloat
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16a16_unorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16a16_snorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16a16_uscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16a16_sscaled
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16a16_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16a16_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r16g16b16a16_sfloat
+dEQP-VK.api.info.image_format_properties.1d.optimal.r32_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r32_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r32_sfloat
+dEQP-VK.api.info.image_format_properties.1d.optimal.r32g32_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r32g32_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r32g32_sfloat
+dEQP-VK.api.info.image_format_properties.1d.optimal.r32g32b32_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r32g32b32_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r32g32b32_sfloat
+dEQP-VK.api.info.image_format_properties.1d.optimal.r32g32b32a32_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r32g32b32a32_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r32g32b32a32_sfloat
+dEQP-VK.api.info.image_format_properties.1d.optimal.r64_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r64_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r64_sfloat
+dEQP-VK.api.info.image_format_properties.1d.optimal.r64g64_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r64g64_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r64g64_sfloat
+dEQP-VK.api.info.image_format_properties.1d.optimal.r64g64b64_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r64g64b64_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r64g64b64_sfloat
+dEQP-VK.api.info.image_format_properties.1d.optimal.r64g64b64a64_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r64g64b64a64_sint
+dEQP-VK.api.info.image_format_properties.1d.optimal.r64g64b64a64_sfloat
+dEQP-VK.api.info.image_format_properties.1d.optimal.b10g11r11_ufloat_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.e5b9g9r9_ufloat_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.d16_unorm
+dEQP-VK.api.info.image_format_properties.1d.optimal.x8_d24_unorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.optimal.d32_sfloat
+dEQP-VK.api.info.image_format_properties.1d.optimal.s8_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.d16_unorm_s8_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.d24_unorm_s8_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.d32_sfloat_s8_uint
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc1_rgb_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc1_rgb_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc1_rgba_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc1_rgba_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc2_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc2_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc3_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc3_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc4_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc4_snorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc5_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc5_snorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc6h_ufloat_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc6h_sfloat_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc7_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.bc7_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.etc2_r8g8b8_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.etc2_r8g8b8_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.etc2_r8g8b8a1_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.etc2_r8g8b8a1_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.etc2_r8g8b8a8_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.etc2_r8g8b8a8_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.eac_r11_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.eac_r11_snorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.eac_r11g11_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.eac_r11g11_snorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_4x4_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_4x4_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_5x4_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_5x4_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_5x5_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_5x5_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_6x5_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_6x5_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_6x6_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_6x6_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_8x5_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_8x5_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_8x6_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_8x6_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_8x8_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_8x8_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_10x5_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_10x5_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_10x6_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_10x6_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_10x8_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_10x8_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_10x10_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_10x10_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_12x10_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_12x10_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_12x12_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.optimal.astc_12x12_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.r4g4_unorm_pack8
+dEQP-VK.api.info.image_format_properties.1d.linear.r4g4b4a4_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.linear.b4g4r4a4_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.linear.r5g6b5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.linear.b5g6r5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.linear.r5g5b5a1_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.linear.b5g5r5a1_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.linear.a1r5g5b5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.1d.linear.r8_unorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r8_snorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r8_uscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r8_sscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r8_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r8_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r8_srgb
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8_unorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8_snorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8_uscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8_sscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8_srgb
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8_unorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8_snorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8_uscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8_sscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8_srgb
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8_unorm
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8_snorm
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8_uscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8_sscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8_srgb
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8a8_unorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8a8_snorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8a8_uscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8a8_sscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8a8_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8a8_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r8g8b8a8_srgb
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8a8_unorm
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8a8_snorm
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8a8_uscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8a8_sscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8a8_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8a8_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.b8g8r8a8_srgb
+dEQP-VK.api.info.image_format_properties.1d.linear.a8b8g8r8_unorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a8b8g8r8_snorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a8b8g8r8_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a8b8g8r8_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a8b8g8r8_uint_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a8b8g8r8_sint_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a8b8g8r8_srgb_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a2r10g10b10_unorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a2r10g10b10_snorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a2r10g10b10_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a2r10g10b10_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a2r10g10b10_uint_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a2r10g10b10_sint_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a2b10g10r10_unorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a2b10g10r10_snorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a2b10g10r10_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a2b10g10r10_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a2b10g10r10_uint_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.a2b10g10r10_sint_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.r16_unorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r16_snorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r16_uscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r16_sscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r16_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r16_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r16_sfloat
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16_unorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16_snorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16_uscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16_sscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16_sfloat
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16_unorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16_snorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16_uscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16_sscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16_sfloat
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16a16_unorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16a16_snorm
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16a16_uscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16a16_sscaled
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16a16_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16a16_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r16g16b16a16_sfloat
+dEQP-VK.api.info.image_format_properties.1d.linear.r32_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r32_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r32_sfloat
+dEQP-VK.api.info.image_format_properties.1d.linear.r32g32_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r32g32_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r32g32_sfloat
+dEQP-VK.api.info.image_format_properties.1d.linear.r32g32b32_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r32g32b32_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r32g32b32_sfloat
+dEQP-VK.api.info.image_format_properties.1d.linear.r32g32b32a32_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r32g32b32a32_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r32g32b32a32_sfloat
+dEQP-VK.api.info.image_format_properties.1d.linear.r64_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r64_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r64_sfloat
+dEQP-VK.api.info.image_format_properties.1d.linear.r64g64_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r64g64_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r64g64_sfloat
+dEQP-VK.api.info.image_format_properties.1d.linear.r64g64b64_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r64g64b64_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r64g64b64_sfloat
+dEQP-VK.api.info.image_format_properties.1d.linear.r64g64b64a64_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.r64g64b64a64_sint
+dEQP-VK.api.info.image_format_properties.1d.linear.r64g64b64a64_sfloat
+dEQP-VK.api.info.image_format_properties.1d.linear.b10g11r11_ufloat_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.e5b9g9r9_ufloat_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.d16_unorm
+dEQP-VK.api.info.image_format_properties.1d.linear.x8_d24_unorm_pack32
+dEQP-VK.api.info.image_format_properties.1d.linear.d32_sfloat
+dEQP-VK.api.info.image_format_properties.1d.linear.s8_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.d16_unorm_s8_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.d24_unorm_s8_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.d32_sfloat_s8_uint
+dEQP-VK.api.info.image_format_properties.1d.linear.bc1_rgb_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc1_rgb_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc1_rgba_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc1_rgba_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc2_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc2_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc3_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc3_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc4_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc4_snorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc5_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc5_snorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc6h_ufloat_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc6h_sfloat_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc7_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.bc7_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.etc2_r8g8b8_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.etc2_r8g8b8_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.etc2_r8g8b8a1_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.etc2_r8g8b8a1_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.etc2_r8g8b8a8_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.etc2_r8g8b8a8_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.eac_r11_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.eac_r11_snorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.eac_r11g11_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.eac_r11g11_snorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_4x4_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_4x4_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_5x4_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_5x4_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_5x5_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_5x5_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_6x5_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_6x5_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_6x6_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_6x6_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_8x5_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_8x5_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_8x6_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_8x6_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_8x8_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_8x8_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_10x5_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_10x5_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_10x6_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_10x6_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_10x8_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_10x8_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_10x10_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_10x10_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_12x10_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_12x10_srgb_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_12x12_unorm_block
+dEQP-VK.api.info.image_format_properties.1d.linear.astc_12x12_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.r4g4_unorm_pack8
+dEQP-VK.api.info.image_format_properties.2d.optimal.r4g4b4a4_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.optimal.b4g4r4a4_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.optimal.r5g6b5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.optimal.b5g6r5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.optimal.r5g5b5a1_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.optimal.b5g5r5a1_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.optimal.a1r5g5b5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8_unorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8_snorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8_uscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8_sscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8_srgb
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8_unorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8_snorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8_uscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8_sscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8_srgb
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8_unorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8_snorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8_uscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8_sscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8_srgb
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8_unorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8_snorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8_uscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8_sscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8_srgb
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8a8_unorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8a8_snorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8a8_uscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8a8_sscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8a8_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8a8_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r8g8b8a8_srgb
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8a8_unorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8a8_snorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8a8_uscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8a8_sscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8a8_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8a8_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.b8g8r8a8_srgb
+dEQP-VK.api.info.image_format_properties.2d.optimal.a8b8g8r8_unorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a8b8g8r8_snorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a8b8g8r8_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a8b8g8r8_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a8b8g8r8_uint_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a8b8g8r8_sint_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a8b8g8r8_srgb_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a2r10g10b10_unorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a2r10g10b10_snorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a2r10g10b10_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a2r10g10b10_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a2r10g10b10_uint_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a2r10g10b10_sint_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a2b10g10r10_unorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a2b10g10r10_snorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a2b10g10r10_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a2b10g10r10_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a2b10g10r10_uint_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.a2b10g10r10_sint_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16_unorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16_snorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16_uscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16_sscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16_sfloat
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16_unorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16_snorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16_uscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16_sscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16_sfloat
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16_unorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16_snorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16_uscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16_sscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16_sfloat
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16a16_unorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16a16_snorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16a16_uscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16a16_sscaled
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16a16_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16a16_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r16g16b16a16_sfloat
+dEQP-VK.api.info.image_format_properties.2d.optimal.r32_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r32_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r32_sfloat
+dEQP-VK.api.info.image_format_properties.2d.optimal.r32g32_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r32g32_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r32g32_sfloat
+dEQP-VK.api.info.image_format_properties.2d.optimal.r32g32b32_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r32g32b32_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r32g32b32_sfloat
+dEQP-VK.api.info.image_format_properties.2d.optimal.r32g32b32a32_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r32g32b32a32_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r32g32b32a32_sfloat
+dEQP-VK.api.info.image_format_properties.2d.optimal.r64_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r64_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r64_sfloat
+dEQP-VK.api.info.image_format_properties.2d.optimal.r64g64_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r64g64_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r64g64_sfloat
+dEQP-VK.api.info.image_format_properties.2d.optimal.r64g64b64_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r64g64b64_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r64g64b64_sfloat
+dEQP-VK.api.info.image_format_properties.2d.optimal.r64g64b64a64_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r64g64b64a64_sint
+dEQP-VK.api.info.image_format_properties.2d.optimal.r64g64b64a64_sfloat
+dEQP-VK.api.info.image_format_properties.2d.optimal.b10g11r11_ufloat_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.e5b9g9r9_ufloat_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.d16_unorm
+dEQP-VK.api.info.image_format_properties.2d.optimal.x8_d24_unorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.optimal.d32_sfloat
+dEQP-VK.api.info.image_format_properties.2d.optimal.s8_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.d16_unorm_s8_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.d24_unorm_s8_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.d32_sfloat_s8_uint
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc1_rgb_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc1_rgb_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc1_rgba_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc1_rgba_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc2_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc2_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc3_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc3_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc4_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc4_snorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc5_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc5_snorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc6h_ufloat_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc6h_sfloat_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc7_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.bc7_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.etc2_r8g8b8_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.etc2_r8g8b8_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.etc2_r8g8b8a1_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.etc2_r8g8b8a1_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.etc2_r8g8b8a8_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.etc2_r8g8b8a8_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.eac_r11_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.eac_r11_snorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.eac_r11g11_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.eac_r11g11_snorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_4x4_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_4x4_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_5x4_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_5x4_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_5x5_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_5x5_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_6x5_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_6x5_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_6x6_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_6x6_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_8x5_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_8x5_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_8x6_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_8x6_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_8x8_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_8x8_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_10x5_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_10x5_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_10x6_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_10x6_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_10x8_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_10x8_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_10x10_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_10x10_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_12x10_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_12x10_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_12x12_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.optimal.astc_12x12_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.r4g4_unorm_pack8
+dEQP-VK.api.info.image_format_properties.2d.linear.r4g4b4a4_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.linear.b4g4r4a4_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.linear.r5g6b5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.linear.b5g6r5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.linear.r5g5b5a1_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.linear.b5g5r5a1_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.linear.a1r5g5b5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.2d.linear.r8_unorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r8_snorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r8_uscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r8_sscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r8_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r8_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r8_srgb
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8_unorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8_snorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8_uscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8_sscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8_srgb
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8_unorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8_snorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8_uscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8_sscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8_srgb
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8_unorm
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8_snorm
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8_uscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8_sscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8_srgb
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8a8_unorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8a8_snorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8a8_uscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8a8_sscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8a8_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8a8_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r8g8b8a8_srgb
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8a8_unorm
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8a8_snorm
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8a8_uscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8a8_sscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8a8_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8a8_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.b8g8r8a8_srgb
+dEQP-VK.api.info.image_format_properties.2d.linear.a8b8g8r8_unorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a8b8g8r8_snorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a8b8g8r8_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a8b8g8r8_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a8b8g8r8_uint_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a8b8g8r8_sint_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a8b8g8r8_srgb_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a2r10g10b10_unorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a2r10g10b10_snorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a2r10g10b10_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a2r10g10b10_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a2r10g10b10_uint_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a2r10g10b10_sint_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a2b10g10r10_unorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a2b10g10r10_snorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a2b10g10r10_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a2b10g10r10_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a2b10g10r10_uint_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.a2b10g10r10_sint_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.r16_unorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r16_snorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r16_uscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r16_sscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r16_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r16_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r16_sfloat
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16_unorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16_snorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16_uscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16_sscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16_sfloat
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16_unorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16_snorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16_uscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16_sscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16_sfloat
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16a16_unorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16a16_snorm
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16a16_uscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16a16_sscaled
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16a16_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16a16_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r16g16b16a16_sfloat
+dEQP-VK.api.info.image_format_properties.2d.linear.r32_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r32_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r32_sfloat
+dEQP-VK.api.info.image_format_properties.2d.linear.r32g32_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r32g32_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r32g32_sfloat
+dEQP-VK.api.info.image_format_properties.2d.linear.r32g32b32_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r32g32b32_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r32g32b32_sfloat
+dEQP-VK.api.info.image_format_properties.2d.linear.r32g32b32a32_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r32g32b32a32_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r32g32b32a32_sfloat
+dEQP-VK.api.info.image_format_properties.2d.linear.r64_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r64_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r64_sfloat
+dEQP-VK.api.info.image_format_properties.2d.linear.r64g64_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r64g64_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r64g64_sfloat
+dEQP-VK.api.info.image_format_properties.2d.linear.r64g64b64_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r64g64b64_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r64g64b64_sfloat
+dEQP-VK.api.info.image_format_properties.2d.linear.r64g64b64a64_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.r64g64b64a64_sint
+dEQP-VK.api.info.image_format_properties.2d.linear.r64g64b64a64_sfloat
+dEQP-VK.api.info.image_format_properties.2d.linear.b10g11r11_ufloat_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.e5b9g9r9_ufloat_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.d16_unorm
+dEQP-VK.api.info.image_format_properties.2d.linear.x8_d24_unorm_pack32
+dEQP-VK.api.info.image_format_properties.2d.linear.d32_sfloat
+dEQP-VK.api.info.image_format_properties.2d.linear.s8_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.d16_unorm_s8_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.d24_unorm_s8_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.d32_sfloat_s8_uint
+dEQP-VK.api.info.image_format_properties.2d.linear.bc1_rgb_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc1_rgb_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc1_rgba_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc1_rgba_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc2_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc2_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc3_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc3_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc4_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc4_snorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc5_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc5_snorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc6h_ufloat_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc6h_sfloat_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc7_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.bc7_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.etc2_r8g8b8_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.etc2_r8g8b8_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.etc2_r8g8b8a1_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.etc2_r8g8b8a1_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.etc2_r8g8b8a8_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.etc2_r8g8b8a8_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.eac_r11_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.eac_r11_snorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.eac_r11g11_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.eac_r11g11_snorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_4x4_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_4x4_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_5x4_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_5x4_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_5x5_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_5x5_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_6x5_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_6x5_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_6x6_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_6x6_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_8x5_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_8x5_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_8x6_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_8x6_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_8x8_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_8x8_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_10x5_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_10x5_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_10x6_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_10x6_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_10x8_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_10x8_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_10x10_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_10x10_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_12x10_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_12x10_srgb_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_12x12_unorm_block
+dEQP-VK.api.info.image_format_properties.2d.linear.astc_12x12_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.r4g4_unorm_pack8
+dEQP-VK.api.info.image_format_properties.3d.optimal.r4g4b4a4_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.optimal.b4g4r4a4_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.optimal.r5g6b5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.optimal.b5g6r5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.optimal.r5g5b5a1_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.optimal.b5g5r5a1_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.optimal.a1r5g5b5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8_unorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8_snorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8_uscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8_sscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8_srgb
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8_unorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8_snorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8_uscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8_sscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8_srgb
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8_unorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8_snorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8_uscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8_sscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8_srgb
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8_unorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8_snorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8_uscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8_sscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8_srgb
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8a8_unorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8a8_snorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8a8_uscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8a8_sscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8a8_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8a8_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r8g8b8a8_srgb
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8a8_unorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8a8_snorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8a8_uscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8a8_sscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8a8_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8a8_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.b8g8r8a8_srgb
+dEQP-VK.api.info.image_format_properties.3d.optimal.a8b8g8r8_unorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a8b8g8r8_snorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a8b8g8r8_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a8b8g8r8_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a8b8g8r8_uint_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a8b8g8r8_sint_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a8b8g8r8_srgb_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a2r10g10b10_unorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a2r10g10b10_snorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a2r10g10b10_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a2r10g10b10_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a2r10g10b10_uint_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a2r10g10b10_sint_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a2b10g10r10_unorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a2b10g10r10_snorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a2b10g10r10_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a2b10g10r10_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a2b10g10r10_uint_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.a2b10g10r10_sint_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16_unorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16_snorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16_uscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16_sscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16_sfloat
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16_unorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16_snorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16_uscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16_sscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16_sfloat
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16_unorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16_snorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16_uscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16_sscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16_sfloat
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16a16_unorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16a16_snorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16a16_uscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16a16_sscaled
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16a16_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16a16_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r16g16b16a16_sfloat
+dEQP-VK.api.info.image_format_properties.3d.optimal.r32_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r32_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r32_sfloat
+dEQP-VK.api.info.image_format_properties.3d.optimal.r32g32_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r32g32_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r32g32_sfloat
+dEQP-VK.api.info.image_format_properties.3d.optimal.r32g32b32_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r32g32b32_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r32g32b32_sfloat
+dEQP-VK.api.info.image_format_properties.3d.optimal.r32g32b32a32_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r32g32b32a32_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r32g32b32a32_sfloat
+dEQP-VK.api.info.image_format_properties.3d.optimal.r64_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r64_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r64_sfloat
+dEQP-VK.api.info.image_format_properties.3d.optimal.r64g64_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r64g64_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r64g64_sfloat
+dEQP-VK.api.info.image_format_properties.3d.optimal.r64g64b64_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r64g64b64_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r64g64b64_sfloat
+dEQP-VK.api.info.image_format_properties.3d.optimal.r64g64b64a64_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r64g64b64a64_sint
+dEQP-VK.api.info.image_format_properties.3d.optimal.r64g64b64a64_sfloat
+dEQP-VK.api.info.image_format_properties.3d.optimal.b10g11r11_ufloat_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.e5b9g9r9_ufloat_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.d16_unorm
+dEQP-VK.api.info.image_format_properties.3d.optimal.x8_d24_unorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.optimal.d32_sfloat
+dEQP-VK.api.info.image_format_properties.3d.optimal.s8_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.d16_unorm_s8_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.d24_unorm_s8_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.d32_sfloat_s8_uint
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc1_rgb_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc1_rgb_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc1_rgba_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc1_rgba_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc2_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc2_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc3_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc3_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc4_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc4_snorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc5_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc5_snorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc6h_ufloat_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc6h_sfloat_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc7_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.bc7_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.etc2_r8g8b8_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.etc2_r8g8b8_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.etc2_r8g8b8a1_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.etc2_r8g8b8a1_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.etc2_r8g8b8a8_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.etc2_r8g8b8a8_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.eac_r11_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.eac_r11_snorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.eac_r11g11_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.eac_r11g11_snorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_4x4_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_4x4_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_5x4_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_5x4_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_5x5_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_5x5_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_6x5_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_6x5_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_6x6_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_6x6_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_8x5_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_8x5_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_8x6_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_8x6_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_8x8_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_8x8_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_10x5_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_10x5_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_10x6_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_10x6_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_10x8_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_10x8_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_10x10_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_10x10_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_12x10_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_12x10_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_12x12_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.optimal.astc_12x12_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.r4g4_unorm_pack8
+dEQP-VK.api.info.image_format_properties.3d.linear.r4g4b4a4_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.linear.b4g4r4a4_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.linear.r5g6b5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.linear.b5g6r5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.linear.r5g5b5a1_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.linear.b5g5r5a1_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.linear.a1r5g5b5_unorm_pack16
+dEQP-VK.api.info.image_format_properties.3d.linear.r8_unorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r8_snorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r8_uscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r8_sscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r8_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r8_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r8_srgb
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8_unorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8_snorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8_uscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8_sscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8_srgb
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8_unorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8_snorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8_uscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8_sscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8_srgb
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8_unorm
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8_snorm
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8_uscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8_sscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8_srgb
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8a8_unorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8a8_snorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8a8_uscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8a8_sscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8a8_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8a8_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r8g8b8a8_srgb
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8a8_unorm
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8a8_snorm
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8a8_uscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8a8_sscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8a8_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8a8_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.b8g8r8a8_srgb
+dEQP-VK.api.info.image_format_properties.3d.linear.a8b8g8r8_unorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a8b8g8r8_snorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a8b8g8r8_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a8b8g8r8_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a8b8g8r8_uint_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a8b8g8r8_sint_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a8b8g8r8_srgb_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a2r10g10b10_unorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a2r10g10b10_snorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a2r10g10b10_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a2r10g10b10_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a2r10g10b10_uint_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a2r10g10b10_sint_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a2b10g10r10_unorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a2b10g10r10_snorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a2b10g10r10_uscaled_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a2b10g10r10_sscaled_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a2b10g10r10_uint_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.a2b10g10r10_sint_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.r16_unorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r16_snorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r16_uscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r16_sscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r16_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r16_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r16_sfloat
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16_unorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16_snorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16_uscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16_sscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16_sfloat
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16_unorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16_snorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16_uscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16_sscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16_sfloat
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16a16_unorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16a16_snorm
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16a16_uscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16a16_sscaled
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16a16_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16a16_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r16g16b16a16_sfloat
+dEQP-VK.api.info.image_format_properties.3d.linear.r32_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r32_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r32_sfloat
+dEQP-VK.api.info.image_format_properties.3d.linear.r32g32_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r32g32_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r32g32_sfloat
+dEQP-VK.api.info.image_format_properties.3d.linear.r32g32b32_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r32g32b32_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r32g32b32_sfloat
+dEQP-VK.api.info.image_format_properties.3d.linear.r32g32b32a32_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r32g32b32a32_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r32g32b32a32_sfloat
+dEQP-VK.api.info.image_format_properties.3d.linear.r64_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r64_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r64_sfloat
+dEQP-VK.api.info.image_format_properties.3d.linear.r64g64_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r64g64_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r64g64_sfloat
+dEQP-VK.api.info.image_format_properties.3d.linear.r64g64b64_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r64g64b64_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r64g64b64_sfloat
+dEQP-VK.api.info.image_format_properties.3d.linear.r64g64b64a64_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.r64g64b64a64_sint
+dEQP-VK.api.info.image_format_properties.3d.linear.r64g64b64a64_sfloat
+dEQP-VK.api.info.image_format_properties.3d.linear.b10g11r11_ufloat_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.e5b9g9r9_ufloat_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.d16_unorm
+dEQP-VK.api.info.image_format_properties.3d.linear.x8_d24_unorm_pack32
+dEQP-VK.api.info.image_format_properties.3d.linear.d32_sfloat
+dEQP-VK.api.info.image_format_properties.3d.linear.s8_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.d16_unorm_s8_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.d24_unorm_s8_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.d32_sfloat_s8_uint
+dEQP-VK.api.info.image_format_properties.3d.linear.bc1_rgb_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc1_rgb_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc1_rgba_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc1_rgba_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc2_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc2_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc3_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc3_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc4_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc4_snorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc5_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc5_snorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc6h_ufloat_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc6h_sfloat_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc7_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.bc7_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.etc2_r8g8b8_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.etc2_r8g8b8_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.etc2_r8g8b8a1_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.etc2_r8g8b8a1_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.etc2_r8g8b8a8_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.etc2_r8g8b8a8_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.eac_r11_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.eac_r11_snorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.eac_r11g11_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.eac_r11g11_snorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_4x4_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_4x4_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_5x4_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_5x4_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_5x5_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_5x5_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_6x5_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_6x5_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_6x6_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_6x6_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_8x5_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_8x5_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_8x6_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_8x6_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_8x8_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_8x8_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_10x5_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_10x5_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_10x6_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_10x6_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_10x8_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_10x8_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_10x10_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_10x10_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_12x10_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_12x10_srgb_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_12x12_unorm_block
+dEQP-VK.api.info.image_format_properties.3d.linear.astc_12x12_srgb_block
+dEQP-VK.api.device_init.create_instance_name_version
+dEQP-VK.api.device_init.create_instance_invalid_api_version
+dEQP-VK.api.device_init.create_instance_unsupported_extensions
+dEQP-VK.api.device_init.create_device
+dEQP-VK.api.device_init.create_multiple_devices
+dEQP-VK.api.device_init.create_device_unsupported_extensions
+dEQP-VK.api.device_init.create_device_various_queue_counts
+dEQP-VK.api.object_management.single.instance
+dEQP-VK.api.object_management.single.device
+dEQP-VK.api.object_management.single.device_memory_small
+dEQP-VK.api.object_management.single.buffer_uniform_small
+dEQP-VK.api.object_management.single.buffer_uniform_large
+dEQP-VK.api.object_management.single.buffer_storage_small
+dEQP-VK.api.object_management.single.buffer_storage_large
+dEQP-VK.api.object_management.single.buffer_view_uniform_r8g8b8a8_unorm
+dEQP-VK.api.object_management.single.buffer_view_storage_r8g8b8a8_unorm
+dEQP-VK.api.object_management.single.image_1d
+dEQP-VK.api.object_management.single.image_2d
+dEQP-VK.api.object_management.single.image_3d
+dEQP-VK.api.object_management.single.image_view_1d
+dEQP-VK.api.object_management.single.image_view_1d_arr
+dEQP-VK.api.object_management.single.image_view_2d
+dEQP-VK.api.object_management.single.image_view_2d_arr
+dEQP-VK.api.object_management.single.image_view_cube
+dEQP-VK.api.object_management.single.image_view_cube_arr
+dEQP-VK.api.object_management.single.image_view_3d
+dEQP-VK.api.object_management.single.semaphore
+dEQP-VK.api.object_management.single.event
+dEQP-VK.api.object_management.single.fence
+dEQP-VK.api.object_management.single.fence_signaled
+dEQP-VK.api.object_management.single.query_pool
+dEQP-VK.api.object_management.single.sampler
+dEQP-VK.api.object_management.single.shader_module
+dEQP-VK.api.object_management.single.pipeline_cache
+dEQP-VK.api.object_management.single.pipeline_layout_empty
+dEQP-VK.api.object_management.single.pipeline_layout_single
+dEQP-VK.api.object_management.single.render_pass
+dEQP-VK.api.object_management.single.graphics_pipeline
+dEQP-VK.api.object_management.single.compute_pipeline
+dEQP-VK.api.object_management.single.descriptor_set_layout_empty
+dEQP-VK.api.object_management.single.descriptor_set_layout_single
+dEQP-VK.api.object_management.single.descriptor_pool
+dEQP-VK.api.object_management.single.descriptor_pool_free_descriptor_set
+dEQP-VK.api.object_management.single.descriptor_set
+dEQP-VK.api.object_management.single.framebuffer
+dEQP-VK.api.object_management.single.command_pool
+dEQP-VK.api.object_management.single.command_pool_transient
+dEQP-VK.api.object_management.single.command_buffer_primary
+dEQP-VK.api.object_management.single.command_buffer_secondary
+dEQP-VK.api.object_management.multiple_unique_resources.instance
+dEQP-VK.api.object_management.multiple_unique_resources.device
+dEQP-VK.api.object_management.multiple_unique_resources.device_memory_small
+dEQP-VK.api.object_management.multiple_unique_resources.buffer_uniform_small
+dEQP-VK.api.object_management.multiple_unique_resources.buffer_uniform_large
+dEQP-VK.api.object_management.multiple_unique_resources.buffer_storage_small
+dEQP-VK.api.object_management.multiple_unique_resources.buffer_storage_large
+dEQP-VK.api.object_management.multiple_unique_resources.buffer_view_uniform_r8g8b8a8_unorm
+dEQP-VK.api.object_management.multiple_unique_resources.buffer_view_storage_r8g8b8a8_unorm
+dEQP-VK.api.object_management.multiple_unique_resources.image_1d
+dEQP-VK.api.object_management.multiple_unique_resources.image_2d
+dEQP-VK.api.object_management.multiple_unique_resources.image_3d
+dEQP-VK.api.object_management.multiple_unique_resources.image_view_1d
+dEQP-VK.api.object_management.multiple_unique_resources.image_view_1d_arr
+dEQP-VK.api.object_management.multiple_unique_resources.image_view_2d
+dEQP-VK.api.object_management.multiple_unique_resources.image_view_2d_arr
+dEQP-VK.api.object_management.multiple_unique_resources.image_view_cube
+dEQP-VK.api.object_management.multiple_unique_resources.image_view_cube_arr
+dEQP-VK.api.object_management.multiple_unique_resources.image_view_3d
+dEQP-VK.api.object_management.multiple_unique_resources.semaphore
+dEQP-VK.api.object_management.multiple_unique_resources.event
+dEQP-VK.api.object_management.multiple_unique_resources.fence
+dEQP-VK.api.object_management.multiple_unique_resources.fence_signaled
+dEQP-VK.api.object_management.multiple_unique_resources.query_pool
+dEQP-VK.api.object_management.multiple_unique_resources.sampler
+dEQP-VK.api.object_management.multiple_unique_resources.shader_module
+dEQP-VK.api.object_management.multiple_unique_resources.pipeline_cache
+dEQP-VK.api.object_management.multiple_unique_resources.pipeline_layout_empty
+dEQP-VK.api.object_management.multiple_unique_resources.pipeline_layout_single
+dEQP-VK.api.object_management.multiple_unique_resources.render_pass
+dEQP-VK.api.object_management.multiple_unique_resources.graphics_pipeline
+dEQP-VK.api.object_management.multiple_unique_resources.compute_pipeline
+dEQP-VK.api.object_management.multiple_unique_resources.descriptor_set_layout_empty
+dEQP-VK.api.object_management.multiple_unique_resources.descriptor_set_layout_single
+dEQP-VK.api.object_management.multiple_unique_resources.descriptor_pool
+dEQP-VK.api.object_management.multiple_unique_resources.descriptor_pool_free_descriptor_set
+dEQP-VK.api.object_management.multiple_unique_resources.descriptor_set
+dEQP-VK.api.object_management.multiple_unique_resources.framebuffer
+dEQP-VK.api.object_management.multiple_unique_resources.command_pool
+dEQP-VK.api.object_management.multiple_unique_resources.command_pool_transient
+dEQP-VK.api.object_management.multiple_unique_resources.command_buffer_primary
+dEQP-VK.api.object_management.multiple_unique_resources.command_buffer_secondary
+dEQP-VK.api.object_management.multiple_shared_resources.device
+dEQP-VK.api.object_management.multiple_shared_resources.device_memory_small
+dEQP-VK.api.object_management.multiple_shared_resources.buffer_uniform_small
+dEQP-VK.api.object_management.multiple_shared_resources.buffer_uniform_large
+dEQP-VK.api.object_management.multiple_shared_resources.buffer_storage_small
+dEQP-VK.api.object_management.multiple_shared_resources.buffer_storage_large
+dEQP-VK.api.object_management.multiple_shared_resources.buffer_view_uniform_r8g8b8a8_unorm
+dEQP-VK.api.object_management.multiple_shared_resources.buffer_view_storage_r8g8b8a8_unorm
+dEQP-VK.api.object_management.multiple_shared_resources.image_1d
+dEQP-VK.api.object_management.multiple_shared_resources.image_2d
+dEQP-VK.api.object_management.multiple_shared_resources.image_3d
+dEQP-VK.api.object_management.multiple_shared_resources.image_view_1d
+dEQP-VK.api.object_management.multiple_shared_resources.image_view_1d_arr
+dEQP-VK.api.object_management.multiple_shared_resources.image_view_2d
+dEQP-VK.api.object_management.multiple_shared_resources.image_view_2d_arr
+dEQP-VK.api.object_management.multiple_shared_resources.image_view_cube
+dEQP-VK.api.object_management.multiple_shared_resources.image_view_cube_arr
+dEQP-VK.api.object_management.multiple_shared_resources.image_view_3d
+dEQP-VK.api.object_management.multiple_shared_resources.semaphore
+dEQP-VK.api.object_management.multiple_shared_resources.event
+dEQP-VK.api.object_management.multiple_shared_resources.fence
+dEQP-VK.api.object_management.multiple_shared_resources.fence_signaled
+dEQP-VK.api.object_management.multiple_shared_resources.query_pool
+dEQP-VK.api.object_management.multiple_shared_resources.sampler
+dEQP-VK.api.object_management.multiple_shared_resources.shader_module
+dEQP-VK.api.object_management.multiple_shared_resources.pipeline_cache
+dEQP-VK.api.object_management.multiple_shared_resources.pipeline_layout_empty
+dEQP-VK.api.object_management.multiple_shared_resources.pipeline_layout_single
+dEQP-VK.api.object_management.multiple_shared_resources.render_pass
+dEQP-VK.api.object_management.multiple_shared_resources.graphics_pipeline
+dEQP-VK.api.object_management.multiple_shared_resources.compute_pipeline
+dEQP-VK.api.object_management.multiple_shared_resources.descriptor_set_layout_empty
+dEQP-VK.api.object_management.multiple_shared_resources.descriptor_set_layout_single
+dEQP-VK.api.object_management.multiple_shared_resources.descriptor_pool
+dEQP-VK.api.object_management.multiple_shared_resources.descriptor_pool_free_descriptor_set
+dEQP-VK.api.object_management.multiple_shared_resources.descriptor_set
+dEQP-VK.api.object_management.multiple_shared_resources.framebuffer
+dEQP-VK.api.object_management.multiple_shared_resources.command_pool
+dEQP-VK.api.object_management.multiple_shared_resources.command_pool_transient
+dEQP-VK.api.object_management.multiple_shared_resources.command_buffer_primary
+dEQP-VK.api.object_management.multiple_shared_resources.command_buffer_secondary
+dEQP-VK.api.object_management.max_concurrent.instance
+dEQP-VK.api.object_management.max_concurrent.device
+dEQP-VK.api.object_management.max_concurrent.device_memory_small
+dEQP-VK.api.object_management.max_concurrent.buffer_uniform_small
+dEQP-VK.api.object_management.max_concurrent.buffer_uniform_large
+dEQP-VK.api.object_management.max_concurrent.buffer_storage_small
+dEQP-VK.api.object_management.max_concurrent.buffer_storage_large
+dEQP-VK.api.object_management.max_concurrent.buffer_view_uniform_r8g8b8a8_unorm
+dEQP-VK.api.object_management.max_concurrent.buffer_view_storage_r8g8b8a8_unorm
+dEQP-VK.api.object_management.max_concurrent.image_1d
+dEQP-VK.api.object_management.max_concurrent.image_2d
+dEQP-VK.api.object_management.max_concurrent.image_3d
+dEQP-VK.api.object_management.max_concurrent.image_view_1d
+dEQP-VK.api.object_management.max_concurrent.image_view_1d_arr
+dEQP-VK.api.object_management.max_concurrent.image_view_2d
+dEQP-VK.api.object_management.max_concurrent.image_view_2d_arr
+dEQP-VK.api.object_management.max_concurrent.image_view_cube
+dEQP-VK.api.object_management.max_concurrent.image_view_cube_arr
+dEQP-VK.api.object_management.max_concurrent.image_view_3d
+dEQP-VK.api.object_management.max_concurrent.semaphore
+dEQP-VK.api.object_management.max_concurrent.event
+dEQP-VK.api.object_management.max_concurrent.fence
+dEQP-VK.api.object_management.max_concurrent.fence_signaled
+dEQP-VK.api.object_management.max_concurrent.query_pool
+dEQP-VK.api.object_management.max_concurrent.sampler
+dEQP-VK.api.object_management.max_concurrent.shader_module
+dEQP-VK.api.object_management.max_concurrent.pipeline_cache
+dEQP-VK.api.object_management.max_concurrent.pipeline_layout_empty
+dEQP-VK.api.object_management.max_concurrent.pipeline_layout_single
+dEQP-VK.api.object_management.max_concurrent.render_pass
+dEQP-VK.api.object_management.max_concurrent.graphics_pipeline
+dEQP-VK.api.object_management.max_concurrent.compute_pipeline
+dEQP-VK.api.object_management.max_concurrent.descriptor_set_layout_empty
+dEQP-VK.api.object_management.max_concurrent.descriptor_set_layout_single
+dEQP-VK.api.object_management.max_concurrent.descriptor_pool
+dEQP-VK.api.object_management.max_concurrent.descriptor_pool_free_descriptor_set
+dEQP-VK.api.object_management.max_concurrent.descriptor_set
+dEQP-VK.api.object_management.max_concurrent.framebuffer
+dEQP-VK.api.object_management.max_concurrent.command_pool
+dEQP-VK.api.object_management.max_concurrent.command_pool_transient
+dEQP-VK.api.object_management.max_concurrent.command_buffer_primary
+dEQP-VK.api.object_management.max_concurrent.command_buffer_secondary
+dEQP-VK.api.object_management.multithreaded_per_thread_device.device_memory_small
+dEQP-VK.api.object_management.multithreaded_per_thread_device.buffer_uniform_small
+dEQP-VK.api.object_management.multithreaded_per_thread_device.buffer_uniform_large
+dEQP-VK.api.object_management.multithreaded_per_thread_device.buffer_storage_small
+dEQP-VK.api.object_management.multithreaded_per_thread_device.buffer_storage_large
+dEQP-VK.api.object_management.multithreaded_per_thread_device.buffer_view_uniform_r8g8b8a8_unorm
+dEQP-VK.api.object_management.multithreaded_per_thread_device.buffer_view_storage_r8g8b8a8_unorm
+dEQP-VK.api.object_management.multithreaded_per_thread_device.image_1d
+dEQP-VK.api.object_management.multithreaded_per_thread_device.image_2d
+dEQP-VK.api.object_management.multithreaded_per_thread_device.image_3d
+dEQP-VK.api.object_management.multithreaded_per_thread_device.image_view_1d
+dEQP-VK.api.object_management.multithreaded_per_thread_device.image_view_1d_arr
+dEQP-VK.api.object_management.multithreaded_per_thread_device.image_view_2d
+dEQP-VK.api.object_management.multithreaded_per_thread_device.image_view_2d_arr
+dEQP-VK.api.object_management.multithreaded_per_thread_device.image_view_cube
+dEQP-VK.api.object_management.multithreaded_per_thread_device.image_view_cube_arr
+dEQP-VK.api.object_management.multithreaded_per_thread_device.image_view_3d
+dEQP-VK.api.object_management.multithreaded_per_thread_device.semaphore
+dEQP-VK.api.object_management.multithreaded_per_thread_device.event
+dEQP-VK.api.object_management.multithreaded_per_thread_device.fence
+dEQP-VK.api.object_management.multithreaded_per_thread_device.fence_signaled
+dEQP-VK.api.object_management.multithreaded_per_thread_device.query_pool
+dEQP-VK.api.object_management.multithreaded_per_thread_device.sampler
+dEQP-VK.api.object_management.multithreaded_per_thread_device.shader_module
+dEQP-VK.api.object_management.multithreaded_per_thread_device.pipeline_cache
+dEQP-VK.api.object_management.multithreaded_per_thread_device.pipeline_layout_empty
+dEQP-VK.api.object_management.multithreaded_per_thread_device.pipeline_layout_single
+dEQP-VK.api.object_management.multithreaded_per_thread_device.render_pass
+dEQP-VK.api.object_management.multithreaded_per_thread_device.graphics_pipeline
+dEQP-VK.api.object_management.multithreaded_per_thread_device.compute_pipeline
+dEQP-VK.api.object_management.multithreaded_per_thread_device.descriptor_set_layout_empty
+dEQP-VK.api.object_management.multithreaded_per_thread_device.descriptor_set_layout_single
+dEQP-VK.api.object_management.multithreaded_per_thread_device.descriptor_pool
+dEQP-VK.api.object_management.multithreaded_per_thread_device.descriptor_pool_free_descriptor_set
+dEQP-VK.api.object_management.multithreaded_per_thread_device.descriptor_set
+dEQP-VK.api.object_management.multithreaded_per_thread_device.framebuffer
+dEQP-VK.api.object_management.multithreaded_per_thread_device.command_pool
+dEQP-VK.api.object_management.multithreaded_per_thread_device.command_pool_transient
+dEQP-VK.api.object_management.multithreaded_per_thread_device.command_buffer_primary
+dEQP-VK.api.object_management.multithreaded_per_thread_device.command_buffer_secondary
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.instance
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.device
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.device_memory_small
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.buffer_uniform_small
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.buffer_uniform_large
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.buffer_storage_small
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.buffer_storage_large
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.buffer_view_uniform_r8g8b8a8_unorm
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.buffer_view_storage_r8g8b8a8_unorm
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.image_1d
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.image_2d
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.image_3d
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.image_view_1d
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.image_view_1d_arr
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.image_view_2d
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.image_view_2d_arr
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.image_view_cube
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.image_view_cube_arr
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.image_view_3d
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.semaphore
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.event
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.fence
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.fence_signaled
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.query_pool
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.sampler
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.shader_module
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.pipeline_cache
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.pipeline_layout_empty
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.pipeline_layout_single
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.render_pass
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.graphics_pipeline
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.compute_pipeline
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.descriptor_set_layout_empty
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.descriptor_set_layout_single
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.descriptor_pool
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.descriptor_pool_free_descriptor_set
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.descriptor_set
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.framebuffer
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.command_pool
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.command_pool_transient
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.command_buffer_primary
+dEQP-VK.api.object_management.multithreaded_per_thread_resources.command_buffer_secondary
+dEQP-VK.api.object_management.multithreaded_shared_resources.device
+dEQP-VK.api.object_management.multithreaded_shared_resources.device_memory_small
+dEQP-VK.api.object_management.multithreaded_shared_resources.buffer_uniform_small
+dEQP-VK.api.object_management.multithreaded_shared_resources.buffer_uniform_large
+dEQP-VK.api.object_management.multithreaded_shared_resources.buffer_storage_small
+dEQP-VK.api.object_management.multithreaded_shared_resources.buffer_storage_large
+dEQP-VK.api.object_management.multithreaded_shared_resources.buffer_view_uniform_r8g8b8a8_unorm
+dEQP-VK.api.object_management.multithreaded_shared_resources.buffer_view_storage_r8g8b8a8_unorm
+dEQP-VK.api.object_management.multithreaded_shared_resources.image_1d
+dEQP-VK.api.object_management.multithreaded_shared_resources.image_2d
+dEQP-VK.api.object_management.multithreaded_shared_resources.image_3d
+dEQP-VK.api.object_management.multithreaded_shared_resources.image_view_1d
+dEQP-VK.api.object_management.multithreaded_shared_resources.image_view_1d_arr
+dEQP-VK.api.object_management.multithreaded_shared_resources.image_view_2d
+dEQP-VK.api.object_management.multithreaded_shared_resources.image_view_2d_arr
+dEQP-VK.api.object_management.multithreaded_shared_resources.image_view_cube
+dEQP-VK.api.object_management.multithreaded_shared_resources.image_view_cube_arr
+dEQP-VK.api.object_management.multithreaded_shared_resources.image_view_3d
+dEQP-VK.api.object_management.multithreaded_shared_resources.semaphore
+dEQP-VK.api.object_management.multithreaded_shared_resources.event
+dEQP-VK.api.object_management.multithreaded_shared_resources.fence
+dEQP-VK.api.object_management.multithreaded_shared_resources.fence_signaled
+dEQP-VK.api.object_management.multithreaded_shared_resources.query_pool
+dEQP-VK.api.object_management.multithreaded_shared_resources.sampler
+dEQP-VK.api.object_management.multithreaded_shared_resources.shader_module
+dEQP-VK.api.object_management.multithreaded_shared_resources.pipeline_cache
+dEQP-VK.api.object_management.multithreaded_shared_resources.pipeline_layout_empty
+dEQP-VK.api.object_management.multithreaded_shared_resources.pipeline_layout_single
+dEQP-VK.api.object_management.multithreaded_shared_resources.render_pass
+dEQP-VK.api.object_management.multithreaded_shared_resources.graphics_pipeline
+dEQP-VK.api.object_management.multithreaded_shared_resources.compute_pipeline
+dEQP-VK.api.object_management.multithreaded_shared_resources.descriptor_set_layout_empty
+dEQP-VK.api.object_management.multithreaded_shared_resources.descriptor_set_layout_single
+dEQP-VK.api.object_management.multithreaded_shared_resources.descriptor_pool
+dEQP-VK.api.object_management.multithreaded_shared_resources.descriptor_pool_free_descriptor_set
+dEQP-VK.api.object_management.multithreaded_shared_resources.framebuffer
+dEQP-VK.api.object_management.multithreaded_shared_resources.command_pool
+dEQP-VK.api.object_management.multithreaded_shared_resources.command_pool_transient
+dEQP-VK.api.object_management.single_alloc_callbacks.instance
+dEQP-VK.api.object_management.single_alloc_callbacks.device
+dEQP-VK.api.object_management.single_alloc_callbacks.device_memory_small
+dEQP-VK.api.object_management.single_alloc_callbacks.buffer_uniform_small
+dEQP-VK.api.object_management.single_alloc_callbacks.buffer_uniform_large
+dEQP-VK.api.object_management.single_alloc_callbacks.buffer_storage_small
+dEQP-VK.api.object_management.single_alloc_callbacks.buffer_storage_large
+dEQP-VK.api.object_management.single_alloc_callbacks.buffer_view_uniform_r8g8b8a8_unorm
+dEQP-VK.api.object_management.single_alloc_callbacks.buffer_view_storage_r8g8b8a8_unorm
+dEQP-VK.api.object_management.single_alloc_callbacks.image_1d
+dEQP-VK.api.object_management.single_alloc_callbacks.image_2d
+dEQP-VK.api.object_management.single_alloc_callbacks.image_3d
+dEQP-VK.api.object_management.single_alloc_callbacks.image_view_1d
+dEQP-VK.api.object_management.single_alloc_callbacks.image_view_1d_arr
+dEQP-VK.api.object_management.single_alloc_callbacks.image_view_2d
+dEQP-VK.api.object_management.single_alloc_callbacks.image_view_2d_arr
+dEQP-VK.api.object_management.single_alloc_callbacks.image_view_cube
+dEQP-VK.api.object_management.single_alloc_callbacks.image_view_cube_arr
+dEQP-VK.api.object_management.single_alloc_callbacks.image_view_3d
+dEQP-VK.api.object_management.single_alloc_callbacks.semaphore
+dEQP-VK.api.object_management.single_alloc_callbacks.event
+dEQP-VK.api.object_management.single_alloc_callbacks.fence
+dEQP-VK.api.object_management.single_alloc_callbacks.fence_signaled
+dEQP-VK.api.object_management.single_alloc_callbacks.query_pool
+dEQP-VK.api.object_management.single_alloc_callbacks.sampler
+dEQP-VK.api.object_management.single_alloc_callbacks.shader_module
+dEQP-VK.api.object_management.single_alloc_callbacks.pipeline_cache
+dEQP-VK.api.object_management.single_alloc_callbacks.pipeline_layout_empty
+dEQP-VK.api.object_management.single_alloc_callbacks.pipeline_layout_single
+dEQP-VK.api.object_management.single_alloc_callbacks.render_pass
+dEQP-VK.api.object_management.single_alloc_callbacks.graphics_pipeline
+dEQP-VK.api.object_management.single_alloc_callbacks.compute_pipeline
+dEQP-VK.api.object_management.single_alloc_callbacks.descriptor_set_layout_empty
+dEQP-VK.api.object_management.single_alloc_callbacks.descriptor_set_layout_single
+dEQP-VK.api.object_management.single_alloc_callbacks.descriptor_pool
+dEQP-VK.api.object_management.single_alloc_callbacks.descriptor_pool_free_descriptor_set
+dEQP-VK.api.object_management.single_alloc_callbacks.descriptor_set
+dEQP-VK.api.object_management.single_alloc_callbacks.framebuffer
+dEQP-VK.api.object_management.single_alloc_callbacks.command_pool
+dEQP-VK.api.object_management.single_alloc_callbacks.command_pool_transient
+dEQP-VK.api.object_management.single_alloc_callbacks.command_buffer_primary
+dEQP-VK.api.object_management.single_alloc_callbacks.command_buffer_secondary
+dEQP-VK.api.object_management.alloc_callback_fail.instance
+dEQP-VK.api.object_management.alloc_callback_fail.device
+dEQP-VK.api.object_management.alloc_callback_fail.device_memory_small
+dEQP-VK.api.object_management.alloc_callback_fail.buffer_uniform_small
+dEQP-VK.api.object_management.alloc_callback_fail.buffer_uniform_large
+dEQP-VK.api.object_management.alloc_callback_fail.buffer_storage_small
+dEQP-VK.api.object_management.alloc_callback_fail.buffer_storage_large
+dEQP-VK.api.object_management.alloc_callback_fail.buffer_view_uniform_r8g8b8a8_unorm
+dEQP-VK.api.object_management.alloc_callback_fail.buffer_view_storage_r8g8b8a8_unorm
+dEQP-VK.api.object_management.alloc_callback_fail.image_1d
+dEQP-VK.api.object_management.alloc_callback_fail.image_2d
+dEQP-VK.api.object_management.alloc_callback_fail.image_3d
+dEQP-VK.api.object_management.alloc_callback_fail.image_view_1d
+dEQP-VK.api.object_management.alloc_callback_fail.image_view_1d_arr
+dEQP-VK.api.object_management.alloc_callback_fail.image_view_2d
+dEQP-VK.api.object_management.alloc_callback_fail.image_view_2d_arr
+dEQP-VK.api.object_management.alloc_callback_fail.image_view_cube
+dEQP-VK.api.object_management.alloc_callback_fail.image_view_cube_arr
+dEQP-VK.api.object_management.alloc_callback_fail.image_view_3d
+dEQP-VK.api.object_management.alloc_callback_fail.semaphore
+dEQP-VK.api.object_management.alloc_callback_fail.event
+dEQP-VK.api.object_management.alloc_callback_fail.fence
+dEQP-VK.api.object_management.alloc_callback_fail.fence_signaled
+dEQP-VK.api.object_management.alloc_callback_fail.query_pool
+dEQP-VK.api.object_management.alloc_callback_fail.sampler
+dEQP-VK.api.object_management.alloc_callback_fail.shader_module
+dEQP-VK.api.object_management.alloc_callback_fail.pipeline_cache
+dEQP-VK.api.object_management.alloc_callback_fail.pipeline_layout_empty
+dEQP-VK.api.object_management.alloc_callback_fail.pipeline_layout_single
+dEQP-VK.api.object_management.alloc_callback_fail.render_pass
+dEQP-VK.api.object_management.alloc_callback_fail.graphics_pipeline
+dEQP-VK.api.object_management.alloc_callback_fail.compute_pipeline
+dEQP-VK.api.object_management.alloc_callback_fail.descriptor_set_layout_empty
+dEQP-VK.api.object_management.alloc_callback_fail.descriptor_set_layout_single
+dEQP-VK.api.object_management.alloc_callback_fail.descriptor_pool
+dEQP-VK.api.object_management.alloc_callback_fail.descriptor_pool_free_descriptor_set
+dEQP-VK.api.object_management.alloc_callback_fail.descriptor_set
+dEQP-VK.api.object_management.alloc_callback_fail.framebuffer
+dEQP-VK.api.object_management.alloc_callback_fail.command_pool
+dEQP-VK.api.object_management.alloc_callback_fail.command_pool_transient
+dEQP-VK.api.object_management.alloc_callback_fail.command_buffer_primary
+dEQP-VK.api.object_management.alloc_callback_fail.command_buffer_secondary
+dEQP-VK.api.buffer.createBuffer_1_0
+dEQP-VK.api.buffer.createBuffer_2_0
+dEQP-VK.api.buffer.createBuffer_3_0
+dEQP-VK.api.buffer.createBuffer_4_0
+dEQP-VK.api.buffer.createBuffer_5_0
+dEQP-VK.api.buffer.createBuffer_6_0
+dEQP-VK.api.buffer.createBuffer_7_0
+dEQP-VK.api.buffer.createBuffer_8_0
+dEQP-VK.api.buffer.createBuffer_9_0
+dEQP-VK.api.buffer.createBuffer_10_0
+dEQP-VK.api.buffer.createBuffer_11_0
+dEQP-VK.api.buffer.createBuffer_12_0
+dEQP-VK.api.buffer.createBuffer_13_0
+dEQP-VK.api.buffer.createBuffer_14_0
+dEQP-VK.api.buffer.createBuffer_15_0
+dEQP-VK.api.buffer.createBuffer_16_0
+dEQP-VK.api.buffer.createBuffer_17_0
+dEQP-VK.api.buffer.createBuffer_18_0
+dEQP-VK.api.buffer.createBuffer_19_0
+dEQP-VK.api.buffer.createBuffer_20_0
+dEQP-VK.api.buffer.createBuffer_21_0
+dEQP-VK.api.buffer.createBuffer_22_0
+dEQP-VK.api.buffer.createBuffer_23_0
+dEQP-VK.api.buffer.createBuffer_24_0
+dEQP-VK.api.buffer.createBuffer_25_0
+dEQP-VK.api.buffer.createBuffer_26_0
+dEQP-VK.api.buffer.createBuffer_27_0
+dEQP-VK.api.buffer.createBuffer_28_0
+dEQP-VK.api.buffer.createBuffer_29_0
+dEQP-VK.api.buffer.createBuffer_30_0
+dEQP-VK.api.buffer.createBuffer_31_0
+dEQP-VK.api.buffer.createBuffer_32_0
+dEQP-VK.api.buffer.createBuffer_33_0
+dEQP-VK.api.buffer.createBuffer_34_0
+dEQP-VK.api.buffer.createBuffer_35_0
+dEQP-VK.api.buffer.createBuffer_36_0
+dEQP-VK.api.buffer.createBuffer_37_0
+dEQP-VK.api.buffer.createBuffer_38_0
+dEQP-VK.api.buffer.createBuffer_39_0
+dEQP-VK.api.buffer.createBuffer_40_0
+dEQP-VK.api.buffer.createBuffer_41_0
+dEQP-VK.api.buffer.createBuffer_42_0
+dEQP-VK.api.buffer.createBuffer_43_0
+dEQP-VK.api.buffer.createBuffer_44_0
+dEQP-VK.api.buffer.createBuffer_45_0
+dEQP-VK.api.buffer.createBuffer_46_0
+dEQP-VK.api.buffer.createBuffer_47_0
+dEQP-VK.api.buffer.createBuffer_48_0
+dEQP-VK.api.buffer.createBuffer_49_0
+dEQP-VK.api.buffer.createBuffer_50_0
+dEQP-VK.api.buffer.createBuffer_51_0
+dEQP-VK.api.buffer.createBuffer_52_0
+dEQP-VK.api.buffer.createBuffer_53_0
+dEQP-VK.api.buffer.createBuffer_54_0
+dEQP-VK.api.buffer.createBuffer_55_0
+dEQP-VK.api.buffer.createBuffer_56_0
+dEQP-VK.api.buffer.createBuffer_57_0
+dEQP-VK.api.buffer.createBuffer_58_0
+dEQP-VK.api.buffer.createBuffer_59_0
+dEQP-VK.api.buffer.createBuffer_60_0
+dEQP-VK.api.buffer.createBuffer_61_0
+dEQP-VK.api.buffer.createBuffer_62_0
+dEQP-VK.api.buffer.createBuffer_63_0
+dEQP-VK.api.buffer.createBuffer_64_0
+dEQP-VK.api.buffer.createBuffer_65_0
+dEQP-VK.api.buffer.createBuffer_66_0
+dEQP-VK.api.buffer.createBuffer_67_0
+dEQP-VK.api.buffer.createBuffer_68_0
+dEQP-VK.api.buffer.createBuffer_69_0
+dEQP-VK.api.buffer.createBuffer_70_0
+dEQP-VK.api.buffer.createBuffer_71_0
+dEQP-VK.api.buffer.createBuffer_72_0
+dEQP-VK.api.buffer.createBuffer_73_0
+dEQP-VK.api.buffer.createBuffer_74_0
+dEQP-VK.api.buffer.createBuffer_75_0
+dEQP-VK.api.buffer.createBuffer_76_0
+dEQP-VK.api.buffer.createBuffer_77_0
+dEQP-VK.api.buffer.createBuffer_78_0
+dEQP-VK.api.buffer.createBuffer_79_0
+dEQP-VK.api.buffer.createBuffer_80_0
+dEQP-VK.api.buffer.createBuffer_81_0
+dEQP-VK.api.buffer.createBuffer_82_0
+dEQP-VK.api.buffer.createBuffer_83_0
+dEQP-VK.api.buffer.createBuffer_84_0
+dEQP-VK.api.buffer.createBuffer_85_0
+dEQP-VK.api.buffer.createBuffer_86_0
+dEQP-VK.api.buffer.createBuffer_87_0
+dEQP-VK.api.buffer.createBuffer_88_0
+dEQP-VK.api.buffer.createBuffer_89_0
+dEQP-VK.api.buffer.createBuffer_90_0
+dEQP-VK.api.buffer.createBuffer_91_0
+dEQP-VK.api.buffer.createBuffer_92_0
+dEQP-VK.api.buffer.createBuffer_93_0
+dEQP-VK.api.buffer.createBuffer_94_0
+dEQP-VK.api.buffer.createBuffer_95_0
+dEQP-VK.api.buffer.createBuffer_96_0
+dEQP-VK.api.buffer.createBuffer_97_0
+dEQP-VK.api.buffer.createBuffer_98_0
+dEQP-VK.api.buffer.createBuffer_99_0
+dEQP-VK.api.buffer.createBuffer_100_0
+dEQP-VK.api.buffer.createBuffer_101_0
+dEQP-VK.api.buffer.createBuffer_102_0
+dEQP-VK.api.buffer.createBuffer_103_0
+dEQP-VK.api.buffer.createBuffer_104_0
+dEQP-VK.api.buffer.createBuffer_105_0
+dEQP-VK.api.buffer.createBuffer_106_0
+dEQP-VK.api.buffer.createBuffer_107_0
+dEQP-VK.api.buffer.createBuffer_108_0
+dEQP-VK.api.buffer.createBuffer_109_0
+dEQP-VK.api.buffer.createBuffer_110_0
+dEQP-VK.api.buffer.createBuffer_111_0
+dEQP-VK.api.buffer.createBuffer_112_0
+dEQP-VK.api.buffer.createBuffer_113_0
+dEQP-VK.api.buffer.createBuffer_114_0
+dEQP-VK.api.buffer.createBuffer_115_0
+dEQP-VK.api.buffer.createBuffer_116_0
+dEQP-VK.api.buffer.createBuffer_117_0
+dEQP-VK.api.buffer.createBuffer_118_0
+dEQP-VK.api.buffer.createBuffer_119_0
+dEQP-VK.api.buffer.createBuffer_120_0
+dEQP-VK.api.buffer.createBuffer_121_0
+dEQP-VK.api.buffer.createBuffer_122_0
+dEQP-VK.api.buffer.createBuffer_123_0
+dEQP-VK.api.buffer.createBuffer_124_0
+dEQP-VK.api.buffer.createBuffer_125_0
+dEQP-VK.api.buffer.createBuffer_126_0
+dEQP-VK.api.buffer.createBuffer_127_0
+dEQP-VK.api.buffer.createBuffer_128_0
+dEQP-VK.api.buffer.createBuffer_129_0
+dEQP-VK.api.buffer.createBuffer_130_0
+dEQP-VK.api.buffer.createBuffer_131_0
+dEQP-VK.api.buffer.createBuffer_132_0
+dEQP-VK.api.buffer.createBuffer_133_0
+dEQP-VK.api.buffer.createBuffer_134_0
+dEQP-VK.api.buffer.createBuffer_135_0
+dEQP-VK.api.buffer.createBuffer_136_0
+dEQP-VK.api.buffer.createBuffer_137_0
+dEQP-VK.api.buffer.createBuffer_138_0
+dEQP-VK.api.buffer.createBuffer_139_0
+dEQP-VK.api.buffer.createBuffer_140_0
+dEQP-VK.api.buffer.createBuffer_141_0
+dEQP-VK.api.buffer.createBuffer_142_0
+dEQP-VK.api.buffer.createBuffer_143_0
+dEQP-VK.api.buffer.createBuffer_144_0
+dEQP-VK.api.buffer.createBuffer_145_0
+dEQP-VK.api.buffer.createBuffer_146_0
+dEQP-VK.api.buffer.createBuffer_147_0
+dEQP-VK.api.buffer.createBuffer_148_0
+dEQP-VK.api.buffer.createBuffer_149_0
+dEQP-VK.api.buffer.createBuffer_150_0
+dEQP-VK.api.buffer.createBuffer_151_0
+dEQP-VK.api.buffer.createBuffer_152_0
+dEQP-VK.api.buffer.createBuffer_153_0
+dEQP-VK.api.buffer.createBuffer_154_0
+dEQP-VK.api.buffer.createBuffer_155_0
+dEQP-VK.api.buffer.createBuffer_156_0
+dEQP-VK.api.buffer.createBuffer_157_0
+dEQP-VK.api.buffer.createBuffer_158_0
+dEQP-VK.api.buffer.createBuffer_159_0
+dEQP-VK.api.buffer.createBuffer_160_0
+dEQP-VK.api.buffer.createBuffer_161_0
+dEQP-VK.api.buffer.createBuffer_162_0
+dEQP-VK.api.buffer.createBuffer_163_0
+dEQP-VK.api.buffer.createBuffer_164_0
+dEQP-VK.api.buffer.createBuffer_165_0
+dEQP-VK.api.buffer.createBuffer_166_0
+dEQP-VK.api.buffer.createBuffer_167_0
+dEQP-VK.api.buffer.createBuffer_168_0
+dEQP-VK.api.buffer.createBuffer_169_0
+dEQP-VK.api.buffer.createBuffer_170_0
+dEQP-VK.api.buffer.createBuffer_171_0
+dEQP-VK.api.buffer.createBuffer_172_0
+dEQP-VK.api.buffer.createBuffer_173_0
+dEQP-VK.api.buffer.createBuffer_174_0
+dEQP-VK.api.buffer.createBuffer_175_0
+dEQP-VK.api.buffer.createBuffer_176_0
+dEQP-VK.api.buffer.createBuffer_177_0
+dEQP-VK.api.buffer.createBuffer_178_0
+dEQP-VK.api.buffer.createBuffer_179_0
+dEQP-VK.api.buffer.createBuffer_180_0
+dEQP-VK.api.buffer.createBuffer_181_0
+dEQP-VK.api.buffer.createBuffer_182_0
+dEQP-VK.api.buffer.createBuffer_183_0
+dEQP-VK.api.buffer.createBuffer_184_0
+dEQP-VK.api.buffer.createBuffer_185_0
+dEQP-VK.api.buffer.createBuffer_186_0
+dEQP-VK.api.buffer.createBuffer_187_0
+dEQP-VK.api.buffer.createBuffer_188_0
+dEQP-VK.api.buffer.createBuffer_189_0
+dEQP-VK.api.buffer.createBuffer_190_0
+dEQP-VK.api.buffer.createBuffer_191_0
+dEQP-VK.api.buffer.createBuffer_192_0
+dEQP-VK.api.buffer.createBuffer_193_0
+dEQP-VK.api.buffer.createBuffer_194_0
+dEQP-VK.api.buffer.createBuffer_195_0
+dEQP-VK.api.buffer.createBuffer_196_0
+dEQP-VK.api.buffer.createBuffer_197_0
+dEQP-VK.api.buffer.createBuffer_198_0
+dEQP-VK.api.buffer.createBuffer_199_0
+dEQP-VK.api.buffer.createBuffer_200_0
+dEQP-VK.api.buffer.createBuffer_201_0
+dEQP-VK.api.buffer.createBuffer_202_0
+dEQP-VK.api.buffer.createBuffer_203_0
+dEQP-VK.api.buffer.createBuffer_204_0
+dEQP-VK.api.buffer.createBuffer_205_0
+dEQP-VK.api.buffer.createBuffer_206_0
+dEQP-VK.api.buffer.createBuffer_207_0
+dEQP-VK.api.buffer.createBuffer_208_0
+dEQP-VK.api.buffer.createBuffer_209_0
+dEQP-VK.api.buffer.createBuffer_210_0
+dEQP-VK.api.buffer.createBuffer_211_0
+dEQP-VK.api.buffer.createBuffer_212_0
+dEQP-VK.api.buffer.createBuffer_213_0
+dEQP-VK.api.buffer.createBuffer_214_0
+dEQP-VK.api.buffer.createBuffer_215_0
+dEQP-VK.api.buffer.createBuffer_216_0
+dEQP-VK.api.buffer.createBuffer_217_0
+dEQP-VK.api.buffer.createBuffer_218_0
+dEQP-VK.api.buffer.createBuffer_219_0
+dEQP-VK.api.buffer.createBuffer_220_0
+dEQP-VK.api.buffer.createBuffer_221_0
+dEQP-VK.api.buffer.createBuffer_222_0
+dEQP-VK.api.buffer.createBuffer_223_0
+dEQP-VK.api.buffer.createBuffer_224_0
+dEQP-VK.api.buffer.createBuffer_225_0
+dEQP-VK.api.buffer.createBuffer_226_0
+dEQP-VK.api.buffer.createBuffer_227_0
+dEQP-VK.api.buffer.createBuffer_228_0
+dEQP-VK.api.buffer.createBuffer_229_0
+dEQP-VK.api.buffer.createBuffer_230_0
+dEQP-VK.api.buffer.createBuffer_231_0
+dEQP-VK.api.buffer.createBuffer_232_0
+dEQP-VK.api.buffer.createBuffer_233_0
+dEQP-VK.api.buffer.createBuffer_234_0
+dEQP-VK.api.buffer.createBuffer_235_0
+dEQP-VK.api.buffer.createBuffer_236_0
+dEQP-VK.api.buffer.createBuffer_237_0
+dEQP-VK.api.buffer.createBuffer_238_0
+dEQP-VK.api.buffer.createBuffer_239_0
+dEQP-VK.api.buffer.createBuffer_240_0
+dEQP-VK.api.buffer.createBuffer_241_0
+dEQP-VK.api.buffer.createBuffer_242_0
+dEQP-VK.api.buffer.createBuffer_243_0
+dEQP-VK.api.buffer.createBuffer_244_0
+dEQP-VK.api.buffer.createBuffer_245_0
+dEQP-VK.api.buffer.createBuffer_246_0
+dEQP-VK.api.buffer.createBuffer_247_0
+dEQP-VK.api.buffer.createBuffer_248_0
+dEQP-VK.api.buffer.createBuffer_249_0
+dEQP-VK.api.buffer.createBuffer_250_0
+dEQP-VK.api.buffer.createBuffer_251_0
+dEQP-VK.api.buffer.createBuffer_252_0
+dEQP-VK.api.buffer.createBuffer_253_0
+dEQP-VK.api.buffer.createBuffer_254_0
+dEQP-VK.api.buffer.createBuffer_255_0
+dEQP-VK.api.buffer.createBuffer_1_1
+dEQP-VK.api.buffer.createBuffer_2_1
+dEQP-VK.api.buffer.createBuffer_3_1
+dEQP-VK.api.buffer.createBuffer_4_1
+dEQP-VK.api.buffer.createBuffer_5_1
+dEQP-VK.api.buffer.createBuffer_6_1
+dEQP-VK.api.buffer.createBuffer_7_1
+dEQP-VK.api.buffer.createBuffer_8_1
+dEQP-VK.api.buffer.createBuffer_9_1
+dEQP-VK.api.buffer.createBuffer_10_1
+dEQP-VK.api.buffer.createBuffer_11_1
+dEQP-VK.api.buffer.createBuffer_12_1
+dEQP-VK.api.buffer.createBuffer_13_1
+dEQP-VK.api.buffer.createBuffer_14_1
+dEQP-VK.api.buffer.createBuffer_15_1
+dEQP-VK.api.buffer.createBuffer_16_1
+dEQP-VK.api.buffer.createBuffer_17_1
+dEQP-VK.api.buffer.createBuffer_18_1
+dEQP-VK.api.buffer.createBuffer_19_1
+dEQP-VK.api.buffer.createBuffer_20_1
+dEQP-VK.api.buffer.createBuffer_21_1
+dEQP-VK.api.buffer.createBuffer_22_1
+dEQP-VK.api.buffer.createBuffer_23_1
+dEQP-VK.api.buffer.createBuffer_24_1
+dEQP-VK.api.buffer.createBuffer_25_1
+dEQP-VK.api.buffer.createBuffer_26_1
+dEQP-VK.api.buffer.createBuffer_27_1
+dEQP-VK.api.buffer.createBuffer_28_1
+dEQP-VK.api.buffer.createBuffer_29_1
+dEQP-VK.api.buffer.createBuffer_30_1
+dEQP-VK.api.buffer.createBuffer_31_1
+dEQP-VK.api.buffer.createBuffer_32_1
+dEQP-VK.api.buffer.createBuffer_33_1
+dEQP-VK.api.buffer.createBuffer_34_1
+dEQP-VK.api.buffer.createBuffer_35_1
+dEQP-VK.api.buffer.createBuffer_36_1
+dEQP-VK.api.buffer.createBuffer_37_1
+dEQP-VK.api.buffer.createBuffer_38_1
+dEQP-VK.api.buffer.createBuffer_39_1
+dEQP-VK.api.buffer.createBuffer_40_1
+dEQP-VK.api.buffer.createBuffer_41_1
+dEQP-VK.api.buffer.createBuffer_42_1
+dEQP-VK.api.buffer.createBuffer_43_1
+dEQP-VK.api.buffer.createBuffer_44_1
+dEQP-VK.api.buffer.createBuffer_45_1
+dEQP-VK.api.buffer.createBuffer_46_1
+dEQP-VK.api.buffer.createBuffer_47_1
+dEQP-VK.api.buffer.createBuffer_48_1
+dEQP-VK.api.buffer.createBuffer_49_1
+dEQP-VK.api.buffer.createBuffer_50_1
+dEQP-VK.api.buffer.createBuffer_51_1
+dEQP-VK.api.buffer.createBuffer_52_1
+dEQP-VK.api.buffer.createBuffer_53_1
+dEQP-VK.api.buffer.createBuffer_54_1
+dEQP-VK.api.buffer.createBuffer_55_1
+dEQP-VK.api.buffer.createBuffer_56_1
+dEQP-VK.api.buffer.createBuffer_57_1
+dEQP-VK.api.buffer.createBuffer_58_1
+dEQP-VK.api.buffer.createBuffer_59_1
+dEQP-VK.api.buffer.createBuffer_60_1
+dEQP-VK.api.buffer.createBuffer_61_1
+dEQP-VK.api.buffer.createBuffer_62_1
+dEQP-VK.api.buffer.createBuffer_63_1
+dEQP-VK.api.buffer.createBuffer_64_1
+dEQP-VK.api.buffer.createBuffer_65_1
+dEQP-VK.api.buffer.createBuffer_66_1
+dEQP-VK.api.buffer.createBuffer_67_1
+dEQP-VK.api.buffer.createBuffer_68_1
+dEQP-VK.api.buffer.createBuffer_69_1
+dEQP-VK.api.buffer.createBuffer_70_1
+dEQP-VK.api.buffer.createBuffer_71_1
+dEQP-VK.api.buffer.createBuffer_72_1
+dEQP-VK.api.buffer.createBuffer_73_1
+dEQP-VK.api.buffer.createBuffer_74_1
+dEQP-VK.api.buffer.createBuffer_75_1
+dEQP-VK.api.buffer.createBuffer_76_1
+dEQP-VK.api.buffer.createBuffer_77_1
+dEQP-VK.api.buffer.createBuffer_78_1
+dEQP-VK.api.buffer.createBuffer_79_1
+dEQP-VK.api.buffer.createBuffer_80_1
+dEQP-VK.api.buffer.createBuffer_81_1
+dEQP-VK.api.buffer.createBuffer_82_1
+dEQP-VK.api.buffer.createBuffer_83_1
+dEQP-VK.api.buffer.createBuffer_84_1
+dEQP-VK.api.buffer.createBuffer_85_1
+dEQP-VK.api.buffer.createBuffer_86_1
+dEQP-VK.api.buffer.createBuffer_87_1
+dEQP-VK.api.buffer.createBuffer_88_1
+dEQP-VK.api.buffer.createBuffer_89_1
+dEQP-VK.api.buffer.createBuffer_90_1
+dEQP-VK.api.buffer.createBuffer_91_1
+dEQP-VK.api.buffer.createBuffer_92_1
+dEQP-VK.api.buffer.createBuffer_93_1
+dEQP-VK.api.buffer.createBuffer_94_1
+dEQP-VK.api.buffer.createBuffer_95_1
+dEQP-VK.api.buffer.createBuffer_96_1
+dEQP-VK.api.buffer.createBuffer_97_1
+dEQP-VK.api.buffer.createBuffer_98_1
+dEQP-VK.api.buffer.createBuffer_99_1
+dEQP-VK.api.buffer.createBuffer_100_1
+dEQP-VK.api.buffer.createBuffer_101_1
+dEQP-VK.api.buffer.createBuffer_102_1
+dEQP-VK.api.buffer.createBuffer_103_1
+dEQP-VK.api.buffer.createBuffer_104_1
+dEQP-VK.api.buffer.createBuffer_105_1
+dEQP-VK.api.buffer.createBuffer_106_1
+dEQP-VK.api.buffer.createBuffer_107_1
+dEQP-VK.api.buffer.createBuffer_108_1
+dEQP-VK.api.buffer.createBuffer_109_1
+dEQP-VK.api.buffer.createBuffer_110_1
+dEQP-VK.api.buffer.createBuffer_111_1
+dEQP-VK.api.buffer.createBuffer_112_1
+dEQP-VK.api.buffer.createBuffer_113_1
+dEQP-VK.api.buffer.createBuffer_114_1
+dEQP-VK.api.buffer.createBuffer_115_1
+dEQP-VK.api.buffer.createBuffer_116_1
+dEQP-VK.api.buffer.createBuffer_117_1
+dEQP-VK.api.buffer.createBuffer_118_1
+dEQP-VK.api.buffer.createBuffer_119_1
+dEQP-VK.api.buffer.createBuffer_120_1
+dEQP-VK.api.buffer.createBuffer_121_1
+dEQP-VK.api.buffer.createBuffer_122_1
+dEQP-VK.api.buffer.createBuffer_123_1
+dEQP-VK.api.buffer.createBuffer_124_1
+dEQP-VK.api.buffer.createBuffer_125_1
+dEQP-VK.api.buffer.createBuffer_126_1
+dEQP-VK.api.buffer.createBuffer_127_1
+dEQP-VK.api.buffer.createBuffer_128_1
+dEQP-VK.api.buffer.createBuffer_129_1
+dEQP-VK.api.buffer.createBuffer_130_1
+dEQP-VK.api.buffer.createBuffer_131_1
+dEQP-VK.api.buffer.createBuffer_132_1
+dEQP-VK.api.buffer.createBuffer_133_1
+dEQP-VK.api.buffer.createBuffer_134_1
+dEQP-VK.api.buffer.createBuffer_135_1
+dEQP-VK.api.buffer.createBuffer_136_1
+dEQP-VK.api.buffer.createBuffer_137_1
+dEQP-VK.api.buffer.createBuffer_138_1
+dEQP-VK.api.buffer.createBuffer_139_1
+dEQP-VK.api.buffer.createBuffer_140_1
+dEQP-VK.api.buffer.createBuffer_141_1
+dEQP-VK.api.buffer.createBuffer_142_1
+dEQP-VK.api.buffer.createBuffer_143_1
+dEQP-VK.api.buffer.createBuffer_144_1
+dEQP-VK.api.buffer.createBuffer_145_1
+dEQP-VK.api.buffer.createBuffer_146_1
+dEQP-VK.api.buffer.createBuffer_147_1
+dEQP-VK.api.buffer.createBuffer_148_1
+dEQP-VK.api.buffer.createBuffer_149_1
+dEQP-VK.api.buffer.createBuffer_150_1
+dEQP-VK.api.buffer.createBuffer_151_1
+dEQP-VK.api.buffer.createBuffer_152_1
+dEQP-VK.api.buffer.createBuffer_153_1
+dEQP-VK.api.buffer.createBuffer_154_1
+dEQP-VK.api.buffer.createBuffer_155_1
+dEQP-VK.api.buffer.createBuffer_156_1
+dEQP-VK.api.buffer.createBuffer_157_1
+dEQP-VK.api.buffer.createBuffer_158_1
+dEQP-VK.api.buffer.createBuffer_159_1
+dEQP-VK.api.buffer.createBuffer_160_1
+dEQP-VK.api.buffer.createBuffer_161_1
+dEQP-VK.api.buffer.createBuffer_162_1
+dEQP-VK.api.buffer.createBuffer_163_1
+dEQP-VK.api.buffer.createBuffer_164_1
+dEQP-VK.api.buffer.createBuffer_165_1
+dEQP-VK.api.buffer.createBuffer_166_1
+dEQP-VK.api.buffer.createBuffer_167_1
+dEQP-VK.api.buffer.createBuffer_168_1
+dEQP-VK.api.buffer.createBuffer_169_1
+dEQP-VK.api.buffer.createBuffer_170_1
+dEQP-VK.api.buffer.createBuffer_171_1
+dEQP-VK.api.buffer.createBuffer_172_1
+dEQP-VK.api.buffer.createBuffer_173_1
+dEQP-VK.api.buffer.createBuffer_174_1
+dEQP-VK.api.buffer.createBuffer_175_1
+dEQP-VK.api.buffer.createBuffer_176_1
+dEQP-VK.api.buffer.createBuffer_177_1
+dEQP-VK.api.buffer.createBuffer_178_1
+dEQP-VK.api.buffer.createBuffer_179_1
+dEQP-VK.api.buffer.createBuffer_180_1
+dEQP-VK.api.buffer.createBuffer_181_1
+dEQP-VK.api.buffer.createBuffer_182_1
+dEQP-VK.api.buffer.createBuffer_183_1
+dEQP-VK.api.buffer.createBuffer_184_1
+dEQP-VK.api.buffer.createBuffer_185_1
+dEQP-VK.api.buffer.createBuffer_186_1
+dEQP-VK.api.buffer.createBuffer_187_1
+dEQP-VK.api.buffer.createBuffer_188_1
+dEQP-VK.api.buffer.createBuffer_189_1
+dEQP-VK.api.buffer.createBuffer_190_1
+dEQP-VK.api.buffer.createBuffer_191_1
+dEQP-VK.api.buffer.createBuffer_192_1
+dEQP-VK.api.buffer.createBuffer_193_1
+dEQP-VK.api.buffer.createBuffer_194_1
+dEQP-VK.api.buffer.createBuffer_195_1
+dEQP-VK.api.buffer.createBuffer_196_1
+dEQP-VK.api.buffer.createBuffer_197_1
+dEQP-VK.api.buffer.createBuffer_198_1
+dEQP-VK.api.buffer.createBuffer_199_1
+dEQP-VK.api.buffer.createBuffer_200_1
+dEQP-VK.api.buffer.createBuffer_201_1
+dEQP-VK.api.buffer.createBuffer_202_1
+dEQP-VK.api.buffer.createBuffer_203_1
+dEQP-VK.api.buffer.createBuffer_204_1
+dEQP-VK.api.buffer.createBuffer_205_1
+dEQP-VK.api.buffer.createBuffer_206_1
+dEQP-VK.api.buffer.createBuffer_207_1
+dEQP-VK.api.buffer.createBuffer_208_1
+dEQP-VK.api.buffer.createBuffer_209_1
+dEQP-VK.api.buffer.createBuffer_210_1
+dEQP-VK.api.buffer.createBuffer_211_1
+dEQP-VK.api.buffer.createBuffer_212_1
+dEQP-VK.api.buffer.createBuffer_213_1
+dEQP-VK.api.buffer.createBuffer_214_1
+dEQP-VK.api.buffer.createBuffer_215_1
+dEQP-VK.api.buffer.createBuffer_216_1
+dEQP-VK.api.buffer.createBuffer_217_1
+dEQP-VK.api.buffer.createBuffer_218_1
+dEQP-VK.api.buffer.createBuffer_219_1
+dEQP-VK.api.buffer.createBuffer_220_1
+dEQP-VK.api.buffer.createBuffer_221_1
+dEQP-VK.api.buffer.createBuffer_222_1
+dEQP-VK.api.buffer.createBuffer_223_1
+dEQP-VK.api.buffer.createBuffer_224_1
+dEQP-VK.api.buffer.createBuffer_225_1
+dEQP-VK.api.buffer.createBuffer_226_1
+dEQP-VK.api.buffer.createBuffer_227_1
+dEQP-VK.api.buffer.createBuffer_228_1
+dEQP-VK.api.buffer.createBuffer_229_1
+dEQP-VK.api.buffer.createBuffer_230_1
+dEQP-VK.api.buffer.createBuffer_231_1
+dEQP-VK.api.buffer.createBuffer_232_1
+dEQP-VK.api.buffer.createBuffer_233_1
+dEQP-VK.api.buffer.createBuffer_234_1
+dEQP-VK.api.buffer.createBuffer_235_1
+dEQP-VK.api.buffer.createBuffer_236_1
+dEQP-VK.api.buffer.createBuffer_237_1
+dEQP-VK.api.buffer.createBuffer_238_1
+dEQP-VK.api.buffer.createBuffer_239_1
+dEQP-VK.api.buffer.createBuffer_240_1
+dEQP-VK.api.buffer.createBuffer_241_1
+dEQP-VK.api.buffer.createBuffer_242_1
+dEQP-VK.api.buffer.createBuffer_243_1
+dEQP-VK.api.buffer.createBuffer_244_1
+dEQP-VK.api.buffer.createBuffer_245_1
+dEQP-VK.api.buffer.createBuffer_246_1
+dEQP-VK.api.buffer.createBuffer_247_1
+dEQP-VK.api.buffer.createBuffer_248_1
+dEQP-VK.api.buffer.createBuffer_249_1
+dEQP-VK.api.buffer.createBuffer_250_1
+dEQP-VK.api.buffer.createBuffer_251_1
+dEQP-VK.api.buffer.createBuffer_252_1
+dEQP-VK.api.buffer.createBuffer_253_1
+dEQP-VK.api.buffer.createBuffer_254_1
+dEQP-VK.api.buffer.createBuffer_255_1
+dEQP-VK.api.buffer.createBuffer_1_2
+dEQP-VK.api.buffer.createBuffer_2_2
+dEQP-VK.api.buffer.createBuffer_3_2
+dEQP-VK.api.buffer.createBuffer_4_2
+dEQP-VK.api.buffer.createBuffer_5_2
+dEQP-VK.api.buffer.createBuffer_6_2
+dEQP-VK.api.buffer.createBuffer_7_2
+dEQP-VK.api.buffer.createBuffer_8_2
+dEQP-VK.api.buffer.createBuffer_9_2
+dEQP-VK.api.buffer.createBuffer_10_2
+dEQP-VK.api.buffer.createBuffer_11_2
+dEQP-VK.api.buffer.createBuffer_12_2
+dEQP-VK.api.buffer.createBuffer_13_2
+dEQP-VK.api.buffer.createBuffer_14_2
+dEQP-VK.api.buffer.createBuffer_15_2
+dEQP-VK.api.buffer.createBuffer_16_2
+dEQP-VK.api.buffer.createBuffer_17_2
+dEQP-VK.api.buffer.createBuffer_18_2
+dEQP-VK.api.buffer.createBuffer_19_2
+dEQP-VK.api.buffer.createBuffer_20_2
+dEQP-VK.api.buffer.createBuffer_21_2
+dEQP-VK.api.buffer.createBuffer_22_2
+dEQP-VK.api.buffer.createBuffer_23_2
+dEQP-VK.api.buffer.createBuffer_24_2
+dEQP-VK.api.buffer.createBuffer_25_2
+dEQP-VK.api.buffer.createBuffer_26_2
+dEQP-VK.api.buffer.createBuffer_27_2
+dEQP-VK.api.buffer.createBuffer_28_2
+dEQP-VK.api.buffer.createBuffer_29_2
+dEQP-VK.api.buffer.createBuffer_30_2
+dEQP-VK.api.buffer.createBuffer_31_2
+dEQP-VK.api.buffer.createBuffer_32_2
+dEQP-VK.api.buffer.createBuffer_33_2
+dEQP-VK.api.buffer.createBuffer_34_2
+dEQP-VK.api.buffer.createBuffer_35_2
+dEQP-VK.api.buffer.createBuffer_36_2
+dEQP-VK.api.buffer.createBuffer_37_2
+dEQP-VK.api.buffer.createBuffer_38_2
+dEQP-VK.api.buffer.createBuffer_39_2
+dEQP-VK.api.buffer.createBuffer_40_2
+dEQP-VK.api.buffer.createBuffer_41_2
+dEQP-VK.api.buffer.createBuffer_42_2
+dEQP-VK.api.buffer.createBuffer_43_2
+dEQP-VK.api.buffer.createBuffer_44_2
+dEQP-VK.api.buffer.createBuffer_45_2
+dEQP-VK.api.buffer.createBuffer_46_2
+dEQP-VK.api.buffer.createBuffer_47_2
+dEQP-VK.api.buffer.createBuffer_48_2
+dEQP-VK.api.buffer.createBuffer_49_2
+dEQP-VK.api.buffer.createBuffer_50_2
+dEQP-VK.api.buffer.createBuffer_51_2
+dEQP-VK.api.buffer.createBuffer_52_2
+dEQP-VK.api.buffer.createBuffer_53_2
+dEQP-VK.api.buffer.createBuffer_54_2
+dEQP-VK.api.buffer.createBuffer_55_2
+dEQP-VK.api.buffer.createBuffer_56_2
+dEQP-VK.api.buffer.createBuffer_57_2
+dEQP-VK.api.buffer.createBuffer_58_2
+dEQP-VK.api.buffer.createBuffer_59_2
+dEQP-VK.api.buffer.createBuffer_60_2
+dEQP-VK.api.buffer.createBuffer_61_2
+dEQP-VK.api.buffer.createBuffer_62_2
+dEQP-VK.api.buffer.createBuffer_63_2
+dEQP-VK.api.buffer.createBuffer_64_2
+dEQP-VK.api.buffer.createBuffer_65_2
+dEQP-VK.api.buffer.createBuffer_66_2
+dEQP-VK.api.buffer.createBuffer_67_2
+dEQP-VK.api.buffer.createBuffer_68_2
+dEQP-VK.api.buffer.createBuffer_69_2
+dEQP-VK.api.buffer.createBuffer_70_2
+dEQP-VK.api.buffer.createBuffer_71_2
+dEQP-VK.api.buffer.createBuffer_72_2
+dEQP-VK.api.buffer.createBuffer_73_2
+dEQP-VK.api.buffer.createBuffer_74_2
+dEQP-VK.api.buffer.createBuffer_75_2
+dEQP-VK.api.buffer.createBuffer_76_2
+dEQP-VK.api.buffer.createBuffer_77_2
+dEQP-VK.api.buffer.createBuffer_78_2
+dEQP-VK.api.buffer.createBuffer_79_2
+dEQP-VK.api.buffer.createBuffer_80_2
+dEQP-VK.api.buffer.createBuffer_81_2
+dEQP-VK.api.buffer.createBuffer_82_2
+dEQP-VK.api.buffer.createBuffer_83_2
+dEQP-VK.api.buffer.createBuffer_84_2
+dEQP-VK.api.buffer.createBuffer_85_2
+dEQP-VK.api.buffer.createBuffer_86_2
+dEQP-VK.api.buffer.createBuffer_87_2
+dEQP-VK.api.buffer.createBuffer_88_2
+dEQP-VK.api.buffer.createBuffer_89_2
+dEQP-VK.api.buffer.createBuffer_90_2
+dEQP-VK.api.buffer.createBuffer_91_2
+dEQP-VK.api.buffer.createBuffer_92_2
+dEQP-VK.api.buffer.createBuffer_93_2
+dEQP-VK.api.buffer.createBuffer_94_2
+dEQP-VK.api.buffer.createBuffer_95_2
+dEQP-VK.api.buffer.createBuffer_96_2
+dEQP-VK.api.buffer.createBuffer_97_2
+dEQP-VK.api.buffer.createBuffer_98_2
+dEQP-VK.api.buffer.createBuffer_99_2
+dEQP-VK.api.buffer.createBuffer_100_2
+dEQP-VK.api.buffer.createBuffer_101_2
+dEQP-VK.api.buffer.createBuffer_102_2
+dEQP-VK.api.buffer.createBuffer_103_2
+dEQP-VK.api.buffer.createBuffer_104_2
+dEQP-VK.api.buffer.createBuffer_105_2
+dEQP-VK.api.buffer.createBuffer_106_2
+dEQP-VK.api.buffer.createBuffer_107_2
+dEQP-VK.api.buffer.createBuffer_108_2
+dEQP-VK.api.buffer.createBuffer_109_2
+dEQP-VK.api.buffer.createBuffer_110_2
+dEQP-VK.api.buffer.createBuffer_111_2
+dEQP-VK.api.buffer.createBuffer_112_2
+dEQP-VK.api.buffer.createBuffer_113_2
+dEQP-VK.api.buffer.createBuffer_114_2
+dEQP-VK.api.buffer.createBuffer_115_2
+dEQP-VK.api.buffer.createBuffer_116_2
+dEQP-VK.api.buffer.createBuffer_117_2
+dEQP-VK.api.buffer.createBuffer_118_2
+dEQP-VK.api.buffer.createBuffer_119_2
+dEQP-VK.api.buffer.createBuffer_120_2
+dEQP-VK.api.buffer.createBuffer_121_2
+dEQP-VK.api.buffer.createBuffer_122_2
+dEQP-VK.api.buffer.createBuffer_123_2
+dEQP-VK.api.buffer.createBuffer_124_2
+dEQP-VK.api.buffer.createBuffer_125_2
+dEQP-VK.api.buffer.createBuffer_126_2
+dEQP-VK.api.buffer.createBuffer_127_2
+dEQP-VK.api.buffer.createBuffer_128_2
+dEQP-VK.api.buffer.createBuffer_129_2
+dEQP-VK.api.buffer.createBuffer_130_2
+dEQP-VK.api.buffer.createBuffer_131_2
+dEQP-VK.api.buffer.createBuffer_132_2
+dEQP-VK.api.buffer.createBuffer_133_2
+dEQP-VK.api.buffer.createBuffer_134_2
+dEQP-VK.api.buffer.createBuffer_135_2
+dEQP-VK.api.buffer.createBuffer_136_2
+dEQP-VK.api.buffer.createBuffer_137_2
+dEQP-VK.api.buffer.createBuffer_138_2
+dEQP-VK.api.buffer.createBuffer_139_2
+dEQP-VK.api.buffer.createBuffer_140_2
+dEQP-VK.api.buffer.createBuffer_141_2
+dEQP-VK.api.buffer.createBuffer_142_2
+dEQP-VK.api.buffer.createBuffer_143_2
+dEQP-VK.api.buffer.createBuffer_144_2
+dEQP-VK.api.buffer.createBuffer_145_2
+dEQP-VK.api.buffer.createBuffer_146_2
+dEQP-VK.api.buffer.createBuffer_147_2
+dEQP-VK.api.buffer.createBuffer_148_2
+dEQP-VK.api.buffer.createBuffer_149_2
+dEQP-VK.api.buffer.createBuffer_150_2
+dEQP-VK.api.buffer.createBuffer_151_2
+dEQP-VK.api.buffer.createBuffer_152_2
+dEQP-VK.api.buffer.createBuffer_153_2
+dEQP-VK.api.buffer.createBuffer_154_2
+dEQP-VK.api.buffer.createBuffer_155_2
+dEQP-VK.api.buffer.createBuffer_156_2
+dEQP-VK.api.buffer.createBuffer_157_2
+dEQP-VK.api.buffer.createBuffer_158_2
+dEQP-VK.api.buffer.createBuffer_159_2
+dEQP-VK.api.buffer.createBuffer_160_2
+dEQP-VK.api.buffer.createBuffer_161_2
+dEQP-VK.api.buffer.createBuffer_162_2
+dEQP-VK.api.buffer.createBuffer_163_2
+dEQP-VK.api.buffer.createBuffer_164_2
+dEQP-VK.api.buffer.createBuffer_165_2
+dEQP-VK.api.buffer.createBuffer_166_2
+dEQP-VK.api.buffer.createBuffer_167_2
+dEQP-VK.api.buffer.createBuffer_168_2
+dEQP-VK.api.buffer.createBuffer_169_2
+dEQP-VK.api.buffer.createBuffer_170_2
+dEQP-VK.api.buffer.createBuffer_171_2
+dEQP-VK.api.buffer.createBuffer_172_2
+dEQP-VK.api.buffer.createBuffer_173_2
+dEQP-VK.api.buffer.createBuffer_174_2
+dEQP-VK.api.buffer.createBuffer_175_2
+dEQP-VK.api.buffer.createBuffer_176_2
+dEQP-VK.api.buffer.createBuffer_177_2
+dEQP-VK.api.buffer.createBuffer_178_2
+dEQP-VK.api.buffer.createBuffer_179_2
+dEQP-VK.api.buffer.createBuffer_180_2
+dEQP-VK.api.buffer.createBuffer_181_2
+dEQP-VK.api.buffer.createBuffer_182_2
+dEQP-VK.api.buffer.createBuffer_183_2
+dEQP-VK.api.buffer.createBuffer_184_2
+dEQP-VK.api.buffer.createBuffer_185_2
+dEQP-VK.api.buffer.createBuffer_186_2
+dEQP-VK.api.buffer.createBuffer_187_2
+dEQP-VK.api.buffer.createBuffer_188_2
+dEQP-VK.api.buffer.createBuffer_189_2
+dEQP-VK.api.buffer.createBuffer_190_2
+dEQP-VK.api.buffer.createBuffer_191_2
+dEQP-VK.api.buffer.createBuffer_192_2
+dEQP-VK.api.buffer.createBuffer_193_2
+dEQP-VK.api.buffer.createBuffer_194_2
+dEQP-VK.api.buffer.createBuffer_195_2
+dEQP-VK.api.buffer.createBuffer_196_2
+dEQP-VK.api.buffer.createBuffer_197_2
+dEQP-VK.api.buffer.createBuffer_198_2
+dEQP-VK.api.buffer.createBuffer_199_2
+dEQP-VK.api.buffer.createBuffer_200_2
+dEQP-VK.api.buffer.createBuffer_201_2
+dEQP-VK.api.buffer.createBuffer_202_2
+dEQP-VK.api.buffer.createBuffer_203_2
+dEQP-VK.api.buffer.createBuffer_204_2
+dEQP-VK.api.buffer.createBuffer_205_2
+dEQP-VK.api.buffer.createBuffer_206_2
+dEQP-VK.api.buffer.createBuffer_207_2
+dEQP-VK.api.buffer.createBuffer_208_2
+dEQP-VK.api.buffer.createBuffer_209_2
+dEQP-VK.api.buffer.createBuffer_210_2
+dEQP-VK.api.buffer.createBuffer_211_2
+dEQP-VK.api.buffer.createBuffer_212_2
+dEQP-VK.api.buffer.createBuffer_213_2
+dEQP-VK.api.buffer.createBuffer_214_2
+dEQP-VK.api.buffer.createBuffer_215_2
+dEQP-VK.api.buffer.createBuffer_216_2
+dEQP-VK.api.buffer.createBuffer_217_2
+dEQP-VK.api.buffer.createBuffer_218_2
+dEQP-VK.api.buffer.createBuffer_219_2
+dEQP-VK.api.buffer.createBuffer_220_2
+dEQP-VK.api.buffer.createBuffer_221_2
+dEQP-VK.api.buffer.createBuffer_222_2
+dEQP-VK.api.buffer.createBuffer_223_2
+dEQP-VK.api.buffer.createBuffer_224_2
+dEQP-VK.api.buffer.createBuffer_225_2
+dEQP-VK.api.buffer.createBuffer_226_2
+dEQP-VK.api.buffer.createBuffer_227_2
+dEQP-VK.api.buffer.createBuffer_228_2
+dEQP-VK.api.buffer.createBuffer_229_2
+dEQP-VK.api.buffer.createBuffer_230_2
+dEQP-VK.api.buffer.createBuffer_231_2
+dEQP-VK.api.buffer.createBuffer_232_2
+dEQP-VK.api.buffer.createBuffer_233_2
+dEQP-VK.api.buffer.createBuffer_234_2
+dEQP-VK.api.buffer.createBuffer_235_2
+dEQP-VK.api.buffer.createBuffer_236_2
+dEQP-VK.api.buffer.createBuffer_237_2
+dEQP-VK.api.buffer.createBuffer_238_2
+dEQP-VK.api.buffer.createBuffer_239_2
+dEQP-VK.api.buffer.createBuffer_240_2
+dEQP-VK.api.buffer.createBuffer_241_2
+dEQP-VK.api.buffer.createBuffer_242_2
+dEQP-VK.api.buffer.createBuffer_243_2
+dEQP-VK.api.buffer.createBuffer_244_2
+dEQP-VK.api.buffer.createBuffer_245_2
+dEQP-VK.api.buffer.createBuffer_246_2
+dEQP-VK.api.buffer.createBuffer_247_2
+dEQP-VK.api.buffer.createBuffer_248_2
+dEQP-VK.api.buffer.createBuffer_249_2
+dEQP-VK.api.buffer.createBuffer_250_2
+dEQP-VK.api.buffer.createBuffer_251_2
+dEQP-VK.api.buffer.createBuffer_252_2
+dEQP-VK.api.buffer.createBuffer_253_2
+dEQP-VK.api.buffer.createBuffer_254_2
+dEQP-VK.api.buffer.createBuffer_255_2
+dEQP-VK.api.buffer.createBuffer_1_3
+dEQP-VK.api.buffer.createBuffer_2_3
+dEQP-VK.api.buffer.createBuffer_3_3
+dEQP-VK.api.buffer.createBuffer_4_3
+dEQP-VK.api.buffer.createBuffer_5_3
+dEQP-VK.api.buffer.createBuffer_6_3
+dEQP-VK.api.buffer.createBuffer_7_3
+dEQP-VK.api.buffer.createBuffer_8_3
+dEQP-VK.api.buffer.createBuffer_9_3
+dEQP-VK.api.buffer.createBuffer_10_3
+dEQP-VK.api.buffer.createBuffer_11_3
+dEQP-VK.api.buffer.createBuffer_12_3
+dEQP-VK.api.buffer.createBuffer_13_3
+dEQP-VK.api.buffer.createBuffer_14_3
+dEQP-VK.api.buffer.createBuffer_15_3
+dEQP-VK.api.buffer.createBuffer_16_3
+dEQP-VK.api.buffer.createBuffer_17_3
+dEQP-VK.api.buffer.createBuffer_18_3
+dEQP-VK.api.buffer.createBuffer_19_3
+dEQP-VK.api.buffer.createBuffer_20_3
+dEQP-VK.api.buffer.createBuffer_21_3
+dEQP-VK.api.buffer.createBuffer_22_3
+dEQP-VK.api.buffer.createBuffer_23_3
+dEQP-VK.api.buffer.createBuffer_24_3
+dEQP-VK.api.buffer.createBuffer_25_3
+dEQP-VK.api.buffer.createBuffer_26_3
+dEQP-VK.api.buffer.createBuffer_27_3
+dEQP-VK.api.buffer.createBuffer_28_3
+dEQP-VK.api.buffer.createBuffer_29_3
+dEQP-VK.api.buffer.createBuffer_30_3
+dEQP-VK.api.buffer.createBuffer_31_3
+dEQP-VK.api.buffer.createBuffer_32_3
+dEQP-VK.api.buffer.createBuffer_33_3
+dEQP-VK.api.buffer.createBuffer_34_3
+dEQP-VK.api.buffer.createBuffer_35_3
+dEQP-VK.api.buffer.createBuffer_36_3
+dEQP-VK.api.buffer.createBuffer_37_3
+dEQP-VK.api.buffer.createBuffer_38_3
+dEQP-VK.api.buffer.createBuffer_39_3
+dEQP-VK.api.buffer.createBuffer_40_3
+dEQP-VK.api.buffer.createBuffer_41_3
+dEQP-VK.api.buffer.createBuffer_42_3
+dEQP-VK.api.buffer.createBuffer_43_3
+dEQP-VK.api.buffer.createBuffer_44_3
+dEQP-VK.api.buffer.createBuffer_45_3
+dEQP-VK.api.buffer.createBuffer_46_3
+dEQP-VK.api.buffer.createBuffer_47_3
+dEQP-VK.api.buffer.createBuffer_48_3
+dEQP-VK.api.buffer.createBuffer_49_3
+dEQP-VK.api.buffer.createBuffer_50_3
+dEQP-VK.api.buffer.createBuffer_51_3
+dEQP-VK.api.buffer.createBuffer_52_3
+dEQP-VK.api.buffer.createBuffer_53_3
+dEQP-VK.api.buffer.createBuffer_54_3
+dEQP-VK.api.buffer.createBuffer_55_3
+dEQP-VK.api.buffer.createBuffer_56_3
+dEQP-VK.api.buffer.createBuffer_57_3
+dEQP-VK.api.buffer.createBuffer_58_3
+dEQP-VK.api.buffer.createBuffer_59_3
+dEQP-VK.api.buffer.createBuffer_60_3
+dEQP-VK.api.buffer.createBuffer_61_3
+dEQP-VK.api.buffer.createBuffer_62_3
+dEQP-VK.api.buffer.createBuffer_63_3
+dEQP-VK.api.buffer.createBuffer_64_3
+dEQP-VK.api.buffer.createBuffer_65_3
+dEQP-VK.api.buffer.createBuffer_66_3
+dEQP-VK.api.buffer.createBuffer_67_3
+dEQP-VK.api.buffer.createBuffer_68_3
+dEQP-VK.api.buffer.createBuffer_69_3
+dEQP-VK.api.buffer.createBuffer_70_3
+dEQP-VK.api.buffer.createBuffer_71_3
+dEQP-VK.api.buffer.createBuffer_72_3
+dEQP-VK.api.buffer.createBuffer_73_3
+dEQP-VK.api.buffer.createBuffer_74_3
+dEQP-VK.api.buffer.createBuffer_75_3
+dEQP-VK.api.buffer.createBuffer_76_3
+dEQP-VK.api.buffer.createBuffer_77_3
+dEQP-VK.api.buffer.createBuffer_78_3
+dEQP-VK.api.buffer.createBuffer_79_3
+dEQP-VK.api.buffer.createBuffer_80_3
+dEQP-VK.api.buffer.createBuffer_81_3
+dEQP-VK.api.buffer.createBuffer_82_3
+dEQP-VK.api.buffer.createBuffer_83_3
+dEQP-VK.api.buffer.createBuffer_84_3
+dEQP-VK.api.buffer.createBuffer_85_3
+dEQP-VK.api.buffer.createBuffer_86_3
+dEQP-VK.api.buffer.createBuffer_87_3
+dEQP-VK.api.buffer.createBuffer_88_3
+dEQP-VK.api.buffer.createBuffer_89_3
+dEQP-VK.api.buffer.createBuffer_90_3
+dEQP-VK.api.buffer.createBuffer_91_3
+dEQP-VK.api.buffer.createBuffer_92_3
+dEQP-VK.api.buffer.createBuffer_93_3
+dEQP-VK.api.buffer.createBuffer_94_3
+dEQP-VK.api.buffer.createBuffer_95_3
+dEQP-VK.api.buffer.createBuffer_96_3
+dEQP-VK.api.buffer.createBuffer_97_3
+dEQP-VK.api.buffer.createBuffer_98_3
+dEQP-VK.api.buffer.createBuffer_99_3
+dEQP-VK.api.buffer.createBuffer_100_3
+dEQP-VK.api.buffer.createBuffer_101_3
+dEQP-VK.api.buffer.createBuffer_102_3
+dEQP-VK.api.buffer.createBuffer_103_3
+dEQP-VK.api.buffer.createBuffer_104_3
+dEQP-VK.api.buffer.createBuffer_105_3
+dEQP-VK.api.buffer.createBuffer_106_3
+dEQP-VK.api.buffer.createBuffer_107_3
+dEQP-VK.api.buffer.createBuffer_108_3
+dEQP-VK.api.buffer.createBuffer_109_3
+dEQP-VK.api.buffer.createBuffer_110_3
+dEQP-VK.api.buffer.createBuffer_111_3
+dEQP-VK.api.buffer.createBuffer_112_3
+dEQP-VK.api.buffer.createBuffer_113_3
+dEQP-VK.api.buffer.createBuffer_114_3
+dEQP-VK.api.buffer.createBuffer_115_3
+dEQP-VK.api.buffer.createBuffer_116_3
+dEQP-VK.api.buffer.createBuffer_117_3
+dEQP-VK.api.buffer.createBuffer_118_3
+dEQP-VK.api.buffer.createBuffer_119_3
+dEQP-VK.api.buffer.createBuffer_120_3
+dEQP-VK.api.buffer.createBuffer_121_3
+dEQP-VK.api.buffer.createBuffer_122_3
+dEQP-VK.api.buffer.createBuffer_123_3
+dEQP-VK.api.buffer.createBuffer_124_3
+dEQP-VK.api.buffer.createBuffer_125_3
+dEQP-VK.api.buffer.createBuffer_126_3
+dEQP-VK.api.buffer.createBuffer_127_3
+dEQP-VK.api.buffer.createBuffer_128_3
+dEQP-VK.api.buffer.createBuffer_129_3
+dEQP-VK.api.buffer.createBuffer_130_3
+dEQP-VK.api.buffer.createBuffer_131_3
+dEQP-VK.api.buffer.createBuffer_132_3
+dEQP-VK.api.buffer.createBuffer_133_3
+dEQP-VK.api.buffer.createBuffer_134_3
+dEQP-VK.api.buffer.createBuffer_135_3
+dEQP-VK.api.buffer.createBuffer_136_3
+dEQP-VK.api.buffer.createBuffer_137_3
+dEQP-VK.api.buffer.createBuffer_138_3
+dEQP-VK.api.buffer.createBuffer_139_3
+dEQP-VK.api.buffer.createBuffer_140_3
+dEQP-VK.api.buffer.createBuffer_141_3
+dEQP-VK.api.buffer.createBuffer_142_3
+dEQP-VK.api.buffer.createBuffer_143_3
+dEQP-VK.api.buffer.createBuffer_144_3
+dEQP-VK.api.buffer.createBuffer_145_3
+dEQP-VK.api.buffer.createBuffer_146_3
+dEQP-VK.api.buffer.createBuffer_147_3
+dEQP-VK.api.buffer.createBuffer_148_3
+dEQP-VK.api.buffer.createBuffer_149_3
+dEQP-VK.api.buffer.createBuffer_150_3
+dEQP-VK.api.buffer.createBuffer_151_3
+dEQP-VK.api.buffer.createBuffer_152_3
+dEQP-VK.api.buffer.createBuffer_153_3
+dEQP-VK.api.buffer.createBuffer_154_3
+dEQP-VK.api.buffer.createBuffer_155_3
+dEQP-VK.api.buffer.createBuffer_156_3
+dEQP-VK.api.buffer.createBuffer_157_3
+dEQP-VK.api.buffer.createBuffer_158_3
+dEQP-VK.api.buffer.createBuffer_159_3
+dEQP-VK.api.buffer.createBuffer_160_3
+dEQP-VK.api.buffer.createBuffer_161_3
+dEQP-VK.api.buffer.createBuffer_162_3
+dEQP-VK.api.buffer.createBuffer_163_3
+dEQP-VK.api.buffer.createBuffer_164_3
+dEQP-VK.api.buffer.createBuffer_165_3
+dEQP-VK.api.buffer.createBuffer_166_3
+dEQP-VK.api.buffer.createBuffer_167_3
+dEQP-VK.api.buffer.createBuffer_168_3
+dEQP-VK.api.buffer.createBuffer_169_3
+dEQP-VK.api.buffer.createBuffer_170_3
+dEQP-VK.api.buffer.createBuffer_171_3
+dEQP-VK.api.buffer.createBuffer_172_3
+dEQP-VK.api.buffer.createBuffer_173_3
+dEQP-VK.api.buffer.createBuffer_174_3
+dEQP-VK.api.buffer.createBuffer_175_3
+dEQP-VK.api.buffer.createBuffer_176_3
+dEQP-VK.api.buffer.createBuffer_177_3
+dEQP-VK.api.buffer.createBuffer_178_3
+dEQP-VK.api.buffer.createBuffer_179_3
+dEQP-VK.api.buffer.createBuffer_180_3
+dEQP-VK.api.buffer.createBuffer_181_3
+dEQP-VK.api.buffer.createBuffer_182_3
+dEQP-VK.api.buffer.createBuffer_183_3
+dEQP-VK.api.buffer.createBuffer_184_3
+dEQP-VK.api.buffer.createBuffer_185_3
+dEQP-VK.api.buffer.createBuffer_186_3
+dEQP-VK.api.buffer.createBuffer_187_3
+dEQP-VK.api.buffer.createBuffer_188_3
+dEQP-VK.api.buffer.createBuffer_189_3
+dEQP-VK.api.buffer.createBuffer_190_3
+dEQP-VK.api.buffer.createBuffer_191_3
+dEQP-VK.api.buffer.createBuffer_192_3
+dEQP-VK.api.buffer.createBuffer_193_3
+dEQP-VK.api.buffer.createBuffer_194_3
+dEQP-VK.api.buffer.createBuffer_195_3
+dEQP-VK.api.buffer.createBuffer_196_3
+dEQP-VK.api.buffer.createBuffer_197_3
+dEQP-VK.api.buffer.createBuffer_198_3
+dEQP-VK.api.buffer.createBuffer_199_3
+dEQP-VK.api.buffer.createBuffer_200_3
+dEQP-VK.api.buffer.createBuffer_201_3
+dEQP-VK.api.buffer.createBuffer_202_3
+dEQP-VK.api.buffer.createBuffer_203_3
+dEQP-VK.api.buffer.createBuffer_204_3
+dEQP-VK.api.buffer.createBuffer_205_3
+dEQP-VK.api.buffer.createBuffer_206_3
+dEQP-VK.api.buffer.createBuffer_207_3
+dEQP-VK.api.buffer.createBuffer_208_3
+dEQP-VK.api.buffer.createBuffer_209_3
+dEQP-VK.api.buffer.createBuffer_210_3
+dEQP-VK.api.buffer.createBuffer_211_3
+dEQP-VK.api.buffer.createBuffer_212_3
+dEQP-VK.api.buffer.createBuffer_213_3
+dEQP-VK.api.buffer.createBuffer_214_3
+dEQP-VK.api.buffer.createBuffer_215_3
+dEQP-VK.api.buffer.createBuffer_216_3
+dEQP-VK.api.buffer.createBuffer_217_3
+dEQP-VK.api.buffer.createBuffer_218_3
+dEQP-VK.api.buffer.createBuffer_219_3
+dEQP-VK.api.buffer.createBuffer_220_3
+dEQP-VK.api.buffer.createBuffer_221_3
+dEQP-VK.api.buffer.createBuffer_222_3
+dEQP-VK.api.buffer.createBuffer_223_3
+dEQP-VK.api.buffer.createBuffer_224_3
+dEQP-VK.api.buffer.createBuffer_225_3
+dEQP-VK.api.buffer.createBuffer_226_3
+dEQP-VK.api.buffer.createBuffer_227_3
+dEQP-VK.api.buffer.createBuffer_228_3
+dEQP-VK.api.buffer.createBuffer_229_3
+dEQP-VK.api.buffer.createBuffer_230_3
+dEQP-VK.api.buffer.createBuffer_231_3
+dEQP-VK.api.buffer.createBuffer_232_3
+dEQP-VK.api.buffer.createBuffer_233_3
+dEQP-VK.api.buffer.createBuffer_234_3
+dEQP-VK.api.buffer.createBuffer_235_3
+dEQP-VK.api.buffer.createBuffer_236_3
+dEQP-VK.api.buffer.createBuffer_237_3
+dEQP-VK.api.buffer.createBuffer_238_3
+dEQP-VK.api.buffer.createBuffer_239_3
+dEQP-VK.api.buffer.createBuffer_240_3
+dEQP-VK.api.buffer.createBuffer_241_3
+dEQP-VK.api.buffer.createBuffer_242_3
+dEQP-VK.api.buffer.createBuffer_243_3
+dEQP-VK.api.buffer.createBuffer_244_3
+dEQP-VK.api.buffer.createBuffer_245_3
+dEQP-VK.api.buffer.createBuffer_246_3
+dEQP-VK.api.buffer.createBuffer_247_3
+dEQP-VK.api.buffer.createBuffer_248_3
+dEQP-VK.api.buffer.createBuffer_249_3
+dEQP-VK.api.buffer.createBuffer_250_3
+dEQP-VK.api.buffer.createBuffer_251_3
+dEQP-VK.api.buffer.createBuffer_252_3
+dEQP-VK.api.buffer.createBuffer_253_3
+dEQP-VK.api.buffer.createBuffer_254_3
+dEQP-VK.api.buffer.createBuffer_255_3
+dEQP-VK.api.buffer.createBuffer_1_5
+dEQP-VK.api.buffer.createBuffer_2_5
+dEQP-VK.api.buffer.createBuffer_3_5
+dEQP-VK.api.buffer.createBuffer_4_5
+dEQP-VK.api.buffer.createBuffer_5_5
+dEQP-VK.api.buffer.createBuffer_6_5
+dEQP-VK.api.buffer.createBuffer_7_5
+dEQP-VK.api.buffer.createBuffer_8_5
+dEQP-VK.api.buffer.createBuffer_9_5
+dEQP-VK.api.buffer.createBuffer_10_5
+dEQP-VK.api.buffer.createBuffer_11_5
+dEQP-VK.api.buffer.createBuffer_12_5
+dEQP-VK.api.buffer.createBuffer_13_5
+dEQP-VK.api.buffer.createBuffer_14_5
+dEQP-VK.api.buffer.createBuffer_15_5
+dEQP-VK.api.buffer.createBuffer_16_5
+dEQP-VK.api.buffer.createBuffer_17_5
+dEQP-VK.api.buffer.createBuffer_18_5
+dEQP-VK.api.buffer.createBuffer_19_5
+dEQP-VK.api.buffer.createBuffer_20_5
+dEQP-VK.api.buffer.createBuffer_21_5
+dEQP-VK.api.buffer.createBuffer_22_5
+dEQP-VK.api.buffer.createBuffer_23_5
+dEQP-VK.api.buffer.createBuffer_24_5
+dEQP-VK.api.buffer.createBuffer_25_5
+dEQP-VK.api.buffer.createBuffer_26_5
+dEQP-VK.api.buffer.createBuffer_27_5
+dEQP-VK.api.buffer.createBuffer_28_5
+dEQP-VK.api.buffer.createBuffer_29_5
+dEQP-VK.api.buffer.createBuffer_30_5
+dEQP-VK.api.buffer.createBuffer_31_5
+dEQP-VK.api.buffer.createBuffer_32_5
+dEQP-VK.api.buffer.createBuffer_33_5
+dEQP-VK.api.buffer.createBuffer_34_5
+dEQP-VK.api.buffer.createBuffer_35_5
+dEQP-VK.api.buffer.createBuffer_36_5
+dEQP-VK.api.buffer.createBuffer_37_5
+dEQP-VK.api.buffer.createBuffer_38_5
+dEQP-VK.api.buffer.createBuffer_39_5
+dEQP-VK.api.buffer.createBuffer_40_5
+dEQP-VK.api.buffer.createBuffer_41_5
+dEQP-VK.api.buffer.createBuffer_42_5
+dEQP-VK.api.buffer.createBuffer_43_5
+dEQP-VK.api.buffer.createBuffer_44_5
+dEQP-VK.api.buffer.createBuffer_45_5
+dEQP-VK.api.buffer.createBuffer_46_5
+dEQP-VK.api.buffer.createBuffer_47_5
+dEQP-VK.api.buffer.createBuffer_48_5
+dEQP-VK.api.buffer.createBuffer_49_5
+dEQP-VK.api.buffer.createBuffer_50_5
+dEQP-VK.api.buffer.createBuffer_51_5
+dEQP-VK.api.buffer.createBuffer_52_5
+dEQP-VK.api.buffer.createBuffer_53_5
+dEQP-VK.api.buffer.createBuffer_54_5
+dEQP-VK.api.buffer.createBuffer_55_5
+dEQP-VK.api.buffer.createBuffer_56_5
+dEQP-VK.api.buffer.createBuffer_57_5
+dEQP-VK.api.buffer.createBuffer_58_5
+dEQP-VK.api.buffer.createBuffer_59_5
+dEQP-VK.api.buffer.createBuffer_60_5
+dEQP-VK.api.buffer.createBuffer_61_5
+dEQP-VK.api.buffer.createBuffer_62_5
+dEQP-VK.api.buffer.createBuffer_63_5
+dEQP-VK.api.buffer.createBuffer_64_5
+dEQP-VK.api.buffer.createBuffer_65_5
+dEQP-VK.api.buffer.createBuffer_66_5
+dEQP-VK.api.buffer.createBuffer_67_5
+dEQP-VK.api.buffer.createBuffer_68_5
+dEQP-VK.api.buffer.createBuffer_69_5
+dEQP-VK.api.buffer.createBuffer_70_5
+dEQP-VK.api.buffer.createBuffer_71_5
+dEQP-VK.api.buffer.createBuffer_72_5
+dEQP-VK.api.buffer.createBuffer_73_5
+dEQP-VK.api.buffer.createBuffer_74_5
+dEQP-VK.api.buffer.createBuffer_75_5
+dEQP-VK.api.buffer.createBuffer_76_5
+dEQP-VK.api.buffer.createBuffer_77_5
+dEQP-VK.api.buffer.createBuffer_78_5
+dEQP-VK.api.buffer.createBuffer_79_5
+dEQP-VK.api.buffer.createBuffer_80_5
+dEQP-VK.api.buffer.createBuffer_81_5
+dEQP-VK.api.buffer.createBuffer_82_5
+dEQP-VK.api.buffer.createBuffer_83_5
+dEQP-VK.api.buffer.createBuffer_84_5
+dEQP-VK.api.buffer.createBuffer_85_5
+dEQP-VK.api.buffer.createBuffer_86_5
+dEQP-VK.api.buffer.createBuffer_87_5
+dEQP-VK.api.buffer.createBuffer_88_5
+dEQP-VK.api.buffer.createBuffer_89_5
+dEQP-VK.api.buffer.createBuffer_90_5
+dEQP-VK.api.buffer.createBuffer_91_5
+dEQP-VK.api.buffer.createBuffer_92_5
+dEQP-VK.api.buffer.createBuffer_93_5
+dEQP-VK.api.buffer.createBuffer_94_5
+dEQP-VK.api.buffer.createBuffer_95_5
+dEQP-VK.api.buffer.createBuffer_96_5
+dEQP-VK.api.buffer.createBuffer_97_5
+dEQP-VK.api.buffer.createBuffer_98_5
+dEQP-VK.api.buffer.createBuffer_99_5
+dEQP-VK.api.buffer.createBuffer_100_5
+dEQP-VK.api.buffer.createBuffer_101_5
+dEQP-VK.api.buffer.createBuffer_102_5
+dEQP-VK.api.buffer.createBuffer_103_5
+dEQP-VK.api.buffer.createBuffer_104_5
+dEQP-VK.api.buffer.createBuffer_105_5
+dEQP-VK.api.buffer.createBuffer_106_5
+dEQP-VK.api.buffer.createBuffer_107_5
+dEQP-VK.api.buffer.createBuffer_108_5
+dEQP-VK.api.buffer.createBuffer_109_5
+dEQP-VK.api.buffer.createBuffer_110_5
+dEQP-VK.api.buffer.createBuffer_111_5
+dEQP-VK.api.buffer.createBuffer_112_5
+dEQP-VK.api.buffer.createBuffer_113_5
+dEQP-VK.api.buffer.createBuffer_114_5
+dEQP-VK.api.buffer.createBuffer_115_5
+dEQP-VK.api.buffer.createBuffer_116_5
+dEQP-VK.api.buffer.createBuffer_117_5
+dEQP-VK.api.buffer.createBuffer_118_5
+dEQP-VK.api.buffer.createBuffer_119_5
+dEQP-VK.api.buffer.createBuffer_120_5
+dEQP-VK.api.buffer.createBuffer_121_5
+dEQP-VK.api.buffer.createBuffer_122_5
+dEQP-VK.api.buffer.createBuffer_123_5
+dEQP-VK.api.buffer.createBuffer_124_5
+dEQP-VK.api.buffer.createBuffer_125_5
+dEQP-VK.api.buffer.createBuffer_126_5
+dEQP-VK.api.buffer.createBuffer_127_5
+dEQP-VK.api.buffer.createBuffer_128_5
+dEQP-VK.api.buffer.createBuffer_129_5
+dEQP-VK.api.buffer.createBuffer_130_5
+dEQP-VK.api.buffer.createBuffer_131_5
+dEQP-VK.api.buffer.createBuffer_132_5
+dEQP-VK.api.buffer.createBuffer_133_5
+dEQP-VK.api.buffer.createBuffer_134_5
+dEQP-VK.api.buffer.createBuffer_135_5
+dEQP-VK.api.buffer.createBuffer_136_5
+dEQP-VK.api.buffer.createBuffer_137_5
+dEQP-VK.api.buffer.createBuffer_138_5
+dEQP-VK.api.buffer.createBuffer_139_5
+dEQP-VK.api.buffer.createBuffer_140_5
+dEQP-VK.api.buffer.createBuffer_141_5
+dEQP-VK.api.buffer.createBuffer_142_5
+dEQP-VK.api.buffer.createBuffer_143_5
+dEQP-VK.api.buffer.createBuffer_144_5
+dEQP-VK.api.buffer.createBuffer_145_5
+dEQP-VK.api.buffer.createBuffer_146_5
+dEQP-VK.api.buffer.createBuffer_147_5
+dEQP-VK.api.buffer.createBuffer_148_5
+dEQP-VK.api.buffer.createBuffer_149_5
+dEQP-VK.api.buffer.createBuffer_150_5
+dEQP-VK.api.buffer.createBuffer_151_5
+dEQP-VK.api.buffer.createBuffer_152_5
+dEQP-VK.api.buffer.createBuffer_153_5
+dEQP-VK.api.buffer.createBuffer_154_5
+dEQP-VK.api.buffer.createBuffer_155_5
+dEQP-VK.api.buffer.createBuffer_156_5
+dEQP-VK.api.buffer.createBuffer_157_5
+dEQP-VK.api.buffer.createBuffer_158_5
+dEQP-VK.api.buffer.createBuffer_159_5
+dEQP-VK.api.buffer.createBuffer_160_5
+dEQP-VK.api.buffer.createBuffer_161_5
+dEQP-VK.api.buffer.createBuffer_162_5
+dEQP-VK.api.buffer.createBuffer_163_5
+dEQP-VK.api.buffer.createBuffer_164_5
+dEQP-VK.api.buffer.createBuffer_165_5
+dEQP-VK.api.buffer.createBuffer_166_5
+dEQP-VK.api.buffer.createBuffer_167_5
+dEQP-VK.api.buffer.createBuffer_168_5
+dEQP-VK.api.buffer.createBuffer_169_5
+dEQP-VK.api.buffer.createBuffer_170_5
+dEQP-VK.api.buffer.createBuffer_171_5
+dEQP-VK.api.buffer.createBuffer_172_5
+dEQP-VK.api.buffer.createBuffer_173_5
+dEQP-VK.api.buffer.createBuffer_174_5
+dEQP-VK.api.buffer.createBuffer_175_5
+dEQP-VK.api.buffer.createBuffer_176_5
+dEQP-VK.api.buffer.createBuffer_177_5
+dEQP-VK.api.buffer.createBuffer_178_5
+dEQP-VK.api.buffer.createBuffer_179_5
+dEQP-VK.api.buffer.createBuffer_180_5
+dEQP-VK.api.buffer.createBuffer_181_5
+dEQP-VK.api.buffer.createBuffer_182_5
+dEQP-VK.api.buffer.createBuffer_183_5
+dEQP-VK.api.buffer.createBuffer_184_5
+dEQP-VK.api.buffer.createBuffer_185_5
+dEQP-VK.api.buffer.createBuffer_186_5
+dEQP-VK.api.buffer.createBuffer_187_5
+dEQP-VK.api.buffer.createBuffer_188_5
+dEQP-VK.api.buffer.createBuffer_189_5
+dEQP-VK.api.buffer.createBuffer_190_5
+dEQP-VK.api.buffer.createBuffer_191_5
+dEQP-VK.api.buffer.createBuffer_192_5
+dEQP-VK.api.buffer.createBuffer_193_5
+dEQP-VK.api.buffer.createBuffer_194_5
+dEQP-VK.api.buffer.createBuffer_195_5
+dEQP-VK.api.buffer.createBuffer_196_5
+dEQP-VK.api.buffer.createBuffer_197_5
+dEQP-VK.api.buffer.createBuffer_198_5
+dEQP-VK.api.buffer.createBuffer_199_5
+dEQP-VK.api.buffer.createBuffer_200_5
+dEQP-VK.api.buffer.createBuffer_201_5
+dEQP-VK.api.buffer.createBuffer_202_5
+dEQP-VK.api.buffer.createBuffer_203_5
+dEQP-VK.api.buffer.createBuffer_204_5
+dEQP-VK.api.buffer.createBuffer_205_5
+dEQP-VK.api.buffer.createBuffer_206_5
+dEQP-VK.api.buffer.createBuffer_207_5
+dEQP-VK.api.buffer.createBuffer_208_5
+dEQP-VK.api.buffer.createBuffer_209_5
+dEQP-VK.api.buffer.createBuffer_210_5
+dEQP-VK.api.buffer.createBuffer_211_5
+dEQP-VK.api.buffer.createBuffer_212_5
+dEQP-VK.api.buffer.createBuffer_213_5
+dEQP-VK.api.buffer.createBuffer_214_5
+dEQP-VK.api.buffer.createBuffer_215_5
+dEQP-VK.api.buffer.createBuffer_216_5
+dEQP-VK.api.buffer.createBuffer_217_5
+dEQP-VK.api.buffer.createBuffer_218_5
+dEQP-VK.api.buffer.createBuffer_219_5
+dEQP-VK.api.buffer.createBuffer_220_5
+dEQP-VK.api.buffer.createBuffer_221_5
+dEQP-VK.api.buffer.createBuffer_222_5
+dEQP-VK.api.buffer.createBuffer_223_5
+dEQP-VK.api.buffer.createBuffer_224_5
+dEQP-VK.api.buffer.createBuffer_225_5
+dEQP-VK.api.buffer.createBuffer_226_5
+dEQP-VK.api.buffer.createBuffer_227_5
+dEQP-VK.api.buffer.createBuffer_228_5
+dEQP-VK.api.buffer.createBuffer_229_5
+dEQP-VK.api.buffer.createBuffer_230_5
+dEQP-VK.api.buffer.createBuffer_231_5
+dEQP-VK.api.buffer.createBuffer_232_5
+dEQP-VK.api.buffer.createBuffer_233_5
+dEQP-VK.api.buffer.createBuffer_234_5
+dEQP-VK.api.buffer.createBuffer_235_5
+dEQP-VK.api.buffer.createBuffer_236_5
+dEQP-VK.api.buffer.createBuffer_237_5
+dEQP-VK.api.buffer.createBuffer_238_5
+dEQP-VK.api.buffer.createBuffer_239_5
+dEQP-VK.api.buffer.createBuffer_240_5
+dEQP-VK.api.buffer.createBuffer_241_5
+dEQP-VK.api.buffer.createBuffer_242_5
+dEQP-VK.api.buffer.createBuffer_243_5
+dEQP-VK.api.buffer.createBuffer_244_5
+dEQP-VK.api.buffer.createBuffer_245_5
+dEQP-VK.api.buffer.createBuffer_246_5
+dEQP-VK.api.buffer.createBuffer_247_5
+dEQP-VK.api.buffer.createBuffer_248_5
+dEQP-VK.api.buffer.createBuffer_249_5
+dEQP-VK.api.buffer.createBuffer_250_5
+dEQP-VK.api.buffer.createBuffer_251_5
+dEQP-VK.api.buffer.createBuffer_252_5
+dEQP-VK.api.buffer.createBuffer_253_5
+dEQP-VK.api.buffer.createBuffer_254_5
+dEQP-VK.api.buffer.createBuffer_255_5
+dEQP-VK.api.buffer.createBuffer_1_6
+dEQP-VK.api.buffer.createBuffer_2_6
+dEQP-VK.api.buffer.createBuffer_3_6
+dEQP-VK.api.buffer.createBuffer_4_6
+dEQP-VK.api.buffer.createBuffer_5_6
+dEQP-VK.api.buffer.createBuffer_6_6
+dEQP-VK.api.buffer.createBuffer_7_6
+dEQP-VK.api.buffer.createBuffer_8_6
+dEQP-VK.api.buffer.createBuffer_9_6
+dEQP-VK.api.buffer.createBuffer_10_6
+dEQP-VK.api.buffer.createBuffer_11_6
+dEQP-VK.api.buffer.createBuffer_12_6
+dEQP-VK.api.buffer.createBuffer_13_6
+dEQP-VK.api.buffer.createBuffer_14_6
+dEQP-VK.api.buffer.createBuffer_15_6
+dEQP-VK.api.buffer.createBuffer_16_6
+dEQP-VK.api.buffer.createBuffer_17_6
+dEQP-VK.api.buffer.createBuffer_18_6
+dEQP-VK.api.buffer.createBuffer_19_6
+dEQP-VK.api.buffer.createBuffer_20_6
+dEQP-VK.api.buffer.createBuffer_21_6
+dEQP-VK.api.buffer.createBuffer_22_6
+dEQP-VK.api.buffer.createBuffer_23_6
+dEQP-VK.api.buffer.createBuffer_24_6
+dEQP-VK.api.buffer.createBuffer_25_6
+dEQP-VK.api.buffer.createBuffer_26_6
+dEQP-VK.api.buffer.createBuffer_27_6
+dEQP-VK.api.buffer.createBuffer_28_6
+dEQP-VK.api.buffer.createBuffer_29_6
+dEQP-VK.api.buffer.createBuffer_30_6
+dEQP-VK.api.buffer.createBuffer_31_6
+dEQP-VK.api.buffer.createBuffer_32_6
+dEQP-VK.api.buffer.createBuffer_33_6
+dEQP-VK.api.buffer.createBuffer_34_6
+dEQP-VK.api.buffer.createBuffer_35_6
+dEQP-VK.api.buffer.createBuffer_36_6
+dEQP-VK.api.buffer.createBuffer_37_6
+dEQP-VK.api.buffer.createBuffer_38_6
+dEQP-VK.api.buffer.createBuffer_39_6
+dEQP-VK.api.buffer.createBuffer_40_6
+dEQP-VK.api.buffer.createBuffer_41_6
+dEQP-VK.api.buffer.createBuffer_42_6
+dEQP-VK.api.buffer.createBuffer_43_6
+dEQP-VK.api.buffer.createBuffer_44_6
+dEQP-VK.api.buffer.createBuffer_45_6
+dEQP-VK.api.buffer.createBuffer_46_6
+dEQP-VK.api.buffer.createBuffer_47_6
+dEQP-VK.api.buffer.createBuffer_48_6
+dEQP-VK.api.buffer.createBuffer_49_6
+dEQP-VK.api.buffer.createBuffer_50_6
+dEQP-VK.api.buffer.createBuffer_51_6
+dEQP-VK.api.buffer.createBuffer_52_6
+dEQP-VK.api.buffer.createBuffer_53_6
+dEQP-VK.api.buffer.createBuffer_54_6
+dEQP-VK.api.buffer.createBuffer_55_6
+dEQP-VK.api.buffer.createBuffer_56_6
+dEQP-VK.api.buffer.createBuffer_57_6
+dEQP-VK.api.buffer.createBuffer_58_6
+dEQP-VK.api.buffer.createBuffer_59_6
+dEQP-VK.api.buffer.createBuffer_60_6
+dEQP-VK.api.buffer.createBuffer_61_6
+dEQP-VK.api.buffer.createBuffer_62_6
+dEQP-VK.api.buffer.createBuffer_63_6
+dEQP-VK.api.buffer.createBuffer_64_6
+dEQP-VK.api.buffer.createBuffer_65_6
+dEQP-VK.api.buffer.createBuffer_66_6
+dEQP-VK.api.buffer.createBuffer_67_6
+dEQP-VK.api.buffer.createBuffer_68_6
+dEQP-VK.api.buffer.createBuffer_69_6
+dEQP-VK.api.buffer.createBuffer_70_6
+dEQP-VK.api.buffer.createBuffer_71_6
+dEQP-VK.api.buffer.createBuffer_72_6
+dEQP-VK.api.buffer.createBuffer_73_6
+dEQP-VK.api.buffer.createBuffer_74_6
+dEQP-VK.api.buffer.createBuffer_75_6
+dEQP-VK.api.buffer.createBuffer_76_6
+dEQP-VK.api.buffer.createBuffer_77_6
+dEQP-VK.api.buffer.createBuffer_78_6
+dEQP-VK.api.buffer.createBuffer_79_6
+dEQP-VK.api.buffer.createBuffer_80_6
+dEQP-VK.api.buffer.createBuffer_81_6
+dEQP-VK.api.buffer.createBuffer_82_6
+dEQP-VK.api.buffer.createBuffer_83_6
+dEQP-VK.api.buffer.createBuffer_84_6
+dEQP-VK.api.buffer.createBuffer_85_6
+dEQP-VK.api.buffer.createBuffer_86_6
+dEQP-VK.api.buffer.createBuffer_87_6
+dEQP-VK.api.buffer.createBuffer_88_6
+dEQP-VK.api.buffer.createBuffer_89_6
+dEQP-VK.api.buffer.createBuffer_90_6
+dEQP-VK.api.buffer.createBuffer_91_6
+dEQP-VK.api.buffer.createBuffer_92_6
+dEQP-VK.api.buffer.createBuffer_93_6
+dEQP-VK.api.buffer.createBuffer_94_6
+dEQP-VK.api.buffer.createBuffer_95_6
+dEQP-VK.api.buffer.createBuffer_96_6
+dEQP-VK.api.buffer.createBuffer_97_6
+dEQP-VK.api.buffer.createBuffer_98_6
+dEQP-VK.api.buffer.createBuffer_99_6
+dEQP-VK.api.buffer.createBuffer_100_6
+dEQP-VK.api.buffer.createBuffer_101_6
+dEQP-VK.api.buffer.createBuffer_102_6
+dEQP-VK.api.buffer.createBuffer_103_6
+dEQP-VK.api.buffer.createBuffer_104_6
+dEQP-VK.api.buffer.createBuffer_105_6
+dEQP-VK.api.buffer.createBuffer_106_6
+dEQP-VK.api.buffer.createBuffer_107_6
+dEQP-VK.api.buffer.createBuffer_108_6
+dEQP-VK.api.buffer.createBuffer_109_6
+dEQP-VK.api.buffer.createBuffer_110_6
+dEQP-VK.api.buffer.createBuffer_111_6
+dEQP-VK.api.buffer.createBuffer_112_6
+dEQP-VK.api.buffer.createBuffer_113_6
+dEQP-VK.api.buffer.createBuffer_114_6
+dEQP-VK.api.buffer.createBuffer_115_6
+dEQP-VK.api.buffer.createBuffer_116_6
+dEQP-VK.api.buffer.createBuffer_117_6
+dEQP-VK.api.buffer.createBuffer_118_6
+dEQP-VK.api.buffer.createBuffer_119_6
+dEQP-VK.api.buffer.createBuffer_120_6
+dEQP-VK.api.buffer.createBuffer_121_6
+dEQP-VK.api.buffer.createBuffer_122_6
+dEQP-VK.api.buffer.createBuffer_123_6
+dEQP-VK.api.buffer.createBuffer_124_6
+dEQP-VK.api.buffer.createBuffer_125_6
+dEQP-VK.api.buffer.createBuffer_126_6
+dEQP-VK.api.buffer.createBuffer_127_6
+dEQP-VK.api.buffer.createBuffer_128_6
+dEQP-VK.api.buffer.createBuffer_129_6
+dEQP-VK.api.buffer.createBuffer_130_6
+dEQP-VK.api.buffer.createBuffer_131_6
+dEQP-VK.api.buffer.createBuffer_132_6
+dEQP-VK.api.buffer.createBuffer_133_6
+dEQP-VK.api.buffer.createBuffer_134_6
+dEQP-VK.api.buffer.createBuffer_135_6
+dEQP-VK.api.buffer.createBuffer_136_6
+dEQP-VK.api.buffer.createBuffer_137_6
+dEQP-VK.api.buffer.createBuffer_138_6
+dEQP-VK.api.buffer.createBuffer_139_6
+dEQP-VK.api.buffer.createBuffer_140_6
+dEQP-VK.api.buffer.createBuffer_141_6
+dEQP-VK.api.buffer.createBuffer_142_6
+dEQP-VK.api.buffer.createBuffer_143_6
+dEQP-VK.api.buffer.createBuffer_144_6
+dEQP-VK.api.buffer.createBuffer_145_6
+dEQP-VK.api.buffer.createBuffer_146_6
+dEQP-VK.api.buffer.createBuffer_147_6
+dEQP-VK.api.buffer.createBuffer_148_6
+dEQP-VK.api.buffer.createBuffer_149_6
+dEQP-VK.api.buffer.createBuffer_150_6
+dEQP-VK.api.buffer.createBuffer_151_6
+dEQP-VK.api.buffer.createBuffer_152_6
+dEQP-VK.api.buffer.createBuffer_153_6
+dEQP-VK.api.buffer.createBuffer_154_6
+dEQP-VK.api.buffer.createBuffer_155_6
+dEQP-VK.api.buffer.createBuffer_156_6
+dEQP-VK.api.buffer.createBuffer_157_6
+dEQP-VK.api.buffer.createBuffer_158_6
+dEQP-VK.api.buffer.createBuffer_159_6
+dEQP-VK.api.buffer.createBuffer_160_6
+dEQP-VK.api.buffer.createBuffer_161_6
+dEQP-VK.api.buffer.createBuffer_162_6
+dEQP-VK.api.buffer.createBuffer_163_6
+dEQP-VK.api.buffer.createBuffer_164_6
+dEQP-VK.api.buffer.createBuffer_165_6
+dEQP-VK.api.buffer.createBuffer_166_6
+dEQP-VK.api.buffer.createBuffer_167_6
+dEQP-VK.api.buffer.createBuffer_168_6
+dEQP-VK.api.buffer.createBuffer_169_6
+dEQP-VK.api.buffer.createBuffer_170_6
+dEQP-VK.api.buffer.createBuffer_171_6
+dEQP-VK.api.buffer.createBuffer_172_6
+dEQP-VK.api.buffer.createBuffer_173_6
+dEQP-VK.api.buffer.createBuffer_174_6
+dEQP-VK.api.buffer.createBuffer_175_6
+dEQP-VK.api.buffer.createBuffer_176_6
+dEQP-VK.api.buffer.createBuffer_177_6
+dEQP-VK.api.buffer.createBuffer_178_6
+dEQP-VK.api.buffer.createBuffer_179_6
+dEQP-VK.api.buffer.createBuffer_180_6
+dEQP-VK.api.buffer.createBuffer_181_6
+dEQP-VK.api.buffer.createBuffer_182_6
+dEQP-VK.api.buffer.createBuffer_183_6
+dEQP-VK.api.buffer.createBuffer_184_6
+dEQP-VK.api.buffer.createBuffer_185_6
+dEQP-VK.api.buffer.createBuffer_186_6
+dEQP-VK.api.buffer.createBuffer_187_6
+dEQP-VK.api.buffer.createBuffer_188_6
+dEQP-VK.api.buffer.createBuffer_189_6
+dEQP-VK.api.buffer.createBuffer_190_6
+dEQP-VK.api.buffer.createBuffer_191_6
+dEQP-VK.api.buffer.createBuffer_192_6
+dEQP-VK.api.buffer.createBuffer_193_6
+dEQP-VK.api.buffer.createBuffer_194_6
+dEQP-VK.api.buffer.createBuffer_195_6
+dEQP-VK.api.buffer.createBuffer_196_6
+dEQP-VK.api.buffer.createBuffer_197_6
+dEQP-VK.api.buffer.createBuffer_198_6
+dEQP-VK.api.buffer.createBuffer_199_6
+dEQP-VK.api.buffer.createBuffer_200_6
+dEQP-VK.api.buffer.createBuffer_201_6
+dEQP-VK.api.buffer.createBuffer_202_6
+dEQP-VK.api.buffer.createBuffer_203_6
+dEQP-VK.api.buffer.createBuffer_204_6
+dEQP-VK.api.buffer.createBuffer_205_6
+dEQP-VK.api.buffer.createBuffer_206_6
+dEQP-VK.api.buffer.createBuffer_207_6
+dEQP-VK.api.buffer.createBuffer_208_6
+dEQP-VK.api.buffer.createBuffer_209_6
+dEQP-VK.api.buffer.createBuffer_210_6
+dEQP-VK.api.buffer.createBuffer_211_6
+dEQP-VK.api.buffer.createBuffer_212_6
+dEQP-VK.api.buffer.createBuffer_213_6
+dEQP-VK.api.buffer.createBuffer_214_6
+dEQP-VK.api.buffer.createBuffer_215_6
+dEQP-VK.api.buffer.createBuffer_216_6
+dEQP-VK.api.buffer.createBuffer_217_6
+dEQP-VK.api.buffer.createBuffer_218_6
+dEQP-VK.api.buffer.createBuffer_219_6
+dEQP-VK.api.buffer.createBuffer_220_6
+dEQP-VK.api.buffer.createBuffer_221_6
+dEQP-VK.api.buffer.createBuffer_222_6
+dEQP-VK.api.buffer.createBuffer_223_6
+dEQP-VK.api.buffer.createBuffer_224_6
+dEQP-VK.api.buffer.createBuffer_225_6
+dEQP-VK.api.buffer.createBuffer_226_6
+dEQP-VK.api.buffer.createBuffer_227_6
+dEQP-VK.api.buffer.createBuffer_228_6
+dEQP-VK.api.buffer.createBuffer_229_6
+dEQP-VK.api.buffer.createBuffer_230_6
+dEQP-VK.api.buffer.createBuffer_231_6
+dEQP-VK.api.buffer.createBuffer_232_6
+dEQP-VK.api.buffer.createBuffer_233_6
+dEQP-VK.api.buffer.createBuffer_234_6
+dEQP-VK.api.buffer.createBuffer_235_6
+dEQP-VK.api.buffer.createBuffer_236_6
+dEQP-VK.api.buffer.createBuffer_237_6
+dEQP-VK.api.buffer.createBuffer_238_6
+dEQP-VK.api.buffer.createBuffer_239_6
+dEQP-VK.api.buffer.createBuffer_240_6
+dEQP-VK.api.buffer.createBuffer_241_6
+dEQP-VK.api.buffer.createBuffer_242_6
+dEQP-VK.api.buffer.createBuffer_243_6
+dEQP-VK.api.buffer.createBuffer_244_6
+dEQP-VK.api.buffer.createBuffer_245_6
+dEQP-VK.api.buffer.createBuffer_246_6
+dEQP-VK.api.buffer.createBuffer_247_6
+dEQP-VK.api.buffer.createBuffer_248_6
+dEQP-VK.api.buffer.createBuffer_249_6
+dEQP-VK.api.buffer.createBuffer_250_6
+dEQP-VK.api.buffer.createBuffer_251_6
+dEQP-VK.api.buffer.createBuffer_252_6
+dEQP-VK.api.buffer.createBuffer_253_6
+dEQP-VK.api.buffer.createBuffer_254_6
+dEQP-VK.api.buffer.createBuffer_255_6
+dEQP-VK.api.buffer.createBuffer_1_7
+dEQP-VK.api.buffer.createBuffer_2_7
+dEQP-VK.api.buffer.createBuffer_3_7
+dEQP-VK.api.buffer.createBuffer_4_7
+dEQP-VK.api.buffer.createBuffer_5_7
+dEQP-VK.api.buffer.createBuffer_6_7
+dEQP-VK.api.buffer.createBuffer_7_7
+dEQP-VK.api.buffer.createBuffer_8_7
+dEQP-VK.api.buffer.createBuffer_9_7
+dEQP-VK.api.buffer.createBuffer_10_7
+dEQP-VK.api.buffer.createBuffer_11_7
+dEQP-VK.api.buffer.createBuffer_12_7
+dEQP-VK.api.buffer.createBuffer_13_7
+dEQP-VK.api.buffer.createBuffer_14_7
+dEQP-VK.api.buffer.createBuffer_15_7
+dEQP-VK.api.buffer.createBuffer_16_7
+dEQP-VK.api.buffer.createBuffer_17_7
+dEQP-VK.api.buffer.createBuffer_18_7
+dEQP-VK.api.buffer.createBuffer_19_7
+dEQP-VK.api.buffer.createBuffer_20_7
+dEQP-VK.api.buffer.createBuffer_21_7
+dEQP-VK.api.buffer.createBuffer_22_7
+dEQP-VK.api.buffer.createBuffer_23_7
+dEQP-VK.api.buffer.createBuffer_24_7
+dEQP-VK.api.buffer.createBuffer_25_7
+dEQP-VK.api.buffer.createBuffer_26_7
+dEQP-VK.api.buffer.createBuffer_27_7
+dEQP-VK.api.buffer.createBuffer_28_7
+dEQP-VK.api.buffer.createBuffer_29_7
+dEQP-VK.api.buffer.createBuffer_30_7
+dEQP-VK.api.buffer.createBuffer_31_7
+dEQP-VK.api.buffer.createBuffer_32_7
+dEQP-VK.api.buffer.createBuffer_33_7
+dEQP-VK.api.buffer.createBuffer_34_7
+dEQP-VK.api.buffer.createBuffer_35_7
+dEQP-VK.api.buffer.createBuffer_36_7
+dEQP-VK.api.buffer.createBuffer_37_7
+dEQP-VK.api.buffer.createBuffer_38_7
+dEQP-VK.api.buffer.createBuffer_39_7
+dEQP-VK.api.buffer.createBuffer_40_7
+dEQP-VK.api.buffer.createBuffer_41_7
+dEQP-VK.api.buffer.createBuffer_42_7
+dEQP-VK.api.buffer.createBuffer_43_7
+dEQP-VK.api.buffer.createBuffer_44_7
+dEQP-VK.api.buffer.createBuffer_45_7
+dEQP-VK.api.buffer.createBuffer_46_7
+dEQP-VK.api.buffer.createBuffer_47_7
+dEQP-VK.api.buffer.createBuffer_48_7
+dEQP-VK.api.buffer.createBuffer_49_7
+dEQP-VK.api.buffer.createBuffer_50_7
+dEQP-VK.api.buffer.createBuffer_51_7
+dEQP-VK.api.buffer.createBuffer_52_7
+dEQP-VK.api.buffer.createBuffer_53_7
+dEQP-VK.api.buffer.createBuffer_54_7
+dEQP-VK.api.buffer.createBuffer_55_7
+dEQP-VK.api.buffer.createBuffer_56_7
+dEQP-VK.api.buffer.createBuffer_57_7
+dEQP-VK.api.buffer.createBuffer_58_7
+dEQP-VK.api.buffer.createBuffer_59_7
+dEQP-VK.api.buffer.createBuffer_60_7
+dEQP-VK.api.buffer.createBuffer_61_7
+dEQP-VK.api.buffer.createBuffer_62_7
+dEQP-VK.api.buffer.createBuffer_63_7
+dEQP-VK.api.buffer.createBuffer_64_7
+dEQP-VK.api.buffer.createBuffer_65_7
+dEQP-VK.api.buffer.createBuffer_66_7
+dEQP-VK.api.buffer.createBuffer_67_7
+dEQP-VK.api.buffer.createBuffer_68_7
+dEQP-VK.api.buffer.createBuffer_69_7
+dEQP-VK.api.buffer.createBuffer_70_7
+dEQP-VK.api.buffer.createBuffer_71_7
+dEQP-VK.api.buffer.createBuffer_72_7
+dEQP-VK.api.buffer.createBuffer_73_7
+dEQP-VK.api.buffer.createBuffer_74_7
+dEQP-VK.api.buffer.createBuffer_75_7
+dEQP-VK.api.buffer.createBuffer_76_7
+dEQP-VK.api.buffer.createBuffer_77_7
+dEQP-VK.api.buffer.createBuffer_78_7
+dEQP-VK.api.buffer.createBuffer_79_7
+dEQP-VK.api.buffer.createBuffer_80_7
+dEQP-VK.api.buffer.createBuffer_81_7
+dEQP-VK.api.buffer.createBuffer_82_7
+dEQP-VK.api.buffer.createBuffer_83_7
+dEQP-VK.api.buffer.createBuffer_84_7
+dEQP-VK.api.buffer.createBuffer_85_7
+dEQP-VK.api.buffer.createBuffer_86_7
+dEQP-VK.api.buffer.createBuffer_87_7
+dEQP-VK.api.buffer.createBuffer_88_7
+dEQP-VK.api.buffer.createBuffer_89_7
+dEQP-VK.api.buffer.createBuffer_90_7
+dEQP-VK.api.buffer.createBuffer_91_7
+dEQP-VK.api.buffer.createBuffer_92_7
+dEQP-VK.api.buffer.createBuffer_93_7
+dEQP-VK.api.buffer.createBuffer_94_7
+dEQP-VK.api.buffer.createBuffer_95_7
+dEQP-VK.api.buffer.createBuffer_96_7
+dEQP-VK.api.buffer.createBuffer_97_7
+dEQP-VK.api.buffer.createBuffer_98_7
+dEQP-VK.api.buffer.createBuffer_99_7
+dEQP-VK.api.buffer.createBuffer_100_7
+dEQP-VK.api.buffer.createBuffer_101_7
+dEQP-VK.api.buffer.createBuffer_102_7
+dEQP-VK.api.buffer.createBuffer_103_7
+dEQP-VK.api.buffer.createBuffer_104_7
+dEQP-VK.api.buffer.createBuffer_105_7
+dEQP-VK.api.buffer.createBuffer_106_7
+dEQP-VK.api.buffer.createBuffer_107_7
+dEQP-VK.api.buffer.createBuffer_108_7
+dEQP-VK.api.buffer.createBuffer_109_7
+dEQP-VK.api.buffer.createBuffer_110_7
+dEQP-VK.api.buffer.createBuffer_111_7
+dEQP-VK.api.buffer.createBuffer_112_7
+dEQP-VK.api.buffer.createBuffer_113_7
+dEQP-VK.api.buffer.createBuffer_114_7
+dEQP-VK.api.buffer.createBuffer_115_7
+dEQP-VK.api.buffer.createBuffer_116_7
+dEQP-VK.api.buffer.createBuffer_117_7
+dEQP-VK.api.buffer.createBuffer_118_7
+dEQP-VK.api.buffer.createBuffer_119_7
+dEQP-VK.api.buffer.createBuffer_120_7
+dEQP-VK.api.buffer.createBuffer_121_7
+dEQP-VK.api.buffer.createBuffer_122_7
+dEQP-VK.api.buffer.createBuffer_123_7
+dEQP-VK.api.buffer.createBuffer_124_7
+dEQP-VK.api.buffer.createBuffer_125_7
+dEQP-VK.api.buffer.createBuffer_126_7
+dEQP-VK.api.buffer.createBuffer_127_7
+dEQP-VK.api.buffer.createBuffer_128_7
+dEQP-VK.api.buffer.createBuffer_129_7
+dEQP-VK.api.buffer.createBuffer_130_7
+dEQP-VK.api.buffer.createBuffer_131_7
+dEQP-VK.api.buffer.createBuffer_132_7
+dEQP-VK.api.buffer.createBuffer_133_7
+dEQP-VK.api.buffer.createBuffer_134_7
+dEQP-VK.api.buffer.createBuffer_135_7
+dEQP-VK.api.buffer.createBuffer_136_7
+dEQP-VK.api.buffer.createBuffer_137_7
+dEQP-VK.api.buffer.createBuffer_138_7
+dEQP-VK.api.buffer.createBuffer_139_7
+dEQP-VK.api.buffer.createBuffer_140_7
+dEQP-VK.api.buffer.createBuffer_141_7
+dEQP-VK.api.buffer.createBuffer_142_7
+dEQP-VK.api.buffer.createBuffer_143_7
+dEQP-VK.api.buffer.createBuffer_144_7
+dEQP-VK.api.buffer.createBuffer_145_7
+dEQP-VK.api.buffer.createBuffer_146_7
+dEQP-VK.api.buffer.createBuffer_147_7
+dEQP-VK.api.buffer.createBuffer_148_7
+dEQP-VK.api.buffer.createBuffer_149_7
+dEQP-VK.api.buffer.createBuffer_150_7
+dEQP-VK.api.buffer.createBuffer_151_7
+dEQP-VK.api.buffer.createBuffer_152_7
+dEQP-VK.api.buffer.createBuffer_153_7
+dEQP-VK.api.buffer.createBuffer_154_7
+dEQP-VK.api.buffer.createBuffer_155_7
+dEQP-VK.api.buffer.createBuffer_156_7
+dEQP-VK.api.buffer.createBuffer_157_7
+dEQP-VK.api.buffer.createBuffer_158_7
+dEQP-VK.api.buffer.createBuffer_159_7
+dEQP-VK.api.buffer.createBuffer_160_7
+dEQP-VK.api.buffer.createBuffer_161_7
+dEQP-VK.api.buffer.createBuffer_162_7
+dEQP-VK.api.buffer.createBuffer_163_7
+dEQP-VK.api.buffer.createBuffer_164_7
+dEQP-VK.api.buffer.createBuffer_165_7
+dEQP-VK.api.buffer.createBuffer_166_7
+dEQP-VK.api.buffer.createBuffer_167_7
+dEQP-VK.api.buffer.createBuffer_168_7
+dEQP-VK.api.buffer.createBuffer_169_7
+dEQP-VK.api.buffer.createBuffer_170_7
+dEQP-VK.api.buffer.createBuffer_171_7
+dEQP-VK.api.buffer.createBuffer_172_7
+dEQP-VK.api.buffer.createBuffer_173_7
+dEQP-VK.api.buffer.createBuffer_174_7
+dEQP-VK.api.buffer.createBuffer_175_7
+dEQP-VK.api.buffer.createBuffer_176_7
+dEQP-VK.api.buffer.createBuffer_177_7
+dEQP-VK.api.buffer.createBuffer_178_7
+dEQP-VK.api.buffer.createBuffer_179_7
+dEQP-VK.api.buffer.createBuffer_180_7
+dEQP-VK.api.buffer.createBuffer_181_7
+dEQP-VK.api.buffer.createBuffer_182_7
+dEQP-VK.api.buffer.createBuffer_183_7
+dEQP-VK.api.buffer.createBuffer_184_7
+dEQP-VK.api.buffer.createBuffer_185_7
+dEQP-VK.api.buffer.createBuffer_186_7
+dEQP-VK.api.buffer.createBuffer_187_7
+dEQP-VK.api.buffer.createBuffer_188_7
+dEQP-VK.api.buffer.createBuffer_189_7
+dEQP-VK.api.buffer.createBuffer_190_7
+dEQP-VK.api.buffer.createBuffer_191_7
+dEQP-VK.api.buffer.createBuffer_192_7
+dEQP-VK.api.buffer.createBuffer_193_7
+dEQP-VK.api.buffer.createBuffer_194_7
+dEQP-VK.api.buffer.createBuffer_195_7
+dEQP-VK.api.buffer.createBuffer_196_7
+dEQP-VK.api.buffer.createBuffer_197_7
+dEQP-VK.api.buffer.createBuffer_198_7
+dEQP-VK.api.buffer.createBuffer_199_7
+dEQP-VK.api.buffer.createBuffer_200_7
+dEQP-VK.api.buffer.createBuffer_201_7
+dEQP-VK.api.buffer.createBuffer_202_7
+dEQP-VK.api.buffer.createBuffer_203_7
+dEQP-VK.api.buffer.createBuffer_204_7
+dEQP-VK.api.buffer.createBuffer_205_7
+dEQP-VK.api.buffer.createBuffer_206_7
+dEQP-VK.api.buffer.createBuffer_207_7
+dEQP-VK.api.buffer.createBuffer_208_7
+dEQP-VK.api.buffer.createBuffer_209_7
+dEQP-VK.api.buffer.createBuffer_210_7
+dEQP-VK.api.buffer.createBuffer_211_7
+dEQP-VK.api.buffer.createBuffer_212_7
+dEQP-VK.api.buffer.createBuffer_213_7
+dEQP-VK.api.buffer.createBuffer_214_7
+dEQP-VK.api.buffer.createBuffer_215_7
+dEQP-VK.api.buffer.createBuffer_216_7
+dEQP-VK.api.buffer.createBuffer_217_7
+dEQP-VK.api.buffer.createBuffer_218_7
+dEQP-VK.api.buffer.createBuffer_219_7
+dEQP-VK.api.buffer.createBuffer_220_7
+dEQP-VK.api.buffer.createBuffer_221_7
+dEQP-VK.api.buffer.createBuffer_222_7
+dEQP-VK.api.buffer.createBuffer_223_7
+dEQP-VK.api.buffer.createBuffer_224_7
+dEQP-VK.api.buffer.createBuffer_225_7
+dEQP-VK.api.buffer.createBuffer_226_7
+dEQP-VK.api.buffer.createBuffer_227_7
+dEQP-VK.api.buffer.createBuffer_228_7
+dEQP-VK.api.buffer.createBuffer_229_7
+dEQP-VK.api.buffer.createBuffer_230_7
+dEQP-VK.api.buffer.createBuffer_231_7
+dEQP-VK.api.buffer.createBuffer_232_7
+dEQP-VK.api.buffer.createBuffer_233_7
+dEQP-VK.api.buffer.createBuffer_234_7
+dEQP-VK.api.buffer.createBuffer_235_7
+dEQP-VK.api.buffer.createBuffer_236_7
+dEQP-VK.api.buffer.createBuffer_237_7
+dEQP-VK.api.buffer.createBuffer_238_7
+dEQP-VK.api.buffer.createBuffer_239_7
+dEQP-VK.api.buffer.createBuffer_240_7
+dEQP-VK.api.buffer.createBuffer_241_7
+dEQP-VK.api.buffer.createBuffer_242_7
+dEQP-VK.api.buffer.createBuffer_243_7
+dEQP-VK.api.buffer.createBuffer_244_7
+dEQP-VK.api.buffer.createBuffer_245_7
+dEQP-VK.api.buffer.createBuffer_246_7
+dEQP-VK.api.buffer.createBuffer_247_7
+dEQP-VK.api.buffer.createBuffer_248_7
+dEQP-VK.api.buffer.createBuffer_249_7
+dEQP-VK.api.buffer.createBuffer_250_7
+dEQP-VK.api.buffer.createBuffer_251_7
+dEQP-VK.api.buffer.createBuffer_252_7
+dEQP-VK.api.buffer.createBuffer_253_7
+dEQP-VK.api.buffer.createBuffer_254_7
+dEQP-VK.api.buffer.createBuffer_255_7
+dEQP-VK.api.buffer_view.create.createBufferView_1_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_1_storage
+dEQP-VK.api.buffer_view.create.createBufferView_2_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_2_storage
+dEQP-VK.api.buffer_view.create.createBufferView_3_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_3_storage
+dEQP-VK.api.buffer_view.create.createBufferView_4_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_4_storage
+dEQP-VK.api.buffer_view.create.createBufferView_5_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_5_storage
+dEQP-VK.api.buffer_view.create.createBufferView_6_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_6_storage
+dEQP-VK.api.buffer_view.create.createBufferView_7_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_7_storage
+dEQP-VK.api.buffer_view.create.createBufferView_8_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_8_storage
+dEQP-VK.api.buffer_view.create.createBufferView_9_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_9_storage
+dEQP-VK.api.buffer_view.create.createBufferView_10_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_10_storage
+dEQP-VK.api.buffer_view.create.createBufferView_11_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_11_storage
+dEQP-VK.api.buffer_view.create.createBufferView_12_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_12_storage
+dEQP-VK.api.buffer_view.create.createBufferView_13_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_13_storage
+dEQP-VK.api.buffer_view.create.createBufferView_14_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_14_storage
+dEQP-VK.api.buffer_view.create.createBufferView_15_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_15_storage
+dEQP-VK.api.buffer_view.create.createBufferView_16_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_16_storage
+dEQP-VK.api.buffer_view.create.createBufferView_17_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_17_storage
+dEQP-VK.api.buffer_view.create.createBufferView_18_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_18_storage
+dEQP-VK.api.buffer_view.create.createBufferView_19_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_19_storage
+dEQP-VK.api.buffer_view.create.createBufferView_20_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_20_storage
+dEQP-VK.api.buffer_view.create.createBufferView_21_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_21_storage
+dEQP-VK.api.buffer_view.create.createBufferView_22_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_22_storage
+dEQP-VK.api.buffer_view.create.createBufferView_23_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_23_storage
+dEQP-VK.api.buffer_view.create.createBufferView_24_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_24_storage
+dEQP-VK.api.buffer_view.create.createBufferView_25_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_25_storage
+dEQP-VK.api.buffer_view.create.createBufferView_26_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_26_storage
+dEQP-VK.api.buffer_view.create.createBufferView_27_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_27_storage
+dEQP-VK.api.buffer_view.create.createBufferView_28_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_28_storage
+dEQP-VK.api.buffer_view.create.createBufferView_29_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_29_storage
+dEQP-VK.api.buffer_view.create.createBufferView_30_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_30_storage
+dEQP-VK.api.buffer_view.create.createBufferView_31_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_31_storage
+dEQP-VK.api.buffer_view.create.createBufferView_32_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_32_storage
+dEQP-VK.api.buffer_view.create.createBufferView_33_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_33_storage
+dEQP-VK.api.buffer_view.create.createBufferView_34_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_34_storage
+dEQP-VK.api.buffer_view.create.createBufferView_35_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_35_storage
+dEQP-VK.api.buffer_view.create.createBufferView_36_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_36_storage
+dEQP-VK.api.buffer_view.create.createBufferView_37_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_37_storage
+dEQP-VK.api.buffer_view.create.createBufferView_38_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_38_storage
+dEQP-VK.api.buffer_view.create.createBufferView_39_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_39_storage
+dEQP-VK.api.buffer_view.create.createBufferView_40_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_40_storage
+dEQP-VK.api.buffer_view.create.createBufferView_41_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_41_storage
+dEQP-VK.api.buffer_view.create.createBufferView_42_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_42_storage
+dEQP-VK.api.buffer_view.create.createBufferView_43_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_43_storage
+dEQP-VK.api.buffer_view.create.createBufferView_44_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_44_storage
+dEQP-VK.api.buffer_view.create.createBufferView_45_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_45_storage
+dEQP-VK.api.buffer_view.create.createBufferView_46_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_46_storage
+dEQP-VK.api.buffer_view.create.createBufferView_47_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_47_storage
+dEQP-VK.api.buffer_view.create.createBufferView_48_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_48_storage
+dEQP-VK.api.buffer_view.create.createBufferView_49_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_49_storage
+dEQP-VK.api.buffer_view.create.createBufferView_50_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_50_storage
+dEQP-VK.api.buffer_view.create.createBufferView_51_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_51_storage
+dEQP-VK.api.buffer_view.create.createBufferView_52_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_52_storage
+dEQP-VK.api.buffer_view.create.createBufferView_53_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_53_storage
+dEQP-VK.api.buffer_view.create.createBufferView_54_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_54_storage
+dEQP-VK.api.buffer_view.create.createBufferView_55_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_55_storage
+dEQP-VK.api.buffer_view.create.createBufferView_56_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_56_storage
+dEQP-VK.api.buffer_view.create.createBufferView_57_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_57_storage
+dEQP-VK.api.buffer_view.create.createBufferView_58_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_58_storage
+dEQP-VK.api.buffer_view.create.createBufferView_59_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_59_storage
+dEQP-VK.api.buffer_view.create.createBufferView_60_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_60_storage
+dEQP-VK.api.buffer_view.create.createBufferView_61_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_61_storage
+dEQP-VK.api.buffer_view.create.createBufferView_62_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_62_storage
+dEQP-VK.api.buffer_view.create.createBufferView_63_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_63_storage
+dEQP-VK.api.buffer_view.create.createBufferView_64_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_64_storage
+dEQP-VK.api.buffer_view.create.createBufferView_65_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_65_storage
+dEQP-VK.api.buffer_view.create.createBufferView_66_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_66_storage
+dEQP-VK.api.buffer_view.create.createBufferView_67_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_67_storage
+dEQP-VK.api.buffer_view.create.createBufferView_68_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_68_storage
+dEQP-VK.api.buffer_view.create.createBufferView_69_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_69_storage
+dEQP-VK.api.buffer_view.create.createBufferView_70_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_70_storage
+dEQP-VK.api.buffer_view.create.createBufferView_71_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_71_storage
+dEQP-VK.api.buffer_view.create.createBufferView_72_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_72_storage
+dEQP-VK.api.buffer_view.create.createBufferView_73_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_73_storage
+dEQP-VK.api.buffer_view.create.createBufferView_74_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_74_storage
+dEQP-VK.api.buffer_view.create.createBufferView_75_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_75_storage
+dEQP-VK.api.buffer_view.create.createBufferView_76_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_76_storage
+dEQP-VK.api.buffer_view.create.createBufferView_77_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_77_storage
+dEQP-VK.api.buffer_view.create.createBufferView_78_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_78_storage
+dEQP-VK.api.buffer_view.create.createBufferView_79_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_79_storage
+dEQP-VK.api.buffer_view.create.createBufferView_80_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_80_storage
+dEQP-VK.api.buffer_view.create.createBufferView_81_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_81_storage
+dEQP-VK.api.buffer_view.create.createBufferView_82_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_82_storage
+dEQP-VK.api.buffer_view.create.createBufferView_83_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_83_storage
+dEQP-VK.api.buffer_view.create.createBufferView_84_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_84_storage
+dEQP-VK.api.buffer_view.create.createBufferView_85_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_85_storage
+dEQP-VK.api.buffer_view.create.createBufferView_86_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_86_storage
+dEQP-VK.api.buffer_view.create.createBufferView_87_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_87_storage
+dEQP-VK.api.buffer_view.create.createBufferView_88_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_88_storage
+dEQP-VK.api.buffer_view.create.createBufferView_89_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_89_storage
+dEQP-VK.api.buffer_view.create.createBufferView_90_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_90_storage
+dEQP-VK.api.buffer_view.create.createBufferView_91_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_91_storage
+dEQP-VK.api.buffer_view.create.createBufferView_92_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_92_storage
+dEQP-VK.api.buffer_view.create.createBufferView_93_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_93_storage
+dEQP-VK.api.buffer_view.create.createBufferView_94_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_94_storage
+dEQP-VK.api.buffer_view.create.createBufferView_95_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_95_storage
+dEQP-VK.api.buffer_view.create.createBufferView_96_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_96_storage
+dEQP-VK.api.buffer_view.create.createBufferView_97_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_97_storage
+dEQP-VK.api.buffer_view.create.createBufferView_98_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_98_storage
+dEQP-VK.api.buffer_view.create.createBufferView_99_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_99_storage
+dEQP-VK.api.buffer_view.create.createBufferView_100_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_100_storage
+dEQP-VK.api.buffer_view.create.createBufferView_101_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_101_storage
+dEQP-VK.api.buffer_view.create.createBufferView_102_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_102_storage
+dEQP-VK.api.buffer_view.create.createBufferView_103_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_103_storage
+dEQP-VK.api.buffer_view.create.createBufferView_104_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_104_storage
+dEQP-VK.api.buffer_view.create.createBufferView_105_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_105_storage
+dEQP-VK.api.buffer_view.create.createBufferView_106_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_106_storage
+dEQP-VK.api.buffer_view.create.createBufferView_107_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_107_storage
+dEQP-VK.api.buffer_view.create.createBufferView_108_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_108_storage
+dEQP-VK.api.buffer_view.create.createBufferView_109_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_109_storage
+dEQP-VK.api.buffer_view.create.createBufferView_110_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_110_storage
+dEQP-VK.api.buffer_view.create.createBufferView_111_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_111_storage
+dEQP-VK.api.buffer_view.create.createBufferView_112_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_112_storage
+dEQP-VK.api.buffer_view.create.createBufferView_113_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_113_storage
+dEQP-VK.api.buffer_view.create.createBufferView_114_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_114_storage
+dEQP-VK.api.buffer_view.create.createBufferView_115_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_115_storage
+dEQP-VK.api.buffer_view.create.createBufferView_116_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_116_storage
+dEQP-VK.api.buffer_view.create.createBufferView_117_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_117_storage
+dEQP-VK.api.buffer_view.create.createBufferView_118_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_118_storage
+dEQP-VK.api.buffer_view.create.createBufferView_119_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_119_storage
+dEQP-VK.api.buffer_view.create.createBufferView_120_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_120_storage
+dEQP-VK.api.buffer_view.create.createBufferView_121_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_121_storage
+dEQP-VK.api.buffer_view.create.createBufferView_122_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_122_storage
+dEQP-VK.api.buffer_view.create.createBufferView_123_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_123_storage
+dEQP-VK.api.buffer_view.create.createBufferView_124_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_124_storage
+dEQP-VK.api.buffer_view.create.createBufferView_125_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_125_storage
+dEQP-VK.api.buffer_view.create.createBufferView_126_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_126_storage
+dEQP-VK.api.buffer_view.create.createBufferView_127_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_127_storage
+dEQP-VK.api.buffer_view.create.createBufferView_128_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_128_storage
+dEQP-VK.api.buffer_view.create.createBufferView_129_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_129_storage
+dEQP-VK.api.buffer_view.create.createBufferView_130_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_130_storage
+dEQP-VK.api.buffer_view.create.createBufferView_131_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_131_storage
+dEQP-VK.api.buffer_view.create.createBufferView_132_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_132_storage
+dEQP-VK.api.buffer_view.create.createBufferView_133_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_133_storage
+dEQP-VK.api.buffer_view.create.createBufferView_134_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_134_storage
+dEQP-VK.api.buffer_view.create.createBufferView_135_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_135_storage
+dEQP-VK.api.buffer_view.create.createBufferView_136_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_136_storage
+dEQP-VK.api.buffer_view.create.createBufferView_137_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_137_storage
+dEQP-VK.api.buffer_view.create.createBufferView_138_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_138_storage
+dEQP-VK.api.buffer_view.create.createBufferView_139_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_139_storage
+dEQP-VK.api.buffer_view.create.createBufferView_140_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_140_storage
+dEQP-VK.api.buffer_view.create.createBufferView_141_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_141_storage
+dEQP-VK.api.buffer_view.create.createBufferView_142_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_142_storage
+dEQP-VK.api.buffer_view.create.createBufferView_143_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_143_storage
+dEQP-VK.api.buffer_view.create.createBufferView_144_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_144_storage
+dEQP-VK.api.buffer_view.create.createBufferView_145_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_145_storage
+dEQP-VK.api.buffer_view.create.createBufferView_146_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_146_storage
+dEQP-VK.api.buffer_view.create.createBufferView_147_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_147_storage
+dEQP-VK.api.buffer_view.create.createBufferView_148_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_148_storage
+dEQP-VK.api.buffer_view.create.createBufferView_149_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_149_storage
+dEQP-VK.api.buffer_view.create.createBufferView_150_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_150_storage
+dEQP-VK.api.buffer_view.create.createBufferView_151_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_151_storage
+dEQP-VK.api.buffer_view.create.createBufferView_152_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_152_storage
+dEQP-VK.api.buffer_view.create.createBufferView_153_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_153_storage
+dEQP-VK.api.buffer_view.create.createBufferView_154_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_154_storage
+dEQP-VK.api.buffer_view.create.createBufferView_155_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_155_storage
+dEQP-VK.api.buffer_view.create.createBufferView_156_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_156_storage
+dEQP-VK.api.buffer_view.create.createBufferView_157_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_157_storage
+dEQP-VK.api.buffer_view.create.createBufferView_158_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_158_storage
+dEQP-VK.api.buffer_view.create.createBufferView_159_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_159_storage
+dEQP-VK.api.buffer_view.create.createBufferView_160_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_160_storage
+dEQP-VK.api.buffer_view.create.createBufferView_161_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_161_storage
+dEQP-VK.api.buffer_view.create.createBufferView_162_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_162_storage
+dEQP-VK.api.buffer_view.create.createBufferView_163_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_163_storage
+dEQP-VK.api.buffer_view.create.createBufferView_164_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_164_storage
+dEQP-VK.api.buffer_view.create.createBufferView_165_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_165_storage
+dEQP-VK.api.buffer_view.create.createBufferView_166_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_166_storage
+dEQP-VK.api.buffer_view.create.createBufferView_167_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_167_storage
+dEQP-VK.api.buffer_view.create.createBufferView_168_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_168_storage
+dEQP-VK.api.buffer_view.create.createBufferView_169_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_169_storage
+dEQP-VK.api.buffer_view.create.createBufferView_170_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_170_storage
+dEQP-VK.api.buffer_view.create.createBufferView_171_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_171_storage
+dEQP-VK.api.buffer_view.create.createBufferView_172_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_172_storage
+dEQP-VK.api.buffer_view.create.createBufferView_173_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_173_storage
+dEQP-VK.api.buffer_view.create.createBufferView_174_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_174_storage
+dEQP-VK.api.buffer_view.create.createBufferView_175_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_175_storage
+dEQP-VK.api.buffer_view.create.createBufferView_176_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_176_storage
+dEQP-VK.api.buffer_view.create.createBufferView_177_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_177_storage
+dEQP-VK.api.buffer_view.create.createBufferView_178_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_178_storage
+dEQP-VK.api.buffer_view.create.createBufferView_179_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_179_storage
+dEQP-VK.api.buffer_view.create.createBufferView_180_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_180_storage
+dEQP-VK.api.buffer_view.create.createBufferView_181_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_181_storage
+dEQP-VK.api.buffer_view.create.createBufferView_182_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_182_storage
+dEQP-VK.api.buffer_view.create.createBufferView_183_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_183_storage
+dEQP-VK.api.buffer_view.create.createBufferView_184_uniform
+dEQP-VK.api.buffer_view.create.createBufferView_184_storage
+dEQP-VK.api.buffer_view.access.buffer_view_memory_test_complete
+dEQP-VK.api.buffer_view.access.buffer_view_memory_test_partial_offset0
+dEQP-VK.api.buffer_view.access.buffer_view_memory_test_partial_offset1
+dEQP-VK.api.command_buffers.pool_create_null_params
+dEQP-VK.api.command_buffers.pool_create_non_null_allocator
+dEQP-VK.api.command_buffers.pool_create_transient_bit
+dEQP-VK.api.command_buffers.pool_create_reset_bit
+dEQP-VK.api.command_buffers.pool_reset_release_res
+dEQP-VK.api.command_buffers.pool_reset_no_flags_res
+dEQP-VK.api.command_buffers.allocate_single_primary
+dEQP-VK.api.command_buffers.allocate_many_primary
+dEQP-VK.api.command_buffers.allocate_zero_primary
+dEQP-VK.api.command_buffers.allocate_single_secondary
+dEQP-VK.api.command_buffers.allocate_many_secondary
+dEQP-VK.api.command_buffers.allocate_zero_secondary
+dEQP-VK.api.command_buffers.execute_small_primary
+dEQP-VK.api.command_buffers.execute_large_primary
+dEQP-VK.api.command_buffers.reset_implicit
+dEQP-VK.api.command_buffers.record_single_primary
+dEQP-VK.api.command_buffers.record_many_primary
+dEQP-VK.api.command_buffers.record_single_secondary
+dEQP-VK.api.command_buffers.record_many_secondary
+dEQP-VK.api.command_buffers.submit_twice_primary
+dEQP-VK.api.command_buffers.submit_twice_secondary
+dEQP-VK.api.command_buffers.record_one_time_submit_primary
+dEQP-VK.api.command_buffers.record_one_time_submit_secondary
+dEQP-VK.api.command_buffers.record_simul_use_primary
+dEQP-VK.api.command_buffers.record_simul_use_secondary
+dEQP-VK.api.command_buffers.record_query_precise_w_flag
+dEQP-VK.api.command_buffers.record_query_imprecise_w_flag
+dEQP-VK.api.command_buffers.record_query_imprecise_wo_flag
+dEQP-VK.api.command_buffers.submit_count_non_zero
+dEQP-VK.api.command_buffers.submit_count_equal_zero
+dEQP-VK.api.command_buffers.submit_null_fence
+dEQP-VK.api.command_buffers.secondary_execute
+dEQP-VK.api.command_buffers.secondary_execute_twice
+dEQP-VK.api.command_buffers.order_bind_pipeline
+dEQP-VK.api.copy_and_blit.imageToImage_whole
+dEQP-VK.api.copy_and_blit.image_to_image_whole_different_format_uncompressed
+dEQP-VK.api.copy_and_blit.image_to_image_partial
+dEQP-VK.api.copy_and_blit.image_to_image_partial_multiple
+dEQP-VK.api.copy_and_blit.image_to_buffer
+dEQP-VK.api.copy_and_blit.buffer_to_image
+dEQP-VK.api.copy_and_blit.buffer_to_buffer_whole
+dEQP-VK.api.copy_and_blit.buffer_to_buffer_small
+dEQP-VK.api.copy_and_blit.buffer_to_buffer_regions
+dEQP-VK.api.copy_and_blit.image_to_image_depth
+dEQP-VK.api.copy_and_blit.image_to_image_stencil
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_not_equal_back_fail_decc_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_not_equal_back_fail_decc_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_less_back_fail_incc_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_equal_back_fail_inv_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_always_back_fail_incc_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_not_equal_back_fail_repl_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_equal_back_fail_incc_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_inv_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_equal_back_fail_incc_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_never_back_fail_decc_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_always_back_fail_decw_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_less_back_fail_keep_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_always_back_fail_repl_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_never_back_fail_inv_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_less_back_fail_decw_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_equal_back_fail_decc_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_less_back_fail_decc_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_less_back_fail_inv_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_greater_back_fail_zero_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_greater_back_fail_wrap_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_always_back_fail_decc_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_never_back_fail_wrap_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_never_back_fail_keep_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_never_back_fail_incc_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_less_back_fail_incc_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_equal_back_fail_decc_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_not_equal_back_fail_keep_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_never_back_fail_keep_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_greater_back_fail_inv_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_always_back_fail_wrap_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_greater_back_fail_decw_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_equal_back_fail_incc_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_always_back_fail_decc_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_less_or_equal_back_fail_zero_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_never_back_fail_incc_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_greater_back_fail_incc_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_never_back_fail_wrap_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_equal_back_fail_wrap_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_not_equal_back_fail_keep_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_greater_back_fail_decc_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_not_equal_back_fail_decc_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_not_equal_back_fail_zero_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_always_back_fail_inv_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_greater_back_fail_keep_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_greater_back_fail_zero_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_not_equal_back_fail_repl_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_less_back_fail_wrap_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_less_back_fail_repl_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_not_equal_back_fail_inv_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_equal_back_fail_inv_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_always_back_fail_repl_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_never_back_fail_zero_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_equal_back_fail_incc_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_equal_back_fail_decw_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_less_back_fail_decc_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_never_back_fail_decw_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_less_or_equal_back_fail_inv_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_never_back_fail_decc_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_equal_back_fail_inv_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_never_back_fail_zero_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_never_back_fail_wrap_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_always_back_fail_keep_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_equal_back_fail_keep_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_greater_back_fail_decc_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_not_equal_back_fail_inv_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_always_back_fail_decc_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_always_back_fail_wrap_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_always_back_fail_decc_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_not_equal_back_fail_keep_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_equal_back_fail_decc_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_equal_back_fail_repl_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_not_equal_back_fail_decw_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_less_or_equal_back_fail_keep_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_greater_back_fail_inv_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_equal_back_fail_decw_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_less_back_fail_zero_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_equal_back_fail_repl_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_always_back_fail_inv_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_never_back_fail_inv_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_never_back_fail_zero_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_equal_back_fail_inv_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_equal_back_fail_inv_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_less_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_never_back_fail_incc_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_always_back_fail_keep_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_not_equal_back_fail_inv_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_less_or_equal_back_fail_repl_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_greater_back_fail_decw_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_equal_back_fail_repl_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_less_back_fail_inv_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_less_back_fail_incc_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_never_back_fail_inv_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_never_back_fail_keep_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_not_equal_back_fail_repl_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_greater_back_fail_inv_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_never_back_fail_inv_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_always_back_fail_decc_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_less_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_greater_back_fail_decw_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_less_or_equal_back_fail_zero_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_never_back_fail_keep_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_greater_back_fail_decc_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_always_back_fail_incc_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_equal_back_fail_inv_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_never_back_fail_inv_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_always_back_fail_inv_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_always_back_fail_incc_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_always_back_fail_wrap_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_always_back_fail_decw_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_equal_back_fail_repl_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_never_back_fail_decc_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_always_back_fail_repl_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_not_equal_back_fail_wrap_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_never_back_fail_decc_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_less_back_fail_decw_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_not_equal_back_fail_zero_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_less_or_equal_back_fail_zero_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_greater_back_fail_wrap_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_not_equal_back_fail_decc_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_always_back_fail_decw_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_always_back_fail_inv_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_never_back_fail_keep_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_not_equal_back_fail_zero_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_less_or_equal_back_fail_inv_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_greater_back_fail_keep_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_less_back_fail_wrap_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_always_back_fail_decc_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_always_back_fail_incc_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_not_equal_back_fail_incc_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_less_back_fail_inv_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_equal_back_fail_inv_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_equal_back_fail_keep_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_greater_back_fail_decw_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_never_back_fail_repl_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_equal_back_fail_inv_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_greater_back_fail_decc_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_always_back_fail_incc_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_equal_back_fail_decw_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_less_back_fail_incc_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_less_or_equal_back_fail_keep_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_never_back_fail_incc_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_not_equal_back_fail_repl_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_not_equal_back_fail_decc_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_not_equal_back_fail_incc_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_always_back_fail_decw_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_greater_back_fail_decc_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_not_equal_back_fail_incc_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_never_back_fail_decw_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_not_equal_back_fail_decw_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_not_equal_back_fail_decc_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_less_back_fail_decw_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_greater_back_fail_inv_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_not_equal_back_fail_wrap_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_equal_back_fail_inv_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_less_or_equal_back_fail_zero_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_not_equal_back_fail_repl_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_never_back_fail_inv_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_not_equal_back_fail_decw_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_greater_back_fail_repl_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_not_equal_back_fail_wrap_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_never_back_fail_repl_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_less_back_fail_inv_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_never_back_fail_keep_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_less_back_fail_decw_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_always_back_fail_repl_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_never_back_fail_incc_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_never_back_fail_inv_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_less_back_fail_decc_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_not_equal_back_fail_decc_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_not_equal_back_fail_zero_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_greater_back_fail_zero_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_equal_back_fail_incc_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_always_back_fail_inv_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_always_back_fail_inv_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_always_back_fail_incc_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_always_back_fail_wrap_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_equal_back_fail_decw_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_never_back_fail_keep_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_greater_back_fail_zero_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_less_or_equal_back_fail_decw_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_equal_back_fail_decw_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_equal_back_fail_decc_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_equal_back_fail_wrap_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_less_or_equal_back_fail_repl_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_never_back_fail_inv_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_equal_back_fail_zero_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_always_back_fail_incc_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_less_or_equal_back_fail_decw_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_greater_back_fail_decw_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_not_equal_back_fail_decc_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_greater_back_fail_wrap_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_less_back_fail_wrap_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_never_back_fail_repl_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_greater_back_fail_inv_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_always_back_fail_decw_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_always_back_fail_inv_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_not_equal_back_fail_zero_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_equal_back_fail_decc_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_never_back_fail_repl_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_not_equal_back_fail_wrap_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_never_back_fail_decw_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_less_back_fail_repl_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_less_back_fail_inv_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_never_back_fail_wrap_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_greater_back_fail_repl_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_always_back_fail_decw_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_not_equal_back_fail_incc_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_less_or_equal_back_fail_incc_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_equal_back_fail_repl_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_greater_back_fail_decc_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_greater_back_fail_repl_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_always_back_fail_keep_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_always_back_fail_keep_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_less_back_fail_wrap_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_greater_back_fail_zero_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_decc_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_never_back_fail_keep_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_less_back_fail_inv_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_not_equal_back_fail_zero_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_never_back_fail_decc_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_equal_back_fail_keep_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_greater_back_fail_keep_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_greater_back_fail_zero_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_not_equal_back_fail_keep_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_greater_back_fail_decw_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_never_back_fail_decc_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_always_back_fail_decc_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_always_back_fail_keep_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_always_back_fail_incc_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_less_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_never_back_fail_zero_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_always_back_fail_keep_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_less_back_fail_decc_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_never_back_fail_inv_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_equal_back_fail_keep_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_less_back_fail_zero_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_equal_back_fail_incc_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_less_or_equal_back_fail_inv_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_less_back_fail_zero_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_equal_back_fail_inv_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_not_equal_back_fail_decw_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_less_back_fail_decc_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_less_back_fail_wrap_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_equal_back_fail_incc_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_greater_back_fail_wrap_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_equal_back_fail_keep_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_never_back_fail_incc_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_always_back_fail_incc_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_less_back_fail_repl_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_never_back_fail_zero_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_equal_back_fail_keep_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_not_equal_back_fail_wrap_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_less_back_fail_zero_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_always_back_fail_decw_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_equal_back_fail_keep_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_equal_back_fail_decw_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_less_or_equal_back_fail_incc_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_never_back_fail_decc_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_never_back_fail_incc_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_less_back_fail_decc_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_equal_back_fail_repl_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_less_back_fail_incc_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_never_back_fail_keep_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_less_back_fail_decc_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_not_equal_back_fail_inv_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_never_back_fail_decc_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_never_back_fail_incc_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_less_back_fail_incc_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_always_back_fail_decc_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decw_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_never_back_fail_inv_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_equal_back_fail_decc_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_less_back_fail_incc_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_not_equal_back_fail_decw_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_less_back_fail_incc_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_less_back_fail_repl_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_less_back_fail_decc_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_less_back_fail_keep_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_not_equal_back_fail_decw_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_not_equal_back_fail_repl_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_equal_back_fail_keep_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_not_equal_back_fail_keep_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_never_back_fail_wrap_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_greater_back_fail_keep_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_not_equal_back_fail_zero_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_always_back_fail_decc_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_not_equal_back_fail_decc_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_less_back_fail_decc_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_always_back_fail_decw_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_never_back_fail_zero_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_less_back_fail_repl_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_less_back_fail_repl_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_always_back_fail_keep_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_equal_back_fail_wrap_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_greater_back_fail_keep_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_less_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_never_back_fail_repl_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_less_or_equal_back_fail_inv_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_not_equal_back_fail_zero_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_always_back_fail_keep_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_not_equal_back_fail_wrap_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_greater_back_fail_wrap_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_greater_back_fail_inv_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_equal_back_fail_keep_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_greater_back_fail_inv_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_never_back_fail_repl_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_not_equal_back_fail_decc_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_not_equal_back_fail_zero_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_greater_back_fail_decc_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_not_equal_back_fail_inv_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_not_equal_back_fail_decw_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_equal_back_fail_inv_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_greater_back_fail_incc_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_equal_back_fail_repl_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_greater_back_fail_wrap_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_less_back_fail_zero_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_less_back_fail_repl_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_equal_back_fail_wrap_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_not_equal_back_fail_inv_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_always_back_fail_decw_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_equal_back_fail_wrap_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_less_back_fail_zero_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_not_equal_back_fail_keep_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_less_back_fail_zero_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_always_back_fail_zero_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_not_equal_back_fail_repl_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_repl_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_never_back_fail_zero_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_never_back_fail_keep_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_not_equal_back_fail_zero_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_equal_back_fail_decc_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_always_back_fail_incc_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_always_back_fail_wrap_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_greater_back_fail_wrap_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_never_back_fail_keep_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_greater_back_fail_inv_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_greater_back_fail_wrap_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_not_equal_back_fail_zero_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_equal_back_fail_inv_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_equal_back_fail_inv_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_less_back_fail_repl_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_always_back_fail_incc_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_equal_back_fail_decc_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_always_back_fail_wrap_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_never_back_fail_zero_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_never_back_fail_repl_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_not_equal_back_fail_wrap_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_not_equal_back_fail_incc_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_less_back_fail_repl_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_not_equal_back_fail_wrap_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_never_back_fail_zero_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_always_back_fail_incc_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_never_back_fail_wrap_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_greater_back_fail_decc_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_not_equal_back_fail_keep_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_less_back_fail_repl_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_greater_back_fail_inv_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_not_equal_back_fail_incc_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_equal_back_fail_zero_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_equal_back_fail_inv_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_less_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_never_back_fail_inv_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_greater_back_fail_repl_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_greater_back_fail_decw_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_less_or_equal_back_fail_decc_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_less_back_fail_keep_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_always_back_fail_decw_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_less_back_fail_keep_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_less_back_fail_keep_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_not_equal_back_fail_incc_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_not_equal_back_fail_decc_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_never_back_fail_incc_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_greater_back_fail_decw_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_greater_back_fail_zero_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_not_equal_back_fail_wrap_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_greater_back_fail_wrap_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_equal_back_fail_zero_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_greater_back_fail_wrap_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_greater_back_fail_wrap_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_equal_back_fail_keep_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_always_back_fail_incc_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_equal_back_fail_repl_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_less_back_fail_inv_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_less_back_fail_keep_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_less_or_equal_back_fail_decc_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_equal_back_fail_wrap_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_always_back_fail_decw_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_not_equal_back_fail_wrap_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_less_back_fail_incc_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_always_back_fail_incc_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_never_back_fail_wrap_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_equal_back_fail_repl_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_greater_back_fail_decw_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_equal_back_fail_inv_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_equal_back_fail_decc_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_greater_back_fail_keep_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_less_back_fail_repl_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_never_back_fail_inv_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_less_back_fail_keep_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_not_equal_back_fail_incc_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_less_back_fail_zero_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_greater_back_fail_decc_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_less_back_fail_repl_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_greater_back_fail_decw_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_greater_back_fail_keep_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_not_equal_back_fail_inv_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_always_back_fail_decc_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_never_back_fail_decw_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_not_equal_back_fail_wrap_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_never_back_fail_inv_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_less_or_equal_back_fail_incc_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_less_back_fail_incc_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_keep_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_equal_back_fail_zero_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_never_back_fail_wrap_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_equal_back_fail_wrap_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_greater_back_fail_zero_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_never_back_fail_zero_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_greater_back_fail_repl_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_greater_back_fail_decc_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_never_back_fail_repl_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_not_equal_back_fail_decw_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_never_back_fail_keep_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_never_back_fail_keep_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_always_back_fail_zero_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_not_equal_back_fail_decc_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_equal_back_fail_incc_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_never_back_fail_zero_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_less_back_fail_incc_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_less_back_fail_zero_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_less_back_fail_wrap_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_less_or_equal_back_fail_repl_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_less_or_equal_back_fail_zero_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_never_back_fail_inv_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_equal_back_fail_decw_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_equal_back_fail_decc_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_less_back_fail_decw_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_equal_back_fail_repl_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_never_back_fail_repl_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_equal_back_fail_inv_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_always_back_fail_wrap_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_greater_back_fail_decc_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_never_back_fail_keep_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_always_back_fail_decw_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_less_back_fail_keep_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_not_equal_back_fail_decc_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_less_back_fail_keep_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_greater_back_fail_decc_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_equal_back_fail_zero_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_never_back_fail_decc_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_always_back_fail_wrap_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_equal_back_fail_wrap_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_not_equal_back_fail_inv_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_never_back_fail_incc_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_not_equal_back_fail_zero_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_not_equal_back_fail_incc_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_less_or_equal_back_fail_zero_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_less_back_fail_inv_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_never_back_fail_keep_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_not_equal_back_fail_keep_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_greater_back_fail_decc_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_greater_back_fail_zero_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_never_back_fail_repl_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_always_back_fail_incc_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_not_equal_back_fail_incc_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_equal_back_fail_keep_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_not_equal_back_fail_decw_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_less_or_equal_back_fail_repl_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_never_back_fail_inv_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_always_back_fail_keep_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_never_back_fail_decw_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_greater_back_fail_incc_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_always_back_fail_wrap_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_less_back_fail_wrap_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_greater_back_fail_inv_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_equal_back_fail_decc_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_never_back_fail_inv_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_never_back_fail_keep_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_less_back_fail_decw_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_greater_back_fail_inv_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_greater_back_fail_inv_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_equal_back_fail_repl_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_never_back_fail_wrap_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_never_back_fail_decc_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_never_back_fail_wrap_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_equal_back_fail_zero_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_less_back_fail_wrap_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_never_back_fail_decw_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_never_back_fail_decc_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_equal_back_fail_decc_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_not_equal_back_fail_inv_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_never_back_fail_incc_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_less_back_fail_wrap_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_not_equal_back_fail_decc_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_less_back_fail_wrap_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_less_back_fail_incc_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_less_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_always_back_fail_zero_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_less_or_equal_back_fail_repl_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_less_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_always_back_fail_zero_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_always_back_fail_decc_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_not_equal_back_fail_keep_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_not_equal_back_fail_repl_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_equal_back_fail_keep_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_always_back_fail_decc_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_not_equal_back_fail_wrap_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_never_back_fail_decw_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_always_back_fail_keep_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_not_equal_back_fail_wrap_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_equal_back_fail_incc_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_not_equal_back_fail_repl_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_less_or_equal_back_fail_decc_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_never_back_fail_keep_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_equal_back_fail_zero_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_greater_back_fail_incc_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_incc_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_always_back_fail_inv_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_less_back_fail_keep_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_less_or_equal_back_fail_incc_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_never_back_fail_decc_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_equal_back_fail_repl_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_greater_back_fail_zero_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_not_equal_back_fail_decw_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_never_back_fail_repl_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_equal_back_fail_inv_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_less_back_fail_decw_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_greater_back_fail_inv_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_always_back_fail_decc_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_less_back_fail_keep_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_never_back_fail_keep_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_always_back_fail_decw_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_less_back_fail_inv_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_never_back_fail_keep_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_never_back_fail_repl_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_greater_back_fail_zero_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_never_back_fail_keep_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_less_or_equal_back_fail_zero_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_not_equal_back_fail_zero_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_greater_back_fail_decc_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_less_back_fail_wrap_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_never_back_fail_decc_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_greater_back_fail_wrap_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_equal_back_fail_repl_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_greater_back_fail_keep_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_always_back_fail_zero_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_always_back_fail_wrap_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_always_back_fail_repl_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_equal_back_fail_keep_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_always_back_fail_wrap_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_never_back_fail_wrap_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_greater_back_fail_incc_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_equal_back_fail_keep_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_less_back_fail_repl_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_greater_back_fail_zero_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_never_back_fail_incc_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_never_back_fail_decw_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_greater_back_fail_decw_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_not_equal_back_fail_decc_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_less_or_equal_back_fail_decw_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_never_back_fail_wrap_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_less_back_fail_decw_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_not_equal_back_fail_zero_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_equal_back_fail_wrap_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_always_back_fail_decw_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_always_back_fail_inv_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_never_back_fail_incc_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_equal_back_fail_zero_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_less_back_fail_zero_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_equal_back_fail_decw_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_never_back_fail_keep_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_never_back_fail_repl_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_not_equal_back_fail_decw_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_equal_back_fail_repl_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_always_back_fail_zero_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_never_back_fail_zero_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_not_equal_back_fail_inv_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_less_back_fail_incc_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_less_or_equal_back_fail_repl_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_equal_back_fail_decc_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_always_back_fail_decc_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_never_back_fail_repl_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_greater_back_fail_incc_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_less_or_equal_back_fail_decc_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_less_or_equal_back_fail_zero_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_less_back_fail_incc_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_never_back_fail_keep_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_not_equal_back_fail_decw_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_greater_back_fail_decw_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_less_back_fail_zero_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_never_back_fail_decw_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_never_back_fail_repl_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_always_back_fail_decw_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_less_or_equal_back_fail_decc_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_less_back_fail_decw_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_not_equal_back_fail_inv_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_equal_back_fail_keep_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_not_equal_back_fail_decw_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_always_back_fail_inv_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_greater_back_fail_zero_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_not_equal_back_fail_decw_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_always_back_fail_incc_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_greater_back_fail_zero_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_not_equal_back_fail_wrap_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_less_back_fail_decc_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_less_back_fail_incc_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_equal_back_fail_decc_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_always_back_fail_decc_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_less_back_fail_repl_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_equal_back_fail_incc_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_equal_back_fail_incc_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_less_back_fail_repl_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_less_back_fail_repl_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_always_back_fail_wrap_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_always_back_fail_wrap_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_less_or_equal_back_fail_incc_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_not_equal_back_fail_incc_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_equal_back_fail_decw_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_not_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_not_equal_back_fail_zero_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_equal_back_fail_decw_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_not_equal_back_fail_repl_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_not_equal_back_fail_zero_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_always_back_fail_incc_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_less_back_fail_decc_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_less_back_fail_decw_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_decc_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_not_equal_back_fail_repl_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_always_back_fail_repl_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_always_back_fail_inv_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_equal_back_fail_repl_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_not_equal_back_fail_wrap_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_always_back_fail_repl_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_never_back_fail_zero_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_greater_back_fail_decc_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_always_back_fail_inv_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_not_equal_back_fail_wrap_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_always_back_fail_wrap_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_never_back_fail_decc_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_never_back_fail_decc_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_always_back_fail_inv_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_always_back_fail_inv_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_greater_back_fail_incc_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_equal_back_fail_decw_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_greater_back_fail_repl_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_not_equal_back_fail_wrap_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_never_back_fail_zero_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_always_back_fail_keep_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_always_back_fail_zero_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_greater_back_fail_decw_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_always_back_fail_repl_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_less_back_fail_decw_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_repl_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_equal_back_fail_repl_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_not_equal_back_fail_repl_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_always_back_fail_wrap_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_equal_back_fail_decw_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_never_back_fail_decw_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_equal_back_fail_decc_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_less_back_fail_repl_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_greater_back_fail_zero_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_always_back_fail_incc_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_less_back_fail_zero_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_never_back_fail_repl_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_greater_back_fail_keep_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_less_or_equal_back_fail_decc_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_less_back_fail_incc_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_never_back_fail_zero_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_never_back_fail_decw_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_greater_back_fail_inv_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_not_equal_back_fail_keep_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_less_back_fail_decc_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_never_back_fail_incc_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_not_equal_back_fail_zero_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_less_back_fail_wrap_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_always_back_fail_decw_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_greater_back_fail_decc_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_less_or_equal_back_fail_incc_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_never_back_fail_keep_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_never_back_fail_keep_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_equal_back_fail_keep_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_greater_back_fail_zero_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_less_back_fail_incc_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_always_back_fail_inv_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_always_back_fail_zero_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_less_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_not_equal_back_fail_keep_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_greater_back_fail_decw_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_equal_back_fail_zero_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_never_back_fail_zero_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_equal_back_fail_keep_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_less_back_fail_incc_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_less_or_equal_back_fail_repl_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_greater_back_fail_inv_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_equal_back_fail_wrap_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_less_back_fail_zero_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_less_back_fail_incc_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_less_or_equal_back_fail_keep_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_always_back_fail_decw_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_equal_back_fail_keep_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_always_back_fail_repl_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_greater_back_fail_zero_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_greater_back_fail_repl_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_less_or_equal_back_fail_repl_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_less_back_fail_incc_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_equal_back_fail_zero_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_not_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_always_back_fail_wrap_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_greater_back_fail_inv_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_less_back_fail_inv_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_greater_back_fail_zero_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_less_or_equal_back_fail_zero_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_greater_back_fail_inv_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_less_or_equal_back_fail_zero_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_less_back_fail_inv_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_less_back_fail_decc_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_less_back_fail_inv_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_less_back_fail_incc_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_greater_back_fail_repl_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_always_back_fail_wrap_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_always_back_fail_repl_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_equal_back_fail_zero_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_always_back_fail_inv_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_never_back_fail_inv_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_always_back_fail_decw_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_less_or_equal_back_fail_decc_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_always_back_fail_zero_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_greater_back_fail_decw_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_never_back_fail_decc_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_not_equal_back_fail_inv_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_greater_back_fail_inv_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_never_back_fail_decc_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_equal_back_fail_zero_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_not_equal_back_fail_decw_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_less_back_fail_incc_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_less_or_equal_back_fail_keep_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_less_or_equal_back_fail_zero_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_equal_back_fail_inv_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_always_back_fail_zero_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_greater_back_fail_inv_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_not_equal_back_fail_zero_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_equal_back_fail_wrap_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_not_equal_back_fail_wrap_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_greater_back_fail_wrap_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_always_back_fail_zero_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_equal_back_fail_incc_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_greater_back_fail_wrap_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_never_back_fail_incc_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_less_back_fail_wrap_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_never_back_fail_zero_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_equal_back_fail_inv_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_never_back_fail_keep_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_not_equal_back_fail_keep_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_never_back_fail_inv_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_less_back_fail_wrap_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_always_back_fail_decc_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_not_equal_back_fail_wrap_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_greater_back_fail_decw_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_never_back_fail_wrap_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_greater_back_fail_wrap_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_never_back_fail_decc_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_greater_back_fail_zero_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_greater_back_fail_decw_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_equal_back_fail_wrap_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_equal_back_fail_wrap_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_equal_back_fail_inv_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_never_back_fail_repl_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_always_back_fail_inv_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_never_back_fail_incc_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_not_equal_back_fail_wrap_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_not_equal_back_fail_wrap_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_greater_back_fail_wrap_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_never_back_fail_wrap_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_not_equal_back_fail_incc_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_greater_back_fail_repl_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_less_back_fail_keep_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_not_equal_back_fail_incc_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_less_back_fail_repl_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_not_equal_back_fail_decw_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_not_equal_back_fail_inv_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_equal_back_fail_keep_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_always_back_fail_decw_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_never_back_fail_inv_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_greater_back_fail_keep_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_less_back_fail_incc_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_greater_back_fail_wrap_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_equal_back_fail_decw_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_never_back_fail_incc_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_not_equal_back_fail_wrap_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_always_back_fail_decw_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_greater_back_fail_inv_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_always_back_fail_incc_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_less_or_equal_back_fail_repl_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_equal_back_fail_zero_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_greater_back_fail_zero_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_greater_back_fail_zero_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_less_back_fail_inv_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_less_back_fail_zero_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_equal_back_fail_decc_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_less_back_fail_keep_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_greater_back_fail_decc_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_equal_back_fail_incc_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_greater_back_fail_decc_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_never_back_fail_repl_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_less_or_equal_back_fail_zero_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_less_or_equal_back_fail_inv_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_equal_back_fail_wrap_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_never_back_fail_incc_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_greater_back_fail_incc_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_never_back_fail_decc_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_equal_back_fail_wrap_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_less_back_fail_zero_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_equal_back_fail_decc_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_always_back_fail_keep_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_equal_back_fail_repl_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_less_back_fail_wrap_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_greater_back_fail_decc_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decc_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_never_back_fail_decw_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_equal_back_fail_incc_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_greater_back_fail_repl_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_always_back_fail_repl_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_not_equal_back_fail_decw_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_always_back_fail_repl_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_greater_back_fail_incc_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_always_back_fail_zero_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_equal_back_fail_wrap_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_equal_back_fail_repl_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_less_back_fail_decw_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_always_back_fail_decw_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_less_back_fail_keep_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_always_back_fail_repl_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_always_back_fail_decw_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decc_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_less_back_fail_repl_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_greater_back_fail_decc_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_greater_back_fail_decw_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_always_back_fail_zero_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_not_equal_back_fail_zero_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_never_back_fail_keep_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_less_back_fail_decc_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_always_back_fail_zero_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_not_equal_back_fail_incc_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_always_back_fail_zero_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_less_back_fail_decw_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_not_equal_back_fail_inv_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_not_equal_back_fail_zero_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_always_back_fail_zero_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_never_back_fail_zero_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_not_equal_back_fail_incc_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_not_equal_back_fail_incc_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_less_back_fail_decw_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_less_back_fail_wrap_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_never_back_fail_repl_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_never_back_fail_inv_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_less_back_fail_repl_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_less_back_fail_keep_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_never_back_fail_inv_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_equal_back_fail_repl_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_never_back_fail_repl_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_not_equal_back_fail_decw_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_equal_back_fail_decc_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_always_back_fail_decc_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_equal_back_fail_decw_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_less_back_fail_zero_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_not_equal_back_fail_zero_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_always_back_fail_decw_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_not_equal_back_fail_wrap_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_less_back_fail_keep_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_greater_back_fail_decw_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_not_equal_back_fail_incc_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_greater_back_fail_decc_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_greater_back_fail_decc_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_always_back_fail_decw_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_less_back_fail_incc_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_equal_back_fail_decw_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_not_equal_back_fail_decc_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_not_equal_back_fail_decc_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_less_or_equal_back_fail_zero_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_less_back_fail_keep_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_never_back_fail_incc_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_not_equal_back_fail_wrap_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_not_equal_back_fail_wrap_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_less_or_equal_back_fail_inv_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_less_back_fail_decc_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_equal_back_fail_zero_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_greater_back_fail_keep_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_greater_back_fail_inv_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_not_equal_back_fail_repl_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_not_equal_back_fail_decc_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_greater_back_fail_keep_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_equal_back_fail_repl_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_equal_back_fail_repl_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_not_equal_back_fail_incc_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_greater_back_fail_inv_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_equal_back_fail_decw_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_less_back_fail_incc_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_always_back_fail_repl_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_not_equal_back_fail_zero_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_equal_back_fail_wrap_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_always_back_fail_zero_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_equal_back_fail_decw_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_greater_back_fail_keep_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_always_back_fail_inv_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_equal_back_fail_incc_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_never_back_fail_repl_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_greater_back_fail_wrap_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_greater_back_fail_wrap_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_equal_back_fail_zero_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_greater_back_fail_wrap_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_less_or_equal_back_fail_inv_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_never_back_fail_inv_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_always_back_fail_incc_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_greater_back_fail_incc_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_greater_back_fail_zero_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_greater_back_fail_keep_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_less_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_less_back_fail_zero_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_not_equal_back_fail_incc_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_greater_back_fail_zero_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_equal_back_fail_zero_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_greater_back_fail_decw_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_always_back_fail_inv_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_always_back_fail_inv_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_never_back_fail_decc_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_always_back_fail_wrap_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_never_back_fail_wrap_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_greater_back_fail_incc_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_greater_back_fail_zero_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_greater_back_fail_wrap_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_never_back_fail_decc_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_always_back_fail_incc_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_less_back_fail_keep_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_less_back_fail_wrap_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_less_back_fail_decw_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_never_back_fail_inv_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_equal_back_fail_keep_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_always_back_fail_decc_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_less_back_fail_wrap_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_less_back_fail_wrap_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_greater_back_fail_zero_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_less_back_fail_zero_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_always_back_fail_inv_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_not_equal_back_fail_zero_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_greater_back_fail_decw_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_equal_back_fail_keep_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_always_back_fail_incc_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_always_back_fail_repl_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_never_back_fail_repl_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_less_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_not_equal_back_fail_inv_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_equal_back_fail_keep_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_equal_back_fail_keep_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_equal_back_fail_decc_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_never_back_fail_inv_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_always_back_fail_inv_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_not_equal_back_fail_incc_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_equal_back_fail_inv_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_greater_back_fail_zero_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_less_back_fail_decw_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_always_back_fail_decc_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_always_back_fail_decw_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_always_back_fail_wrap_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_less_back_fail_incc_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_greater_back_fail_repl_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_less_back_fail_wrap_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_always_back_fail_wrap_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_less_back_fail_incc_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_always_back_fail_decw_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_greater_back_fail_decw_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_less_back_fail_inv_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_always_back_fail_inv_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_never_back_fail_wrap_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_equal_back_fail_repl_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_never_back_fail_repl_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_less_back_fail_decw_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_not_equal_back_fail_inv_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_always_back_fail_incc_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_equal_back_fail_incc_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_equal_back_fail_repl_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_always_back_fail_decw_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_less_back_fail_repl_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_equal_back_fail_decw_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_never_back_fail_keep_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_never_back_fail_zero_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_equal_back_fail_decw_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_less_or_equal_back_fail_repl_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_not_equal_back_fail_decw_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_not_equal_back_fail_repl_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_less_back_fail_inv_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_greater_back_fail_keep_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_equal_back_fail_incc_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_less_back_fail_keep_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_always_back_fail_incc_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_never_back_fail_wrap_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_never_back_fail_decw_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_less_back_fail_repl_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_equal_back_fail_zero_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_equal_back_fail_incc_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_always_back_fail_keep_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_less_or_equal_back_fail_zero_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_equal_back_fail_inv_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_never_back_fail_decc_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_less_back_fail_repl_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_less_back_fail_incc_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_greater_back_fail_zero_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_less_back_fail_repl_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_less_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_greater_back_fail_decc_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_less_or_equal_back_fail_decw_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_less_back_fail_zero_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_greater_back_fail_zero_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_not_equal_back_fail_zero_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_less_or_equal_back_fail_keep_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_less_or_equal_back_fail_repl_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_less_or_equal_back_fail_decc_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_never_back_fail_inv_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_less_or_equal_back_fail_incc_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_equal_back_fail_wrap_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_never_back_fail_keep_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_equal_back_fail_keep_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_always_back_fail_decw_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_greater_back_fail_decw_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_less_back_fail_incc_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_repl_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_not_equal_back_fail_wrap_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_always_back_fail_repl_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_not_equal_back_fail_decw_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_less_back_fail_incc_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_not_equal_back_fail_zero_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_greater_back_fail_repl_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_equal_back_fail_inv_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_always_back_fail_decw_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_greater_back_fail_decw_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_equal_back_fail_decc_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_not_equal_back_fail_repl_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_greater_back_fail_keep_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_greater_back_fail_inv_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_greater_back_fail_incc_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_equal_back_fail_wrap_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_never_back_fail_decw_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_always_back_fail_decw_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_greater_back_fail_decw_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_less_or_equal_back_fail_incc_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_never_back_fail_inv_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_always_back_fail_inv_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_always_back_fail_decw_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_greater_back_fail_decc_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_never_back_fail_decc_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_greater_back_fail_incc_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_never_back_fail_wrap_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_never_back_fail_keep_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_less_or_equal_back_fail_decc_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_less_or_equal_back_fail_repl_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_equal_back_fail_decc_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_greater_back_fail_repl_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_never_back_fail_keep_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_less_back_fail_keep_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_greater_back_fail_inv_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_never_back_fail_inv_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_equal_back_fail_repl_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_equal_back_fail_wrap_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_never_back_fail_zero_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_less_or_equal_back_fail_decw_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_equal_back_fail_incc_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_less_back_fail_zero_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_never_back_fail_decc_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_less_or_equal_back_fail_incc_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_always_back_fail_zero_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_less_back_fail_repl_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_less_back_fail_decw_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_always_back_fail_decc_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_never_back_fail_incc_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_equal_back_fail_keep_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_equal_back_fail_inv_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_less_back_fail_zero_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_always_back_fail_repl_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_never_back_fail_repl_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_always_back_fail_wrap_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_never_back_fail_repl_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_never_back_fail_decw_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_greater_back_fail_zero_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_equal_back_fail_zero_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_greater_back_fail_repl_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_equal_back_fail_keep_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_greater_back_fail_repl_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_less_back_fail_incc_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_not_equal_back_fail_repl_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_less_back_fail_repl_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_always_back_fail_inv_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_equal_back_fail_decw_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_not_equal_back_fail_decw_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_greater_back_fail_incc_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_never_back_fail_zero_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_less_back_fail_wrap_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_always_back_fail_decc_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_equal_back_fail_zero_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_greater_back_fail_inv_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_not_equal_back_fail_inv_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_always_back_fail_inv_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_always_back_fail_decc_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_less_back_fail_wrap_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_less_back_fail_decc_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_less_back_fail_repl_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_less_back_fail_inv_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_always_back_fail_zero_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_greater_back_fail_zero_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decw_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_equal_back_fail_zero_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_not_equal_back_fail_wrap_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_less_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_greater_back_fail_decw_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_never_back_fail_decw_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_always_back_fail_inv_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_not_equal_back_fail_decc_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_less_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_never_back_fail_decc_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_less_or_equal_back_fail_inv_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_less_or_equal_back_fail_zero_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_never_back_fail_inv_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_always_back_fail_decc_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_less_back_fail_repl_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_greater_back_fail_wrap_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_not_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_always_back_fail_keep_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_not_equal_back_fail_wrap_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_not_equal_back_fail_zero_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_less_or_equal_back_fail_decw_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_less_back_fail_decc_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_never_back_fail_incc_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_greater_back_fail_repl_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_not_equal_back_fail_wrap_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_never_back_fail_decw_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_less_back_fail_keep_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_less_back_fail_inv_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_not_equal_back_fail_repl_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_greater_back_fail_wrap_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_less_back_fail_decc_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_always_back_fail_decw_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_greater_back_fail_incc_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_less_back_fail_wrap_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_not_equal_back_fail_keep_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_always_back_fail_inv_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_not_equal_back_fail_decc_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_less_back_fail_keep_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_never_back_fail_decw_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_less_back_fail_decw_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_greater_back_fail_inv_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_less_back_fail_repl_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_not_equal_back_fail_decc_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_greater_back_fail_repl_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_equal_back_fail_decw_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_less_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_not_equal_back_fail_keep_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_never_back_fail_decc_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_not_equal_back_fail_repl_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_never_back_fail_zero_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_less_back_fail_incc_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_not_equal_back_fail_keep_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_never_back_fail_wrap_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_greater_back_fail_keep_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_equal_back_fail_keep_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_less_back_fail_decc_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_less_or_equal_back_fail_zero_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_never_back_fail_decw_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_always_back_fail_decw_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_less_or_equal_back_fail_zero_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_greater_back_fail_repl_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_never_back_fail_inv_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_not_equal_back_fail_zero_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_equal_back_fail_keep_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_less_back_fail_decc_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_always_back_fail_zero_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_not_equal_back_fail_repl_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_less_back_fail_wrap_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_greater_back_fail_decc_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_less_back_fail_decc_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_never_back_fail_wrap_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_never_back_fail_wrap_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_less_back_fail_inv_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_not_equal_back_fail_inv_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_always_back_fail_zero_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_less_back_fail_inv_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_always_back_fail_keep_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_always_back_fail_incc_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_not_equal_back_fail_decc_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_less_or_equal_back_fail_repl_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_less_back_fail_keep_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_less_back_fail_wrap_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_never_back_fail_decc_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_greater_back_fail_zero_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_less_back_fail_keep_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_equal_back_fail_wrap_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_not_equal_back_fail_decw_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_equal_back_fail_inv_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_always_back_fail_repl_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_equal_back_fail_inv_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_always_back_fail_decc_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_not_equal_back_fail_incc_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_less_back_fail_keep_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_less_back_fail_decc_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_never_back_fail_keep_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_less_back_fail_wrap_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_greater_back_fail_keep_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_never_back_fail_keep_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_equal_back_fail_repl_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_less_back_fail_zero_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_not_equal_back_fail_decc_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_equal_back_fail_zero_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_greater_back_fail_wrap_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_greater_back_fail_decw_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_never_back_fail_incc_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_always_back_fail_decw_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_not_equal_back_fail_decw_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_less_back_fail_repl_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_never_back_fail_incc_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_never_back_fail_repl_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_greater_back_fail_incc_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_greater_back_fail_incc_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_always_back_fail_decc_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_equal_back_fail_wrap_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_less_back_fail_inv_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_never_back_fail_keep_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_not_equal_back_fail_inv_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_less_back_fail_decw_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_always_back_fail_inv_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_greater_back_fail_incc_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_always_back_fail_inv_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_always_back_fail_incc_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_not_equal_back_fail_keep_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_not_equal_back_fail_inv_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_not_equal_back_fail_decc_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_never_back_fail_decw_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_not_equal_back_fail_decc_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_always_back_fail_incc_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_always_back_fail_keep_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_equal_back_fail_zero_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_not_equal_back_fail_incc_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_equal_back_fail_inv_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_not_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_equal_back_fail_keep_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_never_back_fail_wrap_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_equal_back_fail_incc_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_always_back_fail_inv_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_greater_back_fail_wrap_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_always_back_fail_decw_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_always_back_fail_repl_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_always_back_fail_inv_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_equal_back_fail_keep_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_greater_back_fail_incc_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_not_equal_back_fail_inv_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_less_or_equal_back_fail_incc_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_equal_back_fail_keep_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_never_back_fail_wrap_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_never_back_fail_wrap_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_equal_back_fail_decw_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_never_back_fail_incc_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_always_back_fail_decw_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_equal_back_fail_keep_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_never_back_fail_keep_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_not_equal_back_fail_inv_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_less_or_equal_back_fail_incc_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_equal_back_fail_keep_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_always_back_fail_decc_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_less_back_fail_decw_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_always_back_fail_inv_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_less_back_fail_keep_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_equal_back_fail_wrap_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_less_back_fail_inv_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_equal_back_fail_incc_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_never_back_fail_wrap_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_less_back_fail_decw_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_not_equal_back_fail_keep_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_never_back_fail_zero_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_not_equal_back_fail_decc_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_equal_back_fail_wrap_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_not_equal_back_fail_zero_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_equal_back_fail_repl_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_always_back_fail_zero_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_less_back_fail_decc_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_less_or_equal_back_fail_decw_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_greater_back_fail_repl_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_never_back_fail_decw_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_equal_back_fail_decc_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_equal_back_fail_incc_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_equal_back_fail_incc_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_not_equal_back_fail_incc_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_never_back_fail_inv_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_greater_back_fail_zero_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_greater_back_fail_decc_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_never_back_fail_keep_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_equal_back_fail_inv_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_always_back_fail_decc_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_less_or_equal_back_fail_decc_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_always_back_fail_inv_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_greater_back_fail_inv_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_always_back_fail_keep_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_not_equal_back_fail_zero_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_less_back_fail_wrap_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_greater_back_fail_repl_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_equal_back_fail_zero_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_less_back_fail_decw_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_greater_back_fail_keep_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_never_back_fail_wrap_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_equal_back_fail_decw_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_equal_back_fail_zero_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_greater_back_fail_zero_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_never_back_fail_incc_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_never_back_fail_repl_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_always_back_fail_incc_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_never_back_fail_decw_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_greater_back_fail_decc_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_greater_back_fail_decw_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_less_back_fail_zero_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_less_or_equal_back_fail_inv_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_not_equal_back_fail_incc_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_always_back_fail_keep_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_equal_back_fail_incc_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_less_back_fail_inv_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_always_back_fail_wrap_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_always_back_fail_decc_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_less_back_fail_repl_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_not_equal_back_fail_keep_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_not_equal_back_fail_keep_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_less_or_equal_back_fail_repl_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_equal_back_fail_incc_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_never_back_fail_decw_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_always_back_fail_repl_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_not_equal_back_fail_keep_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_not_equal_back_fail_decc_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_not_equal_back_fail_wrap_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_less_or_equal_back_fail_incc_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_equal_back_fail_decc_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_greater_back_fail_inv_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_greater_back_fail_keep_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_greater_back_fail_decw_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_not_equal_back_fail_inv_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_greater_back_fail_incc_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_never_back_fail_decc_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_not_equal_back_fail_repl_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_greater_back_fail_repl_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_never_back_fail_inv_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_greater_back_fail_zero_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_not_equal_back_fail_inv_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_less_back_fail_wrap_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_greater_back_fail_incc_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_not_equal_back_fail_decw_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_not_equal_back_fail_repl_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_less_back_fail_zero_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_not_equal_back_fail_wrap_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_not_equal_back_fail_decc_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_never_back_fail_incc_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_equal_back_fail_incc_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_not_equal_back_fail_repl_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_less_or_equal_back_fail_repl_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_less_back_fail_decc_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_equal_back_fail_keep_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_less_back_fail_decc_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_always_back_fail_zero_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_always_back_fail_wrap_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_never_back_fail_decc_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_greater_back_fail_wrap_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_less_back_fail_inv_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_always_back_fail_wrap_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_equal_back_fail_decc_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_less_back_fail_incc_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_never_back_fail_repl_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_never_back_fail_decc_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_always_back_fail_decc_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_greater_back_fail_zero_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_not_equal_back_fail_repl_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_equal_back_fail_incc_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_equal_back_fail_incc_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_always_back_fail_zero_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_never_back_fail_repl_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_never_back_fail_repl_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_greater_back_fail_decc_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_equal_back_fail_zero_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_less_back_fail_repl_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_never_back_fail_keep_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_not_equal_back_fail_inv_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_never_back_fail_decc_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_not_equal_back_fail_incc_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_never_back_fail_zero_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_equal_back_fail_inv_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_always_back_fail_inv_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_greater_back_fail_wrap_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_less_back_fail_inv_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_greater_back_fail_inv_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_less_back_fail_zero_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_equal_back_fail_decc_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_always_back_fail_decw_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_always_back_fail_repl_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_never_back_fail_repl_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_less_or_equal_back_fail_zero_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_not_equal_back_fail_wrap_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_less_or_equal_back_fail_keep_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_always_back_fail_keep_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_always_back_fail_decc_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_equal_back_fail_repl_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_greater_back_fail_inv_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_less_back_fail_decw_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_greater_back_fail_wrap_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_never_back_fail_keep_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_less_back_fail_keep_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_less_back_fail_repl_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_never_back_fail_inv_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_less_back_fail_repl_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_less_back_fail_wrap_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_less_or_equal_back_fail_incc_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_greater_back_fail_decc_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_not_equal_back_fail_zero_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_greater_back_fail_repl_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_never_back_fail_decw_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_less_back_fail_decw_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_greater_back_fail_incc_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_less_or_equal_back_fail_keep_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_less_back_fail_zero_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_never_back_fail_wrap_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_less_back_fail_decw_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_greater_back_fail_repl_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_equal_back_fail_decw_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_never_back_fail_repl_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_equal_back_fail_keep_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_greater_back_fail_decw_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_not_equal_back_fail_incc_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_not_equal_back_fail_inv_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_greater_back_fail_incc_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_never_back_fail_wrap_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_less_back_fail_repl_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_less_or_equal_back_fail_repl_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_never_back_fail_zero_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_greater_back_fail_decw_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_always_back_fail_decc_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_not_equal_back_fail_keep_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_less_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_greater_back_fail_repl_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_always_back_fail_decc_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_never_back_fail_zero_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_never_back_fail_wrap_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_less_back_fail_inv_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_less_or_equal_back_fail_repl_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_not_equal_back_fail_incc_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_less_back_fail_keep_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_not_equal_back_fail_inv_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_never_back_fail_wrap_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_equal_back_fail_incc_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_not_equal_back_fail_repl_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_equal_back_fail_wrap_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_never_back_fail_inv_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_never_back_fail_incc_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_never_back_fail_zero_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_never_back_fail_repl_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_always_back_fail_decw_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_less_back_fail_inv_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_not_equal_back_fail_repl_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_greater_back_fail_inv_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_less_back_fail_zero_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_less_back_fail_decw_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_less_back_fail_decw_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_always_back_fail_decc_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_less_or_equal_back_fail_repl_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_always_back_fail_inv_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_always_back_fail_inv_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_not_equal_back_fail_keep_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_less_back_fail_incc_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_less_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_never_back_fail_keep_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_greater_back_fail_zero_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_less_or_equal_back_fail_decw_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_not_equal_back_fail_keep_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_never_back_fail_decw_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_always_back_fail_zero_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_not_equal_back_fail_keep_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_never_back_fail_inv_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_not_equal_back_fail_decc_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_always_back_fail_inv_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_decc_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_never_back_fail_inv_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_never_back_fail_zero_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_less_back_fail_wrap_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_equal_back_fail_decw_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_not_equal_back_fail_keep_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_never_back_fail_inv_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_never_back_fail_repl_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_not_equal_back_fail_incc_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_always_back_fail_decw_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_always_back_fail_inv_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_less_back_fail_decc_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_never_back_fail_zero_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_not_equal_back_fail_inv_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_not_equal_back_fail_inv_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_always_back_fail_keep_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_greater_back_fail_decw_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_not_equal_back_fail_wrap_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_less_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_greater_back_fail_decc_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_greater_back_fail_decc_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_not_equal_back_fail_zero_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_greater_back_fail_incc_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_less_back_fail_repl_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_equal_back_fail_inv_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_less_or_equal_back_fail_incc_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_equal_back_fail_decc_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_always_back_fail_inv_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_never_back_fail_wrap_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_not_equal_back_fail_keep_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_always_back_fail_decw_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_never_back_fail_decc_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_never_back_fail_incc_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_never_back_fail_decw_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_less_back_fail_wrap_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_never_back_fail_keep_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_greater_back_fail_decw_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_never_back_fail_incc_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_less_back_fail_keep_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_equal_back_fail_inv_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_greater_back_fail_decc_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_equal_back_fail_zero_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_less_back_fail_wrap_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_never_back_fail_incc_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_less_back_fail_repl_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_equal_back_fail_repl_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_not_equal_back_fail_keep_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_less_back_fail_wrap_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_greater_back_fail_repl_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_less_back_fail_incc_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_greater_back_fail_repl_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_always_back_fail_repl_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_never_back_fail_decc_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_never_back_fail_repl_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_never_back_fail_decc_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_not_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_greater_back_fail_keep_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_never_back_fail_decw_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_greater_back_fail_incc_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_less_back_fail_inv_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_never_back_fail_decw_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_less_back_fail_wrap_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_never_back_fail_zero_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_less_back_fail_decw_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_not_equal_back_fail_wrap_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_greater_back_fail_decw_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_greater_back_fail_zero_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_not_equal_back_fail_zero_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_equal_back_fail_decc_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_never_back_fail_zero_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_never_back_fail_incc_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_equal_back_fail_incc_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_never_back_fail_incc_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_equal_back_fail_wrap_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_not_equal_back_fail_repl_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_not_equal_back_fail_decc_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_equal_back_fail_decc_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_greater_back_fail_repl_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_equal_back_fail_decc_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_less_back_fail_incc_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_not_equal_back_fail_decc_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_never_back_fail_zero_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_greater_back_fail_incc_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_equal_back_fail_keep_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_never_back_fail_keep_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_less_back_fail_repl_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_greater_back_fail_repl_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_less_back_fail_wrap_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_never_back_fail_zero_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_equal_back_fail_repl_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_not_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_not_equal_back_fail_decc_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_less_back_fail_repl_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_always_back_fail_wrap_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_equal_back_fail_repl_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_never_back_fail_decw_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_not_equal_back_fail_repl_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_less_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_never_back_fail_decc_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_never_back_fail_keep_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_equal_back_fail_incc_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_less_back_fail_zero_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_less_back_fail_inv_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_not_equal_back_fail_keep_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_equal_back_fail_incc_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_less_back_fail_decw_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_less_back_fail_decw_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_not_equal_back_fail_repl_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_always_back_fail_wrap_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_not_equal_back_fail_decc_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_always_back_fail_keep_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_equal_back_fail_wrap_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_always_back_fail_wrap_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_less_or_equal_back_fail_incc_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_greater_back_fail_decw_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_equal_back_fail_wrap_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_not_equal_back_fail_inv_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_equal_back_fail_decw_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_not_equal_back_fail_incc_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_less_back_fail_decw_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_never_back_fail_incc_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_greater_back_fail_zero_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_not_equal_back_fail_decc_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_less_back_fail_repl_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_less_back_fail_keep_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_never_back_fail_decw_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_less_or_equal_back_fail_decc_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_not_equal_back_fail_repl_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_always_back_fail_zero_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_less_or_equal_back_fail_decw_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_always_back_fail_wrap_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_equal_back_fail_keep_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_less_back_fail_decw_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_not_equal_back_fail_decc_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_equal_back_fail_repl_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_always_back_fail_wrap_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_always_back_fail_wrap_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_not_equal_back_fail_incc_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_always_back_fail_decw_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_less_back_fail_decw_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_less_or_equal_back_fail_decc_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_less_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_not_equal_back_fail_decw_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_less_back_fail_decw_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_less_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_greater_back_fail_keep_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_never_back_fail_decw_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_less_back_fail_inv_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_never_back_fail_repl_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_less_back_fail_zero_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_less_back_fail_zero_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_equal_back_fail_decw_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_greater_back_fail_repl_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_never_back_fail_keep_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_repl_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_never_back_fail_zero_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_not_equal_back_fail_incc_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_never_back_fail_zero_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_never_back_fail_decc_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_less_back_fail_zero_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_less_back_fail_decw_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_less_back_fail_zero_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_less_back_fail_inv_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_greater_back_fail_zero_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_not_equal_back_fail_inv_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_greater_back_fail_wrap_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_greater_back_fail_keep_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_always_back_fail_decc_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_never_back_fail_incc_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_equal_back_fail_zero_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_not_equal_back_fail_zero_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_not_equal_back_fail_keep_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_less_back_fail_decw_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_less_back_fail_repl_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_less_back_fail_incc_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_never_back_fail_wrap_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_keep_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_greater_back_fail_incc_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_less_back_fail_keep_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_less_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_less_back_fail_decc_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_equal_back_fail_incc_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_never_back_fail_decw_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_always_back_fail_decc_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_always_back_fail_wrap_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_always_back_fail_repl_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_greater_back_fail_zero_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_never_back_fail_inv_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_less_back_fail_repl_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_not_equal_back_fail_keep_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_always_back_fail_repl_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_not_equal_back_fail_decc_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_never_back_fail_wrap_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_always_back_fail_incc_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_not_equal_back_fail_keep_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_always_back_fail_decc_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_never_back_fail_keep_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_never_back_fail_wrap_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_greater_back_fail_zero_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_not_equal_back_fail_inv_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_less_back_fail_keep_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_not_equal_back_fail_inv_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_not_equal_back_fail_repl_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_equal_back_fail_decw_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_not_equal_back_fail_wrap_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_less_back_fail_inv_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_greater_back_fail_decw_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_greater_back_fail_incc_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_equal_back_fail_wrap_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_not_equal_back_fail_inv_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_equal_back_fail_inv_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_less_back_fail_keep_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_greater_back_fail_inv_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_not_equal_back_fail_decc_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_less_back_fail_zero_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_never_back_fail_incc_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_less_back_fail_decc_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_greater_back_fail_decc_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_always_back_fail_decc_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_not_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_always_back_fail_decc_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_equal_back_fail_repl_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_equal_back_fail_zero_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_always_back_fail_decw_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_always_back_fail_decc_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_always_back_fail_decc_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_less_back_fail_keep_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_not_equal_back_fail_wrap_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_not_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_never_back_fail_zero_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_greater_back_fail_inv_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_greater_back_fail_decc_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_less_back_fail_zero_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_not_equal_back_fail_incc_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_less_back_fail_decw_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_always_back_fail_incc_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_greater_back_fail_repl_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_not_equal_back_fail_zero_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_not_equal_back_fail_wrap_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_never_back_fail_inv_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_always_back_fail_wrap_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_equal_back_fail_repl_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_less_back_fail_decc_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_never_back_fail_decw_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_never_back_fail_decc_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_equal_back_fail_zero_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_not_equal_back_fail_incc_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_not_equal_back_fail_zero_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_equal_back_fail_decc_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_not_equal_back_fail_decw_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_always_back_fail_keep_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_equal_back_fail_keep_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_not_equal_back_fail_zero_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_greater_back_fail_decw_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_not_equal_back_fail_repl_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_never_back_fail_inv_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_less_back_fail_repl_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decw_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_always_back_fail_repl_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_less_back_fail_incc_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_equal_back_fail_inv_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_less_back_fail_keep_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_never_back_fail_repl_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_less_back_fail_keep_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decc_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_always_back_fail_decw_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_less_or_equal_back_fail_incc_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_not_equal_back_fail_repl_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_not_equal_back_fail_decc_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_greater_back_fail_incc_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_greater_back_fail_inv_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_less_back_fail_decc_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_greater_back_fail_inv_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_never_back_fail_decc_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_always_back_fail_decw_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_equal_back_fail_repl_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_less_back_fail_repl_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_equal_back_fail_decc_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_less_back_fail_repl_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_always_back_fail_decc_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_always_back_fail_zero_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_equal_back_fail_decw_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_not_equal_back_fail_incc_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_equal_back_fail_decc_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_less_or_equal_back_fail_keep_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_always_back_fail_decc_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_greater_back_fail_decc_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_always_back_fail_wrap_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_less_back_fail_decc_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_less_back_fail_wrap_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_greater_back_fail_repl_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_less_or_equal_back_fail_keep_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_not_equal_back_fail_wrap_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_never_back_fail_decw_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_always_back_fail_decc_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_always_back_fail_inv_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_always_back_fail_keep_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_greater_back_fail_zero_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_never_back_fail_keep_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_always_back_fail_repl_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_equal_back_fail_incc_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_not_equal_back_fail_decw_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_less_back_fail_decw_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_greater_back_fail_decw_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_greater_back_fail_keep_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_greater_back_fail_zero_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_greater_back_fail_inv_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_less_back_fail_decc_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_always_back_fail_inv_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_never_back_fail_decw_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_never_back_fail_wrap_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_greater_back_fail_decw_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_greater_back_fail_incc_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_not_equal_back_fail_wrap_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_greater_back_fail_inv_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_greater_back_fail_incc_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_not_equal_back_fail_decw_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_greater_back_fail_wrap_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_less_back_fail_decw_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_less_back_fail_incc_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_always_back_fail_decc_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_always_back_fail_repl_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_decc_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_not_equal_back_fail_decw_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_never_back_fail_wrap_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_never_back_fail_incc_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_never_back_fail_inv_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_never_back_fail_repl_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_not_equal_back_fail_wrap_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_never_back_fail_decw_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_always_back_fail_repl_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_less_back_fail_keep_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_always_back_fail_inv_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_always_back_fail_keep_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_never_back_fail_wrap_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_not_equal_back_fail_zero_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_less_back_fail_repl_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_always_back_fail_zero_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_less_back_fail_incc_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_equal_back_fail_zero_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_equal_back_fail_decc_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_greater_back_fail_decw_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_always_back_fail_repl_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_always_back_fail_wrap_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_equal_back_fail_decc_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_never_back_fail_wrap_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_greater_back_fail_inv_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_always_back_fail_keep_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_not_equal_back_fail_incc_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_less_back_fail_incc_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_greater_back_fail_wrap_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_less_back_fail_decw_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_greater_back_fail_decc_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_greater_back_fail_wrap_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_incc_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_always_back_fail_decw_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_equal_back_fail_inv_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_greater_back_fail_inv_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_never_back_fail_zero_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_less_back_fail_decw_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_greater_back_fail_repl_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_equal_back_fail_decw_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_greater_back_fail_wrap_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_greater_back_fail_decc_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_greater_back_fail_inv_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_not_equal_back_fail_zero_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_not_equal_back_fail_keep_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_not_equal_back_fail_decw_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_equal_back_fail_keep_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_equal_back_fail_keep_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_always_back_fail_zero_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_never_back_fail_zero_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_always_back_fail_incc_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_greater_back_fail_decw_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_equal_back_fail_decc_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_greater_back_fail_decw_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_less_back_fail_keep_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_equal_back_fail_incc_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_never_back_fail_zero_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_less_back_fail_inv_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_always_back_fail_keep_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_always_back_fail_decc_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_equal_back_fail_incc_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_less_back_fail_zero_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_less_back_fail_zero_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_less_or_equal_back_fail_incc_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_always_back_fail_zero_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_equal_back_fail_zero_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_equal_back_fail_decc_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_never_back_fail_inv_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_greater_back_fail_keep_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_greater_back_fail_incc_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_not_equal_back_fail_repl_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_never_back_fail_decw_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_less_back_fail_decw_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_equal_back_fail_inv_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_not_equal_back_fail_wrap_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_equal_back_fail_inv_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_not_equal_back_fail_decc_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_never_back_fail_wrap_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_less_or_equal_back_fail_keep_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_greater_back_fail_wrap_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_equal_back_fail_wrap_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_less_back_fail_decw_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_greater_back_fail_keep_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_less_back_fail_decc_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_equal_back_fail_repl_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_always_back_fail_decw_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_never_back_fail_zero_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_greater_back_fail_zero_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_less_back_fail_incc_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_equal_back_fail_wrap_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_equal_back_fail_keep_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_not_equal_back_fail_keep_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_not_equal_back_fail_inv_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_always_back_fail_zero_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_greater_back_fail_wrap_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_less_back_fail_incc_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_less_back_fail_keep_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_greater_back_fail_zero_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_greater_back_fail_repl_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_less_back_fail_repl_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_always_back_fail_inv_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_less_or_equal_back_fail_decw_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_equal_back_fail_repl_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_equal_back_fail_wrap_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_equal_back_fail_incc_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_equal_back_fail_zero_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_less_or_equal_back_fail_decw_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_not_equal_back_fail_repl_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_always_back_fail_keep_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_equal_back_fail_keep_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_always_back_fail_inv_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_not_equal_back_fail_decc_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_equal_back_fail_decw_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_less_or_equal_back_fail_keep_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_always_back_fail_incc_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_not_equal_back_fail_inv_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_less_back_fail_decw_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_never_back_fail_wrap_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_greater_back_fail_decw_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_less_back_fail_repl_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_greater_back_fail_wrap_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_less_or_equal_back_fail_repl_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_not_equal_back_fail_wrap_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_less_back_fail_decc_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_greater_back_fail_decc_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_greater_back_fail_repl_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_never_back_fail_zero_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_less_or_equal_back_fail_decc_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_less_back_fail_wrap_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_not_equal_back_fail_keep_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_greater_back_fail_repl_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_not_equal_back_fail_repl_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_less_back_fail_decw_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_less_back_fail_wrap_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_not_equal_back_fail_wrap_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_always_back_fail_wrap_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_less_back_fail_decw_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_less_back_fail_wrap_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_always_back_fail_wrap_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_equal_back_fail_decc_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_less_or_equal_back_fail_decw_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_never_back_fail_keep_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_greater_back_fail_incc_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_equal_back_fail_zero_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_not_equal_back_fail_incc_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_equal_back_fail_keep_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_less_back_fail_zero_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_not_equal_back_fail_inv_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_less_back_fail_inv_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_equal_back_fail_zero_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decw_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_less_back_fail_decc_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_not_equal_back_fail_incc_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_equal_back_fail_decc_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_not_equal_back_fail_repl_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_not_equal_back_fail_inv_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_always_back_fail_wrap_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_equal_back_fail_inv_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_equal_back_fail_wrap_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_greater_back_fail_inv_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_less_back_fail_wrap_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_less_back_fail_decc_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_less_back_fail_incc_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_less_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_less_back_fail_repl_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_always_back_fail_decw_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_equal_back_fail_repl_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_not_equal_back_fail_zero_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_equal_back_fail_zero_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_equal_back_fail_inv_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_not_equal_back_fail_wrap_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_always_back_fail_wrap_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_greater_back_fail_decw_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_not_equal_back_fail_incc_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_less_back_fail_decc_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_never_back_fail_decw_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_greater_back_fail_zero_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_less_or_equal_back_fail_zero_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_equal_back_fail_keep_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_always_back_fail_incc_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_greater_back_fail_decw_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_not_equal_back_fail_keep_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_equal_back_fail_decw_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_less_back_fail_keep_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_greater_back_fail_incc_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_never_back_fail_wrap_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_greater_back_fail_decw_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_not_equal_back_fail_zero_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_not_equal_back_fail_wrap_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_never_back_fail_zero_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_not_equal_back_fail_decw_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_equal_back_fail_inv_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_not_equal_back_fail_repl_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_equal_back_fail_decw_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_always_back_fail_inv_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_less_or_equal_back_fail_repl_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_equal_back_fail_decw_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_always_back_fail_wrap_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_equal_back_fail_decw_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_always_back_fail_inv_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_less_or_equal_back_fail_incc_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_always_back_fail_inv_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_less_or_equal_back_fail_decw_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decc_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_less_back_fail_decw_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_never_back_fail_inv_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_less_back_fail_keep_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_not_equal_back_fail_decw_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_less_back_fail_inv_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_not_equal_back_fail_repl_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_equal_back_fail_zero_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_never_back_fail_keep_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_equal_back_fail_inv_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_never_back_fail_decc_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_equal_back_fail_incc_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_always_back_fail_incc_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_less_back_fail_inv_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_less_or_equal_back_fail_inv_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_always_back_fail_zero_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_greater_back_fail_incc_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_less_back_fail_decc_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_equal_back_fail_wrap_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_greater_back_fail_repl_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_not_equal_back_fail_inv_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_never_back_fail_keep_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_never_back_fail_incc_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_less_back_fail_incc_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_less_or_equal_back_fail_decc_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_greater_back_fail_repl_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_equal_back_fail_repl_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_always_back_fail_incc_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_less_back_fail_inv_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_not_equal_back_fail_decc_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_never_back_fail_decc_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_greater_back_fail_repl_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_not_equal_back_fail_zero_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_equal_back_fail_repl_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_never_back_fail_inv_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_less_back_fail_decw_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_less_or_equal_back_fail_decw_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_not_equal_back_fail_repl_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_greater_back_fail_repl_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_always_back_fail_repl_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_always_back_fail_decc_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_never_back_fail_keep_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_equal_back_fail_keep_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_equal_back_fail_keep_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_never_back_fail_decc_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_never_back_fail_decw_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_never_back_fail_decw_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_not_equal_back_fail_repl_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_greater_back_fail_zero_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_less_back_fail_decc_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_greater_back_fail_repl_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_always_back_fail_zero_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_greater_back_fail_decc_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_less_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_greater_back_fail_repl_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_equal_back_fail_incc_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_equal_back_fail_inv_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_greater_back_fail_repl_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_less_back_fail_decc_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_not_equal_back_fail_decc_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_less_back_fail_zero_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_never_back_fail_decc_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_not_equal_back_fail_decc_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_always_back_fail_inv_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_not_equal_back_fail_decc_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_less_back_fail_decw_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_equal_back_fail_incc_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_greater_back_fail_zero_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_greater_back_fail_zero_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_less_back_fail_zero_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_equal_back_fail_decc_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_not_equal_back_fail_decc_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_always_back_fail_repl_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_never_back_fail_decc_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_not_equal_back_fail_decc_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_greater_back_fail_wrap_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_always_back_fail_wrap_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_never_back_fail_decw_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_not_equal_back_fail_incc_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_always_back_fail_zero_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_equal_back_fail_decw_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_equal_back_fail_repl_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_equal_back_fail_wrap_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_greater_back_fail_keep_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_never_back_fail_repl_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_less_back_fail_zero_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_always_back_fail_incc_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_never_back_fail_incc_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_equal_back_fail_decc_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_equal_back_fail_inv_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_not_equal_back_fail_keep_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_never_back_fail_inv_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_less_or_equal_back_fail_keep_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_greater_back_fail_inv_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_never_back_fail_wrap_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_equal_back_fail_zero_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_less_or_equal_back_fail_keep_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_equal_back_fail_wrap_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_zero_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_not_equal_back_fail_keep_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_less_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_greater_back_fail_decw_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_never_back_fail_wrap_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_not_equal_back_fail_repl_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_equal_back_fail_inv_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_always_back_fail_inv_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_less_back_fail_decc_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_less_back_fail_inv_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_greater_back_fail_zero_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_greater_back_fail_zero_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_less_back_fail_decc_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_never_back_fail_keep_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_greater_back_fail_inv_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_always_back_fail_decc_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_less_or_equal_back_fail_zero_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_not_equal_back_fail_zero_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_equal_back_fail_incc_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_greater_back_fail_repl_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_always_back_fail_inv_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_less_back_fail_keep_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_not_equal_back_fail_incc_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_greater_back_fail_repl_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_greater_back_fail_repl_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_equal_back_fail_keep_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_not_equal_back_fail_repl_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_less_or_equal_back_fail_zero_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_not_equal_back_fail_keep_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_greater_back_fail_inv_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_not_equal_back_fail_wrap_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_greater_back_fail_decw_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_never_back_fail_inv_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_greater_back_fail_inv_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_less_back_fail_decc_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_not_equal_back_fail_zero_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_never_back_fail_wrap_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_not_equal_back_fail_zero_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_always_back_fail_wrap_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_not_equal_back_fail_inv_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_not_equal_back_fail_wrap_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_never_back_fail_keep_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_always_back_fail_keep_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_equal_back_fail_keep_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_equal_back_fail_wrap_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_less_back_fail_repl_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_not_equal_back_fail_decc_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_less_or_equal_back_fail_decw_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_equal_back_fail_wrap_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_equal_back_fail_repl_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_not_equal_back_fail_keep_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_less_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_less_or_equal_back_fail_keep_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_equal_back_fail_zero_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_always_back_fail_wrap_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_never_back_fail_decw_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_less_back_fail_zero_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_not_equal_back_fail_inv_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_equal_back_fail_incc_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_not_equal_back_fail_inv_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_less_or_equal_back_fail_incc_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_equal_back_fail_repl_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_always_back_fail_incc_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_less_or_equal_back_fail_decc_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_less_back_fail_incc_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_greater_back_fail_inv_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_always_back_fail_decc_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_less_back_fail_zero_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_not_equal_back_fail_inv_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_equal_back_fail_keep_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_less_back_fail_incc_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_always_back_fail_repl_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_not_equal_back_fail_inv_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_greater_back_fail_decc_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_equal_back_fail_inv_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_equal_back_fail_inv_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_not_equal_back_fail_decw_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_equal_back_fail_inv_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_not_equal_back_fail_repl_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_never_back_fail_repl_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_less_back_fail_decw_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_equal_back_fail_zero_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_equal_back_fail_keep_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_less_back_fail_wrap_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_always_back_fail_decw_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_never_back_fail_incc_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_not_equal_back_fail_keep_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_equal_back_fail_wrap_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_not_equal_back_fail_repl_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_less_back_fail_repl_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_less_or_equal_back_fail_zero_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_never_back_fail_decw_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_less_or_equal_back_fail_incc_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_always_back_fail_repl_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_less_back_fail_zero_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_greater_back_fail_inv_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_always_back_fail_decw_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_equal_back_fail_repl_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_always_back_fail_keep_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_less_back_fail_repl_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_always_back_fail_zero_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_greater_back_fail_decw_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_always_back_fail_decw_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_equal_back_fail_zero_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_less_or_equal_back_fail_inv_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_less_or_equal_back_fail_repl_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_equal_back_fail_keep_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_less_back_fail_decw_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_equal_back_fail_incc_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_never_back_fail_keep_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_less_back_fail_repl_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_greater_back_fail_inv_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_less_back_fail_decc_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_greater_back_fail_keep_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_equal_back_fail_decw_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_equal_back_fail_incc_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_not_equal_back_fail_inv_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_not_equal_back_fail_wrap_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_less_back_fail_zero_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_greater_back_fail_incc_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_never_back_fail_incc_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_not_equal_back_fail_decw_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_always_back_fail_incc_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_greater_back_fail_wrap_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_greater_back_fail_wrap_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_equal_back_fail_incc_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_less_back_fail_keep_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_not_equal_back_fail_inv_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_less_or_equal_back_fail_inv_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_never_back_fail_decc_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_not_equal_back_fail_wrap_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_less_back_fail_repl_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_greater_back_fail_repl_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_equal_back_fail_repl_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_always_back_fail_decc_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_less_back_fail_decw_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_equal_back_fail_repl_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_never_back_fail_repl_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_never_back_fail_inv_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_less_back_fail_decw_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_not_equal_back_fail_keep_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_always_back_fail_keep_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_less_back_fail_decc_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_equal_back_fail_wrap_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_always_back_fail_repl_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_less_back_fail_repl_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_never_back_fail_repl_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_never_back_fail_incc_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_incc_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_not_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_not_equal_back_fail_decc_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_always_back_fail_wrap_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_equal_back_fail_decw_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_equal_back_fail_keep_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_greater_back_fail_keep_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_always_back_fail_decw_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_always_back_fail_decc_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_less_back_fail_zero_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_never_back_fail_zero_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_always_back_fail_keep_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_less_back_fail_decw_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_never_back_fail_incc_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_never_back_fail_incc_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_never_back_fail_decw_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_always_back_fail_incc_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_always_back_fail_wrap_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_always_back_fail_inv_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_always_back_fail_inv_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_never_back_fail_zero_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_greater_back_fail_repl_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_equal_back_fail_inv_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_less_or_equal_back_fail_decc_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_equal_back_fail_decc_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_always_back_fail_keep_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_less_back_fail_zero_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_equal_back_fail_decc_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_greater_back_fail_keep_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_equal_back_fail_keep_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_less_back_fail_inv_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_greater_back_fail_keep_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_equal_back_fail_repl_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_never_back_fail_keep_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_always_back_fail_inv_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_never_back_fail_wrap_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_less_back_fail_zero_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_not_equal_back_fail_keep_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_not_equal_back_fail_inv_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_greater_back_fail_repl_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_less_back_fail_keep_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_less_back_fail_zero_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_not_equal_back_fail_decw_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_always_back_fail_zero_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_less_back_fail_decc_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_always_back_fail_keep_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_never_back_fail_incc_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_equal_back_fail_inv_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_incc_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_less_back_fail_repl_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_less_or_equal_back_fail_zero_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_greater_back_fail_zero_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_less_back_fail_incc_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_always_back_fail_decw_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_always_back_fail_decc_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_always_back_fail_incc_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_not_equal_back_fail_keep_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_less_back_fail_wrap_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_always_back_fail_zero_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_equal_back_fail_inv_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_greater_back_fail_decw_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_never_back_fail_keep_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_equal_back_fail_incc_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_greater_back_fail_incc_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_greater_back_fail_keep_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_never_back_fail_decc_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_never_back_fail_decc_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_not_equal_back_fail_wrap_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_less_or_equal_back_fail_decw_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_not_equal_back_fail_keep_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_never_back_fail_wrap_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_equal_back_fail_inv_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_equal_back_fail_repl_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_always_back_fail_inv_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_equal_back_fail_repl_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_always_back_fail_repl_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_never_back_fail_decw_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_less_back_fail_inv_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_never_back_fail_incc_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_greater_back_fail_incc_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_never_back_fail_inv_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_less_back_fail_zero_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_never_back_fail_wrap_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_always_back_fail_decc_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_equal_back_fail_decw_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_greater_back_fail_inv_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_always_back_fail_decw_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_less_back_fail_decc_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_equal_back_fail_incc_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_not_equal_back_fail_incc_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_greater_back_fail_inv_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_not_equal_back_fail_repl_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_not_equal_back_fail_keep_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_always_back_fail_decc_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_not_equal_back_fail_repl_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_never_back_fail_wrap_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_less_or_equal_back_fail_repl_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_greater_back_fail_repl_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_never_back_fail_incc_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_never_back_fail_decc_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_never_back_fail_repl_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_always_back_fail_repl_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_greater_back_fail_zero_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_greater_back_fail_wrap_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_never_back_fail_decc_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_not_equal_back_fail_wrap_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_equal_back_fail_decw_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_less_back_fail_decc_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_never_back_fail_zero_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_not_equal_back_fail_incc_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_always_back_fail_keep_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_never_back_fail_zero_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_not_equal_back_fail_decc_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_less_back_fail_keep_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_always_back_fail_decc_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_greater_back_fail_decc_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_greater_back_fail_decw_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_less_back_fail_decc_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_greater_back_fail_inv_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_less_or_equal_back_fail_repl_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_equal_back_fail_keep_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_always_back_fail_zero_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_not_equal_back_fail_wrap_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_not_equal_back_fail_repl_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_not_equal_back_fail_keep_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_less_or_equal_back_fail_inv_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_greater_back_fail_keep_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_always_back_fail_decc_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_equal_back_fail_decw_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_never_back_fail_incc_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_less_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_never_back_fail_wrap_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_less_or_equal_back_fail_inv_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_equal_back_fail_incc_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_always_back_fail_zero_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_always_back_fail_repl_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_equal_back_fail_zero_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_always_back_fail_repl_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_never_back_fail_keep_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_never_back_fail_incc_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_less_back_fail_inv_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_less_back_fail_zero_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_less_or_equal_back_fail_repl_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_never_back_fail_zero_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_equal_back_fail_keep_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_greater_back_fail_decc_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_greater_back_fail_decw_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_less_back_fail_wrap_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_less_back_fail_zero_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_always_back_fail_decw_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_less_or_equal_back_fail_inv_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_greater_back_fail_keep_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_equal_back_fail_zero_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_always_back_fail_keep_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decc_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_always_back_fail_decc_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_not_equal_back_fail_repl_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_always_back_fail_decc_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_always_back_fail_repl_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_not_equal_back_fail_decc_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_not_equal_back_fail_zero_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_not_equal_back_fail_keep_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_not_equal_back_fail_decc_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_never_back_fail_decw_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_equal_back_fail_zero_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_less_back_fail_repl_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_not_equal_back_fail_decw_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_not_equal_back_fail_keep_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_never_back_fail_wrap_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_never_back_fail_decc_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_less_back_fail_decc_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_equal_back_fail_decw_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_equal_back_fail_keep_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_not_equal_back_fail_decc_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_never_back_fail_decw_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_always_back_fail_inv_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_less_back_fail_incc_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_equal_back_fail_incc_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_not_equal_back_fail_decc_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_greater_back_fail_wrap_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_equal_back_fail_inv_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_never_back_fail_decc_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_always_back_fail_repl_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_greater_back_fail_decc_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_less_back_fail_inv_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_not_equal_back_fail_incc_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_equal_back_fail_zero_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_not_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_less_or_equal_back_fail_zero_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_not_equal_back_fail_decc_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_equal_back_fail_keep_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_never_back_fail_inv_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_always_back_fail_keep_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_always_back_fail_keep_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_not_equal_back_fail_incc_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_less_back_fail_repl_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_always_back_fail_repl_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_less_back_fail_repl_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_greater_back_fail_wrap_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_equal_back_fail_incc_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_less_back_fail_zero_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_equal_back_fail_inv_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_less_back_fail_keep_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_never_back_fail_decc_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_equal_back_fail_inv_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_greater_back_fail_decc_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_never_back_fail_inv_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_equal_back_fail_repl_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_not_equal_back_fail_incc_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_greater_back_fail_keep_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_never_back_fail_inv_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_greater_back_fail_keep_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_equal_back_fail_incc_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_never_back_fail_wrap_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_less_or_equal_back_fail_repl_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_never_back_fail_zero_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_not_equal_back_fail_incc_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_equal_back_fail_zero_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_never_back_fail_zero_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_not_equal_back_fail_inv_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_not_equal_back_fail_decw_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_never_back_fail_inv_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_never_back_fail_inv_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_equal_back_fail_incc_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_less_back_fail_wrap_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_never_back_fail_incc_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_greater_back_fail_keep_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_less_back_fail_decc_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_never_back_fail_keep_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_greater_back_fail_incc_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_not_equal_back_fail_incc_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_always_back_fail_zero_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_less_back_fail_decc_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_not_equal_back_fail_incc_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_equal_back_fail_incc_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_always_back_fail_wrap_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_never_back_fail_wrap_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_less_back_fail_zero_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_greater_back_fail_decw_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_equal_back_fail_decc_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_not_equal_back_fail_repl_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_never_back_fail_repl_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_not_equal_back_fail_decc_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_less_back_fail_repl_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_equal_back_fail_zero_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_less_back_fail_keep_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_equal_back_fail_inv_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_always_back_fail_incc_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_equal_back_fail_repl_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_greater_back_fail_keep_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_greater_back_fail_decc_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_never_back_fail_wrap_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_greater_back_fail_decc_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_less_back_fail_decc_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_equal_back_fail_keep_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_less_back_fail_inv_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_equal_back_fail_keep_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_equal_back_fail_decw_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_never_back_fail_inv_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_not_equal_back_fail_zero_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_equal_back_fail_repl_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_less_or_equal_back_fail_decw_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_greater_back_fail_inv_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_always_back_fail_repl_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_equal_back_fail_keep_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_greater_back_fail_incc_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_never_back_fail_decw_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_less_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_equal_back_fail_keep_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_less_back_fail_repl_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_equal_back_fail_zero_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_not_equal_back_fail_wrap_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_never_back_fail_decc_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_always_back_fail_decc_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_less_back_fail_incc_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_not_equal_back_fail_wrap_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_always_back_fail_incc_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_less_or_equal_back_fail_zero_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_never_back_fail_incc_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_never_back_fail_decc_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_never_back_fail_incc_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_less_or_equal_back_fail_inv_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_never_back_fail_decc_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_never_back_fail_inv_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_greater_back_fail_decc_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_less_back_fail_keep_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_always_back_fail_wrap_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_less_or_equal_back_fail_decc_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_equal_back_fail_wrap_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_equal_back_fail_decw_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_greater_back_fail_incc_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_equal_back_fail_decc_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_less_back_fail_zero_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_less_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_always_back_fail_incc_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_always_back_fail_incc_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_less_back_fail_repl_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_less_back_fail_inv_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_never_back_fail_repl_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_never_back_fail_repl_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_greater_back_fail_repl_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_not_equal_back_fail_decc_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_equal_back_fail_decc_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_greater_back_fail_incc_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_never_back_fail_inv_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_less_back_fail_decc_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_never_back_fail_keep_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_equal_back_fail_repl_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_never_back_fail_decw_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_always_back_fail_incc_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_not_equal_back_fail_wrap_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_greater_back_fail_keep_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_not_equal_back_fail_incc_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_less_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_not_equal_back_fail_repl_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_never_back_fail_wrap_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_greater_back_fail_decw_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_less_back_fail_keep_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_less_back_fail_repl_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_equal_back_fail_zero_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_always_back_fail_keep_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_less_or_equal_back_fail_inv_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_not_equal_back_fail_wrap_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_equal_back_fail_repl_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_always_back_fail_wrap_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_not_equal_back_fail_keep_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_equal_back_fail_zero_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_always_back_fail_repl_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_never_back_fail_repl_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_not_equal_back_fail_repl_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_never_back_fail_zero_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_never_back_fail_decw_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_never_back_fail_decw_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_equal_back_fail_wrap_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_equal_back_fail_decc_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_never_back_fail_decc_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_never_back_fail_keep_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_always_back_fail_zero_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_always_back_fail_decw_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_not_equal_back_fail_decw_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_equal_back_fail_zero_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_equal_back_fail_decc_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_never_back_fail_inv_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_less_back_fail_wrap_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_never_back_fail_inv_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_never_back_fail_incc_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_less_or_equal_back_fail_keep_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_not_equal_back_fail_repl_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_always_back_fail_decw_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_less_or_equal_back_fail_decw_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_less_back_fail_repl_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_equal_back_fail_zero_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_less_back_fail_inv_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_not_equal_back_fail_incc_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_never_back_fail_keep_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_not_equal_back_fail_wrap_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_equal_back_fail_incc_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_always_back_fail_incc_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_equal_back_fail_zero_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_always_back_fail_repl_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_always_back_fail_zero_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_equal_back_fail_keep_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_greater_back_fail_decw_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_greater_back_fail_inv_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_equal_back_fail_decw_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decw_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_greater_back_fail_decw_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_never_back_fail_decc_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_always_back_fail_wrap_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_less_back_fail_decw_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_always_back_fail_zero_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_not_equal_back_fail_repl_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_equal_back_fail_inv_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_never_back_fail_repl_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decc_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_less_back_fail_inv_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_equal_back_fail_decc_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_less_back_fail_inv_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_less_back_fail_decw_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_greater_back_fail_repl_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_greater_back_fail_incc_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_less_back_fail_inv_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_equal_back_fail_decw_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_always_back_fail_keep_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_never_back_fail_incc_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_never_back_fail_decw_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_greater_back_fail_decc_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_always_back_fail_inv_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_less_back_fail_zero_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_always_back_fail_inv_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_greater_back_fail_incc_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_always_back_fail_zero_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_always_back_fail_decw_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_equal_back_fail_repl_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_never_back_fail_decw_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_less_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_greater_back_fail_incc_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_never_back_fail_repl_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_never_back_fail_incc_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_less_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_less_back_fail_inv_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_less_back_fail_decc_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_less_or_equal_back_fail_repl_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_not_equal_back_fail_decc_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_less_back_fail_decc_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_equal_back_fail_zero_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_always_back_fail_keep_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_greater_back_fail_incc_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_never_back_fail_wrap_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_less_back_fail_wrap_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_greater_back_fail_repl_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_equal_back_fail_repl_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_less_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_less_back_fail_wrap_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_equal_back_fail_inv_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_always_back_fail_zero_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_greater_back_fail_repl_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_always_back_fail_keep_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_never_back_fail_inv_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_equal_back_fail_zero_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_less_back_fail_inv_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_never_back_fail_decc_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_never_back_fail_incc_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_not_equal_back_fail_decw_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_greater_back_fail_inv_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_never_back_fail_zero_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_greater_back_fail_incc_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_not_equal_back_fail_inv_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_always_back_fail_decw_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_equal_back_fail_decc_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_less_back_fail_repl_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_always_back_fail_decc_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_never_back_fail_incc_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_not_equal_back_fail_incc_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_inv_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_less_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_always_back_fail_decc_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_never_back_fail_repl_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_always_back_fail_inv_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_always_back_fail_decc_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_not_equal_back_fail_wrap_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_never_back_fail_keep_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_always_back_fail_zero_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_greater_back_fail_repl_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_less_back_fail_decw_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_greater_back_fail_inv_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_repl_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_always_back_fail_decw_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_greater_back_fail_repl_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_equal_back_fail_incc_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_never_back_fail_repl_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_greater_back_fail_wrap_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_less_or_equal_back_fail_keep_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_equal_back_fail_wrap_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_equal_back_fail_decw_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_not_equal_back_fail_decw_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_greater_back_fail_decc_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_never_back_fail_decw_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_greater_back_fail_inv_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_not_equal_back_fail_decw_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_always_back_fail_repl_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_equal_back_fail_wrap_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_never_back_fail_decc_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_less_back_fail_zero_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_not_equal_back_fail_repl_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_greater_back_fail_wrap_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_equal_back_fail_decw_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decc_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_less_back_fail_decc_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_less_back_fail_incc_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_less_back_fail_inv_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_not_equal_back_fail_incc_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_never_back_fail_incc_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_greater_back_fail_decc_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_greater_back_fail_repl_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_always_back_fail_repl_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_greater_back_fail_repl_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_always_back_fail_wrap_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_always_back_fail_zero_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_greater_back_fail_zero_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_not_equal_back_fail_keep_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_never_back_fail_wrap_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_equal_back_fail_zero_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_equal_back_fail_keep_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_never_back_fail_decc_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_equal_back_fail_wrap_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_equal_back_fail_inv_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_always_back_fail_zero_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_always_back_fail_decc_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_never_back_fail_decc_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_never_back_fail_inv_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_equal_back_fail_repl_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_equal_back_fail_repl_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_never_back_fail_keep_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_greater_back_fail_wrap_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_less_or_equal_back_fail_repl_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_greater_back_fail_wrap_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_greater_back_fail_inv_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_never_back_fail_decw_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_equal_back_fail_zero_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decw_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_never_back_fail_incc_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_not_equal_back_fail_decc_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_greater_back_fail_decc_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_equal_back_fail_zero_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_equal_back_fail_decw_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_less_back_fail_wrap_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_never_back_fail_incc_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_always_back_fail_keep_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_always_back_fail_inv_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_not_equal_back_fail_repl_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_never_back_fail_inv_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_never_back_fail_inv_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_greater_back_fail_decw_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_less_back_fail_decc_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_greater_back_fail_repl_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_equal_back_fail_decc_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_greater_back_fail_wrap_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_always_back_fail_decw_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_never_back_fail_keep_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_equal_back_fail_decw_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_never_back_fail_repl_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_not_equal_back_fail_decc_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_not_equal_back_fail_wrap_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_less_back_fail_incc_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_always_back_fail_repl_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_equal_back_fail_wrap_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_always_back_fail_decw_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_equal_back_fail_incc_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_less_back_fail_decw_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_equal_back_fail_repl_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_always_back_fail_wrap_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_greater_back_fail_decw_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_equal_back_fail_decc_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_less_back_fail_decw_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_never_back_fail_decw_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_equal_back_fail_incc_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_equal_back_fail_zero_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_equal_back_fail_incc_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_not_equal_back_fail_keep_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_greater_back_fail_wrap_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_not_equal_back_fail_inv_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_greater_back_fail_decc_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_greater_back_fail_keep_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_greater_back_fail_repl_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decc_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_not_equal_back_fail_decw_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_less_back_fail_repl_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_equal_back_fail_zero_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_never_back_fail_keep_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_not_equal_back_fail_keep_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_less_back_fail_incc_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_always_back_fail_keep_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_less_back_fail_decw_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_incc_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_not_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_equal_back_fail_repl_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_greater_back_fail_incc_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_greater_back_fail_incc_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_always_back_fail_keep_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_equal_back_fail_decc_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_less_back_fail_incc_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_equal_back_fail_inv_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_less_back_fail_wrap_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_always_back_fail_wrap_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_not_equal_back_fail_zero_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_equal_back_fail_repl_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_always_back_fail_wrap_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_not_equal_back_fail_decc_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_always_back_fail_incc_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_never_back_fail_inv_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_greater_back_fail_repl_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_not_equal_back_fail_decw_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_less_or_equal_back_fail_inv_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_always_back_fail_wrap_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_greater_back_fail_decc_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_greater_back_fail_zero_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_greater_back_fail_repl_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_never_back_fail_wrap_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_always_back_fail_decw_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_always_back_fail_decw_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_equal_back_fail_decw_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_greater_back_fail_decc_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_always_back_fail_wrap_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_not_equal_back_fail_keep_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_always_back_fail_keep_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_not_equal_back_fail_repl_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_less_back_fail_keep_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_not_equal_back_fail_decc_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_not_equal_back_fail_decc_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_less_back_fail_incc_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_equal_back_fail_inv_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_always_back_fail_incc_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_not_equal_back_fail_repl_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_equal_back_fail_incc_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_inv_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_equal_back_fail_incc_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_never_back_fail_decc_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_always_back_fail_decw_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_less_back_fail_keep_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_always_back_fail_repl_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_never_back_fail_inv_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_less_back_fail_decw_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_equal_back_fail_decc_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_less_back_fail_decc_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_less_back_fail_inv_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_greater_back_fail_zero_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_greater_back_fail_wrap_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_always_back_fail_decc_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_never_back_fail_wrap_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_never_back_fail_keep_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_never_back_fail_incc_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_less_back_fail_incc_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_equal_back_fail_decc_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_not_equal_back_fail_keep_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_never_back_fail_keep_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_greater_back_fail_inv_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_always_back_fail_wrap_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_greater_back_fail_decw_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_equal_back_fail_incc_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_always_back_fail_decc_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_less_or_equal_back_fail_zero_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_never_back_fail_incc_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_greater_back_fail_incc_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_never_back_fail_wrap_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_equal_back_fail_wrap_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_not_equal_back_fail_keep_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_greater_back_fail_decc_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_not_equal_back_fail_decc_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_not_equal_back_fail_zero_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_always_back_fail_inv_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_greater_back_fail_keep_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_greater_back_fail_zero_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_not_equal_back_fail_repl_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_less_back_fail_wrap_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_less_back_fail_repl_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_not_equal_back_fail_inv_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_equal_back_fail_inv_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_always_back_fail_repl_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_never_back_fail_zero_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_equal_back_fail_incc_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_equal_back_fail_decw_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_less_back_fail_decc_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_never_back_fail_decw_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_less_or_equal_back_fail_inv_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_never_back_fail_decc_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_equal_back_fail_inv_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_never_back_fail_zero_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_never_back_fail_wrap_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_always_back_fail_keep_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_equal_back_fail_keep_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_greater_back_fail_decc_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_not_equal_back_fail_inv_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_always_back_fail_decc_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_always_back_fail_wrap_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_always_back_fail_decc_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_not_equal_back_fail_keep_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_equal_back_fail_decc_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_equal_back_fail_repl_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_not_equal_back_fail_decw_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_less_or_equal_back_fail_keep_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_greater_back_fail_inv_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_equal_back_fail_decw_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_less_back_fail_zero_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_equal_back_fail_repl_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_always_back_fail_inv_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_never_back_fail_inv_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_never_back_fail_zero_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_equal_back_fail_inv_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_equal_back_fail_inv_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_less_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_never_back_fail_incc_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_always_back_fail_keep_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_not_equal_back_fail_inv_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_less_or_equal_back_fail_repl_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_greater_back_fail_decw_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_equal_back_fail_repl_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_less_back_fail_inv_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_less_back_fail_incc_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_never_back_fail_inv_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_never_back_fail_keep_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_not_equal_back_fail_repl_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_greater_back_fail_inv_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_never_back_fail_inv_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_always_back_fail_decc_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_less_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_greater_back_fail_decw_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_less_or_equal_back_fail_zero_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_never_back_fail_keep_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_greater_back_fail_decc_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_always_back_fail_incc_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_equal_back_fail_inv_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_never_back_fail_inv_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_always_back_fail_inv_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_always_back_fail_incc_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_always_back_fail_wrap_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_always_back_fail_decw_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_equal_back_fail_repl_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_never_back_fail_decc_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_always_back_fail_repl_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_not_equal_back_fail_wrap_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_never_back_fail_decc_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_less_back_fail_decw_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_not_equal_back_fail_zero_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_less_or_equal_back_fail_zero_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_greater_back_fail_wrap_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_not_equal_back_fail_decc_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_always_back_fail_decw_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_always_back_fail_inv_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_never_back_fail_keep_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_not_equal_back_fail_zero_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_less_or_equal_back_fail_inv_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_greater_back_fail_keep_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_less_back_fail_wrap_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_always_back_fail_decc_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_always_back_fail_incc_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_not_equal_back_fail_incc_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_less_back_fail_inv_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_equal_back_fail_inv_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_equal_back_fail_keep_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_greater_back_fail_decw_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_never_back_fail_repl_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_equal_back_fail_inv_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_greater_back_fail_decc_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_always_back_fail_incc_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_equal_back_fail_decw_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_less_back_fail_incc_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_less_or_equal_back_fail_keep_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_never_back_fail_incc_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_not_equal_back_fail_repl_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_not_equal_back_fail_decc_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_not_equal_back_fail_incc_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_always_back_fail_decw_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_greater_back_fail_decc_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_not_equal_back_fail_incc_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_never_back_fail_decw_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_not_equal_back_fail_decw_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_not_equal_back_fail_decc_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_less_back_fail_decw_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_greater_back_fail_inv_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_not_equal_back_fail_wrap_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_equal_back_fail_inv_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_less_or_equal_back_fail_zero_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_not_equal_back_fail_repl_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_never_back_fail_inv_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_not_equal_back_fail_decw_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_greater_back_fail_repl_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_not_equal_back_fail_wrap_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_never_back_fail_repl_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_less_back_fail_inv_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_never_back_fail_keep_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_less_back_fail_decw_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_always_back_fail_repl_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_never_back_fail_incc_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_never_back_fail_inv_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_less_back_fail_decc_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_not_equal_back_fail_decc_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_not_equal_back_fail_zero_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_greater_back_fail_zero_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_equal_back_fail_incc_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_always_back_fail_inv_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_always_back_fail_inv_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_always_back_fail_incc_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_always_back_fail_wrap_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_equal_back_fail_decw_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_never_back_fail_keep_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_greater_back_fail_zero_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_less_or_equal_back_fail_decw_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_equal_back_fail_decw_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_equal_back_fail_decc_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_equal_back_fail_wrap_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_less_or_equal_back_fail_repl_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_never_back_fail_inv_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_equal_back_fail_zero_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_always_back_fail_incc_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_less_or_equal_back_fail_decw_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_greater_back_fail_decw_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_not_equal_back_fail_decc_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_greater_back_fail_wrap_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_less_back_fail_wrap_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_never_back_fail_repl_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_greater_back_fail_inv_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_always_back_fail_decw_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_always_back_fail_inv_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_not_equal_back_fail_zero_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_equal_back_fail_decc_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_never_back_fail_repl_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_not_equal_back_fail_wrap_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_never_back_fail_decw_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_less_back_fail_repl_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_less_back_fail_inv_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_never_back_fail_wrap_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_greater_back_fail_repl_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_always_back_fail_decw_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_not_equal_back_fail_incc_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_less_or_equal_back_fail_incc_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_equal_back_fail_repl_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_greater_back_fail_decc_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_greater_back_fail_repl_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_always_back_fail_keep_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_always_back_fail_keep_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_less_back_fail_wrap_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_greater_back_fail_zero_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_decc_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_never_back_fail_keep_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_less_back_fail_inv_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_not_equal_back_fail_zero_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_never_back_fail_decc_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_equal_back_fail_keep_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_greater_back_fail_keep_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_greater_back_fail_zero_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_not_equal_back_fail_keep_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_greater_back_fail_decw_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_never_back_fail_decc_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_always_back_fail_decc_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_always_back_fail_keep_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_always_back_fail_incc_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_less_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_never_back_fail_zero_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_always_back_fail_keep_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_less_back_fail_decc_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_never_back_fail_inv_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_equal_back_fail_keep_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_less_back_fail_zero_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_equal_back_fail_incc_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_less_or_equal_back_fail_inv_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_less_back_fail_zero_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_equal_back_fail_inv_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_not_equal_back_fail_decw_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_less_back_fail_decc_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_less_back_fail_wrap_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_equal_back_fail_incc_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_greater_back_fail_wrap_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_equal_back_fail_keep_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_never_back_fail_incc_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_always_back_fail_incc_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_less_back_fail_repl_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_never_back_fail_zero_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_equal_back_fail_keep_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_not_equal_back_fail_wrap_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_less_back_fail_zero_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_always_back_fail_decw_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_equal_back_fail_keep_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_equal_back_fail_decw_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_less_or_equal_back_fail_incc_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_never_back_fail_decc_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_never_back_fail_incc_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_less_back_fail_decc_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_equal_back_fail_repl_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_less_back_fail_incc_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_never_back_fail_keep_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_less_back_fail_decc_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_not_equal_back_fail_inv_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_never_back_fail_decc_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_never_back_fail_incc_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_less_back_fail_incc_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_always_back_fail_decc_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decw_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_never_back_fail_inv_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_equal_back_fail_decc_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_less_back_fail_incc_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_not_equal_back_fail_decw_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_less_back_fail_incc_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_less_back_fail_repl_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_less_back_fail_decc_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_less_back_fail_keep_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_not_equal_back_fail_decw_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_not_equal_back_fail_repl_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_equal_back_fail_keep_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_not_equal_back_fail_keep_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_never_back_fail_wrap_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_greater_back_fail_keep_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_not_equal_back_fail_zero_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_always_back_fail_decc_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_not_equal_back_fail_decc_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_less_back_fail_decc_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_always_back_fail_decw_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_never_back_fail_zero_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_less_back_fail_repl_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_less_back_fail_repl_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_always_back_fail_keep_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_equal_back_fail_wrap_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_greater_back_fail_keep_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_less_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_never_back_fail_repl_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_less_or_equal_back_fail_inv_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_not_equal_back_fail_zero_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_always_back_fail_keep_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_not_equal_back_fail_wrap_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_greater_back_fail_wrap_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_greater_back_fail_inv_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_equal_back_fail_keep_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_greater_back_fail_inv_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_never_back_fail_repl_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_not_equal_back_fail_decc_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_not_equal_back_fail_zero_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_greater_back_fail_decc_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_not_equal_back_fail_inv_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_not_equal_back_fail_decw_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_equal_back_fail_inv_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_greater_back_fail_incc_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_equal_back_fail_repl_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_greater_back_fail_wrap_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_less_back_fail_zero_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_less_back_fail_repl_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_equal_back_fail_wrap_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_not_equal_back_fail_inv_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_always_back_fail_decw_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_equal_back_fail_wrap_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_less_back_fail_zero_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_not_equal_back_fail_keep_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_less_back_fail_zero_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_always_back_fail_zero_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_not_equal_back_fail_repl_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_repl_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_never_back_fail_zero_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_never_back_fail_keep_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_not_equal_back_fail_zero_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_equal_back_fail_decc_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_always_back_fail_incc_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_always_back_fail_wrap_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_greater_back_fail_wrap_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_never_back_fail_keep_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_greater_back_fail_inv_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_greater_back_fail_wrap_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_not_equal_back_fail_zero_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_equal_back_fail_inv_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_equal_back_fail_inv_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_less_back_fail_repl_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_always_back_fail_incc_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_equal_back_fail_decc_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_always_back_fail_wrap_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_never_back_fail_zero_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_never_back_fail_repl_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_not_equal_back_fail_wrap_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_not_equal_back_fail_incc_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_less_back_fail_repl_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_not_equal_back_fail_wrap_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_never_back_fail_zero_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_always_back_fail_incc_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_never_back_fail_wrap_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_greater_back_fail_decc_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_not_equal_back_fail_keep_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_less_back_fail_repl_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_greater_back_fail_inv_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_not_equal_back_fail_incc_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_equal_back_fail_zero_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_equal_back_fail_inv_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_less_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_never_back_fail_inv_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_greater_back_fail_repl_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_greater_back_fail_decw_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_less_or_equal_back_fail_decc_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_less_back_fail_keep_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_always_back_fail_decw_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_less_back_fail_keep_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_less_back_fail_keep_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_not_equal_back_fail_incc_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_not_equal_back_fail_decc_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_never_back_fail_incc_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_greater_back_fail_decw_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_greater_back_fail_zero_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_not_equal_back_fail_wrap_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_greater_back_fail_wrap_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_equal_back_fail_zero_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_greater_back_fail_wrap_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_greater_back_fail_wrap_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_equal_back_fail_keep_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_always_back_fail_incc_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_equal_back_fail_repl_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_less_back_fail_inv_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_less_back_fail_keep_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_less_or_equal_back_fail_decc_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_equal_back_fail_wrap_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_always_back_fail_decw_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_not_equal_back_fail_wrap_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_less_back_fail_incc_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_always_back_fail_incc_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_never_back_fail_wrap_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_equal_back_fail_repl_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_greater_back_fail_decw_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_equal_back_fail_inv_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_equal_back_fail_decc_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_greater_back_fail_keep_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_less_back_fail_repl_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_never_back_fail_inv_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_less_back_fail_keep_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_not_equal_back_fail_incc_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_less_back_fail_zero_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_greater_back_fail_decc_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_less_back_fail_repl_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_greater_back_fail_decw_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_greater_back_fail_keep_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_not_equal_back_fail_inv_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_always_back_fail_decc_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_never_back_fail_decw_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_not_equal_back_fail_wrap_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_never_back_fail_inv_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_less_or_equal_back_fail_incc_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_less_back_fail_incc_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_keep_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_equal_back_fail_zero_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_never_back_fail_wrap_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_equal_back_fail_wrap_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_greater_back_fail_zero_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_never_back_fail_zero_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_greater_back_fail_repl_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_greater_back_fail_decc_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_never_back_fail_repl_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_not_equal_back_fail_decw_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_never_back_fail_keep_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_never_back_fail_keep_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_always_back_fail_zero_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_not_equal_back_fail_decc_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_equal_back_fail_incc_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_never_back_fail_zero_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_less_back_fail_incc_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_less_back_fail_zero_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_less_back_fail_wrap_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_less_or_equal_back_fail_repl_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_less_or_equal_back_fail_zero_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_never_back_fail_inv_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_equal_back_fail_decw_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_equal_back_fail_decc_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_less_back_fail_decw_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_equal_back_fail_repl_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_never_back_fail_repl_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_equal_back_fail_inv_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_always_back_fail_wrap_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_greater_back_fail_decc_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_never_back_fail_keep_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_always_back_fail_decw_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_less_back_fail_keep_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_not_equal_back_fail_decc_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_less_back_fail_keep_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_greater_back_fail_decc_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_equal_back_fail_zero_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_never_back_fail_decc_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_always_back_fail_wrap_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_equal_back_fail_wrap_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_not_equal_back_fail_inv_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_never_back_fail_incc_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_not_equal_back_fail_zero_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_not_equal_back_fail_incc_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_less_or_equal_back_fail_zero_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_less_back_fail_inv_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_never_back_fail_keep_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_not_equal_back_fail_keep_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_greater_back_fail_decc_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_greater_back_fail_zero_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_never_back_fail_repl_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_always_back_fail_incc_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_not_equal_back_fail_incc_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_equal_back_fail_keep_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_not_equal_back_fail_decw_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_less_or_equal_back_fail_repl_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_never_back_fail_inv_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_always_back_fail_keep_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_never_back_fail_decw_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_greater_back_fail_incc_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_always_back_fail_wrap_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_less_back_fail_wrap_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_greater_back_fail_inv_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_equal_back_fail_decc_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_never_back_fail_inv_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_never_back_fail_keep_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_less_back_fail_decw_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_greater_back_fail_inv_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_greater_back_fail_inv_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_equal_back_fail_repl_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_never_back_fail_wrap_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_never_back_fail_decc_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_never_back_fail_wrap_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_equal_back_fail_zero_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_less_back_fail_wrap_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_never_back_fail_decw_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_never_back_fail_decc_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_equal_back_fail_decc_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_not_equal_back_fail_inv_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_never_back_fail_incc_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_less_back_fail_wrap_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_not_equal_back_fail_decc_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_less_back_fail_wrap_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_less_back_fail_incc_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_less_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_always_back_fail_zero_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_less_or_equal_back_fail_repl_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_less_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_always_back_fail_zero_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_always_back_fail_decc_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_not_equal_back_fail_keep_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_not_equal_back_fail_repl_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_equal_back_fail_keep_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_always_back_fail_decc_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_not_equal_back_fail_wrap_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_never_back_fail_decw_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_always_back_fail_keep_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_not_equal_back_fail_wrap_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_equal_back_fail_incc_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_not_equal_back_fail_repl_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_less_or_equal_back_fail_decc_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_never_back_fail_keep_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_equal_back_fail_zero_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_greater_back_fail_incc_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_incc_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_always_back_fail_inv_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_less_back_fail_keep_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_less_or_equal_back_fail_incc_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_never_back_fail_decc_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_equal_back_fail_repl_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_greater_back_fail_zero_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_not_equal_back_fail_decw_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_never_back_fail_repl_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_equal_back_fail_inv_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_less_back_fail_decw_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_greater_back_fail_inv_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_always_back_fail_decc_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_less_back_fail_keep_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_never_back_fail_keep_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_always_back_fail_decw_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_less_back_fail_inv_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_never_back_fail_keep_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_never_back_fail_repl_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_greater_back_fail_zero_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_never_back_fail_keep_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_less_or_equal_back_fail_zero_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_not_equal_back_fail_zero_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_greater_back_fail_decc_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_less_back_fail_wrap_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_never_back_fail_decc_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_greater_back_fail_wrap_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_equal_back_fail_repl_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_greater_back_fail_keep_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_always_back_fail_zero_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_always_back_fail_wrap_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_always_back_fail_repl_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_equal_back_fail_keep_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_always_back_fail_wrap_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_never_back_fail_wrap_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_greater_back_fail_incc_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_equal_back_fail_keep_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_less_back_fail_repl_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_greater_back_fail_zero_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_never_back_fail_incc_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_never_back_fail_decw_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_greater_back_fail_decw_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_not_equal_back_fail_decc_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_less_or_equal_back_fail_decw_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_never_back_fail_wrap_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_less_back_fail_decw_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_not_equal_back_fail_zero_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_equal_back_fail_wrap_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_always_back_fail_decw_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_always_back_fail_inv_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_never_back_fail_incc_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_equal_back_fail_zero_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_less_back_fail_zero_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_equal_back_fail_decw_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_never_back_fail_keep_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_never_back_fail_repl_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_not_equal_back_fail_decw_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_equal_back_fail_repl_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_always_back_fail_zero_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_never_back_fail_zero_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_not_equal_back_fail_inv_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_less_back_fail_incc_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_less_or_equal_back_fail_repl_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_equal_back_fail_decc_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_always_back_fail_decc_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_never_back_fail_repl_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_greater_back_fail_incc_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_less_or_equal_back_fail_decc_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_less_or_equal_back_fail_zero_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_less_back_fail_incc_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_never_back_fail_keep_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_not_equal_back_fail_decw_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_greater_back_fail_decw_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_less_back_fail_zero_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_never_back_fail_decw_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_never_back_fail_repl_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_always_back_fail_decw_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_less_or_equal_back_fail_decc_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_less_back_fail_decw_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_not_equal_back_fail_inv_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_equal_back_fail_keep_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_not_equal_back_fail_decw_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_always_back_fail_inv_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_greater_back_fail_zero_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_not_equal_back_fail_decw_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_always_back_fail_incc_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_greater_back_fail_zero_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_not_equal_back_fail_wrap_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_less_back_fail_decc_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_less_back_fail_incc_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_equal_back_fail_decc_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_always_back_fail_decc_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_less_back_fail_repl_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_equal_back_fail_incc_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_equal_back_fail_incc_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_less_back_fail_repl_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_less_back_fail_repl_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_always_back_fail_wrap_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_always_back_fail_wrap_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_less_or_equal_back_fail_incc_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_not_equal_back_fail_incc_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_equal_back_fail_decw_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_not_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_not_equal_back_fail_zero_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_equal_back_fail_decw_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_not_equal_back_fail_repl_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_not_equal_back_fail_zero_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_always_back_fail_incc_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_less_back_fail_decc_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_less_back_fail_decw_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_decc_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_not_equal_back_fail_repl_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_always_back_fail_repl_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_always_back_fail_inv_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_equal_back_fail_repl_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_not_equal_back_fail_wrap_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_always_back_fail_repl_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_never_back_fail_zero_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_greater_back_fail_decc_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_always_back_fail_inv_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_not_equal_back_fail_wrap_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_always_back_fail_wrap_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_never_back_fail_decc_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_never_back_fail_decc_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_always_back_fail_inv_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_always_back_fail_inv_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_greater_back_fail_incc_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_equal_back_fail_decw_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_greater_back_fail_repl_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_not_equal_back_fail_wrap_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_never_back_fail_zero_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_always_back_fail_keep_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_always_back_fail_zero_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_greater_back_fail_decw_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_always_back_fail_repl_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_less_back_fail_decw_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_repl_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_equal_back_fail_repl_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_not_equal_back_fail_repl_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_always_back_fail_wrap_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_equal_back_fail_decw_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_never_back_fail_decw_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_equal_back_fail_decc_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_less_back_fail_repl_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_greater_back_fail_zero_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_always_back_fail_incc_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_less_back_fail_zero_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_never_back_fail_repl_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_greater_back_fail_keep_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_less_or_equal_back_fail_decc_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_less_back_fail_incc_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_never_back_fail_zero_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_never_back_fail_decw_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_greater_back_fail_inv_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_not_equal_back_fail_keep_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_less_back_fail_decc_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_never_back_fail_incc_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_not_equal_back_fail_zero_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_less_back_fail_wrap_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_always_back_fail_decw_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_greater_back_fail_decc_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_less_or_equal_back_fail_incc_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_never_back_fail_keep_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_never_back_fail_keep_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_equal_back_fail_keep_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_greater_back_fail_zero_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_less_back_fail_incc_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_always_back_fail_inv_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_always_back_fail_zero_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_less_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_not_equal_back_fail_keep_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_greater_back_fail_decw_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_equal_back_fail_zero_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_never_back_fail_zero_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_equal_back_fail_keep_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_less_back_fail_incc_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_less_or_equal_back_fail_repl_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_greater_back_fail_inv_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_equal_back_fail_wrap_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_less_back_fail_zero_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_less_back_fail_incc_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_less_or_equal_back_fail_keep_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_always_back_fail_decw_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_equal_back_fail_keep_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_always_back_fail_repl_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_greater_back_fail_zero_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_greater_back_fail_repl_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_less_or_equal_back_fail_repl_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_less_back_fail_incc_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_equal_back_fail_zero_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_not_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_always_back_fail_wrap_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_greater_back_fail_inv_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_less_back_fail_inv_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_greater_back_fail_zero_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_less_or_equal_back_fail_zero_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_greater_back_fail_inv_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_less_or_equal_back_fail_zero_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_less_back_fail_inv_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_less_back_fail_decc_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_less_back_fail_inv_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_less_back_fail_incc_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_greater_back_fail_repl_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_always_back_fail_wrap_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_always_back_fail_repl_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_equal_back_fail_zero_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_always_back_fail_inv_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_never_back_fail_inv_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_always_back_fail_decw_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_less_or_equal_back_fail_decc_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_always_back_fail_zero_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_greater_back_fail_decw_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_never_back_fail_decc_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_not_equal_back_fail_inv_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_greater_back_fail_inv_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_never_back_fail_decc_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_equal_back_fail_zero_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_not_equal_back_fail_decw_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_less_back_fail_incc_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_less_or_equal_back_fail_keep_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_less_or_equal_back_fail_zero_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_equal_back_fail_inv_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_always_back_fail_zero_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_greater_back_fail_inv_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_not_equal_back_fail_zero_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_equal_back_fail_wrap_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_not_equal_back_fail_wrap_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_greater_back_fail_wrap_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_always_back_fail_zero_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_equal_back_fail_incc_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_greater_back_fail_wrap_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_never_back_fail_incc_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_less_back_fail_wrap_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_never_back_fail_zero_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_equal_back_fail_inv_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_never_back_fail_keep_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_not_equal_back_fail_keep_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_never_back_fail_inv_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_less_back_fail_wrap_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_always_back_fail_decc_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_not_equal_back_fail_wrap_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_greater_back_fail_decw_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_never_back_fail_wrap_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_greater_back_fail_wrap_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_never_back_fail_decc_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_greater_back_fail_zero_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_greater_back_fail_decw_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_equal_back_fail_wrap_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_equal_back_fail_wrap_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_equal_back_fail_inv_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_never_back_fail_repl_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_always_back_fail_inv_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_never_back_fail_incc_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_not_equal_back_fail_wrap_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_not_equal_back_fail_wrap_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_greater_back_fail_wrap_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_never_back_fail_wrap_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_not_equal_back_fail_incc_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_greater_back_fail_repl_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_less_back_fail_keep_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_not_equal_back_fail_incc_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_less_back_fail_repl_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_not_equal_back_fail_decw_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_not_equal_back_fail_inv_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_equal_back_fail_keep_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_always_back_fail_decw_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_never_back_fail_inv_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_greater_back_fail_keep_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_less_back_fail_incc_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_greater_back_fail_wrap_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_equal_back_fail_decw_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_never_back_fail_incc_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_not_equal_back_fail_wrap_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_always_back_fail_decw_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_greater_back_fail_inv_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_always_back_fail_incc_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_less_or_equal_back_fail_repl_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_equal_back_fail_zero_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_greater_back_fail_zero_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_greater_back_fail_zero_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_less_back_fail_inv_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_less_back_fail_zero_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_equal_back_fail_decc_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_less_back_fail_keep_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_greater_back_fail_decc_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_equal_back_fail_incc_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_greater_back_fail_decc_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_never_back_fail_repl_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_less_or_equal_back_fail_zero_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_less_or_equal_back_fail_inv_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_equal_back_fail_wrap_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_never_back_fail_incc_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_greater_back_fail_incc_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_never_back_fail_decc_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_equal_back_fail_wrap_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_less_back_fail_zero_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_equal_back_fail_decc_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_always_back_fail_keep_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_equal_back_fail_repl_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_less_back_fail_wrap_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_greater_back_fail_decc_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decc_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_never_back_fail_decw_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_equal_back_fail_incc_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_greater_back_fail_repl_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_always_back_fail_repl_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_not_equal_back_fail_decw_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_always_back_fail_repl_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_greater_back_fail_incc_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_always_back_fail_zero_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_equal_back_fail_wrap_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_equal_back_fail_repl_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_less_back_fail_decw_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_always_back_fail_decw_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_less_back_fail_keep_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_always_back_fail_repl_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_always_back_fail_decw_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decc_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_less_back_fail_repl_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_greater_back_fail_decc_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_greater_back_fail_decw_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_always_back_fail_zero_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_not_equal_back_fail_zero_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_never_back_fail_keep_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_less_back_fail_decc_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_always_back_fail_zero_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_not_equal_back_fail_incc_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_always_back_fail_zero_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_less_back_fail_decw_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_not_equal_back_fail_inv_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_not_equal_back_fail_zero_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_always_back_fail_zero_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_never_back_fail_zero_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_not_equal_back_fail_incc_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_not_equal_back_fail_incc_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_less_back_fail_decw_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_less_back_fail_wrap_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_never_back_fail_repl_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_never_back_fail_inv_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_less_back_fail_repl_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_less_back_fail_keep_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_never_back_fail_inv_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_equal_back_fail_repl_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_never_back_fail_repl_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_not_equal_back_fail_decw_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_equal_back_fail_decc_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_always_back_fail_decc_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_equal_back_fail_decw_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_less_back_fail_zero_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_not_equal_back_fail_zero_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_always_back_fail_decw_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_not_equal_back_fail_wrap_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_less_back_fail_keep_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_greater_back_fail_decw_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_not_equal_back_fail_incc_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_greater_back_fail_decc_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_greater_back_fail_decc_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_always_back_fail_decw_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_less_back_fail_incc_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_equal_back_fail_decw_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_not_equal_back_fail_decc_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_not_equal_back_fail_decc_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_less_or_equal_back_fail_zero_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_less_back_fail_keep_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_never_back_fail_incc_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_not_equal_back_fail_wrap_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_not_equal_back_fail_wrap_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_less_or_equal_back_fail_inv_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_less_back_fail_decc_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_equal_back_fail_zero_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_greater_back_fail_keep_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_greater_back_fail_inv_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_not_equal_back_fail_repl_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_not_equal_back_fail_decc_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_greater_back_fail_keep_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_equal_back_fail_repl_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_equal_back_fail_repl_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_not_equal_back_fail_incc_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_greater_back_fail_inv_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_equal_back_fail_decw_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_less_back_fail_incc_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_always_back_fail_repl_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_not_equal_back_fail_zero_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_equal_back_fail_wrap_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_always_back_fail_zero_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_equal_back_fail_decw_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_greater_back_fail_keep_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_always_back_fail_inv_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_equal_back_fail_incc_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_never_back_fail_repl_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_greater_back_fail_wrap_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_greater_back_fail_wrap_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_equal_back_fail_zero_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_greater_back_fail_wrap_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_less_or_equal_back_fail_inv_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_never_back_fail_inv_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_always_back_fail_incc_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_greater_back_fail_incc_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_greater_back_fail_zero_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_greater_back_fail_keep_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_less_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_less_back_fail_zero_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_not_equal_back_fail_incc_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_greater_back_fail_zero_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_equal_back_fail_zero_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_greater_back_fail_decw_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_always_back_fail_inv_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_always_back_fail_inv_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_never_back_fail_decc_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_always_back_fail_wrap_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_never_back_fail_wrap_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_greater_back_fail_incc_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_greater_back_fail_zero_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_greater_back_fail_wrap_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_never_back_fail_decc_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_always_back_fail_incc_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_less_back_fail_keep_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_less_back_fail_wrap_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_less_back_fail_decw_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_never_back_fail_inv_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_equal_back_fail_keep_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_always_back_fail_decc_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_less_back_fail_wrap_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_less_back_fail_wrap_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_greater_back_fail_zero_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_less_back_fail_zero_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_always_back_fail_inv_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_not_equal_back_fail_zero_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_greater_back_fail_decw_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_equal_back_fail_keep_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_always_back_fail_incc_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_always_back_fail_repl_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_never_back_fail_repl_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_less_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_not_equal_back_fail_inv_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_equal_back_fail_keep_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_equal_back_fail_keep_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_equal_back_fail_decc_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_never_back_fail_inv_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_always_back_fail_inv_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_not_equal_back_fail_incc_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_equal_back_fail_inv_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_greater_back_fail_zero_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_less_back_fail_decw_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_always_back_fail_decc_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_always_back_fail_decw_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_always_back_fail_wrap_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_less_back_fail_incc_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_greater_back_fail_repl_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_less_back_fail_wrap_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_always_back_fail_wrap_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_less_back_fail_incc_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_always_back_fail_decw_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_greater_back_fail_decw_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_less_back_fail_inv_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_always_back_fail_inv_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_never_back_fail_wrap_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_equal_back_fail_repl_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_never_back_fail_repl_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_less_back_fail_decw_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_not_equal_back_fail_inv_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_always_back_fail_incc_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_equal_back_fail_incc_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_equal_back_fail_repl_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_always_back_fail_decw_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_less_back_fail_repl_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_equal_back_fail_decw_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_never_back_fail_keep_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_never_back_fail_zero_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_equal_back_fail_decw_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_less_or_equal_back_fail_repl_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_not_equal_back_fail_decw_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_not_equal_back_fail_repl_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_less_back_fail_inv_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_greater_back_fail_keep_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_equal_back_fail_incc_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_less_back_fail_keep_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_always_back_fail_incc_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_never_back_fail_wrap_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_never_back_fail_decw_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_less_back_fail_repl_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_equal_back_fail_zero_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_equal_back_fail_incc_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_always_back_fail_keep_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_less_or_equal_back_fail_zero_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_equal_back_fail_inv_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_never_back_fail_decc_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_less_back_fail_repl_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_less_back_fail_incc_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_greater_back_fail_zero_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_less_back_fail_repl_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_less_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_greater_back_fail_decc_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_less_or_equal_back_fail_decw_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_less_back_fail_zero_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_greater_back_fail_zero_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_not_equal_back_fail_zero_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_less_or_equal_back_fail_keep_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_less_or_equal_back_fail_repl_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_less_or_equal_back_fail_decc_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_never_back_fail_inv_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_less_or_equal_back_fail_incc_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_equal_back_fail_wrap_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_never_back_fail_keep_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_equal_back_fail_keep_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_always_back_fail_decw_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_greater_back_fail_decw_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_less_back_fail_incc_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_repl_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_not_equal_back_fail_wrap_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_always_back_fail_repl_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_not_equal_back_fail_decw_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_less_back_fail_incc_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_not_equal_back_fail_zero_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_greater_back_fail_repl_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_equal_back_fail_inv_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_always_back_fail_decw_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_greater_back_fail_decw_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_equal_back_fail_decc_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_not_equal_back_fail_repl_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_greater_back_fail_keep_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_greater_back_fail_inv_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_greater_back_fail_incc_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_equal_back_fail_wrap_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_never_back_fail_decw_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_always_back_fail_decw_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_greater_back_fail_decw_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_less_or_equal_back_fail_incc_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_never_back_fail_inv_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_always_back_fail_inv_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_always_back_fail_decw_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_greater_back_fail_decc_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_never_back_fail_decc_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_greater_back_fail_incc_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_never_back_fail_wrap_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_never_back_fail_keep_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_less_or_equal_back_fail_decc_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_less_or_equal_back_fail_repl_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_equal_back_fail_decc_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_greater_back_fail_repl_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_never_back_fail_keep_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_less_back_fail_keep_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_greater_back_fail_inv_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_never_back_fail_inv_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_equal_back_fail_repl_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_equal_back_fail_wrap_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_never_back_fail_zero_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_less_or_equal_back_fail_decw_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_equal_back_fail_incc_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_less_back_fail_zero_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_never_back_fail_decc_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_less_or_equal_back_fail_incc_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_always_back_fail_zero_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_less_back_fail_repl_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_less_back_fail_decw_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_always_back_fail_decc_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_never_back_fail_incc_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_equal_back_fail_keep_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_equal_back_fail_inv_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_less_back_fail_zero_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_always_back_fail_repl_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_never_back_fail_repl_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_always_back_fail_wrap_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_never_back_fail_repl_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_never_back_fail_decw_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_greater_back_fail_zero_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_equal_back_fail_zero_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_greater_back_fail_repl_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_equal_back_fail_keep_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_greater_back_fail_repl_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_less_back_fail_incc_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_not_equal_back_fail_repl_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_less_back_fail_repl_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_always_back_fail_inv_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_equal_back_fail_decw_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_not_equal_back_fail_decw_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_greater_back_fail_incc_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_never_back_fail_zero_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_less_back_fail_wrap_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_always_back_fail_decc_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_equal_back_fail_zero_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_greater_back_fail_inv_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_not_equal_back_fail_inv_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_always_back_fail_inv_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_always_back_fail_decc_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_less_back_fail_wrap_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_less_back_fail_decc_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_less_back_fail_repl_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_less_back_fail_inv_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_always_back_fail_zero_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_greater_back_fail_zero_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decw_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_equal_back_fail_zero_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_not_equal_back_fail_wrap_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_less_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_greater_back_fail_decw_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_never_back_fail_decw_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_always_back_fail_inv_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_not_equal_back_fail_decc_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_less_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_never_back_fail_decc_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_less_or_equal_back_fail_inv_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_less_or_equal_back_fail_zero_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_never_back_fail_inv_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_always_back_fail_decc_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_less_back_fail_repl_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_greater_back_fail_wrap_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_not_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_always_back_fail_keep_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_not_equal_back_fail_wrap_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_not_equal_back_fail_zero_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_less_or_equal_back_fail_decw_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_less_back_fail_decc_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_never_back_fail_incc_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_greater_back_fail_repl_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_not_equal_back_fail_wrap_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_never_back_fail_decw_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_less_back_fail_keep_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_less_back_fail_inv_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_not_equal_back_fail_repl_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_greater_back_fail_wrap_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_less_back_fail_decc_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_always_back_fail_decw_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_greater_back_fail_incc_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_less_back_fail_wrap_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_not_equal_back_fail_keep_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_always_back_fail_inv_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_not_equal_back_fail_decc_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_less_back_fail_keep_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_never_back_fail_decw_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_less_back_fail_decw_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_greater_back_fail_inv_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_less_back_fail_repl_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_not_equal_back_fail_decc_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_greater_back_fail_repl_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_equal_back_fail_decw_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_less_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_not_equal_back_fail_keep_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_never_back_fail_decc_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_not_equal_back_fail_repl_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_never_back_fail_zero_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_less_back_fail_incc_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_not_equal_back_fail_keep_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_never_back_fail_wrap_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_greater_back_fail_keep_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_equal_back_fail_keep_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_less_back_fail_decc_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_less_or_equal_back_fail_zero_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_never_back_fail_decw_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_always_back_fail_decw_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_less_or_equal_back_fail_zero_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_greater_back_fail_repl_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_never_back_fail_inv_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_not_equal_back_fail_zero_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_equal_back_fail_keep_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_less_back_fail_decc_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_always_back_fail_zero_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_not_equal_back_fail_repl_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_less_back_fail_wrap_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_greater_back_fail_decc_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_less_back_fail_decc_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_never_back_fail_wrap_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_never_back_fail_wrap_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_less_back_fail_inv_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_not_equal_back_fail_inv_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_always_back_fail_zero_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_less_back_fail_inv_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_always_back_fail_keep_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_always_back_fail_incc_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_not_equal_back_fail_decc_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_less_or_equal_back_fail_repl_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_less_back_fail_keep_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_less_back_fail_wrap_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_never_back_fail_decc_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_greater_back_fail_zero_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_less_back_fail_keep_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_equal_back_fail_wrap_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_not_equal_back_fail_decw_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_equal_back_fail_inv_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_always_back_fail_repl_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_equal_back_fail_inv_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_always_back_fail_decc_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_not_equal_back_fail_incc_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_less_back_fail_keep_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_less_back_fail_decc_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_never_back_fail_keep_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_less_back_fail_wrap_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_greater_back_fail_keep_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_never_back_fail_keep_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_equal_back_fail_repl_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_less_back_fail_zero_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_not_equal_back_fail_decc_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_equal_back_fail_zero_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_greater_back_fail_wrap_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_greater_back_fail_decw_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_never_back_fail_incc_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_always_back_fail_decw_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_not_equal_back_fail_decw_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_less_back_fail_repl_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_never_back_fail_incc_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_never_back_fail_repl_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_greater_back_fail_incc_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_greater_back_fail_incc_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_always_back_fail_decc_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_equal_back_fail_wrap_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_less_back_fail_inv_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_never_back_fail_keep_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_not_equal_back_fail_inv_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_less_back_fail_decw_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_always_back_fail_inv_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_greater_back_fail_incc_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_always_back_fail_inv_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_always_back_fail_incc_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_not_equal_back_fail_keep_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_not_equal_back_fail_inv_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_not_equal_back_fail_decc_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_never_back_fail_decw_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_not_equal_back_fail_decc_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_always_back_fail_incc_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_always_back_fail_keep_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_equal_back_fail_zero_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_not_equal_back_fail_incc_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_equal_back_fail_inv_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_not_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_equal_back_fail_keep_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_never_back_fail_wrap_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_equal_back_fail_incc_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_always_back_fail_inv_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_greater_back_fail_wrap_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_always_back_fail_decw_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_always_back_fail_repl_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_always_back_fail_inv_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_equal_back_fail_keep_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_greater_back_fail_incc_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_not_equal_back_fail_inv_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_less_or_equal_back_fail_incc_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_equal_back_fail_keep_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_never_back_fail_wrap_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_never_back_fail_wrap_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_equal_back_fail_decw_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_never_back_fail_incc_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_always_back_fail_decw_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_equal_back_fail_keep_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_never_back_fail_keep_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_not_equal_back_fail_inv_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_less_or_equal_back_fail_incc_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_equal_back_fail_keep_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_always_back_fail_decc_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_less_back_fail_decw_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_always_back_fail_inv_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_less_back_fail_keep_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_equal_back_fail_wrap_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_less_back_fail_inv_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_equal_back_fail_incc_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_never_back_fail_wrap_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_less_back_fail_decw_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_not_equal_back_fail_keep_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_never_back_fail_zero_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_not_equal_back_fail_decc_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_equal_back_fail_wrap_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_not_equal_back_fail_zero_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_equal_back_fail_repl_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_always_back_fail_zero_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_less_back_fail_decc_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_less_or_equal_back_fail_decw_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_greater_back_fail_repl_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_never_back_fail_decw_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_equal_back_fail_decc_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_equal_back_fail_incc_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_equal_back_fail_incc_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_not_equal_back_fail_incc_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_never_back_fail_inv_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_greater_back_fail_zero_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_greater_back_fail_decc_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_never_back_fail_keep_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_equal_back_fail_inv_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_always_back_fail_decc_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_less_or_equal_back_fail_decc_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_always_back_fail_inv_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_greater_back_fail_inv_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_always_back_fail_keep_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_not_equal_back_fail_zero_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_less_back_fail_wrap_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_greater_back_fail_repl_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_equal_back_fail_zero_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_less_back_fail_decw_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_greater_back_fail_keep_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_never_back_fail_wrap_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_equal_back_fail_decw_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_equal_back_fail_zero_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_greater_back_fail_zero_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_never_back_fail_incc_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_never_back_fail_repl_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_always_back_fail_incc_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_never_back_fail_decw_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_greater_back_fail_decc_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_greater_back_fail_decw_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_less_back_fail_zero_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_less_or_equal_back_fail_inv_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_not_equal_back_fail_incc_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_always_back_fail_keep_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_equal_back_fail_incc_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_less_back_fail_inv_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_always_back_fail_wrap_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_always_back_fail_decc_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_less_back_fail_repl_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_not_equal_back_fail_keep_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_not_equal_back_fail_keep_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_less_or_equal_back_fail_repl_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_equal_back_fail_incc_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_never_back_fail_decw_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_always_back_fail_repl_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_not_equal_back_fail_keep_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_not_equal_back_fail_decc_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_not_equal_back_fail_wrap_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_less_or_equal_back_fail_incc_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_equal_back_fail_decc_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_greater_back_fail_inv_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_greater_back_fail_keep_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_greater_back_fail_decw_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_not_equal_back_fail_inv_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_greater_back_fail_incc_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_never_back_fail_decc_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_not_equal_back_fail_repl_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_greater_back_fail_repl_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_never_back_fail_inv_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_greater_back_fail_zero_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_not_equal_back_fail_inv_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_less_back_fail_wrap_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_greater_back_fail_incc_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_not_equal_back_fail_decw_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_not_equal_back_fail_repl_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_less_back_fail_zero_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_not_equal_back_fail_wrap_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_not_equal_back_fail_decc_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_never_back_fail_incc_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_equal_back_fail_incc_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_not_equal_back_fail_repl_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_less_or_equal_back_fail_repl_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_less_back_fail_decc_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_equal_back_fail_keep_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_less_back_fail_decc_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_always_back_fail_zero_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_always_back_fail_wrap_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_never_back_fail_decc_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_greater_back_fail_wrap_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_less_back_fail_inv_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_always_back_fail_wrap_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_equal_back_fail_decc_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_less_back_fail_incc_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_never_back_fail_repl_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_never_back_fail_decc_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_always_back_fail_decc_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_greater_back_fail_zero_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_not_equal_back_fail_repl_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_equal_back_fail_incc_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_equal_back_fail_incc_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_always_back_fail_zero_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_never_back_fail_repl_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_never_back_fail_repl_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_greater_back_fail_decc_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_equal_back_fail_zero_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_less_back_fail_repl_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_never_back_fail_keep_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_not_equal_back_fail_inv_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_never_back_fail_decc_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_not_equal_back_fail_incc_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_never_back_fail_zero_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_equal_back_fail_inv_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_always_back_fail_inv_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_greater_back_fail_wrap_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_less_back_fail_inv_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_greater_back_fail_inv_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_less_back_fail_zero_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_equal_back_fail_decc_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_always_back_fail_decw_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_always_back_fail_repl_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_never_back_fail_repl_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_less_or_equal_back_fail_zero_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_not_equal_back_fail_wrap_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_less_or_equal_back_fail_keep_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_always_back_fail_keep_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_always_back_fail_decc_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_equal_back_fail_repl_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_greater_back_fail_inv_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_less_back_fail_decw_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_greater_back_fail_wrap_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_never_back_fail_keep_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_less_back_fail_keep_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_less_back_fail_repl_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_never_back_fail_inv_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_less_back_fail_repl_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_less_back_fail_wrap_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_less_or_equal_back_fail_incc_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_greater_back_fail_decc_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_not_equal_back_fail_zero_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_greater_back_fail_repl_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_never_back_fail_decw_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_less_back_fail_decw_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_greater_back_fail_incc_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_less_or_equal_back_fail_keep_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_less_back_fail_zero_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_never_back_fail_wrap_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_less_back_fail_decw_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_greater_back_fail_repl_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_equal_back_fail_decw_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_never_back_fail_repl_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_equal_back_fail_keep_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_greater_back_fail_decw_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_not_equal_back_fail_incc_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_not_equal_back_fail_inv_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_greater_back_fail_incc_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_never_back_fail_wrap_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_less_back_fail_repl_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_less_or_equal_back_fail_repl_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_never_back_fail_zero_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_greater_back_fail_decw_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_always_back_fail_decc_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_not_equal_back_fail_keep_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_less_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_greater_back_fail_repl_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_always_back_fail_decc_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_never_back_fail_zero_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_never_back_fail_wrap_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_less_back_fail_inv_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_less_or_equal_back_fail_repl_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_not_equal_back_fail_incc_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_less_back_fail_keep_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_not_equal_back_fail_inv_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_never_back_fail_wrap_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_equal_back_fail_incc_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_not_equal_back_fail_repl_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_equal_back_fail_wrap_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_never_back_fail_inv_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_never_back_fail_incc_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_never_back_fail_zero_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_never_back_fail_repl_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_always_back_fail_decw_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_less_back_fail_inv_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_not_equal_back_fail_repl_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_greater_back_fail_inv_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_less_back_fail_zero_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_less_back_fail_decw_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_less_back_fail_decw_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_always_back_fail_decc_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_less_or_equal_back_fail_repl_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_always_back_fail_inv_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_always_back_fail_inv_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_not_equal_back_fail_keep_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_less_back_fail_incc_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_less_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_never_back_fail_keep_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_greater_back_fail_zero_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_less_or_equal_back_fail_decw_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_not_equal_back_fail_keep_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_never_back_fail_decw_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_always_back_fail_zero_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_not_equal_back_fail_keep_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_never_back_fail_inv_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_not_equal_back_fail_decc_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_always_back_fail_inv_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_decc_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_never_back_fail_inv_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_never_back_fail_zero_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_less_back_fail_wrap_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_equal_back_fail_decw_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_not_equal_back_fail_keep_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_never_back_fail_inv_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_never_back_fail_repl_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_not_equal_back_fail_incc_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_always_back_fail_decw_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_always_back_fail_inv_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_less_back_fail_decc_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_never_back_fail_zero_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_not_equal_back_fail_inv_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_not_equal_back_fail_inv_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_always_back_fail_keep_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_greater_back_fail_decw_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_not_equal_back_fail_wrap_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_less_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_greater_back_fail_decc_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_greater_back_fail_decc_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_not_equal_back_fail_zero_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_greater_back_fail_incc_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_less_back_fail_repl_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_equal_back_fail_inv_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_less_or_equal_back_fail_incc_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_equal_back_fail_decc_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_always_back_fail_inv_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_never_back_fail_wrap_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_not_equal_back_fail_keep_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_always_back_fail_decw_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_never_back_fail_decc_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_never_back_fail_incc_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_never_back_fail_decw_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_less_back_fail_wrap_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_never_back_fail_keep_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_greater_back_fail_decw_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_never_back_fail_incc_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_less_back_fail_keep_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_equal_back_fail_inv_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_greater_back_fail_decc_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_equal_back_fail_zero_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_less_back_fail_wrap_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_never_back_fail_incc_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_less_back_fail_repl_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_equal_back_fail_repl_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_not_equal_back_fail_keep_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_less_back_fail_wrap_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_greater_back_fail_repl_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_less_back_fail_incc_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_greater_back_fail_repl_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_always_back_fail_repl_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_never_back_fail_decc_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_never_back_fail_repl_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_never_back_fail_decc_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_not_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_greater_back_fail_keep_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_never_back_fail_decw_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_greater_back_fail_incc_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_less_back_fail_inv_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_never_back_fail_decw_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_less_back_fail_wrap_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_never_back_fail_zero_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_less_back_fail_decw_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_not_equal_back_fail_wrap_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_greater_back_fail_decw_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_greater_back_fail_zero_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_not_equal_back_fail_zero_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_equal_back_fail_decc_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_never_back_fail_zero_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_never_back_fail_incc_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_equal_back_fail_incc_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_never_back_fail_incc_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_equal_back_fail_wrap_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_not_equal_back_fail_repl_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_not_equal_back_fail_decc_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_equal_back_fail_decc_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_greater_back_fail_repl_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_equal_back_fail_decc_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_less_back_fail_incc_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_not_equal_back_fail_decc_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_never_back_fail_zero_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_greater_back_fail_incc_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_equal_back_fail_keep_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_never_back_fail_keep_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_less_back_fail_repl_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_greater_back_fail_repl_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_less_back_fail_wrap_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_never_back_fail_zero_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_equal_back_fail_repl_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_not_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_not_equal_back_fail_decc_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_less_back_fail_repl_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_always_back_fail_wrap_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_equal_back_fail_repl_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_never_back_fail_decw_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_not_equal_back_fail_repl_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_less_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_never_back_fail_decc_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_never_back_fail_keep_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_equal_back_fail_incc_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_less_back_fail_zero_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_less_back_fail_inv_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_not_equal_back_fail_keep_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_equal_back_fail_incc_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_less_back_fail_decw_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_less_back_fail_decw_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_not_equal_back_fail_repl_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_always_back_fail_wrap_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_not_equal_back_fail_decc_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_always_back_fail_keep_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_equal_back_fail_wrap_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_always_back_fail_wrap_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_less_or_equal_back_fail_incc_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_greater_back_fail_decw_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_equal_back_fail_wrap_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_not_equal_back_fail_inv_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_equal_back_fail_decw_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_not_equal_back_fail_incc_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_less_back_fail_decw_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_never_back_fail_incc_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_greater_back_fail_zero_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_not_equal_back_fail_decc_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_less_back_fail_repl_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_less_back_fail_keep_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_never_back_fail_decw_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_less_or_equal_back_fail_decc_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_not_equal_back_fail_repl_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_always_back_fail_zero_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_less_or_equal_back_fail_decw_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_always_back_fail_wrap_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_equal_back_fail_keep_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_less_back_fail_decw_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_not_equal_back_fail_decc_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_equal_back_fail_repl_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_always_back_fail_wrap_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_always_back_fail_wrap_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_not_equal_back_fail_incc_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_always_back_fail_decw_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_less_back_fail_decw_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_less_or_equal_back_fail_decc_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_less_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_not_equal_back_fail_decw_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_less_back_fail_decw_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_less_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_greater_back_fail_keep_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_never_back_fail_decw_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_less_back_fail_inv_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_never_back_fail_repl_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_less_back_fail_zero_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_less_back_fail_zero_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_equal_back_fail_decw_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_greater_back_fail_repl_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_never_back_fail_keep_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_repl_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_never_back_fail_zero_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_not_equal_back_fail_incc_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_never_back_fail_zero_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_never_back_fail_decc_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_less_back_fail_zero_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_less_back_fail_decw_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_less_back_fail_zero_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_less_back_fail_inv_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_greater_back_fail_zero_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_not_equal_back_fail_inv_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_greater_back_fail_wrap_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_greater_back_fail_keep_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_always_back_fail_decc_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_never_back_fail_incc_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_equal_back_fail_zero_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_not_equal_back_fail_zero_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_not_equal_back_fail_keep_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_less_back_fail_decw_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_less_back_fail_repl_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_less_back_fail_incc_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_never_back_fail_wrap_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_keep_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_greater_back_fail_incc_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_less_back_fail_keep_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_less_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_less_back_fail_decc_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_equal_back_fail_incc_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_never_back_fail_decw_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_always_back_fail_decc_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_always_back_fail_wrap_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_always_back_fail_repl_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_greater_back_fail_zero_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_never_back_fail_inv_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_less_back_fail_repl_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_not_equal_back_fail_keep_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_always_back_fail_repl_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_not_equal_back_fail_decc_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_never_back_fail_wrap_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_always_back_fail_incc_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_not_equal_back_fail_keep_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_always_back_fail_decc_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_never_back_fail_keep_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_never_back_fail_wrap_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_greater_back_fail_zero_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_not_equal_back_fail_inv_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_less_back_fail_keep_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_not_equal_back_fail_inv_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_not_equal_back_fail_repl_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_equal_back_fail_decw_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_not_equal_back_fail_wrap_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_less_back_fail_inv_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_greater_back_fail_decw_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_greater_back_fail_incc_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_equal_back_fail_wrap_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_not_equal_back_fail_inv_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_equal_back_fail_inv_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_less_back_fail_keep_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_greater_back_fail_inv_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_not_equal_back_fail_decc_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_less_back_fail_zero_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_never_back_fail_incc_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_less_back_fail_decc_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_greater_back_fail_decc_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_always_back_fail_decc_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_not_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_always_back_fail_decc_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_equal_back_fail_repl_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_equal_back_fail_zero_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_always_back_fail_decw_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_always_back_fail_decc_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_always_back_fail_decc_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_less_back_fail_keep_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_not_equal_back_fail_wrap_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_not_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_never_back_fail_zero_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_greater_back_fail_inv_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_greater_back_fail_decc_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_less_back_fail_zero_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_not_equal_back_fail_incc_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_less_back_fail_decw_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_always_back_fail_incc_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_greater_back_fail_repl_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_not_equal_back_fail_zero_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_not_equal_back_fail_wrap_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_never_back_fail_inv_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_always_back_fail_wrap_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_equal_back_fail_repl_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_less_back_fail_decc_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_never_back_fail_decw_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_never_back_fail_decc_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_equal_back_fail_zero_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_not_equal_back_fail_incc_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_not_equal_back_fail_zero_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_equal_back_fail_decc_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_not_equal_back_fail_decw_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_always_back_fail_keep_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_equal_back_fail_keep_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_not_equal_back_fail_zero_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_greater_back_fail_decw_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_not_equal_back_fail_repl_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_never_back_fail_inv_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_less_back_fail_repl_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decw_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_always_back_fail_repl_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_less_back_fail_incc_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_equal_back_fail_inv_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_less_back_fail_keep_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_never_back_fail_repl_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_less_back_fail_keep_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decc_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_always_back_fail_decw_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_less_or_equal_back_fail_incc_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_not_equal_back_fail_repl_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_not_equal_back_fail_decc_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_greater_back_fail_incc_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_greater_back_fail_inv_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_less_back_fail_decc_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_greater_back_fail_inv_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_never_back_fail_decc_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_always_back_fail_decw_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_equal_back_fail_repl_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_less_back_fail_repl_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_equal_back_fail_decc_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_less_back_fail_repl_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_always_back_fail_decc_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_always_back_fail_zero_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_equal_back_fail_decw_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_not_equal_back_fail_incc_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_equal_back_fail_decc_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_less_or_equal_back_fail_keep_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_always_back_fail_decc_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_greater_back_fail_decc_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_always_back_fail_wrap_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_less_back_fail_decc_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_less_back_fail_wrap_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_greater_back_fail_repl_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_less_or_equal_back_fail_keep_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_not_equal_back_fail_wrap_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_never_back_fail_decw_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_always_back_fail_decc_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_always_back_fail_inv_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_always_back_fail_keep_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_greater_back_fail_zero_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_never_back_fail_keep_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_always_back_fail_repl_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_equal_back_fail_incc_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_not_equal_back_fail_decw_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_less_back_fail_decw_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_greater_back_fail_decw_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_greater_back_fail_keep_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_greater_back_fail_zero_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_greater_back_fail_inv_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_less_back_fail_decc_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_always_back_fail_inv_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_never_back_fail_decw_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_never_back_fail_wrap_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_greater_back_fail_decw_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_greater_back_fail_incc_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_not_equal_back_fail_wrap_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_greater_back_fail_inv_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_greater_back_fail_incc_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_not_equal_back_fail_decw_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_greater_back_fail_wrap_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_less_back_fail_decw_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_less_back_fail_incc_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_always_back_fail_decc_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_always_back_fail_repl_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_decc_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_not_equal_back_fail_decw_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_never_back_fail_wrap_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_never_back_fail_incc_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_never_back_fail_inv_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_never_back_fail_repl_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_not_equal_back_fail_wrap_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_never_back_fail_decw_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_always_back_fail_repl_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_less_back_fail_keep_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_always_back_fail_inv_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_always_back_fail_keep_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_never_back_fail_wrap_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_not_equal_back_fail_zero_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_less_back_fail_repl_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_always_back_fail_zero_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_less_back_fail_incc_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_equal_back_fail_zero_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_equal_back_fail_decc_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_greater_back_fail_decw_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_always_back_fail_repl_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_always_back_fail_wrap_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_equal_back_fail_decc_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_never_back_fail_wrap_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_greater_back_fail_inv_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_always_back_fail_keep_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_not_equal_back_fail_incc_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_less_back_fail_incc_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_greater_back_fail_wrap_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_less_back_fail_decw_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_greater_back_fail_decc_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_greater_back_fail_wrap_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_incc_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_always_back_fail_decw_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_equal_back_fail_inv_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_greater_back_fail_inv_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_never_back_fail_zero_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_less_back_fail_decw_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_greater_back_fail_repl_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_equal_back_fail_decw_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_greater_back_fail_wrap_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_greater_back_fail_decc_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_greater_back_fail_inv_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_not_equal_back_fail_zero_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_not_equal_back_fail_keep_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_not_equal_back_fail_decw_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_equal_back_fail_keep_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_equal_back_fail_keep_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_always_back_fail_zero_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_never_back_fail_zero_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_always_back_fail_incc_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_greater_back_fail_decw_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_equal_back_fail_decc_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_greater_back_fail_decw_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_less_back_fail_keep_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_equal_back_fail_incc_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_never_back_fail_zero_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_less_back_fail_inv_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_always_back_fail_keep_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_always_back_fail_decc_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_equal_back_fail_incc_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_less_back_fail_zero_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_less_back_fail_zero_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_less_or_equal_back_fail_incc_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_always_back_fail_zero_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_equal_back_fail_zero_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_equal_back_fail_decc_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_never_back_fail_inv_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_greater_back_fail_keep_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_greater_back_fail_incc_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_not_equal_back_fail_repl_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_never_back_fail_decw_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_less_back_fail_decw_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_equal_back_fail_inv_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_not_equal_back_fail_wrap_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_equal_back_fail_inv_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_not_equal_back_fail_decc_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_never_back_fail_wrap_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_less_or_equal_back_fail_keep_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_greater_back_fail_wrap_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_equal_back_fail_wrap_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_less_back_fail_decw_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_greater_back_fail_keep_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_less_back_fail_decc_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_equal_back_fail_repl_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_always_back_fail_decw_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_never_back_fail_zero_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_greater_back_fail_zero_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_less_back_fail_incc_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_equal_back_fail_wrap_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_equal_back_fail_keep_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_not_equal_back_fail_keep_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_not_equal_back_fail_inv_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_always_back_fail_zero_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_greater_back_fail_wrap_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_less_back_fail_incc_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_less_back_fail_keep_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_greater_back_fail_zero_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_greater_back_fail_repl_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_less_back_fail_repl_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_always_back_fail_inv_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_less_or_equal_back_fail_decw_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_equal_back_fail_repl_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_equal_back_fail_wrap_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_equal_back_fail_incc_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_equal_back_fail_zero_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_less_or_equal_back_fail_decw_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_not_equal_back_fail_repl_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_always_back_fail_keep_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_equal_back_fail_keep_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_always_back_fail_inv_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_not_equal_back_fail_decc_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_equal_back_fail_decw_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_less_or_equal_back_fail_keep_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_always_back_fail_incc_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_not_equal_back_fail_inv_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_less_back_fail_decw_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_never_back_fail_wrap_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_greater_back_fail_decw_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_less_back_fail_repl_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_greater_back_fail_wrap_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_less_or_equal_back_fail_repl_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_not_equal_back_fail_wrap_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_less_back_fail_decc_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_greater_back_fail_decc_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_greater_back_fail_repl_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_never_back_fail_zero_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_less_or_equal_back_fail_decc_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_less_back_fail_wrap_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_not_equal_back_fail_keep_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_greater_back_fail_repl_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_not_equal_back_fail_repl_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_less_back_fail_decw_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_less_back_fail_wrap_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_not_equal_back_fail_wrap_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_always_back_fail_wrap_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_less_back_fail_decw_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_less_back_fail_wrap_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_always_back_fail_wrap_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_equal_back_fail_decc_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_less_or_equal_back_fail_decw_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_never_back_fail_keep_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_greater_back_fail_incc_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_equal_back_fail_zero_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_not_equal_back_fail_incc_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_equal_back_fail_keep_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_less_back_fail_zero_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_not_equal_back_fail_inv_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_less_back_fail_inv_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_equal_back_fail_zero_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decw_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_less_back_fail_decc_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_not_equal_back_fail_incc_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_equal_back_fail_decc_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_not_equal_back_fail_repl_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_not_equal_back_fail_inv_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_always_back_fail_wrap_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_equal_back_fail_inv_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_equal_back_fail_wrap_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_greater_back_fail_inv_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_less_back_fail_wrap_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_less_back_fail_decc_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_less_back_fail_incc_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_less_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_less_back_fail_repl_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_always_back_fail_decw_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_equal_back_fail_repl_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_not_equal_back_fail_zero_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_equal_back_fail_zero_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_equal_back_fail_inv_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_not_equal_back_fail_wrap_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_always_back_fail_wrap_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_greater_back_fail_decw_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_not_equal_back_fail_incc_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_less_back_fail_decc_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_never_back_fail_decw_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_greater_back_fail_zero_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_less_or_equal_back_fail_zero_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_equal_back_fail_keep_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_always_back_fail_incc_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_greater_back_fail_decw_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_not_equal_back_fail_keep_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_equal_back_fail_decw_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_less_back_fail_keep_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_greater_back_fail_incc_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_never_back_fail_wrap_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_greater_back_fail_decw_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_not_equal_back_fail_zero_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_not_equal_back_fail_wrap_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_never_back_fail_zero_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_not_equal_back_fail_decw_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_equal_back_fail_inv_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_not_equal_back_fail_repl_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_equal_back_fail_decw_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_always_back_fail_inv_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_less_or_equal_back_fail_repl_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_equal_back_fail_decw_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_always_back_fail_wrap_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_equal_back_fail_decw_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_always_back_fail_inv_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_less_or_equal_back_fail_incc_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_always_back_fail_inv_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_less_or_equal_back_fail_decw_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decc_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_less_back_fail_decw_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_never_back_fail_inv_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_less_back_fail_keep_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_not_equal_back_fail_decw_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_less_back_fail_inv_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_not_equal_back_fail_repl_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_equal_back_fail_zero_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_never_back_fail_keep_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_equal_back_fail_inv_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_never_back_fail_decc_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_equal_back_fail_incc_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_always_back_fail_incc_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_less_back_fail_inv_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_less_or_equal_back_fail_inv_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_always_back_fail_zero_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_greater_back_fail_incc_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_less_back_fail_decc_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_equal_back_fail_wrap_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_greater_back_fail_repl_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_not_equal_back_fail_inv_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_never_back_fail_keep_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_never_back_fail_incc_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_less_back_fail_incc_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_less_or_equal_back_fail_decc_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_greater_back_fail_repl_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_equal_back_fail_repl_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_always_back_fail_incc_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_less_back_fail_inv_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_not_equal_back_fail_decc_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_never_back_fail_decc_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_greater_back_fail_repl_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_not_equal_back_fail_zero_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_equal_back_fail_repl_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_never_back_fail_inv_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_less_back_fail_decw_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_less_or_equal_back_fail_decw_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_not_equal_back_fail_repl_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_greater_back_fail_repl_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_always_back_fail_repl_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_always_back_fail_decc_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_never_back_fail_keep_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_equal_back_fail_keep_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_equal_back_fail_keep_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_never_back_fail_decc_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_never_back_fail_decw_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_never_back_fail_decw_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_not_equal_back_fail_repl_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_greater_back_fail_zero_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_less_back_fail_decc_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_greater_back_fail_repl_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_always_back_fail_zero_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_greater_back_fail_decc_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_less_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_greater_back_fail_repl_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_equal_back_fail_incc_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_equal_back_fail_inv_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_greater_back_fail_repl_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_less_back_fail_decc_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_not_equal_back_fail_decc_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_less_back_fail_zero_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_never_back_fail_decc_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_not_equal_back_fail_decc_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_always_back_fail_inv_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_not_equal_back_fail_decc_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_less_back_fail_decw_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_equal_back_fail_incc_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_greater_back_fail_zero_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_greater_back_fail_zero_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_less_back_fail_zero_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_equal_back_fail_decc_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_not_equal_back_fail_decc_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_always_back_fail_repl_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_never_back_fail_decc_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_not_equal_back_fail_decc_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_greater_back_fail_wrap_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_always_back_fail_wrap_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_never_back_fail_decw_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_not_equal_back_fail_incc_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_always_back_fail_zero_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_equal_back_fail_decw_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_equal_back_fail_repl_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_equal_back_fail_wrap_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_greater_back_fail_keep_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_never_back_fail_repl_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_less_back_fail_zero_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_always_back_fail_incc_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_never_back_fail_incc_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_equal_back_fail_decc_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_equal_back_fail_inv_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_not_equal_back_fail_keep_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_never_back_fail_inv_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_less_or_equal_back_fail_keep_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_greater_back_fail_inv_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_never_back_fail_wrap_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_equal_back_fail_zero_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_less_or_equal_back_fail_keep_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_equal_back_fail_wrap_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_zero_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_not_equal_back_fail_keep_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_less_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_greater_back_fail_decw_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_never_back_fail_wrap_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_not_equal_back_fail_repl_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_equal_back_fail_inv_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_always_back_fail_inv_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_less_back_fail_decc_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_less_back_fail_inv_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_greater_back_fail_zero_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_greater_back_fail_zero_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_less_back_fail_decc_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_never_back_fail_keep_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_greater_back_fail_inv_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_always_back_fail_decc_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_less_or_equal_back_fail_zero_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_not_equal_back_fail_zero_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_equal_back_fail_incc_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_greater_back_fail_repl_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_always_back_fail_inv_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_less_back_fail_keep_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_not_equal_back_fail_incc_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_greater_back_fail_repl_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_greater_back_fail_repl_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_equal_back_fail_keep_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_not_equal_back_fail_repl_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_less_or_equal_back_fail_zero_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_not_equal_back_fail_keep_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_greater_back_fail_inv_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_not_equal_back_fail_wrap_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_greater_back_fail_decw_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_never_back_fail_inv_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_greater_back_fail_inv_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_less_back_fail_decc_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_not_equal_back_fail_zero_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_never_back_fail_wrap_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_not_equal_back_fail_zero_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_always_back_fail_wrap_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_not_equal_back_fail_inv_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_not_equal_back_fail_wrap_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_never_back_fail_keep_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_always_back_fail_keep_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_equal_back_fail_keep_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_equal_back_fail_wrap_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_less_back_fail_repl_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_not_equal_back_fail_decc_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_less_or_equal_back_fail_decw_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_equal_back_fail_wrap_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_equal_back_fail_repl_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_not_equal_back_fail_keep_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_less_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_less_or_equal_back_fail_keep_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_equal_back_fail_zero_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_always_back_fail_wrap_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_never_back_fail_decw_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_less_back_fail_zero_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_not_equal_back_fail_inv_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_equal_back_fail_incc_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_not_equal_back_fail_inv_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_less_or_equal_back_fail_incc_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_equal_back_fail_repl_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_always_back_fail_incc_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_less_or_equal_back_fail_decc_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_less_back_fail_incc_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_greater_back_fail_inv_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_always_back_fail_decc_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_less_back_fail_zero_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_not_equal_back_fail_inv_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_equal_back_fail_keep_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_less_back_fail_incc_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_always_back_fail_repl_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_not_equal_back_fail_inv_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_greater_back_fail_decc_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_equal_back_fail_inv_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_equal_back_fail_inv_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_not_equal_back_fail_decw_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_equal_back_fail_inv_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_not_equal_back_fail_repl_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_never_back_fail_repl_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_less_back_fail_decw_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_equal_back_fail_zero_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_equal_back_fail_keep_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_less_back_fail_wrap_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_always_back_fail_decw_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_never_back_fail_incc_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_not_equal_back_fail_keep_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_equal_back_fail_wrap_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_not_equal_back_fail_repl_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_less_back_fail_repl_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_less_or_equal_back_fail_zero_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_never_back_fail_decw_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_less_or_equal_back_fail_incc_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_always_back_fail_repl_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_less_back_fail_zero_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_greater_back_fail_inv_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_always_back_fail_decw_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_equal_back_fail_repl_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_always_back_fail_keep_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_less_back_fail_repl_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_always_back_fail_zero_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_greater_back_fail_decw_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_always_back_fail_decw_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_equal_back_fail_zero_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_less_or_equal_back_fail_inv_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_less_or_equal_back_fail_repl_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_equal_back_fail_keep_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_less_back_fail_decw_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_equal_back_fail_incc_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_never_back_fail_keep_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_less_back_fail_repl_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_greater_back_fail_inv_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_less_back_fail_decc_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_greater_back_fail_keep_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_equal_back_fail_decw_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_equal_back_fail_incc_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_not_equal_back_fail_inv_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_not_equal_back_fail_wrap_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_less_back_fail_zero_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_greater_back_fail_incc_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_never_back_fail_incc_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_not_equal_back_fail_decw_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_always_back_fail_incc_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_greater_back_fail_wrap_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_greater_back_fail_wrap_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_equal_back_fail_incc_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_less_back_fail_keep_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_not_equal_back_fail_inv_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_less_or_equal_back_fail_inv_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_never_back_fail_decc_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_not_equal_back_fail_wrap_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_less_back_fail_repl_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_greater_back_fail_repl_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_equal_back_fail_repl_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_always_back_fail_decc_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_less_back_fail_decw_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_equal_back_fail_repl_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_never_back_fail_repl_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_never_back_fail_inv_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_less_back_fail_decw_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_not_equal_back_fail_keep_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_always_back_fail_keep_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_less_back_fail_decc_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_equal_back_fail_wrap_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_always_back_fail_repl_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_less_back_fail_repl_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_never_back_fail_repl_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_never_back_fail_incc_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_incc_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_not_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_not_equal_back_fail_decc_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_always_back_fail_wrap_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_equal_back_fail_decw_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_equal_back_fail_keep_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_greater_back_fail_keep_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_always_back_fail_decw_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_always_back_fail_decc_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_less_back_fail_zero_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_never_back_fail_zero_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_always_back_fail_keep_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_less_back_fail_decw_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_never_back_fail_incc_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_never_back_fail_incc_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_never_back_fail_decw_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_always_back_fail_incc_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_always_back_fail_wrap_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_always_back_fail_inv_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_always_back_fail_inv_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_never_back_fail_zero_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_greater_back_fail_repl_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_equal_back_fail_inv_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_less_or_equal_back_fail_decc_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_equal_back_fail_decc_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_always_back_fail_keep_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_less_back_fail_zero_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_equal_back_fail_decc_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_greater_back_fail_keep_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_equal_back_fail_keep_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_less_back_fail_inv_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_greater_back_fail_keep_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_equal_back_fail_repl_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_never_back_fail_keep_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_always_back_fail_inv_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_never_back_fail_wrap_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_less_back_fail_zero_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_not_equal_back_fail_keep_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_not_equal_back_fail_inv_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_greater_back_fail_repl_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_less_back_fail_keep_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_less_back_fail_zero_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_not_equal_back_fail_decw_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_always_back_fail_zero_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_less_back_fail_decc_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_always_back_fail_keep_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_never_back_fail_incc_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_equal_back_fail_inv_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_incc_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_less_back_fail_repl_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_less_or_equal_back_fail_zero_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_greater_back_fail_zero_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_less_back_fail_incc_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_always_back_fail_decw_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_always_back_fail_decc_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_always_back_fail_incc_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_not_equal_back_fail_keep_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_less_back_fail_wrap_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_always_back_fail_zero_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_equal_back_fail_inv_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_greater_back_fail_decw_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_never_back_fail_keep_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_equal_back_fail_incc_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_greater_back_fail_incc_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_greater_back_fail_keep_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_never_back_fail_decc_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_never_back_fail_decc_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_not_equal_back_fail_wrap_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_less_or_equal_back_fail_decw_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_not_equal_back_fail_keep_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_never_back_fail_wrap_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_equal_back_fail_inv_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_equal_back_fail_repl_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_always_back_fail_inv_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_equal_back_fail_repl_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_always_back_fail_repl_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_never_back_fail_decw_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_less_back_fail_inv_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_never_back_fail_incc_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_greater_back_fail_incc_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_never_back_fail_inv_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_less_back_fail_zero_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_never_back_fail_wrap_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_always_back_fail_decc_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_equal_back_fail_decw_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_greater_back_fail_inv_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_always_back_fail_decw_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_less_back_fail_decc_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_equal_back_fail_incc_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_not_equal_back_fail_incc_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_greater_back_fail_inv_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_not_equal_back_fail_repl_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_not_equal_back_fail_keep_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_always_back_fail_decc_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_not_equal_back_fail_repl_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_never_back_fail_wrap_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_less_or_equal_back_fail_repl_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_greater_back_fail_repl_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_never_back_fail_incc_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_never_back_fail_decc_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_never_back_fail_repl_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_always_back_fail_repl_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_greater_back_fail_zero_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_greater_back_fail_wrap_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_never_back_fail_decc_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_not_equal_back_fail_wrap_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_equal_back_fail_decw_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_less_back_fail_decc_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_never_back_fail_zero_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_not_equal_back_fail_incc_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_always_back_fail_keep_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_never_back_fail_zero_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_not_equal_back_fail_decc_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_less_back_fail_keep_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_always_back_fail_decc_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_greater_back_fail_decc_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_greater_back_fail_decw_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_less_back_fail_decc_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_greater_back_fail_inv_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_less_or_equal_back_fail_repl_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_equal_back_fail_keep_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_always_back_fail_zero_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_not_equal_back_fail_wrap_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_not_equal_back_fail_repl_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_not_equal_back_fail_keep_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_less_or_equal_back_fail_inv_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_greater_back_fail_keep_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_always_back_fail_decc_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_equal_back_fail_decw_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_never_back_fail_incc_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_less_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_never_back_fail_wrap_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_less_or_equal_back_fail_inv_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_equal_back_fail_incc_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_always_back_fail_zero_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_always_back_fail_repl_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_equal_back_fail_zero_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_always_back_fail_repl_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_never_back_fail_keep_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_never_back_fail_incc_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_less_back_fail_inv_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_less_back_fail_zero_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_less_or_equal_back_fail_repl_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_never_back_fail_zero_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_equal_back_fail_keep_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_greater_back_fail_decc_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_greater_back_fail_decw_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_less_back_fail_wrap_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_less_back_fail_zero_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_always_back_fail_decw_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_less_or_equal_back_fail_inv_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_greater_back_fail_keep_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_equal_back_fail_zero_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_always_back_fail_keep_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decc_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_always_back_fail_decc_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_not_equal_back_fail_repl_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_always_back_fail_decc_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_always_back_fail_repl_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_not_equal_back_fail_decc_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_not_equal_back_fail_zero_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_not_equal_back_fail_keep_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_not_equal_back_fail_decc_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_never_back_fail_decw_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_equal_back_fail_zero_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_less_back_fail_repl_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_not_equal_back_fail_decw_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_not_equal_back_fail_keep_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_never_back_fail_wrap_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_never_back_fail_decc_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_less_back_fail_decc_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_equal_back_fail_decw_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_equal_back_fail_keep_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_not_equal_back_fail_decc_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_never_back_fail_decw_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_always_back_fail_inv_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_less_back_fail_incc_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_equal_back_fail_incc_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_not_equal_back_fail_decc_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_greater_back_fail_wrap_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_equal_back_fail_inv_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_never_back_fail_decc_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_always_back_fail_repl_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_greater_back_fail_decc_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_less_back_fail_inv_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_not_equal_back_fail_incc_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_equal_back_fail_zero_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_not_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_less_or_equal_back_fail_zero_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_not_equal_back_fail_decc_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_equal_back_fail_keep_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_never_back_fail_inv_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_always_back_fail_keep_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_always_back_fail_keep_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_not_equal_back_fail_incc_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_less_back_fail_repl_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_always_back_fail_repl_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_less_back_fail_repl_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_greater_back_fail_wrap_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_equal_back_fail_incc_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_less_back_fail_zero_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_equal_back_fail_inv_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_less_back_fail_keep_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_never_back_fail_decc_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_equal_back_fail_inv_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_greater_back_fail_decc_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_never_back_fail_inv_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_equal_back_fail_repl_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_not_equal_back_fail_incc_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_greater_back_fail_keep_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_never_back_fail_inv_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_greater_back_fail_keep_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_equal_back_fail_incc_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_never_back_fail_wrap_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_less_or_equal_back_fail_repl_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_never_back_fail_zero_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_not_equal_back_fail_incc_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_equal_back_fail_zero_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_never_back_fail_zero_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_not_equal_back_fail_inv_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_not_equal_back_fail_decw_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_never_back_fail_inv_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_never_back_fail_inv_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_equal_back_fail_incc_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_less_back_fail_wrap_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_never_back_fail_incc_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_greater_back_fail_keep_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_less_back_fail_decc_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_never_back_fail_keep_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_greater_back_fail_incc_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_not_equal_back_fail_incc_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_always_back_fail_zero_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_less_back_fail_decc_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_not_equal_back_fail_incc_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_equal_back_fail_incc_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_always_back_fail_wrap_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_never_back_fail_wrap_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_less_back_fail_zero_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_greater_back_fail_decw_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_equal_back_fail_decc_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_not_equal_back_fail_repl_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_never_back_fail_repl_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_not_equal_back_fail_decc_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_less_back_fail_repl_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_equal_back_fail_zero_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_less_back_fail_keep_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_equal_back_fail_inv_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_always_back_fail_incc_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_equal_back_fail_repl_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_greater_back_fail_keep_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_greater_back_fail_decc_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_never_back_fail_wrap_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_greater_back_fail_decc_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_less_back_fail_decc_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_equal_back_fail_keep_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_less_back_fail_inv_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_equal_back_fail_keep_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_equal_back_fail_decw_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_never_back_fail_inv_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_not_equal_back_fail_zero_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_equal_back_fail_repl_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_less_or_equal_back_fail_decw_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_greater_back_fail_inv_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_always_back_fail_repl_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_equal_back_fail_keep_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_greater_back_fail_incc_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_never_back_fail_decw_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_less_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_equal_back_fail_keep_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_less_back_fail_repl_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_equal_back_fail_zero_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_not_equal_back_fail_wrap_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_never_back_fail_decc_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_always_back_fail_decc_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_less_back_fail_incc_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_not_equal_back_fail_wrap_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_always_back_fail_incc_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_less_or_equal_back_fail_zero_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_never_back_fail_incc_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_never_back_fail_decc_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_never_back_fail_incc_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_less_or_equal_back_fail_inv_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_never_back_fail_decc_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_never_back_fail_inv_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_greater_back_fail_decc_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_less_back_fail_keep_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_always_back_fail_wrap_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_less_or_equal_back_fail_decc_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_equal_back_fail_wrap_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_equal_back_fail_decw_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_greater_back_fail_incc_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_equal_back_fail_decc_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_less_back_fail_zero_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_less_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_always_back_fail_incc_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_always_back_fail_incc_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_less_back_fail_repl_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_less_back_fail_inv_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_never_back_fail_repl_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_never_back_fail_repl_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_greater_back_fail_repl_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_not_equal_back_fail_decc_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_equal_back_fail_decc_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_greater_back_fail_incc_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_never_back_fail_inv_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_less_back_fail_decc_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_never_back_fail_keep_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_equal_back_fail_repl_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_never_back_fail_decw_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_always_back_fail_incc_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_not_equal_back_fail_wrap_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_greater_back_fail_keep_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_not_equal_back_fail_incc_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_less_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_not_equal_back_fail_repl_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_never_back_fail_wrap_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_greater_back_fail_decw_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_less_back_fail_keep_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_less_back_fail_repl_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_equal_back_fail_zero_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_always_back_fail_keep_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_less_or_equal_back_fail_inv_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_not_equal_back_fail_wrap_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_equal_back_fail_repl_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_always_back_fail_wrap_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_not_equal_back_fail_keep_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_equal_back_fail_zero_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_always_back_fail_repl_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_never_back_fail_repl_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_not_equal_back_fail_repl_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_never_back_fail_zero_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_never_back_fail_decw_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_never_back_fail_decw_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_equal_back_fail_wrap_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_equal_back_fail_decc_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_never_back_fail_decc_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_never_back_fail_keep_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_always_back_fail_zero_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_always_back_fail_decw_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_not_equal_back_fail_decw_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_equal_back_fail_zero_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_equal_back_fail_decc_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_never_back_fail_inv_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_less_back_fail_wrap_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_never_back_fail_inv_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_never_back_fail_incc_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_less_or_equal_back_fail_keep_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_not_equal_back_fail_repl_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_always_back_fail_decw_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_less_or_equal_back_fail_decw_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_less_back_fail_repl_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_equal_back_fail_zero_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_less_back_fail_inv_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_not_equal_back_fail_incc_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_never_back_fail_keep_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_not_equal_back_fail_wrap_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_equal_back_fail_incc_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_always_back_fail_incc_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_equal_back_fail_zero_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_always_back_fail_repl_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_always_back_fail_zero_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_equal_back_fail_keep_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_greater_back_fail_decw_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_greater_back_fail_inv_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_equal_back_fail_decw_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decw_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_greater_back_fail_decw_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_never_back_fail_decc_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_always_back_fail_wrap_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_less_back_fail_decw_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_always_back_fail_zero_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_not_equal_back_fail_repl_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_equal_back_fail_inv_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_never_back_fail_repl_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decc_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_less_back_fail_inv_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_equal_back_fail_decc_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_less_back_fail_inv_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_less_back_fail_decw_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_greater_back_fail_repl_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_greater_back_fail_incc_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_less_back_fail_inv_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_equal_back_fail_decw_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_always_back_fail_keep_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_never_back_fail_incc_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_never_back_fail_decw_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_greater_back_fail_decc_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_always_back_fail_inv_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_less_back_fail_zero_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_always_back_fail_inv_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_greater_back_fail_incc_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_always_back_fail_zero_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_always_back_fail_decw_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_equal_back_fail_repl_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_never_back_fail_decw_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_less_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_greater_back_fail_incc_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_never_back_fail_repl_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_never_back_fail_incc_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_less_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_less_back_fail_inv_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_less_back_fail_decc_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_less_or_equal_back_fail_repl_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_not_equal_back_fail_decc_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_less_back_fail_decc_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_equal_back_fail_zero_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_always_back_fail_keep_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_greater_back_fail_incc_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_never_back_fail_wrap_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_less_back_fail_wrap_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_greater_back_fail_repl_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_equal_back_fail_repl_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_less_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_less_back_fail_wrap_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_equal_back_fail_inv_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_always_back_fail_zero_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_greater_back_fail_repl_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_always_back_fail_keep_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_never_back_fail_inv_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_equal_back_fail_zero_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_less_back_fail_inv_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_never_back_fail_decc_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_never_back_fail_incc_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_not_equal_back_fail_decw_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_greater_back_fail_inv_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_never_back_fail_zero_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_greater_back_fail_incc_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_not_equal_back_fail_inv_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_always_back_fail_decw_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_equal_back_fail_decc_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_less_back_fail_repl_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_always_back_fail_decc_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_never_back_fail_incc_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_not_equal_back_fail_incc_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_inv_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_less_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_always_back_fail_decc_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_never_back_fail_repl_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_always_back_fail_inv_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_always_back_fail_decc_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_not_equal_back_fail_wrap_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_never_back_fail_keep_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_always_back_fail_zero_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_greater_back_fail_repl_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_less_back_fail_decw_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_greater_back_fail_inv_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_repl_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_always_back_fail_decw_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_greater_back_fail_repl_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_equal_back_fail_incc_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_never_back_fail_repl_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_greater_back_fail_wrap_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_less_or_equal_back_fail_keep_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_equal_back_fail_wrap_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_equal_back_fail_decw_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_not_equal_back_fail_decw_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_greater_back_fail_decc_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_never_back_fail_decw_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_greater_back_fail_inv_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_not_equal_back_fail_decw_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_always_back_fail_repl_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_equal_back_fail_wrap_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_never_back_fail_decc_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_less_back_fail_zero_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_not_equal_back_fail_repl_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_greater_back_fail_wrap_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_equal_back_fail_decw_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decc_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_less_back_fail_decc_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_less_back_fail_incc_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_less_back_fail_inv_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_not_equal_back_fail_incc_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_never_back_fail_incc_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_greater_back_fail_decc_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_greater_back_fail_repl_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_always_back_fail_repl_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_greater_back_fail_repl_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_always_back_fail_wrap_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_always_back_fail_zero_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_greater_back_fail_zero_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_not_equal_back_fail_keep_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_never_back_fail_wrap_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_equal_back_fail_zero_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_equal_back_fail_keep_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_never_back_fail_decc_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_equal_back_fail_wrap_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_equal_back_fail_inv_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_always_back_fail_zero_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_always_back_fail_decc_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_never_back_fail_decc_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_never_back_fail_inv_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_equal_back_fail_repl_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_equal_back_fail_repl_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_never_back_fail_keep_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_greater_back_fail_wrap_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_less_or_equal_back_fail_repl_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_greater_back_fail_wrap_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_greater_back_fail_inv_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_never_back_fail_decw_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_equal_back_fail_zero_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decw_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_never_back_fail_incc_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_not_equal_back_fail_decc_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_greater_back_fail_decc_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_equal_back_fail_zero_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_equal_back_fail_decw_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_less_back_fail_wrap_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_never_back_fail_incc_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_always_back_fail_keep_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_always_back_fail_inv_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_not_equal_back_fail_repl_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_never_back_fail_inv_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_never_back_fail_inv_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_greater_back_fail_decw_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_less_back_fail_decc_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_greater_back_fail_repl_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_equal_back_fail_decc_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_greater_back_fail_wrap_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_always_back_fail_decw_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_never_back_fail_keep_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_equal_back_fail_decw_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_never_back_fail_repl_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_not_equal_back_fail_decc_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_not_equal_back_fail_wrap_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_less_back_fail_incc_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_always_back_fail_repl_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_equal_back_fail_wrap_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_always_back_fail_decw_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_equal_back_fail_incc_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_less_back_fail_decw_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_equal_back_fail_repl_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_always_back_fail_wrap_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_greater_back_fail_decw_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_equal_back_fail_decc_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_less_back_fail_decw_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_never_back_fail_decw_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_equal_back_fail_incc_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_equal_back_fail_zero_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_equal_back_fail_incc_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_not_equal_back_fail_keep_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_greater_back_fail_wrap_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_not_equal_back_fail_inv_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_greater_back_fail_decc_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_greater_back_fail_keep_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_greater_back_fail_repl_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decc_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_not_equal_back_fail_decw_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_less_back_fail_repl_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_equal_back_fail_zero_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_never_back_fail_keep_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_not_equal_back_fail_keep_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_less_back_fail_incc_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_always_back_fail_keep_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_less_back_fail_decw_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_incc_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_not_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_equal_back_fail_repl_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_greater_back_fail_incc_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_greater_back_fail_incc_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_always_back_fail_keep_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_equal_back_fail_decc_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_less_back_fail_incc_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_equal_back_fail_inv_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_less_back_fail_wrap_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_always_back_fail_wrap_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_not_equal_back_fail_zero_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_equal_back_fail_repl_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_always_back_fail_wrap_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_not_equal_back_fail_decc_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_always_back_fail_incc_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_never_back_fail_inv_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_greater_back_fail_repl_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_not_equal_back_fail_decw_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_less_or_equal_back_fail_inv_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_always_back_fail_wrap_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_greater_back_fail_decc_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_greater_back_fail_zero_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_greater_back_fail_repl_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_never_back_fail_wrap_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_always_back_fail_decw_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_always_back_fail_decw_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_equal_back_fail_decw_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_greater_back_fail_decc_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_always_back_fail_wrap_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_not_equal_back_fail_keep_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_always_back_fail_keep_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_not_equal_back_fail_repl_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d16_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_less_back_fail_keep_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_not_equal_back_fail_decc_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_not_equal_back_fail_decc_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_less_back_fail_incc_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_equal_back_fail_inv_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_always_back_fail_incc_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_not_equal_back_fail_repl_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_equal_back_fail_incc_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_inv_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_equal_back_fail_incc_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_never_back_fail_decc_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_always_back_fail_decw_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_less_back_fail_keep_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_always_back_fail_repl_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_never_back_fail_inv_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_less_back_fail_decw_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_equal_back_fail_decc_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_less_back_fail_decc_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_less_back_fail_inv_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_greater_back_fail_zero_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_greater_back_fail_wrap_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_always_back_fail_decc_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_never_back_fail_wrap_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_never_back_fail_keep_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_never_back_fail_incc_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_less_back_fail_incc_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_equal_back_fail_decc_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_not_equal_back_fail_keep_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_never_back_fail_keep_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_greater_back_fail_inv_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_always_back_fail_wrap_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_greater_back_fail_decw_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_equal_back_fail_incc_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_always_back_fail_decc_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_less_or_equal_back_fail_zero_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_never_back_fail_incc_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_greater_back_fail_incc_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_never_back_fail_wrap_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_equal_back_fail_wrap_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_not_equal_back_fail_keep_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_greater_back_fail_decc_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_not_equal_back_fail_decc_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_not_equal_back_fail_zero_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_always_back_fail_inv_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_greater_back_fail_keep_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_greater_back_fail_zero_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_not_equal_back_fail_repl_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_less_back_fail_wrap_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_less_back_fail_repl_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_not_equal_back_fail_inv_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_equal_back_fail_inv_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_always_back_fail_repl_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_never_back_fail_zero_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_equal_back_fail_incc_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_equal_back_fail_decw_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_less_back_fail_decc_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_never_back_fail_decw_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_less_or_equal_back_fail_inv_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_never_back_fail_decc_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_equal_back_fail_inv_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_never_back_fail_zero_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_never_back_fail_wrap_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_always_back_fail_keep_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_equal_back_fail_keep_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_greater_back_fail_decc_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_not_equal_back_fail_inv_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_always_back_fail_decc_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_always_back_fail_wrap_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_always_back_fail_decc_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_not_equal_back_fail_keep_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_equal_back_fail_decc_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_equal_back_fail_repl_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_not_equal_back_fail_decw_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_less_or_equal_back_fail_keep_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_greater_back_fail_inv_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_equal_back_fail_decw_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_less_back_fail_zero_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_equal_back_fail_repl_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_always_back_fail_inv_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_never_back_fail_inv_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_never_back_fail_zero_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_equal_back_fail_inv_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_equal_back_fail_inv_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_less_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_never_back_fail_incc_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_always_back_fail_keep_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_not_equal_back_fail_inv_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_less_or_equal_back_fail_repl_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_greater_back_fail_decw_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_equal_back_fail_repl_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_less_back_fail_inv_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_less_back_fail_incc_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_never_back_fail_inv_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_never_back_fail_keep_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_not_equal_back_fail_repl_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_greater_back_fail_inv_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_never_back_fail_inv_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_always_back_fail_decc_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_less_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_greater_back_fail_decw_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_less_or_equal_back_fail_zero_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_never_back_fail_keep_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_greater_back_fail_decc_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_always_back_fail_incc_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_equal_back_fail_inv_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_never_back_fail_inv_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_always_back_fail_inv_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_always_back_fail_incc_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_always_back_fail_wrap_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_always_back_fail_decw_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_equal_back_fail_repl_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_never_back_fail_decc_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_always_back_fail_repl_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_not_equal_back_fail_wrap_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_never_back_fail_decc_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_less_back_fail_decw_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_not_equal_back_fail_zero_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_less_or_equal_back_fail_zero_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_greater_back_fail_wrap_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_not_equal_back_fail_decc_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_always_back_fail_decw_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_always_back_fail_inv_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_never_back_fail_keep_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_not_equal_back_fail_zero_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_less_or_equal_back_fail_inv_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_greater_back_fail_keep_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_less_back_fail_wrap_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_always_back_fail_decc_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_always_back_fail_incc_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_not_equal_back_fail_incc_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_less_back_fail_inv_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_equal_back_fail_inv_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_equal_back_fail_keep_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_greater_back_fail_decw_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_never_back_fail_repl_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_equal_back_fail_inv_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_greater_back_fail_decc_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_always_back_fail_incc_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_equal_back_fail_decw_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_less_back_fail_incc_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_less_or_equal_back_fail_keep_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_never_back_fail_incc_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_not_equal_back_fail_repl_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_not_equal_back_fail_decc_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_not_equal_back_fail_incc_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_always_back_fail_decw_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_greater_back_fail_decc_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_not_equal_back_fail_incc_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_never_back_fail_decw_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_not_equal_back_fail_decw_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_not_equal_back_fail_decc_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_less_back_fail_decw_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_greater_back_fail_inv_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_not_equal_back_fail_wrap_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_equal_back_fail_inv_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_less_or_equal_back_fail_zero_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_not_equal_back_fail_repl_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_never_back_fail_inv_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_not_equal_back_fail_decw_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_greater_back_fail_repl_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_not_equal_back_fail_wrap_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_never_back_fail_repl_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_less_back_fail_inv_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_never_back_fail_keep_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_less_back_fail_decw_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_always_back_fail_repl_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_never_back_fail_incc_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_never_back_fail_inv_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_less_back_fail_decc_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_not_equal_back_fail_decc_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_not_equal_back_fail_zero_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_greater_back_fail_zero_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_equal_back_fail_incc_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_always_back_fail_inv_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_always_back_fail_inv_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_always_back_fail_incc_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_always_back_fail_wrap_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_equal_back_fail_decw_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_never_back_fail_keep_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_greater_back_fail_zero_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_less_or_equal_back_fail_decw_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_equal_back_fail_decw_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_equal_back_fail_decc_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_equal_back_fail_wrap_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_less_or_equal_back_fail_repl_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_never_back_fail_inv_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_equal_back_fail_zero_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_always_back_fail_incc_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_less_or_equal_back_fail_decw_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_greater_back_fail_decw_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_not_equal_back_fail_decc_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_greater_back_fail_wrap_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_less_back_fail_wrap_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_never_back_fail_repl_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_greater_back_fail_inv_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_always_back_fail_decw_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_always_back_fail_inv_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_not_equal_back_fail_zero_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_equal_back_fail_decc_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_never_back_fail_repl_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_not_equal_back_fail_wrap_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_never_back_fail_decw_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_less_back_fail_repl_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_less_back_fail_inv_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_never_back_fail_wrap_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_greater_back_fail_repl_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_always_back_fail_decw_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_not_equal_back_fail_incc_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_less_or_equal_back_fail_incc_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_equal_back_fail_repl_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_greater_back_fail_decc_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_greater_back_fail_repl_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_always_back_fail_keep_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_always_back_fail_keep_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_less_back_fail_wrap_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_greater_back_fail_zero_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_decc_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_never_back_fail_keep_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_less_back_fail_inv_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_not_equal_back_fail_zero_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_never_back_fail_decc_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_equal_back_fail_keep_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_greater_back_fail_keep_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_greater_back_fail_zero_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_not_equal_back_fail_keep_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_greater_back_fail_decw_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_never_back_fail_decc_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_always_back_fail_decc_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_always_back_fail_keep_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_always_back_fail_incc_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_less_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_never_back_fail_zero_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_always_back_fail_keep_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_less_back_fail_decc_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_never_back_fail_inv_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_equal_back_fail_keep_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_less_back_fail_zero_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_equal_back_fail_incc_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_less_or_equal_back_fail_inv_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_less_back_fail_zero_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_equal_back_fail_inv_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_not_equal_back_fail_decw_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_less_back_fail_decc_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_less_back_fail_wrap_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_equal_back_fail_incc_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_greater_back_fail_wrap_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_equal_back_fail_keep_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_never_back_fail_incc_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_always_back_fail_incc_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_less_back_fail_repl_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_never_back_fail_zero_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_equal_back_fail_keep_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_not_equal_back_fail_wrap_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_less_back_fail_zero_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_always_back_fail_decw_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_equal_back_fail_keep_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_equal_back_fail_decw_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_less_or_equal_back_fail_incc_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_never_back_fail_decc_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_never_back_fail_incc_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_less_back_fail_decc_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_equal_back_fail_repl_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_less_back_fail_incc_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_never_back_fail_keep_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_less_back_fail_decc_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_not_equal_back_fail_inv_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_never_back_fail_decc_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_never_back_fail_incc_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_less_back_fail_incc_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_always_back_fail_decc_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decw_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_never_back_fail_inv_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_equal_back_fail_decc_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_less_back_fail_incc_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_not_equal_back_fail_decw_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_less_back_fail_incc_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_less_back_fail_repl_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_less_back_fail_decc_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_less_back_fail_keep_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_not_equal_back_fail_decw_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_not_equal_back_fail_repl_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_equal_back_fail_keep_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_not_equal_back_fail_keep_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_never_back_fail_wrap_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_greater_back_fail_keep_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_not_equal_back_fail_zero_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_always_back_fail_decc_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_not_equal_back_fail_decc_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_less_back_fail_decc_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_always_back_fail_decw_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_never_back_fail_zero_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_less_back_fail_repl_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_less_back_fail_repl_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_always_back_fail_keep_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_equal_back_fail_wrap_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_greater_back_fail_keep_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_less_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_never_back_fail_repl_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_less_or_equal_back_fail_inv_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_not_equal_back_fail_zero_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_always_back_fail_keep_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_not_equal_back_fail_wrap_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_greater_back_fail_wrap_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_greater_back_fail_inv_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_equal_back_fail_keep_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_greater_back_fail_inv_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_never_back_fail_repl_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_not_equal_back_fail_decc_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_not_equal_back_fail_zero_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_greater_back_fail_decc_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_not_equal_back_fail_inv_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_not_equal_back_fail_decw_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_equal_back_fail_inv_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_greater_back_fail_incc_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_equal_back_fail_repl_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_greater_back_fail_wrap_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_less_back_fail_zero_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_less_back_fail_repl_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_equal_back_fail_wrap_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_not_equal_back_fail_inv_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_always_back_fail_decw_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_equal_back_fail_wrap_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_less_back_fail_zero_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_not_equal_back_fail_keep_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_less_back_fail_zero_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_always_back_fail_zero_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_not_equal_back_fail_repl_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_repl_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_never_back_fail_zero_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_never_back_fail_keep_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_not_equal_back_fail_zero_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_equal_back_fail_decc_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_always_back_fail_incc_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_always_back_fail_wrap_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_greater_back_fail_wrap_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_never_back_fail_keep_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_greater_back_fail_inv_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_greater_back_fail_wrap_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_not_equal_back_fail_zero_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_equal_back_fail_inv_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_equal_back_fail_inv_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_less_back_fail_repl_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_always_back_fail_incc_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_equal_back_fail_decc_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_always_back_fail_wrap_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_never_back_fail_zero_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_never_back_fail_repl_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_not_equal_back_fail_wrap_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_not_equal_back_fail_incc_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_less_back_fail_repl_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_not_equal_back_fail_wrap_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_never_back_fail_zero_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_always_back_fail_incc_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_never_back_fail_wrap_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_greater_back_fail_decc_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_not_equal_back_fail_keep_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_less_back_fail_repl_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_greater_back_fail_inv_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_not_equal_back_fail_incc_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_equal_back_fail_zero_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_equal_back_fail_inv_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_less_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_never_back_fail_inv_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_greater_back_fail_repl_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_greater_back_fail_decw_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_less_or_equal_back_fail_decc_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_less_back_fail_keep_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_always_back_fail_decw_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_less_back_fail_keep_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_less_back_fail_keep_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_not_equal_back_fail_incc_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_not_equal_back_fail_decc_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_never_back_fail_incc_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_greater_back_fail_decw_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_greater_back_fail_zero_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_not_equal_back_fail_wrap_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_greater_back_fail_wrap_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_equal_back_fail_zero_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_greater_back_fail_wrap_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_greater_back_fail_wrap_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_equal_back_fail_keep_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_always_back_fail_incc_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_equal_back_fail_repl_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_less_back_fail_inv_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_less_back_fail_keep_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_less_or_equal_back_fail_decc_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_equal_back_fail_wrap_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_always_back_fail_decw_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_not_equal_back_fail_wrap_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_less_back_fail_incc_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_always_back_fail_incc_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_never_back_fail_wrap_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_equal_back_fail_repl_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_greater_back_fail_decw_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_equal_back_fail_inv_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_equal_back_fail_decc_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_greater_back_fail_keep_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_less_back_fail_repl_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_never_back_fail_inv_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_less_back_fail_keep_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_not_equal_back_fail_incc_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_less_back_fail_zero_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_greater_back_fail_decc_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_less_back_fail_repl_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_greater_back_fail_decw_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_greater_back_fail_keep_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_not_equal_back_fail_inv_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_always_back_fail_decc_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_never_back_fail_decw_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_not_equal_back_fail_wrap_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_never_back_fail_inv_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_less_or_equal_back_fail_incc_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_less_back_fail_incc_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_keep_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_equal_back_fail_zero_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_never_back_fail_wrap_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_equal_back_fail_wrap_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_greater_back_fail_zero_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_never_back_fail_zero_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_greater_back_fail_repl_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_greater_back_fail_decc_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_never_back_fail_repl_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_not_equal_back_fail_decw_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_never_back_fail_keep_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_never_back_fail_keep_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_always_back_fail_zero_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_not_equal_back_fail_decc_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_equal_back_fail_incc_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_never_back_fail_zero_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_less_back_fail_incc_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_less_back_fail_zero_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_less_back_fail_wrap_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_less_or_equal_back_fail_repl_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_less_or_equal_back_fail_zero_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_never_back_fail_inv_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_equal_back_fail_decw_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_equal_back_fail_decc_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_less_back_fail_decw_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_equal_back_fail_repl_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_never_back_fail_repl_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_equal_back_fail_inv_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_always_back_fail_wrap_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_greater_back_fail_decc_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_never_back_fail_keep_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_always_back_fail_decw_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_less_back_fail_keep_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_not_equal_back_fail_decc_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_less_back_fail_keep_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_greater_back_fail_decc_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_equal_back_fail_zero_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_never_back_fail_decc_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_always_back_fail_wrap_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_equal_back_fail_wrap_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_not_equal_back_fail_inv_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_never_back_fail_incc_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_not_equal_back_fail_zero_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_not_equal_back_fail_incc_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_less_or_equal_back_fail_zero_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_less_back_fail_inv_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_never_back_fail_keep_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_not_equal_back_fail_keep_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_greater_back_fail_decc_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_greater_back_fail_zero_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_never_back_fail_repl_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_always_back_fail_incc_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_not_equal_back_fail_incc_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_equal_back_fail_keep_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_not_equal_back_fail_decw_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_less_or_equal_back_fail_repl_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_never_back_fail_inv_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_always_back_fail_keep_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_never_back_fail_decw_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_greater_back_fail_incc_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_always_back_fail_wrap_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_less_back_fail_wrap_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_greater_back_fail_inv_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_equal_back_fail_decc_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_never_back_fail_inv_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_never_back_fail_keep_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_less_back_fail_decw_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_greater_back_fail_inv_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_greater_back_fail_inv_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_equal_back_fail_repl_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_never_back_fail_wrap_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_never_back_fail_decc_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_never_back_fail_wrap_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_equal_back_fail_zero_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_less_back_fail_wrap_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_never_back_fail_decw_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_never_back_fail_decc_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_equal_back_fail_decc_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_not_equal_back_fail_inv_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_never_back_fail_incc_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_less_back_fail_wrap_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_not_equal_back_fail_decc_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_less_back_fail_wrap_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_less_back_fail_incc_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_less_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_always_back_fail_zero_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_less_or_equal_back_fail_repl_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_less_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_always_back_fail_zero_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_always_back_fail_decc_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_not_equal_back_fail_keep_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_not_equal_back_fail_repl_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_equal_back_fail_keep_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_always_back_fail_decc_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_not_equal_back_fail_wrap_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_never_back_fail_decw_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_always_back_fail_keep_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_not_equal_back_fail_wrap_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_equal_back_fail_incc_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_not_equal_back_fail_repl_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_less_or_equal_back_fail_decc_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_never_back_fail_keep_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_equal_back_fail_zero_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_greater_back_fail_incc_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_incc_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_always_back_fail_inv_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_less_back_fail_keep_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_less_or_equal_back_fail_incc_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_never_back_fail_decc_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_equal_back_fail_repl_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_greater_back_fail_zero_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_not_equal_back_fail_decw_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_never_back_fail_repl_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_equal_back_fail_inv_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_less_back_fail_decw_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_greater_back_fail_inv_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_always_back_fail_decc_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_less_back_fail_keep_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_never_back_fail_keep_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_always_back_fail_decw_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_less_back_fail_inv_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_never_back_fail_keep_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_never_back_fail_repl_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_greater_back_fail_zero_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_never_back_fail_keep_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_less_or_equal_back_fail_zero_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_not_equal_back_fail_zero_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_greater_back_fail_decc_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_less_back_fail_wrap_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_never_back_fail_decc_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_greater_back_fail_wrap_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_equal_back_fail_repl_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_greater_back_fail_keep_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_always_back_fail_zero_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_always_back_fail_wrap_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_always_back_fail_repl_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_equal_back_fail_keep_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_always_back_fail_wrap_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_never_back_fail_wrap_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_greater_back_fail_incc_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_equal_back_fail_keep_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_less_back_fail_repl_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_greater_back_fail_zero_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_never_back_fail_incc_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_never_back_fail_decw_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_greater_back_fail_decw_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_not_equal_back_fail_decc_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_less_or_equal_back_fail_decw_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_never_back_fail_wrap_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_less_back_fail_decw_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_not_equal_back_fail_zero_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_equal_back_fail_wrap_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_always_back_fail_decw_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_always_back_fail_inv_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_never_back_fail_incc_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_equal_back_fail_zero_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_less_back_fail_zero_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_equal_back_fail_decw_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_never_back_fail_keep_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_never_back_fail_repl_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_not_equal_back_fail_decw_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_equal_back_fail_repl_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_always_back_fail_zero_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_never_back_fail_zero_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_not_equal_back_fail_inv_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_less_back_fail_incc_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_less_or_equal_back_fail_repl_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_equal_back_fail_decc_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_always_back_fail_decc_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_never_back_fail_repl_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_greater_back_fail_incc_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_less_or_equal_back_fail_decc_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_less_or_equal_back_fail_zero_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_less_back_fail_incc_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_never_back_fail_keep_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_not_equal_back_fail_decw_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_greater_back_fail_decw_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_less_back_fail_zero_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_never_back_fail_decw_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_never_back_fail_repl_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_always_back_fail_decw_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_less_or_equal_back_fail_decc_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_less_back_fail_decw_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_not_equal_back_fail_inv_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_equal_back_fail_keep_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_not_equal_back_fail_decw_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_always_back_fail_inv_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_greater_back_fail_zero_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_not_equal_back_fail_decw_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_always_back_fail_incc_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_greater_back_fail_zero_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_not_equal_back_fail_wrap_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_less_back_fail_decc_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_less_back_fail_incc_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_equal_back_fail_decc_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_always_back_fail_decc_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_less_back_fail_repl_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_equal_back_fail_incc_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_equal_back_fail_incc_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_less_back_fail_repl_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_less_back_fail_repl_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_always_back_fail_wrap_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_always_back_fail_wrap_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_less_or_equal_back_fail_incc_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_not_equal_back_fail_incc_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_equal_back_fail_decw_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_not_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_not_equal_back_fail_zero_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_equal_back_fail_decw_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_not_equal_back_fail_repl_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_not_equal_back_fail_zero_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_always_back_fail_incc_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_less_back_fail_decc_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_less_back_fail_decw_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_decc_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_not_equal_back_fail_repl_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_always_back_fail_repl_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_always_back_fail_inv_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_equal_back_fail_repl_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_not_equal_back_fail_wrap_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_always_back_fail_repl_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_never_back_fail_zero_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_greater_back_fail_decc_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_always_back_fail_inv_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_not_equal_back_fail_wrap_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_always_back_fail_wrap_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_never_back_fail_decc_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_never_back_fail_decc_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_always_back_fail_inv_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_always_back_fail_inv_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_greater_back_fail_incc_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_equal_back_fail_decw_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_greater_back_fail_repl_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_not_equal_back_fail_wrap_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_never_back_fail_zero_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_always_back_fail_keep_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_always_back_fail_zero_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_greater_back_fail_decw_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_always_back_fail_repl_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_less_back_fail_decw_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_repl_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_equal_back_fail_repl_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_not_equal_back_fail_repl_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_always_back_fail_wrap_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_equal_back_fail_decw_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_never_back_fail_decw_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_equal_back_fail_decc_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_less_back_fail_repl_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_greater_back_fail_zero_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_always_back_fail_incc_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_less_back_fail_zero_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_never_back_fail_repl_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_greater_back_fail_keep_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_less_or_equal_back_fail_decc_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_less_back_fail_incc_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_never_back_fail_zero_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_never_back_fail_decw_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_greater_back_fail_inv_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_not_equal_back_fail_keep_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_less_back_fail_decc_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_never_back_fail_incc_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_not_equal_back_fail_zero_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_less_back_fail_wrap_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_always_back_fail_decw_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_greater_back_fail_decc_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_less_or_equal_back_fail_incc_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_never_back_fail_keep_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_never_back_fail_keep_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_equal_back_fail_keep_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_greater_back_fail_zero_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_less_back_fail_incc_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_always_back_fail_inv_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_always_back_fail_zero_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_less_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_not_equal_back_fail_keep_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_greater_back_fail_decw_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_equal_back_fail_zero_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_never_back_fail_zero_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_equal_back_fail_keep_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_less_back_fail_incc_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_less_or_equal_back_fail_repl_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_greater_back_fail_inv_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_equal_back_fail_wrap_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_less_back_fail_zero_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_less_back_fail_incc_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_less_or_equal_back_fail_keep_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_always_back_fail_decw_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_equal_back_fail_keep_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_always_back_fail_repl_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_greater_back_fail_zero_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_greater_back_fail_repl_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_less_or_equal_back_fail_repl_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_less_back_fail_incc_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_equal_back_fail_zero_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_not_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_always_back_fail_wrap_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_greater_back_fail_inv_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_less_back_fail_inv_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_greater_back_fail_zero_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_less_or_equal_back_fail_zero_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_greater_back_fail_inv_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_less_or_equal_back_fail_zero_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_less_back_fail_inv_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_less_back_fail_decc_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_less_back_fail_inv_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_less_back_fail_incc_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_greater_back_fail_repl_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_always_back_fail_wrap_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_always_back_fail_repl_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_equal_back_fail_zero_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_always_back_fail_inv_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_never_back_fail_inv_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_always_back_fail_decw_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_less_or_equal_back_fail_decc_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_always_back_fail_zero_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_greater_back_fail_decw_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_never_back_fail_decc_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_not_equal_back_fail_inv_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_greater_back_fail_inv_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_never_back_fail_decc_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_equal_back_fail_zero_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_not_equal_back_fail_decw_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_less_back_fail_incc_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_less_or_equal_back_fail_keep_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_less_or_equal_back_fail_zero_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_equal_back_fail_inv_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_always_back_fail_zero_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_greater_back_fail_inv_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_not_equal_back_fail_zero_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_equal_back_fail_wrap_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_not_equal_back_fail_wrap_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_greater_back_fail_wrap_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_always_back_fail_zero_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_equal_back_fail_incc_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_greater_back_fail_wrap_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_never_back_fail_incc_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_less_back_fail_wrap_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_never_back_fail_zero_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_equal_back_fail_inv_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_never_back_fail_keep_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_not_equal_back_fail_keep_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_never_back_fail_inv_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_less_back_fail_wrap_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_always_back_fail_decc_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_not_equal_back_fail_wrap_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_greater_back_fail_decw_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_never_back_fail_wrap_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_greater_back_fail_wrap_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_never_back_fail_decc_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_greater_back_fail_zero_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_greater_back_fail_decw_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_equal_back_fail_wrap_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_equal_back_fail_wrap_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_equal_back_fail_inv_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_never_back_fail_repl_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_always_back_fail_inv_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_never_back_fail_incc_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_not_equal_back_fail_wrap_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_not_equal_back_fail_wrap_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_greater_back_fail_wrap_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_never_back_fail_wrap_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_not_equal_back_fail_incc_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_greater_back_fail_repl_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_less_back_fail_keep_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_not_equal_back_fail_incc_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_less_back_fail_repl_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_not_equal_back_fail_decw_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_not_equal_back_fail_inv_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_equal_back_fail_keep_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_always_back_fail_decw_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_never_back_fail_inv_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_greater_back_fail_keep_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_less_back_fail_incc_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_greater_back_fail_wrap_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_equal_back_fail_decw_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_never_back_fail_incc_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_not_equal_back_fail_wrap_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_always_back_fail_decw_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_greater_back_fail_inv_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_always_back_fail_incc_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_less_or_equal_back_fail_repl_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_equal_back_fail_zero_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_greater_back_fail_zero_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_greater_back_fail_zero_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_less_back_fail_inv_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_less_back_fail_zero_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_equal_back_fail_decc_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_less_back_fail_keep_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_greater_back_fail_decc_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_equal_back_fail_incc_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_greater_back_fail_decc_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_never_back_fail_repl_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_less_or_equal_back_fail_zero_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_less_or_equal_back_fail_inv_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_equal_back_fail_wrap_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_never_back_fail_incc_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_greater_back_fail_incc_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_never_back_fail_decc_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_equal_back_fail_wrap_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_less_back_fail_zero_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_equal_back_fail_decc_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_always_back_fail_keep_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_equal_back_fail_repl_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_less_back_fail_wrap_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_greater_back_fail_decc_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decc_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_never_back_fail_decw_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_equal_back_fail_incc_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_greater_back_fail_repl_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_always_back_fail_repl_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_not_equal_back_fail_decw_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_always_back_fail_repl_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_greater_back_fail_incc_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_always_back_fail_zero_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_equal_back_fail_wrap_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_equal_back_fail_repl_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_less_back_fail_decw_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_always_back_fail_decw_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_less_back_fail_keep_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_always_back_fail_repl_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_always_back_fail_decw_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decc_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_less_back_fail_repl_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_greater_back_fail_decc_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_greater_back_fail_decw_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_always_back_fail_zero_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_not_equal_back_fail_zero_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_never_back_fail_keep_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_less_back_fail_decc_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_always_back_fail_zero_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_not_equal_back_fail_incc_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_always_back_fail_zero_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_less_back_fail_decw_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_not_equal_back_fail_inv_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_not_equal_back_fail_zero_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_always_back_fail_zero_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_never_back_fail_zero_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_not_equal_back_fail_incc_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_not_equal_back_fail_incc_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_less_back_fail_decw_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_less_back_fail_wrap_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_never_back_fail_repl_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_never_back_fail_inv_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_less_back_fail_repl_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_less_back_fail_keep_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_never_back_fail_inv_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_equal_back_fail_repl_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_never_back_fail_repl_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_not_equal_back_fail_decw_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_equal_back_fail_decc_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_always_back_fail_decc_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_equal_back_fail_decw_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_less_back_fail_zero_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_not_equal_back_fail_zero_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_always_back_fail_decw_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_not_equal_back_fail_wrap_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_less_back_fail_keep_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_greater_back_fail_decw_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_not_equal_back_fail_incc_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_greater_back_fail_decc_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_greater_back_fail_decc_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_always_back_fail_decw_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_less_back_fail_incc_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_equal_back_fail_decw_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_not_equal_back_fail_decc_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_not_equal_back_fail_decc_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_less_or_equal_back_fail_zero_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_less_back_fail_keep_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_never_back_fail_incc_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_not_equal_back_fail_wrap_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_not_equal_back_fail_wrap_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_less_or_equal_back_fail_inv_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_less_back_fail_decc_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_equal_back_fail_zero_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_greater_back_fail_keep_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_greater_back_fail_inv_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_not_equal_back_fail_repl_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_not_equal_back_fail_decc_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_greater_back_fail_keep_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_equal_back_fail_repl_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_equal_back_fail_repl_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_not_equal_back_fail_incc_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_greater_back_fail_inv_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_equal_back_fail_decw_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_less_back_fail_incc_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_always_back_fail_repl_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_not_equal_back_fail_zero_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_equal_back_fail_wrap_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_always_back_fail_zero_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_equal_back_fail_decw_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_greater_back_fail_keep_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_always_back_fail_inv_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_equal_back_fail_incc_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_never_back_fail_repl_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_greater_back_fail_wrap_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_greater_back_fail_wrap_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_equal_back_fail_zero_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_greater_back_fail_wrap_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_less_or_equal_back_fail_inv_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_never_back_fail_inv_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_always_back_fail_incc_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_greater_back_fail_incc_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_greater_back_fail_zero_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_greater_back_fail_keep_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_less_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_less_back_fail_zero_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_not_equal_back_fail_incc_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_greater_back_fail_zero_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_equal_back_fail_zero_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_greater_back_fail_decw_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_always_back_fail_inv_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_always_back_fail_inv_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_never_back_fail_decc_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_always_back_fail_wrap_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_never_back_fail_wrap_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_greater_back_fail_incc_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_greater_back_fail_zero_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_greater_back_fail_wrap_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_never_back_fail_decc_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_always_back_fail_incc_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_less_back_fail_keep_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_less_back_fail_wrap_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_less_back_fail_decw_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_never_back_fail_inv_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_equal_back_fail_keep_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_always_back_fail_decc_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_less_back_fail_wrap_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_less_back_fail_wrap_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_greater_back_fail_zero_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_less_back_fail_zero_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_always_back_fail_inv_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_not_equal_back_fail_zero_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_greater_back_fail_decw_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_equal_back_fail_keep_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_always_back_fail_incc_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_always_back_fail_repl_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_never_back_fail_repl_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_less_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_not_equal_back_fail_inv_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_equal_back_fail_keep_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_equal_back_fail_keep_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_equal_back_fail_decc_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_never_back_fail_inv_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_always_back_fail_inv_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_not_equal_back_fail_incc_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_equal_back_fail_inv_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_greater_back_fail_zero_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_less_back_fail_decw_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_always_back_fail_decc_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_always_back_fail_decw_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_always_back_fail_wrap_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_less_back_fail_incc_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_greater_back_fail_repl_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_less_back_fail_wrap_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_always_back_fail_wrap_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_less_back_fail_incc_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_always_back_fail_decw_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_greater_back_fail_decw_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_less_back_fail_inv_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_always_back_fail_inv_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_never_back_fail_wrap_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_equal_back_fail_repl_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_never_back_fail_repl_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_less_back_fail_decw_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_not_equal_back_fail_inv_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_always_back_fail_incc_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_equal_back_fail_incc_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_equal_back_fail_repl_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_always_back_fail_decw_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_less_back_fail_repl_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_equal_back_fail_decw_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_never_back_fail_keep_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_never_back_fail_zero_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_equal_back_fail_decw_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_less_or_equal_back_fail_repl_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_not_equal_back_fail_decw_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_not_equal_back_fail_repl_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_less_back_fail_inv_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_greater_back_fail_keep_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_equal_back_fail_incc_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_less_back_fail_keep_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_always_back_fail_incc_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_never_back_fail_wrap_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_never_back_fail_decw_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_less_back_fail_repl_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_equal_back_fail_zero_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_equal_back_fail_incc_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_always_back_fail_keep_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_less_or_equal_back_fail_zero_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_equal_back_fail_inv_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_never_back_fail_decc_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_less_back_fail_repl_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_less_back_fail_incc_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_greater_back_fail_zero_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_less_back_fail_repl_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_less_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_greater_back_fail_decc_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_less_or_equal_back_fail_decw_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_less_back_fail_zero_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_greater_back_fail_zero_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_not_equal_back_fail_zero_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_less_or_equal_back_fail_keep_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_less_or_equal_back_fail_repl_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_less_or_equal_back_fail_decc_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_never_back_fail_inv_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_less_or_equal_back_fail_incc_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_equal_back_fail_wrap_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_never_back_fail_keep_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_equal_back_fail_keep_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_always_back_fail_decw_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_greater_back_fail_decw_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_less_back_fail_incc_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_repl_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_not_equal_back_fail_wrap_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_always_back_fail_repl_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_not_equal_back_fail_decw_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_less_back_fail_incc_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_not_equal_back_fail_zero_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_greater_back_fail_repl_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_equal_back_fail_inv_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_always_back_fail_decw_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_greater_back_fail_decw_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_equal_back_fail_decc_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_not_equal_back_fail_repl_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_greater_back_fail_keep_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_greater_back_fail_inv_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_greater_back_fail_incc_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_equal_back_fail_wrap_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_never_back_fail_decw_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_always_back_fail_decw_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_greater_back_fail_decw_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_less_or_equal_back_fail_incc_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_never_back_fail_inv_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_always_back_fail_inv_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_always_back_fail_decw_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_greater_back_fail_decc_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_never_back_fail_decc_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_greater_back_fail_incc_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_never_back_fail_wrap_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_never_back_fail_keep_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_less_or_equal_back_fail_decc_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_less_or_equal_back_fail_repl_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_equal_back_fail_decc_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_greater_back_fail_repl_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_never_back_fail_keep_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_less_back_fail_keep_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_greater_back_fail_inv_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_never_back_fail_inv_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_equal_back_fail_repl_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_equal_back_fail_wrap_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_never_back_fail_zero_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_less_or_equal_back_fail_decw_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_equal_back_fail_incc_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_less_back_fail_zero_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_never_back_fail_decc_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_less_or_equal_back_fail_incc_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_always_back_fail_zero_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_less_back_fail_repl_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_less_back_fail_decw_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_always_back_fail_decc_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_never_back_fail_incc_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_equal_back_fail_keep_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_equal_back_fail_inv_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_less_back_fail_zero_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_always_back_fail_repl_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_never_back_fail_repl_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_always_back_fail_wrap_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_never_back_fail_repl_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_never_back_fail_decw_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_greater_back_fail_zero_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_equal_back_fail_zero_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_greater_back_fail_repl_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_equal_back_fail_keep_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_greater_back_fail_repl_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_less_back_fail_incc_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_not_equal_back_fail_repl_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_less_back_fail_repl_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_always_back_fail_inv_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_equal_back_fail_decw_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_not_equal_back_fail_decw_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_greater_back_fail_incc_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_never_back_fail_zero_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_less_back_fail_wrap_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_always_back_fail_decc_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_equal_back_fail_zero_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_greater_back_fail_inv_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_not_equal_back_fail_inv_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_always_back_fail_inv_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_always_back_fail_decc_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_less_back_fail_wrap_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_less_back_fail_decc_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_less_back_fail_repl_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_less_back_fail_inv_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_always_back_fail_zero_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_greater_back_fail_zero_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decw_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_equal_back_fail_zero_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_not_equal_back_fail_wrap_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_less_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_greater_back_fail_decw_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_never_back_fail_decw_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_always_back_fail_inv_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_not_equal_back_fail_decc_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_less_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_never_back_fail_decc_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_less_or_equal_back_fail_inv_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_less_or_equal_back_fail_zero_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_never_back_fail_inv_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_always_back_fail_decc_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_less_back_fail_repl_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_greater_back_fail_wrap_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_not_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_always_back_fail_keep_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_not_equal_back_fail_wrap_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_not_equal_back_fail_zero_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_less_or_equal_back_fail_decw_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_less_back_fail_decc_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_never_back_fail_incc_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_greater_back_fail_repl_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_not_equal_back_fail_wrap_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_never_back_fail_decw_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_less_back_fail_keep_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_less_back_fail_inv_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_not_equal_back_fail_repl_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_greater_back_fail_wrap_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_less_back_fail_decc_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_always_back_fail_decw_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_greater_back_fail_incc_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_less_back_fail_wrap_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_not_equal_back_fail_keep_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_always_back_fail_inv_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_not_equal_back_fail_decc_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_less_back_fail_keep_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_never_back_fail_decw_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_less_back_fail_decw_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_greater_back_fail_inv_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_less_back_fail_repl_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_not_equal_back_fail_decc_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_greater_back_fail_repl_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_equal_back_fail_decw_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_less_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_not_equal_back_fail_keep_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_never_back_fail_decc_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_not_equal_back_fail_repl_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_never_back_fail_zero_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_less_back_fail_incc_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_not_equal_back_fail_keep_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_never_back_fail_wrap_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_greater_back_fail_keep_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_equal_back_fail_keep_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_less_back_fail_decc_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_less_or_equal_back_fail_zero_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_never_back_fail_decw_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_always_back_fail_decw_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_less_or_equal_back_fail_zero_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_greater_back_fail_repl_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_never_back_fail_inv_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_not_equal_back_fail_zero_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_equal_back_fail_keep_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_less_back_fail_decc_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_always_back_fail_zero_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_not_equal_back_fail_repl_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_less_back_fail_wrap_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_greater_back_fail_decc_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_less_back_fail_decc_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_never_back_fail_wrap_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_never_back_fail_wrap_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_less_back_fail_inv_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_not_equal_back_fail_inv_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_always_back_fail_zero_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_less_back_fail_inv_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_always_back_fail_keep_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_always_back_fail_incc_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_not_equal_back_fail_decc_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_less_or_equal_back_fail_repl_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_less_back_fail_keep_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_less_back_fail_wrap_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_never_back_fail_decc_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_greater_back_fail_zero_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_less_back_fail_keep_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_equal_back_fail_wrap_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_not_equal_back_fail_decw_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_equal_back_fail_inv_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_always_back_fail_repl_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_equal_back_fail_inv_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_always_back_fail_decc_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_not_equal_back_fail_incc_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_less_back_fail_keep_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_less_back_fail_decc_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_never_back_fail_keep_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_less_back_fail_wrap_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_greater_back_fail_keep_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_never_back_fail_keep_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_equal_back_fail_repl_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_less_back_fail_zero_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_not_equal_back_fail_decc_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_equal_back_fail_zero_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_greater_back_fail_wrap_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_greater_back_fail_decw_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_never_back_fail_incc_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_always_back_fail_decw_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_not_equal_back_fail_decw_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_less_back_fail_repl_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_never_back_fail_incc_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_never_back_fail_repl_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_greater_back_fail_incc_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_greater_back_fail_incc_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_always_back_fail_decc_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_equal_back_fail_wrap_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_less_back_fail_inv_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_never_back_fail_keep_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_not_equal_back_fail_inv_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_less_back_fail_decw_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_always_back_fail_inv_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_greater_back_fail_incc_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_always_back_fail_inv_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_always_back_fail_incc_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_not_equal_back_fail_keep_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_not_equal_back_fail_inv_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_not_equal_back_fail_decc_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_never_back_fail_decw_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_not_equal_back_fail_decc_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_always_back_fail_incc_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_always_back_fail_keep_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_equal_back_fail_zero_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_not_equal_back_fail_incc_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_equal_back_fail_inv_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_not_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_equal_back_fail_keep_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_never_back_fail_wrap_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_equal_back_fail_incc_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_always_back_fail_inv_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_greater_back_fail_wrap_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_always_back_fail_decw_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_always_back_fail_repl_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_always_back_fail_inv_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_equal_back_fail_keep_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_greater_back_fail_incc_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_not_equal_back_fail_inv_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_less_or_equal_back_fail_incc_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_equal_back_fail_keep_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_never_back_fail_wrap_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_never_back_fail_wrap_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_equal_back_fail_decw_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_never_back_fail_incc_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_always_back_fail_decw_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_equal_back_fail_keep_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_never_back_fail_keep_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_not_equal_back_fail_inv_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_less_or_equal_back_fail_incc_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_equal_back_fail_keep_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_always_back_fail_decc_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_less_back_fail_decw_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_always_back_fail_inv_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_less_back_fail_keep_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_equal_back_fail_wrap_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_less_back_fail_inv_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_equal_back_fail_incc_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_never_back_fail_wrap_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_less_back_fail_decw_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_not_equal_back_fail_keep_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_never_back_fail_zero_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_not_equal_back_fail_decc_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_equal_back_fail_wrap_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_not_equal_back_fail_zero_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_equal_back_fail_repl_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_always_back_fail_zero_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_less_back_fail_decc_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_less_or_equal_back_fail_decw_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_greater_back_fail_repl_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_never_back_fail_decw_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_equal_back_fail_decc_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_equal_back_fail_incc_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_equal_back_fail_incc_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_not_equal_back_fail_incc_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_never_back_fail_inv_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_greater_back_fail_zero_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_greater_back_fail_decc_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_never_back_fail_keep_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_equal_back_fail_inv_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_always_back_fail_decc_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_less_or_equal_back_fail_decc_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_always_back_fail_inv_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_greater_back_fail_inv_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_always_back_fail_keep_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_not_equal_back_fail_zero_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_less_back_fail_wrap_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_greater_back_fail_repl_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_equal_back_fail_zero_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_less_back_fail_decw_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_greater_back_fail_keep_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_never_back_fail_wrap_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_equal_back_fail_decw_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_equal_back_fail_zero_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_greater_back_fail_zero_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_never_back_fail_incc_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_never_back_fail_repl_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_always_back_fail_incc_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_never_back_fail_decw_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_greater_back_fail_decc_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_greater_back_fail_decw_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_less_back_fail_zero_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_less_or_equal_back_fail_inv_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_not_equal_back_fail_incc_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_always_back_fail_keep_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_equal_back_fail_incc_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_less_back_fail_inv_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_always_back_fail_wrap_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_always_back_fail_decc_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_less_back_fail_repl_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_not_equal_back_fail_keep_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_not_equal_back_fail_keep_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_less_or_equal_back_fail_repl_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_equal_back_fail_incc_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_never_back_fail_decw_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_always_back_fail_repl_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_not_equal_back_fail_keep_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_not_equal_back_fail_decc_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_not_equal_back_fail_wrap_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_less_or_equal_back_fail_incc_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_equal_back_fail_decc_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_greater_back_fail_inv_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_greater_back_fail_keep_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_greater_back_fail_decw_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_not_equal_back_fail_inv_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_greater_back_fail_incc_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_never_back_fail_decc_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_not_equal_back_fail_repl_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_greater_back_fail_repl_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_never_back_fail_inv_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_greater_back_fail_zero_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_not_equal_back_fail_inv_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_less_back_fail_wrap_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_greater_back_fail_incc_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_not_equal_back_fail_decw_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_not_equal_back_fail_repl_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_less_back_fail_zero_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_not_equal_back_fail_wrap_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_not_equal_back_fail_decc_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_never_back_fail_incc_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_equal_back_fail_incc_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_not_equal_back_fail_repl_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_less_or_equal_back_fail_repl_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_less_back_fail_decc_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_equal_back_fail_keep_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_less_back_fail_decc_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_always_back_fail_zero_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_always_back_fail_wrap_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_never_back_fail_decc_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_greater_back_fail_wrap_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_less_back_fail_inv_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_always_back_fail_wrap_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_equal_back_fail_decc_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_less_back_fail_incc_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_never_back_fail_repl_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_never_back_fail_decc_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_always_back_fail_decc_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_greater_back_fail_zero_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_not_equal_back_fail_repl_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_equal_back_fail_incc_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_equal_back_fail_incc_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_always_back_fail_zero_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_never_back_fail_repl_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_never_back_fail_repl_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_greater_back_fail_decc_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_equal_back_fail_zero_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_less_back_fail_repl_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_never_back_fail_keep_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_not_equal_back_fail_inv_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_never_back_fail_decc_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_not_equal_back_fail_incc_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_never_back_fail_zero_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_equal_back_fail_inv_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_always_back_fail_inv_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_greater_back_fail_wrap_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_less_back_fail_inv_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_greater_back_fail_inv_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_less_back_fail_zero_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_equal_back_fail_decc_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_always_back_fail_decw_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_always_back_fail_repl_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_never_back_fail_repl_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_less_or_equal_back_fail_zero_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_not_equal_back_fail_wrap_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_less_or_equal_back_fail_keep_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_always_back_fail_keep_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_always_back_fail_decc_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_equal_back_fail_repl_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_greater_back_fail_inv_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_less_back_fail_decw_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_greater_back_fail_wrap_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_never_back_fail_keep_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_less_back_fail_keep_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_less_back_fail_repl_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_never_back_fail_inv_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_less_back_fail_repl_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_less_back_fail_wrap_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_less_or_equal_back_fail_incc_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_greater_back_fail_decc_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_not_equal_back_fail_zero_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_greater_back_fail_repl_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_never_back_fail_decw_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_less_back_fail_decw_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_greater_back_fail_incc_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_less_or_equal_back_fail_keep_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_less_back_fail_zero_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_never_back_fail_wrap_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_less_back_fail_decw_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_greater_back_fail_repl_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_equal_back_fail_decw_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_never_back_fail_repl_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_equal_back_fail_keep_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_greater_back_fail_decw_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_not_equal_back_fail_incc_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_not_equal_back_fail_inv_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_greater_back_fail_incc_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_never_back_fail_wrap_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_less_back_fail_repl_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_less_or_equal_back_fail_repl_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_never_back_fail_zero_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_greater_back_fail_decw_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_always_back_fail_decc_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_not_equal_back_fail_keep_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_less_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_greater_back_fail_repl_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_always_back_fail_decc_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_never_back_fail_zero_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_never_back_fail_wrap_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_less_back_fail_inv_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_less_or_equal_back_fail_repl_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_not_equal_back_fail_incc_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_less_back_fail_keep_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_not_equal_back_fail_inv_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_never_back_fail_wrap_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_equal_back_fail_incc_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_not_equal_back_fail_repl_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_equal_back_fail_wrap_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_never_back_fail_inv_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_never_back_fail_incc_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_never_back_fail_zero_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_never_back_fail_repl_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_always_back_fail_decw_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_less_back_fail_inv_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_not_equal_back_fail_repl_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_greater_back_fail_inv_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_less_back_fail_zero_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_less_back_fail_decw_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_less_back_fail_decw_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_always_back_fail_decc_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_less_or_equal_back_fail_repl_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_always_back_fail_inv_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_always_back_fail_inv_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_not_equal_back_fail_keep_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_less_back_fail_incc_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_less_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_never_back_fail_keep_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_greater_back_fail_zero_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_less_or_equal_back_fail_decw_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_not_equal_back_fail_keep_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_never_back_fail_decw_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_always_back_fail_zero_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_not_equal_back_fail_keep_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_never_back_fail_inv_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_not_equal_back_fail_decc_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_always_back_fail_inv_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_decc_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_never_back_fail_inv_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_never_back_fail_zero_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_less_back_fail_wrap_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_equal_back_fail_decw_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_not_equal_back_fail_keep_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_never_back_fail_inv_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_never_back_fail_repl_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_not_equal_back_fail_incc_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_always_back_fail_decw_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_always_back_fail_inv_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_less_back_fail_decc_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_never_back_fail_zero_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_not_equal_back_fail_inv_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_not_equal_back_fail_inv_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_always_back_fail_keep_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_greater_back_fail_decw_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_not_equal_back_fail_wrap_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_less_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_greater_back_fail_decc_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_greater_back_fail_decc_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_not_equal_back_fail_zero_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_greater_back_fail_incc_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_less_back_fail_repl_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_equal_back_fail_inv_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_less_or_equal_back_fail_incc_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_equal_back_fail_decc_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_always_back_fail_inv_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_never_back_fail_wrap_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_not_equal_back_fail_keep_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_always_back_fail_decw_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_never_back_fail_decc_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_never_back_fail_incc_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_never_back_fail_decw_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_less_back_fail_wrap_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_never_back_fail_keep_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_greater_back_fail_decw_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_never_back_fail_incc_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_less_back_fail_keep_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_equal_back_fail_inv_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_greater_back_fail_decc_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_equal_back_fail_zero_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_less_back_fail_wrap_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_never_back_fail_incc_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_less_back_fail_repl_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_equal_back_fail_repl_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_not_equal_back_fail_keep_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_less_back_fail_wrap_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_greater_back_fail_repl_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_less_back_fail_incc_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_greater_back_fail_repl_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_always_back_fail_repl_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_never_back_fail_decc_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_never_back_fail_repl_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_never_back_fail_decc_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_not_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_greater_back_fail_keep_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_never_back_fail_decw_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_greater_back_fail_incc_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_less_back_fail_inv_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_never_back_fail_decw_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_less_back_fail_wrap_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_never_back_fail_zero_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_less_back_fail_decw_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_not_equal_back_fail_wrap_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_greater_back_fail_decw_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_greater_back_fail_zero_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_not_equal_back_fail_zero_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_equal_back_fail_decc_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_never_back_fail_zero_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_never_back_fail_incc_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_equal_back_fail_incc_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_never_back_fail_incc_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_equal_back_fail_wrap_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_not_equal_back_fail_repl_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_not_equal_back_fail_decc_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_equal_back_fail_decc_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_greater_back_fail_repl_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_equal_back_fail_decc_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_less_back_fail_incc_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_not_equal_back_fail_decc_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_never_back_fail_zero_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_greater_back_fail_incc_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_equal_back_fail_keep_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_never_back_fail_keep_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_less_back_fail_repl_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_greater_back_fail_repl_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_less_back_fail_wrap_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_never_back_fail_zero_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_equal_back_fail_repl_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_not_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_not_equal_back_fail_decc_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_less_back_fail_repl_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_always_back_fail_wrap_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_equal_back_fail_repl_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_never_back_fail_decw_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_not_equal_back_fail_repl_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_less_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_never_back_fail_decc_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_never_back_fail_keep_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_equal_back_fail_incc_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_less_back_fail_zero_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_less_back_fail_inv_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_not_equal_back_fail_keep_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_equal_back_fail_incc_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_less_back_fail_decw_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_less_back_fail_decw_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_not_equal_back_fail_repl_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_always_back_fail_wrap_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_not_equal_back_fail_decc_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_always_back_fail_keep_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_equal_back_fail_wrap_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_always_back_fail_wrap_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_less_or_equal_back_fail_incc_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_greater_back_fail_decw_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_equal_back_fail_wrap_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_not_equal_back_fail_inv_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_equal_back_fail_decw_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_not_equal_back_fail_incc_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_less_back_fail_decw_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_never_back_fail_incc_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_greater_back_fail_zero_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_not_equal_back_fail_decc_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_less_back_fail_repl_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_less_back_fail_keep_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_never_back_fail_decw_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_less_or_equal_back_fail_decc_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_not_equal_back_fail_repl_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_always_back_fail_zero_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_less_or_equal_back_fail_decw_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_always_back_fail_wrap_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_equal_back_fail_keep_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_less_back_fail_decw_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_not_equal_back_fail_decc_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_equal_back_fail_repl_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_always_back_fail_wrap_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_always_back_fail_wrap_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_not_equal_back_fail_incc_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_always_back_fail_decw_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_less_back_fail_decw_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_less_or_equal_back_fail_decc_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_less_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_not_equal_back_fail_decw_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_less_back_fail_decw_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_less_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_greater_back_fail_keep_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_never_back_fail_decw_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_less_back_fail_inv_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_never_back_fail_repl_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_less_back_fail_zero_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_less_back_fail_zero_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_equal_back_fail_decw_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_greater_back_fail_repl_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_never_back_fail_keep_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_repl_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_never_back_fail_zero_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_not_equal_back_fail_incc_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_never_back_fail_zero_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_never_back_fail_decc_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_less_back_fail_zero_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_less_back_fail_decw_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_less_back_fail_zero_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_less_back_fail_inv_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_greater_back_fail_zero_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_not_equal_back_fail_inv_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_greater_back_fail_wrap_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_greater_back_fail_keep_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_always_back_fail_decc_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_never_back_fail_incc_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_equal_back_fail_zero_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_not_equal_back_fail_zero_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_not_equal_back_fail_keep_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_less_back_fail_decw_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_less_back_fail_repl_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_less_back_fail_incc_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_never_back_fail_wrap_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_keep_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_greater_back_fail_incc_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_less_back_fail_keep_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_less_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_less_back_fail_decc_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_equal_back_fail_incc_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_never_back_fail_decw_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_always_back_fail_decc_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_always_back_fail_wrap_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_always_back_fail_repl_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_greater_back_fail_zero_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_never_back_fail_inv_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_less_back_fail_repl_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_not_equal_back_fail_keep_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_always_back_fail_repl_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_not_equal_back_fail_decc_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_never_back_fail_wrap_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_always_back_fail_incc_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_not_equal_back_fail_keep_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_always_back_fail_decc_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_never_back_fail_keep_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_never_back_fail_wrap_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_greater_back_fail_zero_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_not_equal_back_fail_inv_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_less_back_fail_keep_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_not_equal_back_fail_inv_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_not_equal_back_fail_repl_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_equal_back_fail_decw_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_not_equal_back_fail_wrap_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_less_back_fail_inv_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_greater_back_fail_decw_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_greater_back_fail_incc_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_equal_back_fail_wrap_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_not_equal_back_fail_inv_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_equal_back_fail_inv_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_less_back_fail_keep_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_greater_back_fail_inv_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_not_equal_back_fail_decc_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_less_back_fail_zero_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_never_back_fail_incc_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_less_back_fail_decc_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_greater_back_fail_decc_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_always_back_fail_decc_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_not_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_always_back_fail_decc_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_equal_back_fail_repl_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_equal_back_fail_zero_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_always_back_fail_decw_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_always_back_fail_decc_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_always_back_fail_decc_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_less_back_fail_keep_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_not_equal_back_fail_wrap_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_not_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_never_back_fail_zero_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_greater_back_fail_inv_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_greater_back_fail_decc_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_less_back_fail_zero_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_not_equal_back_fail_incc_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_less_back_fail_decw_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_always_back_fail_incc_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_greater_back_fail_repl_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_not_equal_back_fail_zero_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_not_equal_back_fail_wrap_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_never_back_fail_inv_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_always_back_fail_wrap_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_equal_back_fail_repl_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_less_back_fail_decc_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_never_back_fail_decw_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_never_back_fail_decc_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_equal_back_fail_zero_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_not_equal_back_fail_incc_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_not_equal_back_fail_zero_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_equal_back_fail_decc_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_not_equal_back_fail_decw_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_always_back_fail_keep_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_equal_back_fail_keep_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_not_equal_back_fail_zero_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_greater_back_fail_decw_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_not_equal_back_fail_repl_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_never_back_fail_inv_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_less_back_fail_repl_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decw_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_always_back_fail_repl_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_less_back_fail_incc_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_equal_back_fail_inv_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_less_back_fail_keep_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_never_back_fail_repl_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_less_back_fail_keep_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decc_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_always_back_fail_decw_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_less_or_equal_back_fail_incc_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_not_equal_back_fail_repl_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_not_equal_back_fail_decc_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_greater_back_fail_incc_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_greater_back_fail_inv_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_less_back_fail_decc_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_greater_back_fail_inv_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_never_back_fail_decc_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_always_back_fail_decw_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_equal_back_fail_repl_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_less_back_fail_repl_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_equal_back_fail_decc_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_less_back_fail_repl_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_always_back_fail_decc_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_always_back_fail_zero_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_equal_back_fail_decw_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_not_equal_back_fail_incc_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_equal_back_fail_decc_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_less_or_equal_back_fail_keep_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_always_back_fail_decc_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_greater_back_fail_decc_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_always_back_fail_wrap_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_less_back_fail_decc_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_less_back_fail_wrap_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_greater_back_fail_repl_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_less_or_equal_back_fail_keep_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_not_equal_back_fail_wrap_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_never_back_fail_decw_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_always_back_fail_decc_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_always_back_fail_inv_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_always_back_fail_keep_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_greater_back_fail_zero_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_never_back_fail_keep_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_always_back_fail_repl_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_equal_back_fail_incc_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_not_equal_back_fail_decw_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_less_back_fail_decw_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_greater_back_fail_decw_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_greater_back_fail_keep_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_greater_back_fail_zero_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_greater_back_fail_inv_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_less_back_fail_decc_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_always_back_fail_inv_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_never_back_fail_decw_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_never_back_fail_wrap_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_greater_back_fail_decw_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_greater_back_fail_incc_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_not_equal_back_fail_wrap_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_greater_back_fail_inv_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_greater_back_fail_incc_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_not_equal_back_fail_decw_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_greater_back_fail_wrap_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_less_back_fail_decw_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_less_back_fail_incc_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_always_back_fail_decc_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_always_back_fail_repl_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_decc_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_not_equal_back_fail_decw_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_never_back_fail_wrap_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_never_back_fail_incc_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_never_back_fail_inv_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_never_back_fail_repl_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_not_equal_back_fail_wrap_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_never_back_fail_decw_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_always_back_fail_repl_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_less_back_fail_keep_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_always_back_fail_inv_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_always_back_fail_keep_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_never_back_fail_wrap_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_not_equal_back_fail_zero_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_less_back_fail_repl_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_always_back_fail_zero_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_less_back_fail_incc_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_equal_back_fail_zero_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_equal_back_fail_decc_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_greater_back_fail_decw_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_always_back_fail_repl_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_always_back_fail_wrap_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_equal_back_fail_decc_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_never_back_fail_wrap_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_greater_back_fail_inv_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_always_back_fail_keep_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_not_equal_back_fail_incc_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_less_back_fail_incc_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_greater_back_fail_wrap_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_less_back_fail_decw_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_greater_back_fail_decc_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_greater_back_fail_wrap_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_incc_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_always_back_fail_decw_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_equal_back_fail_inv_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_greater_back_fail_inv_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_never_back_fail_zero_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_less_back_fail_decw_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_greater_back_fail_repl_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_equal_back_fail_decw_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_greater_back_fail_wrap_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_greater_back_fail_decc_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_greater_back_fail_inv_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_not_equal_back_fail_zero_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_not_equal_back_fail_keep_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_not_equal_back_fail_decw_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_equal_back_fail_keep_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_equal_back_fail_keep_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_always_back_fail_zero_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_never_back_fail_zero_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_always_back_fail_incc_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_greater_back_fail_decw_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_equal_back_fail_decc_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_greater_back_fail_decw_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_less_back_fail_keep_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_equal_back_fail_incc_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_never_back_fail_zero_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_less_back_fail_inv_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_always_back_fail_keep_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_always_back_fail_decc_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_equal_back_fail_incc_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_less_back_fail_zero_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_less_back_fail_zero_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_less_or_equal_back_fail_incc_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_always_back_fail_zero_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_equal_back_fail_zero_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_equal_back_fail_decc_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_never_back_fail_inv_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_greater_back_fail_keep_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_greater_back_fail_incc_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_not_equal_back_fail_repl_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_never_back_fail_decw_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_less_back_fail_decw_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_equal_back_fail_inv_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_not_equal_back_fail_wrap_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_equal_back_fail_inv_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_not_equal_back_fail_decc_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_never_back_fail_wrap_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_less_or_equal_back_fail_keep_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_greater_back_fail_wrap_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_equal_back_fail_wrap_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_less_back_fail_decw_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_greater_back_fail_keep_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_less_back_fail_decc_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_equal_back_fail_repl_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_always_back_fail_decw_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_never_back_fail_zero_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_greater_back_fail_zero_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_less_back_fail_incc_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_equal_back_fail_wrap_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_equal_back_fail_keep_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_not_equal_back_fail_keep_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_not_equal_back_fail_inv_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_always_back_fail_zero_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_greater_back_fail_wrap_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_less_back_fail_incc_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_less_back_fail_keep_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_greater_back_fail_zero_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_greater_back_fail_repl_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_less_back_fail_repl_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_always_back_fail_inv_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_less_or_equal_back_fail_decw_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_equal_back_fail_repl_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_equal_back_fail_wrap_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_equal_back_fail_incc_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_equal_back_fail_zero_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_less_or_equal_back_fail_decw_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_not_equal_back_fail_repl_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_always_back_fail_keep_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_equal_back_fail_keep_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_always_back_fail_inv_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_not_equal_back_fail_decc_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_equal_back_fail_decw_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_less_or_equal_back_fail_keep_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_always_back_fail_incc_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_not_equal_back_fail_inv_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_less_back_fail_decw_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_never_back_fail_wrap_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_greater_back_fail_decw_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_less_back_fail_repl_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_greater_back_fail_wrap_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_less_or_equal_back_fail_repl_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_not_equal_back_fail_wrap_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_less_back_fail_decc_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_greater_back_fail_decc_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_greater_back_fail_repl_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_never_back_fail_zero_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_less_or_equal_back_fail_decc_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_less_back_fail_wrap_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_not_equal_back_fail_keep_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_greater_back_fail_repl_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_not_equal_back_fail_repl_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_less_back_fail_decw_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_less_back_fail_wrap_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_not_equal_back_fail_wrap_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_always_back_fail_wrap_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_less_back_fail_decw_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_less_back_fail_wrap_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_always_back_fail_wrap_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_equal_back_fail_decc_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_less_or_equal_back_fail_decw_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_never_back_fail_keep_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_greater_back_fail_incc_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_equal_back_fail_zero_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_not_equal_back_fail_incc_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_equal_back_fail_keep_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_less_back_fail_zero_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_not_equal_back_fail_inv_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_less_back_fail_inv_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_equal_back_fail_zero_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decw_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_less_back_fail_decc_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_not_equal_back_fail_incc_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_equal_back_fail_decc_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_not_equal_back_fail_repl_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_not_equal_back_fail_inv_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_always_back_fail_wrap_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_equal_back_fail_inv_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_equal_back_fail_wrap_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_greater_back_fail_inv_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_less_back_fail_wrap_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_less_back_fail_decc_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_less_back_fail_incc_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_less_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_less_back_fail_repl_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_always_back_fail_decw_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_equal_back_fail_repl_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_not_equal_back_fail_zero_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_equal_back_fail_zero_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_equal_back_fail_inv_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_not_equal_back_fail_wrap_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_always_back_fail_wrap_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_greater_back_fail_decw_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_not_equal_back_fail_incc_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_less_back_fail_decc_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_never_back_fail_decw_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_greater_back_fail_zero_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_less_or_equal_back_fail_zero_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_equal_back_fail_keep_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_always_back_fail_incc_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_greater_back_fail_decw_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_not_equal_back_fail_keep_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_equal_back_fail_decw_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_less_back_fail_keep_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_greater_back_fail_incc_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_never_back_fail_wrap_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_greater_back_fail_decw_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_not_equal_back_fail_zero_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_not_equal_back_fail_wrap_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_never_back_fail_zero_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_not_equal_back_fail_decw_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_equal_back_fail_inv_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_not_equal_back_fail_repl_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_equal_back_fail_decw_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_always_back_fail_inv_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_less_or_equal_back_fail_repl_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_equal_back_fail_decw_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_always_back_fail_wrap_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_equal_back_fail_decw_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_always_back_fail_inv_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_less_or_equal_back_fail_incc_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_always_back_fail_inv_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_less_or_equal_back_fail_decw_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decc_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_less_back_fail_decw_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_never_back_fail_inv_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_less_back_fail_keep_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_not_equal_back_fail_decw_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_less_back_fail_inv_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_not_equal_back_fail_repl_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_equal_back_fail_zero_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_never_back_fail_keep_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_equal_back_fail_inv_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_never_back_fail_decc_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_equal_back_fail_incc_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_always_back_fail_incc_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_less_back_fail_inv_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_less_or_equal_back_fail_inv_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_always_back_fail_zero_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_greater_back_fail_incc_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_less_back_fail_decc_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_equal_back_fail_wrap_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_greater_back_fail_repl_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_not_equal_back_fail_inv_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_never_back_fail_keep_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_never_back_fail_incc_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_less_back_fail_incc_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_less_or_equal_back_fail_decc_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_greater_back_fail_repl_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_equal_back_fail_repl_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_always_back_fail_incc_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_less_back_fail_inv_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_not_equal_back_fail_decc_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_never_back_fail_decc_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_greater_back_fail_repl_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_not_equal_back_fail_zero_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_equal_back_fail_repl_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_never_back_fail_inv_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_less_back_fail_decw_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_less_or_equal_back_fail_decw_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_not_equal_back_fail_repl_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_greater_back_fail_repl_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_always_back_fail_repl_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_always_back_fail_decc_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_never_back_fail_keep_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_equal_back_fail_keep_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_equal_back_fail_keep_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_never_back_fail_decc_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_never_back_fail_decw_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_never_back_fail_decw_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_not_equal_back_fail_repl_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_greater_back_fail_zero_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_less_back_fail_decc_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_greater_back_fail_repl_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_always_back_fail_zero_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_greater_back_fail_decc_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_less_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_greater_back_fail_repl_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_equal_back_fail_incc_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_equal_back_fail_inv_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_greater_back_fail_repl_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_less_back_fail_decc_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_not_equal_back_fail_decc_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_less_back_fail_zero_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_never_back_fail_decc_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_not_equal_back_fail_decc_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_always_back_fail_inv_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_not_equal_back_fail_decc_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_less_back_fail_decw_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_equal_back_fail_incc_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_greater_back_fail_zero_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_greater_back_fail_zero_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_less_back_fail_zero_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_equal_back_fail_decc_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_not_equal_back_fail_decc_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_always_back_fail_repl_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_never_back_fail_decc_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_not_equal_back_fail_decc_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_greater_back_fail_wrap_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_always_back_fail_wrap_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_never_back_fail_decw_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_not_equal_back_fail_incc_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_always_back_fail_zero_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_equal_back_fail_decw_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_equal_back_fail_repl_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_equal_back_fail_wrap_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_greater_back_fail_keep_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_never_back_fail_repl_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_less_back_fail_zero_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_always_back_fail_incc_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_never_back_fail_incc_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_equal_back_fail_decc_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_equal_back_fail_inv_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_not_equal_back_fail_keep_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_never_back_fail_inv_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_less_or_equal_back_fail_keep_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_greater_back_fail_inv_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_never_back_fail_wrap_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_equal_back_fail_zero_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_less_or_equal_back_fail_keep_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_equal_back_fail_wrap_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_zero_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_not_equal_back_fail_keep_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_less_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_greater_back_fail_decw_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_never_back_fail_wrap_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_not_equal_back_fail_repl_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_equal_back_fail_inv_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_always_back_fail_inv_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_less_back_fail_decc_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_less_back_fail_inv_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_greater_back_fail_zero_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_greater_back_fail_zero_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_less_back_fail_decc_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_never_back_fail_keep_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_greater_back_fail_inv_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_always_back_fail_decc_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_less_or_equal_back_fail_zero_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_not_equal_back_fail_zero_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_equal_back_fail_incc_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_greater_back_fail_repl_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_always_back_fail_inv_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_less_back_fail_keep_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_not_equal_back_fail_incc_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_greater_back_fail_repl_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_greater_back_fail_repl_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_equal_back_fail_keep_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_not_equal_back_fail_repl_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_less_or_equal_back_fail_zero_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_not_equal_back_fail_keep_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_greater_back_fail_inv_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_not_equal_back_fail_wrap_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_greater_back_fail_decw_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_never_back_fail_inv_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_greater_back_fail_inv_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_less_back_fail_decc_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_not_equal_back_fail_zero_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_never_back_fail_wrap_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_not_equal_back_fail_zero_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_always_back_fail_wrap_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_not_equal_back_fail_inv_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_not_equal_back_fail_wrap_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_never_back_fail_keep_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_always_back_fail_keep_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_equal_back_fail_keep_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_equal_back_fail_wrap_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_less_back_fail_repl_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_not_equal_back_fail_decc_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_less_or_equal_back_fail_decw_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_equal_back_fail_wrap_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_equal_back_fail_repl_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_not_equal_back_fail_keep_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_less_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_less_or_equal_back_fail_keep_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_equal_back_fail_zero_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_always_back_fail_wrap_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_never_back_fail_decw_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_less_back_fail_zero_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_not_equal_back_fail_inv_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_equal_back_fail_incc_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_not_equal_back_fail_inv_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_less_or_equal_back_fail_incc_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_equal_back_fail_repl_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_always_back_fail_incc_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_less_or_equal_back_fail_decc_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_less_back_fail_incc_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_greater_back_fail_inv_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_always_back_fail_decc_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_less_back_fail_zero_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_not_equal_back_fail_inv_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_equal_back_fail_keep_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_less_back_fail_incc_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_always_back_fail_repl_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_not_equal_back_fail_inv_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_greater_back_fail_decc_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_equal_back_fail_inv_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_equal_back_fail_inv_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_not_equal_back_fail_decw_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_equal_back_fail_inv_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_not_equal_back_fail_repl_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_never_back_fail_repl_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_less_back_fail_decw_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_equal_back_fail_zero_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_equal_back_fail_keep_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_less_back_fail_wrap_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_always_back_fail_decw_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_never_back_fail_incc_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_not_equal_back_fail_keep_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_equal_back_fail_wrap_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_not_equal_back_fail_repl_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_less_back_fail_repl_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_less_or_equal_back_fail_zero_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_never_back_fail_decw_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_less_or_equal_back_fail_incc_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_always_back_fail_repl_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_less_back_fail_zero_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_greater_back_fail_inv_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_always_back_fail_decw_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_equal_back_fail_repl_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_always_back_fail_keep_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_less_back_fail_repl_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_always_back_fail_zero_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_greater_back_fail_decw_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_always_back_fail_decw_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_equal_back_fail_zero_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_less_or_equal_back_fail_inv_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_less_or_equal_back_fail_repl_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_equal_back_fail_keep_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_less_back_fail_decw_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_equal_back_fail_incc_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_never_back_fail_keep_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_less_back_fail_repl_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_greater_back_fail_inv_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_less_back_fail_decc_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_greater_back_fail_keep_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_equal_back_fail_decw_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_equal_back_fail_incc_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_not_equal_back_fail_inv_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_not_equal_back_fail_wrap_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_less_back_fail_zero_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_greater_back_fail_incc_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_never_back_fail_incc_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_not_equal_back_fail_decw_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_always_back_fail_incc_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_greater_back_fail_wrap_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_greater_back_fail_wrap_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_equal_back_fail_incc_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_less_back_fail_keep_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_not_equal_back_fail_inv_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_less_or_equal_back_fail_inv_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_never_back_fail_decc_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_not_equal_back_fail_wrap_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_less_back_fail_repl_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_greater_back_fail_repl_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_equal_back_fail_repl_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_always_back_fail_decc_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_less_back_fail_decw_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_equal_back_fail_repl_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_never_back_fail_repl_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_never_back_fail_inv_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_less_back_fail_decw_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_not_equal_back_fail_keep_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_always_back_fail_keep_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_less_back_fail_decc_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_equal_back_fail_wrap_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_always_back_fail_repl_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_less_back_fail_repl_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_never_back_fail_repl_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_never_back_fail_incc_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_incc_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_not_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_not_equal_back_fail_decc_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_always_back_fail_wrap_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_equal_back_fail_decw_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_equal_back_fail_keep_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_greater_back_fail_keep_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_always_back_fail_decw_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_always_back_fail_decc_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_less_back_fail_zero_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_never_back_fail_zero_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_always_back_fail_keep_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_less_back_fail_decw_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_never_back_fail_incc_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_never_back_fail_incc_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_never_back_fail_decw_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_always_back_fail_incc_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_always_back_fail_wrap_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_always_back_fail_inv_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_always_back_fail_inv_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_never_back_fail_zero_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_greater_back_fail_repl_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_equal_back_fail_inv_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_less_or_equal_back_fail_decc_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_equal_back_fail_decc_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_always_back_fail_keep_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_less_back_fail_zero_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_equal_back_fail_decc_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_greater_back_fail_keep_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_equal_back_fail_keep_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_less_back_fail_inv_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_greater_back_fail_keep_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_equal_back_fail_repl_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_never_back_fail_keep_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_always_back_fail_inv_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_never_back_fail_wrap_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_less_back_fail_zero_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_not_equal_back_fail_keep_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_not_equal_back_fail_inv_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_greater_back_fail_repl_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_less_back_fail_keep_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_less_back_fail_zero_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_not_equal_back_fail_decw_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_always_back_fail_zero_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_less_back_fail_decc_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_always_back_fail_keep_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_never_back_fail_incc_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_equal_back_fail_inv_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_incc_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_less_back_fail_repl_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_less_or_equal_back_fail_zero_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_greater_back_fail_zero_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_less_back_fail_incc_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_always_back_fail_decw_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_always_back_fail_decc_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_always_back_fail_incc_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_not_equal_back_fail_keep_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_less_back_fail_wrap_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_always_back_fail_zero_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_equal_back_fail_inv_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_greater_back_fail_decw_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_never_back_fail_keep_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_equal_back_fail_incc_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_greater_back_fail_incc_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_greater_back_fail_keep_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_never_back_fail_decc_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_never_back_fail_decc_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_not_equal_back_fail_wrap_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_less_or_equal_back_fail_decw_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_not_equal_back_fail_keep_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_never_back_fail_wrap_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_equal_back_fail_inv_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_equal_back_fail_repl_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_always_back_fail_inv_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_equal_back_fail_repl_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_always_back_fail_repl_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_never_back_fail_decw_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_less_back_fail_inv_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_never_back_fail_incc_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_greater_back_fail_incc_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_never_back_fail_inv_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_less_back_fail_zero_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_never_back_fail_wrap_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_always_back_fail_decc_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_equal_back_fail_decw_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_greater_back_fail_inv_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_always_back_fail_decw_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_less_back_fail_decc_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_equal_back_fail_incc_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_not_equal_back_fail_incc_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_greater_back_fail_inv_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_not_equal_back_fail_repl_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_not_equal_back_fail_keep_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_always_back_fail_decc_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_not_equal_back_fail_repl_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_never_back_fail_wrap_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_less_or_equal_back_fail_repl_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_greater_back_fail_repl_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_never_back_fail_incc_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_never_back_fail_decc_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_never_back_fail_repl_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_always_back_fail_repl_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_greater_back_fail_zero_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_greater_back_fail_wrap_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_never_back_fail_decc_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_not_equal_back_fail_wrap_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_equal_back_fail_decw_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_less_back_fail_decc_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_never_back_fail_zero_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_not_equal_back_fail_incc_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_always_back_fail_keep_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_never_back_fail_zero_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_not_equal_back_fail_decc_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_less_back_fail_keep_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_always_back_fail_decc_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_greater_back_fail_decc_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_greater_back_fail_decw_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_less_back_fail_decc_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_greater_back_fail_inv_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_less_or_equal_back_fail_repl_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_equal_back_fail_keep_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_always_back_fail_zero_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_not_equal_back_fail_wrap_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_not_equal_back_fail_repl_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_not_equal_back_fail_keep_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_less_or_equal_back_fail_inv_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_greater_back_fail_keep_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_always_back_fail_decc_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_equal_back_fail_decw_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_never_back_fail_incc_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_less_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_never_back_fail_wrap_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_less_or_equal_back_fail_inv_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_equal_back_fail_incc_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_always_back_fail_zero_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_always_back_fail_repl_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_equal_back_fail_zero_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_always_back_fail_repl_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_never_back_fail_keep_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_never_back_fail_incc_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_less_back_fail_inv_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_less_back_fail_zero_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_less_or_equal_back_fail_repl_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_never_back_fail_zero_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_equal_back_fail_keep_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_greater_back_fail_decc_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_greater_back_fail_decw_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_less_back_fail_wrap_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_less_back_fail_zero_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_always_back_fail_decw_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_less_or_equal_back_fail_inv_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_greater_back_fail_keep_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_equal_back_fail_zero_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_always_back_fail_keep_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decc_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_always_back_fail_decc_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_not_equal_back_fail_repl_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_always_back_fail_decc_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_always_back_fail_repl_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_not_equal_back_fail_decc_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_not_equal_back_fail_zero_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_not_equal_back_fail_keep_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_not_equal_back_fail_decc_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_never_back_fail_decw_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_equal_back_fail_zero_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_less_back_fail_repl_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_not_equal_back_fail_decw_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_not_equal_back_fail_keep_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_never_back_fail_wrap_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_never_back_fail_decc_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_less_back_fail_decc_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_equal_back_fail_decw_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_equal_back_fail_keep_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_not_equal_back_fail_decc_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_never_back_fail_decw_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_always_back_fail_inv_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_less_back_fail_incc_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_equal_back_fail_incc_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_not_equal_back_fail_decc_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_greater_back_fail_wrap_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_equal_back_fail_inv_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_never_back_fail_decc_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_always_back_fail_repl_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_greater_back_fail_decc_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_less_back_fail_inv_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_not_equal_back_fail_incc_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_equal_back_fail_zero_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_not_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_less_or_equal_back_fail_zero_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_not_equal_back_fail_decc_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_equal_back_fail_keep_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_never_back_fail_inv_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_always_back_fail_keep_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_always_back_fail_keep_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_not_equal_back_fail_incc_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_less_back_fail_repl_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_always_back_fail_repl_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_less_back_fail_repl_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_greater_back_fail_wrap_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_equal_back_fail_incc_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_less_back_fail_zero_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_equal_back_fail_inv_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_less_back_fail_keep_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_never_back_fail_decc_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_equal_back_fail_inv_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_greater_back_fail_decc_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_never_back_fail_inv_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_equal_back_fail_repl_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_not_equal_back_fail_incc_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_greater_back_fail_keep_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_never_back_fail_inv_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_greater_back_fail_keep_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_equal_back_fail_incc_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_never_back_fail_wrap_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_less_or_equal_back_fail_repl_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_never_back_fail_zero_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_not_equal_back_fail_incc_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_equal_back_fail_zero_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_never_back_fail_zero_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_not_equal_back_fail_inv_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_not_equal_back_fail_decw_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_never_back_fail_inv_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_never_back_fail_inv_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_equal_back_fail_incc_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_less_back_fail_wrap_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_never_back_fail_incc_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_greater_back_fail_keep_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_less_back_fail_decc_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_never_back_fail_keep_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_greater_back_fail_incc_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_not_equal_back_fail_incc_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_always_back_fail_zero_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_less_back_fail_decc_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_not_equal_back_fail_incc_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_equal_back_fail_incc_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_always_back_fail_wrap_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_never_back_fail_wrap_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_less_back_fail_zero_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_greater_back_fail_decw_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_equal_back_fail_decc_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_not_equal_back_fail_repl_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_never_back_fail_repl_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_not_equal_back_fail_decc_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_less_back_fail_repl_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_equal_back_fail_zero_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_less_back_fail_keep_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_equal_back_fail_inv_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_always_back_fail_incc_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_equal_back_fail_repl_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_greater_back_fail_keep_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_greater_back_fail_decc_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_never_back_fail_wrap_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_greater_back_fail_decc_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_less_back_fail_decc_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_equal_back_fail_keep_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_less_back_fail_inv_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_equal_back_fail_keep_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_equal_back_fail_decw_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_never_back_fail_inv_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_not_equal_back_fail_zero_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_equal_back_fail_repl_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_less_or_equal_back_fail_decw_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_greater_back_fail_inv_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_always_back_fail_repl_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_equal_back_fail_keep_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_greater_back_fail_incc_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_never_back_fail_decw_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_less_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_equal_back_fail_keep_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_less_back_fail_repl_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_equal_back_fail_zero_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_not_equal_back_fail_wrap_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_never_back_fail_decc_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_always_back_fail_decc_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_less_back_fail_incc_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_not_equal_back_fail_wrap_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_always_back_fail_incc_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_less_or_equal_back_fail_zero_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_never_back_fail_incc_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_never_back_fail_decc_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_never_back_fail_incc_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_less_or_equal_back_fail_inv_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_never_back_fail_decc_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_never_back_fail_inv_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_greater_back_fail_decc_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_less_back_fail_keep_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_always_back_fail_wrap_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_less_or_equal_back_fail_decc_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_equal_back_fail_wrap_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_equal_back_fail_decw_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_greater_back_fail_incc_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_equal_back_fail_decc_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_less_back_fail_zero_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_less_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_always_back_fail_incc_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_always_back_fail_incc_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_less_back_fail_repl_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_less_back_fail_inv_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_never_back_fail_repl_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_never_back_fail_repl_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_greater_back_fail_repl_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_not_equal_back_fail_decc_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_equal_back_fail_decc_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_greater_back_fail_incc_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_never_back_fail_inv_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_less_back_fail_decc_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_never_back_fail_keep_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_equal_back_fail_repl_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_never_back_fail_decw_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_always_back_fail_incc_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_not_equal_back_fail_wrap_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_greater_back_fail_keep_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_not_equal_back_fail_incc_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_less_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_not_equal_back_fail_repl_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_never_back_fail_wrap_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_greater_back_fail_decw_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_less_back_fail_keep_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_less_back_fail_repl_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_equal_back_fail_zero_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_always_back_fail_keep_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_less_or_equal_back_fail_inv_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_not_equal_back_fail_wrap_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_equal_back_fail_repl_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_always_back_fail_wrap_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_not_equal_back_fail_keep_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_equal_back_fail_zero_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_always_back_fail_repl_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_never_back_fail_repl_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_not_equal_back_fail_repl_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_never_back_fail_zero_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_never_back_fail_decw_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_never_back_fail_decw_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_equal_back_fail_wrap_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_equal_back_fail_decc_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_never_back_fail_decc_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_never_back_fail_keep_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_always_back_fail_zero_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_always_back_fail_decw_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_not_equal_back_fail_decw_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_equal_back_fail_zero_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_equal_back_fail_decc_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_never_back_fail_inv_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_less_back_fail_wrap_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_never_back_fail_inv_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_never_back_fail_incc_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_less_or_equal_back_fail_keep_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_not_equal_back_fail_repl_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_always_back_fail_decw_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_less_or_equal_back_fail_decw_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_less_back_fail_repl_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_equal_back_fail_zero_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_less_back_fail_inv_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_not_equal_back_fail_incc_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_never_back_fail_keep_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_not_equal_back_fail_wrap_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_equal_back_fail_incc_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_always_back_fail_incc_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_equal_back_fail_zero_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_always_back_fail_repl_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_always_back_fail_zero_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_equal_back_fail_keep_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_greater_back_fail_decw_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_greater_back_fail_inv_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_equal_back_fail_decw_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decw_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_greater_back_fail_decw_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_never_back_fail_decc_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_always_back_fail_wrap_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_less_back_fail_decw_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_always_back_fail_zero_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_not_equal_back_fail_repl_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_equal_back_fail_inv_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_never_back_fail_repl_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decc_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_less_back_fail_inv_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_equal_back_fail_decc_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_less_back_fail_inv_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_less_back_fail_decw_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_greater_back_fail_repl_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_greater_back_fail_incc_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_less_back_fail_inv_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_equal_back_fail_decw_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_always_back_fail_keep_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_never_back_fail_incc_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_never_back_fail_decw_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_greater_back_fail_decc_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_always_back_fail_inv_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_less_back_fail_zero_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_always_back_fail_inv_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_greater_back_fail_incc_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_always_back_fail_zero_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_always_back_fail_decw_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_equal_back_fail_repl_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_never_back_fail_decw_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_less_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_greater_back_fail_incc_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_never_back_fail_repl_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_never_back_fail_incc_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_less_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_less_back_fail_inv_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_less_back_fail_decc_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_less_or_equal_back_fail_repl_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_not_equal_back_fail_decc_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_less_back_fail_decc_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_equal_back_fail_zero_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_always_back_fail_keep_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_greater_back_fail_incc_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_never_back_fail_wrap_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_less_back_fail_wrap_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_greater_back_fail_repl_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_equal_back_fail_repl_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_less_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_less_back_fail_wrap_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_equal_back_fail_inv_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_always_back_fail_zero_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_greater_back_fail_repl_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_always_back_fail_keep_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_never_back_fail_inv_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_equal_back_fail_zero_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_less_back_fail_inv_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_never_back_fail_decc_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_never_back_fail_incc_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_not_equal_back_fail_decw_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_greater_back_fail_inv_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_never_back_fail_zero_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_greater_back_fail_incc_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_not_equal_back_fail_inv_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_always_back_fail_decw_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_equal_back_fail_decc_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_less_back_fail_repl_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_always_back_fail_decc_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_never_back_fail_incc_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_not_equal_back_fail_incc_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_inv_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_less_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_always_back_fail_decc_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_never_back_fail_repl_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_always_back_fail_inv_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_always_back_fail_decc_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_not_equal_back_fail_wrap_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_never_back_fail_keep_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_always_back_fail_zero_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_greater_back_fail_repl_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_less_back_fail_decw_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_greater_back_fail_inv_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_repl_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_always_back_fail_decw_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_greater_back_fail_repl_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_equal_back_fail_incc_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_never_back_fail_repl_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_greater_back_fail_wrap_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_less_or_equal_back_fail_keep_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_equal_back_fail_wrap_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_equal_back_fail_decw_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_not_equal_back_fail_decw_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_greater_back_fail_decc_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_never_back_fail_decw_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_greater_back_fail_inv_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_not_equal_back_fail_decw_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_always_back_fail_repl_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_equal_back_fail_wrap_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_never_back_fail_decc_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_less_back_fail_zero_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_not_equal_back_fail_repl_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_greater_back_fail_wrap_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_equal_back_fail_decw_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decc_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_less_back_fail_decc_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_less_back_fail_incc_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_less_back_fail_inv_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_not_equal_back_fail_incc_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_never_back_fail_incc_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_greater_back_fail_decc_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_greater_back_fail_repl_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_always_back_fail_repl_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_greater_back_fail_repl_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_always_back_fail_wrap_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_always_back_fail_zero_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_greater_back_fail_zero_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_not_equal_back_fail_keep_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_never_back_fail_wrap_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_equal_back_fail_zero_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_equal_back_fail_keep_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_never_back_fail_decc_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_equal_back_fail_wrap_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_equal_back_fail_inv_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_always_back_fail_zero_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_always_back_fail_decc_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_never_back_fail_decc_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_never_back_fail_inv_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_equal_back_fail_repl_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_equal_back_fail_repl_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_never_back_fail_keep_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_greater_back_fail_wrap_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_less_or_equal_back_fail_repl_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_greater_back_fail_wrap_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_greater_back_fail_inv_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_never_back_fail_decw_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_equal_back_fail_zero_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decw_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_never_back_fail_incc_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_not_equal_back_fail_decc_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_greater_back_fail_decc_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_equal_back_fail_zero_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_equal_back_fail_decw_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_less_back_fail_wrap_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_never_back_fail_incc_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_always_back_fail_keep_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_always_back_fail_inv_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_not_equal_back_fail_repl_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_never_back_fail_inv_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_never_back_fail_inv_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_greater_back_fail_decw_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_less_back_fail_decc_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_greater_back_fail_repl_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_equal_back_fail_decc_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_greater_back_fail_wrap_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_always_back_fail_decw_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_never_back_fail_keep_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_equal_back_fail_decw_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_never_back_fail_repl_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_not_equal_back_fail_decc_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_not_equal_back_fail_wrap_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_less_back_fail_incc_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_always_back_fail_repl_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_equal_back_fail_wrap_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_always_back_fail_decw_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_equal_back_fail_incc_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_less_back_fail_decw_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_equal_back_fail_repl_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_always_back_fail_wrap_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_greater_back_fail_decw_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_equal_back_fail_decc_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_less_back_fail_decw_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_never_back_fail_decw_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_equal_back_fail_incc_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_equal_back_fail_zero_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_equal_back_fail_incc_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_not_equal_back_fail_keep_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_greater_back_fail_wrap_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_not_equal_back_fail_inv_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_greater_back_fail_decc_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_greater_back_fail_keep_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_greater_back_fail_repl_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decc_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_not_equal_back_fail_decw_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_less_back_fail_repl_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_equal_back_fail_zero_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_never_back_fail_keep_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_not_equal_back_fail_keep_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_less_back_fail_incc_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_always_back_fail_keep_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_less_back_fail_decw_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_incc_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_not_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_equal_back_fail_repl_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_greater_back_fail_incc_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_greater_back_fail_incc_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_always_back_fail_keep_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_equal_back_fail_decc_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_less_back_fail_incc_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_equal_back_fail_inv_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_less_back_fail_wrap_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_always_back_fail_wrap_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_not_equal_back_fail_zero_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_equal_back_fail_repl_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_always_back_fail_wrap_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_not_equal_back_fail_decc_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_always_back_fail_incc_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_never_back_fail_inv_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_greater_back_fail_repl_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_not_equal_back_fail_decw_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_less_or_equal_back_fail_inv_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_always_back_fail_wrap_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_greater_back_fail_decc_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_greater_back_fail_zero_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_greater_back_fail_repl_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_never_back_fail_wrap_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_always_back_fail_decw_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_always_back_fail_decw_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_equal_back_fail_decw_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_greater_back_fail_decc_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_always_back_fail_wrap_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_not_equal_back_fail_keep_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_always_back_fail_keep_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_not_equal_back_fail_repl_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d24_unorm_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_less_back_fail_keep_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_not_equal_back_fail_decc_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_not_equal_back_fail_decc_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_less_back_fail_incc_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_equal_back_fail_inv_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_always_back_fail_incc_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_not_equal_back_fail_repl_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_equal_back_fail_incc_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_inv_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_equal_back_fail_incc_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_never_back_fail_decc_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_always_back_fail_decw_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_less_back_fail_keep_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_always_back_fail_repl_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_never_back_fail_inv_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_less_back_fail_decw_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_equal_back_fail_decc_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_less_back_fail_decc_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_less_back_fail_inv_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_greater_back_fail_zero_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_greater_back_fail_wrap_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_always_back_fail_decc_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_never_back_fail_wrap_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_never_back_fail_keep_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_never_back_fail_incc_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_less_back_fail_incc_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_equal_back_fail_decc_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_not_equal_back_fail_keep_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_never_back_fail_keep_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_greater_back_fail_inv_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_always_back_fail_wrap_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_greater_back_fail_decw_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_equal_back_fail_incc_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_always_back_fail_decc_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_less_or_equal_back_fail_zero_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_never_back_fail_incc_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_greater_back_fail_incc_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_never_back_fail_wrap_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_equal_back_fail_wrap_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_not_equal_back_fail_keep_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_greater_back_fail_decc_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_not_equal_back_fail_decc_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_not_equal_back_fail_zero_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_always_back_fail_inv_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_greater_back_fail_keep_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_greater_back_fail_zero_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_not_equal_back_fail_repl_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_less_back_fail_wrap_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_less_back_fail_repl_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_not_equal_back_fail_inv_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_equal_back_fail_inv_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_always_back_fail_repl_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_never_back_fail_zero_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_equal_back_fail_incc_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_equal_back_fail_decw_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_less_back_fail_decc_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_never_back_fail_decw_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_less_or_equal_back_fail_inv_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_never_back_fail_decc_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_equal_back_fail_inv_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_never_back_fail_zero_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_never_back_fail_wrap_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_always_back_fail_keep_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_equal_back_fail_keep_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_greater_back_fail_decc_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_not_equal_back_fail_inv_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_always_back_fail_decc_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_always_back_fail_wrap_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_always_back_fail_decc_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_not_equal_back_fail_keep_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_equal_back_fail_decc_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_equal_back_fail_repl_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_not_equal_back_fail_decw_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_less_or_equal_back_fail_keep_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_greater_back_fail_inv_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_equal_back_fail_decw_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_less_back_fail_zero_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_equal_back_fail_repl_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_always_back_fail_inv_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_never_back_fail_inv_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_never_back_fail_zero_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_equal_back_fail_inv_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_equal_back_fail_inv_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_less_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_never_back_fail_incc_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_always_back_fail_keep_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_not_equal_back_fail_inv_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_less_or_equal_back_fail_repl_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_greater_back_fail_decw_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_equal_back_fail_repl_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_less_back_fail_inv_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_less_back_fail_incc_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_never_back_fail_inv_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_never_back_fail_keep_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_not_equal_back_fail_repl_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_greater_back_fail_inv_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_never_back_fail_inv_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_always_back_fail_decc_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_less_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_greater_back_fail_decw_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_less_or_equal_back_fail_zero_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_never_back_fail_keep_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_greater_back_fail_decc_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_always_back_fail_incc_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_equal_back_fail_inv_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_never_back_fail_inv_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_always_back_fail_inv_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_always_back_fail_incc_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_always_back_fail_wrap_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_always_back_fail_decw_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_equal_back_fail_repl_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_never_back_fail_decc_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_always_back_fail_repl_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_not_equal_back_fail_wrap_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_never_back_fail_decc_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_less_back_fail_decw_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_not_equal_back_fail_zero_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_less_or_equal_back_fail_zero_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_greater_back_fail_wrap_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_not_equal_back_fail_decc_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_always_back_fail_decw_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_always_back_fail_inv_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_never_back_fail_keep_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_not_equal_back_fail_zero_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_less_or_equal_back_fail_inv_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_greater_back_fail_keep_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_less_back_fail_wrap_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_always_back_fail_decc_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_always_back_fail_incc_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_not_equal_back_fail_incc_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_less_back_fail_inv_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_equal_back_fail_inv_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_equal_back_fail_keep_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_greater_back_fail_decw_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_never_back_fail_repl_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_equal_back_fail_inv_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_greater_back_fail_decc_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_always_back_fail_incc_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_equal_back_fail_decw_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_less_back_fail_incc_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_less_or_equal_back_fail_keep_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_never_back_fail_incc_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_not_equal_back_fail_repl_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_not_equal_back_fail_decc_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_not_equal_back_fail_incc_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_always_back_fail_decw_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_greater_back_fail_decc_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_not_equal_back_fail_incc_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_never_back_fail_decw_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_not_equal_back_fail_decw_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_not_equal_back_fail_decc_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_less_back_fail_decw_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_greater_back_fail_inv_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_not_equal_back_fail_wrap_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_equal_back_fail_inv_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_less_or_equal_back_fail_zero_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_not_equal_back_fail_repl_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_never_back_fail_inv_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_not_equal_back_fail_decw_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_greater_back_fail_repl_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_not_equal_back_fail_wrap_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_never_back_fail_repl_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_less_back_fail_inv_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_never_back_fail_keep_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_less_back_fail_decw_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_always_back_fail_repl_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_never_back_fail_incc_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_never_back_fail_inv_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_less_back_fail_decc_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_not_equal_back_fail_decc_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_not_equal_back_fail_zero_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_greater_back_fail_zero_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_equal_back_fail_incc_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_always_back_fail_inv_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_always_back_fail_inv_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_always_back_fail_incc_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_always_back_fail_wrap_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_equal_back_fail_decw_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_never_back_fail_keep_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_greater_back_fail_zero_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_less_or_equal_back_fail_decw_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_equal_back_fail_decw_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_equal_back_fail_decc_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_equal_back_fail_wrap_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_less_or_equal_back_fail_repl_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_never_back_fail_inv_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_equal_back_fail_zero_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_always_back_fail_incc_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_less_or_equal_back_fail_decw_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_greater_back_fail_decw_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_not_equal_back_fail_decc_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_greater_back_fail_wrap_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_less_back_fail_wrap_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_never_back_fail_repl_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_greater_back_fail_inv_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_always_back_fail_decw_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_always_back_fail_inv_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_not_equal_back_fail_zero_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_equal_back_fail_decc_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_never_back_fail_repl_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_not_equal_back_fail_wrap_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_never_back_fail_decw_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_less_back_fail_repl_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_less_back_fail_inv_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_never_back_fail_wrap_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_greater_back_fail_repl_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_always_back_fail_decw_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_not_equal_back_fail_incc_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_less_or_equal_back_fail_incc_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_equal_back_fail_repl_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_greater_back_fail_decc_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_greater_back_fail_repl_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_always_back_fail_keep_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_always_back_fail_keep_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_less_back_fail_wrap_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_greater_back_fail_zero_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_decc_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_never_back_fail_keep_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_less_back_fail_inv_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_not_equal_back_fail_zero_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_never_back_fail_decc_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_equal_back_fail_keep_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_greater_back_fail_keep_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_greater_back_fail_zero_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_not_equal_back_fail_keep_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_greater_back_fail_decw_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_never_back_fail_decc_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_always_back_fail_decc_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_always_back_fail_keep_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_always_back_fail_incc_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_less_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_never_back_fail_zero_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_always_back_fail_keep_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_less_back_fail_decc_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_never_back_fail_inv_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_equal_back_fail_keep_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_less_back_fail_zero_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_equal_back_fail_incc_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_less_or_equal_back_fail_inv_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_less_back_fail_zero_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_equal_back_fail_inv_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_not_equal_back_fail_decw_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_less_back_fail_decc_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_less_back_fail_wrap_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_equal_back_fail_incc_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_greater_back_fail_wrap_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_equal_back_fail_keep_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_never_back_fail_incc_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_always_back_fail_incc_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_less_back_fail_repl_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_never_back_fail_zero_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_equal_back_fail_keep_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_not_equal_back_fail_wrap_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_less_back_fail_zero_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_always_back_fail_decw_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_equal_back_fail_keep_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_equal_back_fail_decw_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_less_or_equal_back_fail_incc_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_never_back_fail_decc_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_never_back_fail_incc_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_less_back_fail_decc_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_equal_back_fail_repl_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_less_back_fail_incc_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_never_back_fail_keep_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_less_back_fail_decc_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_not_equal_back_fail_inv_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_never_back_fail_decc_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_never_back_fail_incc_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_less_back_fail_incc_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_always_back_fail_decc_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decw_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_never_back_fail_inv_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_equal_back_fail_decc_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_less_back_fail_incc_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_not_equal_back_fail_decw_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_less_back_fail_incc_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_less_back_fail_repl_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_less_back_fail_decc_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_less_back_fail_keep_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_not_equal_back_fail_decw_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_not_equal_back_fail_repl_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_equal_back_fail_keep_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_not_equal_back_fail_keep_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_never_back_fail_wrap_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_greater_back_fail_keep_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_not_equal_back_fail_zero_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_always_back_fail_decc_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_not_equal_back_fail_decc_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_less_back_fail_decc_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_always_back_fail_decw_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_never_back_fail_zero_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_less_back_fail_repl_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_less_back_fail_repl_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_always_back_fail_keep_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_equal_back_fail_wrap_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_greater_back_fail_keep_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_less_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_never_back_fail_repl_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_less_or_equal_back_fail_inv_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_not_equal_back_fail_zero_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_always_back_fail_keep_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_not_equal_back_fail_wrap_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_greater_back_fail_wrap_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_greater_back_fail_inv_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_equal_back_fail_keep_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_greater_back_fail_inv_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_never_back_fail_repl_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_not_equal_back_fail_decc_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_not_equal_back_fail_zero_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_greater_back_fail_decc_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_not_equal_back_fail_inv_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_not_equal_back_fail_decw_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_equal_back_fail_inv_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_greater_back_fail_incc_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_equal_back_fail_repl_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_greater_back_fail_wrap_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_less_back_fail_zero_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_less_back_fail_repl_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_equal_back_fail_wrap_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_not_equal_back_fail_inv_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_always_back_fail_decw_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_equal_back_fail_wrap_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_less_back_fail_zero_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_not_equal_back_fail_keep_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_less_back_fail_zero_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_always_back_fail_zero_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_not_equal_back_fail_repl_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_repl_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_never_back_fail_zero_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_never_back_fail_keep_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_not_equal_back_fail_zero_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_equal_back_fail_decc_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_always_back_fail_incc_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_always_back_fail_wrap_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_greater_back_fail_wrap_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_never_back_fail_keep_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_greater_back_fail_inv_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_greater_back_fail_wrap_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_not_equal_back_fail_zero_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_equal_back_fail_inv_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_equal_back_fail_inv_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_less_back_fail_repl_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_always_back_fail_incc_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_equal_back_fail_decc_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_always_back_fail_wrap_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_never_back_fail_zero_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_never_back_fail_repl_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_not_equal_back_fail_wrap_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_not_equal_back_fail_incc_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_less_back_fail_repl_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_not_equal_back_fail_wrap_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_never_back_fail_zero_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_always_back_fail_incc_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_never_back_fail_wrap_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_greater_back_fail_decc_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_not_equal_back_fail_keep_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_less_back_fail_repl_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_greater_back_fail_inv_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_not_equal_back_fail_incc_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_equal_back_fail_zero_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_equal_back_fail_inv_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_less_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_never_back_fail_inv_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_greater_back_fail_repl_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_greater_back_fail_decw_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_less_or_equal_back_fail_decc_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_less_back_fail_keep_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_always_back_fail_decw_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_less_back_fail_keep_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_less_back_fail_keep_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_not_equal_back_fail_incc_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_not_equal_back_fail_decc_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_never_back_fail_incc_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_greater_back_fail_decw_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_greater_back_fail_zero_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_not_equal_back_fail_wrap_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_greater_back_fail_wrap_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_equal_back_fail_zero_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_greater_back_fail_wrap_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_greater_back_fail_wrap_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_equal_back_fail_keep_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_always_back_fail_incc_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_equal_back_fail_repl_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_less_back_fail_inv_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_less_back_fail_keep_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_less_or_equal_back_fail_decc_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_equal_back_fail_wrap_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_always_back_fail_decw_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_not_equal_back_fail_wrap_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_less_back_fail_incc_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_always_back_fail_incc_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_never_back_fail_wrap_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_equal_back_fail_repl_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_greater_back_fail_decw_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_equal_back_fail_inv_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_equal_back_fail_decc_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_greater_back_fail_keep_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_less_back_fail_repl_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_never_back_fail_inv_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_less_back_fail_keep_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_not_equal_back_fail_incc_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_less_back_fail_zero_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_greater_back_fail_decc_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_less_back_fail_repl_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_greater_back_fail_decw_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_greater_back_fail_keep_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_not_equal_back_fail_inv_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_always_back_fail_decc_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_never_back_fail_decw_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_not_equal_back_fail_wrap_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_never_back_fail_inv_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_less_or_equal_back_fail_incc_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_less_back_fail_incc_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_keep_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_equal_back_fail_zero_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_never_back_fail_wrap_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_equal_back_fail_wrap_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_greater_back_fail_zero_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_never_back_fail_zero_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_greater_back_fail_repl_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_greater_back_fail_decc_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_never_back_fail_repl_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_not_equal_back_fail_decw_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_never_back_fail_keep_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_never_back_fail_keep_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_always_back_fail_zero_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_not_equal_back_fail_decc_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_equal_back_fail_incc_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_never_back_fail_zero_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_less_back_fail_incc_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_less_back_fail_zero_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_less_back_fail_wrap_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_less_or_equal_back_fail_repl_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_less_or_equal_back_fail_zero_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_never_back_fail_inv_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_equal_back_fail_decw_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_equal_back_fail_decc_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_less_back_fail_decw_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_equal_back_fail_repl_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_never_back_fail_repl_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_equal_back_fail_inv_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_always_back_fail_wrap_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_greater_back_fail_decc_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_never_back_fail_keep_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_always_back_fail_decw_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_less_back_fail_keep_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_not_equal_back_fail_decc_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_less_back_fail_keep_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_greater_back_fail_decc_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_equal_back_fail_zero_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_never_back_fail_decc_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_always_back_fail_wrap_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_equal_back_fail_wrap_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_not_equal_back_fail_inv_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_never_back_fail_incc_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_not_equal_back_fail_zero_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_not_equal_back_fail_incc_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_less_or_equal_back_fail_zero_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_less_back_fail_inv_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_never_back_fail_keep_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_not_equal_back_fail_keep_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_greater_back_fail_decc_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_greater_back_fail_zero_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_never_back_fail_repl_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_always_back_fail_incc_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_not_equal_back_fail_incc_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_equal_back_fail_keep_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_not_equal_back_fail_decw_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_less_or_equal_back_fail_repl_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_never_back_fail_inv_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_always_back_fail_keep_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_never_back_fail_decw_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_greater_back_fail_incc_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_always_back_fail_wrap_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_less_back_fail_wrap_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_greater_back_fail_inv_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_equal_back_fail_decc_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_never_back_fail_inv_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_never_back_fail_keep_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_less_back_fail_decw_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_greater_back_fail_inv_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_greater_back_fail_inv_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_equal_back_fail_repl_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_never_back_fail_wrap_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_never_back_fail_decc_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_never_back_fail_wrap_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_equal_back_fail_zero_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_less_back_fail_wrap_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_never_back_fail_decw_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_never_back_fail_decc_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_equal_back_fail_decc_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_not_equal_back_fail_inv_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_never_back_fail_incc_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_less_back_fail_wrap_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_not_equal_back_fail_decc_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_less_back_fail_wrap_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_less_back_fail_incc_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_less_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_always_back_fail_zero_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_less_or_equal_back_fail_repl_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_less_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_always_back_fail_zero_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_always_back_fail_decc_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_not_equal_back_fail_keep_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_not_equal_back_fail_repl_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_equal_back_fail_keep_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_always_back_fail_decc_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_not_equal_back_fail_wrap_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_never_back_fail_decw_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_always_back_fail_keep_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_not_equal_back_fail_wrap_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_equal_back_fail_incc_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_not_equal_back_fail_repl_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_less_or_equal_back_fail_decc_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_never_back_fail_keep_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_equal_back_fail_zero_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_greater_back_fail_incc_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_incc_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_always_back_fail_inv_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_less_back_fail_keep_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_less_or_equal_back_fail_incc_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_never_back_fail_decc_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_equal_back_fail_repl_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_greater_back_fail_zero_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_not_equal_back_fail_decw_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_never_back_fail_repl_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_equal_back_fail_inv_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_less_back_fail_decw_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_greater_back_fail_inv_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_always_back_fail_decc_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_less_back_fail_keep_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_never_back_fail_keep_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_always_back_fail_decw_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_less_back_fail_inv_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_never_back_fail_keep_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_never_back_fail_repl_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_greater_back_fail_zero_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_never_back_fail_keep_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_less_or_equal_back_fail_zero_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_not_equal_back_fail_zero_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_greater_back_fail_decc_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_less_back_fail_wrap_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_never_back_fail_decc_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_greater_back_fail_wrap_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_equal_back_fail_repl_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_greater_back_fail_keep_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_always_back_fail_zero_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_always_back_fail_wrap_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_always_back_fail_repl_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_equal_back_fail_keep_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_always_back_fail_wrap_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_never_back_fail_wrap_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_greater_back_fail_incc_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_equal_back_fail_keep_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_less_back_fail_repl_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_greater_back_fail_zero_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_never_back_fail_incc_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_never_back_fail_decw_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_greater_back_fail_decw_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_not_equal_back_fail_decc_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_less_or_equal_back_fail_decw_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_never_back_fail_wrap_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_less_back_fail_decw_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_not_equal_back_fail_zero_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_equal_back_fail_wrap_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_always_back_fail_decw_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_always_back_fail_inv_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_never_back_fail_incc_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_equal_back_fail_zero_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_less_back_fail_zero_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_equal_back_fail_decw_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_never_back_fail_keep_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_never_back_fail_repl_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_not_equal_back_fail_decw_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_equal_back_fail_repl_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_always_back_fail_zero_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_never_back_fail_zero_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_not_equal_back_fail_inv_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_less_back_fail_incc_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_less_or_equal_back_fail_repl_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_equal_back_fail_decc_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_always_back_fail_decc_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_never_back_fail_repl_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_greater_back_fail_incc_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_less_or_equal_back_fail_decc_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_less_or_equal_back_fail_zero_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_less_back_fail_incc_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_never_back_fail_keep_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_not_equal_back_fail_decw_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_greater_back_fail_decw_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_less_back_fail_zero_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_never_back_fail_decw_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_never_back_fail_repl_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_always_back_fail_decw_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_less_or_equal_back_fail_decc_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_less_back_fail_decw_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_not_equal_back_fail_inv_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_equal_back_fail_keep_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_not_equal_back_fail_decw_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_always_back_fail_inv_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_greater_back_fail_zero_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_not_equal_back_fail_decw_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_always_back_fail_incc_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_greater_back_fail_zero_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_not_equal_back_fail_wrap_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_less_back_fail_decc_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_less_back_fail_incc_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_equal_back_fail_decc_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_always_back_fail_decc_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_less_back_fail_repl_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_equal_back_fail_incc_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_equal_back_fail_incc_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_less_back_fail_repl_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_less_back_fail_repl_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_always_back_fail_wrap_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_always_back_fail_wrap_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_less_or_equal_back_fail_incc_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_not_equal_back_fail_incc_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_equal_back_fail_decw_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_not_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_not_equal_back_fail_zero_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_equal_back_fail_decw_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_not_equal_back_fail_repl_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_not_equal_back_fail_zero_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_always_back_fail_incc_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_less_back_fail_decc_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_less_back_fail_decw_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_decc_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_not_equal_back_fail_repl_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_always_back_fail_repl_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_always_back_fail_inv_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_equal_back_fail_repl_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_not_equal_back_fail_wrap_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_always_back_fail_repl_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_never_back_fail_zero_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_greater_back_fail_decc_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_always_back_fail_inv_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_not_equal_back_fail_wrap_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_always_back_fail_wrap_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_never_back_fail_decc_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_never_back_fail_decc_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_always_back_fail_inv_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_always_back_fail_inv_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_greater_back_fail_incc_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_equal_back_fail_decw_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_greater_back_fail_repl_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_not_equal_back_fail_wrap_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_never_back_fail_zero_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_always_back_fail_keep_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_always_back_fail_zero_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_greater_back_fail_decw_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_always_back_fail_repl_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_less_back_fail_decw_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_repl_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_equal_back_fail_repl_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_not_equal_back_fail_repl_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_always_back_fail_wrap_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_equal_back_fail_decw_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_never_back_fail_decw_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_equal_back_fail_decc_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_less_back_fail_repl_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_greater_back_fail_zero_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_always_back_fail_incc_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_less_back_fail_zero_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_never_back_fail_repl_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_greater_back_fail_keep_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_less_or_equal_back_fail_decc_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_less_back_fail_incc_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_never_back_fail_zero_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_never_back_fail_decw_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_greater_back_fail_inv_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_not_equal_back_fail_keep_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_less_back_fail_decc_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_never_back_fail_incc_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_not_equal_back_fail_zero_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_less_back_fail_wrap_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_always_back_fail_decw_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_greater_back_fail_decc_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_less_or_equal_back_fail_incc_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_never_back_fail_keep_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_never_back_fail_keep_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_equal_back_fail_keep_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_greater_back_fail_zero_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_less_back_fail_incc_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_always_back_fail_inv_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_always_back_fail_zero_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_less_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_not_equal_back_fail_keep_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_greater_back_fail_decw_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_equal_back_fail_zero_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_never_back_fail_zero_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_equal_back_fail_keep_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_less_back_fail_incc_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_less_or_equal_back_fail_repl_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_greater_back_fail_inv_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_equal_back_fail_wrap_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_less_back_fail_zero_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_less_back_fail_incc_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_less_or_equal_back_fail_keep_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_always_back_fail_decw_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_equal_back_fail_keep_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_always_back_fail_repl_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_greater_back_fail_zero_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_greater_back_fail_repl_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_less_or_equal_back_fail_repl_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_less_back_fail_incc_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_equal_back_fail_zero_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_not_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_always_back_fail_wrap_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_greater_back_fail_inv_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_less_back_fail_inv_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_greater_back_fail_zero_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_less_or_equal_back_fail_zero_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_greater_back_fail_inv_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_less_or_equal_back_fail_zero_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_less_back_fail_inv_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_less_back_fail_decc_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_less_back_fail_inv_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_less_back_fail_incc_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_greater_back_fail_repl_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_always_back_fail_wrap_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_always_back_fail_repl_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_equal_back_fail_zero_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_always_back_fail_inv_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_never_back_fail_inv_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_always_back_fail_decw_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_less_or_equal_back_fail_decc_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_always_back_fail_zero_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_greater_back_fail_decw_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_never_back_fail_decc_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_not_equal_back_fail_inv_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_greater_back_fail_inv_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_never_back_fail_decc_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_equal_back_fail_zero_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_not_equal_back_fail_decw_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_less_back_fail_incc_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_less_or_equal_back_fail_keep_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_less_or_equal_back_fail_zero_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_equal_back_fail_inv_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_always_back_fail_zero_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_greater_back_fail_inv_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_not_equal_back_fail_zero_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_equal_back_fail_wrap_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_not_equal_back_fail_wrap_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_greater_back_fail_wrap_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_always_back_fail_zero_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_equal_back_fail_incc_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_greater_back_fail_wrap_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_never_back_fail_incc_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_less_back_fail_wrap_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_never_back_fail_zero_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_equal_back_fail_inv_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_never_back_fail_keep_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_not_equal_back_fail_keep_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_never_back_fail_inv_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_less_back_fail_wrap_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_always_back_fail_decc_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_not_equal_back_fail_wrap_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_greater_back_fail_decw_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_never_back_fail_wrap_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_greater_back_fail_wrap_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_never_back_fail_decc_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_greater_back_fail_zero_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_greater_back_fail_decw_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_equal_back_fail_wrap_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_equal_back_fail_wrap_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_equal_back_fail_inv_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_never_back_fail_repl_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_always_back_fail_inv_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_never_back_fail_incc_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_not_equal_back_fail_wrap_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_not_equal_back_fail_wrap_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_greater_back_fail_wrap_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_never_back_fail_wrap_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_not_equal_back_fail_incc_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_greater_back_fail_repl_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_less_back_fail_keep_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_not_equal_back_fail_incc_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_less_back_fail_repl_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_not_equal_back_fail_decw_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_not_equal_back_fail_inv_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_equal_back_fail_keep_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_always_back_fail_decw_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_never_back_fail_inv_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_greater_back_fail_keep_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_less_back_fail_incc_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_greater_back_fail_wrap_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_equal_back_fail_decw_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_never_back_fail_incc_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_not_equal_back_fail_wrap_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_always_back_fail_decw_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_greater_back_fail_inv_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_always_back_fail_incc_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_less_or_equal_back_fail_repl_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_equal_back_fail_zero_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_greater_back_fail_zero_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_greater_back_fail_zero_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_less_back_fail_inv_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_less_back_fail_zero_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_equal_back_fail_decc_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_less_back_fail_keep_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_greater_back_fail_decc_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_equal_back_fail_incc_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_greater_back_fail_decc_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_never_back_fail_repl_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_less_or_equal_back_fail_zero_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_less_or_equal_back_fail_inv_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_equal_back_fail_wrap_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_never_back_fail_incc_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_greater_back_fail_incc_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_never_back_fail_decc_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_equal_back_fail_wrap_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_less_back_fail_zero_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_equal_back_fail_decc_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_always_back_fail_keep_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_equal_back_fail_repl_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_less_back_fail_wrap_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_greater_back_fail_decc_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decc_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_never_back_fail_decw_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_equal_back_fail_incc_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_greater_back_fail_repl_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_always_back_fail_repl_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_not_equal_back_fail_decw_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_always_back_fail_repl_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_greater_back_fail_incc_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_always_back_fail_zero_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_equal_back_fail_wrap_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_equal_back_fail_repl_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_less_back_fail_decw_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_always_back_fail_decw_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_less_back_fail_keep_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_always_back_fail_repl_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_always_back_fail_decw_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decc_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_less_back_fail_repl_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_greater_back_fail_decc_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_greater_back_fail_decw_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_always_back_fail_zero_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_not_equal_back_fail_zero_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_never_back_fail_keep_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_less_back_fail_decc_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_always_back_fail_zero_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_not_equal_back_fail_incc_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_always_back_fail_zero_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_less_back_fail_decw_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_not_equal_back_fail_inv_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_not_equal_back_fail_zero_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_always_back_fail_zero_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_never_back_fail_zero_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_not_equal_back_fail_incc_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_not_equal_back_fail_incc_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_less_back_fail_decw_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_less_back_fail_wrap_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_never_back_fail_repl_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_never_back_fail_inv_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_less_back_fail_repl_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_less_back_fail_keep_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_never_back_fail_inv_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_equal_back_fail_repl_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_never_back_fail_repl_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_not_equal_back_fail_decw_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_equal_back_fail_decc_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_always_back_fail_decc_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_equal_back_fail_decw_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_less_back_fail_zero_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_not_equal_back_fail_zero_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_always_back_fail_decw_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_not_equal_back_fail_wrap_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_less_back_fail_keep_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_greater_back_fail_decw_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_not_equal_back_fail_incc_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_greater_back_fail_decc_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_greater_back_fail_decc_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_always_back_fail_decw_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_less_back_fail_incc_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_equal_back_fail_decw_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_not_equal_back_fail_decc_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_not_equal_back_fail_decc_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_less_or_equal_back_fail_zero_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_less_back_fail_keep_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_never_back_fail_incc_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_not_equal_back_fail_wrap_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_not_equal_back_fail_wrap_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_less_or_equal_back_fail_inv_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_less_back_fail_decc_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_equal_back_fail_zero_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_greater_back_fail_keep_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_greater_back_fail_inv_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_not_equal_back_fail_repl_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_not_equal_back_fail_decc_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_greater_back_fail_keep_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_equal_back_fail_repl_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_equal_back_fail_repl_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_not_equal_back_fail_incc_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_greater_back_fail_inv_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_equal_back_fail_decw_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_less_back_fail_incc_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_always_back_fail_repl_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_not_equal_back_fail_zero_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_equal_back_fail_wrap_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_always_back_fail_zero_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_equal_back_fail_decw_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_greater_back_fail_keep_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_always_back_fail_inv_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_equal_back_fail_incc_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_never_back_fail_repl_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_greater_back_fail_wrap_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_greater_back_fail_wrap_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_equal_back_fail_zero_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_greater_back_fail_wrap_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_less_or_equal_back_fail_inv_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_never_back_fail_inv_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_always_back_fail_incc_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_greater_back_fail_incc_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_greater_back_fail_zero_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_greater_back_fail_keep_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_less_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_less_back_fail_zero_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_not_equal_back_fail_incc_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_greater_back_fail_zero_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_equal_back_fail_zero_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_greater_back_fail_decw_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_always_back_fail_inv_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_always_back_fail_inv_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_never_back_fail_decc_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_always_back_fail_wrap_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_never_back_fail_wrap_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_greater_back_fail_incc_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_greater_back_fail_zero_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_greater_back_fail_wrap_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_never_back_fail_decc_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_always_back_fail_incc_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_less_back_fail_keep_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_less_back_fail_wrap_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_less_back_fail_decw_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_never_back_fail_inv_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_equal_back_fail_keep_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_always_back_fail_decc_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_less_back_fail_wrap_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_less_back_fail_wrap_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_greater_back_fail_zero_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_less_back_fail_zero_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_always_back_fail_inv_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_not_equal_back_fail_zero_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_greater_back_fail_decw_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_equal_back_fail_keep_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_always_back_fail_incc_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_always_back_fail_repl_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_never_back_fail_repl_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_less_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_not_equal_back_fail_inv_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_equal_back_fail_keep_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_equal_back_fail_keep_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_equal_back_fail_decc_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_never_back_fail_inv_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_always_back_fail_inv_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_not_equal_back_fail_incc_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_equal_back_fail_inv_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_greater_back_fail_zero_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_less_back_fail_decw_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_always_back_fail_decc_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_always_back_fail_decw_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_always_back_fail_wrap_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_less_back_fail_incc_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_greater_back_fail_repl_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_less_back_fail_wrap_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_always_back_fail_wrap_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_less_back_fail_incc_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_always_back_fail_decw_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_greater_back_fail_decw_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_less_back_fail_inv_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_always_back_fail_inv_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_never_back_fail_wrap_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_equal_back_fail_repl_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_never_back_fail_repl_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_less_back_fail_decw_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_not_equal_back_fail_inv_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_always_back_fail_incc_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_equal_back_fail_incc_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_equal_back_fail_repl_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_always_back_fail_decw_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_less_back_fail_repl_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_equal_back_fail_decw_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_never_back_fail_keep_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_never_back_fail_zero_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_equal_back_fail_decw_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_less_or_equal_back_fail_repl_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_not_equal_back_fail_decw_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_not_equal_back_fail_repl_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_less_back_fail_inv_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_greater_back_fail_keep_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_equal_back_fail_incc_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_less_back_fail_keep_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_always_back_fail_incc_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_never_back_fail_wrap_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_never_back_fail_decw_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_less_back_fail_repl_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_equal_back_fail_zero_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_equal_back_fail_incc_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_always_back_fail_keep_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_less_or_equal_back_fail_zero_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_equal_back_fail_inv_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_never_back_fail_decc_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_less_back_fail_repl_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_less_back_fail_incc_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_greater_back_fail_zero_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_less_back_fail_repl_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_less_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_greater_back_fail_decc_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_less_or_equal_back_fail_decw_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_less_back_fail_zero_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_greater_back_fail_zero_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_not_equal_back_fail_zero_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_less_or_equal_back_fail_keep_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_less_or_equal_back_fail_repl_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_less_or_equal_back_fail_decc_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_never_back_fail_inv_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_less_or_equal_back_fail_incc_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_equal_back_fail_wrap_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_never_back_fail_keep_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_equal_back_fail_keep_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_always_back_fail_decw_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_greater_back_fail_decw_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_less_back_fail_incc_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_repl_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_not_equal_back_fail_wrap_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_always_back_fail_repl_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_not_equal_back_fail_decw_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_less_back_fail_incc_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_not_equal_back_fail_zero_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_greater_back_fail_repl_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_equal_back_fail_inv_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_always_back_fail_decw_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_greater_back_fail_decw_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_equal_back_fail_decc_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_not_equal_back_fail_repl_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_greater_back_fail_keep_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_greater_back_fail_inv_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_greater_back_fail_incc_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_equal_back_fail_wrap_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_never_back_fail_decw_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_always_back_fail_decw_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_greater_back_fail_decw_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_less_or_equal_back_fail_incc_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_never_back_fail_inv_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_always_back_fail_inv_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_always_back_fail_decw_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_greater_back_fail_decc_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_never_back_fail_decc_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_greater_back_fail_incc_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_never_back_fail_wrap_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_never_back_fail_keep_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_less_or_equal_back_fail_decc_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_less_or_equal_back_fail_repl_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_equal_back_fail_decc_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_greater_back_fail_repl_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_never_back_fail_keep_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_less_back_fail_keep_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_greater_back_fail_inv_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_never_back_fail_inv_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_equal_back_fail_repl_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_equal_back_fail_wrap_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_never_back_fail_zero_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_less_or_equal_back_fail_decw_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_equal_back_fail_incc_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_less_back_fail_zero_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_never_back_fail_decc_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_less_or_equal_back_fail_incc_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_always_back_fail_zero_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_less_back_fail_repl_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_less_back_fail_decw_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_always_back_fail_decc_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_never_back_fail_incc_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_equal_back_fail_keep_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_equal_back_fail_inv_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_less_back_fail_zero_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_always_back_fail_repl_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_never_back_fail_repl_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_always_back_fail_wrap_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_never_back_fail_repl_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_never_back_fail_decw_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_greater_back_fail_zero_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_equal_back_fail_zero_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_greater_back_fail_repl_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_equal_back_fail_keep_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_greater_back_fail_repl_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_less_back_fail_incc_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_not_equal_back_fail_repl_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_less_back_fail_repl_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_always_back_fail_inv_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_equal_back_fail_decw_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_not_equal_back_fail_decw_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_greater_back_fail_incc_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_never_back_fail_zero_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_less_back_fail_wrap_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_always_back_fail_decc_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_equal_back_fail_zero_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_greater_back_fail_inv_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_not_equal_back_fail_inv_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_always_back_fail_inv_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_always_back_fail_decc_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_less_back_fail_wrap_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_less_back_fail_decc_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_less_back_fail_repl_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_less_back_fail_inv_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_always_back_fail_zero_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_greater_back_fail_zero_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decw_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_equal_back_fail_zero_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_not_equal_back_fail_wrap_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_less_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_greater_back_fail_decw_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_never_back_fail_decw_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_always_back_fail_inv_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_not_equal_back_fail_decc_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_less_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_never_back_fail_decc_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_less_or_equal_back_fail_inv_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_less_or_equal_back_fail_zero_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_never_back_fail_inv_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_always_back_fail_decc_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_less_back_fail_repl_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_greater_back_fail_wrap_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_not_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_always_back_fail_keep_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_not_equal_back_fail_wrap_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_not_equal_back_fail_zero_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_less_or_equal_back_fail_decw_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_less_back_fail_decc_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_never_back_fail_incc_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_greater_back_fail_repl_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_not_equal_back_fail_wrap_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_never_back_fail_decw_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_less_back_fail_keep_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_less_back_fail_inv_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_not_equal_back_fail_repl_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_greater_back_fail_wrap_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_less_back_fail_decc_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_always_back_fail_decw_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_greater_back_fail_incc_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_less_back_fail_wrap_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_not_equal_back_fail_keep_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_always_back_fail_inv_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_not_equal_back_fail_decc_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_less_back_fail_keep_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_never_back_fail_decw_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_less_back_fail_decw_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_greater_back_fail_inv_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_less_back_fail_repl_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_not_equal_back_fail_decc_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_greater_back_fail_repl_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_equal_back_fail_decw_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_less_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_not_equal_back_fail_keep_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_never_back_fail_decc_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_not_equal_back_fail_repl_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_never_back_fail_zero_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_less_back_fail_incc_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_not_equal_back_fail_keep_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_never_back_fail_wrap_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_greater_back_fail_keep_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_equal_back_fail_keep_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_less_back_fail_decc_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_less_or_equal_back_fail_zero_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_never_back_fail_decw_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_always_back_fail_decw_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_less_or_equal_back_fail_zero_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_greater_back_fail_repl_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_never_back_fail_inv_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_not_equal_back_fail_zero_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_equal_back_fail_keep_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_less_back_fail_decc_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_always_back_fail_zero_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_not_equal_back_fail_repl_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_less_back_fail_wrap_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_greater_back_fail_decc_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_less_back_fail_decc_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_never_back_fail_wrap_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_never_back_fail_wrap_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_less_back_fail_inv_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_not_equal_back_fail_inv_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_always_back_fail_zero_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_less_back_fail_inv_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_always_back_fail_keep_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_always_back_fail_incc_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_not_equal_back_fail_decc_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_less_or_equal_back_fail_repl_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_less_back_fail_keep_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_less_back_fail_wrap_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_never_back_fail_decc_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_greater_back_fail_zero_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_less_back_fail_keep_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_equal_back_fail_wrap_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_not_equal_back_fail_decw_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_equal_back_fail_inv_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_always_back_fail_repl_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_equal_back_fail_inv_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_always_back_fail_decc_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_not_equal_back_fail_incc_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_less_back_fail_keep_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_less_back_fail_decc_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_never_back_fail_keep_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_less_back_fail_wrap_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_greater_back_fail_keep_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_never_back_fail_keep_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_equal_back_fail_repl_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_less_back_fail_zero_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_not_equal_back_fail_decc_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_equal_back_fail_zero_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_greater_back_fail_wrap_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_greater_back_fail_decw_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_never_back_fail_incc_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_always_back_fail_decw_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_not_equal_back_fail_decw_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_less_back_fail_repl_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_never_back_fail_incc_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_never_back_fail_repl_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_greater_back_fail_incc_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_greater_back_fail_incc_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_always_back_fail_decc_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_equal_back_fail_wrap_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_less_back_fail_inv_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_never_back_fail_keep_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_not_equal_back_fail_inv_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_less_back_fail_decw_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_always_back_fail_inv_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_greater_back_fail_incc_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_always_back_fail_inv_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_always_back_fail_incc_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_not_equal_back_fail_keep_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_not_equal_back_fail_inv_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_not_equal_back_fail_decc_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_never_back_fail_decw_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_not_equal_back_fail_decc_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_always_back_fail_incc_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_always_back_fail_keep_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_equal_back_fail_zero_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_not_equal_back_fail_incc_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_equal_back_fail_inv_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_not_equal_back_fail_keep_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_equal_back_fail_keep_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_never_back_fail_wrap_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_equal_back_fail_incc_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_always_back_fail_inv_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_greater_back_fail_wrap_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_always_back_fail_decw_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_always_back_fail_repl_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_always_back_fail_inv_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_equal_back_fail_keep_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_greater_back_fail_incc_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_not_equal_back_fail_inv_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_less_or_equal_back_fail_incc_pass_keep_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_equal_back_fail_keep_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_never_back_fail_wrap_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_never_back_fail_wrap_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_equal_back_fail_decw_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_never_back_fail_incc_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_always_back_fail_decw_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_equal_back_fail_keep_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_never_back_fail_keep_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_not_equal_back_fail_inv_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_less_or_equal_back_fail_incc_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_equal_back_fail_keep_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_always_back_fail_decc_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_less_back_fail_decw_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_always_back_fail_inv_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_less_back_fail_keep_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_equal_back_fail_wrap_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_less_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_less_back_fail_inv_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_equal_back_fail_incc_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_never_back_fail_wrap_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_less_back_fail_decw_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_not_equal_back_fail_keep_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_never_back_fail_zero_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_not_equal_back_fail_decc_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_equal_back_fail_wrap_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_not_equal_back_fail_zero_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_equal_back_fail_repl_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_always_back_fail_zero_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_less_back_fail_decc_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_less_or_equal_back_fail_decw_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_greater_back_fail_repl_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_never_back_fail_decw_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_equal_back_fail_decc_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_equal_back_fail_incc_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_equal_back_fail_incc_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_not_equal_back_fail_incc_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_never_back_fail_inv_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_greater_back_fail_zero_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_greater_back_fail_decc_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_never_back_fail_keep_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_equal_back_fail_inv_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_always_back_fail_decc_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_less_or_equal_back_fail_decc_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_always_back_fail_inv_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_greater_back_fail_inv_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_always_back_fail_keep_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_not_equal_back_fail_zero_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_less_back_fail_wrap_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_greater_back_fail_repl_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_equal_back_fail_zero_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_less_back_fail_decw_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_greater_back_fail_keep_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_never_back_fail_wrap_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_equal_back_fail_decw_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_equal_back_fail_zero_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_greater_back_fail_zero_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_greater_back_fail_keep_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_never_back_fail_incc_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_never_back_fail_repl_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_always_back_fail_incc_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_never_back_fail_decw_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_greater_back_fail_decc_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_greater_back_fail_decw_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_less_back_fail_zero_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_less_or_equal_back_fail_inv_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_not_equal_back_fail_incc_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_always_back_fail_keep_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_equal_back_fail_incc_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_less_back_fail_inv_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_always_back_fail_wrap_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_always_back_fail_decc_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_less_back_fail_repl_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_not_equal_back_fail_keep_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_not_equal_back_fail_keep_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_less_or_equal_back_fail_repl_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_equal_back_fail_incc_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_never_back_fail_decw_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_always_back_fail_repl_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decw_comp_not_equal_back_fail_keep_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_not_equal_back_fail_decc_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_not_equal_back_fail_wrap_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_less_or_equal_back_fail_incc_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_equal_back_fail_decc_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_greater_back_fail_inv_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_greater_back_fail_keep_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_greater_back_fail_decw_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_not_equal_back_fail_inv_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_greater_back_fail_incc_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_never_back_fail_decc_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_not_equal_back_fail_repl_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_greater_back_fail_repl_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_never_back_fail_inv_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_greater_back_fail_zero_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_not_equal_back_fail_inv_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_less_back_fail_wrap_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_greater_back_fail_incc_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_not_equal_back_fail_decw_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_not_equal_back_fail_repl_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_less_back_fail_zero_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_not_equal_back_fail_wrap_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_not_equal_back_fail_decc_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_never_back_fail_incc_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_equal_back_fail_incc_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_not_equal_back_fail_repl_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_less_or_equal_back_fail_repl_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_less_back_fail_decc_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_equal_back_fail_keep_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_less_back_fail_decc_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_always_back_fail_zero_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_always_back_fail_wrap_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_less_or_equal_back_fail_zero_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_never_back_fail_decc_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_greater_back_fail_wrap_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_less_back_fail_inv_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_always_back_fail_wrap_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_equal_back_fail_decc_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_less_back_fail_incc_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_never_back_fail_repl_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_never_back_fail_decc_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_always_back_fail_decc_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_greater_back_fail_zero_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_not_equal_back_fail_repl_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_equal_back_fail_incc_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_equal_back_fail_incc_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_always_back_fail_zero_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_never_back_fail_repl_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_never_back_fail_repl_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_greater_back_fail_decc_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_equal_back_fail_zero_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_less_back_fail_repl_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_never_back_fail_keep_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_not_equal_back_fail_inv_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_never_back_fail_decc_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_not_equal_back_fail_incc_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_never_back_fail_zero_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_equal_back_fail_inv_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_always_back_fail_inv_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_greater_back_fail_wrap_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_less_back_fail_inv_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_greater_back_fail_inv_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_less_back_fail_zero_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_equal_back_fail_decc_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_always_back_fail_decw_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_always_back_fail_repl_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_never_back_fail_repl_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_less_or_equal_back_fail_zero_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_not_equal_back_fail_wrap_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_less_or_equal_back_fail_keep_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_always_back_fail_keep_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_always_back_fail_decc_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_equal_back_fail_repl_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_greater_back_fail_inv_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_less_back_fail_decw_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_greater_back_fail_wrap_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_never_back_fail_keep_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_less_back_fail_keep_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_less_back_fail_repl_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_never_back_fail_inv_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_less_back_fail_repl_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_less_back_fail_wrap_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_less_or_equal_back_fail_incc_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decc_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_greater_back_fail_decc_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_not_equal_back_fail_zero_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_greater_back_fail_repl_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_never_back_fail_decw_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_less_back_fail_decw_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_greater_back_fail_incc_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_less_or_equal_back_fail_keep_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_less_back_fail_zero_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_never_back_fail_wrap_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_less_back_fail_decw_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_greater_back_fail_repl_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_equal_back_fail_decw_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_never_back_fail_repl_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_equal_back_fail_keep_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_greater_back_fail_decw_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_not_equal_back_fail_incc_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_not_equal_back_fail_inv_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_greater_back_fail_incc_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_never_back_fail_wrap_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_less_back_fail_repl_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_less_or_equal_back_fail_repl_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_never_back_fail_zero_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_greater_back_fail_decw_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_always_back_fail_decc_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_not_equal_back_fail_keep_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_less_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_greater_back_fail_repl_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_always_back_fail_decc_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_never_back_fail_zero_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_never_back_fail_wrap_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_less_back_fail_inv_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_less_or_equal_back_fail_repl_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_not_equal_back_fail_incc_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_less_back_fail_keep_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_not_equal_back_fail_inv_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_never_back_fail_wrap_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_equal_back_fail_incc_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_not_equal_back_fail_repl_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_equal_back_fail_wrap_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_never_back_fail_inv_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_never_back_fail_incc_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_never_back_fail_zero_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_never_back_fail_repl_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_always_back_fail_decw_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_less_back_fail_inv_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_not_equal_back_fail_repl_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_greater_back_fail_inv_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_less_back_fail_zero_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_less_back_fail_decw_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_less_back_fail_decw_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_always_back_fail_decc_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_less_or_equal_back_fail_repl_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_always_back_fail_inv_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_always_back_fail_inv_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_not_equal_back_fail_keep_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_less_back_fail_incc_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_less_or_equal_back_fail_keep_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_never_back_fail_keep_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_greater_back_fail_zero_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_less_or_equal_back_fail_decw_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_not_equal_back_fail_keep_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_never_back_fail_decw_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_always_back_fail_zero_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_not_equal_back_fail_keep_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_never_back_fail_inv_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_not_equal_back_fail_decc_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_always_back_fail_inv_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_decc_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_never_back_fail_inv_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_never_back_fail_zero_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_less_back_fail_wrap_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_equal_back_fail_decw_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_not_equal_back_fail_keep_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_never_back_fail_inv_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decw_comp_never_back_fail_repl_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_not_equal_back_fail_incc_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_always_back_fail_decw_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_always_back_fail_inv_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_less_back_fail_decc_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_never_back_fail_zero_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_not_equal_back_fail_inv_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_not_equal_back_fail_inv_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_always_back_fail_keep_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_greater_back_fail_decw_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_not_equal_back_fail_wrap_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_less_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_greater_back_fail_decc_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_greater_back_fail_decc_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_not_equal_back_fail_zero_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_greater_back_fail_incc_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_less_back_fail_repl_pass_keep_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_equal_back_fail_inv_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_less_or_equal_back_fail_incc_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_equal_back_fail_decc_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_always_back_fail_inv_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_never_back_fail_wrap_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_not_equal_back_fail_keep_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_always_back_fail_decw_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_never_back_fail_decc_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_never_back_fail_incc_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_never_back_fail_decw_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_less_back_fail_wrap_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_never_back_fail_keep_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_greater_back_fail_decw_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_never_back_fail_incc_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_less_back_fail_keep_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_equal_back_fail_inv_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_greater_back_fail_decc_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_equal_back_fail_zero_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_less_back_fail_wrap_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_never_back_fail_incc_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_less_back_fail_repl_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_equal_back_fail_repl_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_not_equal_back_fail_keep_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_less_back_fail_wrap_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_greater_back_fail_repl_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_less_back_fail_incc_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_greater_back_fail_repl_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_always_back_fail_repl_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_never_back_fail_decc_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_never_back_fail_repl_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_never_back_fail_decc_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_not_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_greater_back_fail_keep_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_never_back_fail_decw_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_greater_back_fail_incc_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_less_back_fail_inv_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_never_back_fail_decw_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_less_back_fail_wrap_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_never_back_fail_zero_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_less_back_fail_decw_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_not_equal_back_fail_wrap_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_greater_back_fail_decw_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_greater_back_fail_zero_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_not_equal_back_fail_zero_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_equal_back_fail_decc_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_never_back_fail_zero_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_never_back_fail_incc_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_equal_back_fail_incc_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_never_back_fail_incc_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_incc_comp_equal_back_fail_wrap_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_not_equal_back_fail_repl_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_not_equal_back_fail_decc_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_equal_back_fail_decc_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_greater_back_fail_repl_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_equal_back_fail_decc_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_less_back_fail_incc_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_not_equal_back_fail_decc_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_never_back_fail_zero_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_greater_back_fail_incc_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_equal_back_fail_keep_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decw_comp_never_back_fail_keep_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_less_back_fail_repl_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_greater_back_fail_repl_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_less_back_fail_wrap_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_never_back_fail_zero_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_equal_back_fail_repl_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_not_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_not_equal_back_fail_decc_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_less_back_fail_repl_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_always_back_fail_wrap_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_equal_back_fail_repl_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_never_back_fail_decw_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_not_equal_back_fail_repl_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_not_equal_back_fail_inv_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_less_or_equal_back_fail_incc_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_never_back_fail_decc_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_never_back_fail_keep_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_equal_back_fail_incc_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_never_back_fail_repl_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_less_back_fail_zero_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_greater_back_fail_decc_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_less_back_fail_inv_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_not_equal_back_fail_keep_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_equal_back_fail_incc_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_greater_back_fail_wrap_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_less_back_fail_decw_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_less_back_fail_decw_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_not_equal_back_fail_repl_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_always_back_fail_wrap_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_not_equal_back_fail_decc_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_always_back_fail_keep_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_equal_back_fail_wrap_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_always_back_fail_wrap_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_less_or_equal_back_fail_incc_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_greater_back_fail_decw_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_equal_back_fail_wrap_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_not_equal_back_fail_inv_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_equal_back_fail_decw_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_not_equal_back_fail_incc_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_less_back_fail_decw_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_never_back_fail_incc_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_greater_back_fail_zero_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_not_equal_back_fail_decc_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_less_back_fail_repl_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_less_back_fail_keep_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_never_back_fail_decw_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_less_or_equal_back_fail_decc_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_not_equal_back_fail_repl_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_always_back_fail_zero_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_less_or_equal_back_fail_decw_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_always_back_fail_wrap_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_equal_back_fail_keep_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_less_back_fail_decw_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_not_equal_back_fail_decc_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_equal_back_fail_repl_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_always_back_fail_wrap_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_always_back_fail_wrap_pass_incc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_not_equal_back_fail_incc_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_always_back_fail_decw_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_less_back_fail_decw_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_less_or_equal_back_fail_decc_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_less_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_not_equal_back_fail_decw_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_less_back_fail_decw_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_less_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_greater_back_fail_keep_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_never_back_fail_decw_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_less_back_fail_inv_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_never_back_fail_repl_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_less_back_fail_zero_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_less_back_fail_zero_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_equal_back_fail_decw_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_greater_back_fail_repl_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_never_back_fail_keep_pass_keep_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_repl_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_never_back_fail_zero_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_not_equal_back_fail_incc_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_never_back_fail_zero_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_never_back_fail_decc_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_inv_comp_less_back_fail_zero_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_less_back_fail_decw_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_less_back_fail_zero_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_less_back_fail_inv_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_greater_back_fail_zero_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_not_equal_back_fail_inv_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_greater_back_fail_wrap_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_greater_back_fail_keep_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_always_back_fail_decc_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_never_back_fail_incc_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_equal_back_fail_zero_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_not_equal_back_fail_zero_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_not_equal_back_fail_keep_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_less_back_fail_decw_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_less_back_fail_repl_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_less_back_fail_incc_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_never_back_fail_wrap_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_keep_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_greater_back_fail_incc_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_less_back_fail_keep_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_less_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_less_back_fail_decc_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_equal_back_fail_incc_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_never_back_fail_decw_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_always_back_fail_decc_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_always_back_fail_wrap_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_always_back_fail_repl_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_greater_back_fail_zero_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_never_back_fail_inv_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_less_back_fail_repl_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_not_equal_back_fail_keep_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_always_back_fail_repl_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_not_equal_back_fail_decc_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_never_back_fail_wrap_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_always_back_fail_incc_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_not_equal_back_fail_keep_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_always_back_fail_decc_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_never_back_fail_keep_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_never_back_fail_wrap_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_greater_back_fail_zero_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_not_equal_back_fail_inv_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_less_back_fail_keep_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_not_equal_back_fail_inv_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_not_equal_back_fail_repl_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_equal_back_fail_decw_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_not_equal_back_fail_wrap_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_less_back_fail_inv_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_greater_back_fail_decw_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_greater_back_fail_incc_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_equal_back_fail_wrap_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_not_equal_back_fail_inv_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_equal_back_fail_inv_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_less_back_fail_keep_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_greater_back_fail_inv_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_not_equal_back_fail_decc_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_less_back_fail_zero_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_never_back_fail_incc_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_less_back_fail_decc_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_greater_back_fail_decc_pass_decc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_always_back_fail_decc_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_not_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_always_back_fail_decc_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_equal_back_fail_repl_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_equal_back_fail_zero_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_always_back_fail_decw_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_always_back_fail_decc_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_always_back_fail_decc_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_less_back_fail_keep_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_not_equal_back_fail_wrap_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_not_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_never_back_fail_zero_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_greater_back_fail_inv_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_greater_back_fail_decc_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_less_back_fail_zero_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_not_equal_back_fail_incc_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_less_back_fail_decw_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_always_back_fail_incc_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_greater_back_fail_repl_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_not_equal_back_fail_zero_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_not_equal_back_fail_wrap_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_never_back_fail_inv_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_always_back_fail_wrap_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_equal_back_fail_repl_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_less_back_fail_decc_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_never_back_fail_decw_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_zero_comp_never_back_fail_decc_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_equal_back_fail_zero_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_not_equal_back_fail_incc_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_not_equal_back_fail_zero_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_equal_back_fail_decc_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_not_equal_back_fail_decw_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_less_or_equal_back_fail_repl_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_always_back_fail_keep_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_incc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_equal_back_fail_keep_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_not_equal_back_fail_zero_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_greater_back_fail_decw_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_not_equal_back_fail_repl_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_wrap_comp_never_back_fail_inv_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_less_back_fail_repl_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decw_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_always_back_fail_repl_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_less_back_fail_incc_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_equal_back_fail_inv_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_less_back_fail_keep_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_decw_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_never_back_fail_repl_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_less_back_fail_keep_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_less_or_equal_back_fail_decc_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_always_back_fail_decw_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_less_or_equal_back_fail_incc_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_not_equal_back_fail_repl_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_not_equal_back_fail_decc_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_greater_back_fail_incc_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_greater_back_fail_incc_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_greater_back_fail_inv_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_less_back_fail_decc_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_greater_back_fail_inv_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_never_back_fail_decc_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_always_back_fail_decw_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_equal_back_fail_repl_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_less_back_fail_repl_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_equal_back_fail_decc_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_less_back_fail_repl_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_always_back_fail_decc_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_always_back_fail_zero_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_decw_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_equal_back_fail_decw_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_keep_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_greater_back_fail_wrap_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_not_equal_back_fail_incc_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_equal_back_fail_decc_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_less_or_equal_back_fail_keep_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_always_back_fail_decc_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_greater_back_fail_decc_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_always_back_fail_wrap_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_less_back_fail_decc_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_less_back_fail_wrap_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_greater_back_fail_repl_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_less_or_equal_back_fail_keep_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_not_equal_back_fail_wrap_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_never_back_fail_decw_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_always_back_fail_decc_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_always_back_fail_inv_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_always_back_fail_keep_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_less_back_fail_inv_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_greater_back_fail_zero_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_never_back_fail_keep_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_always_back_fail_repl_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_wrap_comp_equal_back_fail_incc_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_not_equal_back_fail_decw_pass_decw_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_less_back_fail_decw_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_greater_back_fail_decw_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_greater_back_fail_keep_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_greater_back_fail_zero_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_greater_back_fail_inv_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_not_equal_back_fail_decc_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_less_or_equal_back_fail_zero_pass_repl_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_less_back_fail_decc_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_always_back_fail_inv_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_never_back_fail_decw_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_never_back_fail_wrap_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_greater_back_fail_decw_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_greater_back_fail_incc_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_not_equal_back_fail_wrap_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_greater_back_fail_inv_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_greater_back_fail_incc_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_not_equal_back_fail_decw_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decc_comp_greater_back_fail_wrap_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_less_back_fail_decw_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_less_back_fail_incc_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_always_back_fail_decc_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_always_back_fail_repl_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_decc_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_not_equal_back_fail_decw_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_never_back_fail_wrap_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_never_back_fail_incc_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_never_back_fail_inv_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_never_back_fail_repl_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_not_equal_back_fail_wrap_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_never_back_fail_decw_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_always_back_fail_repl_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_less_back_fail_keep_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_always_back_fail_inv_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_always_back_fail_keep_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_never_back_fail_wrap_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_not_equal_back_fail_zero_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_less_back_fail_repl_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_always_back_fail_zero_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_less_back_fail_incc_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_equal_back_fail_zero_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_equal_back_fail_decc_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_greater_back_fail_decw_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_always_back_fail_repl_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_always_back_fail_wrap_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_equal_back_fail_decc_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_never_back_fail_wrap_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_greater_back_fail_inv_pass_incc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_always_back_fail_keep_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_not_equal_back_fail_incc_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_less_back_fail_incc_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_greater_back_fail_wrap_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_less_back_fail_decw_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_greater_back_fail_decc_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_greater_back_fail_wrap_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_incc_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_always_back_fail_decw_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_equal_back_fail_inv_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_greater_back_fail_inv_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_never_back_fail_zero_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_less_back_fail_decw_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_greater_back_fail_repl_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_equal_back_fail_decw_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_greater_back_fail_wrap_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_greater_back_fail_decc_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_greater_back_fail_inv_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_not_equal_back_fail_zero_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_not_equal_back_fail_keep_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_not_equal_back_fail_decw_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_equal_back_fail_keep_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_equal_back_fail_keep_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_incc_comp_always_back_fail_zero_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_never_back_fail_zero_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_always_back_fail_incc_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_greater_back_fail_decw_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_equal_back_fail_decc_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_greater_back_fail_decw_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_less_back_fail_keep_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_equal_back_fail_incc_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_never_back_fail_zero_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_less_back_fail_inv_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_always_back_fail_keep_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_always_back_fail_decc_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_less_back_fail_decw_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_equal_back_fail_incc_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_less_or_equal_back_fail_repl_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_less_back_fail_zero_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_less_back_fail_zero_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_never_back_fail_zero_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_less_or_equal_back_fail_incc_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_always_back_fail_zero_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_equal_back_fail_zero_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decw_comp_equal_back_fail_decc_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_never_back_fail_inv_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_greater_back_fail_keep_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_greater_back_fail_incc_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_not_equal_back_fail_repl_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_never_back_fail_decw_pass_keep_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_less_back_fail_decw_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_equal_back_fail_inv_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_not_equal_back_fail_wrap_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_equal_back_fail_inv_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_not_equal_back_fail_decc_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_equal_back_fail_keep_pass_decw_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_never_back_fail_wrap_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_less_or_equal_back_fail_keep_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_greater_back_fail_wrap_pass_repl_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_equal_back_fail_wrap_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_less_back_fail_decw_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_equal_back_fail_incc_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_greater_back_fail_keep_pass_decw_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_less_back_fail_decc_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_equal_back_fail_repl_pass_keep_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_always_back_fail_decw_pass_incc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_never_back_fail_zero_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_greater_back_fail_zero_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_decc_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_less_back_fail_incc_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_equal_back_fail_wrap_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_equal_back_fail_keep_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_not_equal_back_fail_keep_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_not_equal_back_fail_inv_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_always_back_fail_zero_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_greater_back_fail_wrap_pass_keep_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_less_back_fail_incc_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_less_back_fail_keep_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decc_comp_greater_back_fail_zero_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_greater_back_fail_repl_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_less_back_fail_repl_pass_inv_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_always_back_fail_inv_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_less_or_equal_back_fail_decw_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_equal_back_fail_repl_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_equal_back_fail_wrap_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_equal_back_fail_incc_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_equal_back_fail_zero_pass_decw_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_less_or_equal_back_fail_decw_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_not_equal_back_fail_repl_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_always_back_fail_keep_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_equal_back_fail_keep_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_always_back_fail_inv_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_zero_comp_not_equal_back_fail_decc_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_equal_back_fail_decw_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_less_or_equal_back_fail_keep_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_always_back_fail_incc_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_less_back_fail_keep_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_not_equal_back_fail_inv_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_less_back_fail_decw_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_never_back_fail_wrap_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_greater_back_fail_decw_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_less_back_fail_repl_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_greater_back_fail_wrap_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_less_or_equal_back_fail_repl_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_not_equal_back_fail_wrap_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_less_back_fail_decc_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_greater_back_fail_decc_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_greater_back_fail_repl_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_never_back_fail_zero_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_less_or_equal_back_fail_decc_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_zero_comp_less_back_fail_wrap_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_not_equal_back_fail_keep_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_greater_back_fail_repl_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_not_equal_back_fail_repl_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_less_back_fail_decw_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_less_back_fail_wrap_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_not_equal_back_fail_wrap_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_wrap_comp_always_back_fail_wrap_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_less_back_fail_decw_pass_repl_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_less_back_fail_wrap_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_always_back_fail_wrap_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_equal_back_fail_decc_pass_inv_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_zero_comp_less_or_equal_back_fail_decw_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_never_back_fail_keep_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_greater_back_fail_incc_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_keep_comp_equal_back_fail_zero_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_not_equal_back_fail_incc_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_equal_back_fail_keep_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_less_back_fail_zero_pass_wrap_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_not_equal_back_fail_inv_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_less_back_fail_inv_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_equal_back_fail_zero_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decw_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_less_back_fail_decc_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_not_equal_back_fail_incc_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_equal_back_fail_decc_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_not_equal_back_fail_repl_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_not_equal_back_fail_inv_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_always_back_fail_wrap_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_equal_back_fail_inv_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_equal_back_fail_wrap_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_always_back_fail_decc_pass_repl_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_greater_back_fail_inv_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_less_back_fail_wrap_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_keep_comp_less_back_fail_decc_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_less_back_fail_incc_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_repl_comp_less_or_equal_back_fail_incc_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_less_back_fail_repl_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_always_back_fail_decw_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_equal_back_fail_repl_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_not_equal_back_fail_zero_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_equal_back_fail_zero_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_equal_back_fail_inv_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_repl_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_not_equal_back_fail_wrap_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_always_back_fail_wrap_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_greater_back_fail_decw_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_not_equal_back_fail_incc_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_less_back_fail_decc_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_never_back_fail_decw_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decc_comp_greater_back_fail_zero_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_less_or_equal_back_fail_zero_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_equal_back_fail_keep_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_always_back_fail_incc_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_greater_back_fail_decw_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_not_equal_back_fail_keep_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_inv_comp_equal_back_fail_decw_pass_decc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_less_back_fail_keep_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_greater_back_fail_incc_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_never_back_fail_wrap_pass_zero_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_not_equal_back_fail_decw_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_greater_back_fail_decw_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_not_equal_back_fail_zero_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_not_equal_back_fail_wrap_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_never_back_fail_zero_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_not_equal_back_fail_decw_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_equal_back_fail_inv_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_not_equal_back_fail_repl_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_equal_back_fail_decw_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_always_back_fail_inv_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_less_or_equal_back_fail_repl_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_equal_back_fail_decw_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decw_comp_always_back_fail_wrap_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_equal_back_fail_decw_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_always_back_fail_inv_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_less_or_equal_back_fail_incc_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_always_back_fail_inv_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_less_or_equal_back_fail_decw_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decc_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_less_back_fail_decw_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_never_back_fail_inv_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_greater_back_fail_wrap_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_less_back_fail_keep_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_not_equal_back_fail_decw_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_less_back_fail_inv_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_not_equal_back_fail_repl_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_equal_back_fail_zero_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_never_back_fail_keep_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_greater_back_fail_wrap_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_equal_back_fail_inv_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_never_back_fail_decc_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_equal_back_fail_incc_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_always_back_fail_incc_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_less_back_fail_inv_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_less_or_equal_back_fail_inv_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_always_back_fail_zero_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_greater_back_fail_incc_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_zero_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_less_back_fail_decc_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_equal_back_fail_wrap_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_greater_back_fail_repl_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_less_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_not_equal_back_fail_inv_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_never_back_fail_keep_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_never_back_fail_incc_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_less_back_fail_incc_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_less_or_equal_back_fail_decc_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_greater_back_fail_repl_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_equal_back_fail_repl_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_always_back_fail_incc_pass_repl_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_keep_comp_less_back_fail_inv_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_not_equal_back_fail_decc_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_never_back_fail_decc_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_greater_back_fail_repl_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_not_equal_back_fail_zero_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_equal_back_fail_repl_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_never_back_fail_inv_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_less_back_fail_decw_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_less_or_equal_back_fail_decw_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_not_equal_back_fail_repl_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_greater_back_fail_repl_pass_incc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_always_back_fail_repl_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_always_back_fail_decc_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_never_back_fail_keep_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_equal_back_fail_keep_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_equal_back_fail_keep_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_never_back_fail_decc_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_never_back_fail_decw_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_never_back_fail_decw_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_not_equal_back_fail_repl_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_greater_back_fail_zero_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_less_back_fail_decc_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_greater_back_fail_repl_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_always_back_fail_zero_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_greater_back_fail_decc_pass_repl_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_less_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_greater_back_fail_repl_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_equal_back_fail_incc_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_equal_back_fail_inv_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_greater_back_fail_repl_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_less_or_equal_back_fail_zero_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_less_back_fail_decc_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_not_equal_back_fail_decc_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_less_back_fail_zero_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_never_back_fail_decc_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_not_equal_back_fail_decc_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_always_back_fail_inv_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_not_equal_back_fail_decc_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decc_comp_less_back_fail_decw_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_never_back_fail_wrap_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_equal_back_fail_incc_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_greater_back_fail_zero_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_greater_back_fail_zero_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_less_back_fail_zero_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_equal_back_fail_decc_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_not_equal_back_fail_decc_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_wrap_comp_always_back_fail_repl_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decw_comp_never_back_fail_decc_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_not_equal_back_fail_decc_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_greater_back_fail_wrap_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_always_back_fail_wrap_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_never_back_fail_decw_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_always_back_fail_decc_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_less_back_fail_inv_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_not_equal_back_fail_incc_pass_incc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_not_equal_back_fail_repl_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_always_back_fail_zero_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_equal_back_fail_decw_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_equal_back_fail_repl_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_equal_back_fail_zero_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_incc_comp_equal_back_fail_wrap_pass_decc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_greater_back_fail_keep_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decc_comp_never_back_fail_repl_pass_decc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_less_back_fail_zero_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_decc_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_always_back_fail_incc_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_never_back_fail_incc_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_equal_back_fail_decc_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_equal_back_fail_inv_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_decw_pass_zero_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_not_equal_back_fail_keep_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_never_back_fail_inv_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_incc_comp_less_or_equal_back_fail_keep_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_greater_back_fail_inv_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_never_back_fail_wrap_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_always_back_fail_keep_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_equal_back_fail_zero_pass_wrap_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_less_or_equal_back_fail_keep_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_equal_back_fail_wrap_pass_repl_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_zero_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_not_equal_back_fail_keep_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_greater_back_fail_keep_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_less_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_greater_back_fail_decw_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_never_back_fail_wrap_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_not_equal_back_fail_repl_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_equal_back_fail_inv_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_always_back_fail_inv_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_less_back_fail_decc_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_less_back_fail_inv_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_greater_back_fail_zero_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_greater_back_fail_zero_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_less_back_fail_decc_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_never_back_fail_keep_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_greater_back_fail_inv_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decc_comp_always_back_fail_decc_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_less_or_equal_back_fail_zero_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_not_equal_back_fail_zero_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_equal_back_fail_incc_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_greater_back_fail_repl_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_always_back_fail_inv_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decw_comp_less_back_fail_keep_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_not_equal_back_fail_incc_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_greater_back_fail_repl_pass_decw_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_greater_back_fail_repl_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_equal_back_fail_keep_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_not_equal_back_fail_repl_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_less_or_equal_back_fail_zero_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_not_equal_back_fail_keep_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_greater_back_fail_inv_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decc_comp_not_equal_back_fail_wrap_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_inv_comp_greater_back_fail_decw_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_never_back_fail_inv_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_incc_comp_greater_back_fail_inv_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_less_back_fail_decc_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_not_equal_back_fail_zero_pass_wrap_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_wrap_comp_never_back_fail_wrap_pass_keep_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_not_equal_back_fail_zero_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_always_back_fail_wrap_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_not_equal_back_fail_inv_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_not_equal_back_fail_wrap_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_zero_comp_never_back_fail_keep_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_decc_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_less_or_equal_back_fail_decc_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_greater_or_equal_back_fail_incc_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_always_back_fail_keep_pass_repl_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_equal_back_fail_keep_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_equal_back_fail_wrap_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_less_back_fail_repl_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_less_or_equal_back_fail_zero_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_not_equal_back_fail_decc_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_keep_comp_less_or_equal_back_fail_decw_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_equal_back_fail_wrap_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_equal_back_fail_repl_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_less_or_equal_back_fail_zero_pass_incc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_not_equal_back_fail_keep_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_zero_comp_less_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_less_or_equal_back_fail_keep_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_equal_back_fail_zero_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_decw_comp_always_back_fail_wrap_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_never_back_fail_decw_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_less_back_fail_zero_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_not_equal_back_fail_inv_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_equal_back_fail_incc_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_not_equal_back_fail_inv_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_less_or_equal_back_fail_incc_pass_zero_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_equal_back_fail_repl_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_always_back_fail_incc_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_less_or_equal_back_fail_decc_pass_inv_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_less_back_fail_incc_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_greater_back_fail_inv_pass_repl_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decw_comp_always_back_fail_decc_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_less_back_fail_zero_pass_keep_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_not_equal_back_fail_inv_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_incc_comp_equal_back_fail_keep_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_less_back_fail_incc_pass_incc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_keep_comp_always_back_fail_repl_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_not_equal_back_fail_inv_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decw_comp_greater_back_fail_decc_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_equal_back_fail_inv_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_equal_back_fail_inv_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_keep_comp_not_equal_back_fail_decw_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_equal_back_fail_inv_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_not_equal_back_fail_repl_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_wrap_comp_never_back_fail_repl_pass_zero_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_equal_back_fail_keep_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_less_back_fail_decw_pass_keep_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_equal_back_fail_decc_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_zero_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_equal_back_fail_zero_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_equal_back_fail_keep_pass_keep_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_less_back_fail_wrap_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decc_comp_always_back_fail_decw_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_never_back_fail_incc_pass_keep_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_not_equal_back_fail_keep_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_equal_back_fail_wrap_pass_keep_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_not_equal_back_fail_repl_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_less_back_fail_repl_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_less_or_equal_back_fail_zero_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_never_back_fail_decw_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_less_or_equal_back_fail_incc_pass_zero_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_always_back_fail_repl_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_repl_comp_less_back_fail_zero_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_greater_back_fail_inv_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_repl_comp_always_back_fail_decw_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_greater_back_fail_decc_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_equal_back_fail_repl_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_always_back_fail_keep_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_inv_comp_less_back_fail_repl_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_always_back_fail_zero_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_greater_back_fail_decw_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_always_back_fail_decw_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_keep_comp_equal_back_fail_zero_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_less_or_equal_back_fail_inv_pass_incc_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_less_or_equal_back_fail_repl_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_equal_back_fail_keep_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_less_back_fail_decw_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_equal_back_fail_incc_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_never_back_fail_keep_pass_inv_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_less_back_fail_repl_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_greater_back_fail_inv_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_less_back_fail_decc_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_greater_back_fail_keep_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_equal_back_fail_decw_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_wrap_comp_equal_back_fail_incc_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_less_or_equal_back_fail_decc_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_equal_back_fail_wrap_pass_zero_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_not_equal_back_fail_inv_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_less_back_fail_inv_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_wrap_comp_not_equal_back_fail_wrap_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_less_back_fail_zero_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_inv_comp_greater_back_fail_incc_pass_inv_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_never_back_fail_incc_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_repl_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_not_equal_back_fail_decw_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_always_back_fail_incc_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_greater_back_fail_wrap_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_repl_comp_greater_back_fail_wrap_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_equal_back_fail_incc_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_less_back_fail_keep_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_not_equal_back_fail_inv_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_less_or_equal_back_fail_inv_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decc_comp_never_back_fail_decc_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_not_equal_back_fail_wrap_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decc_comp_not_equal_back_fail_decc_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_less_back_fail_repl_pass_wrap_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_greater_back_fail_repl_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_equal_back_fail_repl_pass_wrap_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_always_back_fail_decc_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_wrap_comp_less_back_fail_decw_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_greater_back_fail_zero_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_equal_back_fail_repl_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_never_back_fail_repl_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_never_back_fail_inv_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_keep_comp_less_back_fail_decw_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_not_equal_back_fail_keep_pass_incc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_always_back_fail_keep_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_less_back_fail_decc_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_equal_back_fail_wrap_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_always_back_fail_repl_pass_zero_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_less_back_fail_repl_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_zero_comp_never_back_fail_repl_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_incc_comp_never_back_fail_incc_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_less_or_equal_back_fail_incc_pass_decw_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_not_equal_back_fail_wrap_pass_wrap_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_not_equal_back_fail_decc_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_always_back_fail_wrap_pass_keep_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_equal_back_fail_decw_pass_keep_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_wrap_comp_equal_back_fail_keep_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_greater_back_fail_keep_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_always_back_fail_decw_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_always_back_fail_decc_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_less_back_fail_zero_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_never_back_fail_zero_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_always_back_fail_keep_pass_wrap_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_less_back_fail_decw_pass_decc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_zero_comp_never_back_fail_incc_pass_incc_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_wrap_comp_never_back_fail_incc_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_repl_comp_never_back_fail_decw_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_keep_comp_always_back_fail_incc_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_less_or_equal_back_fail_decc_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_always_back_fail_wrap_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_always_back_fail_inv_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_always_back_fail_inv_pass_wrap_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_never_back_fail_zero_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_greater_back_fail_repl_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_greater_back_fail_keep_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_keep_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_repl_comp_equal_back_fail_inv_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_less_or_equal_back_fail_decc_pass_inv_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_equal_back_fail_decc_pass_decc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_always_back_fail_keep_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_less_back_fail_zero_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_equal_back_fail_decc_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_greater_back_fail_keep_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_equal_back_fail_keep_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_inv_comp_less_back_fail_inv_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_greater_back_fail_keep_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_equal_back_fail_repl_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_never_back_fail_keep_pass_repl_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_always_back_fail_inv_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_never_back_fail_wrap_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_zero_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_less_back_fail_zero_pass_incc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_not_equal_back_fail_decw_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_not_equal_back_fail_keep_pass_decc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_not_equal_back_fail_inv_pass_inv_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_greater_back_fail_repl_pass_decc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_inv_comp_less_back_fail_keep_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_less_back_fail_zero_pass_incc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decw_comp_not_equal_back_fail_decw_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decw_comp_always_back_fail_zero_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_keep_comp_less_back_fail_decc_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decc_comp_not_equal_back_fail_keep_pass_inv_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_repl_comp_less_or_equal_back_fail_inv_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_always_back_fail_keep_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_never_back_fail_incc_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_equal_back_fail_inv_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_incc_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_decw_comp_less_back_fail_repl_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_less_or_equal_back_fail_zero_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_incc_comp_greater_back_fail_zero_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_repl_comp_less_back_fail_incc_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_less_or_equal_back_fail_zero_pass_repl_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_always_back_fail_decw_pass_decw_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_always_back_fail_decc_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_always_back_fail_incc_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_less_back_fail_decc_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_not_equal_back_fail_keep_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_not_equal_back_fail_keep_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_less_back_fail_wrap_pass_wrap_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_keep_comp_always_back_fail_zero_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_equal_back_fail_inv_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_greater_back_fail_decw_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_never_back_fail_keep_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_equal_back_fail_incc_pass_zero_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_greater_back_fail_incc_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_inv_comp_greater_back_fail_keep_pass_decw_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_never_back_fail_decc_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_wrap_comp_equal_back_fail_repl_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_never_back_fail_decc_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_not_equal_back_fail_wrap_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_incc_comp_less_or_equal_back_fail_decw_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_not_equal_back_fail_keep_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_never_back_fail_wrap_pass_wrap_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_equal_back_fail_inv_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_incc_comp_equal_back_fail_repl_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_always_back_fail_inv_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_equal_back_fail_repl_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_always_back_fail_repl_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decc_comp_never_back_fail_decw_pass_decc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_less_back_fail_inv_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_never_back_fail_incc_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_greater_back_fail_incc_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_never_back_fail_inv_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_less_back_fail_zero_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_never_back_fail_wrap_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decw_comp_less_back_fail_decw_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_always_back_fail_decc_pass_decc_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_equal_back_fail_decw_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_greater_back_fail_inv_pass_zero_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_repl_comp_less_or_equal_back_fail_decc_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_always_back_fail_decw_pass_wrap_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_less_back_fail_decc_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_equal_back_fail_incc_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_incc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_not_equal_back_fail_incc_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_greater_back_fail_inv_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_not_equal_back_fail_repl_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_not_equal_back_fail_keep_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_always_back_fail_decc_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_zero_comp_not_equal_back_fail_repl_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_never_back_fail_wrap_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_less_or_equal_back_fail_repl_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_greater_back_fail_repl_pass_inv_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_keep_comp_never_back_fail_incc_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_never_back_fail_decc_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_never_back_fail_repl_pass_incc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_always_back_fail_repl_pass_inv_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_greater_back_fail_zero_pass_incc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_keep_comp_less_or_equal_back_fail_decw_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_wrap_comp_greater_back_fail_wrap_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_never_back_fail_decc_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_zero_comp_always_back_fail_zero_pass_inv_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_repl_comp_not_equal_back_fail_wrap_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_greater_or_equal_back_fail_zero_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decw_comp_equal_back_fail_decw_pass_decc_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_less_back_fail_decc_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decw_comp_never_back_fail_zero_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_not_equal_back_fail_incc_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_incc_comp_always_back_fail_keep_pass_decw_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_never_back_fail_zero_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_not_equal_back_fail_decc_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_zero_comp_less_back_fail_keep_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_always_back_fail_decc_pass_wrap_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_greater_back_fail_decc_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_greater_back_fail_decw_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_less_back_fail_decc_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_greater_back_fail_inv_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_less_or_equal_back_fail_repl_pass_inv_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_equal_back_fail_keep_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_always_back_fail_zero_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_not_equal_back_fail_wrap_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_not_equal_back_fail_repl_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_not_equal_back_fail_keep_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decc_comp_less_or_equal_back_fail_inv_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_greater_back_fail_keep_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_always_back_fail_decc_pass_keep_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_equal_back_fail_decw_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_never_back_fail_incc_pass_keep_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_less_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_never_back_fail_wrap_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_decc_comp_less_or_equal_back_fail_inv_pass_zero_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_equal_back_fail_incc_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_always_back_fail_zero_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_less_back_fail_repl_pass_zero_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_always_back_fail_repl_pass_wrap_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_equal_back_fail_zero_pass_repl_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_always_back_fail_repl_pass_zero_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_incc_comp_never_back_fail_keep_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_wrap_comp_never_back_fail_incc_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_decw_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decw_comp_less_back_fail_inv_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_never_back_fail_incc_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_less_back_fail_zero_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_less_or_equal_back_fail_repl_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_never_back_fail_zero_pass_incc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_inv_comp_equal_back_fail_keep_pass_zero_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_greater_back_fail_decc_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decw_comp_greater_back_fail_decw_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_less_back_fail_wrap_pass_repl_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_less_back_fail_zero_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_always_back_fail_decw_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_less_or_equal_back_fail_inv_pass_zero_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_greater_back_fail_keep_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_equal_back_fail_zero_pass_keep_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_always_back_fail_keep_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decc_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decw_comp_always_back_fail_decc_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_not_equal_back_fail_repl_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_decc_comp_always_back_fail_decc_pass_incc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_always_back_fail_repl_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_not_equal_back_fail_decc_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_not_equal_back_fail_zero_pass_inv_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decw_comp_not_equal_back_fail_keep_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_not_equal_back_fail_wrap_pass_zero_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_incc_comp_not_equal_back_fail_decc_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_decw_comp_never_back_fail_decw_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_decc_comp_equal_back_fail_zero_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_incc_comp_less_back_fail_repl_pass_decc_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_not_equal_back_fail_decw_pass_zero_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_not_equal_back_fail_keep_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_never_back_fail_wrap_pass_inv_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_never_back_fail_decc_pass_keep_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decc_comp_less_back_fail_decc_pass_decc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_equal_back_fail_decw_pass_repl_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_equal_back_fail_keep_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_zero_comp_not_equal_back_fail_decc_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_zero_comp_never_back_fail_decw_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_always_back_fail_inv_pass_repl_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_always_back_fail_incc_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_less_back_fail_incc_pass_repl_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_equal_back_fail_incc_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_not_equal_back_fail_decc_pass_inv_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_greater_back_fail_wrap_pass_decw_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_equal_back_fail_inv_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_wrap_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_inv_comp_never_back_fail_decc_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_always_back_fail_repl_pass_incc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_greater_back_fail_decc_pass_inv_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_less_back_fail_inv_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_not_equal_back_fail_incc_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_equal_back_fail_zero_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_not_equal_back_fail_decw_pass_keep_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_always_back_fail_keep_pass_wrap_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_repl_comp_not_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_zero_comp_less_or_equal_back_fail_zero_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_keep_comp_not_equal_back_fail_decc_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_equal_back_fail_keep_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_never_back_fail_inv_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_always_back_fail_keep_pass_decw_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_always_back_fail_keep_pass_inv_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_equal_back_fail_inv_pass_keep_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_zero_comp_not_equal_back_fail_incc_pass_repl_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decc_comp_less_back_fail_repl_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_always_back_fail_repl_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_less_back_fail_repl_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_greater_back_fail_wrap_pass_inv_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_zero_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_incc_comp_always_back_fail_keep_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_less_or_equal_back_fail_inv_pass_repl_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_equal_back_fail_incc_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_less_back_fail_zero_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_equal_back_fail_inv_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_less_back_fail_keep_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_never_back_fail_decc_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_equal_back_fail_inv_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_greater_back_fail_decc_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_decc_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_never_back_fail_inv_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_equal_back_fail_repl_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_not_equal_back_fail_incc_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_keep_comp_greater_back_fail_keep_pass_incc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_never_back_fail_inv_pass_decw_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_greater_back_fail_keep_pass_decw_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_equal_back_fail_incc_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_repl_comp_never_back_fail_wrap_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_inv_comp_greater_back_fail_inv_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_inv_comp_always_back_fail_incc_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_less_or_equal_back_fail_repl_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_repl_comp_never_back_fail_zero_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_not_equal_back_fail_incc_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_decw_comp_less_back_fail_keep_pass_repl_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_repl_pass_zero_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_equal_back_fail_zero_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_never_back_fail_zero_pass_inv_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_not_equal_back_fail_inv_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_not_equal_back_fail_decw_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_never_back_fail_inv_pass_keep_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_never_back_fail_inv_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_equal_back_fail_incc_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_less_back_fail_wrap_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_decc_comp_never_back_fail_incc_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_greater_back_fail_keep_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_keep_comp_less_back_fail_decc_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_never_back_fail_keep_pass_zero_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_not_equal_back_fail_zero_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_greater_back_fail_zero_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_keep_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_zero_comp_greater_back_fail_incc_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_inv_comp_not_equal_back_fail_incc_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_always_back_fail_zero_pass_inv_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_greater_back_fail_zero_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_wrap_comp_less_back_fail_decc_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_inv_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_not_equal_back_fail_incc_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_equal_back_fail_incc_pass_decc_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_always_back_fail_wrap_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_incc_comp_never_back_fail_wrap_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_less_back_fail_zero_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_greater_back_fail_decw_pass_inv_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_decc_comp_equal_back_fail_decc_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_equal_back_fail_decw_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_not_equal_back_fail_repl_pass_incc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_inv_comp_never_back_fail_repl_pass_repl_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_not_equal_back_fail_decc_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_inv_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_repl_comp_less_back_fail_repl_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_inv_comp_equal_back_fail_zero_pass_wrap_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_less_back_fail_keep_pass_incc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_always_back_fail_zero_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_zero_comp_equal_back_fail_inv_pass_decc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_always_back_fail_incc_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_equal_back_fail_repl_pass_incc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_greater_back_fail_keep_pass_decc_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_inv_comp_greater_back_fail_decc_pass_repl_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_never_back_fail_wrap_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_wrap_comp_greater_back_fail_decc_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_zero_comp_less_back_fail_decc_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_zero_comp_equal_back_fail_keep_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_less_back_fail_inv_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_equal_back_fail_keep_pass_decc_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_equal_back_fail_decw_pass_decw_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_inv_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_inv_comp_never_back_fail_inv_pass_wrap_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_wrap_comp_not_equal_back_fail_zero_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_repl_comp_equal_back_fail_repl_pass_decw_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_inv_comp_less_or_equal_back_fail_decw_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_not_equal_back_fail_decw_pass_zero_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_repl_pass_decw_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_greater_back_fail_inv_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_incc_comp_always_back_fail_repl_pass_inv_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_equal_back_fail_keep_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_greater_back_fail_incc_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_never_back_fail_decw_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_not_equal_back_fail_wrap_pass_inv_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_less_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_equal_back_fail_keep_pass_decc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_less_back_fail_repl_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_wrap_comp_equal_back_fail_zero_pass_inv_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_greater_or_equal_back_fail_zero_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_not_equal_back_fail_wrap_pass_zero_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_never_back_fail_decc_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decc_comp_always_back_fail_decc_pass_incc_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_less_back_fail_incc_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_decw_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_keep_comp_not_equal_back_fail_wrap_pass_incc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_repl_comp_always_back_fail_incc_pass_decw_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_less_or_equal_back_fail_zero_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_never_back_fail_incc_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_wrap_comp_never_back_fail_decc_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_inv_comp_never_back_fail_incc_pass_decc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_less_or_equal_back_fail_inv_pass_repl_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_decc_comp_never_back_fail_wrap_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_zero_comp_never_back_fail_decc_pass_zero_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_repl_comp_greater_back_fail_incc_pass_keep_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_never_back_fail_inv_pass_keep_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_greater_back_fail_decc_pass_incc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_wrap_comp_less_back_fail_keep_pass_decc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_inv_comp_always_back_fail_wrap_pass_incc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_less_or_equal_back_fail_decc_pass_decc_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_incc_comp_equal_back_fail_wrap_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_equal_back_fail_decw_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_always_back_fail_repl_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decc_comp_greater_back_fail_incc_pass_zero_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_equal_back_fail_decc_pass_decw_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_decw_comp_less_back_fail_zero_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_repl_comp_less_or_equal_back_fail_decw_pass_repl_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_zero_comp_always_back_fail_incc_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_keep_comp_not_equal_back_fail_decc_pass_decw_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_always_back_fail_incc_pass_zero_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_less_back_fail_repl_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_less_back_fail_inv_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_never_back_fail_repl_pass_decw_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_keep_comp_never_back_fail_repl_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_keep_pass_keep_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_greater_back_fail_repl_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_zero_comp_not_equal_back_fail_decc_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_wrap_comp_equal_back_fail_decc_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_wrap_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_wrap_comp_less_or_equal_back_fail_inv_pass_zero_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_inv_comp_greater_back_fail_incc_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_incc_comp_never_back_fail_inv_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_less_back_fail_decc_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_zero_comp_never_back_fail_keep_pass_keep_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_decw_comp_equal_back_fail_repl_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_keep_comp_never_back_fail_inv_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_keep_comp_greater_back_fail_repl_pass_incc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_decc_comp_never_back_fail_decw_pass_zero_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_always_back_fail_incc_pass_repl_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_always_back_fail_incc_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_incc_comp_not_equal_back_fail_wrap_pass_keep_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_greater_back_fail_keep_pass_decw_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decc_comp_not_equal_back_fail_incc_pass_decw_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_repl_comp_less_or_equal_back_fail_repl_pass_wrap_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_less_or_equal_back_fail_keep_pass_inv_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_not_equal_back_fail_repl_pass_repl_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_decw_comp_never_back_fail_wrap_pass_wrap_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_zero_comp_less_back_fail_decc_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_greater_back_fail_keep_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_greater_back_fail_decw_pass_decw_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_less_back_fail_zero_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_less_back_fail_keep_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_less_back_fail_repl_pass_decc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_repl_comp_equal_back_fail_zero_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_always_back_fail_keep_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_incc_comp_less_or_equal_back_fail_inv_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_not_equal_back_fail_wrap_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_equal_back_fail_repl_pass_inv_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_greater_back_fail_zero_pass_zero_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_inv_pass_inv_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_decc_comp_less_or_equal_back_fail_decc_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_decc_comp_always_back_fail_wrap_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_keep_comp_not_equal_back_fail_keep_pass_keep_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_equal_back_fail_zero_pass_keep_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_wrap_comp_always_back_fail_repl_pass_repl_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decc_comp_never_back_fail_repl_pass_wrap_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_inv_comp_not_equal_back_fail_repl_pass_inv_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_never_back_fail_zero_pass_repl_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_never_back_fail_decw_pass_incc_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_decw_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_decc_pass_inv_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_repl_comp_never_back_fail_decw_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_equal_back_fail_wrap_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_equal_back_fail_decc_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_repl_comp_never_back_fail_decc_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_never_back_fail_keep_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_keep_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_zero_comp_always_back_fail_zero_pass_wrap_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_repl_comp_always_back_fail_decw_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_not_equal_back_fail_decw_pass_inv_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_inv_comp_equal_back_fail_zero_pass_wrap_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_equal_back_fail_decc_pass_wrap_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_decw_comp_never_back_fail_inv_pass_decw_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_keep_comp_less_back_fail_wrap_pass_decc_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_zero_comp_never_back_fail_inv_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_incc_comp_never_back_fail_incc_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_keep_comp_less_or_equal_back_fail_keep_pass_keep_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decw_comp_not_equal_back_fail_repl_pass_decw_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_never_back_fail_incc_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_always_back_fail_decw_pass_repl_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_zero_comp_less_or_equal_back_fail_decw_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_less_back_fail_repl_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_inv_comp_equal_back_fail_zero_pass_zero_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_decc_comp_less_back_fail_inv_pass_decw_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_keep_comp_not_equal_back_fail_incc_pass_zero_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_never_back_fail_keep_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_less_or_equal_back_fail_zero_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_not_equal_back_fail_wrap_pass_keep_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_incc_pass_decc_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_equal_back_fail_incc_pass_keep_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decw_comp_always_back_fail_incc_pass_keep_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_wrap_comp_equal_back_fail_zero_pass_zero_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_zero_comp_always_back_fail_repl_pass_repl_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_wrap_comp_less_or_equal_back_fail_wrap_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_incc_dfail_decw_comp_always_back_fail_zero_pass_incc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_keep_comp_equal_back_fail_keep_pass_incc_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decw_comp_greater_back_fail_decw_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_greater_back_fail_inv_pass_keep_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_repl_comp_equal_back_fail_decw_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_less_or_equal_back_fail_repl_pass_decw_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decw_comp_less_or_equal_back_fail_decw_pass_zero_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decc_dfail_inv_comp_greater_back_fail_decw_pass_zero_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_zero_pass_decc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_repl_comp_never_back_fail_decc_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_wrap_comp_never_back_fail_incc_pass_keep_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_zero_comp_always_back_fail_wrap_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_always_back_fail_incc_pass_decc_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_inv_comp_greater_back_fail_repl_pass_inv_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_less_back_fail_decw_pass_incc_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_decw_comp_always_back_fail_zero_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_not_equal_back_fail_repl_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_keep_comp_equal_back_fail_inv_pass_zero_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_never_back_fail_repl_pass_inv_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_repl_comp_less_or_equal_back_fail_decc_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_keep_comp_less_back_fail_inv_pass_zero_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_decw_comp_greater_or_equal_back_fail_zero_pass_repl_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_repl_comp_always_back_fail_keep_pass_wrap_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_repl_comp_less_back_fail_wrap_pass_zero_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_equal_back_fail_decc_pass_decw_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_less_back_fail_inv_pass_decw_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_decw_comp_less_back_fail_decw_pass_decc_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_greater_back_fail_repl_pass_wrap_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_incc_comp_greater_or_equal_back_fail_wrap_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_greater_or_equal_back_fail_zero_pass_incc_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_incc_dfail_incc_comp_greater_back_fail_incc_pass_zero_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_repl_comp_less_back_fail_inv_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_equal_back_fail_decw_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_keep_comp_always_back_fail_keep_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_incc_dfail_repl_comp_never_back_fail_incc_pass_keep_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_decc_comp_less_or_equal_back_fail_wrap_pass_incc_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_decc_pass_keep_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_never_back_fail_decw_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_greater_back_fail_decc_pass_inv_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_always_back_fail_inv_pass_decc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_inv_comp_less_back_fail_zero_pass_inv_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_always_back_fail_inv_pass_keep_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_repl_comp_greater_back_fail_incc_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_not_equal_back_fail_wrap_pass_wrap_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_always_back_fail_zero_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_always_back_fail_decw_pass_decw_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_keep_comp_equal_back_fail_repl_pass_wrap_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_keep_comp_never_back_fail_decw_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_less_or_equal_back_fail_repl_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_always_back_fail_decw_pass_incc_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_always_back_fail_keep_pass_decw_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_inv_comp_greater_back_fail_incc_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_never_back_fail_repl_pass_inv_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_never_back_fail_incc_pass_wrap_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_incc_comp_less_or_equal_back_fail_keep_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_inv_comp_greater_back_fail_wrap_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decc_comp_less_back_fail_inv_pass_zero_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_zero_comp_less_back_fail_decc_pass_incc_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_repl_comp_greater_or_equal_back_fail_decw_pass_repl_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_wrap_pass_keep_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_keep_pass_decc_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_less_or_equal_back_fail_repl_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_not_equal_back_fail_decc_pass_zero_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_less_back_fail_decc_pass_repl_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_incc_comp_equal_back_fail_zero_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_always_back_fail_keep_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_zero_comp_greater_back_fail_incc_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_inv_comp_never_back_fail_wrap_pass_decw_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_wrap_comp_less_back_fail_wrap_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_greater_back_fail_repl_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_equal_back_fail_repl_pass_incc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_wrap_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_repl_comp_less_or_equal_back_fail_incc_pass_incc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_inv_comp_less_back_fail_wrap_pass_decc_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_decw_comp_equal_back_fail_inv_pass_inv_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_always_back_fail_zero_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_decw_comp_greater_back_fail_repl_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decw_comp_always_back_fail_keep_pass_decc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_keep_comp_never_back_fail_inv_pass_repl_dfail_decw_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_wrap_comp_equal_back_fail_zero_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_wrap_comp_never_back_fail_repl_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_less_back_fail_inv_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_keep_dfail_inv_comp_never_back_fail_decc_pass_inv_dfail_decw_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_incc_comp_less_or_equal_back_fail_decw_pass_keep_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_never_back_fail_incc_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decw_comp_not_equal_back_fail_decw_pass_decw_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_incc_comp_greater_back_fail_inv_pass_wrap_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_never_back_fail_zero_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_wrap_comp_greater_back_fail_incc_pass_wrap_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_wrap_comp_greater_back_fail_keep_pass_repl_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_decw_comp_less_or_equal_back_fail_wrap_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_not_equal_back_fail_inv_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_always_back_fail_decw_pass_decw_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_decc_comp_equal_back_fail_decc_pass_repl_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_inv_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_less_back_fail_repl_pass_wrap_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_keep_comp_less_or_equal_back_fail_inv_pass_wrap_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_never_back_fail_decw_pass_incc_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_decc_comp_always_back_fail_decc_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_incc_comp_never_back_fail_incc_pass_wrap_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_decc_pass_wrap_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_zero_comp_equal_back_fail_inv_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_decc_comp_less_or_equal_back_fail_incc_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_incc_comp_not_equal_back_fail_incc_pass_decw_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_decc_comp_less_or_equal_back_fail_inv_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_zero_comp_less_or_equal_back_fail_zero_pass_incc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_always_back_fail_decc_pass_inv_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_repl_comp_never_back_fail_repl_pass_incc_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_always_back_fail_inv_pass_inv_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_repl_comp_equal_back_fail_decc_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_incc_comp_greater_or_equal_back_fail_decw_pass_zero_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_keep_comp_always_back_fail_decc_pass_repl_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_zero_comp_not_equal_back_fail_wrap_pass_inv_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_never_back_fail_keep_pass_repl_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_incc_comp_always_back_fail_zero_pass_decw_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_greater_back_fail_repl_pass_incc_dfail_decc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_zero_comp_less_back_fail_decw_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_decc_comp_greater_back_fail_inv_pass_keep_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_incc_comp_less_or_equal_back_fail_repl_pass_incc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_zero_comp_always_back_fail_decw_pass_wrap_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_keep_comp_greater_or_equal_back_fail_zero_pass_inv_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_less_or_equal_back_fail_wrap_pass_decc_dfail_keep_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_wrap_comp_greater_back_fail_repl_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_zero_comp_equal_back_fail_incc_pass_keep_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_incc_comp_never_back_fail_repl_pass_repl_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_repl_comp_greater_back_fail_wrap_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_less_or_equal_back_fail_keep_pass_zero_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_equal_back_fail_wrap_pass_decw_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_inv_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_repl_comp_equal_back_fail_decw_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_keep_comp_never_back_fail_repl_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_keep_comp_not_equal_back_fail_decw_pass_decc_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_repl_comp_greater_back_fail_decc_pass_decc_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_decw_comp_less_or_equal_back_fail_decw_pass_repl_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_repl_comp_never_back_fail_decw_pass_decw_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_incc_comp_less_or_equal_back_fail_decw_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_greater_back_fail_inv_pass_keep_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_not_equal_back_fail_decw_pass_incc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_incc_comp_less_back_fail_incc_pass_inv_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_repl_comp_always_back_fail_repl_pass_incc_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_equal_back_fail_wrap_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_decw_comp_never_back_fail_decc_pass_decc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_repl_comp_less_back_fail_zero_pass_decw_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_not_equal_back_fail_repl_pass_zero_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_decc_comp_greater_or_equal_back_fail_decc_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_incc_comp_greater_back_fail_wrap_pass_repl_dfail_decc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_inv_comp_equal_back_fail_decw_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decc_pass_zero_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_less_back_fail_decc_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_wrap_comp_less_or_equal_back_fail_incc_pass_decc_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_incc_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_less_back_fail_incc_pass_decw_dfail_incc_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_inv_comp_less_back_fail_inv_pass_decw_dfail_decc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_inv_comp_greater_or_equal_back_fail_keep_pass_zero_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_decw_comp_not_equal_back_fail_incc_pass_decc_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_never_back_fail_incc_pass_incc_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_zero_comp_less_or_equal_back_fail_repl_pass_decc_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_greater_back_fail_decc_pass_zero_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_greater_back_fail_repl_pass_inv_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_wrap_comp_always_back_fail_repl_pass_zero_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_incc_comp_greater_back_fail_repl_pass_inv_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_wrap_comp_always_back_fail_wrap_pass_inv_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_greater_or_equal_back_fail_inv_pass_decc_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_repl_comp_always_back_fail_zero_pass_decw_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_inv_comp_always_back_fail_repl_pass_repl_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_greater_back_fail_zero_pass_zero_dfail_inv_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_zero_comp_not_equal_back_fail_keep_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_never_back_fail_wrap_pass_keep_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_repl_comp_equal_back_fail_zero_pass_repl_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_keep_dfail_keep_comp_equal_back_fail_keep_pass_wrap_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_never_back_fail_decc_pass_keep_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_equal_back_fail_wrap_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_repl_dfail_incc_comp_equal_back_fail_inv_pass_decc_dfail_inv_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_keep_comp_always_back_fail_zero_pass_repl_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decw_comp_not_equal_back_fail_wrap_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_repl_comp_always_back_fail_decc_pass_inv_dfail_decw_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_zero_comp_never_back_fail_decc_pass_decc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_wrap_comp_never_back_fail_inv_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_incc_comp_equal_back_fail_repl_pass_keep_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_keep_comp_equal_back_fail_repl_pass_zero_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_zero_comp_greater_or_equal_back_fail_zero_pass_decw_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_keep_comp_never_back_fail_keep_pass_inv_dfail_zero_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_keep_comp_greater_back_fail_wrap_pass_zero_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_inv_comp_less_or_equal_back_fail_repl_pass_repl_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_less_or_equal_back_fail_decw_pass_inv_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_decc_comp_greater_back_fail_wrap_pass_wrap_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_decw_comp_greater_back_fail_inv_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_never_back_fail_decw_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_wrap_comp_equal_back_fail_zero_pass_decw_dfail_repl_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decw_pass_incc_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_zero_dfail_incc_comp_never_back_fail_incc_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_incc_comp_not_equal_back_fail_decc_pass_zero_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_decc_comp_greater_back_fail_decc_pass_incc_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decw_comp_equal_back_fail_zero_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_decw_comp_equal_back_fail_decw_pass_decc_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decw_dfail_wrap_comp_less_back_fail_wrap_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_wrap_comp_never_back_fail_incc_pass_decw_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_zero_dfail_wrap_comp_always_back_fail_keep_pass_wrap_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_incc_comp_always_back_fail_inv_pass_zero_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_inv_dfail_zero_comp_not_equal_back_fail_repl_pass_keep_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_inv_comp_never_back_fail_inv_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_never_back_fail_inv_pass_wrap_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_zero_dfail_decw_comp_greater_back_fail_decw_pass_decc_dfail_decc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_zero_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_less_back_fail_decc_pass_repl_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_inv_comp_not_equal_back_fail_repl_pass_incc_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_repl_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_zero_comp_equal_back_fail_zero_pass_zero_dfail_repl_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_zero_comp_greater_back_fail_repl_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decc_comp_equal_back_fail_decc_pass_repl_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_decc_comp_greater_back_fail_wrap_pass_wrap_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_always_back_fail_decw_pass_decc_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_zero_comp_never_back_fail_keep_pass_decw_dfail_wrap_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_never_back_fail_decw_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decw_dfail_zero_comp_equal_back_fail_decw_pass_inv_dfail_incc_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_zero_dfail_zero_comp_never_back_fail_repl_pass_decw_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_always_back_fail_zero_pass_zero_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_inv_comp_not_equal_back_fail_decc_pass_incc_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_zero_dfail_wrap_comp_not_equal_back_fail_wrap_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_keep_comp_less_back_fail_incc_pass_zero_dfail_wrap_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_decc_comp_always_back_fail_repl_pass_keep_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_keep_dfail_decc_comp_equal_back_fail_wrap_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_greater_back_fail_zero_pass_keep_dfail_decc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_decc_comp_always_back_fail_decw_pass_incc_dfail_zero_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_equal_back_fail_incc_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_zero_dfail_wrap_comp_less_back_fail_decw_pass_incc_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_zero_comp_equal_back_fail_repl_pass_zero_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_incc_dfail_keep_comp_always_back_fail_wrap_pass_wrap_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decw_dfail_decc_comp_always_back_fail_zero_pass_incc_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_inv_dfail_incc_comp_less_or_equal_back_fail_inv_pass_decw_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_decw_comp_greater_back_fail_decw_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_keep_dfail_decw_comp_equal_back_fail_decc_pass_incc_dfail_decw_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decc_dfail_repl_comp_greater_or_equal_back_fail_incc_pass_keep_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_inv_comp_less_or_equal_back_fail_decw_pass_decc_dfail_repl_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_zero_comp_less_back_fail_decw_pass_repl_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_inv_comp_never_back_fail_decw_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_wrap_comp_less_or_equal_back_fail_keep_pass_inv_dfail_inv_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_inv_dfail_wrap_comp_equal_back_fail_incc_pass_decc_dfail_wrap_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_incc_comp_equal_back_fail_zero_pass_inv_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_wrap_comp_equal_back_fail_incc_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_inv_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_wrap_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_keep_dfail_repl_comp_equal_back_fail_incc_pass_incc_dfail_inv_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_keep_comp_not_equal_back_fail_keep_pass_keep_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_wrap_comp_greater_back_fail_wrap_pass_repl_dfail_keep_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decw_dfail_incc_comp_less_or_equal_back_fail_keep_pass_decw_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_keep_dfail_decw_comp_less_or_equal_back_fail_decc_pass_decw_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_decc_comp_not_equal_back_fail_inv_pass_keep_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_incc_comp_greater_back_fail_decc_pass_decc_dfail_decc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_incc_dfail_repl_comp_greater_back_fail_keep_pass_repl_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_greater_or_equal_back_fail_repl_pass_incc_dfail_repl_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_greater_back_fail_repl_pass_repl_dfail_decw_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_wrap_comp_greater_or_equal_back_fail_repl_pass_decc_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_decc_pass_decc_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_inv_dfail_decw_comp_less_or_equal_back_fail_keep_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_repl_dfail_incc_comp_not_equal_back_fail_keep_pass_inv_dfail_decw_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_inv_dfail_zero_comp_not_equal_back_fail_decw_pass_repl_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_zero_comp_less_back_fail_repl_pass_inv_dfail_zero_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_wrap_dfail_keep_comp_equal_back_fail_zero_pass_decw_dfail_zero_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_keep_comp_never_back_fail_keep_pass_wrap_dfail_keep_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_incc_dfail_repl_comp_not_equal_back_fail_keep_pass_repl_dfail_keep_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_less_back_fail_incc_pass_incc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_zero_comp_always_back_fail_keep_pass_decw_dfail_keep_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_repl_comp_less_back_fail_decw_pass_zero_dfail_zero_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_keep_comp_less_or_equal_back_fail_incc_pass_inv_dfail_repl_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_inv_dfail_wrap_comp_greater_or_equal_back_fail_inv_pass_wrap_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_inv_comp_not_equal_back_fail_wrap_pass_inv_dfail_wrap_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_equal_back_fail_repl_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_greater_back_fail_incc_pass_decc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_wrap_pass_zero_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_less_or_equal_back_fail_wrap_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_inv_comp_greater_back_fail_incc_pass_inv_dfail_incc_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_decc_dfail_incc_comp_always_back_fail_keep_pass_decw_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_decc_dfail_incc_comp_equal_back_fail_decc_pass_decc_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_incc_comp_less_back_fail_repl_pass_decc_dfail_incc_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_wrap_dfail_wrap_comp_less_back_fail_incc_pass_repl_dfail_incc_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_zero_comp_equal_back_fail_inv_pass_wrap_dfail_wrap_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_inv_comp_less_back_fail_wrap_pass_zero_dfail_zero_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_decw_dfail_repl_comp_always_back_fail_wrap_pass_wrap_dfail_inv_comp_less
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_keep_dfail_inv_comp_not_equal_back_fail_zero_pass_decw_dfail_repl_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_decw_comp_equal_back_fail_repl_pass_keep_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_zero_dfail_incc_comp_always_back_fail_wrap_pass_wrap_dfail_decw_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_decc_comp_always_back_fail_wrap_pass_zero_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_decc_dfail_repl_comp_not_equal_back_fail_decc_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_decc_comp_always_back_fail_incc_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_repl_dfail_decw_comp_never_back_fail_inv_pass_repl_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_zero_dfail_decc_comp_greater_back_fail_repl_pass_repl_dfail_decw_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_repl_comp_not_equal_back_fail_decw_pass_zero_dfail_decc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_keep_dfail_keep_comp_less_or_equal_back_fail_inv_pass_decc_dfail_repl_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_inv_dfail_decc_comp_always_back_fail_wrap_pass_keep_dfail_keep_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_zero_pass_wrap_dfail_keep_comp_greater_back_fail_decc_pass_inv_dfail_decc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_incc_comp_greater_back_fail_zero_pass_wrap_dfail_keep_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_repl_dfail_inv_comp_greater_back_fail_repl_pass_wrap_dfail_incc_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_wrap_dfail_zero_comp_never_back_fail_wrap_pass_repl_dfail_inv_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_repl_dfail_zero_comp_less_or_equal_back_fail_incc_pass_wrap_dfail_zero_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_decw_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_repl_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_wrap_dfail_keep_comp_always_back_fail_decw_pass_decw_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_wrap_dfail_incc_comp_always_back_fail_decw_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_repl_dfail_wrap_comp_never_back_fail_wrap_pass_wrap_dfail_incc_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decw_pass_incc_dfail_zero_comp_less_or_equal_back_fail_inv_pass_repl_dfail_wrap_comp_never
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_incc_dfail_repl_comp_equal_back_fail_decw_pass_incc_dfail_zero_comp_less_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_wrap_pass_decc_dfail_wrap_comp_greater_back_fail_decc_pass_incc_dfail_repl_comp_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decw_dfail_keep_comp_always_back_fail_wrap_pass_decc_dfail_wrap_comp_greater
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_incc_pass_inv_dfail_incc_comp_not_equal_back_fail_keep_pass_decw_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_repl_dfail_inv_comp_greater_or_equal_back_fail_incc_pass_inv_dfail_incc_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_repl_pass_wrap_dfail_keep_comp_always_back_fail_keep_pass_repl_dfail_inv_comp_greater_or_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_inv_pass_decc_dfail_inv_comp_not_equal_back_fail_repl_pass_wrap_dfail_keep_comp_always
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_keep_pass_decc_dfail_zero_comp_less_back_fail_inv_pass_decc_dfail_inv_comp_not_equal
+dEQP-VK.pipeline.stencil.format.d32_sfloat_s8_uint.states.front_fail_decc_pass_repl_dfail_keep_comp_less_back_fail_keep_pass_decc_dfail_zero_comp_less
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r4g4_unorm_pack8.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r4g4b4a4_unorm_pack16.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r5g6b5_unorm_pack16.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r5g5b5a1_unorm_pack16.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r8_unorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r8_snorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r8_srgb.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8_unorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8_snorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8_srgb.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_unorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_snorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8_srgb.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_unorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_snorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r8g8b8a8_srgb.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.a2r10g10b10_unorm_pack32.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r16_unorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r16_snorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r16_sfloat.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16_unorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16_snorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16_sfloat.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_unorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_snorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16_sfloat.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_unorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_snorm.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r16g16b16a16_sfloat.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r32_sfloat.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r32g32_sfloat.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32_sfloat.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.r32g32b32a32_sfloat.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.b10g11r11_ufloat_pack32.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.e5b9g9r9_ufloat_pack32.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sa_z_sub_alpha_ca_cc_sub-color_sas_1msa_rsub_alpha_ca_da_sub-color_1mca_ca_min_alpha_1msc_1mcc_rsub-color_dc_da_sub_alpha_1mcc_1mda_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mcc_cc_min_alpha_sc_cc_min-color_o_o_min_alpha_1msc_1msc_add-color_sas_da_add_alpha_1mdc_ca_add-color_cc_1mda_sub_alpha_dc_1mda_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_dc_dc_add_alpha_o_1msc_add-color_sas_1mdc_max_alpha_sa_1msc_min-color_dc_sas_min_alpha_1mcc_cc_sub-color_z_1mda_add_alpha_o_1mdc_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.b4g4r4a4_unorm_pack16.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_dc_sas_rsub_alpha_1mdc_1msc_sub-color_1msa_1msc_add_alpha_ca_da_min-color_1msc_da_sub_alpha_1mca_ca_sub-color_o_1mda_max_alpha_sa_dc_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sas_1mda_rsub_alpha_1mda_1mcc_sub-color_1mda_1mca_min_alpha_o_cc_min-color_1mdc_da_min_alpha_1mda_da_min-color_sas_1msa_max_alpha_sas_o_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_ca_1mcc_rsub_alpha_sa_1msc_rsub-color_1mca_ca_rsub_alpha_1msc_da_rsub-color_1mcc_1mdc_sub_alpha_z_da_sub-color_sc_dc_add_alpha_1mdc_1msa_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1msa_dc_rsub_alpha_1mdc_1mcc_sub-color_sc_cc_min_alpha_sa_1mcc_max-color_z_cc_rsub_alpha_da_1msa_max-color_1msc_1msc_add_alpha_1mca_sc_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1msc_cc_max_alpha_z_ca_add-color_da_sa_max_alpha_1msa_sc_sub-color_sa_1mda_add_alpha_1mcc_ca_rsub-color_dc_dc_add_alpha_1mcc_z_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_dc_1mdc_max_alpha_1mcc_1msc_max-color_1msa_cc_add_alpha_da_z_min-color_sas_dc_max_alpha_z_sc_min-color_da_ca_rsub_alpha_z_z_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sa_o_sub_alpha_1mca_z_min-color_1mcc_ca_max_alpha_1msa_1mcc_max-color_cc_1mda_rsub_alpha_1mca_sa_max-color_1mcc_da_sub_alpha_o_1mda_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_cc_1mcc_sub_alpha_sa_dc_rsub-color_dc_1mdc_min_alpha_1mca_sas_rsub-color_1mda_o_min_alpha_1mca_sa_max-color_1mca_1msa_add_alpha_1mca_1msa_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_z_1mca_max_alpha_o_dc_max-color_sas_sa_max_alpha_sc_z_rsub-color_ca_sc_max_alpha_1msc_1msc_add-color_sc_o_add_alpha_dc_1mda_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1msa_sa_sub_alpha_ca_1mdc_min-color_1msa_1mcc_sub_alpha_1msa_z_add-color_dc_da_sub_alpha_o_ca_max-color_1mcc_o_sub_alpha_dc_sas_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mda_dc_max_alpha_1mca_da_sub-color_1mdc_sa_sub_alpha_1mda_cc_max-color_1msc_sc_rsub_alpha_sa_1msa_add-color_1msa_cc_sub_alpha_dc_z_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_da_dc_max_alpha_z_z_rsub-color_z_1mca_max_alpha_da_1mcc_max-color_z_sc_rsub_alpha_o_dc_min-color_1mca_o_max_alpha_1mda_ca_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mcc_cc_max_alpha_1mdc_o_max-color_sas_dc_rsub_alpha_1msc_1mda_rsub-color_sas_sa_min_alpha_1mda_cc_rsub-color_1mdc_sa_sub_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_z_cc_min_alpha_da_1mda_sub-color_o_1mca_rsub_alpha_1msc_ca_add-color_1mcc_1msc_rsub_alpha_sa_sa_min-color_1msa_1mda_rsub_alpha_1msa_sas_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_dc_dc_rsub_alpha_z_1mdc_sub-color_1msc_1msc_min_alpha_sc_dc_sub-color_da_sc_min_alpha_z_cc_min-color_1msc_sas_max_alpha_z_ca_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mca_dc_add_alpha_1msa_1msc_min-color_1msa_da_sub_alpha_1mca_sa_max-color_ca_1mdc_min_alpha_sa_1msc_min-color_ca_sc_min_alpha_1mcc_1mda_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mda_1mdc_sub_alpha_1mca_cc_rsub-color_sas_1msa_rsub_alpha_1mca_ca_rsub-color_1mca_da_sub_alpha_o_1mdc_min-color_1msc_1mcc_sub_alpha_z_ca_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_da_1mdc_min_alpha_da_1mca_rsub-color_1mcc_ca_max_alpha_cc_da_min-color_z_cc_min_alpha_1mca_z_rsub-color_dc_dc_rsub_alpha_z_dc_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mcc_ca_min_alpha_z_1mca_sub-color_1msa_z_max_alpha_1mda_z_sub-color_1msc_z_sub_alpha_1mdc_ca_sub-color_o_z_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_dc_cc_sub_alpha_dc_o_max-color_sas_1mcc_sub_alpha_dc_1mdc_add-color_z_ca_max_alpha_sc_1mdc_max-color_1msc_dc_add_alpha_1msa_sas_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mdc_sas_sub_alpha_da_cc_add-color_1mca_1msc_max_alpha_1mca_sc_max-color_1mcc_1mdc_sub_alpha_1mda_1mca_max-color_1msc_1msa_max_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sas_1mda_add_alpha_1msa_sas_rsub-color_1mda_cc_min_alpha_cc_cc_min-color_1mcc_1mca_rsub_alpha_1mca_1mda_min-color_sc_1mda_sub_alpha_sa_cc_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mca_da_add_alpha_sas_1mca_add-color_z_1mda_sub_alpha_ca_1mcc_rsub-color_sa_sa_max_alpha_dc_da_min-color_cc_dc_min_alpha_1msa_da_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_cc_sa_add_alpha_1msa_ca_rsub-color_cc_1mca_sub_alpha_o_1mcc_max-color_z_z_sub_alpha_sa_sa_rsub-color_cc_sc_rsub_alpha_1mdc_dc_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mcc_1mdc_max_alpha_sc_1mcc_sub-color_1mda_sa_max_alpha_sc_1msa_add-color_cc_dc_rsub_alpha_o_sa_min-color_sc_sa_max_alpha_1mda_1msa_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mca_1mda_sub_alpha_cc_sa_min-color_1mda_da_rsub_alpha_1mdc_1mda_sub-color_sc_1msc_min_alpha_1mdc_1mca_max-color_ca_cc_min_alpha_da_ca_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sas_cc_sub_alpha_1mcc_z_rsub-color_z_o_rsub_alpha_sa_1mdc_add-color_1mda_1msc_max_alpha_1mca_sa_sub-color_1msa_sa_rsub_alpha_z_sa_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1msa_1mcc_sub_alpha_cc_o_rsub-color_1mca_1mca_max_alpha_dc_1mdc_add-color_dc_sas_rsub_alpha_1mdc_1mdc_max-color_sas_1mca_max_alpha_sas_1msc_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sas_ca_sub_alpha_1mcc_da_min-color_cc_o_min_alpha_1mcc_sa_add-color_o_sas_add_alpha_ca_sc_sub-color_1msa_da_rsub_alpha_1mda_sc_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_dc_dc_max_alpha_sas_1msc_add-color_sc_sas_sub_alpha_1msa_1mcc_max-color_sc_sc_rsub_alpha_1mdc_dc_sub-color_1msc_sa_rsub_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1msc_sc_max_alpha_1mcc_1msa_max-color_1mdc_sas_min_alpha_1mcc_1msa_sub-color_ca_ca_max_alpha_dc_sc_max-color_1msa_sc_max_alpha_1mdc_o_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1msc_da_rsub_alpha_ca_sc_min-color_1mca_ca_min_alpha_da_1msc_sub-color_da_sc_sub_alpha_1msa_ca_rsub-color_1mca_1mca_add_alpha_da_z_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_o_ca_sub_alpha_cc_cc_add-color_da_1msc_min_alpha_sa_ca_sub-color_sas_sa_sub_alpha_1mdc_z_max-color_sa_z_add_alpha_sc_1mca_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1msc_dc_max_alpha_cc_1mca_rsub-color_o_1msa_min_alpha_1mda_1mda_rsub-color_cc_1mdc_add_alpha_sc_1mdc_add-color_sa_1mdc_min_alpha_sc_1mcc_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_o_1msa_min_alpha_1mcc_1mdc_max-color_z_sas_add_alpha_1mda_dc_add-color_sc_1mda_add_alpha_sc_cc_rsub-color_1msc_1msc_max_alpha_z_o_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1msa_sas_sub_alpha_1msa_1mca_sub-color_sas_ca_max_alpha_1mda_1msa_add-color_sa_da_sub_alpha_sa_z_rsub-color_ca_1mdc_add_alpha_z_sc_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_z_o_sub_alpha_1msa_1msa_sub-color_1mca_o_add_alpha_sa_sc_max-color_o_ca_rsub_alpha_o_dc_max-color_cc_1mdc_rsub_alpha_sas_z_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mda_1msc_rsub_alpha_sa_1mda_min-color_sa_1mcc_rsub_alpha_1msa_1mdc_max-color_ca_1mcc_max_alpha_1mca_1mcc_sub-color_dc_dc_max_alpha_da_sc_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_z_1mdc_rsub_alpha_1msa_cc_sub-color_da_1msc_min_alpha_1msc_sc_add-color_sa_sas_add_alpha_z_o_sub-color_dc_1msc_max_alpha_sa_da_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_ca_1mca_rsub_alpha_1mda_z_sub-color_sc_sc_add_alpha_1mca_sa_max-color_sa_1msa_min_alpha_1msc_sa_sub-color_dc_sc_add_alpha_1mdc_1mca_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mdc_1mcc_sub_alpha_sas_1msc_sub-color_ca_sc_sub_alpha_1mda_cc_max-color_o_ca_sub_alpha_1mda_sas_rsub-color_z_1mdc_rsub_alpha_1msa_z_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_o_sc_max_alpha_cc_1mca_add-color_da_1mca_add_alpha_1mcc_1msa_min-color_sa_z_rsub_alpha_1mca_dc_rsub-color_1msa_1mca_sub_alpha_cc_o_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sa_1mcc_sub_alpha_dc_o_rsub-color_cc_1mdc_rsub_alpha_1mdc_da_add-color_o_1mcc_min_alpha_sas_sas_max-color_1msa_sc_max_alpha_1msc_sa_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mcc_1mdc_min_alpha_da_sc_sub-color_1mcc_sa_min_alpha_sa_ca_max-color_o_z_add_alpha_1mda_da_add-color_1mdc_sa_min_alpha_1mcc_sc_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mcc_1mcc_rsub_alpha_z_sa_rsub-color_ca_o_max_alpha_z_1mca_sub-color_1mcc_ca_add_alpha_cc_1mdc_rsub-color_dc_dc_rsub_alpha_o_1mcc_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mcc_sc_sub_alpha_1mcc_ca_rsub-color_dc_1mda_max_alpha_ca_dc_add-color_1mca_o_rsub_alpha_1mcc_da_add-color_cc_cc_sub_alpha_o_z_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sa_dc_sub_alpha_1mdc_sa_sub-color_sa_sc_max_alpha_ca_o_add-color_1mcc_1mda_max_alpha_z_ca_max-color_1mca_1msa_min_alpha_1mca_sas_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_cc_cc_rsub_alpha_1mca_1msa_rsub-color_1mca_1msa_max_alpha_1mda_sc_min-color_sc_z_max_alpha_dc_1mca_add-color_1mda_ca_add_alpha_sas_1mdc_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sas_sas_min_alpha_1msa_1mda_min-color_ca_sc_min_alpha_da_1mdc_sub-color_z_1mdc_max_alpha_cc_z_rsub-color_z_dc_rsub_alpha_sc_1msc_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mdc_sc_min_alpha_o_z_min-color_o_1mda_add_alpha_sas_dc_min-color_dc_1mca_rsub_alpha_sc_1msa_add-color_1mda_da_rsub_alpha_1mca_dc_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mcc_o_add_alpha_1mda_1msa_min-color_sa_da_rsub_alpha_da_o_add-color_1msa_1msc_add_alpha_o_sas_sub-color_1msa_sas_min_alpha_1mdc_1msc_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sa_sas_sub_alpha_da_ca_add-color_1msc_o_add_alpha_o_1mca_sub-color_dc_1msc_max_alpha_da_1mdc_sub-color_1msa_1mdc_add_alpha_sc_1msa_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mda_ca_min_alpha_sas_dc_sub-color_ca_ca_max_alpha_1mda_sas_sub-color_sa_1msa_max_alpha_z_1mda_min-color_sc_1mdc_sub_alpha_1msa_da_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sas_sa_min_alpha_ca_1mda_max-color_1mdc_dc_add_alpha_1mda_sas_rsub-color_sas_1mca_max_alpha_1mca_1mcc_min-color_o_1msa_rsub_alpha_dc_da_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mcc_1msa_rsub_alpha_1mdc_1mcc_sub-color_1mdc_sc_add_alpha_1mcc_1mca_min-color_z_1msc_sub_alpha_ca_sa_min-color_sa_sa_rsub_alpha_cc_sas_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1msa_sc_rsub_alpha_1mcc_cc_sub-color_o_da_rsub_alpha_ca_1mca_sub-color_1msc_sa_sub_alpha_1mca_sc_sub-color_cc_1msa_sub_alpha_sas_ca_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1msa_1mdc_add_alpha_1mda_1mcc_sub-color_1mda_1msc_max_alpha_1msc_da_max-color_da_1mca_sub_alpha_z_cc_min-color_sc_da_add_alpha_1mdc_sc_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1msa_dc_sub_alpha_ca_o_sub-color_1mda_z_add_alpha_cc_1msa_sub-color_1msa_1mda_min_alpha_da_o_min-color_1mda_1msc_sub_alpha_dc_1msc_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sas_sas_add_alpha_1msa_1msa_min-color_1mda_dc_rsub_alpha_da_da_min-color_sas_o_add_alpha_sa_ca_min-color_da_1mda_sub_alpha_da_1mdc_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sas_ca_sub_alpha_sas_sas_sub-color_1mda_o_max_alpha_1msa_sas_sub-color_1mdc_o_sub_alpha_sas_sc_max-color_1mdc_1mcc_add_alpha_sa_z_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_da_sc_max_alpha_cc_1mda_sub-color_sas_z_rsub_alpha_sas_o_rsub-color_1mdc_1msa_rsub_alpha_1mca_1mdc_min-color_1msc_sas_sub_alpha_dc_sa_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sas_1msc_min_alpha_dc_1mda_sub-color_o_1msc_max_alpha_1mda_1mca_add-color_cc_sc_add_alpha_ca_1mcc_rsub-color_1mca_dc_min_alpha_sc_o_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mcc_1mda_rsub_alpha_da_da_rsub-color_sas_1mdc_rsub_alpha_1msa_sa_add-color_cc_cc_rsub_alpha_sa_1mca_min-color_z_1msa_max_alpha_1msc_cc_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_cc_cc_rsub_alpha_sas_1mcc_max-color_da_1mca_max_alpha_da_1msa_sub-color_z_o_rsub_alpha_dc_sas_sub-color_1mda_1msa_add_alpha_1mda_1mdc_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mca_z_min_alpha_1mcc_cc_max-color_z_sas_max_alpha_ca_sa_max-color_1mcc_1mca_sub_alpha_sa_o_rsub-color_z_1mdc_sub_alpha_o_1mda_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_z_1msa_sub_alpha_z_o_rsub-color_o_sc_sub_alpha_1mdc_1mda_rsub-color_1mcc_1mdc_rsub_alpha_1mcc_z_min-color_cc_sa_max_alpha_o_da_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_ca_sa_min_alpha_1mdc_dc_rsub-color_1mcc_1mca_sub_alpha_1mdc_dc_min-color_o_1mcc_sub_alpha_1msc_z_min-color_sas_1msa_add_alpha_z_1msa_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_z_1mdc_add_alpha_dc_1mdc_sub-color_1msa_o_max_alpha_1msc_1msa_sub-color_1mda_z_max_alpha_z_sa_min-color_sas_1mdc_min_alpha_1mcc_sa_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1msc_1mdc_min_alpha_z_sc_rsub-color_1mda_o_min_alpha_1mdc_sa_max-color_sc_1mcc_rsub_alpha_z_1mdc_max-color_1mda_z_add_alpha_cc_sa_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mca_1mca_rsub_alpha_o_1mdc_sub-color_sc_dc_add_alpha_1mcc_ca_add-color_sc_da_rsub_alpha_sa_ca_min-color_o_1msc_max_alpha_ca_1msc_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_z_dc_rsub_alpha_z_ca_min-color_sa_ca_rsub_alpha_sas_z_max-color_ca_sas_max_alpha_1mda_sas_max-color_1mda_sc_max_alpha_sc_1mda_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sc_1mdc_rsub_alpha_sa_1msc_min-color_1mca_ca_max_alpha_sc_sc_max-color_sa_dc_sub_alpha_cc_sas_sub-color_z_1mca_sub_alpha_da_da_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sas_sa_min_alpha_sa_sas_sub-color_1mda_dc_max_alpha_1mdc_sa_add-color_1mdc_cc_min_alpha_1mcc_dc_max-color_da_z_rsub_alpha_1mda_cc_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mca_1mdc_add_alpha_1msa_z_add-color_ca_1mdc_sub_alpha_1mcc_sc_max-color_sc_1mdc_rsub_alpha_1mdc_1mdc_rsub-color_1mdc_1msc_sub_alpha_sa_1mcc_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mdc_cc_sub_alpha_1mdc_1mcc_max-color_z_sc_rsub_alpha_1msc_sas_sub-color_dc_cc_max_alpha_z_1mcc_min-color_1msc_1mda_min_alpha_sa_1mca_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_ca_cc_sub_alpha_1msa_ca_max-color_sa_dc_rsub_alpha_cc_1mca_rsub-color_dc_cc_add_alpha_1mda_1mca_rsub-color_sa_1mcc_add_alpha_1mca_da_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mca_sa_min_alpha_cc_sas_rsub-color_dc_sc_min_alpha_sas_dc_min-color_1mda_1msc_max_alpha_sa_1msc_max-color_cc_1mdc_min_alpha_1mda_1msc_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_cc_1mdc_min_alpha_1msa_da_max-color_1mdc_da_sub_alpha_cc_cc_max-color_1msa_sas_add_alpha_sc_dc_sub-color_z_sas_min_alpha_1msa_o_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sas_1mcc_add_alpha_1msc_sa_min-color_sc_da_max_alpha_sa_da_min-color_1msa_dc_max_alpha_ca_o_sub-color_sas_sas_max_alpha_da_da_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sas_1mcc_min_alpha_dc_1mdc_min-color_1mda_1mcc_add_alpha_1mca_1msa_max-color_1mda_sc_add_alpha_sc_1msa_sub-color_dc_sa_min_alpha_1mdc_1mcc_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1msa_1mcc_sub_alpha_o_ca_add-color_da_sa_max_alpha_sa_1mdc_max-color_1mdc_1msc_min_alpha_o_1mda_rsub-color_dc_ca_sub_alpha_sc_1mca_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_z_o_rsub_alpha_dc_1mca_sub-color_1mca_sas_rsub_alpha_da_1msc_sub-color_1mca_da_rsub_alpha_ca_1msc_sub-color_o_1msa_min_alpha_1msc_1msc_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sc_1mcc_min_alpha_1msc_sc_rsub-color_1mda_1mca_min_alpha_1msa_sc_sub-color_1mcc_dc_min_alpha_1mdc_1msa_max-color_1mda_da_rsub_alpha_1mca_z_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mcc_sc_add_alpha_cc_1mca_sub-color_1mcc_sc_min_alpha_z_1mca_rsub-color_dc_da_sub_alpha_ca_1mdc_add-color_sas_1msc_max_alpha_1msc_ca_rsub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_o_1mcc_max_alpha_sa_ca_rsub-color_1mdc_1mda_max_alpha_1mdc_sa_sub-color_1mdc_1mda_sub_alpha_sa_o_sub-color_1mcc_1msa_add_alpha_1mdc_1mdc_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_da_1mda_add_alpha_cc_1msa_rsub-color_sas_1mca_sub_alpha_1mdc_1mca_rsub-color_da_da_min_alpha_sas_ca_rsub-color_1mca_dc_max_alpha_1mdc_sas_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_sc_ca_add_alpha_sas_1mcc_sub-color_1msc_1mdc_add_alpha_o_o_rsub-color_sc_1mcc_min_alpha_1mda_cc_sub-color_o_cc_min_alpha_z_ca_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_dc_1mda_sub_alpha_1mcc_dc_sub-color_1mcc_o_sub_alpha_1mda_ca_sub-color_1mcc_ca_min_alpha_1mcc_cc_add-color_1mdc_da_min_alpha_dc_sa_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_z_1mdc_sub_alpha_cc_cc_max-color_1mca_1msc_max_alpha_1mcc_sas_max-color_1mdc_1mda_sub_alpha_1msa_1mda_sub-color_sas_da_max_alpha_da_o_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mda_sa_min_alpha_cc_ca_add-color_1msa_1mca_min_alpha_1mcc_1msc_min-color_1mda_dc_sub_alpha_sas_sa_add-color_1msc_1mca_add_alpha_z_cc_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mca_1mda_add_alpha_1mda_1mcc_add-color_da_1msc_add_alpha_1mca_ca_max-color_cc_da_sub_alpha_dc_da_rsub-color_z_1mdc_rsub_alpha_1mca_1msa_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_da_sc_rsub_alpha_sa_cc_rsub-color_sas_1mda_sub_alpha_1mcc_dc_sub-color_da_1mca_min_alpha_1mda_o_rsub-color_1msa_da_max_alpha_ca_cc_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mcc_dc_max_alpha_1mca_1mca_sub-color_1mda_1mdc_max_alpha_sc_sas_rsub-color_1msc_cc_add_alpha_1msc_1msc_max-color_cc_sc_min_alpha_dc_1msc_min
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mca_1mca_sub_alpha_o_1mda_add-color_z_1mcc_max_alpha_sc_1mdc_add-color_1mca_1mdc_add_alpha_sc_sa_min-color_sas_o_max_alpha_1mcc_1mdc_add
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_dc_1msc_min_alpha_ca_cc_min-color_z_1msc_rsub_alpha_sa_z_sub-color_1mdc_1mdc_max_alpha_sa_cc_sub-color_1mcc_1mdc_max_alpha_dc_1mda_max
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_1mda_ca_sub_alpha_1mda_dc_add-color_1msa_z_sub_alpha_o_sa_sub-color_dc_1mcc_max_alpha_sa_dc_max-color_o_da_sub_alpha_1mda_1msc_sub
+dEQP-VK.pipeline.blend.format.b5g5r5a1_unorm_pack16.states.color_o_1msa_add_alpha_o_1mda_max-color_da_1msa_sub_alpha_dc_sc_rsub-color_sc_ca_max_alpha_1mcc_sa_max-color_dc_ca_min_alpha_z_sc_rsub
+dEQP-VK.pipeline.depth.format_features.support_d16_unorm
+dEQP-VK.pipeline.depth.format_features.support_d24_unorm_or_d32_sfloat
+dEQP-VK.pipeline.depth.format_features.support_d24_unorm_s8_uint_or_d32_sfloat_s8_uint
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.not_equal_not_equal_not_equal_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.not_equal_equal_equal_greater
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.not_equal_greater_greater_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.not_equal_greater_or_equal_greater_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.not_equal_less_or_equal_less_or_equal_always
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.not_equal_less_less_less
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.not_equal_never_never_never
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.equal_not_equal_equal_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.equal_equal_not_equal_less
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.equal_greater_greater_or_equal_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.equal_greater_or_equal_greater_greater
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.equal_less_or_equal_less_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.not_equal_always_always_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.equal_less_never_always
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.equal_never_less_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_not_equal_greater_less
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_equal_greater_or_equal_always
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_greater_not_equal_greater
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_greater_or_equal_less_or_equal_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_less_or_equal_never_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_less_equal_never
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_or_equal_not_equal_greater_or_equal_greater
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_never_always_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_or_equal_equal_greater_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_or_equal_greater_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_or_equal_greater_or_equal_not_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_or_equal_less_or_equal_always_less
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_or_equal_less_less_or_equal_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_or_equal_always_less_never
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_or_equal_not_equal_less_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_or_equal_equal_less_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_or_equal_greater_never_less
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_or_equal_greater_or_equal_equal_always
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_or_equal_less_not_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_or_equal_less_or_equal_greater_or_equal_never
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_not_equal_less_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_equal_never_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_greater_less_or_equal_never
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_less_or_equal_greater_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_less_always_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_never_not_equal_always
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.never_not_equal_always_always
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_always_greater_or_equal_less
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.never_greater_or_equal_never_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.never_never_less_greater
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.never_less_or_equal_equal_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.never_less_greater_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.never_always_greater_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.always_equal_always_never
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.always_never_equal_less
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.always_greater_less_always
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.always_always_never_greater
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.always_less_or_equal_not_equal_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_or_equal_never_greater_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.never_equal_less_or_equal_less
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.equal_greater_or_equal_always_never
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_always_less_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_never_greater_or_equal_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.equal_always_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_or_equal_greater_always_greater
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.never_not_equal_not_equal_never
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.always_less_greater_greater
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.always_not_equal_never_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_or_equal_always_not_equal_always
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_or_equal_always_less_or_equal_greater
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_greater_or_equal_less_greater
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.always_equal_less_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.always_greater_or_equal_greater_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_or_equal_greater_or_equal_never_less
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.greater_or_equal_never_greater_never
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.less_greater_equal_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.never_greater_always_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.not_equal_not_equal_greater_always
+dEQP-VK.pipeline.depth.format.d16_unorm.compare_ops.not_equal_less_or_equal_not_equal_greater
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.not_equal_not_equal_not_equal_not_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.not_equal_equal_equal_greater
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.not_equal_greater_greater_less_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.not_equal_greater_or_equal_greater_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.not_equal_less_or_equal_less_or_equal_always
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.not_equal_less_less_less
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.not_equal_never_never_never
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.equal_not_equal_equal_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.equal_equal_not_equal_less
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.equal_greater_greater_or_equal_not_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.equal_greater_or_equal_greater_greater
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.equal_less_or_equal_less_less_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.not_equal_always_always_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.equal_less_never_always
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.equal_never_less_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_not_equal_greater_less
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_equal_greater_or_equal_always
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_greater_not_equal_greater
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_greater_or_equal_less_or_equal_not_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_less_or_equal_never_greater_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_less_equal_never
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_not_equal_greater_or_equal_greater
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_never_always_less_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_equal_greater_not_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_greater_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_greater_or_equal_not_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_less_or_equal_always_less
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_less_less_or_equal_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_always_less_never
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_or_equal_not_equal_less_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_or_equal_equal_less_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_or_equal_greater_never_less
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_or_equal_greater_or_equal_equal_always
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_or_equal_less_not_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_or_equal_less_or_equal_greater_or_equal_never
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_not_equal_less_greater_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_equal_never_less_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_greater_less_or_equal_never
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_less_or_equal_greater_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_less_always_not_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_never_not_equal_always
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.never_not_equal_always_always
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_always_greater_or_equal_less
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.never_greater_or_equal_never_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.never_never_less_greater
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.never_less_or_equal_equal_not_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.never_less_greater_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.never_always_greater_greater_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.always_equal_always_never
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.always_never_equal_less
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.always_greater_less_always
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.always_always_never_greater
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.always_less_or_equal_not_equal_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_or_equal_never_greater_not_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.never_equal_less_or_equal_less
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.equal_greater_or_equal_always_never
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_always_less_not_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_never_greater_or_equal_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.equal_always_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_or_equal_greater_always_greater
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.never_not_equal_not_equal_never
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.always_less_greater_greater
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.always_not_equal_never_not_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_always_not_equal_always
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_or_equal_always_less_or_equal_greater
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_greater_or_equal_less_greater
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.always_equal_less_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.always_greater_or_equal_greater_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_greater_or_equal_never_less
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.greater_or_equal_never_greater_never
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.less_greater_equal_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.never_greater_always_greater_or_equal
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.not_equal_not_equal_greater_always
+dEQP-VK.pipeline.depth.format.x8_d24_unorm_pack32.compare_ops.not_equal_less_or_equal_not_equal_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.not_equal_not_equal_not_equal_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.not_equal_equal_equal_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.not_equal_greater_greater_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.not_equal_greater_or_equal_greater_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.not_equal_less_or_equal_less_or_equal_always
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.not_equal_less_less_less
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.not_equal_never_never_never
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.equal_not_equal_equal_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.equal_equal_not_equal_less
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.equal_greater_greater_or_equal_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.equal_greater_or_equal_greater_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.equal_less_or_equal_less_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.not_equal_always_always_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.equal_less_never_always
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.equal_never_less_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_not_equal_greater_less
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_equal_greater_or_equal_always
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_greater_not_equal_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_greater_or_equal_less_or_equal_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_less_or_equal_never_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_less_equal_never
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_or_equal_not_equal_greater_or_equal_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_never_always_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_or_equal_equal_greater_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_or_equal_greater_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_or_equal_greater_or_equal_not_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_or_equal_less_or_equal_always_less
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_or_equal_less_less_or_equal_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_or_equal_always_less_never
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_or_equal_not_equal_less_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_or_equal_equal_less_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_or_equal_greater_never_less
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_or_equal_greater_or_equal_equal_always
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_or_equal_less_not_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_or_equal_less_or_equal_greater_or_equal_never
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_not_equal_less_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_equal_never_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_greater_less_or_equal_never
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_less_or_equal_greater_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_less_always_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_never_not_equal_always
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.never_not_equal_always_always
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_always_greater_or_equal_less
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.never_greater_or_equal_never_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.never_never_less_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.never_less_or_equal_equal_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.never_less_greater_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.never_always_greater_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.always_equal_always_never
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.always_never_equal_less
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.always_greater_less_always
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.always_always_never_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.always_less_or_equal_not_equal_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_or_equal_never_greater_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.never_equal_less_or_equal_less
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.equal_greater_or_equal_always_never
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_always_less_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_never_greater_or_equal_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.equal_always_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_or_equal_greater_always_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.never_not_equal_not_equal_never
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.always_less_greater_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.always_not_equal_never_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_or_equal_always_not_equal_always
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_or_equal_always_less_or_equal_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_greater_or_equal_less_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.always_equal_less_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.always_greater_or_equal_greater_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_or_equal_greater_or_equal_never_less
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.greater_or_equal_never_greater_never
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.less_greater_equal_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.never_greater_always_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.not_equal_not_equal_greater_always
+dEQP-VK.pipeline.depth.format.d32_sfloat.compare_ops.not_equal_less_or_equal_not_equal_greater
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.not_equal_not_equal_not_equal_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.not_equal_equal_equal_greater
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.not_equal_greater_greater_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.not_equal_greater_or_equal_greater_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.not_equal_less_or_equal_less_or_equal_always
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.not_equal_less_less_less
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.not_equal_never_never_never
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.equal_not_equal_equal_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.equal_equal_not_equal_less
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.equal_greater_greater_or_equal_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.equal_greater_or_equal_greater_greater
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.equal_less_or_equal_less_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.not_equal_always_always_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.equal_less_never_always
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.equal_never_less_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_not_equal_greater_less
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_equal_greater_or_equal_always
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_greater_not_equal_greater
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_greater_or_equal_less_or_equal_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_less_or_equal_never_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_less_equal_never
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_or_equal_not_equal_greater_or_equal_greater
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_never_always_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_or_equal_equal_greater_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_or_equal_greater_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_or_equal_greater_or_equal_not_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_or_equal_less_or_equal_always_less
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_or_equal_less_less_or_equal_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_or_equal_always_less_never
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_or_equal_not_equal_less_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_or_equal_equal_less_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_or_equal_greater_never_less
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_or_equal_greater_or_equal_equal_always
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_or_equal_less_not_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_or_equal_less_or_equal_greater_or_equal_never
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_not_equal_less_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_equal_never_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_greater_less_or_equal_never
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_less_or_equal_greater_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_less_always_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_never_not_equal_always
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.never_not_equal_always_always
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_always_greater_or_equal_less
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.never_greater_or_equal_never_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.never_never_less_greater
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.never_less_or_equal_equal_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.never_less_greater_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.never_always_greater_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.always_equal_always_never
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.always_never_equal_less
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.always_greater_less_always
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.always_always_never_greater
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.always_less_or_equal_not_equal_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_or_equal_never_greater_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.never_equal_less_or_equal_less
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.equal_greater_or_equal_always_never
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_always_less_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_never_greater_or_equal_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.equal_always_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_or_equal_greater_always_greater
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.never_not_equal_not_equal_never
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.always_less_greater_greater
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.always_not_equal_never_not_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_or_equal_always_not_equal_always
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_or_equal_always_less_or_equal_greater
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_greater_or_equal_less_greater
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.always_equal_less_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.always_greater_or_equal_greater_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_or_equal_greater_or_equal_never_less
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.greater_or_equal_never_greater_never
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.less_greater_equal_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.never_greater_always_greater_or_equal
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.not_equal_not_equal_greater_always
+dEQP-VK.pipeline.depth.format.d16_unorm_s8_uint.compare_ops.not_equal_less_or_equal_not_equal_greater
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.not_equal_not_equal_not_equal_not_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.not_equal_equal_equal_greater
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.not_equal_greater_greater_less_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.not_equal_greater_or_equal_greater_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.not_equal_less_or_equal_less_or_equal_always
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.not_equal_less_less_less
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.not_equal_never_never_never
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.equal_not_equal_equal_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.equal_equal_not_equal_less
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.equal_greater_greater_or_equal_not_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.equal_greater_or_equal_greater_greater
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.equal_less_or_equal_less_less_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.not_equal_always_always_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.equal_less_never_always
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.equal_never_less_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_not_equal_greater_less
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_equal_greater_or_equal_always
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_greater_not_equal_greater
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_greater_or_equal_less_or_equal_not_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_less_or_equal_never_greater_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_less_equal_never
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_or_equal_not_equal_greater_or_equal_greater
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_never_always_less_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_or_equal_equal_greater_not_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_or_equal_greater_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_or_equal_greater_or_equal_not_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_or_equal_less_or_equal_always_less
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_or_equal_less_less_or_equal_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_or_equal_always_less_never
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_or_equal_not_equal_less_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_or_equal_equal_less_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_or_equal_greater_never_less
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_or_equal_greater_or_equal_equal_always
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_or_equal_less_not_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_or_equal_less_or_equal_greater_or_equal_never
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_not_equal_less_greater_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_equal_never_less_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_greater_less_or_equal_never
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_less_or_equal_greater_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_less_always_not_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_never_not_equal_always
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.never_not_equal_always_always
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_always_greater_or_equal_less
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.never_greater_or_equal_never_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.never_never_less_greater
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.never_less_or_equal_equal_not_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.never_less_greater_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.never_always_greater_greater_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.always_equal_always_never
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.always_never_equal_less
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.always_greater_less_always
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.always_always_never_greater
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.always_less_or_equal_not_equal_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_or_equal_never_greater_not_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.never_equal_less_or_equal_less
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.equal_greater_or_equal_always_never
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_always_less_not_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_never_greater_or_equal_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.equal_always_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_or_equal_greater_always_greater
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.never_not_equal_not_equal_never
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.always_less_greater_greater
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.always_not_equal_never_not_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_or_equal_always_not_equal_always
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_or_equal_always_less_or_equal_greater
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_greater_or_equal_less_greater
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.always_equal_less_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.always_greater_or_equal_greater_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_or_equal_greater_or_equal_never_less
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.greater_or_equal_never_greater_never
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.less_greater_equal_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.never_greater_always_greater_or_equal
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.not_equal_not_equal_greater_always
+dEQP-VK.pipeline.depth.format.d24_unorm_s8_uint.compare_ops.not_equal_less_or_equal_not_equal_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.not_equal_not_equal_not_equal_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.not_equal_equal_equal_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.not_equal_greater_greater_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.not_equal_greater_or_equal_greater_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.not_equal_less_or_equal_less_or_equal_always
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.not_equal_less_less_less
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.not_equal_never_never_never
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.equal_not_equal_equal_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.equal_equal_not_equal_less
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.equal_greater_greater_or_equal_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.equal_greater_or_equal_greater_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.equal_less_or_equal_less_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.not_equal_always_always_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.equal_less_never_always
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.equal_never_less_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_not_equal_greater_less
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_equal_greater_or_equal_always
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_greater_not_equal_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_greater_or_equal_less_or_equal_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_less_or_equal_never_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_less_equal_never
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_or_equal_not_equal_greater_or_equal_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_never_always_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_or_equal_equal_greater_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_or_equal_greater_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_or_equal_greater_or_equal_not_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_or_equal_less_or_equal_always_less
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_or_equal_less_less_or_equal_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_or_equal_always_less_never
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_or_equal_not_equal_less_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_or_equal_equal_less_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_or_equal_greater_never_less
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_or_equal_greater_or_equal_equal_always
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_or_equal_less_not_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_or_equal_less_or_equal_greater_or_equal_never
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_not_equal_less_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_equal_never_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_greater_less_or_equal_never
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_less_or_equal_greater_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_less_always_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_never_not_equal_always
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.never_not_equal_always_always
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_always_greater_or_equal_less
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.never_greater_or_equal_never_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.never_never_less_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.never_less_or_equal_equal_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.never_less_greater_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.never_always_greater_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.always_equal_always_never
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.always_never_equal_less
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.always_greater_less_always
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.always_always_never_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.always_less_or_equal_not_equal_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_or_equal_never_greater_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.never_equal_less_or_equal_less
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.equal_greater_or_equal_always_never
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_always_less_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_never_greater_or_equal_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.equal_always_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_or_equal_greater_always_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.never_not_equal_not_equal_never
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.always_less_greater_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.always_not_equal_never_not_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_or_equal_always_not_equal_always
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_or_equal_always_less_or_equal_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_greater_or_equal_less_greater
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.always_equal_less_or_equal_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.always_greater_or_equal_greater_or_equal_less_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_or_equal_greater_or_equal_never_less
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.greater_or_equal_never_greater_never
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.less_greater_equal_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.never_greater_always_greater_or_equal
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.not_equal_not_equal_greater_always
+dEQP-VK.pipeline.depth.format.d32_sfloat_s8_uint.compare_ops.not_equal_less_or_equal_not_equal_greater
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4_unorm_pack8.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4_unorm_pack8.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4_unorm_pack8.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4_unorm_pack8.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4_unorm_pack8.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4_unorm_pack8.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4_unorm_pack8.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4_unorm_pack8.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4_unorm_pack8.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4b4a4_unorm_pack16.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4b4a4_unorm_pack16.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4b4a4_unorm_pack16.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4b4a4_unorm_pack16.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4b4a4_unorm_pack16.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4b4a4_unorm_pack16.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4b4a4_unorm_pack16.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4b4a4_unorm_pack16.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r4g4b4a4_unorm_pack16.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g6b5_unorm_pack16.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g6b5_unorm_pack16.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g6b5_unorm_pack16.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g6b5_unorm_pack16.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g6b5_unorm_pack16.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g6b5_unorm_pack16.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g6b5_unorm_pack16.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g6b5_unorm_pack16.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g6b5_unorm_pack16.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g5b5a1_unorm_pack16.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g5b5a1_unorm_pack16.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g5b5a1_unorm_pack16.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g5b5a1_unorm_pack16.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g5b5a1_unorm_pack16.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g5b5a1_unorm_pack16.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g5b5a1_unorm_pack16.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g5b5a1_unorm_pack16.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r5g5b5a1_unorm_pack16.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_unorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_unorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_unorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_unorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_unorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_unorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_unorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_unorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_snorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_snorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_snorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_snorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_snorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_snorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_snorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_snorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_uint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_sint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_srgb.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_srgb.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_srgb.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_srgb.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_srgb.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_srgb.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_srgb.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_srgb.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8_srgb.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_unorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_unorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_unorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_unorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_unorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_unorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_unorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_unorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_snorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_snorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_snorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_snorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_snorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_snorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_snorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_snorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_uint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_sint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_srgb.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_srgb.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_srgb.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_srgb.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_srgb.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_srgb.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_srgb.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_srgb.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8_srgb.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_unorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_unorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_unorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_unorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_unorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_unorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_unorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_unorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_snorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_snorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_snorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_snorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_snorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_snorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_snorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_snorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_uint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_sint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_srgb.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_srgb.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_srgb.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_srgb.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_srgb.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_srgb.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_srgb.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_srgb.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8_srgb.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_unorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_unorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_unorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_unorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_unorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_unorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_unorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_unorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_snorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_snorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_snorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_snorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_snorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_snorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_snorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_snorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_uint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_sint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_srgb.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_srgb.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_srgb.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_srgb.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_srgb.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_srgb.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_srgb.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_srgb.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r8g8b8a8_srgb.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_unorm_pack32.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_unorm_pack32.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_unorm_pack32.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_unorm_pack32.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_unorm_pack32.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_unorm_pack32.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_unorm_pack32.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_unorm_pack32.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_unorm_pack32.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uint_pack32.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uint_pack32.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uint_pack32.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uint_pack32.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uint_pack32.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uint_pack32.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uint_pack32.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uint_pack32.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uint_pack32.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uscaled_pack32.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uscaled_pack32.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uscaled_pack32.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uscaled_pack32.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uscaled_pack32.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uscaled_pack32.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uscaled_pack32.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uscaled_pack32.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.a2r10g10b10_uscaled_pack32.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_unorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_unorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_unorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_unorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_unorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_unorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_unorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_unorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_snorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_snorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_snorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_snorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_snorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_snorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_snorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_snorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_uint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sfloat.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sfloat.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sfloat.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sfloat.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sfloat.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sfloat.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sfloat.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16_sfloat.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_unorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_unorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_unorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_unorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_unorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_unorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_unorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_unorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_snorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_snorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_snorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_snorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_snorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_snorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_snorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_snorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_uint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sfloat.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sfloat.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sfloat.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sfloat.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sfloat.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sfloat.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sfloat.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16_sfloat.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_unorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_unorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_unorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_unorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_unorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_unorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_unorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_unorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_snorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_snorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_snorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_snorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_snorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_snorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_snorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_snorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_uint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sfloat.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sfloat.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sfloat.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sfloat.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sfloat.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sfloat.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sfloat.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16_sfloat.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_unorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_unorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_unorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_unorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_unorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_unorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_unorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_unorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_snorm.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_snorm.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_snorm.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_snorm.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_snorm.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_snorm.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_snorm.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_snorm.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sscaled.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sscaled.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sscaled.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sscaled.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sscaled.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sscaled.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sscaled.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sscaled.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_uint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sfloat.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sfloat.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sfloat.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sfloat.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sfloat.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sfloat.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sfloat.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r16g16b16a16_sfloat.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_uint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_uint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_uint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_uint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_uint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_uint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_uint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_uint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sfloat.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sfloat.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sfloat.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sfloat.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sfloat.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sfloat.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sfloat.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32_sfloat.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_uint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_uint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_uint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_uint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_uint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_uint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_uint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_uint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sfloat.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sfloat.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sfloat.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sfloat.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sfloat.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sfloat.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sfloat.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32_sfloat.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_uint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_uint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_uint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_uint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_uint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_uint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_uint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_uint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sfloat.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sfloat.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sfloat.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sfloat.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sfloat.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sfloat.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sfloat.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32_sfloat.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_uint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_uint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_uint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_uint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_uint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_uint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_uint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_uint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sint.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sint.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sint.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sint.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sint.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sint.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sint.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sint.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sfloat.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sfloat.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sfloat.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sfloat.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sfloat.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sfloat.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sfloat.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.r32g32b32a32_sfloat.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.b10g11r11_ufloat_pack32.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.b10g11r11_ufloat_pack32.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.b10g11r11_ufloat_pack32.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.b10g11r11_ufloat_pack32.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.b10g11r11_ufloat_pack32.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.b10g11r11_ufloat_pack32.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.b10g11r11_ufloat_pack32.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.b10g11r11_ufloat_pack32.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.b10g11r11_ufloat_pack32.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.e5b9g9r9_ufloat_pack32.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.e5b9g9r9_ufloat_pack32.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.e5b9g9r9_ufloat_pack32.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.e5b9g9r9_ufloat_pack32.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.e5b9g9r9_ufloat_pack32.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.e5b9g9r9_ufloat_pack32.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.e5b9g9r9_ufloat_pack32.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.e5b9g9r9_ufloat_pack32.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.e5b9g9r9_ufloat_pack32.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.b4g4r4a4_unorm_pack16.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.b4g4r4a4_unorm_pack16.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.b4g4r4a4_unorm_pack16.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.b4g4r4a4_unorm_pack16.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.b4g4r4a4_unorm_pack16.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.b4g4r4a4_unorm_pack16.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.b4g4r4a4_unorm_pack16.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.b4g4r4a4_unorm_pack16.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.b4g4r4a4_unorm_pack16.size.443x1
+dEQP-VK.pipeline.image.view_type.1d.format.b5g5r5a1_unorm_pack16.size.1x1
+dEQP-VK.pipeline.image.view_type.1d.format.b5g5r5a1_unorm_pack16.size.2x1
+dEQP-VK.pipeline.image.view_type.1d.format.b5g5r5a1_unorm_pack16.size.32x1
+dEQP-VK.pipeline.image.view_type.1d.format.b5g5r5a1_unorm_pack16.size.128x1
+dEQP-VK.pipeline.image.view_type.1d.format.b5g5r5a1_unorm_pack16.size.512x1
+dEQP-VK.pipeline.image.view_type.1d.format.b5g5r5a1_unorm_pack16.size.3x1
+dEQP-VK.pipeline.image.view_type.1d.format.b5g5r5a1_unorm_pack16.size.13x1
+dEQP-VK.pipeline.image.view_type.1d.format.b5g5r5a1_unorm_pack16.size.127x1
+dEQP-VK.pipeline.image.view_type.1d.format.b5g5r5a1_unorm_pack16.size.443x1
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4_unorm_pack8.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r4g4b4a4_unorm_pack16.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g6b5_unorm_pack16.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r5g5b5a1_unorm_pack16.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_unorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_snorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_uint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_sint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8_srgb.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_unorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_snorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_uint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_sint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8_srgb.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_unorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_snorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_uint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_sint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8_srgb.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_unorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_snorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_uint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_sint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r8g8b8a8_srgb.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_unorm_pack32.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uint_pack32.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_unorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_snorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_uint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16_sfloat.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_unorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_snorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_uint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16_sfloat.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_unorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_snorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_uint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16_sfloat.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_unorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_snorm.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sscaled.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_uint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r16g16b16a16_sfloat.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_uint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32_sfloat.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_uint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32_sfloat.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_uint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32_sfloat.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_uint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sint.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.r32g32b32a32_sfloat.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b10g11r11_ufloat_pack32.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b4g4r4a4_unorm_pack16.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.2x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.2x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.32x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.32x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.128x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.128x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.512x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.512x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.3x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.3x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.13x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.13x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.127x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.127x1_array_of_6
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.443x1_array_of_3
+dEQP-VK.pipeline.image.view_type.1d_array.format.b5g5r5a1_unorm_pack16.size.443x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4_unorm_pack8.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4_unorm_pack8.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4_unorm_pack8.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4_unorm_pack8.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4_unorm_pack8.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4_unorm_pack8.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4_unorm_pack8.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4_unorm_pack8.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4_unorm_pack8.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4b4a4_unorm_pack16.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4b4a4_unorm_pack16.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4b4a4_unorm_pack16.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4b4a4_unorm_pack16.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4b4a4_unorm_pack16.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4b4a4_unorm_pack16.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4b4a4_unorm_pack16.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4b4a4_unorm_pack16.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r4g4b4a4_unorm_pack16.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r5g6b5_unorm_pack16.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r5g6b5_unorm_pack16.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r5g6b5_unorm_pack16.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r5g6b5_unorm_pack16.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r5g6b5_unorm_pack16.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r5g6b5_unorm_pack16.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r5g6b5_unorm_pack16.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r5g6b5_unorm_pack16.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r5g6b5_unorm_pack16.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r5g5b5a1_unorm_pack16.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r5g5b5a1_unorm_pack16.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r5g5b5a1_unorm_pack16.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r5g5b5a1_unorm_pack16.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r5g5b5a1_unorm_pack16.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r5g5b5a1_unorm_pack16.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r5g5b5a1_unorm_pack16.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r5g5b5a1_unorm_pack16.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r5g5b5a1_unorm_pack16.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8_unorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8_unorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8_unorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_unorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_unorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8_unorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8_snorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8_snorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8_snorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_snorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_snorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8_snorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8_uint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8_sint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8_srgb.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8_srgb.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8_srgb.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8_srgb.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8_srgb.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8_srgb.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_srgb.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8_srgb.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8_srgb.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_unorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_unorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_unorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_unorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_unorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_unorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_snorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_snorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_snorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_snorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_snorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_snorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_uint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_sint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_srgb.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_srgb.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_srgb.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_srgb.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_srgb.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_srgb.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_srgb.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_srgb.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8_srgb.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_unorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_unorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_unorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_unorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_unorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_unorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_snorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_snorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_snorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_snorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_snorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_snorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_uint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_sint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_srgb.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_srgb.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_srgb.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_srgb.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_srgb.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_srgb.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_srgb.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_srgb.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8_srgb.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_unorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_unorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_unorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_unorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_unorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_unorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_snorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_snorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_snorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_snorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_snorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_snorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_uint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_sint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_srgb.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_srgb.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_srgb.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_srgb.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_srgb.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_srgb.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_srgb.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_srgb.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r8g8b8a8_srgb.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_unorm_pack32.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_unorm_pack32.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_unorm_pack32.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_unorm_pack32.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_unorm_pack32.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_unorm_pack32.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_unorm_pack32.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_unorm_pack32.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_unorm_pack32.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uint_pack32.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uint_pack32.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uint_pack32.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uint_pack32.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uint_pack32.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uint_pack32.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uint_pack32.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uint_pack32.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uint_pack32.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uscaled_pack32.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uscaled_pack32.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uscaled_pack32.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uscaled_pack32.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uscaled_pack32.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uscaled_pack32.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uscaled_pack32.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uscaled_pack32.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.a2r10g10b10_uscaled_pack32.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16_unorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16_unorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16_unorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_unorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_unorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16_unorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16_snorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16_snorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16_snorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_snorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_snorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16_snorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16_uint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sfloat.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sfloat.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sfloat.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sfloat.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sfloat.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16_sfloat.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_unorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_unorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_unorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_unorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_unorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_unorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_snorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_snorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_snorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_snorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_snorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_snorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_uint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sfloat.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sfloat.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sfloat.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sfloat.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sfloat.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16_sfloat.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_unorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_unorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_unorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_unorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_unorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_unorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_snorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_snorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_snorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_snorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_snorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_snorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_uint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sfloat.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sfloat.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sfloat.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sfloat.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sfloat.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16_sfloat.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_unorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_unorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_unorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_unorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_unorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_unorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_unorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_snorm.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_snorm.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_snorm.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_snorm.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_snorm.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_snorm.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_snorm.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sscaled.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sscaled.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sscaled.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sscaled.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sscaled.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sscaled.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sscaled.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_uint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sfloat.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sfloat.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sfloat.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sfloat.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sfloat.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r16g16b16a16_sfloat.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r32_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r32_uint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r32_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r32_uint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r32_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r32_uint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32_uint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32_uint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r32_uint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sfloat.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sfloat.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sfloat.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sfloat.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sfloat.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r32_sfloat.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_uint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_uint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_uint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_uint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_uint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_uint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sfloat.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sfloat.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sfloat.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sfloat.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sfloat.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32_sfloat.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_uint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_uint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_uint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_uint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_uint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_uint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sfloat.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sfloat.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sfloat.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sfloat.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sfloat.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32_sfloat.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_uint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_uint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_uint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_uint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_uint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_uint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_uint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sint.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sint.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sint.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sint.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sint.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sint.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sint.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sfloat.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sfloat.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sfloat.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sfloat.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sfloat.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sfloat.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.r32g32b32a32_sfloat.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.b10g11r11_ufloat_pack32.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.b10g11r11_ufloat_pack32.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.b10g11r11_ufloat_pack32.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.b10g11r11_ufloat_pack32.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.b10g11r11_ufloat_pack32.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.b10g11r11_ufloat_pack32.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.b10g11r11_ufloat_pack32.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.b10g11r11_ufloat_pack32.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.b10g11r11_ufloat_pack32.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.e5b9g9r9_ufloat_pack32.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.e5b9g9r9_ufloat_pack32.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.e5b9g9r9_ufloat_pack32.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.e5b9g9r9_ufloat_pack32.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.e5b9g9r9_ufloat_pack32.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.e5b9g9r9_ufloat_pack32.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.e5b9g9r9_ufloat_pack32.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.e5b9g9r9_ufloat_pack32.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.e5b9g9r9_ufloat_pack32.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.b4g4r4a4_unorm_pack16.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.b4g4r4a4_unorm_pack16.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.b4g4r4a4_unorm_pack16.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.b4g4r4a4_unorm_pack16.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.b4g4r4a4_unorm_pack16.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.b4g4r4a4_unorm_pack16.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.b4g4r4a4_unorm_pack16.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.b4g4r4a4_unorm_pack16.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.b4g4r4a4_unorm_pack16.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.b5g5r5a1_unorm_pack16.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.b5g5r5a1_unorm_pack16.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.b5g5r5a1_unorm_pack16.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.b5g5r5a1_unorm_pack16.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.b5g5r5a1_unorm_pack16.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.b5g5r5a1_unorm_pack16.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.b5g5r5a1_unorm_pack16.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.b5g5r5a1_unorm_pack16.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.b5g5r5a1_unorm_pack16.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a1_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.etc2_r8g8b8a8_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_snorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_snorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_snorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_snorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_snorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_snorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_snorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_snorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11_snorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_snorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_snorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_snorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_snorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_snorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_snorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_snorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_snorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.eac_r11g11_snorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_4x4_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x4_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_5x5_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x5_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_6x6_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x5_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x6_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_8x8_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x5_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x6_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x8_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_10x10_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x10_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_unorm_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_unorm_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_unorm_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_unorm_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_unorm_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_unorm_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_unorm_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_srgb_block.size.1x1
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_srgb_block.size.2x2
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_srgb_block.size.3x3
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_srgb_block.size.8x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_srgb_block.size.32x16
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_srgb_block.size.13x23
+dEQP-VK.pipeline.image.view_type.2d.format.astc_12x12_srgb_block.size.23x8
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4_unorm_pack8.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r4g4b4a4_unorm_pack16.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g6b5_unorm_pack16.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r5g5b5a1_unorm_pack16.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_unorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_snorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_uint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_sint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8_srgb.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_unorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_snorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_uint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_sint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8_srgb.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_unorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_snorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_uint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_sint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8_srgb.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_unorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_snorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_uint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_sint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r8g8b8a8_srgb.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_unorm_pack32.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uint_pack32.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_unorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_snorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_uint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16_sfloat.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_unorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_snorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_uint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16_sfloat.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_unorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_snorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_uint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16_sfloat.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_unorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_snorm.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sscaled.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_uint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r16g16b16a16_sfloat.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_uint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32_sfloat.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_uint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32_sfloat.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_uint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32_sfloat.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_uint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sint.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.r32g32b32a32_sfloat.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b10g11r11_ufloat_pack32.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b4g4r4a4_unorm_pack16.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.b5g5r5a1_unorm_pack16.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11_snorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.eac_r11g11_snorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_4x4_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x4_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_5x5_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x5_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_6x6_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x5_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x6_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_8x8_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x5_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x6_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x8_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_10x10_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x10_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_unorm_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.1x1_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.1x1_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.2x2_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.2x2_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.32x32_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.3x3_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.3x3_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.13x13_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.8x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.8x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.32x16_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.32x16_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.13x23_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.13x23_array_of_6
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.23x8_array_of_3
+dEQP-VK.pipeline.image.view_type.2d_array.format.astc_12x12_srgb_block.size.23x8_array_of_6
+dEQP-VK.pipeline.image.view_type.3d.format.r4g4_unorm_pack8.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r4g4_unorm_pack8.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r4g4_unorm_pack8.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r4g4_unorm_pack8.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r4g4_unorm_pack8.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r4g4b4a4_unorm_pack16.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r4g4b4a4_unorm_pack16.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r4g4b4a4_unorm_pack16.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r4g4b4a4_unorm_pack16.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r4g4b4a4_unorm_pack16.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r5g6b5_unorm_pack16.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r5g6b5_unorm_pack16.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r5g6b5_unorm_pack16.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r5g6b5_unorm_pack16.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r5g6b5_unorm_pack16.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r5g5b5a1_unorm_pack16.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r5g5b5a1_unorm_pack16.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r5g5b5a1_unorm_pack16.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r5g5b5a1_unorm_pack16.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r5g5b5a1_unorm_pack16.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8_unorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8_unorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8_unorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8_unorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8_unorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8_snorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8_snorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8_snorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8_snorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8_snorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8_uscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8_uscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8_uscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8_uscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8_uscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8_sscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8_sscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8_sscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8_sscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8_sscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8_uint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8_uint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8_uint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8_uint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8_uint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8_sint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8_sint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8_sint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8_sint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8_sint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8_srgb.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8_srgb.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8_srgb.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8_srgb.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8_srgb.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_unorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_unorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_unorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_unorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_unorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_snorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_snorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_snorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_snorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_snorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_uscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_uscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_uscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_uscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_uscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_sscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_sscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_sscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_sscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_sscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_uint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_uint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_uint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_uint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_uint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_sint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_sint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_sint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_sint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_sint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_srgb.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_srgb.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_srgb.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_srgb.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8_srgb.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_unorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_unorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_unorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_unorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_unorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_snorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_snorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_snorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_snorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_snorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_uscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_uscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_uscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_uscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_uscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_sscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_sscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_sscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_sscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_sscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_uint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_uint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_uint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_uint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_uint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_sint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_sint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_sint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_sint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_sint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_srgb.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_srgb.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_srgb.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_srgb.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8_srgb.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_unorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_unorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_unorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_unorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_unorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_snorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_snorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_snorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_snorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_snorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_uscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_uscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_uscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_uscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_uscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_sscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_sscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_sscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_sscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_sscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_uint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_uint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_uint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_uint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_uint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_sint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_sint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_sint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_sint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_sint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_srgb.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_srgb.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_srgb.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_srgb.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r8g8b8a8_srgb.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_unorm_pack32.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_unorm_pack32.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_unorm_pack32.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_unorm_pack32.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_unorm_pack32.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_uint_pack32.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_uint_pack32.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_uint_pack32.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_uint_pack32.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_uint_pack32.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_uscaled_pack32.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_uscaled_pack32.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_uscaled_pack32.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_uscaled_pack32.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.a2r10g10b10_uscaled_pack32.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16_unorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16_unorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16_unorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16_unorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16_unorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16_snorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16_snorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16_snorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16_snorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16_snorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16_uscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16_uscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16_uscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16_uscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16_uscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16_uint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16_uint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16_uint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16_uint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16_uint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sfloat.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sfloat.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sfloat.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sfloat.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16_sfloat.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_unorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_unorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_unorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_unorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_unorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_snorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_snorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_snorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_snorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_snorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_uscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_uscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_uscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_uscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_uscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_uint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_uint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_uint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_uint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_uint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sfloat.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sfloat.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sfloat.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sfloat.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16_sfloat.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_unorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_unorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_unorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_unorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_unorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_snorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_snorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_snorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_snorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_snorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_uscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_uscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_uscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_uscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_uscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_uint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_uint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_uint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_uint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_uint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sfloat.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sfloat.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sfloat.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sfloat.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16_sfloat.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_unorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_unorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_unorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_unorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_unorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_snorm.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_snorm.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_snorm.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_snorm.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_snorm.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_uscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_uscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_uscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_uscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_uscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sscaled.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sscaled.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sscaled.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sscaled.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sscaled.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_uint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_uint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_uint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_uint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_uint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sfloat.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sfloat.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sfloat.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sfloat.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r16g16b16a16_sfloat.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r32_uint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r32_uint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r32_uint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r32_uint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r32_uint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r32_sint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r32_sint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r32_sint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r32_sint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r32_sint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r32_sfloat.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r32_sfloat.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r32_sfloat.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r32_sfloat.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r32_sfloat.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_uint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_uint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_uint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_uint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_uint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_sint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_sint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_sint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_sint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_sint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_sfloat.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_sfloat.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_sfloat.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_sfloat.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32_sfloat.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_uint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_uint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_uint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_uint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_uint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_sint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_sint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_sint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_sint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_sint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_sfloat.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_sfloat.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_sfloat.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_sfloat.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32_sfloat.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_uint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_uint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_uint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_uint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_uint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_sint.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_sint.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_sint.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_sint.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_sint.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_sfloat.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_sfloat.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_sfloat.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_sfloat.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.r32g32b32a32_sfloat.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.b10g11r11_ufloat_pack32.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.b10g11r11_ufloat_pack32.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.b10g11r11_ufloat_pack32.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.b10g11r11_ufloat_pack32.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.b10g11r11_ufloat_pack32.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.e5b9g9r9_ufloat_pack32.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.e5b9g9r9_ufloat_pack32.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.e5b9g9r9_ufloat_pack32.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.e5b9g9r9_ufloat_pack32.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.e5b9g9r9_ufloat_pack32.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.b4g4r4a4_unorm_pack16.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.b4g4r4a4_unorm_pack16.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.b4g4r4a4_unorm_pack16.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.b4g4r4a4_unorm_pack16.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.b4g4r4a4_unorm_pack16.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.b5g5r5a1_unorm_pack16.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.b5g5r5a1_unorm_pack16.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.b5g5r5a1_unorm_pack16.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.b5g5r5a1_unorm_pack16.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.b5g5r5a1_unorm_pack16.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a1_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a1_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a1_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a1_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a1_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a1_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a1_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a1_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a1_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a1_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a8_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a8_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a8_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a8_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a8_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a8_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a8_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a8_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a8_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.etc2_r8g8b8a8_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11_snorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11_snorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11_snorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11_snorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11_snorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11g11_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11g11_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11g11_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11g11_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11g11_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11g11_snorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11g11_snorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11g11_snorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11g11_snorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.eac_r11g11_snorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_4x4_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_4x4_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_4x4_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_4x4_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_4x4_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_4x4_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_4x4_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_4x4_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_4x4_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_4x4_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x4_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x4_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x4_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x4_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x4_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x4_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x4_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x4_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x4_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x4_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x5_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x5_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x5_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x5_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x5_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x5_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x5_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x5_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x5_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_5x5_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x5_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x5_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x5_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x5_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x5_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x5_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x5_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x5_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x5_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x5_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x6_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x6_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x6_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x6_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x6_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x6_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x6_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x6_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x6_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_6x6_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x5_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x5_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x5_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x5_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x5_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x5_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x5_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x5_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x5_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x5_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x6_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x6_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x6_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x6_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x6_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x6_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x6_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x6_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x6_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x6_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x8_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x8_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x8_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x8_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x8_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x8_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x8_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x8_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x8_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_8x8_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x5_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x5_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x5_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x5_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x5_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x5_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x5_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x5_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x5_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x5_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x6_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x6_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x6_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x6_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x6_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x6_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x6_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x6_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x6_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x6_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x8_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x8_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x8_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x8_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x8_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x8_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x8_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x8_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x8_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x8_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x10_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x10_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x10_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x10_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x10_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x10_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x10_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x10_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x10_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_10x10_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x10_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x10_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x10_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x10_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x10_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x10_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x10_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x10_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x10_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x10_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x12_unorm_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x12_unorm_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x12_unorm_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x12_unorm_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x12_unorm_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x12_srgb_block.size.1x1x1
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x12_srgb_block.size.2x2x2
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x12_srgb_block.size.16x16x16
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x12_srgb_block.size.32x16x8
+dEQP-VK.pipeline.image.view_type.3d.format.astc_12x12_srgb_block.size.8x16x32
+dEQP-VK.pipeline.image.view_type.cube.format.r4g4_unorm_pack8.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r4g4_unorm_pack8.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r4g4b4a4_unorm_pack16.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r4g4b4a4_unorm_pack16.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r5g6b5_unorm_pack16.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r5g6b5_unorm_pack16.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r5g5b5a1_unorm_pack16.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r5g5b5a1_unorm_pack16.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8_srgb.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8_srgb.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_srgb.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8_srgb.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_srgb.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8_srgb.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_srgb.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r8g8b8a8_srgb.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.a2r10g10b10_unorm_pack32.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.a2r10g10b10_unorm_pack32.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.a2r10g10b10_uint_pack32.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.a2r10g10b10_uint_pack32.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.a2r10g10b10_uscaled_pack32.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.a2r10g10b10_uscaled_pack32.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_unorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_unorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_snorm.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_snorm.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_uscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_uscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_sscaled.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_sscaled.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r16g16b16a16_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r32_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r32_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r32_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r32_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r32_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r32_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32b32_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32b32_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32b32_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32b32_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32b32_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32b32_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32b32a32_uint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32b32a32_uint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32b32a32_sint.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32b32a32_sint.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32b32a32_sfloat.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.r32g32b32a32_sfloat.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.b10g11r11_ufloat_pack32.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.b10g11r11_ufloat_pack32.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.e5b9g9r9_ufloat_pack32.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.e5b9g9r9_ufloat_pack32.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.b4g4r4a4_unorm_pack16.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.b4g4r4a4_unorm_pack16.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.b5g5r5a1_unorm_pack16.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.b5g5r5a1_unorm_pack16.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.etc2_r8g8b8_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.etc2_r8g8b8_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.etc2_r8g8b8_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.etc2_r8g8b8_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.etc2_r8g8b8a1_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.etc2_r8g8b8a1_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.etc2_r8g8b8a1_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.etc2_r8g8b8a1_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.etc2_r8g8b8a8_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.etc2_r8g8b8a8_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.etc2_r8g8b8a8_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.etc2_r8g8b8a8_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.eac_r11_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.eac_r11_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.eac_r11_snorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.eac_r11_snorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.eac_r11g11_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.eac_r11g11_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.eac_r11g11_snorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.eac_r11g11_snorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_4x4_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_4x4_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_4x4_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_4x4_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_5x4_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_5x4_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_5x4_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_5x4_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_5x5_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_5x5_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_5x5_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_5x5_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_6x5_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_6x5_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_6x5_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_6x5_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_6x6_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_6x6_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_6x6_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_6x6_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_8x5_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_8x5_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_8x5_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_8x5_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_8x6_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_8x6_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_8x6_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_8x6_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_8x8_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_8x8_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_8x8_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_8x8_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x5_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x5_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x5_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x5_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x6_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x6_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x6_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x6_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x8_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x8_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x8_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x8_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x10_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x10_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x10_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_10x10_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_12x10_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_12x10_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_12x10_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_12x10_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_12x12_unorm_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_12x12_unorm_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube.format.astc_12x12_srgb_block.size.32x32
+dEQP-VK.pipeline.image.view_type.cube.format.astc_12x12_srgb_block.size.13x13
+dEQP-VK.pipeline.image.view_type.cube_array.format.r4g4_unorm_pack8.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r4g4_unorm_pack8.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r4g4_unorm_pack8.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r4g4_unorm_pack8.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r4g4b4a4_unorm_pack16.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r4g4b4a4_unorm_pack16.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r4g4b4a4_unorm_pack16.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r4g4b4a4_unorm_pack16.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r5g6b5_unorm_pack16.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r5g6b5_unorm_pack16.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r5g6b5_unorm_pack16.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r5g6b5_unorm_pack16.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r5g5b5a1_unorm_pack16.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r5g5b5a1_unorm_pack16.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r5g5b5a1_unorm_pack16.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r5g5b5a1_unorm_pack16.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_unorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_unorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_snorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_snorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_uscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_uscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_sscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_sscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_uint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_uint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_sint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_sint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_srgb.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_srgb.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_srgb.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8_srgb.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_unorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_unorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_snorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_snorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_uscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_uscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_sscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_sscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_uint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_uint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_sint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_sint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_srgb.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_srgb.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_srgb.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8_srgb.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_unorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_unorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_snorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_snorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_uscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_uscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_sscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_sscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_uint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_uint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_sint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_sint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_srgb.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_srgb.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_srgb.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8_srgb.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_unorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_unorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_snorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_snorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_uscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_uscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_sscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_sscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_uint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_uint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_sint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_sint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_srgb.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_srgb.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_srgb.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r8g8b8a8_srgb.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.a2r10g10b10_unorm_pack32.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.a2r10g10b10_unorm_pack32.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.a2r10g10b10_unorm_pack32.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.a2r10g10b10_unorm_pack32.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.a2r10g10b10_uint_pack32.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.a2r10g10b10_uint_pack32.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.a2r10g10b10_uint_pack32.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.a2r10g10b10_uint_pack32.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.a2r10g10b10_uscaled_pack32.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.a2r10g10b10_uscaled_pack32.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.a2r10g10b10_uscaled_pack32.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.a2r10g10b10_uscaled_pack32.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_unorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_unorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_snorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_snorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_uscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_uscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_sscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_sscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_uint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_uint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_sint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_sint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_sfloat.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16_sfloat.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_unorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_unorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_snorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_snorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_uscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_uscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_sscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_sscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_uint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_uint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_sint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_sint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_sfloat.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16_sfloat.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_unorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_unorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_snorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_snorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_uscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_uscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_sscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_sscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_uint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_uint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_sint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_sint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_sfloat.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16_sfloat.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_unorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_unorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_unorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_unorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_snorm.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_snorm.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_snorm.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_snorm.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_uscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_uscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_uscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_uscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_sscaled.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_sscaled.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_sscaled.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_sscaled.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_uint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_uint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_sint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_sint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_sfloat.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r16g16b16a16_sfloat.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32_uint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32_uint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32_sint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32_sint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32_sfloat.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32_sfloat.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32_uint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32_uint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32_sint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32_sint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32_sfloat.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32_sfloat.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32_uint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32_uint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32_sint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32_sint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32_sfloat.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32_sfloat.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32a32_uint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32a32_uint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32a32_uint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32a32_uint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32a32_sint.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32a32_sint.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32a32_sint.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32a32_sint.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32a32_sfloat.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32a32_sfloat.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32a32_sfloat.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.r32g32b32a32_sfloat.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.b10g11r11_ufloat_pack32.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.b10g11r11_ufloat_pack32.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.b10g11r11_ufloat_pack32.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.b10g11r11_ufloat_pack32.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.b4g4r4a4_unorm_pack16.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.b4g4r4a4_unorm_pack16.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.b4g4r4a4_unorm_pack16.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.b4g4r4a4_unorm_pack16.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.b5g5r5a1_unorm_pack16.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.b5g5r5a1_unorm_pack16.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.b5g5r5a1_unorm_pack16.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.b5g5r5a1_unorm_pack16.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11_snorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11_snorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11_snorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11_snorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11g11_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11g11_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11g11_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11g11_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11g11_snorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11g11_snorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11g11_snorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.eac_r11g11_snorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_4x4_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_4x4_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_4x4_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_4x4_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_4x4_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_4x4_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_4x4_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_4x4_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x4_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x4_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x4_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x4_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x4_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x4_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x4_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x4_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x5_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x5_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x5_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x5_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x5_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x5_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x5_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_5x5_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x5_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x5_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x5_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x5_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x5_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x5_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x5_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x5_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x6_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x6_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x6_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x6_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x6_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x6_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x6_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_6x6_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x5_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x5_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x5_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x5_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x5_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x5_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x5_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x5_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x6_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x6_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x6_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x6_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x6_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x6_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x6_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x6_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x8_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x8_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x8_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x8_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x8_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x8_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x8_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_8x8_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x5_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x5_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x5_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x5_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x5_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x5_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x5_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x5_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x6_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x6_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x6_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x6_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x6_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x6_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x6_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x6_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x8_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x8_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x8_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x8_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x8_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x8_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x8_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x8_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x10_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x10_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x10_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x10_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x10_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x10_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x10_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_10x10_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x10_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x10_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x10_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x10_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x10_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x10_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x10_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x10_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x12_unorm_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x12_unorm_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x12_unorm_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x12_unorm_block.size.13x13_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x12_srgb_block.size.32x32_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x12_srgb_block.size.32x32_array_of_36
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x12_srgb_block.size.13x13_array_of_6
+dEQP-VK.pipeline.image.view_type.cube_array.format.astc_12x12_srgb_block.size.13x13_array_of_36
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.1d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.2d_array.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4_unorm_pack8.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r4g4b4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g6b5_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r5g5b5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2b10g10r10_unorm_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uint_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b10g11r11_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.e5b9g9r9_ufloat_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b4g4r4a4_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b5g5r5a1_unorm_pack16.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8a8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_srgb.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8_snorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.b8g8r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8_unorm.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16a16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32g32b32a32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r32_uint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16b16_sfloat.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r16g16_sint.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.r8g8b8a8_sscaled.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.a2r10g10b10_uscaled_pack32.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a1_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.etc2_r8g8b8a8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.eac_r11g11_snorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_4x4_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_5x4_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x5_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_6x6_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_8x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x6_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_10x8_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x10_unorm_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.all_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.all_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.all_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.all_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.all_mode_clamp_to_border_transparent_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.all_mode_clamp_to_border_opaque_black
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.all_mode_clamp_to_border_opaque_white
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirror_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_mirrored_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_repeat_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_border_mode_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_border_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_mirrored_repeat_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirror_clamp_to_edge_mode_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_border_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_repeat_mode_mirror_clamp_to_edge_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_clamp_to_edge_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_border_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_repeat_mode_mirrored_repeat_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_repeat_mode_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_mirrored_repeat_mode_mirror_clamp_to_edge_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_edge_mode_clamp_to_border
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_clamp_to_border_mode_repeat
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_repeat_mode_mirror_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirrored_repeat_mode_clamp_to_edge
+dEQP-VK.pipeline.sampler.view_type.3d.format.astc_12x12_srgb_block.address_modes.uvw_mode_clamp_to_edge_mode_mirror_clamp_to_edge_mode_mirrored_repeat
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uint_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uint_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32a32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32a32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32a32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32a32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32a32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_sscaled.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_sscaled.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_sscaled.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uscaled_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uscaled_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4_unorm_pack8.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r4g4b4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g6b5_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r5g5b5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2b10g10r10_unorm_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uint_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uint_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uint_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b10g11r11_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b4g4r4a4_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b5g5r5a1_unorm_pack16.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8a8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_srgb.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8_snorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.b8g8r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8_unorm.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16a16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32a32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32a32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32a32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32g32b32a32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_uint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_uint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_uint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_uint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_uint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_uint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_uint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_uint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r32_uint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.min_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16b16_sfloat.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_sint.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_sint.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_sint.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_sint.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_sint.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_sint.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r16g16_sint.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_sscaled.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_sscaled.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.r8g8b8a8_sscaled.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uscaled_pack32.min_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uscaled_pack32.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.a2r10g10b10_uscaled_pack32.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11_snorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.eac_r11g11_snorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_4x4_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_5x4_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x5_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_6x6_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x6_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_8x8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x6_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_10x8_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x10_unorm_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mag_filter.linear
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mag_filter.nearest
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.nearest.lod.select_bias_3_7
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.linear.lod.equal_min_3_max_3
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.linear.lod.select_min_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.linear.lod.select_max_4
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_2_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_2_5
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_3_1
+dEQP-VK.pipeline.sampler.view_type.cube_array.format.astc_12x12_srgb_block.mipmap.linear.lod.select_bias_3_7
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4_unorm_pack8.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4_unorm_pack8.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4_unorm_pack8.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4_unorm_pack8.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4_unorm_pack8.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4_unorm_pack8.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4b4a4_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4b4a4_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4b4a4_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4b4a4_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4b4a4_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4b4a4_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g6b5_unorm_pack16.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g6b5_unorm_pack16.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g6b5_unorm_pack16.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g6b5_unorm_pack16.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g6b5_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g6b5_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g5b5a1_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g5b5a1_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g5b5a1_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g5b5a1_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g5b5a1_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g5b5a1_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_srgb.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_srgb.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_srgb.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_srgb.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_srgb.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_srgb.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_srgb.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_srgb.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_srgb.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8_srgb.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_srgb.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_srgb.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_srgb.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_srgb.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_srgb.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_srgb.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_srgb.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_srgb.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_srgb.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8_srgb.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_srgb.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_srgb.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_srgb.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_srgb.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_srgb.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_srgb.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_srgb.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8_srgb.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_srgb.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_srgb.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_srgb.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_srgb.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_srgb.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_srgb.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_unorm_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_unorm_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_unorm_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_unorm_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_unorm_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_unorm_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_uint_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_uint_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_uint_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_uint_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_uint_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_uint_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2b10g10r10_uscaled_pack32.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2b10g10r10_uscaled_pack32.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2b10g10r10_uscaled_pack32.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2b10g10r10_uscaled_pack32.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2b10g10r10_uscaled_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2b10g10r10_uscaled_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.b10g11r11_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.b10g11r11_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.b10g11r11_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.b10g11r11_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.b10g11r11_ufloat_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.b10g11r11_ufloat_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.e5b9g9r9_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d.format.e5b9g9r9_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.e5b9g9r9_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.e5b9g9r9_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.e5b9g9r9_ufloat_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.e5b9g9r9_ufloat_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.b4g4r4a4_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.b4g4r4a4_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.b4g4r4a4_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.b4g4r4a4_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.b4g4r4a4_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.b4g4r4a4_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.b5g5r5a1_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.1d.format.b5g5r5a1_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.1d.format.b5g5r5a1_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.1d.format.b5g5r5a1_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.1d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d.format.b5g5r5a1_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.b5g5r5a1_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.1d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8_srgb.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8_srgb.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8_srgb.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.1d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4_unorm_pack8.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4_unorm_pack8.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4_unorm_pack8.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4_unorm_pack8.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4_unorm_pack8.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4_unorm_pack8.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4b4a4_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4b4a4_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4b4a4_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4b4a4_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4b4a4_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4b4a4_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g6b5_unorm_pack16.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g6b5_unorm_pack16.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g6b5_unorm_pack16.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g6b5_unorm_pack16.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g6b5_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g6b5_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g5b5a1_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g5b5a1_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g5b5a1_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g5b5a1_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g5b5a1_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g5b5a1_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_srgb.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_srgb.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_srgb.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_srgb.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_srgb.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_srgb.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_srgb.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_srgb.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_srgb.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8_srgb.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_srgb.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_srgb.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_srgb.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_srgb.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_srgb.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_srgb.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_srgb.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_srgb.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_srgb.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8_srgb.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_srgb.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_srgb.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_srgb.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_srgb.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_srgb.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_srgb.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_srgb.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8_srgb.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_srgb.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_srgb.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_srgb.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_srgb.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_srgb.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_srgb.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_unorm_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_unorm_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_unorm_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_unorm_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_unorm_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_unorm_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_uint_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_uint_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_uint_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_uint_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_uint_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_uint_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2b10g10r10_uscaled_pack32.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2b10g10r10_uscaled_pack32.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2b10g10r10_uscaled_pack32.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2b10g10r10_uscaled_pack32.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2b10g10r10_uscaled_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2b10g10r10_uscaled_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.b10g11r11_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.b10g11r11_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.b10g11r11_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.b10g11r11_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.b10g11r11_ufloat_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.b10g11r11_ufloat_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.e5b9g9r9_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.e5b9g9r9_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.e5b9g9r9_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.e5b9g9r9_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.e5b9g9r9_ufloat_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.e5b9g9r9_ufloat_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.b4g4r4a4_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.b4g4r4a4_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.b4g4r4a4_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.b4g4r4a4_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.b4g4r4a4_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.b4g4r4a4_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.b5g5r5a1_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.b5g5r5a1_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.b5g5r5a1_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.b5g5r5a1_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.b5g5r5a1_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.b5g5r5a1_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_unorm_block.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_unorm_block.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_unorm_block.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_unorm_block.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_srgb_block.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_srgb_block.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_srgb_block.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_srgb_block.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_unorm_block.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_unorm_block.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_unorm_block.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_unorm_block.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_snorm_block.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_snorm_block.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_snorm_block.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_snorm_block.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_snorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_snorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_snorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_snorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11_snorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_unorm_block.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_unorm_block.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_unorm_block.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_unorm_block.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_snorm_block.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_snorm_block.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_snorm_block.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_snorm_block.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_snorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_snorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8_srgb.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8_srgb.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8_srgb.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11_snorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.2d_array.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.3d.format.r4g4_unorm_pack8.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r4g4_unorm_pack8.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r4g4_unorm_pack8.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r4g4_unorm_pack8.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r4g4b4a4_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r4g4b4a4_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r4g4b4a4_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r4g4b4a4_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r5g6b5_unorm_pack16.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r5g6b5_unorm_pack16.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r5g6b5_unorm_pack16.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r5g6b5_unorm_pack16.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r5g5b5a1_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r5g5b5a1_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r5g5b5a1_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r5g5b5a1_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_srgb.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_srgb.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_srgb.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_srgb.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_srgb.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_srgb.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_srgb.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_srgb.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_srgb.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_srgb.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_srgb.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_srgb.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_srgb.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_srgb.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_srgb.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_srgb.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2r10g10b10_unorm_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2r10g10b10_unorm_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2r10g10b10_unorm_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2r10g10b10_unorm_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2r10g10b10_uint_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2r10g10b10_uint_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2r10g10b10_uint_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2r10g10b10_uint_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2b10g10r10_uscaled_pack32.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2b10g10r10_uscaled_pack32.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2b10g10r10_uscaled_pack32.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2b10g10r10_uscaled_pack32.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.b10g11r11_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.b10g11r11_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.b10g11r11_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.b10g11r11_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.e5b9g9r9_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.e5b9g9r9_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.e5b9g9r9_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.e5b9g9r9_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.b4g4r4a4_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.b4g4r4a4_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.b4g4r4a4_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.b4g4r4a4_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.b5g5r5a1_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.b5g5r5a1_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.b5g5r5a1_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.b5g5r5a1_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8_unorm_block.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8_unorm_block.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8_unorm_block.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8_unorm_block.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8_srgb_block.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8_srgb_block.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8_srgb_block.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8_srgb_block.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a1_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a1_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a1_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a1_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a1_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a1_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a1_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a1_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11_unorm_block.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11_unorm_block.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11_unorm_block.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11_unorm_block.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11_snorm_block.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11_snorm_block.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11_snorm_block.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11_snorm_block.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11_snorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11g11_unorm_block.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11g11_unorm_block.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11g11_unorm_block.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11g11_unorm_block.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11g11_snorm_block.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11g11_snorm_block.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11g11_snorm_block.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11g11_snorm_block.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_4x4_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_4x4_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_4x4_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_4x4_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_4x4_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_4x4_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_4x4_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_4x4_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x4_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x4_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x4_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x4_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x4_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x4_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x4_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x4_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x10_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x10_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x10_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x10_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x10_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x10_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x10_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x10_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x10_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x10_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x10_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x10_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x10_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x10_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x10_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x10_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x12_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x12_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x12_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x12_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x12_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x12_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x12_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x12_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.3d.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4_unorm_pack8.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4_unorm_pack8.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4_unorm_pack8.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4_unorm_pack8.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4_unorm_pack8.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4_unorm_pack8.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4b4a4_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4b4a4_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4b4a4_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4b4a4_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4b4a4_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4b4a4_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g6b5_unorm_pack16.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g6b5_unorm_pack16.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g6b5_unorm_pack16.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g6b5_unorm_pack16.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g6b5_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g6b5_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g5b5a1_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g5b5a1_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g5b5a1_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g5b5a1_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g5b5a1_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g5b5a1_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_srgb.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_srgb.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_srgb.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_srgb.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_srgb.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_srgb.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_srgb.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_srgb.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_srgb.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8_srgb.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_srgb.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_srgb.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_srgb.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_srgb.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_srgb.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_srgb.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_srgb.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_srgb.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_srgb.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8_srgb.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_srgb.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_srgb.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_srgb.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_srgb.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_srgb.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_srgb.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_srgb.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8_srgb.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_srgb.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_srgb.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_srgb.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_srgb.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_srgb.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_srgb.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_unorm_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_unorm_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_unorm_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_unorm_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_unorm_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_unorm_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_uint_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_uint_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_uint_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_uint_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_uint_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_uint_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2b10g10r10_uscaled_pack32.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2b10g10r10_uscaled_pack32.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2b10g10r10_uscaled_pack32.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2b10g10r10_uscaled_pack32.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2b10g10r10_uscaled_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2b10g10r10_uscaled_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_unorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_unorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_snorm.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_snorm.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sscaled.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sscaled.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_uint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_uint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sint.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sint.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sfloat.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sfloat.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.b10g11r11_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.b10g11r11_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.b10g11r11_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.b10g11r11_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.b10g11r11_ufloat_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.b10g11r11_ufloat_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.e5b9g9r9_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.e5b9g9r9_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.e5b9g9r9_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.e5b9g9r9_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.e5b9g9r9_ufloat_pack32.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.e5b9g9r9_ufloat_pack32.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.b4g4r4a4_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.b4g4r4a4_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.b4g4r4a4_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.b4g4r4a4_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.b4g4r4a4_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.b4g4r4a4_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.b5g5r5a1_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.b5g5r5a1_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.b5g5r5a1_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.b5g5r5a1_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.b5g5r5a1_unorm_pack16.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.b5g5r5a1_unorm_pack16.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_unorm_block.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_unorm_block.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_unorm_block.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_unorm_block.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_srgb_block.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_srgb_block.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_srgb_block.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_srgb_block.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_unorm_block.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_unorm_block.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_unorm_block.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_unorm_block.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_snorm_block.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_snorm_block.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_snorm_block.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_snorm_block.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_snorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_snorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_snorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_snorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11_snorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_unorm_block.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_unorm_block.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_unorm_block.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_unorm_block.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_snorm_block.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_snorm_block.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_snorm_block.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_snorm_block.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_snorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_snorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_unorm_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_unorm_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_srgb_block.subresource_range.array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_srgb_block.subresource_range.array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels_array_layer_second
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels_array_layer_last
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4_unorm_pack8.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r4g4b4a4_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g6b5_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r5g5b5a1_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8_srgb.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8_srgb.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8_srgb.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r8g8b8a8_srgb.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_unorm_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2r10g10b10_uint_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.a2b10g10r10_uscaled_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_unorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_snorm.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sscaled.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r16g16b16a16_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_uint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sint.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.r32g32b32a32_sfloat.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b10g11r11_ufloat_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.e5b9g9r9_ufloat_pack32.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b4g4r4a4_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.component_swizzle.b_g_r_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.component_swizzle.g_r_a_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.component_swizzle.r_a_b_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.component_swizzle.a_b_g_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.b5g5r5a1_unorm_pack16.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.component_swizzle.r_g_b_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.component_swizzle.g_b_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.component_swizzle.b_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.component_swizzle.one_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a1_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.etc2_r8g8b8a8_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.component_swizzle.r_zero_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.component_swizzle.zero_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.component_swizzle.zero_one_r_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.component_swizzle.one_r_zero_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11_snorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.component_swizzle.r_g_zero_one
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.component_swizzle.g_zero_one_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.component_swizzle.zero_one_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.component_swizzle.one_r_g_zero
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.eac_r11g11_snorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_4x4_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x4_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_5x5_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x5_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_6x6_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x5_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x6_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_8x8_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x5_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x6_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x8_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_10x10_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x10_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_unorm_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.component_swizzle.r_g_b_a
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.component_swizzle.g_b_a_r
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.component_swizzle.b_a_r_g
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.component_swizzle.a_r_g_b
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.subresource_range.base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.subresource_range.array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.subresource_range.array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels_base_array_layer
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels_array_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.subresource_range.lod_base_mip_level_array_base_and_size
+dEQP-VK.pipeline.image_view.view_type.cube_array.format.astc_12x12_srgb_block.subresource_range.lod_mip_levels_array_base_and_size
+dEQP-VK.pipeline.push_constant.graphics_pipeline.range_size_4
+dEQP-VK.pipeline.push_constant.graphics_pipeline.range_size_16
+dEQP-VK.pipeline.push_constant.graphics_pipeline.range_size_128
+dEQP-VK.pipeline.push_constant.graphics_pipeline.count_2_shader_VF
+dEQP-VK.pipeline.push_constant.graphics_pipeline.count_3shader_VGF
+dEQP-VK.pipeline.push_constant.graphics_pipeline.count_5_shader_VTGF
+dEQP-VK.pipeline.push_constant.graphics_pipeline.count_1_shader_VF
+dEQP-VK.pipeline.push_constant.graphics_pipeline.data_update_partial_1
+dEQP-VK.pipeline.push_constant.graphics_pipeline.data_update_partial_2
+dEQP-VK.pipeline.push_constant.graphics_pipeline.data_update_multiple
+dEQP-VK.pipeline.push_constant.compute_pipeline.simple_test
+dEQP-VK.pipeline.multisample.raster_samples.samples_2.primitive_triangle
+dEQP-VK.pipeline.multisample.raster_samples.samples_2.primitive_line
+dEQP-VK.pipeline.multisample.raster_samples.samples_2.primitive_point
+dEQP-VK.pipeline.multisample.raster_samples.samples_4.primitive_triangle
+dEQP-VK.pipeline.multisample.raster_samples.samples_4.primitive_line
+dEQP-VK.pipeline.multisample.raster_samples.samples_4.primitive_point
+dEQP-VK.pipeline.multisample.raster_samples.samples_8.primitive_triangle
+dEQP-VK.pipeline.multisample.raster_samples.samples_8.primitive_line
+dEQP-VK.pipeline.multisample.raster_samples.samples_8.primitive_point
+dEQP-VK.pipeline.multisample.raster_samples.samples_16.primitive_triangle
+dEQP-VK.pipeline.multisample.raster_samples.samples_16.primitive_line
+dEQP-VK.pipeline.multisample.raster_samples.samples_16.primitive_point
+dEQP-VK.pipeline.multisample.raster_samples.samples_32.primitive_triangle
+dEQP-VK.pipeline.multisample.raster_samples.samples_32.primitive_line
+dEQP-VK.pipeline.multisample.raster_samples.samples_32.primitive_point
+dEQP-VK.pipeline.multisample.raster_samples.samples_64.primitive_triangle
+dEQP-VK.pipeline.multisample.raster_samples.samples_64.primitive_line
+dEQP-VK.pipeline.multisample.raster_samples.samples_64.primitive_point
+dEQP-VK.pipeline.multisample.raster_samples_consistency.unique_colors_check
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_2.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_2.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_2.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_4.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_4.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_4.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_8.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_8.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_8.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_16.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_16.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_16.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_32.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_32.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_32.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_64.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_64.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_0.samples_64.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_2.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_2.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_2.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_4.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_4.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_4.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_8.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_8.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_8.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_16.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_16.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_16.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_32.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_32.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_32.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_64.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_64.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_25.samples_64.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_2.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_2.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_2.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_4.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_4.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_4.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_8.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_8.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_8.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_16.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_16.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_16.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_32.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_32.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_32.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_64.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_64.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_5.samples_64.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_2.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_2.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_2.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_4.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_4.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_4.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_8.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_8.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_8.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_16.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_16.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_16.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_32.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_32.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_32.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_64.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_64.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_0_75.samples_64.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_2.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_2.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_2.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_4.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_4.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_4.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_8.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_8.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_8.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_16.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_16.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_16.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_32.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_32.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_32.primitive_point
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_64.primitive_triangle
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_64.primitive_line
+dEQP-VK.pipeline.multisample.min_sample_shading.min_1_0.samples_64.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_2.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_2.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_2.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_4.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_4.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_4.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_8.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_8.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_8.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_16.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_16.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_16.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_32.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_32.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_32.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_64.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_64.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_on.samples_64.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_2.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_2.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_2.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_4.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_4.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_4.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_8.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_8.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_8.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_16.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_16.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_16.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_32.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_32.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_32.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_64.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_64.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_all_off.samples_64.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_2.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_2.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_2.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_4.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_4.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_4.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_8.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_8.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_8.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_16.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_16.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_16.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_32.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_32.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_32.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_64.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_64.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_one.samples_64.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_2.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_2.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_2.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_4.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_4.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_4.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_8.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_8.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_8.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_16.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_16.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_16.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_32.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_32.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_32.primitive_point
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_64.primitive_triangle
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_64.primitive_line
+dEQP-VK.pipeline.multisample.sample_mask.mask_random.samples_64.primitive_point
+dEQP-VK.pipeline.multisample.alpha_to_one.samples_2
+dEQP-VK.pipeline.multisample.alpha_to_one.samples_4
+dEQP-VK.pipeline.multisample.alpha_to_one.samples_8
+dEQP-VK.pipeline.multisample.alpha_to_one.samples_16
+dEQP-VK.pipeline.multisample.alpha_to_one.samples_32
+dEQP-VK.pipeline.multisample.alpha_to_one.samples_64
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_2.alpha_opaque
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_2.alpha_translucent
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_2.alpha_invisible
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_4.alpha_opaque
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_4.alpha_translucent
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_4.alpha_invisible
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_8.alpha_opaque
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_8.alpha_translucent
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_8.alpha_invisible
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_16.alpha_opaque
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_16.alpha_translucent
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_16.alpha_invisible
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_32.alpha_opaque
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_32.alpha_translucent
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_32.alpha_invisible
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_64.alpha_opaque
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_64.alpha_translucent
+dEQP-VK.pipeline.multisample.alpha_to_coverage.samples_64.alpha_invisible
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r8_unorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r8_snorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.uint_as_r8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.uint_as_r8_uint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.int_as_r8_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.int_as_r8_sint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r8g8_unorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r8g8_unorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r8g8_snorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r8g8_snorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec2_as_r8g8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec2_as_r8g8_uint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec2_as_r8g8_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec2_as_r8g8_sint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r8g8b8a8_unorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r8g8b8a8_unorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r8g8b8a8_snorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r8g8b8a8_snorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec4_as_r8g8b8a8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec4_as_r8g8b8a8_uint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec4_as_r8g8b8a8_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec4_as_r8g8b8a8_sint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_b8g8r8a8_unorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_b8g8r8a8_unorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r16_unorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r16_snorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.uint_as_r16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.uint_as_r16_uint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.int_as_r16_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.int_as_r16_sint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r16_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r16g16_unorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r16g16_unorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r16g16_snorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r16g16_snorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec2_as_r16g16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec2_as_r16g16_uint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec2_as_r16g16_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec2_as_r16g16_sint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r16g16_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r16g16_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r16g16b16a16_unorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r16g16b16a16_unorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r16g16b16a16_snorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r16g16b16a16_snorm_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec4_as_r16g16b16a16_uint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec4_as_r16g16b16a16_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec4_as_r16g16b16a16_sint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r16g16b16a16_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r16g16b16a16_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.uint_as_r32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.uint_as_r32_uint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.int_as_r32_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.int_as_r32_sint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r32_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec2_as_r32g32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec2_as_r32g32_uint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec2_as_r32g32_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec2_as_r32g32_sint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r32g32_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r32g32_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec3_as_r32g32b32_uint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec3_as_r32g32b32_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec3_as_r32g32b32_sint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_r32g32b32_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_r32g32b32_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec4_as_r32g32b32a32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.uvec4_as_r32g32b32a32_uint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec4_as_r32g32b32a32_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.ivec4_as_r32g32b32a32_sint_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r32g32b32a32_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r32g32b32a32_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r8g8_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r8g8_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r8g8_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r8g8_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r8g8_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r8g8_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r8g8_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r8g8_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r16_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r16_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r16_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r16_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_r8g8b8_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_r8g8b8_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_r8g8b8_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_r8g8b8_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_r8g8b8_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_r8g8b8_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_r8g8b8_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_r8g8b8_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_b8g8r8_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_b8g8r8_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_b8g8r8_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_b8g8r8_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_b8g8r8_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_b8g8r8_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_b8g8r8_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_b8g8r8_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r8g8b8a8_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r8g8b8a8_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r8g8b8a8_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r8g8b8a8_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r8g8b8a8_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r8g8b8a8_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r8g8b8a8_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r8g8b8a8_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_b8g8r8a8_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_b8g8r8a8_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_b8g8r8a8_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_b8g8r8a8_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_b8g8r8a8_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_b8g8r8a8_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_b8g8r8a8_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_b8g8r8a8_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r16g16_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r16g16_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r16g16_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r16g16_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r16g16_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r16g16_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r16g16_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r16g16_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_r16g16b16_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_r16g16b16_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_r16g16b16_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_r16g16b16_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_r16g16b16_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_r16g16b16_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_r16g16b16_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_r16g16b16_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r16g16b16a16_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r16g16b16a16_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r16g16b16a16_uscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r16g16b16a16_uscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r16g16b16a16_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r16g16b16a16_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r16g16b16a16_sscaled_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r16g16b16a16_sscaled_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r8_srgb_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.float_as_r8_srgb_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r8g8_srgb_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec2_as_r8g8_srgb_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r8g8_srgb_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat2_as_r8g8_srgb_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_r8g8b8_srgb_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_r8g8b8_srgb_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_r8g8b8_srgb_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_r8g8b8_srgb_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_b8g8r8_srgb_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec3_as_b8g8r8_srgb_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_b8g8r8_srgb_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat3_as_b8g8r8_srgb_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r8g8b8a8_srgb_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_r8g8b8a8_srgb_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r8g8b8a8_srgb_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_r8g8b8a8_srgb_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_b8g8r8a8_srgb_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.vec4_as_b8g8r8a8_srgb_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_b8g8r8a8_srgb_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.mat4_as_b8g8r8a8_srgb_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.double_as_r64_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.double_as_r64_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.dvec2_as_r64g64_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.dvec2_as_r64g64_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.dmat2_as_r64g64_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.dmat2_as_r64g64_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.dvec3_as_r64g64b64_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.dvec3_as_r64g64b64_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.dmat3_as_r64g64b64_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.dmat3_as_r64g64b64_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.dvec4_as_r64g64b64a64_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.dvec4_as_r64g64b64a64_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.single_attribute.dmat4_as_r64g64b64a64_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.single_attribute.dmat4_as_r64g64b64a64_sfloat_rate_instance
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-ivec2_as_r16g16_sint_rate_instance-ivec3_as_r32g32b32_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r8g8_sint_rate_instance-ivec4_as_r32g32b32a32_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r32g32_sint_rate_instance-uint_as_r8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-ivec2_as_r8g8_sint_rate_instance-uvec2_as_r32g32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r8g8_sint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r16g16_sint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r8g8_sint_rate_instance-float_as_r16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-ivec2_as_r16g16_sint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-ivec2_as_r32g32_sint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r16g16_sint_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-ivec2_as_r8g8_sint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r8g8_sint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-ivec2_as_r16g16_sint_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-ivec4_as_r8g8b8a8_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uint_as_r8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uvec2_as_r8g8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-float_as_r8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-uint_as_r16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-uvec2_as_r32g32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-float_as_r8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-vec2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-uint_as_r32_uint_rate_instance-uvec2_as_r8g8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uint_as_r16_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uint_as_r32_uint_rate_instance-uvec4_as_r32g32b32a32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-uint_as_r32_uint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-uint_as_r8_uint_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-uint_as_r16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-uint_as_r8_uint_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uint_as_r32_uint_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uint_as_r32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-uint_as_r8_uint_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-float_as_r8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-float_as_r16_unorm_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-float_as_r32_sfloat_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-float_as_r16_unorm_rate_instance-vec4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-float_as_r16_snorm_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-float_as_r8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-float_as_r32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-vec2_as_r16g16_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r8_sint_rate_vertex-vec4_as_r8g8b8a8_snorm_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-vec4_as_r8g8b8a8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-vec4_as_r8g8b8a8_snorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-mat2_as_r8g8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r16_sint_rate_vertex-mat2_as_r8g8_unorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.int_as_r32_sint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-ivec4_as_r32g32b32a32_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uint_as_r32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uvec2_as_r16g16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uvec4_as_r32g32b32a32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-float_as_r16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-uint_as_r32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-uvec2_as_r32g32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-vec2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-uint_as_r16_uint_rate_instance-uvec2_as_r16g16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-uint_as_r8_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-uint_as_r16_uint_rate_instance-uvec4_as_r32g32b32a32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-uint_as_r16_uint_rate_instance-float_as_r16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-uint_as_r16_uint_rate_instance-vec2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-uint_as_r32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-uint_as_r32_uint_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-uint_as_r8_uint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-uint_as_r32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-uint_as_r16_uint_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-vec4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-float_as_r8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-vec2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-float_as_r16_sfloat_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-float_as_r32_sfloat_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-float_as_r16_snorm_rate_instance-vec4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-float_as_r8_snorm_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-float_as_r16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-float_as_r8_snorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-vec2_as_r16g16_sfloat_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r16g16_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-vec4_as_r8g8b8a8_snorm_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-vec4_as_r16g16b16a16_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-vec4_as_r16g16b16a16_snorm_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-mat2_as_r16g16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r8g8_sint_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec2_as_r32g32_sint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-uint_as_r16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-uvec2_as_r32g32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-uvec4_as_r8g8b8a8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-vec2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r32_uint_rate_instance-uvec2_as_r16g16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r32_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r32_uint_rate_instance-uvec4_as_r8g8b8a8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r8_uint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r8_uint_rate_instance-vec2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r16_uint_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r8_uint_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r32_uint_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-uvec4_as_r32g32b32a32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-float_as_r16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-float_as_r16_sfloat_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-float_as_r16_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-float_as_r16_sfloat_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-float_as_r32_sfloat_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-float_as_r16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-float_as_r16_snorm_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec2_as_r16g16_sfloat_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec4_as_b8g8r8a8_unorm_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec4_as_b8g8r8a8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec4_as_r16g16b16a16_unorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-mat2_as_r8g8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-mat2_as_r16g16_unorm_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec3_as_r32g32b32_sint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uint_as_r32_uint_rate_instance-uvec2_as_r32g32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uint_as_r16_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uint_as_r8_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uint_as_r32_uint_rate_instance-float_as_r16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uint_as_r32_uint_rate_instance-vec2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uint_as_r16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uint_as_r16_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uint_as_r8_uint_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uint_as_r32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uint_as_r16_uint_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-uvec4_as_r32g32b32a32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-float_as_r8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-uvec4_as_r32g32b32a32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-float_as_r8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-float_as_r16_snorm_rate_instance-vec2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-float_as_r8_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-float_as_r16_unorm_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-float_as_r16_unorm_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-float_as_r8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-float_as_r16_snorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-vec4_as_r16g16b16a16_unorm_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-vec4_as_r32g32b32a32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec4_as_b8g8r8a8_unorm_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-mat2_as_r16g16_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-mat2_as_r16g16_sfloat_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-float_as_r16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-float_as_r32_sfloat_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-float_as_r16_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-float_as_r8_unorm_rate_instance-vec4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-float_as_r32_sfloat_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-float_as_r8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-float_as_r32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-vec2_as_r16g16_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-vec4_as_b8g8r8a8_unorm_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r8_uint_rate_vertex-vec4_as_b8g8r8a8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-vec4_as_r16g16b16a16_unorm_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r32_uint_rate_vertex-mat2_as_r8g8_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-mat2_as_r32g32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uint_as_r16_uint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-uvec4_as_r8g8b8a8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r16g16_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r32g32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r32g32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r32g32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r16g16_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r16g16_uint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-float_as_r16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r32g32_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r16g16_uint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r16g16_uint_rate_vertex-float_as_r32_sfloat_rate_instance-vec2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r8g8_uint_rate_vertex-float_as_r8_unorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r32g32_uint_rate_vertex-float_as_r16_unorm_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r16g16_uint_rate_vertex-float_as_r8_unorm_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r16g16_uint_rate_vertex-float_as_r8_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r32g32_uint_rate_vertex-float_as_r16_unorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r8g8_uint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r32g32_uint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-vec4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r32g32_uint_rate_vertex-vec2_as_r16g16_sfloat_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r16g16_uint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r32g32_uint_rate_vertex-vec2_as_r16g16_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r32g32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r32g32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r32g32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r16g16_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r8g8_uint_rate_vertex-vec4_as_r16g16b16a16_sfloat_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r8g8_uint_rate_vertex-vec4_as_r8g8b8a8_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r16g16_uint_rate_vertex-vec4_as_r8g8b8a8_unorm_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r8g8_uint_rate_vertex-mat2_as_r32g32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r16g16_uint_rate_vertex-mat2_as_r32g32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec2_as_r32g32_uint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-float_as_r16_snorm_rate_instance-vec2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-float_as_r16_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-float_as_r16_snorm_rate_instance-vec4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-float_as_r16_sfloat_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-float_as_r8_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-float_as_r16_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec2_as_r16g16_sfloat_rate_instance-vec4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec2_as_r16g16_snorm_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec4_as_r8g8b8a8_snorm_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec4_as_r16g16b16a16_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec4_as_r16g16b16a16_snorm_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-mat2_as_r8g8_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec3_as_r32g32b32_uint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-float_as_r16_unorm_rate_instance-vec2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-float_as_r16_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-float_as_r8_unorm_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r16g16b16a16_uint_rate_vertex-float_as_r32_sfloat_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-float_as_r32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r16g16b16a16_uint_rate_vertex-float_as_r16_snorm_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-vec2_as_r16g16_snorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r16g16b16a16_uint_rate_vertex-vec4_as_r8g8b8a8_unorm_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-vec4_as_r32g32b32a32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-vec4_as_r8g8b8a8_snorm_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r16g16b16a16_uint_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.uvec4_as_r16g16b16a16_uint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r8_snorm_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r8_snorm_rate_vertex-vec2_as_r16g16_sfloat_rate_instance-vec4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r8_unorm_rate_vertex-vec2_as_r16g16_snorm_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r8_unorm_rate_vertex-vec2_as_r16g16_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r8_snorm_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r16_unorm_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r8_snorm_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r16_sfloat_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r16_snorm_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r32_sfloat_rate_vertex-vec4_as_r16g16b16a16_unorm_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r8_snorm_rate_vertex-vec4_as_r16g16b16a16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r16_snorm_rate_vertex-vec4_as_r8g8b8a8_unorm_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r16_snorm_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r32_sfloat_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.float_as_r16_sfloat_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec2_as_r16g16_sfloat_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec2_as_r8g8_unorm_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec2_as_r16g16_snorm_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec2_as_r32g32_sfloat_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec2_as_r32g32_sfloat_rate_vertex-vec4_as_r32g32b32a32_sfloat_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec2_as_r32g32_sfloat_rate_vertex-vec4_as_r32g32b32a32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec2_as_r8g8_unorm_rate_vertex-vec4_as_r16g16b16a16_unorm_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec2_as_r32g32_sfloat_rate_vertex-mat2_as_r16g16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec2_as_r16g16_unorm_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec2_as_r8g8_unorm_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec3_as_r32g32b32_sfloat_rate_vertex-vec4_as_r16g16b16a16_sfloat_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec3_as_r32g32b32_sfloat_rate_vertex-vec4_as_r32g32b32a32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec3_as_r32g32b32_sfloat_rate_vertex-vec4_as_b8g8r8a8_unorm_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec3_as_r32g32b32_sfloat_rate_vertex-mat2_as_r16g16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec3_as_r32g32b32_sfloat_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec3_as_r32g32b32_sfloat_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec4_as_r8g8b8a8_snorm_rate_vertex-mat2_as_r16g16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec4_as_r16g16b16a16_sfloat_rate_vertex-mat2_as_r16g16_unorm_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.vec4_as_r16g16b16a16_snorm_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_one.attributes.mat2_as_r16g16_unorm_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-ivec2_as_r16g16_sint_rate_instance-ivec3_as_r32g32b32_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r8g8_sint_rate_instance-ivec4_as_r32g32b32a32_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r32g32_sint_rate_instance-uint_as_r8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-ivec2_as_r8g8_sint_rate_instance-uvec2_as_r32g32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r8g8_sint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r16g16_sint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r8g8_sint_rate_instance-float_as_r16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-ivec2_as_r16g16_sint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-ivec2_as_r32g32_sint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r16g16_sint_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-ivec2_as_r8g8_sint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec2_as_r8g8_sint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-ivec2_as_r16g16_sint_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-ivec4_as_r8g8b8a8_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uint_as_r8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uvec2_as_r8g8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-float_as_r8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-uint_as_r16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-uvec2_as_r32g32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-float_as_r8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-vec2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-uint_as_r32_uint_rate_instance-uvec2_as_r8g8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uint_as_r16_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uint_as_r32_uint_rate_instance-uvec4_as_r32g32b32a32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-uint_as_r32_uint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-uint_as_r8_uint_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-uint_as_r16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-uint_as_r8_uint_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uint_as_r32_uint_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uint_as_r32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-uint_as_r8_uint_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-float_as_r8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-float_as_r16_unorm_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-float_as_r32_sfloat_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-float_as_r16_unorm_rate_instance-vec4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-float_as_r16_snorm_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-float_as_r8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-float_as_r32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-vec2_as_r16g16_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r8_sint_rate_vertex-vec4_as_r8g8b8a8_snorm_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-vec4_as_r8g8b8a8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-vec4_as_r8g8b8a8_snorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-mat2_as_r8g8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r16_sint_rate_vertex-mat2_as_r8g8_unorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.int_as_r32_sint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-ivec4_as_r32g32b32a32_sint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uint_as_r32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uvec2_as_r16g16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-uvec4_as_r32g32b32a32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-float_as_r16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec3_as_r32g32b32_sint_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-uint_as_r32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-uvec2_as_r32g32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-vec2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-uint_as_r16_uint_rate_instance-uvec2_as_r16g16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-uint_as_r8_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-uint_as_r16_uint_rate_instance-uvec4_as_r32g32b32a32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-uint_as_r16_uint_rate_instance-float_as_r16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-uint_as_r16_uint_rate_instance-vec2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-uint_as_r32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-uint_as_r32_uint_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-uint_as_r8_uint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-uint_as_r32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-uint_as_r16_uint_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-vec4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-float_as_r8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-vec2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-float_as_r16_sfloat_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-float_as_r32_sfloat_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-float_as_r16_snorm_rate_instance-vec4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-float_as_r8_snorm_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-float_as_r16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-float_as_r8_snorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-vec2_as_r16g16_sfloat_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r16g16_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-vec4_as_r8g8b8a8_snorm_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-vec4_as_r16g16b16a16_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-vec4_as_r16g16b16a16_snorm_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-mat2_as_r16g16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r8g8_sint_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec2_as_r32g32_sint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-uint_as_r16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-uvec2_as_r32g32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-uvec4_as_r8g8b8a8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-vec2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r32g32b32a32_sint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r16g16b16a16_sint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-ivec4_as_r8g8b8a8_sint_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r32_uint_rate_instance-uvec2_as_r16g16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r32_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r32_uint_rate_instance-uvec4_as_r8g8b8a8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r8_uint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r8_uint_rate_instance-vec2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r16_uint_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r8_uint_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uint_as_r32_uint_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-uvec4_as_r32g32b32a32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-float_as_r16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-float_as_r16_sfloat_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-float_as_r16_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-float_as_r16_sfloat_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-float_as_r32_sfloat_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-float_as_r16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-float_as_r16_snorm_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec2_as_r16g16_sfloat_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec4_as_b8g8r8a8_unorm_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec4_as_b8g8r8a8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-vec4_as_r16g16b16a16_unorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-mat2_as_r8g8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-mat2_as_r16g16_unorm_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec3_as_r32g32b32_sint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uint_as_r32_uint_rate_instance-uvec2_as_r32g32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uint_as_r16_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uint_as_r8_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uint_as_r32_uint_rate_instance-float_as_r16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uint_as_r32_uint_rate_instance-vec2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uint_as_r16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uint_as_r16_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uint_as_r8_uint_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uint_as_r32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uint_as_r16_uint_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-uvec4_as_r32g32b32a32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-float_as_r8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-uvec4_as_r32g32b32a32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-float_as_r8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-float_as_r16_snorm_rate_instance-vec2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-float_as_r8_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-float_as_r16_unorm_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-float_as_r16_unorm_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-float_as_r8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-float_as_r16_snorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r32g32b32a32_sint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-vec4_as_r16g16b16a16_unorm_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-vec4_as_r32g32b32a32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-vec4_as_b8g8r8a8_unorm_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-mat2_as_r16g16_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r16g16b16a16_sint_rate_vertex-mat2_as_r16g16_sfloat_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.ivec4_as_r8g8b8a8_sint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-uvec3_as_r32g32b32_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-uvec2_as_r32g32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-uvec2_as_r8g8_uint_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-uvec2_as_r16g16_uint_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-uvec4_as_r16g16b16a16_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-float_as_r16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-float_as_r16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-float_as_r32_sfloat_rate_instance-vec2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-float_as_r16_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-float_as_r8_unorm_rate_instance-vec4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-float_as_r32_sfloat_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-float_as_r8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-float_as_r32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-vec2_as_r16g16_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-vec4_as_b8g8r8a8_unorm_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r8_uint_rate_vertex-vec4_as_b8g8r8a8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-vec4_as_r16g16b16a16_unorm_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r32_uint_rate_vertex-mat2_as_r8g8_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-mat2_as_r32g32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uint_as_r16_uint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-uvec4_as_r8g8b8a8_uint_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r16g16_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r32g32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-vec4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r32g32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r32g32_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r16g16_uint_rate_vertex-uvec3_as_r32g32b32_uint_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r16g16_uint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-float_as_r16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r32g32_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r16g16_uint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r8g8_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r16g16_uint_rate_vertex-float_as_r32_sfloat_rate_instance-vec2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r8g8_uint_rate_vertex-float_as_r8_unorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r32g32_uint_rate_vertex-float_as_r16_unorm_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r16g16_uint_rate_vertex-float_as_r8_unorm_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r16g16_uint_rate_vertex-float_as_r8_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r32g32_uint_rate_vertex-float_as_r16_unorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r8g8_uint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r32g32_uint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-vec4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r32g32_uint_rate_vertex-vec2_as_r16g16_sfloat_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r16g16_uint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r32g32_uint_rate_vertex-vec2_as_r16g16_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r32g32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r32g32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r32g32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r16g16_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r8g8_uint_rate_vertex-vec4_as_r16g16b16a16_sfloat_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r8g8_uint_rate_vertex-vec4_as_r8g8b8a8_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r16g16_uint_rate_vertex-vec4_as_r8g8b8a8_unorm_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r8g8_uint_rate_vertex-mat2_as_r32g32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r16g16_uint_rate_vertex-mat2_as_r32g32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec2_as_r32g32_uint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-float_as_r32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r16g16b16a16_uint_rate_instance-vec2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r8g8b8a8_uint_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-uvec4_as_r32g32b32a32_uint_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-float_as_r16_snorm_rate_instance-vec2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-float_as_r16_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-float_as_r16_snorm_rate_instance-vec4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-float_as_r16_sfloat_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-float_as_r8_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-float_as_r16_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec2_as_r8g8_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec2_as_r16g16_sfloat_rate_instance-vec4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec2_as_r16g16_snorm_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r8g8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec4_as_r8g8b8a8_snorm_rate_instance-mat2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec4_as_r16g16b16a16_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-vec4_as_r16g16b16a16_snorm_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-mat2_as_r8g8_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec3_as_r32g32b32_uint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-float_as_r16_unorm_rate_instance-vec2_as_r16g16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-float_as_r16_snorm_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-float_as_r8_unorm_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r16g16b16a16_uint_rate_vertex-float_as_r32_sfloat_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-float_as_r32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r16g16b16a16_uint_rate_vertex-float_as_r16_snorm_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-vec2_as_r8g8_unorm_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-vec2_as_r16g16_unorm_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-vec2_as_r16g16_snorm_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r16g16b16a16_uint_rate_vertex-vec4_as_r8g8b8a8_unorm_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-vec4_as_r32g32b32a32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r8g8b8a8_uint_rate_vertex-vec4_as_r8g8b8a8_snorm_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r16g16b16a16_uint_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r32g32b32a32_uint_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.uvec4_as_r16g16b16a16_uint_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r8_snorm_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-vec3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r8_snorm_rate_vertex-vec2_as_r16g16_sfloat_rate_instance-vec4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r8_unorm_rate_vertex-vec2_as_r16g16_snorm_rate_instance-mat2_as_r32g32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r8_unorm_rate_vertex-vec2_as_r16g16_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r8_snorm_rate_vertex-vec2_as_r32g32_sfloat_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r16_unorm_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r8_snorm_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r16_sfloat_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r16_snorm_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r32_sfloat_rate_vertex-vec4_as_r16g16b16a16_unorm_rate_instance-mat2_as_r8g8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r8_snorm_rate_vertex-vec4_as_r16g16b16a16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r16_snorm_rate_vertex-vec4_as_r8g8b8a8_unorm_rate_instance-mat4_as_r8g8b8a8_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r16_snorm_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r32_sfloat_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.float_as_r16_sfloat_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec2_as_r16g16_sfloat_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-vec4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec2_as_r8g8_unorm_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec2_as_r16g16_snorm_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec2_as_r32g32_sfloat_rate_vertex-vec3_as_r32g32b32_sfloat_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec2_as_r32g32_sfloat_rate_vertex-vec4_as_r32g32b32a32_sfloat_rate_instance-mat2_as_r16g16_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec2_as_r32g32_sfloat_rate_vertex-vec4_as_r32g32b32a32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec2_as_r8g8_unorm_rate_vertex-vec4_as_r16g16b16a16_unorm_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec2_as_r32g32_sfloat_rate_vertex-mat2_as_r16g16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec2_as_r16g16_unorm_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat4_as_r32g32b32a32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec2_as_r8g8_unorm_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec3_as_r32g32b32_sfloat_rate_vertex-vec4_as_r16g16b16a16_sfloat_rate_instance-mat2_as_r16g16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec3_as_r32g32b32_sfloat_rate_vertex-vec4_as_r32g32b32a32_sfloat_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec3_as_r32g32b32_sfloat_rate_vertex-vec4_as_b8g8r8a8_unorm_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec3_as_r32g32b32_sfloat_rate_vertex-mat2_as_r16g16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec3_as_r32g32b32_sfloat_rate_vertex-mat2_as_r16g16_snorm_rate_instance-mat4_as_b8g8r8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec3_as_r32g32b32_sfloat_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec4_as_r8g8b8a8_snorm_rate_vertex-mat2_as_r16g16_unorm_rate_instance-mat3_as_r32g32b32_sfloat_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec4_as_r16g16b16a16_sfloat_rate_vertex-mat2_as_r16g16_unorm_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.vec4_as_r16g16b16a16_snorm_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r8g8b8a8_unorm_rate_vertex
+dEQP-VK.pipeline.vertex_input.multiple_attributes.binding_one_to_many.attributes.mat2_as_r16g16_unorm_rate_vertex-mat3_as_r32g32b32_sfloat_rate_instance-mat4_as_r16g16b16a16_snorm_rate_vertex
+dEQP-VK.pipeline.input_assembly.primitive_topology.point_list
+dEQP-VK.pipeline.input_assembly.primitive_topology.line_list
+dEQP-VK.pipeline.input_assembly.primitive_topology.line_strip
+dEQP-VK.pipeline.input_assembly.primitive_topology.triangle_list
+dEQP-VK.pipeline.input_assembly.primitive_topology.triangle_strip
+dEQP-VK.pipeline.input_assembly.primitive_topology.triangle_fan
+dEQP-VK.pipeline.input_assembly.primitive_topology.line_list_with_adjacency
+dEQP-VK.pipeline.input_assembly.primitive_topology.line_strip_with_adjacency
+dEQP-VK.pipeline.input_assembly.primitive_topology.triangle_list_with_adjacency
+dEQP-VK.pipeline.input_assembly.primitive_topology.triangle_strip_with_adjacency
+dEQP-VK.pipeline.input_assembly.primitive_restart.index_type_uint16.line_strip
+dEQP-VK.pipeline.input_assembly.primitive_restart.index_type_uint16.triangle_strip
+dEQP-VK.pipeline.input_assembly.primitive_restart.index_type_uint16.triangle_fan
+dEQP-VK.pipeline.input_assembly.primitive_restart.index_type_uint16.line_strip_with_adjacency
+dEQP-VK.pipeline.input_assembly.primitive_restart.index_type_uint16.triangle_strip_with_adjacency
+dEQP-VK.pipeline.input_assembly.primitive_restart.index_type_uint32.line_strip
+dEQP-VK.pipeline.input_assembly.primitive_restart.index_type_uint32.triangle_strip
+dEQP-VK.pipeline.input_assembly.primitive_restart.index_type_uint32.triangle_fan
+dEQP-VK.pipeline.input_assembly.primitive_restart.index_type_uint32.line_strip_with_adjacency
+dEQP-VK.pipeline.input_assembly.primitive_restart.index_type_uint32.triangle_strip_with_adjacency
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.vertex_input_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.vertex_input_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.vertex_shader_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.vertex_shader_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.fragment_shader_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.fragment_shader_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.early_fragment_tests_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.early_fragment_tests_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.late_fragment_tests_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.late_fragment_tests_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.color_attachment_output_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.color_attachment_output_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.all_graphics_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.all_graphics_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.all_commands_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.all_commands_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.vertex_shader_stage_fragment_shader_stage_late_fragment_tests_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.vertex_shader_stage_fragment_shader_stage_late_fragment_tests_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.vertex_input_stage_early_fragment_tests_stage_color_attachment_output_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.basic_graphics_tests.vertex_input_stage_early_fragment_tests_stage_color_attachment_output_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.advanced_graphics_tests.draw_indirect_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.advanced_graphics_tests.draw_indirect_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.advanced_graphics_tests.tessellation_control_shader_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.advanced_graphics_tests.tessellation_control_shader_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.advanced_graphics_tests.tessellation_evaluation_shader_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.advanced_graphics_tests.tessellation_evaluation_shader_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.advanced_graphics_tests.geometry_shader_stage_in_render_pass
+dEQP-VK.pipeline.timestamp.advanced_graphics_tests.geometry_shader_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.basic_compute_tests.compute_shader_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.basic_compute_tests.all_commands_stage_out_of_render_pass
+dEQP-VK.pipeline.timestamp.transfer_tests.transfer_stage_with_copy_buffer_method
+dEQP-VK.pipeline.timestamp.transfer_tests.transfer_stage_with_copy_image_method
+dEQP-VK.pipeline.timestamp.transfer_tests.transfer_stage_with_blit_image_method
+dEQP-VK.pipeline.timestamp.transfer_tests.transfer_stage_with_copy_buffer_to_image_method
+dEQP-VK.pipeline.timestamp.transfer_tests.transfer_stage_with_copy_image_to_buffer_method
+dEQP-VK.pipeline.timestamp.transfer_tests.transfer_stage_with_update_buffer_method
+dEQP-VK.pipeline.timestamp.transfer_tests.transfer_stage_with_fill_buffer_method
+dEQP-VK.pipeline.timestamp.transfer_tests.transfer_stage_with_clear_color_image_method
+dEQP-VK.pipeline.timestamp.transfer_tests.transfer_stage_with_clear_depth_stencil_image_method
+dEQP-VK.pipeline.timestamp.transfer_tests.transfer_stage_with_resolve_image_method
+dEQP-VK.pipeline.timestamp.transfer_tests.transfer_stage_with_copy_query_pool_results_method
+dEQP-VK.pipeline.timestamp.transfer_tests.host_stage_with_copy_buffer_method
+dEQP-VK.pipeline.timestamp.transfer_tests.host_stage_with_copy_image_method
+dEQP-VK.pipeline.timestamp.transfer_tests.host_stage_with_blit_image_method
+dEQP-VK.pipeline.timestamp.transfer_tests.host_stage_with_copy_buffer_to_image_method
+dEQP-VK.pipeline.timestamp.transfer_tests.host_stage_with_copy_image_to_buffer_method
+dEQP-VK.pipeline.timestamp.transfer_tests.host_stage_with_update_buffer_method
+dEQP-VK.pipeline.timestamp.transfer_tests.host_stage_with_fill_buffer_method
+dEQP-VK.pipeline.timestamp.transfer_tests.host_stage_with_clear_color_image_method
+dEQP-VK.pipeline.timestamp.transfer_tests.host_stage_with_clear_depth_stencil_image_method
+dEQP-VK.pipeline.timestamp.transfer_tests.host_stage_with_resolve_image_method
+dEQP-VK.pipeline.timestamp.transfer_tests.host_stage_with_copy_query_pool_results_method
+dEQP-VK.pipeline.timestamp.misc_tests.timestamp_only
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.no_access.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.geometry.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.compute.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.no_access.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.geometry.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.compute.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.compute.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.compute.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.no_access.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_ctrl.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.tess_eval.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.geometry.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.compute.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_image.vertex_fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.no_access.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.no_access.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.no_access.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.no_access.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.no_access.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.no_access.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.vertex.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.vertex.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.vertex.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.vertex.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.vertex.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.vertex.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.tess_ctrl.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.tess_ctrl.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.tess_ctrl.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.tess_ctrl.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.tess_ctrl.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.tess_ctrl.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.tess_eval.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.tess_eval.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.tess_eval.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.tess_eval.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.tess_eval.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.tess_eval.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.geometry.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.geometry.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.geometry.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.geometry.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.geometry.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.geometry.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.fragment.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.fragment.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.fragment.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.fragment.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.fragment.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.fragment.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.compute.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.compute.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.compute.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.compute.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.compute.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.compute.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.vertex_fragment.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.vertex_fragment.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.vertex_fragment.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.vertex_fragment.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.vertex_fragment.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_texel_buffer.vertex_fragment.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.no_access.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.no_access.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.no_access.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.no_access.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.no_access.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.no_access.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.vertex.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.vertex.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.vertex.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.vertex.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.vertex.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.vertex.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.tess_ctrl.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.tess_ctrl.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.tess_ctrl.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.tess_ctrl.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.tess_ctrl.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.tess_ctrl.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.tess_eval.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.tess_eval.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.tess_eval.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.tess_eval.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.tess_eval.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.tess_eval.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.geometry.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.geometry.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.geometry.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.geometry.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.geometry.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.geometry.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.fragment.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.fragment.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.fragment.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.fragment.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.fragment.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.fragment.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.compute.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.compute.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.compute.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.compute.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.compute.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.compute.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.vertex_fragment.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.vertex_fragment.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.vertex_fragment.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.vertex_fragment.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.vertex_fragment.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_texel_buffer.vertex_fragment.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.no_access.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.no_access.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.no_access.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.no_access.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.no_access.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.no_access.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.vertex.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.vertex.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.vertex.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.vertex.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.vertex.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.vertex.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.tess_ctrl.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.tess_ctrl.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.tess_ctrl.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.tess_ctrl.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.tess_ctrl.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.tess_ctrl.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.tess_eval.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.tess_eval.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.tess_eval.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.tess_eval.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.tess_eval.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.tess_eval.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.geometry.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.geometry.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.geometry.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.geometry.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.geometry.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.geometry.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.fragment.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.fragment.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.fragment.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.fragment.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.fragment.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.fragment.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.compute.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.compute.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.compute.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.compute.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.compute.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.compute.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.vertex_fragment.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.vertex_fragment.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.vertex_fragment.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.vertex_fragment.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.vertex_fragment.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer.vertex_fragment.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.no_access.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.no_access.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.no_access.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.no_access.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.no_access.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.no_access.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.vertex.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.vertex.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.vertex.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.vertex.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.vertex.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.vertex.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.tess_ctrl.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.tess_ctrl.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.tess_ctrl.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.tess_ctrl.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.tess_ctrl.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.tess_ctrl.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.tess_eval.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.tess_eval.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.tess_eval.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.tess_eval.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.tess_eval.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.tess_eval.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.geometry.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.geometry.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.geometry.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.geometry.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.geometry.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.geometry.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.fragment.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.fragment.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.fragment.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.fragment.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.fragment.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.fragment.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.compute.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.compute.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.compute.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.compute.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.compute.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.compute.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.vertex_fragment.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.vertex_fragment.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.vertex_fragment.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.vertex_fragment.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.vertex_fragment.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer.vertex_fragment.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.no_access.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.no_access.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.no_access.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.no_access.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.no_access.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.no_access.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.no_access.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.no_access.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.no_access.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.no_access.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.no_access.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.no_access.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_eval.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_eval.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_eval.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_eval.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_eval.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_eval.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_eval.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.tess_eval.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.geometry.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.geometry.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.geometry.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.geometry.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.geometry.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.geometry.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.geometry.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.geometry.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.geometry.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.geometry.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.geometry.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.geometry.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.fragment.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.fragment.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.fragment.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.fragment.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.fragment.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.fragment.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.fragment.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.fragment.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.fragment.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.fragment.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.fragment.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.fragment.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.compute.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.compute.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.compute.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.compute.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.compute.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.compute.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.compute.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.compute.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.compute.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.compute.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.compute.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.compute.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.no_access.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.no_access.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.no_access.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.no_access.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.no_access.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.no_access.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.no_access.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.no_access.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.no_access.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.no_access.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.no_access.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.no_access.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_eval.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_eval.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_eval.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_eval.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_eval.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_eval.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_eval.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.tess_eval.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.geometry.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.geometry.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.geometry.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.geometry.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.geometry.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.geometry.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.geometry.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.geometry.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.geometry.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.geometry.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.geometry.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.geometry.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.fragment.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.fragment.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.fragment.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.fragment.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.fragment.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.fragment.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.fragment.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.fragment.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.fragment.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.fragment.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.fragment.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.fragment.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.compute.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.compute.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.compute.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.compute.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.compute.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.compute.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.compute.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.compute.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.compute.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.compute.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.compute.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.compute.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.primary_cmd_buf.storage_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.no_access.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_ctrl.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.tess_eval.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.geometry.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_mutable.vertex_fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.no_access.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_ctrl.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.tess_eval.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.geometry.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.sampler_immutable.vertex_fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.no_access.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_ctrl.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.tess_eval.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.geometry.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_mutable.vertex_fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.no_access.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_ctrl.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.tess_eval.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.geometry.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.combined_image_sampler_immutable.vertex_fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.no_access.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_ctrl.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.tess_eval.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.geometry.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.single_descriptor.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.multiple_descriptors.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.1d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.1d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.1d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.1d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.1d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.1d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.2d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.2d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.2d_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.2d_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.2d_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.2d_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.3d
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.3d_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.cube
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.cube_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.cube_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.cube_array
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.cube_array_base_mip
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_image.vertex_fragment.descriptor_array.cube_array_base_slice
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.no_access.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.no_access.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.no_access.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.no_access.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.no_access.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.no_access.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.vertex.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.vertex.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.vertex.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.vertex.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.vertex.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.vertex.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.tess_ctrl.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.tess_ctrl.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.tess_ctrl.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.tess_ctrl.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.tess_ctrl.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.tess_ctrl.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.tess_eval.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.tess_eval.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.tess_eval.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.tess_eval.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.tess_eval.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.tess_eval.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.geometry.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.geometry.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.geometry.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.geometry.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.geometry.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.geometry.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.fragment.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.fragment.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.fragment.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.fragment.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.fragment.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.fragment.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.vertex_fragment.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.vertex_fragment.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.vertex_fragment.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.vertex_fragment.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.vertex_fragment.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_texel_buffer.vertex_fragment.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.no_access.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.no_access.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.no_access.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.no_access.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.no_access.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.no_access.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.vertex.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.vertex.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.vertex.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.vertex.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.vertex.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.vertex.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.tess_ctrl.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.tess_ctrl.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.tess_ctrl.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.tess_ctrl.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.tess_ctrl.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.tess_ctrl.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.tess_eval.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.tess_eval.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.tess_eval.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.tess_eval.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.tess_eval.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.tess_eval.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.geometry.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.geometry.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.geometry.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.geometry.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.geometry.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.geometry.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.fragment.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.fragment.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.fragment.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.fragment.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.fragment.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.fragment.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.vertex_fragment.single_descriptor.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.vertex_fragment.single_descriptor.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.vertex_fragment.multiple_descriptors.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.vertex_fragment.multiple_descriptors.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.vertex_fragment.descriptor_array.offset_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_texel_buffer.vertex_fragment.descriptor_array.offset_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.no_access.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.no_access.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.no_access.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.no_access.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.no_access.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.no_access.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.vertex.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.vertex.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.vertex.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.vertex.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.vertex.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.vertex.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.tess_ctrl.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.tess_ctrl.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.tess_ctrl.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.tess_ctrl.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.tess_ctrl.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.tess_ctrl.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.tess_eval.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.tess_eval.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.tess_eval.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.tess_eval.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.tess_eval.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.tess_eval.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.geometry.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.geometry.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.geometry.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.geometry.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.geometry.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.geometry.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.fragment.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.fragment.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.fragment.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.fragment.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.fragment.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.fragment.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.vertex_fragment.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.vertex_fragment.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.vertex_fragment.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.vertex_fragment.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.vertex_fragment.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer.vertex_fragment.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.no_access.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.no_access.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.no_access.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.no_access.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.no_access.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.no_access.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.vertex.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.vertex.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.vertex.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.vertex.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.vertex.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.vertex.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.tess_ctrl.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.tess_ctrl.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.tess_ctrl.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.tess_ctrl.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.tess_ctrl.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.tess_ctrl.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.tess_eval.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.tess_eval.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.tess_eval.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.tess_eval.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.tess_eval.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.tess_eval.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.geometry.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.geometry.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.geometry.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.geometry.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.geometry.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.geometry.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.fragment.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.fragment.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.fragment.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.fragment.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.fragment.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.fragment.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.vertex_fragment.single_descriptor.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.vertex_fragment.single_descriptor.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.vertex_fragment.multiple_descriptors.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.vertex_fragment.multiple_descriptors.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.vertex_fragment.descriptor_array.offset_view_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer.vertex_fragment.descriptor_array.offset_view_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.no_access.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.no_access.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.no_access.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.no_access.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.no_access.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.no_access.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.no_access.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.no_access.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.no_access.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.no_access.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.no_access.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.no_access.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_eval.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_eval.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_eval.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_eval.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_eval.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_eval.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_eval.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.tess_eval.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.geometry.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.geometry.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.geometry.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.geometry.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.geometry.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.geometry.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.geometry.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.geometry.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.geometry.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.geometry.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.geometry.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.geometry.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.fragment.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.fragment.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.fragment.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.fragment.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.fragment.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.fragment.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.fragment.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.fragment.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.fragment.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.fragment.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.fragment.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.fragment.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.uniform_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.no_access.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.no_access.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.no_access.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.no_access.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.no_access.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.no_access.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.no_access.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.no_access.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.no_access.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.no_access.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.no_access.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.no_access.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_ctrl.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_ctrl.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_ctrl.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_eval.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_eval.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_eval.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_eval.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_eval.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_eval.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_eval.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_eval.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.tess_eval.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.geometry.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.geometry.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.geometry.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.geometry.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.geometry.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.geometry.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.geometry.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.geometry.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.geometry.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.geometry.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.geometry.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.geometry.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.fragment.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.fragment.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.fragment.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.fragment.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.fragment.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.fragment.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.fragment.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.fragment.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.fragment.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.fragment.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.fragment.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.fragment.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex_fragment.single_descriptor.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex_fragment.multiple_descriptors.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_zero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_zero_dynamic_nonzero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_nonzero_dynamic_zero
+dEQP-VK.binding_model.shader_access.secondary_cmd_buf.storage_buffer_dynamic.vertex_fragment.descriptor_array.offset_view_nonzero_dynamic_nonzero
+dEQP-VK.spirv_assembly.instruction.compute.opnop.all
+dEQP-VK.spirv_assembly.instruction.compute.opline.all
+dEQP-VK.spirv_assembly.instruction.compute.opnoline.all
+dEQP-VK.spirv_assembly.instruction.compute.opconstantnull.bool
+dEQP-VK.spirv_assembly.instruction.compute.opconstantnull.sint32
+dEQP-VK.spirv_assembly.instruction.compute.opconstantnull.uint32
+dEQP-VK.spirv_assembly.instruction.compute.opconstantnull.float32
+dEQP-VK.spirv_assembly.instruction.compute.opconstantnull.vec4float32
+dEQP-VK.spirv_assembly.instruction.compute.opconstantnull.vec3bool
+dEQP-VK.spirv_assembly.instruction.compute.opconstantnull.vec2uint32
+dEQP-VK.spirv_assembly.instruction.compute.opconstantnull.matrix
+dEQP-VK.spirv_assembly.instruction.compute.opconstantnull.array
+dEQP-VK.spirv_assembly.instruction.compute.opconstantnull.struct
+dEQP-VK.spirv_assembly.instruction.compute.opconstantnull.pointer
+dEQP-VK.spirv_assembly.instruction.compute.opconstantcomposite.vector
+dEQP-VK.spirv_assembly.instruction.compute.opconstantcomposite.matrix
+dEQP-VK.spirv_assembly.instruction.compute.opconstantcomposite.struct
+dEQP-VK.spirv_assembly.instruction.compute.opconstantcomposite.nested_struct
+dEQP-VK.spirv_assembly.instruction.compute.opconstantnullcomposite.spotcheck
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.iadd
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.isub
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.imul
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.sdiv
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.udiv
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.srem
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.smod
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.umod
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.bitwiseand
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.bitwiseor
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.bitwisexor
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.shiftrightlogical
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.shiftrightarithmetic
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.shiftleftlogical
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.slessthan
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.ulessthan
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.sgreaterthan
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.ugreaterthan
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.slessthanequal
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.ulessthanequal
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.sgreaterthanequal
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.ugreaterthanequal
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.iequal
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.logicaland
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.logicalor
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.logicalequal
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.logicalnotequal
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.snegate
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.not
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.logicalnot
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.select
+dEQP-VK.spirv_assembly.instruction.compute.opspecconstantop.vector_related
+dEQP-VK.spirv_assembly.instruction.compute.opsource.unknown_source
+dEQP-VK.spirv_assembly.instruction.compute.opsource.wrong_source
+dEQP-VK.spirv_assembly.instruction.compute.opsource.normal_filename
+dEQP-VK.spirv_assembly.instruction.compute.opsource.empty_filename
+dEQP-VK.spirv_assembly.instruction.compute.opsource.normal_source_code
+dEQP-VK.spirv_assembly.instruction.compute.opsource.empty_source_code
+dEQP-VK.spirv_assembly.instruction.compute.opsource.long_source_code
+dEQP-VK.spirv_assembly.instruction.compute.opsource.utf8_source_code
+dEQP-VK.spirv_assembly.instruction.compute.opsource.normal_sourcecontinued
+dEQP-VK.spirv_assembly.instruction.compute.opsource.empty_sourcecontinued
+dEQP-VK.spirv_assembly.instruction.compute.opsource.long_sourcecontinued
+dEQP-VK.spirv_assembly.instruction.compute.opsource.utf8_sourcecontinued
+dEQP-VK.spirv_assembly.instruction.compute.opsource.multi_sourcecontinued
+dEQP-VK.spirv_assembly.instruction.compute.opsource.empty_source_before_sourcecontinued
+dEQP-VK.spirv_assembly.instruction.compute.opsourceextension.empty_extension
+dEQP-VK.spirv_assembly.instruction.compute.opsourceextension.real_extension
+dEQP-VK.spirv_assembly.instruction.compute.opsourceextension.fake_extension
+dEQP-VK.spirv_assembly.instruction.compute.opsourceextension.utf8_extension
+dEQP-VK.spirv_assembly.instruction.compute.opsourceextension.long_extension
+dEQP-VK.spirv_assembly.instruction.compute.decoration_group.all
+dEQP-VK.spirv_assembly.instruction.compute.opphi.block
+dEQP-VK.spirv_assembly.instruction.compute.opphi.induction
+dEQP-VK.spirv_assembly.instruction.compute.opphi.swap
+dEQP-VK.spirv_assembly.instruction.compute.loop_control.none
+dEQP-VK.spirv_assembly.instruction.compute.loop_control.unroll
+dEQP-VK.spirv_assembly.instruction.compute.loop_control.dont_unroll
+dEQP-VK.spirv_assembly.instruction.compute.loop_control.unroll_dont_unroll
+dEQP-VK.spirv_assembly.instruction.compute.function_control.none
+dEQP-VK.spirv_assembly.instruction.compute.function_control.inline
+dEQP-VK.spirv_assembly.instruction.compute.function_control.dont_inline
+dEQP-VK.spirv_assembly.instruction.compute.function_control.pure
+dEQP-VK.spirv_assembly.instruction.compute.function_control.const
+dEQP-VK.spirv_assembly.instruction.compute.function_control.inline_pure
+dEQP-VK.spirv_assembly.instruction.compute.function_control.const_dont_inline
+dEQP-VK.spirv_assembly.instruction.compute.function_control.inline_dont_inline
+dEQP-VK.spirv_assembly.instruction.compute.function_control.pure_inline_dont_inline
+dEQP-VK.spirv_assembly.instruction.compute.selection_control.none
+dEQP-VK.spirv_assembly.instruction.compute.selection_control.flatten
+dEQP-VK.spirv_assembly.instruction.compute.selection_control.dont_flatten
+dEQP-VK.spirv_assembly.instruction.compute.selection_control.flatten_dont_flatten
+dEQP-VK.spirv_assembly.instruction.compute.block_order.all
+dEQP-VK.spirv_assembly.instruction.compute.multiple_shaders.shader1
+dEQP-VK.spirv_assembly.instruction.compute.multiple_shaders.shader2
+dEQP-VK.spirv_assembly.instruction.compute.memory_access.null
+dEQP-VK.spirv_assembly.instruction.compute.memory_access.none
+dEQP-VK.spirv_assembly.instruction.compute.memory_access.volatile
+dEQP-VK.spirv_assembly.instruction.compute.memory_access.aligned
+dEQP-VK.spirv_assembly.instruction.compute.memory_access.nontemporal
+dEQP-VK.spirv_assembly.instruction.compute.memory_access.aligned_nontemporal
+dEQP-VK.spirv_assembly.instruction.compute.memory_access.aligned_volatile
+dEQP-VK.spirv_assembly.instruction.compute.opcopymemory.vector
+dEQP-VK.spirv_assembly.instruction.compute.opcopymemory.array
+dEQP-VK.spirv_assembly.instruction.compute.opcopymemory.struct
+dEQP-VK.spirv_assembly.instruction.compute.opcopymemory.float
+dEQP-VK.spirv_assembly.instruction.compute.opcopyobject.spotcheck
+dEQP-VK.spirv_assembly.instruction.compute.nocontraction.multiplication
+dEQP-VK.spirv_assembly.instruction.compute.nocontraction.addition
+dEQP-VK.spirv_assembly.instruction.compute.nocontraction.both
+dEQP-VK.spirv_assembly.instruction.compute.opundef.bool
+dEQP-VK.spirv_assembly.instruction.compute.opundef.sint32
+dEQP-VK.spirv_assembly.instruction.compute.opundef.uint32
+dEQP-VK.spirv_assembly.instruction.compute.opundef.float32
+dEQP-VK.spirv_assembly.instruction.compute.opundef.vec4float32
+dEQP-VK.spirv_assembly.instruction.compute.opundef.vec2uint32
+dEQP-VK.spirv_assembly.instruction.compute.opundef.matrix
+dEQP-VK.spirv_assembly.instruction.compute.opundef.image
+dEQP-VK.spirv_assembly.instruction.compute.opundef.sampler
+dEQP-VK.spirv_assembly.instruction.compute.opundef.sampledimage
+dEQP-VK.spirv_assembly.instruction.compute.opundef.array
+dEQP-VK.spirv_assembly.instruction.compute.opundef.runtimearray
+dEQP-VK.spirv_assembly.instruction.compute.opundef.struct
+dEQP-VK.spirv_assembly.instruction.compute.opundef.pointer
+dEQP-VK.spirv_assembly.instruction.compute.opunreachable.all
+dEQP-VK.spirv_assembly.instruction.compute.opquantize.infinities
+dEQP-VK.spirv_assembly.instruction.compute.opquantize.propagated_nans
+dEQP-VK.spirv_assembly.instruction.compute.opquantize.flush_to_zero
+dEQP-VK.spirv_assembly.instruction.compute.opquantize.exact
+dEQP-VK.spirv_assembly.instruction.compute.opquantize.rounded
+dEQP-VK.spirv_assembly.instruction.compute.opfrem.all
+dEQP-VK.spirv_assembly.instruction.graphics.opnop.opnop_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opnop.opnop_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opnop.opnop_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opnop.opnop_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opnop.opnop_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.unknown_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.unknown_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.unknown_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.unknown_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.unknown_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.essl_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.essl_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.essl_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.essl_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.essl_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.glsl_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.glsl_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.glsl_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.glsl_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.glsl_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.opencl_cpp_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.opencl_cpp_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.opencl_cpp_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.opencl_cpp_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.opencl_cpp_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.opencl_c_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.opencl_c_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.opencl_c_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.opencl_c_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.opencl_c_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.multiple_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.multiple_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.multiple_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.multiple_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.multiple_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.file_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.file_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.file_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.file_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.file_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.source_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.source_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.source_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.source_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.source_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.longsource_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.longsource_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.longsource_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.longsource_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opsource.longsource_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.empty_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.empty_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.empty_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.empty_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.empty_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.short_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.short_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.short_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.short_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.short_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.multiple_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.multiple_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.multiple_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.multiple_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.multiple_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.long_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.long_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.long_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.long_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opsourcecontinued.long_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_empty_name_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_empty_name_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_empty_name_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_empty_name_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_empty_name_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_short_name_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_short_name_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_short_name_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_short_name_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_short_name_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_long_name_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_long_name_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_long_name_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_long_name_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opline.opline_long_name_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opnoline.opnoline_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opnoline.opnoline_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opnoline.opnoline_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opnoline.opnoline_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opnoline.opnoline_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.vec4_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.vec4_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.vec4_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.vec4_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.vec4_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.float_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.float_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.float_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.float_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.float_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.bool_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.bool_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.bool_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.bool_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.bool_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.i32_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.i32_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.i32_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.i32_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.i32_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.struct_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.struct_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.struct_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.struct_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.struct_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.array_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.array_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.array_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.array_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.array_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.matrix_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.matrix_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.matrix_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.matrix_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantnull.matrix_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.vec4_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.vec4_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.vec4_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.vec4_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.vec4_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.struct_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.struct_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.struct_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.struct_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.struct_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.matrix_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.matrix_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.matrix_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.matrix_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.matrix_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.array_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.array_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.array_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.array_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.array_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.array_of_struct_of_array_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.array_of_struct_of_array_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.array_of_struct_of_array_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.array_of_struct_of_array_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opconstantcomposite.array_of_struct_of_array_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.none_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.none_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.none_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.none_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.none_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.aligned_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.aligned_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.aligned_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.aligned_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.aligned_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_aligned_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_aligned_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_aligned_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_aligned_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_aligned_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.nontemporal_aligned_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.nontemporal_aligned_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.nontemporal_aligned_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.nontemporal_aligned_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.nontemporal_aligned_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_nontemporal_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_nontemporal_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_nontemporal_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_nontemporal_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_nontemporal_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_nontermporal_aligned_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_nontermporal_aligned_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_nontermporal_aligned_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_nontermporal_aligned_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opmemoryaccess.volatile_nontermporal_aligned_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.bool_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.bool_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.bool_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.bool_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.bool_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.vec2uint32_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.vec2uint32_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.vec2uint32_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.vec2uint32_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.vec2uint32_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.image_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.image_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.image_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.image_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.image_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sampler_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sampler_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sampler_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sampler_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sampler_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sampledimage_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sampledimage_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sampledimage_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sampledimage_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sampledimage_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.pointer_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.pointer_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.pointer_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.pointer_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.pointer_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.runtimearray_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.runtimearray_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.runtimearray_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.runtimearray_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.runtimearray_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.array_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.array_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.array_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.array_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.array_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.struct_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.struct_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.struct_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.struct_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.struct_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.float32_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.float32_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.float32_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.float32_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.float32_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sint32_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sint32_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sint32_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sint32_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.sint32_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.uint32_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.uint32_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.uint32_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.uint32_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.uint32_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.vec4float32_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.vec4float32_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.vec4float32_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.vec4float32_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.vec4float32_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.matrix_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.matrix_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.matrix_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.matrix_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opundef.matrix_frag
+dEQP-VK.spirv_assembly.instruction.graphics.selection_block_order.out_of_order_vert
+dEQP-VK.spirv_assembly.instruction.graphics.selection_block_order.out_of_order_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.selection_block_order.out_of_order_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.selection_block_order.out_of_order_geom
+dEQP-VK.spirv_assembly.instruction.graphics.selection_block_order.out_of_order_frag
+dEQP-VK.spirv_assembly.instruction.graphics.module.same_module
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom1_tessc1_tesse1_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom1_tessc1_tesse1_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom1_tessc1_tesse2_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom1_tessc1_tesse2_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom1_tessc2_tesse1_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom1_tessc2_tesse1_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom1_tessc2_tesse2_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom1_tessc2_tesse2_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom2_tessc1_tesse1_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom2_tessc1_tesse1_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom2_tessc1_tesse2_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom2_tessc1_tesse2_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom2_tessc2_tesse1_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom2_tessc2_tesse1_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom2_tessc2_tesse2_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert1_geom2_tessc2_tesse2_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom1_tessc1_tesse1_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom1_tessc1_tesse1_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom1_tessc1_tesse2_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom1_tessc1_tesse2_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom1_tessc2_tesse1_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom1_tessc2_tesse1_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom1_tessc2_tesse2_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom1_tessc2_tesse2_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom2_tessc1_tesse1_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom2_tessc1_tesse1_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom2_tessc1_tesse2_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom2_tessc1_tesse2_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom2_tessc2_tesse1_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom2_tessc2_tesse1_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom2_tessc2_tesse2_frag1
+dEQP-VK.spirv_assembly.instruction.graphics.module.vert2_geom2_tessc2_tesse2_frag2
+dEQP-VK.spirv_assembly.instruction.graphics.switch_block_order.out_of_order_vert
+dEQP-VK.spirv_assembly.instruction.graphics.switch_block_order.out_of_order_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.switch_block_order.out_of_order_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.switch_block_order.out_of_order_geom
+dEQP-VK.spirv_assembly.instruction.graphics.switch_block_order.out_of_order_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.out_of_order_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.out_of_order_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.out_of_order_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.out_of_order_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.out_of_order_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.induction_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.induction_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.induction_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.induction_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.induction_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.swap_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.swap_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.swap_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.swap_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opphi.swap_frag
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.multiplication_vert
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.multiplication_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.multiplication_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.multiplication_geom
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.multiplication_frag
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.addition_vert
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.addition_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.addition_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.addition_geom
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.addition_frag
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.both_vert
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.both_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.both_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.both_geom
+dEQP-VK.spirv_assembly.instruction.graphics.nocontraction.both_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.positive_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.positive_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.positive_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.positive_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.positive_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.denorm_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.denorm_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.denorm_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.denorm_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.denorm_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_denorm_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_denorm_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_denorm_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_denorm_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_denorm_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.too_small_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.too_small_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.too_small_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.too_small_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.too_small_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_too_small_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_too_small_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_too_small_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_too_small_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_too_small_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_inf_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_inf_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_inf_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_inf_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_inf_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.inf_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.inf_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.inf_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.inf_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.inf_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.round_to_negative_inf_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.round_to_negative_inf_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.round_to_negative_inf_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.round_to_negative_inf_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.round_to_negative_inf_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.round_to_inf_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.round_to_inf_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.round_to_inf_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.round_to_inf_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.round_to_inf_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.nan_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.nan_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.nan_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.nan_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.nan_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_nan_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_nan_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_nan_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_nan_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_nan_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_positive_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_positive_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_positive_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_positive_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_positive_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_denorm_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_denorm_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_denorm_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_denorm_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_denorm_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_denorm_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_denorm_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_denorm_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_denorm_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_denorm_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_too_small_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_too_small_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_too_small_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_too_small_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_too_small_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_too_small_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_too_small_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_too_small_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_too_small_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_too_small_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_inf_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_inf_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_inf_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_inf_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_inf_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_inf_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_inf_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_inf_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_inf_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_inf_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_round_to_negative_inf_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_round_to_negative_inf_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_round_to_negative_inf_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_round_to_negative_inf_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_round_to_negative_inf_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_round_to_inf_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_round_to_inf_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_round_to_inf_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_round_to_inf_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_round_to_inf_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_nan_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_nan_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_nan_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_nan_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_nan_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_nan_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_nan_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_nan_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_nan_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_nan_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.positive_round_up_or_round_down_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.positive_round_up_or_round_down_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.positive_round_up_or_round_down_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.positive_round_up_or_round_down_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.positive_round_up_or_round_down_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_round_up_or_round_down_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_round_up_or_round_down_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_round_up_or_round_down_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_round_up_or_round_down_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.negative_round_up_or_round_down_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.carry_bit_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.carry_bit_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.carry_bit_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.carry_bit_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.carry_bit_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.carry_to_exponent_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.carry_to_exponent_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.carry_to_exponent_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.carry_to_exponent_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.carry_to_exponent_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_positive_round_up_or_round_down_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_positive_round_up_or_round_down_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_positive_round_up_or_round_down_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_positive_round_up_or_round_down_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_positive_round_up_or_round_down_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_round_up_or_round_down_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_round_up_or_round_down_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_round_up_or_round_down_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_round_up_or_round_down_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_negative_round_up_or_round_down_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_carry_bit_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_carry_bit_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_carry_bit_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_carry_bit_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_carry_bit_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_carry_to_exponent_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_carry_to_exponent_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_carry_to_exponent_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_carry_to_exponent_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opquantize.spec_const_carry_to_exponent_frag
+dEQP-VK.spirv_assembly.instruction.graphics.loop.single_block_vert
+dEQP-VK.spirv_assembly.instruction.graphics.loop.single_block_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.loop.single_block_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.loop.single_block_geom
+dEQP-VK.spirv_assembly.instruction.graphics.loop.single_block_frag
+dEQP-VK.spirv_assembly.instruction.graphics.loop.multi_block_continue_construct_vert
+dEQP-VK.spirv_assembly.instruction.graphics.loop.multi_block_continue_construct_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.loop.multi_block_continue_construct_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.loop.multi_block_continue_construct_geom
+dEQP-VK.spirv_assembly.instruction.graphics.loop.multi_block_continue_construct_frag
+dEQP-VK.spirv_assembly.instruction.graphics.loop.multi_block_loop_construct_vert
+dEQP-VK.spirv_assembly.instruction.graphics.loop.multi_block_loop_construct_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.loop.multi_block_loop_construct_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.loop.multi_block_loop_construct_geom
+dEQP-VK.spirv_assembly.instruction.graphics.loop.multi_block_loop_construct_frag
+dEQP-VK.spirv_assembly.instruction.graphics.loop.continue_vert
+dEQP-VK.spirv_assembly.instruction.graphics.loop.continue_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.loop.continue_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.loop.continue_geom
+dEQP-VK.spirv_assembly.instruction.graphics.loop.continue_frag
+dEQP-VK.spirv_assembly.instruction.graphics.loop.break_vert
+dEQP-VK.spirv_assembly.instruction.graphics.loop.break_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.loop.break_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.loop.break_geom
+dEQP-VK.spirv_assembly.instruction.graphics.loop.break_frag
+dEQP-VK.spirv_assembly.instruction.graphics.loop.return_vert
+dEQP-VK.spirv_assembly.instruction.graphics.loop.return_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.loop.return_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.loop.return_geom
+dEQP-VK.spirv_assembly.instruction.graphics.loop.return_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.iadd_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.iadd_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.iadd_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.iadd_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.iadd_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.isub_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.isub_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.isub_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.isub_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.isub_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.imul_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.imul_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.imul_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.imul_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.imul_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sdiv_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sdiv_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sdiv_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sdiv_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sdiv_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.udiv_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.udiv_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.udiv_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.udiv_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.udiv_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.srem_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.srem_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.srem_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.srem_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.srem_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.smod_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.smod_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.smod_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.smod_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.smod_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.umod_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.umod_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.umod_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.umod_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.umod_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwiseand_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwiseand_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwiseand_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwiseand_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwiseand_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwiseor_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwiseor_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwiseor_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwiseor_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwiseor_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwisexor_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwisexor_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwisexor_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwisexor_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.bitwisexor_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftrightlogical_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftrightlogical_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftrightlogical_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftrightlogical_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftrightlogical_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftrightarithmetic_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftrightarithmetic_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftrightarithmetic_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftrightarithmetic_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftrightarithmetic_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftleftlogical_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftleftlogical_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftleftlogical_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftleftlogical_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.shiftleftlogical_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.slessthan_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.slessthan_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.slessthan_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.slessthan_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.slessthan_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ulessthan_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ulessthan_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ulessthan_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ulessthan_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ulessthan_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sgreaterthan_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sgreaterthan_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sgreaterthan_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sgreaterthan_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sgreaterthan_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ugreaterthan_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ugreaterthan_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ugreaterthan_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ugreaterthan_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ugreaterthan_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.slessthanequal_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.slessthanequal_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.slessthanequal_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.slessthanequal_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.slessthanequal_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ulessthanequal_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ulessthanequal_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ulessthanequal_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ulessthanequal_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ulessthanequal_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sgreaterthanequal_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sgreaterthanequal_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sgreaterthanequal_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sgreaterthanequal_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.sgreaterthanequal_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ugreaterthanequal_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ugreaterthanequal_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ugreaterthanequal_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ugreaterthanequal_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.ugreaterthanequal_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.iequal_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.iequal_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.iequal_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.iequal_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.iequal_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicaland_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicaland_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicaland_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicaland_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicaland_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalor_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalor_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalor_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalor_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalor_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalequal_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalequal_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalequal_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalequal_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalequal_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalnotequal_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalnotequal_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalnotequal_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalnotequal_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalnotequal_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.snegate_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.snegate_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.snegate_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.snegate_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.snegate_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.not_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.not_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.not_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.not_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.not_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalnot_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalnot_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalnot_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalnot_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.logicalnot_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.select_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.select_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.select_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.select_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.select_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.vector_related_vert
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.vector_related_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.vector_related_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.vector_related_geom
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop.vector_related_frag
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop_opquantize.infinities
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop_opquantize.propagated_nans
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop_opquantize.flush_to_zero
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop_opquantize.exact
+dEQP-VK.spirv_assembly.instruction.graphics.opspecconstantop_opquantize.rounded
+dEQP-VK.spirv_assembly.instruction.graphics.barrier.in_function
+dEQP-VK.spirv_assembly.instruction.graphics.barrier.in_switch
+dEQP-VK.spirv_assembly.instruction.graphics.barrier.in_if
+dEQP-VK.spirv_assembly.instruction.graphics.barrier.after_divergent_if
+dEQP-VK.spirv_assembly.instruction.graphics.barrier.in_loop
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.same_decoration_group_on_multiple_types_vert
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.same_decoration_group_on_multiple_types_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.same_decoration_group_on_multiple_types_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.same_decoration_group_on_multiple_types_geom
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.same_decoration_group_on_multiple_types_frag
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.empty_decoration_group_vert
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.empty_decoration_group_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.empty_decoration_group_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.empty_decoration_group_geom
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.empty_decoration_group_frag
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.one_element_decoration_group_vert
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.one_element_decoration_group_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.one_element_decoration_group_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.one_element_decoration_group_geom
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.one_element_decoration_group_frag
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.multiple_elements_decoration_group_vert
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.multiple_elements_decoration_group_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.multiple_elements_decoration_group_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.multiple_elements_decoration_group_geom
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.multiple_elements_decoration_group_frag
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.multiple_decoration_groups_on_same_variable_vert
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.multiple_decoration_groups_on_same_variable_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.multiple_decoration_groups_on_same_variable_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.multiple_decoration_groups_on_same_variable_geom
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.multiple_decoration_groups_on_same_variable_frag
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.same_decoration_group_multiple_times_vert
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.same_decoration_group_multiple_times_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.same_decoration_group_multiple_times_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.same_decoration_group_multiple_times_geom
+dEQP-VK.spirv_assembly.instruction.graphics.decoration_group.same_decoration_group_multiple_times_frag
+dEQP-VK.spirv_assembly.instruction.graphics.frem.frem_vert
+dEQP-VK.spirv_assembly.instruction.graphics.frem.frem_tessc
+dEQP-VK.spirv_assembly.instruction.graphics.frem.frem_tesse
+dEQP-VK.spirv_assembly.instruction.graphics.frem.frem_geom
+dEQP-VK.spirv_assembly.instruction.graphics.frem.frem_frag
+dEQP-VK.glsl.arrays.constructor.float3_vertex
+dEQP-VK.glsl.arrays.constructor.float3_fragment
+dEQP-VK.glsl.arrays.constructor.float4_vertex
+dEQP-VK.glsl.arrays.constructor.float4_fragment
+dEQP-VK.glsl.arrays.constructor.int3_vertex
+dEQP-VK.glsl.arrays.constructor.int3_fragment
+dEQP-VK.glsl.arrays.constructor.int4_vertex
+dEQP-VK.glsl.arrays.constructor.int4_fragment
+dEQP-VK.glsl.arrays.constructor.bool3_vertex
+dEQP-VK.glsl.arrays.constructor.bool3_fragment
+dEQP-VK.glsl.arrays.constructor.bool4_vertex
+dEQP-VK.glsl.arrays.constructor.bool4_fragment
+dEQP-VK.glsl.arrays.constructor.struct3_vertex
+dEQP-VK.glsl.arrays.constructor.struct3_fragment
+dEQP-VK.glsl.arrays.constructor.struct4_vertex
+dEQP-VK.glsl.arrays.constructor.struct4_fragment
+dEQP-VK.glsl.arrays.constructor.float_vec3_vertex
+dEQP-VK.glsl.arrays.constructor.float_vec3_fragment
+dEQP-VK.glsl.arrays.constructor.int_vec3_vertex
+dEQP-VK.glsl.arrays.constructor.int_vec3_fragment
+dEQP-VK.glsl.arrays.constructor.bool_vec3_vertex
+dEQP-VK.glsl.arrays.constructor.bool_vec3_fragment
+dEQP-VK.glsl.arrays.constructor.float_mat3_vertex
+dEQP-VK.glsl.arrays.constructor.float_mat3_fragment
+dEQP-VK.glsl.arrays.constructor.int_mat3_vertex
+dEQP-VK.glsl.arrays.constructor.int_mat3_fragment
+dEQP-VK.glsl.arrays.constructor.bool_mat3_vertex
+dEQP-VK.glsl.arrays.constructor.bool_mat3_fragment
+dEQP-VK.glsl.arrays.return.float_vertex
+dEQP-VK.glsl.arrays.return.float_fragment
+dEQP-VK.glsl.arrays.return.int_vertex
+dEQP-VK.glsl.arrays.return.int_fragment
+dEQP-VK.glsl.arrays.return.bool_vertex
+dEQP-VK.glsl.arrays.return.bool_fragment
+dEQP-VK.glsl.arrays.return.float_vec3_vertex
+dEQP-VK.glsl.arrays.return.float_vec3_fragment
+dEQP-VK.glsl.arrays.return.struct_vertex
+dEQP-VK.glsl.arrays.return.struct_fragment
+dEQP-VK.glsl.arrays.return.int_vec3_vertex
+dEQP-VK.glsl.arrays.return.int_vec3_fragment
+dEQP-VK.glsl.arrays.return.bool_vec3_vertex
+dEQP-VK.glsl.arrays.return.bool_vec3_fragment
+dEQP-VK.glsl.arrays.return.float_mat3_vertex
+dEQP-VK.glsl.arrays.return.float_mat3_fragment
+dEQP-VK.glsl.arrays.return.int_mat3_vertex
+dEQP-VK.glsl.arrays.return.int_mat3_fragment
+dEQP-VK.glsl.arrays.return.bool_mat3_vertex
+dEQP-VK.glsl.arrays.return.bool_mat3_fragment
+dEQP-VK.glsl.arrays.unnamed_parameter.float_vertex
+dEQP-VK.glsl.arrays.unnamed_parameter.float_fragment
+dEQP-VK.glsl.arrays.unnamed_parameter.int_vertex
+dEQP-VK.glsl.arrays.unnamed_parameter.int_fragment
+dEQP-VK.glsl.arrays.unnamed_parameter.bool_vertex
+dEQP-VK.glsl.arrays.unnamed_parameter.bool_fragment
+dEQP-VK.glsl.arrays.unnamed_parameter.struct_vertex
+dEQP-VK.glsl.arrays.unnamed_parameter.struct_fragment
+dEQP-VK.glsl.arrays.unnamed_parameter.float_vec3_vertex
+dEQP-VK.glsl.arrays.unnamed_parameter.float_vec3_fragment
+dEQP-VK.glsl.arrays.unnamed_parameter.int_vec3_vertex
+dEQP-VK.glsl.arrays.unnamed_parameter.int_vec3_fragment
+dEQP-VK.glsl.arrays.unnamed_parameter.bool_vec3_vertex
+dEQP-VK.glsl.arrays.unnamed_parameter.bool_vec3_fragment
+dEQP-VK.glsl.arrays.unnamed_parameter.float_mat3_vertex
+dEQP-VK.glsl.arrays.unnamed_parameter.float_mat3_fragment
+dEQP-VK.glsl.arrays.unnamed_parameter.int_mat3_vertex
+dEQP-VK.glsl.arrays.unnamed_parameter.int_mat3_fragment
+dEQP-VK.glsl.arrays.unnamed_parameter.bool_mat3_vertex
+dEQP-VK.glsl.arrays.unnamed_parameter.bool_mat3_fragment
+dEQP-VK.glsl.arrays.declaration.implicit_size_float_vertex
+dEQP-VK.glsl.arrays.declaration.implicit_size_float_fragment
+dEQP-VK.glsl.arrays.declaration.implicit_size_int_vertex
+dEQP-VK.glsl.arrays.declaration.implicit_size_int_fragment
+dEQP-VK.glsl.arrays.declaration.implicit_size_bool_vertex
+dEQP-VK.glsl.arrays.declaration.implicit_size_bool_fragment
+dEQP-VK.glsl.arrays.declaration.implicit_size_struct_vertex
+dEQP-VK.glsl.arrays.declaration.implicit_size_struct_fragment
+dEQP-VK.glsl.arrays.declaration.implicit_size_float_vec3_vertex
+dEQP-VK.glsl.arrays.declaration.implicit_size_float_vec3_fragment
+dEQP-VK.glsl.arrays.declaration.implicit_size_int_ivec3_vertex
+dEQP-VK.glsl.arrays.declaration.implicit_size_int_ivec3_fragment
+dEQP-VK.glsl.arrays.declaration.implicit_size_bool_bvec3_vertex
+dEQP-VK.glsl.arrays.declaration.implicit_size_bool_bvec3_fragment
+dEQP-VK.glsl.arrays.declaration.implicit_size_float_mat3_vertex
+dEQP-VK.glsl.arrays.declaration.implicit_size_float_mat3_fragment
+dEQP-VK.glsl.arrays.declaration.implicit_size_int_mat3_vertex
+dEQP-VK.glsl.arrays.declaration.implicit_size_int_mat3_fragment
+dEQP-VK.glsl.arrays.declaration.implicit_size_bool_mat3_vertex
+dEQP-VK.glsl.arrays.declaration.implicit_size_bool_mat3_fragment
+dEQP-VK.glsl.arrays.declaration.constant_expression_array_size_vertex
+dEQP-VK.glsl.arrays.declaration.constant_expression_array_size_fragment
+dEQP-VK.glsl.arrays.declaration.constant_expression_array_access_vertex
+dEQP-VK.glsl.arrays.declaration.constant_expression_array_access_fragment
+dEQP-VK.glsl.arrays.declaration.dynamic_expression_array_access_vertex
+dEQP-VK.glsl.arrays.declaration.dynamic_expression_array_access_fragment
+dEQP-VK.glsl.arrays.declaration.multiple_declarations_single_statement_explicit_vertex
+dEQP-VK.glsl.arrays.declaration.multiple_declarations_single_statement_explicit_fragment
+dEQP-VK.glsl.arrays.declaration.multiple_declarations_single_statement_implicit_vertex
+dEQP-VK.glsl.arrays.declaration.multiple_declarations_single_statement_implicit_fragment
+dEQP-VK.glsl.arrays.length.float_vertex
+dEQP-VK.glsl.arrays.length.float_fragment
+dEQP-VK.glsl.arrays.length.int_vertex
+dEQP-VK.glsl.arrays.length.int_fragment
+dEQP-VK.glsl.arrays.length.bool_vertex
+dEQP-VK.glsl.arrays.length.bool_fragment
+dEQP-VK.glsl.arrays.length.struct_vertex
+dEQP-VK.glsl.arrays.length.struct_fragment
+dEQP-VK.glsl.conditionals.if.single_statement_vertex
+dEQP-VK.glsl.conditionals.if.single_statement_fragment
+dEQP-VK.glsl.conditionals.if.compound_statement_vertex
+dEQP-VK.glsl.conditionals.if.compound_statement_fragment
+dEQP-VK.glsl.conditionals.if.sequence_statements_vertex
+dEQP-VK.glsl.conditionals.if.sequence_statements_fragment
+dEQP-VK.glsl.conditionals.if.sequence_condition_vertex
+dEQP-VK.glsl.conditionals.if.sequence_condition_fragment
+dEQP-VK.glsl.conditionals.if.complex_condition_vertex
+dEQP-VK.glsl.conditionals.if.complex_condition_fragment
+dEQP-VK.glsl.conditionals.if.if_else_vertex
+dEQP-VK.glsl.conditionals.if.if_else_fragment
+dEQP-VK.glsl.conditionals.if.if_elseif_vertex
+dEQP-VK.glsl.conditionals.if.if_elseif_fragment
+dEQP-VK.glsl.conditionals.if.if_elseif_else_vertex
+dEQP-VK.glsl.conditionals.if.if_elseif_else_fragment
+dEQP-VK.glsl.conditionals.if.mixed_if_elseif_else_vertex
+dEQP-VK.glsl.conditionals.if.mixed_if_elseif_else_fragment
+dEQP-VK.glsl.constant_expressions.trivial.float_vertex
+dEQP-VK.glsl.constant_expressions.trivial.float_fragment
+dEQP-VK.glsl.constant_expressions.trivial.int_vertex
+dEQP-VK.glsl.constant_expressions.trivial.int_fragment
+dEQP-VK.glsl.constant_expressions.trivial.bool_vertex
+dEQP-VK.glsl.constant_expressions.trivial.bool_fragment
+dEQP-VK.glsl.constant_expressions.trivial.cast_vertex
+dEQP-VK.glsl.constant_expressions.trivial.cast_fragment
+dEQP-VK.glsl.constant_expressions.operators.math_float_vertex
+dEQP-VK.glsl.constant_expressions.operators.math_float_fragment
+dEQP-VK.glsl.constant_expressions.operators.math_vec_vertex
+dEQP-VK.glsl.constant_expressions.operators.math_vec_fragment
+dEQP-VK.glsl.constant_expressions.operators.math_int_vertex
+dEQP-VK.glsl.constant_expressions.operators.math_int_fragment
+dEQP-VK.glsl.constant_expressions.operators.math_ivec_vertex
+dEQP-VK.glsl.constant_expressions.operators.math_ivec_fragment
+dEQP-VK.glsl.constant_expressions.operators.math_mat_vertex
+dEQP-VK.glsl.constant_expressions.operators.math_mat_fragment
+dEQP-VK.glsl.constant_expressions.operators.bitwise_vertex
+dEQP-VK.glsl.constant_expressions.operators.bitwise_fragment
+dEQP-VK.glsl.constant_expressions.operators.logical_vertex
+dEQP-VK.glsl.constant_expressions.operators.logical_fragment
+dEQP-VK.glsl.constant_expressions.operators.compare_vertex
+dEQP-VK.glsl.constant_expressions.operators.compare_fragment
+dEQP-VK.glsl.constant_expressions.operators.selection_vertex
+dEQP-VK.glsl.constant_expressions.operators.selection_fragment
+dEQP-VK.glsl.constant_expressions.complex_types.struct_vertex
+dEQP-VK.glsl.constant_expressions.complex_types.struct_fragment
+dEQP-VK.glsl.constant_expressions.complex_types.nested_struct_vertex
+dEQP-VK.glsl.constant_expressions.complex_types.nested_struct_fragment
+dEQP-VK.glsl.constant_expressions.complex_types.array_size_vertex
+dEQP-VK.glsl.constant_expressions.complex_types.array_size_fragment
+dEQP-VK.glsl.constant_expressions.complex_types.array_length_vertex
+dEQP-VK.glsl.constant_expressions.complex_types.array_length_fragment
+dEQP-VK.glsl.constant_expressions.complex_types.array_vertex
+dEQP-VK.glsl.constant_expressions.complex_types.array_fragment
+dEQP-VK.glsl.constant_expressions.other.switch_case_vertex
+dEQP-VK.glsl.constant_expressions.other.switch_case_fragment
+dEQP-VK.glsl.constant_expressions.other.nested_builtin_funcs_vertex
+dEQP-VK.glsl.constant_expressions.other.nested_builtin_funcs_fragment
+dEQP-VK.glsl.constant_expressions.other.complex_vertex
+dEQP-VK.glsl.constant_expressions.other.complex_fragment
+dEQP-VK.glsl.constants.float_input_vertex
+dEQP-VK.glsl.constants.float_input_fragment
+dEQP-VK.glsl.constants.float_uniform_vertex
+dEQP-VK.glsl.constants.float_uniform_fragment
+dEQP-VK.glsl.constants.float_0_vertex
+dEQP-VK.glsl.constants.float_0_fragment
+dEQP-VK.glsl.constants.float_1_vertex
+dEQP-VK.glsl.constants.float_1_fragment
+dEQP-VK.glsl.constants.float_2_vertex
+dEQP-VK.glsl.constants.float_2_fragment
+dEQP-VK.glsl.constants.float_3_vertex
+dEQP-VK.glsl.constants.float_3_fragment
+dEQP-VK.glsl.constants.float_4_vertex
+dEQP-VK.glsl.constants.float_4_fragment
+dEQP-VK.glsl.constants.float_5_vertex
+dEQP-VK.glsl.constants.float_5_fragment
+dEQP-VK.glsl.constants.float_6_vertex
+dEQP-VK.glsl.constants.float_6_fragment
+dEQP-VK.glsl.constants.float_7_vertex
+dEQP-VK.glsl.constants.float_7_fragment
+dEQP-VK.glsl.constants.float_8_vertex
+dEQP-VK.glsl.constants.float_8_fragment
+dEQP-VK.glsl.constants.float_f_suffix_0_vertex
+dEQP-VK.glsl.constants.float_f_suffix_0_fragment
+dEQP-VK.glsl.constants.float_f_suffix_1_vertex
+dEQP-VK.glsl.constants.float_f_suffix_1_fragment
+dEQP-VK.glsl.constants.int_0_vertex
+dEQP-VK.glsl.constants.int_0_fragment
+dEQP-VK.glsl.constants.int_1_vertex
+dEQP-VK.glsl.constants.int_1_fragment
+dEQP-VK.glsl.constants.int_2_vertex
+dEQP-VK.glsl.constants.int_2_fragment
+dEQP-VK.glsl.constants.int_3_vertex
+dEQP-VK.glsl.constants.int_3_fragment
+dEQP-VK.glsl.constants.int_4_vertex
+dEQP-VK.glsl.constants.int_4_fragment
+dEQP-VK.glsl.constants.bool_0_vertex
+dEQP-VK.glsl.constants.bool_0_fragment
+dEQP-VK.glsl.constants.bool_1_vertex
+dEQP-VK.glsl.constants.bool_1_fragment
+dEQP-VK.glsl.constants.const_float_global_vertex
+dEQP-VK.glsl.constants.const_float_global_fragment
+dEQP-VK.glsl.constants.const_float_main_vertex
+dEQP-VK.glsl.constants.const_float_main_fragment
+dEQP-VK.glsl.constants.const_float_function_vertex
+dEQP-VK.glsl.constants.const_float_function_fragment
+dEQP-VK.glsl.constants.const_float_scope_vertex
+dEQP-VK.glsl.constants.const_float_scope_fragment
+dEQP-VK.glsl.constants.const_float_scope_shawdowing_1_vertex
+dEQP-VK.glsl.constants.const_float_scope_shawdowing_1_fragment
+dEQP-VK.glsl.constants.const_float_scope_shawdowing_2_vertex
+dEQP-VK.glsl.constants.const_float_scope_shawdowing_2_fragment
+dEQP-VK.glsl.constants.const_float_scope_shawdowing_3_vertex
+dEQP-VK.glsl.constants.const_float_scope_shawdowing_3_fragment
+dEQP-VK.glsl.constants.const_float_scope_shawdowing_4_vertex
+dEQP-VK.glsl.constants.const_float_scope_shawdowing_4_fragment
+dEQP-VK.glsl.constants.const_float_operations_with_const_vertex
+dEQP-VK.glsl.constants.const_float_operations_with_const_fragment
+dEQP-VK.glsl.constants.const_float_assignment_1_vertex
+dEQP-VK.glsl.constants.const_float_assignment_1_fragment
+dEQP-VK.glsl.constants.const_float_assignment_2_vertex
+dEQP-VK.glsl.constants.const_float_assignment_2_fragment
+dEQP-VK.glsl.constants.const_float_assignment_3_vertex
+dEQP-VK.glsl.constants.const_float_assignment_3_fragment
+dEQP-VK.glsl.constants.const_float_assignment_4_vertex
+dEQP-VK.glsl.constants.const_float_assignment_4_fragment
+dEQP-VK.glsl.constants.const_float_from_int_vertex
+dEQP-VK.glsl.constants.const_float_from_int_fragment
+dEQP-VK.glsl.constants.const_float_from_vec2_vertex
+dEQP-VK.glsl.constants.const_float_from_vec2_fragment
+dEQP-VK.glsl.constants.const_float_from_vec3_vertex
+dEQP-VK.glsl.constants.const_float_from_vec3_fragment
+dEQP-VK.glsl.constants.const_float_from_vec4_vertex
+dEQP-VK.glsl.constants.const_float_from_vec4_fragment
+dEQP-VK.glsl.constants.int_decimal_vertex
+dEQP-VK.glsl.constants.int_decimal_fragment
+dEQP-VK.glsl.constants.int_octal_vertex
+dEQP-VK.glsl.constants.int_octal_fragment
+dEQP-VK.glsl.constants.int_hexadecimal_0_vertex
+dEQP-VK.glsl.constants.int_hexadecimal_0_fragment
+dEQP-VK.glsl.constants.int_hexadecimal_1_vertex
+dEQP-VK.glsl.constants.int_hexadecimal_1_fragment
+dEQP-VK.glsl.constants.uint_decimal_0_vertex
+dEQP-VK.glsl.constants.uint_decimal_0_fragment
+dEQP-VK.glsl.constants.uint_decimal_1_vertex
+dEQP-VK.glsl.constants.uint_decimal_1_fragment
+dEQP-VK.glsl.constants.uint_decimal_2_vertex
+dEQP-VK.glsl.constants.uint_decimal_2_fragment
+dEQP-VK.glsl.constants.uint_decimal_3_vertex
+dEQP-VK.glsl.constants.uint_decimal_3_fragment
+dEQP-VK.glsl.constants.uint_octal_0_vertex
+dEQP-VK.glsl.constants.uint_octal_0_fragment
+dEQP-VK.glsl.constants.uint_octal_1_vertex
+dEQP-VK.glsl.constants.uint_octal_1_fragment
+dEQP-VK.glsl.constants.uint_hexadecimal_0_vertex
+dEQP-VK.glsl.constants.uint_hexadecimal_0_fragment
+dEQP-VK.glsl.constants.uint_hexadecimal_1_vertex
+dEQP-VK.glsl.constants.uint_hexadecimal_1_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.float_to_float_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.float_to_float_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.float_to_int_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.float_to_int_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.float_to_bool_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.float_to_bool_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.int_to_float_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.int_to_float_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.int_to_int_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.int_to_int_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.int_to_bool_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.int_to_bool_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.uint_to_float_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.uint_to_float_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.uint_to_int_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.uint_to_int_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.uint_to_bool_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.uint_to_bool_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.bool_to_float_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.bool_to_float_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.bool_to_int_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.bool_to_int_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.bool_to_bool_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.bool_to_bool_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.float_to_uint_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.float_to_uint_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.int_to_uint_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.int_to_uint_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.uint_to_uint_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.uint_to_uint_fragment
+dEQP-VK.glsl.conversions.scalar_to_scalar.bool_to_uint_vertex
+dEQP-VK.glsl.conversions.scalar_to_scalar.bool_to_uint_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_vec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_vec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_vec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_vec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_vec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_vec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_ivec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_ivec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_ivec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_ivec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_ivec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_ivec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_bvec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_bvec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_bvec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_bvec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_bvec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_bvec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_vec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_vec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_vec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_vec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_vec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_vec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_ivec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_ivec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_ivec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_ivec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_ivec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_ivec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_bvec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_bvec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_bvec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_bvec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_bvec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_bvec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_vec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_vec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_vec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_vec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_vec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_vec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_ivec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_ivec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_ivec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_ivec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_ivec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_ivec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_bvec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_bvec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_bvec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_bvec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_bvec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_bvec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_vec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_vec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_vec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_vec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_vec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_vec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_ivec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_ivec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_ivec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_ivec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_ivec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_ivec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_bvec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_bvec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_bvec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_bvec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_bvec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_bvec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_uvec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_uvec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_uvec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_uvec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_uvec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.float_to_uvec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_uvec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_uvec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_uvec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_uvec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_uvec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.int_to_uvec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_uvec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_uvec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_uvec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_uvec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_uvec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.uint_to_uvec4_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_uvec2_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_uvec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_uvec3_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_uvec3_fragment
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_uvec4_vertex
+dEQP-VK.glsl.conversions.scalar_to_vector.bool_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.vec2_to_float_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.vec2_to_float_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.vec2_to_int_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.vec2_to_int_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.vec2_to_bool_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.vec2_to_bool_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.vec3_to_float_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.vec3_to_float_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.vec3_to_int_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.vec3_to_int_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.vec3_to_bool_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.vec3_to_bool_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.vec4_to_float_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.vec4_to_float_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.vec4_to_int_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.vec4_to_int_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.vec4_to_bool_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.vec4_to_bool_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec2_to_float_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec2_to_float_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec2_to_int_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec2_to_int_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec2_to_bool_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec2_to_bool_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec3_to_float_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec3_to_float_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec3_to_int_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec3_to_int_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec3_to_bool_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec3_to_bool_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec4_to_float_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec4_to_float_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec4_to_int_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec4_to_int_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec4_to_bool_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec4_to_bool_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec2_to_float_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec2_to_float_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec2_to_int_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec2_to_int_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec2_to_bool_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec2_to_bool_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec3_to_float_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec3_to_float_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec3_to_int_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec3_to_int_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec3_to_bool_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec3_to_bool_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec4_to_float_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec4_to_float_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec4_to_int_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec4_to_int_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec4_to_bool_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec4_to_bool_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec2_to_float_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec2_to_float_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec2_to_int_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec2_to_int_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec2_to_bool_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec2_to_bool_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec3_to_float_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec3_to_float_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec3_to_int_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec3_to_int_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec3_to_bool_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec3_to_bool_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec4_to_float_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec4_to_float_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec4_to_int_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec4_to_int_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec4_to_bool_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec4_to_bool_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.vec2_to_uint_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.vec2_to_uint_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.vec3_to_uint_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.vec3_to_uint_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.vec4_to_uint_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.vec4_to_uint_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec2_to_uint_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec2_to_uint_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec3_to_uint_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec3_to_uint_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec4_to_uint_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.ivec4_to_uint_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec2_to_uint_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec2_to_uint_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec3_to_uint_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec3_to_uint_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec4_to_uint_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.uvec4_to_uint_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec2_to_uint_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec2_to_uint_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec3_to_uint_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec3_to_uint_fragment
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec4_to_uint_vertex
+dEQP-VK.glsl.conversions.vector_to_scalar.bvec4_to_uint_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec4_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec4_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec4_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec4_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec3_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec3_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec3_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec3_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec2_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec2_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec2_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec2_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec2_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec2_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec2_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec2_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec2_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec2_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec2_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec2_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec2_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec2_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec2_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec2_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec2_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec2_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec2_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec2_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec2_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec2_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec2_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec2_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.vec2_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.vec2_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.ivec2_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.ivec2_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.uvec2_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.uvec2_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_to_vector.bvec2_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_to_vector.bvec2_to_uvec2_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat4_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat4_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat3_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat3_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat2_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.float_to_mat2_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat4_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat4_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat3_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat3_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat2_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.int_to_mat2_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat4_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat4_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat3_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat3_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat2_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.uint_to_mat2_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat4_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat4_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat3_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat3_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat2_vertex
+dEQP-VK.glsl.conversions.scalar_to_matrix.bool_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x3_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat4x2_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x4_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat3x2_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x4_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2x3_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_to_matrix.mat2_to_mat2_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_vec2_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_vec2_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_vec2_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_vec2_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_vec2_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_vec2_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec2_bvec2_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec2_bvec2_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec2_bvec2_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec2_bvec2_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec2_bvec2_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec2_bvec2_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_float_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_float_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_float_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_float_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_float_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_float_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_int_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_int_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_int_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_int_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_int_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_int_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_uint_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_uint_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_uint_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_uint_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_uint_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_uint_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_bool_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_bool_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_bool_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_bool_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_bool_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_bool_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_bool_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_bool_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_bool_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_bool_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_bool_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_bool_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_ivec2_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_ivec2_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_ivec2_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_ivec2_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_ivec2_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_ivec2_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_bvec2_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_bvec2_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_bvec2_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_bvec2_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_bvec2_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_bvec2_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec3_float_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec3_float_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec3_float_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec3_float_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec3_float_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec3_float_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec3_float_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec3_float_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec3_float_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec3_float_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec3_float_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec3_float_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_ivec2_int_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_ivec2_int_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_ivec2_int_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_ivec2_int_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_ivec2_int_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_ivec2_int_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_float_ivec2_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_float_ivec2_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_float_ivec2_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_float_ivec2_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_float_ivec2_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_float_ivec2_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_uvec3_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_uvec3_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_uvec3_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_uvec3_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_uvec3_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_uvec3_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_uvec2_bool_to_vec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_uvec2_bool_to_vec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_uvec2_bool_to_ivec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_uvec2_bool_to_ivec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_uvec2_bool_to_bvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_uvec2_bool_to_bvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_vec2_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_vec2_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec2_bvec2_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec2_bvec2_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_float_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_float_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_int_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_int_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_uint_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_uint_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_bool_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_bool_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_bool_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_bool_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_ivec2_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_ivec2_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_bvec2_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_bvec2_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec3_float_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec3_float_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec3_float_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec3_float_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_ivec2_int_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_ivec2_int_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_float_ivec2_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_float_ivec2_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_uvec3_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_uvec3_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_uvec2_bool_to_uvec4_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_uvec2_bool_to_uvec4_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_bool_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_bool_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_bool_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_bool_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_bool_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_bool_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec2_float_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec2_float_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec2_float_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec2_float_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec2_float_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec2_float_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec2_int_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec2_int_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec2_int_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec2_int_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec2_int_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec2_int_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_ivec2_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_ivec2_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_ivec2_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_ivec2_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_ivec2_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_ivec2_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_uvec2_to_vec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_uvec2_to_vec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_uvec2_to_ivec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_uvec2_to_ivec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_uvec2_to_bvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_uvec2_to_bvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_float_float_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_int_int_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_uint_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_bool_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_float_int_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.vec2_bool_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.vec2_bool_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec2_float_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec2_float_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bvec2_int_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bvec2_int_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_ivec2_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_ivec2_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_uvec2_to_uvec3_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_uvec2_to_uvec3_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_float_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_float_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_float_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_float_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_float_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_float_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_int_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_int_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_int_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_int_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_int_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_int_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_int_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_int_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_int_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_int_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_int_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_int_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_bool_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_bool_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_bool_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_bool_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_bool_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_bool_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_bool_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_bool_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_bool_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_bool_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_bool_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_bool_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_uint_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_uint_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_uint_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_uint_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_uint_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_uint_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_float_to_vec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_float_to_vec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_float_to_ivec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_float_to_ivec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_float_to_bvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_float_to_bvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_float_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_float_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_int_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_int_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_uint_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.bool_bool_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_int_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_int_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.float_bool_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.float_bool_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_bool_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_bool_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.int_uint_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.int_uint_to_uvec2_fragment
+dEQP-VK.glsl.conversions.vector_combine.uint_float_to_uvec2_vertex
+dEQP-VK.glsl.conversions.vector_combine.uint_float_to_uvec2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec2_vec2_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec2_vec2_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bvec2_bvec2_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bvec2_bvec2_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.float_float_float_float_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.float_float_float_float_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.int_int_int_int_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.int_int_int_int_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.uint_uint_uint_uint_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.uint_uint_uint_uint_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_bool_bool_bool_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_bool_bool_bool_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_bool_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_bool_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec2_ivec2_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec2_ivec2_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec2_bvec2_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec2_bvec2_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bvec3_float_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bvec3_float_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec3_float_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec3_float_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.int_ivec2_int_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.int_ivec2_int_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_ivec2_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_ivec2_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.float_uvec3_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.float_uvec3_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.int_uvec2_bool_to_mat2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.int_uvec2_bool_to_mat2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec3_vec3_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec3_vec3_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bvec3_bvec3_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bvec3_bvec3_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.float_float_float_float_float_float_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.float_float_float_float_float_float_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.int_int_int_int_int_int_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.int_int_int_int_int_int_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_bool_bool_bool_bool_bool_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_bool_bool_bool_bool_bool_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_bool_float_int_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_bool_float_int_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec3_ivec3_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec3_ivec3_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec2_bvec4_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec2_bvec4_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bvec3_float_ivec2_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bvec3_float_ivec2_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec3_float_bvec2_to_mat2x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec3_float_bvec2_to_mat2x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec3_vec3_vec2_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec3_vec3_vec2_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.ivec3_ivec3_ivec2_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.ivec3_ivec3_ivec2_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec2_ivec2_float_float_int_bool_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec2_ivec2_float_float_int_bool_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_vec2_bool_bvec2_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_vec2_bool_bvec2_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_bvec2_int_vec4_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_bvec2_int_vec4_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.float_bvec4_ivec2_bool_to_mat2x4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.float_bvec4_ivec2_bool_to_mat2x4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec3_vec3_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec3_vec3_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bvec3_bvec3_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bvec3_bvec3_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.float_float_float_float_float_float_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.float_float_float_float_float_float_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.int_int_int_int_int_int_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.int_int_int_int_int_int_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_bool_bool_bool_bool_bool_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_bool_bool_bool_bool_bool_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_bool_float_int_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_bool_float_int_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec3_ivec3_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec3_ivec3_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec2_bvec4_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec2_bvec4_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bvec3_float_ivec2_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bvec3_float_ivec2_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec3_float_bvec2_to_mat3x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec3_float_bvec2_to_mat3x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec3_vec3_vec3_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec3_vec3_vec3_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.ivec3_ivec3_ivec3_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.ivec3_ivec3_ivec3_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec2_ivec2_float_float_int_bool_bool_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec2_ivec2_float_float_int_bool_bool_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_vec2_bool_bvec2_float_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_vec2_bool_bvec2_float_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_bvec2_int_vec4_bool_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_bvec2_int_vec4_bool_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.float_bvec4_ivec2_bool_bool_to_mat3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.float_bvec4_ivec2_bool_bool_to_mat3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec4_vec4_vec4_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec4_vec4_vec4_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.ivec4_ivec4_ivec4_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.ivec4_ivec4_ivec4_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec2_ivec2_float_float_float_int_int_bool_bool_bool_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec2_ivec2_float_float_float_int_int_bool_bool_bool_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_vec3_bool_bvec3_float_bool_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_vec3_bool_bvec3_float_bool_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_bvec4_int_vec4_bool_float_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_bvec4_int_vec4_bool_float_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.float_bvec4_ivec4_bool_bool_int_to_mat3x4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.float_bvec4_ivec4_bool_bool_int_to_mat3x4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec3_vec3_vec2_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec3_vec3_vec2_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.ivec3_ivec3_ivec2_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.ivec3_ivec3_ivec2_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec2_ivec2_float_float_int_bool_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec2_ivec2_float_float_int_bool_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_vec2_bool_bvec2_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_vec2_bool_bvec2_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_bvec2_int_vec4_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_bvec2_int_vec4_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.float_bvec4_ivec2_bool_to_mat4x2_vertex
+dEQP-VK.glsl.conversions.matrix_combine.float_bvec4_ivec2_bool_to_mat4x2_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec4_vec4_vec4_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec4_vec4_vec4_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.ivec4_ivec4_ivec4_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.ivec4_ivec4_ivec4_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec2_ivec2_float_float_float_int_int_bool_bool_bool_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec2_ivec2_float_float_float_int_int_bool_bool_bool_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_vec3_bool_bvec3_float_bool_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_float_int_vec3_bool_bvec3_float_bool_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bool_bvec4_int_vec4_bool_float_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bool_bvec4_int_vec4_bool_float_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.float_bvec4_ivec4_bool_bool_int_to_mat4x3_vertex
+dEQP-VK.glsl.conversions.matrix_combine.float_bvec4_ivec4_bool_bool_int_to_mat4x3_fragment
+dEQP-VK.glsl.conversions.matrix_combine.vec4_vec4_vec4_vec4_to_mat4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.vec4_vec4_vec4_vec4_to_mat4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.ivec4_ivec4_ivec4_ivec4_to_mat4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.ivec4_ivec4_ivec4_ivec4_to_mat4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.bvec4_bvec4_bvec4_bvec4_to_mat4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.bvec4_bvec4_bvec4_bvec4_to_mat4_fragment
+dEQP-VK.glsl.conversions.matrix_combine.float_ivec3_bvec3_vec4_ivec2_float_vec2_to_mat4_vertex
+dEQP-VK.glsl.conversions.matrix_combine.float_ivec3_bvec3_vec4_ivec2_float_vec2_to_mat4_fragment
+dEQP-VK.glsl.functions.datatypes.float_float_vertex
+dEQP-VK.glsl.functions.datatypes.float_float_fragment
+dEQP-VK.glsl.functions.datatypes.float_vec2_vertex
+dEQP-VK.glsl.functions.datatypes.float_vec2_fragment
+dEQP-VK.glsl.functions.datatypes.float_vec3_vertex
+dEQP-VK.glsl.functions.datatypes.float_vec3_fragment
+dEQP-VK.glsl.functions.datatypes.float_vec4_vertex
+dEQP-VK.glsl.functions.datatypes.float_vec4_fragment
+dEQP-VK.glsl.functions.datatypes.float_mat2_vertex
+dEQP-VK.glsl.functions.datatypes.float_mat2_fragment
+dEQP-VK.glsl.functions.datatypes.float_mat3_vertex
+dEQP-VK.glsl.functions.datatypes.float_mat3_fragment
+dEQP-VK.glsl.functions.datatypes.float_mat4_vertex
+dEQP-VK.glsl.functions.datatypes.float_mat4_fragment
+dEQP-VK.glsl.functions.datatypes.int_int_vertex
+dEQP-VK.glsl.functions.datatypes.int_int_fragment
+dEQP-VK.glsl.functions.datatypes.int_ivec2_vertex
+dEQP-VK.glsl.functions.datatypes.int_ivec2_fragment
+dEQP-VK.glsl.functions.datatypes.int_ivec3_vertex
+dEQP-VK.glsl.functions.datatypes.int_ivec3_fragment
+dEQP-VK.glsl.functions.datatypes.int_ivec4_vertex
+dEQP-VK.glsl.functions.datatypes.int_ivec4_fragment
+dEQP-VK.glsl.functions.datatypes.uint_uint_vertex
+dEQP-VK.glsl.functions.datatypes.uint_uint_fragment
+dEQP-VK.glsl.functions.datatypes.uint_uvec2_vertex
+dEQP-VK.glsl.functions.datatypes.uint_uvec2_fragment
+dEQP-VK.glsl.functions.datatypes.uint_uvec3_vertex
+dEQP-VK.glsl.functions.datatypes.uint_uvec3_fragment
+dEQP-VK.glsl.functions.datatypes.uint_uvec4_vertex
+dEQP-VK.glsl.functions.datatypes.uint_uvec4_fragment
+dEQP-VK.glsl.functions.datatypes.bool_bool_vertex
+dEQP-VK.glsl.functions.datatypes.bool_bool_fragment
+dEQP-VK.glsl.functions.datatypes.bool_bvec2_vertex
+dEQP-VK.glsl.functions.datatypes.bool_bvec2_fragment
+dEQP-VK.glsl.functions.datatypes.bool_bvec3_vertex
+dEQP-VK.glsl.functions.datatypes.bool_bvec3_fragment
+dEQP-VK.glsl.functions.datatypes.bool_bvec4_vertex
+dEQP-VK.glsl.functions.datatypes.bool_bvec4_fragment
+dEQP-VK.glsl.functions.datatypes.mat2_vertex
+dEQP-VK.glsl.functions.datatypes.mat2_fragment
+dEQP-VK.glsl.functions.datatypes.mat2x3_vertex
+dEQP-VK.glsl.functions.datatypes.mat2x3_fragment
+dEQP-VK.glsl.functions.datatypes.mat2x4_vertex
+dEQP-VK.glsl.functions.datatypes.mat2x4_fragment
+dEQP-VK.glsl.functions.datatypes.mat3x2_vertex
+dEQP-VK.glsl.functions.datatypes.mat3x2_fragment
+dEQP-VK.glsl.functions.datatypes.mat3_vertex
+dEQP-VK.glsl.functions.datatypes.mat3_fragment
+dEQP-VK.glsl.functions.datatypes.mat3x4_vertex
+dEQP-VK.glsl.functions.datatypes.mat3x4_fragment
+dEQP-VK.glsl.functions.datatypes.mat4x2_vertex
+dEQP-VK.glsl.functions.datatypes.mat4x2_fragment
+dEQP-VK.glsl.functions.datatypes.mat4x3_vertex
+dEQP-VK.glsl.functions.datatypes.mat4x3_fragment
+dEQP-VK.glsl.functions.datatypes.mat4_vertex
+dEQP-VK.glsl.functions.datatypes.mat4_fragment
+dEQP-VK.glsl.functions.datatypes.float_struct_vertex
+dEQP-VK.glsl.functions.datatypes.float_struct_fragment
+dEQP-VK.glsl.functions.datatypes.struct_struct_vertex
+dEQP-VK.glsl.functions.datatypes.struct_struct_fragment
+dEQP-VK.glsl.functions.datatypes.struct_nested_struct_vertex
+dEQP-VK.glsl.functions.datatypes.struct_nested_struct_fragment
+dEQP-VK.glsl.functions.qualifiers.in_float_vertex
+dEQP-VK.glsl.functions.qualifiers.in_float_fragment
+dEQP-VK.glsl.functions.qualifiers.out_float_vertex
+dEQP-VK.glsl.functions.qualifiers.out_float_fragment
+dEQP-VK.glsl.functions.qualifiers.inout_float_vertex
+dEQP-VK.glsl.functions.qualifiers.inout_float_fragment
+dEQP-VK.glsl.functions.qualifiers.in_lowp_float_vertex
+dEQP-VK.glsl.functions.qualifiers.in_lowp_float_fragment
+dEQP-VK.glsl.functions.qualifiers.out_lowp_float_vertex
+dEQP-VK.glsl.functions.qualifiers.out_lowp_float_fragment
+dEQP-VK.glsl.functions.qualifiers.inout_lowp_float_vertex
+dEQP-VK.glsl.functions.qualifiers.inout_lowp_float_fragment
+dEQP-VK.glsl.functions.qualifiers.in_highp_float_vertex
+dEQP-VK.glsl.functions.qualifiers.in_highp_float_fragment
+dEQP-VK.glsl.functions.qualifiers.out_highp_float_vertex
+dEQP-VK.glsl.functions.qualifiers.out_highp_float_fragment
+dEQP-VK.glsl.functions.qualifiers.inout_highp_float_vertex
+dEQP-VK.glsl.functions.qualifiers.inout_highp_float_fragment
+dEQP-VK.glsl.functions.qualifiers.const_float_vertex
+dEQP-VK.glsl.functions.qualifiers.const_float_fragment
+dEQP-VK.glsl.functions.qualifiers.const_in_float_vertex
+dEQP-VK.glsl.functions.qualifiers.const_in_float_fragment
+dEQP-VK.glsl.functions.qualifiers.in_int_vertex
+dEQP-VK.glsl.functions.qualifiers.in_int_fragment
+dEQP-VK.glsl.functions.qualifiers.out_int_vertex
+dEQP-VK.glsl.functions.qualifiers.out_int_fragment
+dEQP-VK.glsl.functions.qualifiers.inout_int_vertex
+dEQP-VK.glsl.functions.qualifiers.inout_int_fragment
+dEQP-VK.glsl.functions.qualifiers.in_lowp_int_vertex
+dEQP-VK.glsl.functions.qualifiers.in_lowp_int_fragment
+dEQP-VK.glsl.functions.qualifiers.out_lowp_int_vertex
+dEQP-VK.glsl.functions.qualifiers.out_lowp_int_fragment
+dEQP-VK.glsl.functions.qualifiers.inout_lowp_int_vertex
+dEQP-VK.glsl.functions.qualifiers.inout_lowp_int_fragment
+dEQP-VK.glsl.functions.qualifiers.in_highp_int_vertex
+dEQP-VK.glsl.functions.qualifiers.in_highp_int_fragment
+dEQP-VK.glsl.functions.qualifiers.out_highp_int_vertex
+dEQP-VK.glsl.functions.qualifiers.out_highp_int_fragment
+dEQP-VK.glsl.functions.qualifiers.inout_highp_int_vertex
+dEQP-VK.glsl.functions.qualifiers.inout_highp_int_fragment
+dEQP-VK.glsl.functions.qualifiers.const_int_vertex
+dEQP-VK.glsl.functions.qualifiers.const_int_fragment
+dEQP-VK.glsl.functions.qualifiers.const_in_int_vertex
+dEQP-VK.glsl.functions.qualifiers.const_in_int_fragment
+dEQP-VK.glsl.functions.qualifiers.in_bool_vertex
+dEQP-VK.glsl.functions.qualifiers.in_bool_fragment
+dEQP-VK.glsl.functions.qualifiers.out_bool_vertex
+dEQP-VK.glsl.functions.qualifiers.out_bool_fragment
+dEQP-VK.glsl.functions.qualifiers.inout_bool_vertex
+dEQP-VK.glsl.functions.qualifiers.inout_bool_fragment
+dEQP-VK.glsl.functions.qualifiers.const_bool_vertex
+dEQP-VK.glsl.functions.qualifiers.const_bool_fragment
+dEQP-VK.glsl.functions.declarations.basic_vertex
+dEQP-VK.glsl.functions.declarations.basic_fragment
+dEQP-VK.glsl.functions.declarations.basic_arg_vertex
+dEQP-VK.glsl.functions.declarations.basic_arg_fragment
+dEQP-VK.glsl.functions.declarations.define_after_use_vertex
+dEQP-VK.glsl.functions.declarations.define_after_use_fragment
+dEQP-VK.glsl.functions.declarations.double_declare_vertex
+dEQP-VK.glsl.functions.declarations.double_declare_fragment
+dEQP-VK.glsl.functions.declarations.declare_after_define_vertex
+dEQP-VK.glsl.functions.declarations.declare_after_define_fragment
+dEQP-VK.glsl.functions.declarations.void_vs_no_void_vertex
+dEQP-VK.glsl.functions.declarations.void_vs_no_void_fragment
+dEQP-VK.glsl.functions.declarations.in_vs_no_in_vertex
+dEQP-VK.glsl.functions.declarations.in_vs_no_in_fragment
+dEQP-VK.glsl.functions.declarations.default_vs_explicit_precision_vertex
+dEQP-VK.glsl.functions.declarations.default_vs_explicit_precision_fragment
+dEQP-VK.glsl.functions.overloading.user_func_arg_type_simple_vertex
+dEQP-VK.glsl.functions.overloading.user_func_arg_type_simple_fragment
+dEQP-VK.glsl.functions.overloading.user_func_arg_float_types_vertex
+dEQP-VK.glsl.functions.overloading.user_func_arg_float_types_fragment
+dEQP-VK.glsl.functions.overloading.user_func_arg_int_types_vertex
+dEQP-VK.glsl.functions.overloading.user_func_arg_int_types_fragment
+dEQP-VK.glsl.functions.overloading.user_func_arg_bool_types_vertex
+dEQP-VK.glsl.functions.overloading.user_func_arg_bool_types_fragment
+dEQP-VK.glsl.functions.overloading.user_func_arg_basic_types_vertex
+dEQP-VK.glsl.functions.overloading.user_func_arg_basic_types_fragment
+dEQP-VK.glsl.functions.overloading.user_func_arg_complex_types_vertex
+dEQP-VK.glsl.functions.overloading.user_func_arg_complex_types_fragment
+dEQP-VK.glsl.functions.overloading.user_func_arguments_vertex
+dEQP-VK.glsl.functions.overloading.user_func_arguments_fragment
+dEQP-VK.glsl.functions.overloading.array_size_vertex
+dEQP-VK.glsl.functions.overloading.array_size_fragment
+dEQP-VK.glsl.functions.array_arguments.local_in_float_vertex
+dEQP-VK.glsl.functions.array_arguments.local_in_float_fragment
+dEQP-VK.glsl.functions.array_arguments.global_in_float_vertex
+dEQP-VK.glsl.functions.array_arguments.global_in_float_fragment
+dEQP-VK.glsl.functions.array_arguments.local_in_int_vertex
+dEQP-VK.glsl.functions.array_arguments.local_in_int_fragment
+dEQP-VK.glsl.functions.array_arguments.global_in_int_vertex
+dEQP-VK.glsl.functions.array_arguments.global_in_int_fragment
+dEQP-VK.glsl.functions.array_arguments.local_in_bool_vertex
+dEQP-VK.glsl.functions.array_arguments.local_in_bool_fragment
+dEQP-VK.glsl.functions.array_arguments.global_in_bool_vertex
+dEQP-VK.glsl.functions.array_arguments.global_in_bool_fragment
+dEQP-VK.glsl.functions.array_arguments.test_helpers_vertex
+dEQP-VK.glsl.functions.array_arguments.test_helpers_fragment
+dEQP-VK.glsl.functions.array_arguments.copy_local_in_on_call_vertex
+dEQP-VK.glsl.functions.array_arguments.copy_local_in_on_call_fragment
+dEQP-VK.glsl.functions.array_arguments.copy_global_in_on_call_vertex
+dEQP-VK.glsl.functions.array_arguments.copy_global_in_on_call_fragment
+dEQP-VK.glsl.functions.array_arguments.copy_local_inout_on_call_vertex
+dEQP-VK.glsl.functions.array_arguments.copy_local_inout_on_call_fragment
+dEQP-VK.glsl.functions.array_arguments.copy_global_inout_on_call_vertex
+dEQP-VK.glsl.functions.array_arguments.copy_global_inout_on_call_fragment
+dEQP-VK.glsl.functions.control_flow.simple_return_vertex
+dEQP-VK.glsl.functions.control_flow.simple_return_fragment
+dEQP-VK.glsl.functions.control_flow.return_in_if_vertex
+dEQP-VK.glsl.functions.control_flow.return_in_if_fragment
+dEQP-VK.glsl.functions.control_flow.return_in_else_vertex
+dEQP-VK.glsl.functions.control_flow.return_in_else_fragment
+dEQP-VK.glsl.functions.control_flow.return_in_loop_vertex
+dEQP-VK.glsl.functions.control_flow.return_in_loop_fragment
+dEQP-VK.glsl.functions.control_flow.return_in_loop_if_vertex
+dEQP-VK.glsl.functions.control_flow.return_in_loop_if_fragment
+dEQP-VK.glsl.functions.control_flow.return_after_loop_vertex
+dEQP-VK.glsl.functions.control_flow.return_after_loop_fragment
+dEQP-VK.glsl.functions.control_flow.return_after_break_vertex
+dEQP-VK.glsl.functions.control_flow.return_after_break_fragment
+dEQP-VK.glsl.functions.control_flow.return_after_continue_vertex
+dEQP-VK.glsl.functions.control_flow.return_after_continue_fragment
+dEQP-VK.glsl.functions.control_flow.return_in_nested_loop_vertex
+dEQP-VK.glsl.functions.control_flow.return_in_nested_loop_fragment
+dEQP-VK.glsl.functions.control_flow.return_after_loop_sequence_vertex
+dEQP-VK.glsl.functions.control_flow.return_after_loop_sequence_fragment
+dEQP-VK.glsl.functions.control_flow.mixed_return_break_continue_vertex
+dEQP-VK.glsl.functions.control_flow.mixed_return_break_continue_fragment
+dEQP-VK.glsl.functions.misc.multi_arg_float_vertex
+dEQP-VK.glsl.functions.misc.multi_arg_float_fragment
+dEQP-VK.glsl.functions.misc.multi_arg_int_vertex
+dEQP-VK.glsl.functions.misc.multi_arg_int_fragment
+dEQP-VK.glsl.functions.misc.argument_eval_order_1_vertex
+dEQP-VK.glsl.functions.misc.argument_eval_order_1_fragment
+dEQP-VK.glsl.functions.misc.argument_eval_order_2_vertex
+dEQP-VK.glsl.functions.misc.argument_eval_order_2_fragment
+dEQP-VK.glsl.linkage.varying.rules.fragment_declare
+dEQP-VK.glsl.linkage.varying.rules.vertex_declare
+dEQP-VK.glsl.linkage.varying.rules.both_declare
+dEQP-VK.glsl.linkage.varying.rules.vertex_declare_fragment_use
+dEQP-VK.glsl.linkage.varying.rules.vertex_use_fragment_declare
+dEQP-VK.glsl.linkage.varying.rules.vertex_use_declare_fragment
+dEQP-VK.glsl.linkage.varying.rules.vertex_use_fragment_use
+dEQP-VK.glsl.linkage.varying.rules.differing_precision_1
+dEQP-VK.glsl.linkage.varying.rules.differing_precision_2
+dEQP-VK.glsl.linkage.varying.rules.differing_precision_3
+dEQP-VK.glsl.linkage.varying.rules.differing_interpolation_2
+dEQP-VK.glsl.linkage.varying.basic_types.float
+dEQP-VK.glsl.linkage.varying.basic_types.vec2
+dEQP-VK.glsl.linkage.varying.basic_types.vec3
+dEQP-VK.glsl.linkage.varying.basic_types.vec4
+dEQP-VK.glsl.linkage.varying.basic_types.mat2
+dEQP-VK.glsl.linkage.varying.basic_types.mat2x3
+dEQP-VK.glsl.linkage.varying.basic_types.mat2x4
+dEQP-VK.glsl.linkage.varying.basic_types.mat3x2
+dEQP-VK.glsl.linkage.varying.basic_types.mat3
+dEQP-VK.glsl.linkage.varying.basic_types.mat3x4
+dEQP-VK.glsl.linkage.varying.basic_types.mat4x2
+dEQP-VK.glsl.linkage.varying.basic_types.mat4x3
+dEQP-VK.glsl.linkage.varying.basic_types.mat4
+dEQP-VK.glsl.linkage.varying.basic_types.int
+dEQP-VK.glsl.linkage.varying.basic_types.ivec2
+dEQP-VK.glsl.linkage.varying.basic_types.ivec3
+dEQP-VK.glsl.linkage.varying.basic_types.ivec4
+dEQP-VK.glsl.linkage.varying.basic_types.uint
+dEQP-VK.glsl.linkage.varying.basic_types.uvec2
+dEQP-VK.glsl.linkage.varying.basic_types.uvec3
+dEQP-VK.glsl.linkage.varying.basic_types.uvec4
+dEQP-VK.glsl.linkage.varying.struct.float
+dEQP-VK.glsl.linkage.varying.struct.vec2
+dEQP-VK.glsl.linkage.varying.struct.vec3
+dEQP-VK.glsl.linkage.varying.struct.vec4
+dEQP-VK.glsl.linkage.varying.struct.mat2
+dEQP-VK.glsl.linkage.varying.struct.mat2x3
+dEQP-VK.glsl.linkage.varying.struct.mat2x4
+dEQP-VK.glsl.linkage.varying.struct.mat3x2
+dEQP-VK.glsl.linkage.varying.struct.mat3
+dEQP-VK.glsl.linkage.varying.struct.mat3x4
+dEQP-VK.glsl.linkage.varying.struct.mat4x2
+dEQP-VK.glsl.linkage.varying.struct.mat4x3
+dEQP-VK.glsl.linkage.varying.struct.mat4
+dEQP-VK.glsl.linkage.varying.struct.int
+dEQP-VK.glsl.linkage.varying.struct.ivec2
+dEQP-VK.glsl.linkage.varying.struct.ivec3
+dEQP-VK.glsl.linkage.varying.struct.ivec4
+dEQP-VK.glsl.linkage.varying.struct.uint
+dEQP-VK.glsl.linkage.varying.struct.uvec2
+dEQP-VK.glsl.linkage.varying.struct.uvec3
+dEQP-VK.glsl.linkage.varying.struct.uvec4
+dEQP-VK.glsl.linkage.varying.struct.float_vec3
+dEQP-VK.glsl.linkage.varying.struct.float_uvec2_vec3
+dEQP-VK.glsl.linkage.varying.interpolation.smooth
+dEQP-VK.glsl.linkage.varying.interpolation.centroid
+dEQP-VK.glsl.linkage.varying.interpolation.flat
+dEQP-VK.glsl.linkage.varying.usage.readback_1
+dEQP-VK.glsl.scoping.valid.local_variable_hides_global_variable_vertex
+dEQP-VK.glsl.scoping.valid.local_variable_hides_global_variable_fragment
+dEQP-VK.glsl.scoping.valid.block_variable_hides_local_variable_vertex
+dEQP-VK.glsl.scoping.valid.block_variable_hides_local_variable_fragment
+dEQP-VK.glsl.scoping.valid.block_variable_hides_global_variable_vertex
+dEQP-VK.glsl.scoping.valid.block_variable_hides_global_variable_fragment
+dEQP-VK.glsl.scoping.valid.for_init_statement_variable_hides_local_variable_vertex
+dEQP-VK.glsl.scoping.valid.for_init_statement_variable_hides_local_variable_fragment
+dEQP-VK.glsl.scoping.valid.while_condition_variable_hides_local_variable_vertex
+dEQP-VK.glsl.scoping.valid.while_condition_variable_hides_local_variable_fragment
+dEQP-VK.glsl.scoping.valid.for_init_statement_variable_hides_global_variable_vertex
+dEQP-VK.glsl.scoping.valid.for_init_statement_variable_hides_global_variable_fragment
+dEQP-VK.glsl.scoping.valid.while_condition_variable_hides_global_variable_vertex
+dEQP-VK.glsl.scoping.valid.while_condition_variable_hides_global_variable_fragment
+dEQP-VK.glsl.scoping.valid.variable_in_if_hides_global_variable_vertex
+dEQP-VK.glsl.scoping.valid.variable_in_if_hides_global_variable_fragment
+dEQP-VK.glsl.scoping.valid.variable_from_outer_scope_visible_in_initializer_vertex
+dEQP-VK.glsl.scoping.valid.variable_from_outer_scope_visible_in_initializer_fragment
+dEQP-VK.glsl.scoping.valid.local_int_variable_hides_struct_type_vertex
+dEQP-VK.glsl.scoping.valid.local_int_variable_hides_struct_type_fragment
+dEQP-VK.glsl.scoping.valid.local_struct_variable_hides_struct_type_vertex
+dEQP-VK.glsl.scoping.valid.local_struct_variable_hides_struct_type_fragment
+dEQP-VK.glsl.scoping.valid.local_variable_hides_function_vertex
+dEQP-VK.glsl.scoping.valid.local_variable_hides_function_fragment
+dEQP-VK.glsl.scoping.valid.function_parameter_hides_global_variable_vertex
+dEQP-VK.glsl.scoping.valid.function_parameter_hides_global_variable_fragment
+dEQP-VK.glsl.scoping.valid.function_parameter_hides_struct_type_vertex
+dEQP-VK.glsl.scoping.valid.function_parameter_hides_struct_type_fragment
+dEQP-VK.glsl.scoping.valid.function_parameter_hides_function_vertex
+dEQP-VK.glsl.scoping.valid.function_parameter_hides_function_fragment
+dEQP-VK.glsl.scoping.valid.local_variable_in_inner_scope_hides_function_parameter_vertex
+dEQP-VK.glsl.scoping.valid.local_variable_in_inner_scope_hides_function_parameter_fragment
+dEQP-VK.glsl.scoping.valid.redeclare_function_vertex
+dEQP-VK.glsl.scoping.valid.redeclare_function_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_x_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_x_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_xx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_xx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_xy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_xy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_yx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_yx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_yxy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_yxy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_xyxx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_xyxx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_yyyy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_yyyy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_s_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_s_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_ss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_ss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_st_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_st_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_ts_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_ts_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_tst_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_tst_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_stss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_stss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_tttt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_tttt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_r_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_r_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_rr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_rr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_rg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_rg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_gr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_gr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_grg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_grg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_rgrr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_rgrr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_gggg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec2_gggg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_x_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_x_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_z_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_z_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_xz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_xz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_zz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_zz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_xyz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_xyz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_zyx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_zyx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_xxx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_xxx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_zzz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_zzz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_zzy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_zzy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_yxy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_yxy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_xzx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_xzx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_xyyx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_xyyx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_zzzz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_zzzz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_s_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_s_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_p_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_p_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_sp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_sp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_pp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_pp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_stp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_stp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_pts_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_pts_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_sss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_sss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_ppp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_ppp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_ppt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_ppt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_tst_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_tst_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_sps_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_sps_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_stts_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_stts_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_pppp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_pppp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_r_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_r_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_b_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_b_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_rb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_rb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_bb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_bb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_rgb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_rgb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_bgr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_bgr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_rrr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_rrr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_bbb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_bbb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_bbg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_bbg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_grg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_grg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_rbr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_rbr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_rggr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_rggr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_bbbb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec3_bbbb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_x_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_x_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_w_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_w_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_www_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_www_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_yyw_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_yyw_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wzy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wzy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_xyzw_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_xyzw_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wzyx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wzyx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_xxxx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_xxxx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_yyyy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_yyyy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wwww_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wwww_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wzzw_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wzzw_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wwwy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_wwwy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_xyxx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_xyxx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_zzwz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_zzwz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_s_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_s_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_q_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_q_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qs_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qs_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qqq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qqq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_ttq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_ttq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qpt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qpt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_stpq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_stpq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qpts_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qpts_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_ssss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_ssss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_tttt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_tttt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qqqq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qqqq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qppq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qppq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qqqt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_qqqt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_stss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_stss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_ppqp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_ppqp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_r_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_r_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_a_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_a_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_ar_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_ar_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_ab_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_ab_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_aaa_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_aaa_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_gga_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_gga_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_abg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_abg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_rgba_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_rgba_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_abgr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_abgr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_rrrr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_rrrr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_gggg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_gggg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_aaaa_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_aaaa_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_abba_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_abba_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_aaag_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_aaag_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_rgrr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_rgrr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_bbab_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_vec4_bbab_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_x_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_x_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_xx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_xx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_xy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_xy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_yx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_yx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_yxy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_yxy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_xyxx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_xyxx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_yyyy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_yyyy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_s_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_s_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_ss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_ss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_st_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_st_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_ts_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_ts_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_tst_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_tst_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_stss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_stss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_tttt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_tttt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_r_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_r_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_rr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_rr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_rg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_rg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_gr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_gr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_grg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_grg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_rgrr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_rgrr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_gggg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec2_gggg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_x_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_x_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_z_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_z_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_xz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_xz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_zz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_zz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_xyz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_xyz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_zyx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_zyx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_xxx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_xxx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_zzz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_zzz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_zzy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_zzy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_yxy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_yxy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_xzx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_xzx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_xyyx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_xyyx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_zzzz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_zzzz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_s_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_s_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_p_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_p_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_sp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_sp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_pp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_pp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_stp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_stp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_pts_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_pts_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_sss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_sss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_ppp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_ppp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_ppt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_ppt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_tst_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_tst_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_sps_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_sps_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_stts_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_stts_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_pppp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_pppp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_r_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_r_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_b_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_b_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_rb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_rb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_bb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_bb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_rgb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_rgb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_bgr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_bgr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_rrr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_rrr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_bbb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_bbb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_bbg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_bbg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_grg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_grg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_rbr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_rbr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_rggr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_rggr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_bbbb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec3_bbbb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_x_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_x_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_w_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_w_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_www_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_www_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_yyw_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_yyw_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wzy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wzy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_xyzw_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_xyzw_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wzyx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wzyx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_xxxx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_xxxx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_yyyy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_yyyy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wwww_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wwww_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wzzw_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wzzw_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wwwy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_wwwy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_xyxx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_xyxx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_zzwz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_zzwz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_s_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_s_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_q_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_q_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qs_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qs_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qqq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qqq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_ttq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_ttq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qpt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qpt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_stpq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_stpq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qpts_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qpts_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_ssss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_ssss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_tttt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_tttt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qqqq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qqqq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qppq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qppq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qqqt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_qqqt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_stss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_stss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_ppqp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_ppqp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_r_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_r_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_a_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_a_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_ar_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_ar_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_ab_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_ab_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_aaa_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_aaa_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_gga_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_gga_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_abg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_abg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_rgba_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_rgba_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_abgr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_abgr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_rrrr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_rrrr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_gggg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_gggg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_aaaa_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_aaaa_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_abba_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_abba_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_aaag_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_aaag_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_rgrr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_rgrr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_bbab_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_ivec4_bbab_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_x_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_x_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_xx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_xx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_xy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_xy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_yx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_yx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_yxy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_yxy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_xyxx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_xyxx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_yyyy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_yyyy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_s_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_s_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_ss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_ss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_st_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_st_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_ts_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_ts_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_tst_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_tst_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_stss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_stss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_tttt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_tttt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_r_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_r_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_rr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_rr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_rg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_rg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_gr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_gr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_grg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_grg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_rgrr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_rgrr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_gggg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec2_gggg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_x_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_x_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_z_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_z_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_xz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_xz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_zz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_zz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_xyz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_xyz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_zyx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_zyx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_xxx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_xxx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_zzz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_zzz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_zzy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_zzy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_yxy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_yxy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_xzx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_xzx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_xyyx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_xyyx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_zzzz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_zzzz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_s_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_s_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_p_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_p_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_sp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_sp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_pp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_pp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_stp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_stp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_pts_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_pts_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_sss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_sss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_ppp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_ppp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_ppt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_ppt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_tst_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_tst_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_sps_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_sps_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_stts_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_stts_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_pppp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_pppp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_r_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_r_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_b_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_b_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_rb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_rb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_bb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_bb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_rgb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_rgb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_bgr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_bgr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_rrr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_rrr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_bbb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_bbb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_bbg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_bbg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_grg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_grg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_rbr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_rbr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_rggr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_rggr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_bbbb_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec3_bbbb_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_x_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_x_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_w_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_w_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_www_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_www_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_yyw_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_yyw_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wzy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wzy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_xyzw_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_xyzw_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wzyx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wzyx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_xxxx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_xxxx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_yyyy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_yyyy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wwww_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wwww_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wzzw_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wzzw_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wwwy_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_wwwy_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_xyxx_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_xyxx_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_zzwz_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_zzwz_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_s_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_s_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_q_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_q_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qs_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qs_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qqq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qqq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_ttq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_ttq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qpt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qpt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_stpq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_stpq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qpts_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qpts_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_ssss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_ssss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_tttt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_tttt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qqqq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qqqq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qppq_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qppq_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qqqt_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_qqqt_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_stss_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_stss_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_ppqp_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_ppqp_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_r_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_r_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_a_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_a_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_ar_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_ar_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_ab_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_ab_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_aaa_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_aaa_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_gga_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_gga_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_abg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_abg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_rgba_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_rgba_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_abgr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_abgr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_rrrr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_rrrr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_gggg_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_gggg_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_aaaa_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_aaaa_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_abba_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_abba_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_aaag_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_aaag_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_rgrr_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_rgrr_fragment
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_bbab_vertex
+dEQP-VK.glsl.swizzles.vector_swizzles.mediump_bvec4_bbab_fragment
+dEQP-VK.glsl.discard.basic_always
+dEQP-VK.glsl.discard.basic_never
+dEQP-VK.glsl.discard.basic_uniform
+dEQP-VK.glsl.discard.basic_dynamic
+dEQP-VK.glsl.discard.basic_texture
+dEQP-VK.glsl.discard.function_always
+dEQP-VK.glsl.discard.function_never
+dEQP-VK.glsl.discard.function_uniform
+dEQP-VK.glsl.discard.function_dynamic
+dEQP-VK.glsl.discard.function_texture
+dEQP-VK.glsl.discard.static_loop_always
+dEQP-VK.glsl.discard.static_loop_never
+dEQP-VK.glsl.discard.static_loop_uniform
+dEQP-VK.glsl.discard.static_loop_dynamic
+dEQP-VK.glsl.discard.static_loop_texture
+dEQP-VK.glsl.discard.dynamic_loop_always
+dEQP-VK.glsl.discard.dynamic_loop_never
+dEQP-VK.glsl.discard.dynamic_loop_uniform
+dEQP-VK.glsl.discard.dynamic_loop_dynamic
+dEQP-VK.glsl.discard.dynamic_loop_texture
+dEQP-VK.glsl.discard.function_static_loop_always
+dEQP-VK.glsl.discard.function_static_loop_never
+dEQP-VK.glsl.discard.function_static_loop_uniform
+dEQP-VK.glsl.discard.function_static_loop_dynamic
+dEQP-VK.glsl.discard.function_static_loop_texture
+dEQP-VK.glsl.indexing.varying_array.float_static_write_static_read
+dEQP-VK.glsl.indexing.varying_array.float_static_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.float_static_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.float_static_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.float_dynamic_write_static_read
+dEQP-VK.glsl.indexing.varying_array.float_dynamic_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.float_dynamic_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.float_dynamic_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.float_static_loop_write_static_read
+dEQP-VK.glsl.indexing.varying_array.float_static_loop_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.float_static_loop_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.float_static_loop_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.float_dynamic_loop_write_static_read
+dEQP-VK.glsl.indexing.varying_array.float_dynamic_loop_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.float_dynamic_loop_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.float_dynamic_loop_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec2_static_write_static_read
+dEQP-VK.glsl.indexing.varying_array.vec2_static_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.vec2_static_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec2_static_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec2_dynamic_write_static_read
+dEQP-VK.glsl.indexing.varying_array.vec2_dynamic_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.vec2_dynamic_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec2_dynamic_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec2_static_loop_write_static_read
+dEQP-VK.glsl.indexing.varying_array.vec2_static_loop_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.vec2_static_loop_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec2_static_loop_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec2_dynamic_loop_write_static_read
+dEQP-VK.glsl.indexing.varying_array.vec2_dynamic_loop_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.vec2_dynamic_loop_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec2_dynamic_loop_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec3_static_write_static_read
+dEQP-VK.glsl.indexing.varying_array.vec3_static_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.vec3_static_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec3_static_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec3_dynamic_write_static_read
+dEQP-VK.glsl.indexing.varying_array.vec3_dynamic_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.vec3_dynamic_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec3_dynamic_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec3_static_loop_write_static_read
+dEQP-VK.glsl.indexing.varying_array.vec3_static_loop_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.vec3_static_loop_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec3_static_loop_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec3_dynamic_loop_write_static_read
+dEQP-VK.glsl.indexing.varying_array.vec3_dynamic_loop_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.vec3_dynamic_loop_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec3_dynamic_loop_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec4_static_write_static_read
+dEQP-VK.glsl.indexing.varying_array.vec4_static_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.vec4_static_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec4_static_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec4_dynamic_write_static_read
+dEQP-VK.glsl.indexing.varying_array.vec4_dynamic_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.vec4_dynamic_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec4_dynamic_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec4_static_loop_write_static_read
+dEQP-VK.glsl.indexing.varying_array.vec4_static_loop_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.vec4_static_loop_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec4_static_loop_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec4_dynamic_loop_write_static_read
+dEQP-VK.glsl.indexing.varying_array.vec4_dynamic_loop_write_dynamic_read
+dEQP-VK.glsl.indexing.varying_array.vec4_dynamic_loop_write_static_loop_read
+dEQP-VK.glsl.indexing.varying_array.vec4_dynamic_loop_write_dynamic_loop_read
+dEQP-VK.glsl.indexing.uniform_array.float_static_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.float_static_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.float_dynamic_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.float_dynamic_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.float_static_loop_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.float_static_loop_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.float_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.float_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.vec2_static_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.vec2_static_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.vec2_dynamic_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.vec2_dynamic_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.vec2_static_loop_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.vec2_static_loop_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.vec2_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.vec2_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.vec3_static_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.vec3_static_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.vec3_dynamic_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.vec3_dynamic_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.vec3_static_loop_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.vec3_static_loop_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.vec3_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.vec3_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.vec4_static_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.vec4_static_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.vec4_dynamic_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.vec4_dynamic_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.vec4_static_loop_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.vec4_static_loop_read_fragment
+dEQP-VK.glsl.indexing.uniform_array.vec4_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.uniform_array.vec4_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_static_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_static_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_static_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_static_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_static_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_static_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_static_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_static_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_static_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_static_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_static_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_static_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_static_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_static_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_static_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_static_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.float_dynamic_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_static_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec2_dynamic_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_static_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec3_dynamic_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_static_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.tmp_array.vec4_dynamic_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_direct_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_direct_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_direct_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_direct_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_direct_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_direct_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_direct_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_direct_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_direct_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_direct_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_direct_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_direct_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_component_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_component_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_component_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_component_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_component_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_component_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_component_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_component_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_component_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_component_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_component_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_component_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_subscript_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_subscript_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_subscript_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_subscript_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_subscript_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_subscript_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_subscript_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_subscript_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_subscript_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_subscript_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_subscript_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_subscript_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_subscript_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_subscript_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_subscript_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_subscript_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_subscript_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_subscript_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_subscript_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_subscript_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_subscript_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_subscript_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_subscript_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_subscript_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_loop_subscript_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_loop_subscript_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_loop_subscript_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_loop_subscript_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_loop_subscript_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_loop_subscript_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_loop_subscript_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_loop_subscript_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_loop_subscript_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_loop_subscript_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_loop_subscript_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_static_loop_subscript_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_loop_subscript_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_loop_subscript_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_loop_subscript_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_loop_subscript_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_loop_subscript_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_loop_subscript_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_loop_subscript_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_loop_subscript_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_loop_subscript_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_loop_subscript_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_loop_subscript_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec2_dynamic_loop_subscript_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_direct_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_direct_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_direct_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_direct_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_direct_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_direct_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_direct_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_direct_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_direct_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_direct_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_direct_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_direct_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_component_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_component_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_component_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_component_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_component_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_component_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_component_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_component_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_component_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_component_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_component_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_component_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_subscript_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_subscript_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_subscript_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_subscript_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_subscript_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_subscript_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_subscript_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_subscript_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_subscript_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_subscript_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_subscript_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_subscript_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_subscript_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_subscript_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_subscript_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_subscript_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_subscript_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_subscript_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_subscript_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_subscript_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_subscript_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_subscript_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_subscript_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_subscript_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_loop_subscript_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_loop_subscript_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_loop_subscript_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_loop_subscript_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_loop_subscript_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_loop_subscript_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_loop_subscript_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_loop_subscript_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_loop_subscript_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_loop_subscript_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_loop_subscript_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_static_loop_subscript_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_loop_subscript_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_loop_subscript_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_loop_subscript_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_loop_subscript_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_loop_subscript_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_loop_subscript_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_loop_subscript_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_loop_subscript_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_loop_subscript_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_loop_subscript_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_loop_subscript_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec3_dynamic_loop_subscript_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_direct_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_direct_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_direct_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_direct_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_direct_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_direct_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_direct_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_direct_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_direct_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_direct_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_direct_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_direct_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_component_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_component_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_component_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_component_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_component_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_component_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_component_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_component_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_component_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_component_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_component_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_component_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_subscript_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_subscript_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_subscript_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_subscript_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_subscript_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_subscript_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_subscript_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_subscript_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_subscript_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_subscript_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_subscript_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_subscript_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_subscript_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_subscript_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_subscript_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_subscript_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_subscript_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_subscript_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_subscript_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_subscript_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_subscript_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_subscript_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_subscript_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_subscript_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_loop_subscript_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_loop_subscript_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_loop_subscript_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_loop_subscript_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_loop_subscript_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_loop_subscript_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_loop_subscript_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_loop_subscript_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_loop_subscript_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_loop_subscript_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_loop_subscript_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_static_loop_subscript_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_loop_subscript_write_direct_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_loop_subscript_write_direct_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_loop_subscript_write_component_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_loop_subscript_write_component_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_loop_subscript_write_static_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_loop_subscript_write_static_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_loop_subscript_write_dynamic_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_loop_subscript_write_dynamic_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_loop_subscript_write_static_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_loop_subscript_write_static_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_loop_subscript_write_dynamic_loop_subscript_read_vertex
+dEQP-VK.glsl.indexing.vector_subscript.vec4_dynamic_loop_subscript_write_dynamic_loop_subscript_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_static_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2_dynamic_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_static_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x3_dynamic_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_static_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat2x4_dynamic_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_static_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x2_dynamic_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_static_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3_dynamic_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_static_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat3x4_dynamic_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_static_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x2_dynamic_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_static_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4x3_dynamic_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_static_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_loop_write_static_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_loop_write_static_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_loop_write_dynamic_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_loop_write_dynamic_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_loop_write_static_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_loop_write_static_loop_read_fragment
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_loop_write_dynamic_loop_read_vertex
+dEQP-VK.glsl.indexing.matrix_subscript.mat4_dynamic_loop_write_dynamic_loop_read_fragment
+dEQP-VK.glsl.loops.generic.for_constant_iterations.basic_mediump_int_vertex
+dEQP-VK.glsl.loops.generic.for_constant_iterations.basic_mediump_int_fragment
+dEQP-VK.glsl.loops.generic.for_constant_iterations.basic_mediump_float_vertex
+dEQP-VK.glsl.loops.generic.for_constant_iterations.basic_mediump_float_fragment
+dEQP-VK.glsl.loops.generic.for_constant_iterations.basic_highp_int_vertex
+dEQP-VK.glsl.loops.generic.for_constant_iterations.basic_highp_int_fragment
+dEQP-VK.glsl.loops.generic.for_constant_iterations.basic_highp_float_vertex
+dEQP-VK.glsl.loops.generic.for_constant_iterations.basic_highp_float_fragment
+dEQP-VK.glsl.loops.generic.for_uniform_iterations.basic_mediump_int_vertex
+dEQP-VK.glsl.loops.generic.for_uniform_iterations.basic_mediump_int_fragment
+dEQP-VK.glsl.loops.generic.for_uniform_iterations.basic_mediump_float_vertex
+dEQP-VK.glsl.loops.generic.for_uniform_iterations.basic_mediump_float_fragment
+dEQP-VK.glsl.loops.generic.for_uniform_iterations.basic_highp_int_vertex
+dEQP-VK.glsl.loops.generic.for_uniform_iterations.basic_highp_int_fragment
+dEQP-VK.glsl.loops.generic.for_uniform_iterations.basic_highp_float_vertex
+dEQP-VK.glsl.loops.generic.for_uniform_iterations.basic_highp_float_fragment
+dEQP-VK.glsl.loops.generic.for_dynamic_iterations.basic_mediump_int_vertex
+dEQP-VK.glsl.loops.generic.for_dynamic_iterations.basic_mediump_int_fragment
+dEQP-VK.glsl.loops.generic.for_dynamic_iterations.basic_mediump_float_vertex
+dEQP-VK.glsl.loops.generic.for_dynamic_iterations.basic_mediump_float_fragment
+dEQP-VK.glsl.loops.generic.for_dynamic_iterations.basic_highp_int_vertex
+dEQP-VK.glsl.loops.generic.for_dynamic_iterations.basic_highp_int_fragment
+dEQP-VK.glsl.loops.generic.for_dynamic_iterations.basic_highp_float_vertex
+dEQP-VK.glsl.loops.generic.for_dynamic_iterations.basic_highp_float_fragment
+dEQP-VK.glsl.loops.generic.while_constant_iterations.basic_mediump_int_vertex
+dEQP-VK.glsl.loops.generic.while_constant_iterations.basic_mediump_int_fragment
+dEQP-VK.glsl.loops.generic.while_constant_iterations.basic_mediump_float_vertex
+dEQP-VK.glsl.loops.generic.while_constant_iterations.basic_mediump_float_fragment
+dEQP-VK.glsl.loops.generic.while_constant_iterations.basic_highp_int_vertex
+dEQP-VK.glsl.loops.generic.while_constant_iterations.basic_highp_int_fragment
+dEQP-VK.glsl.loops.generic.while_constant_iterations.basic_highp_float_vertex
+dEQP-VK.glsl.loops.generic.while_constant_iterations.basic_highp_float_fragment
+dEQP-VK.glsl.loops.generic.while_uniform_iterations.basic_mediump_int_vertex
+dEQP-VK.glsl.loops.generic.while_uniform_iterations.basic_mediump_int_fragment
+dEQP-VK.glsl.loops.generic.while_uniform_iterations.basic_mediump_float_vertex
+dEQP-VK.glsl.loops.generic.while_uniform_iterations.basic_mediump_float_fragment
+dEQP-VK.glsl.loops.generic.while_uniform_iterations.basic_highp_int_vertex
+dEQP-VK.glsl.loops.generic.while_uniform_iterations.basic_highp_int_fragment
+dEQP-VK.glsl.loops.generic.while_uniform_iterations.basic_highp_float_vertex
+dEQP-VK.glsl.loops.generic.while_uniform_iterations.basic_highp_float_fragment
+dEQP-VK.glsl.loops.generic.while_dynamic_iterations.basic_mediump_int_vertex
+dEQP-VK.glsl.loops.generic.while_dynamic_iterations.basic_mediump_int_fragment
+dEQP-VK.glsl.loops.generic.while_dynamic_iterations.basic_mediump_float_vertex
+dEQP-VK.glsl.loops.generic.while_dynamic_iterations.basic_mediump_float_fragment
+dEQP-VK.glsl.loops.generic.while_dynamic_iterations.basic_highp_int_vertex
+dEQP-VK.glsl.loops.generic.while_dynamic_iterations.basic_highp_int_fragment
+dEQP-VK.glsl.loops.generic.while_dynamic_iterations.basic_highp_float_vertex
+dEQP-VK.glsl.loops.generic.while_dynamic_iterations.basic_highp_float_fragment
+dEQP-VK.glsl.loops.generic.do_while_constant_iterations.basic_mediump_int_vertex
+dEQP-VK.glsl.loops.generic.do_while_constant_iterations.basic_mediump_int_fragment
+dEQP-VK.glsl.loops.generic.do_while_constant_iterations.basic_mediump_float_vertex
+dEQP-VK.glsl.loops.generic.do_while_constant_iterations.basic_mediump_float_fragment
+dEQP-VK.glsl.loops.generic.do_while_constant_iterations.basic_highp_int_vertex
+dEQP-VK.glsl.loops.generic.do_while_constant_iterations.basic_highp_int_fragment
+dEQP-VK.glsl.loops.generic.do_while_constant_iterations.basic_highp_float_vertex
+dEQP-VK.glsl.loops.generic.do_while_constant_iterations.basic_highp_float_fragment
+dEQP-VK.glsl.loops.generic.do_while_uniform_iterations.basic_mediump_int_vertex
+dEQP-VK.glsl.loops.generic.do_while_uniform_iterations.basic_mediump_int_fragment
+dEQP-VK.glsl.loops.generic.do_while_uniform_iterations.basic_mediump_float_vertex
+dEQP-VK.glsl.loops.generic.do_while_uniform_iterations.basic_mediump_float_fragment
+dEQP-VK.glsl.loops.generic.do_while_uniform_iterations.basic_highp_int_vertex
+dEQP-VK.glsl.loops.generic.do_while_uniform_iterations.basic_highp_int_fragment
+dEQP-VK.glsl.loops.generic.do_while_uniform_iterations.basic_highp_float_vertex
+dEQP-VK.glsl.loops.generic.do_while_uniform_iterations.basic_highp_float_fragment
+dEQP-VK.glsl.loops.generic.do_while_dynamic_iterations.basic_mediump_int_vertex
+dEQP-VK.glsl.loops.generic.do_while_dynamic_iterations.basic_mediump_int_fragment
+dEQP-VK.glsl.loops.generic.do_while_dynamic_iterations.basic_mediump_float_vertex
+dEQP-VK.glsl.loops.generic.do_while_dynamic_iterations.basic_mediump_float_fragment
+dEQP-VK.glsl.loops.generic.do_while_dynamic_iterations.basic_highp_int_vertex
+dEQP-VK.glsl.loops.generic.do_while_dynamic_iterations.basic_highp_int_fragment
+dEQP-VK.glsl.loops.generic.do_while_dynamic_iterations.basic_highp_float_vertex
+dEQP-VK.glsl.loops.generic.do_while_dynamic_iterations.basic_highp_float_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.empty_body_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.empty_body_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.infinite_with_unconditional_break_first_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.infinite_with_unconditional_break_first_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.infinite_with_unconditional_break_last_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.infinite_with_unconditional_break_last_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.infinite_with_conditional_break_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.infinite_with_conditional_break_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.single_statement_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.single_statement_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.compound_statement_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.compound_statement_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.sequence_statement_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.sequence_statement_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.no_iterations_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.no_iterations_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.single_iteration_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.single_iteration_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.select_iteration_count_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.select_iteration_count_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.conditional_continue_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.conditional_continue_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.unconditional_continue_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.unconditional_continue_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.only_continue_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.only_continue_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.double_continue_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.double_continue_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.conditional_break_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.conditional_break_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.unconditional_break_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.unconditional_break_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.pre_increment_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.pre_increment_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.post_increment_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.post_increment_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.mixed_break_continue_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.mixed_break_continue_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.vector_counter_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.vector_counter_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.101_iterations_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.101_iterations_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.sequence_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.sequence_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.nested_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.nested_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.nested_sequence_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.nested_sequence_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.nested_tricky_dataflow_1_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.nested_tricky_dataflow_1_fragment
+dEQP-VK.glsl.loops.special.for_constant_iterations.nested_tricky_dataflow_2_vertex
+dEQP-VK.glsl.loops.special.for_constant_iterations.nested_tricky_dataflow_2_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.empty_body_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.empty_body_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.infinite_with_unconditional_break_first_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.infinite_with_unconditional_break_first_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.infinite_with_unconditional_break_last_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.infinite_with_unconditional_break_last_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.infinite_with_conditional_break_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.infinite_with_conditional_break_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.single_statement_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.single_statement_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.compound_statement_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.compound_statement_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.sequence_statement_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.sequence_statement_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.no_iterations_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.no_iterations_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.single_iteration_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.single_iteration_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.select_iteration_count_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.select_iteration_count_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.conditional_continue_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.conditional_continue_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.unconditional_continue_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.unconditional_continue_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.only_continue_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.only_continue_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.double_continue_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.double_continue_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.conditional_break_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.conditional_break_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.unconditional_break_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.unconditional_break_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.pre_increment_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.pre_increment_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.post_increment_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.post_increment_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.mixed_break_continue_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.mixed_break_continue_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.vector_counter_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.vector_counter_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.101_iterations_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.101_iterations_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.sequence_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.sequence_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.nested_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.nested_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.nested_sequence_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.nested_sequence_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.nested_tricky_dataflow_1_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.nested_tricky_dataflow_1_fragment
+dEQP-VK.glsl.loops.special.for_uniform_iterations.nested_tricky_dataflow_2_vertex
+dEQP-VK.glsl.loops.special.for_uniform_iterations.nested_tricky_dataflow_2_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.empty_body_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.empty_body_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.infinite_with_unconditional_break_first_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.infinite_with_unconditional_break_first_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.infinite_with_unconditional_break_last_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.infinite_with_unconditional_break_last_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.infinite_with_conditional_break_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.infinite_with_conditional_break_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.single_statement_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.single_statement_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.compound_statement_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.compound_statement_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.sequence_statement_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.sequence_statement_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.no_iterations_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.no_iterations_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.single_iteration_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.single_iteration_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.select_iteration_count_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.select_iteration_count_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.conditional_continue_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.conditional_continue_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.unconditional_continue_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.unconditional_continue_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.only_continue_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.only_continue_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.double_continue_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.double_continue_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.conditional_break_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.conditional_break_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.unconditional_break_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.unconditional_break_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.pre_increment_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.pre_increment_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.post_increment_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.post_increment_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.mixed_break_continue_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.mixed_break_continue_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.vector_counter_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.vector_counter_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.101_iterations_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.101_iterations_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.sequence_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.sequence_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.nested_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.nested_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.nested_sequence_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.nested_sequence_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.nested_tricky_dataflow_1_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.nested_tricky_dataflow_1_fragment
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.nested_tricky_dataflow_2_vertex
+dEQP-VK.glsl.loops.special.for_dynamic_iterations.nested_tricky_dataflow_2_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.empty_body_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.empty_body_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.infinite_with_unconditional_break_first_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.infinite_with_unconditional_break_first_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.infinite_with_unconditional_break_last_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.infinite_with_unconditional_break_last_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.infinite_with_conditional_break_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.infinite_with_conditional_break_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.single_statement_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.single_statement_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.compound_statement_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.compound_statement_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.sequence_statement_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.sequence_statement_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.no_iterations_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.no_iterations_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.single_iteration_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.single_iteration_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.select_iteration_count_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.select_iteration_count_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.conditional_continue_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.conditional_continue_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.unconditional_continue_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.unconditional_continue_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.only_continue_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.only_continue_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.double_continue_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.double_continue_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.conditional_break_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.conditional_break_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.unconditional_break_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.unconditional_break_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.pre_increment_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.pre_increment_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.post_increment_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.post_increment_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.mixed_break_continue_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.mixed_break_continue_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.vector_counter_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.vector_counter_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.101_iterations_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.101_iterations_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.sequence_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.sequence_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.nested_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.nested_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.nested_sequence_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.nested_sequence_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.nested_tricky_dataflow_1_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.nested_tricky_dataflow_1_fragment
+dEQP-VK.glsl.loops.special.while_constant_iterations.nested_tricky_dataflow_2_vertex
+dEQP-VK.glsl.loops.special.while_constant_iterations.nested_tricky_dataflow_2_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.empty_body_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.empty_body_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.infinite_with_unconditional_break_first_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.infinite_with_unconditional_break_first_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.infinite_with_unconditional_break_last_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.infinite_with_unconditional_break_last_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.infinite_with_conditional_break_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.infinite_with_conditional_break_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.single_statement_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.single_statement_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.compound_statement_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.compound_statement_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.sequence_statement_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.sequence_statement_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.no_iterations_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.no_iterations_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.single_iteration_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.single_iteration_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.select_iteration_count_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.select_iteration_count_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.conditional_continue_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.conditional_continue_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.unconditional_continue_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.unconditional_continue_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.only_continue_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.only_continue_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.double_continue_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.double_continue_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.conditional_break_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.conditional_break_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.unconditional_break_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.unconditional_break_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.pre_increment_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.pre_increment_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.post_increment_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.post_increment_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.mixed_break_continue_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.mixed_break_continue_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.vector_counter_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.vector_counter_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.101_iterations_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.101_iterations_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.sequence_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.sequence_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.nested_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.nested_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.nested_sequence_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.nested_sequence_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.nested_tricky_dataflow_1_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.nested_tricky_dataflow_1_fragment
+dEQP-VK.glsl.loops.special.while_uniform_iterations.nested_tricky_dataflow_2_vertex
+dEQP-VK.glsl.loops.special.while_uniform_iterations.nested_tricky_dataflow_2_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.empty_body_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.empty_body_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.infinite_with_unconditional_break_first_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.infinite_with_unconditional_break_first_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.infinite_with_unconditional_break_last_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.infinite_with_unconditional_break_last_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.infinite_with_conditional_break_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.infinite_with_conditional_break_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.single_statement_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.single_statement_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.compound_statement_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.compound_statement_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.sequence_statement_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.sequence_statement_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.no_iterations_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.no_iterations_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.single_iteration_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.single_iteration_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.select_iteration_count_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.select_iteration_count_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.conditional_continue_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.conditional_continue_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.unconditional_continue_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.unconditional_continue_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.only_continue_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.only_continue_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.double_continue_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.double_continue_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.conditional_break_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.conditional_break_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.unconditional_break_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.unconditional_break_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.pre_increment_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.pre_increment_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.post_increment_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.post_increment_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.mixed_break_continue_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.mixed_break_continue_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.vector_counter_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.vector_counter_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.101_iterations_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.101_iterations_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.sequence_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.sequence_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.nested_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.nested_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.nested_sequence_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.nested_sequence_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.nested_tricky_dataflow_1_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.nested_tricky_dataflow_1_fragment
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.nested_tricky_dataflow_2_vertex
+dEQP-VK.glsl.loops.special.while_dynamic_iterations.nested_tricky_dataflow_2_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.empty_body_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.empty_body_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.infinite_with_unconditional_break_first_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.infinite_with_unconditional_break_first_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.infinite_with_unconditional_break_last_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.infinite_with_unconditional_break_last_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.infinite_with_conditional_break_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.infinite_with_conditional_break_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.single_statement_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.single_statement_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.compound_statement_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.compound_statement_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.sequence_statement_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.sequence_statement_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.single_iteration_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.single_iteration_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.select_iteration_count_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.select_iteration_count_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.conditional_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.conditional_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.unconditional_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.unconditional_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.only_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.only_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.double_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.double_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.conditional_break_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.conditional_break_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.unconditional_break_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.unconditional_break_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.pre_increment_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.pre_increment_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.post_increment_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.post_increment_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.mixed_break_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.mixed_break_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.vector_counter_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.vector_counter_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.101_iterations_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.101_iterations_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.sequence_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.sequence_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.nested_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.nested_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.nested_sequence_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.nested_sequence_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.nested_tricky_dataflow_1_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.nested_tricky_dataflow_1_fragment
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.nested_tricky_dataflow_2_vertex
+dEQP-VK.glsl.loops.special.do_while_constant_iterations.nested_tricky_dataflow_2_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.empty_body_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.empty_body_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.infinite_with_unconditional_break_first_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.infinite_with_unconditional_break_first_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.infinite_with_unconditional_break_last_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.infinite_with_unconditional_break_last_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.infinite_with_conditional_break_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.infinite_with_conditional_break_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.single_statement_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.single_statement_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.compound_statement_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.compound_statement_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.sequence_statement_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.sequence_statement_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.single_iteration_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.single_iteration_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.select_iteration_count_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.select_iteration_count_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.conditional_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.conditional_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.unconditional_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.unconditional_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.only_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.only_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.double_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.double_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.conditional_break_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.conditional_break_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.unconditional_break_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.unconditional_break_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.pre_increment_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.pre_increment_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.post_increment_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.post_increment_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.mixed_break_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.mixed_break_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.vector_counter_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.vector_counter_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.101_iterations_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.101_iterations_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.sequence_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.sequence_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.nested_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.nested_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.nested_sequence_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.nested_sequence_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.nested_tricky_dataflow_1_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.nested_tricky_dataflow_1_fragment
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.nested_tricky_dataflow_2_vertex
+dEQP-VK.glsl.loops.special.do_while_uniform_iterations.nested_tricky_dataflow_2_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.empty_body_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.empty_body_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.infinite_with_unconditional_break_first_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.infinite_with_unconditional_break_first_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.infinite_with_unconditional_break_last_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.infinite_with_unconditional_break_last_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.infinite_with_conditional_break_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.infinite_with_conditional_break_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.single_statement_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.single_statement_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.compound_statement_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.compound_statement_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.sequence_statement_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.sequence_statement_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.single_iteration_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.single_iteration_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.select_iteration_count_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.select_iteration_count_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.conditional_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.conditional_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.unconditional_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.unconditional_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.only_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.only_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.double_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.double_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.conditional_break_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.conditional_break_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.unconditional_break_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.unconditional_break_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.pre_increment_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.pre_increment_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.post_increment_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.post_increment_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.mixed_break_continue_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.mixed_break_continue_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.vector_counter_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.vector_counter_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.101_iterations_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.101_iterations_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.sequence_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.sequence_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.nested_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.nested_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.nested_sequence_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.nested_sequence_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.nested_tricky_dataflow_1_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.nested_tricky_dataflow_1_fragment
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.nested_tricky_dataflow_2_vertex
+dEQP-VK.glsl.loops.special.do_while_dynamic_iterations.nested_tricky_dataflow_2_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.add.const.mediump_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.add.const.mediump_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.add.const.highp_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.add.const.highp_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.add.uniform.mediump_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.add.uniform.highp_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.add.uniform.highp_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.add.dynamic.mediump_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.add.dynamic.highp_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.sub.const.mediump_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.sub.const.mediump_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.sub.const.highp_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.sub.const.highp_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.sub.uniform.mediump_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.sub.uniform.highp_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.mediump_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.sub.dynamic.highp_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2_vec2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2_vec2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_vec2_mat2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_vec2_mat2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2_vec2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2_vec2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_vec2_mat2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_vec2_mat2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x3_vec2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x3_vec2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_vec3_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_vec3_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x3_mat2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x3_mat2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x3_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x3_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x3_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x3_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x3_vec2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x3_vec2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_vec3_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_vec3_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x3_mat2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x3_mat2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x3_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x3_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x3_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x3_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x4_vec2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x4_vec2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_vec4_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_vec4_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x4_mat2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x4_mat2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x4_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x4_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x4_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat2x4_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x4_vec2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x4_vec2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_vec4_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_vec4_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x4_mat2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x4_mat2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x4_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x4_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x4_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat2x4_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x2_vec3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x2_vec3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_vec2_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_vec2_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x2_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x2_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x2_mat3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x2_mat3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x2_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x2_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x2_vec3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x2_vec3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_vec2_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_vec2_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x2_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x2_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x2_mat3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x2_mat3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x2_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x2_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3_vec3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3_vec3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_vec3_mat3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_vec3_mat3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3_vec3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3_vec3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_vec3_mat3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_vec3_mat3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x4_vec3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x4_vec3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_vec4_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_vec4_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x4_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x4_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x4_mat3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x4_mat3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x4_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat3x4_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x4_vec3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x4_vec3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_vec4_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_vec4_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x4_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x4_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x4_mat3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x4_mat3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x4_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat3x4_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x2_vec4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x2_vec4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_vec2_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_vec2_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x2_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x2_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x2_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x2_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x2_mat4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x2_mat4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x2_vec4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x2_vec4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_vec2_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_vec2_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x2_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x2_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x2_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x2_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x2_mat4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x2_mat4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x3_vec4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x3_vec4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_vec3_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_vec3_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x3_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x3_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x3_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x3_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x3_mat4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4x3_mat4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x3_vec4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x3_vec4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_vec3_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_vec3_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x3_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x3_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x3_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x3_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x3_mat4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4x3_mat4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4_vec4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4_vec4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_vec4_mat4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_vec4_mat4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.mul.const.mediump_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4_vec4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4_vec4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_vec4_mat4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_vec4_mat4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.const.highp_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.mul.const.highp_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2_vec2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2_vec2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec2_mat2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec2_mat2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2_vec2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2_vec2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec2_mat2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec2_mat2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x3_vec2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x3_vec2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec3_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec3_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x3_mat2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x3_mat2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x3_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x3_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x3_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x3_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x3_vec2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x3_vec2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec3_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec3_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x3_mat2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x3_mat2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x3_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x3_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x3_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x3_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x4_vec2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x4_vec2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec4_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec4_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x4_mat2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x4_mat2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x4_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x4_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x4_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat2x4_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x4_vec2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x4_vec2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec4_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec4_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x4_mat2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x4_mat2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x4_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x4_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x4_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat2x4_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x2_vec3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x2_vec3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec2_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec2_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x2_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x2_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x2_mat3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x2_mat3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x2_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x2_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x2_vec3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x2_vec3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec2_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec2_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x2_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x2_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x2_mat3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x2_mat3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x2_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x2_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3_vec3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3_vec3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec3_mat3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec3_mat3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3_vec3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3_vec3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec3_mat3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec3_mat3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x4_vec3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x4_vec3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec4_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec4_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x4_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x4_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x4_mat3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x4_mat3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x4_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat3x4_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x4_vec3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x4_vec3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec4_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec4_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x4_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x4_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x4_mat3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x4_mat3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x4_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat3x4_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x2_vec4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x2_vec4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec2_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec2_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x2_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x2_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x2_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x2_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x2_mat4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x2_mat4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x2_vec4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x2_vec4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec2_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec2_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x2_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x2_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x2_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x2_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x2_mat4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x2_mat4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x3_vec4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x3_vec4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec3_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec3_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x3_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x3_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x3_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x3_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x3_mat4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4x3_mat4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x3_vec4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x3_vec4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec3_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec3_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x3_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x3_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x3_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x3_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x3_mat4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4x3_mat4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4_vec4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4_vec4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec4_mat4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_vec4_mat4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.mediump_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4_vec4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4_vec4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec4_mat4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_vec4_mat4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.mul.uniform.highp_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2_vec2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2_vec2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec2_mat2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec2_mat2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2_vec2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2_vec2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec2_mat2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec2_mat2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x3_vec2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x3_vec2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec3_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec3_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x3_mat2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x3_mat2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x3_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x3_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x3_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x3_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x3_vec2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x3_vec2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec3_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec3_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x3_mat2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x3_mat2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x3_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x3_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x3_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x3_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x4_vec2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x4_vec2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec4_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec4_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x4_mat2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x4_mat2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x4_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x4_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x4_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat2x4_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x4_vec2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x4_vec2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec4_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec4_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x4_mat2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x4_mat2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x4_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x4_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x4_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat2x4_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x2_vec3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x2_vec3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec2_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec2_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x2_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x2_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x2_mat3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x2_mat3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x2_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x2_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x2_vec3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x2_vec3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec2_mat3x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec2_mat3x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x2_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x2_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x2_mat3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x2_mat3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x2_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x2_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3_vec3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3_vec3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec3_mat3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec3_mat3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3_vec3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3_vec3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec3_mat3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec3_mat3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x4_vec3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x4_vec3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec4_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec4_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x4_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x4_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x4_mat3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x4_mat3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x4_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat3x4_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x4_vec3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x4_vec3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec4_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec4_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x4_mat2x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x4_mat2x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x4_mat3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x4_mat3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x4_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat3x4_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x2_vec4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x2_vec4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec2_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec2_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x2_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x2_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x2_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x2_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x2_mat4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x2_mat4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x2_vec4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x2_vec4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec2_mat4x2_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec2_mat4x2_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x2_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x2_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x2_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x2_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x2_mat4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x2_mat4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x3_vec4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x3_vec4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec3_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec3_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x3_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x3_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x3_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x3_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x3_mat4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4x3_mat4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x3_vec4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x3_vec4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec3_mat4x3_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec3_mat4x3_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x3_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x3_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x3_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x3_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x3_mat4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4x3_mat4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4_vec4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4_vec4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec4_mat4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_vec4_mat4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.mediump_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4_vec4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4_vec4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec4_mat4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_vec4_mat4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4_mat2x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4_mat2x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4_mat3x4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4_mat3x4_fragment
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.mul.dynamic.highp_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.div.const.mediump_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.div.const.mediump_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.div.const.highp_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.div.const.highp_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.div.uniform.mediump_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.div.uniform.highp_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.div.uniform.highp_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.div.dynamic.mediump_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.div.dynamic.highp_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat2_mat2_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat2_mat2_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat2x3_mat2x3_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat2x3_mat2x3_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat2x4_mat2x4_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat2x4_mat2x4_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat3x2_mat3x2_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat3x2_mat3x2_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat3_mat3_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat3_mat3_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat3x4_mat3x4_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat3x4_mat3x4_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat4x2_mat4x2_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat4x2_mat4x2_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat4x3_mat4x3_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat4x3_mat4x3_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.mediump_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat4_mat4_vertex
+dEQP-VK.glsl.matrix.matrixcompmult.dynamic.highp_mat4_mat4_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.outerproduct.dynamic.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.transpose.dynamic.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.determinant.dynamic.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.determinant.dynamic.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.determinant.dynamic.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.determinant.dynamic.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.determinant.dynamic.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.determinant.dynamic.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.determinant.dynamic.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.determinant.dynamic.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.determinant.dynamic.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.determinant.dynamic.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.determinant.dynamic.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.determinant.dynamic.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.inverse.dynamic.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.inverse.dynamic.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.inverse.dynamic.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.inverse.dynamic.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.inverse.dynamic.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.inverse.dynamic.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.inverse.dynamic.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.inverse.dynamic.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.inverse.dynamic.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.inverse.dynamic.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.inverse.dynamic.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.inverse.dynamic.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.unary_addition.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.unary_addition.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.negation.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.negation.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.negation.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.negation.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.negation.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.negation.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.negation.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.negation.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.negation.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.negation.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.negation.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.negation.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.negation.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.negation.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.negation.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.negation.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.negation.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.negation.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.negation.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.negation.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.negation.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.negation.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.negation.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.negation.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.negation.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.negation.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.negation.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.negation.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.negation.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.negation.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.negation.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.negation.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.negation.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.negation.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.negation.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.negation.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.pre_increment.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.pre_increment.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.pre_decrement.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.post_increment.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.post_increment.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.post_increment.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.post_increment.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.post_increment.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.post_increment.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.post_increment.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.post_increment.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.post_increment.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.post_increment.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.post_increment.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.post_increment.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.post_increment.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.post_increment.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.post_increment.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.post_increment.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.post_increment.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.post_increment.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.post_increment.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.post_increment.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.post_increment.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.post_increment.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.post_increment.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.post_increment.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.post_increment.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.post_increment.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.post_increment.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.post_increment.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.post_increment.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.post_increment.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.post_increment.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.post_increment.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.post_increment.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.post_increment.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.post_increment.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.post_increment.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.post_decrement.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.post_decrement.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.add_assign.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.add_assign.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.add_assign.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.add_assign.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.add_assign.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.add_assign.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.add_assign.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.add_assign.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.add_assign.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.add_assign.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.add_assign.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.add_assign.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.add_assign.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.add_assign.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.add_assign.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.add_assign.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.add_assign.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.add_assign.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.add_assign.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.add_assign.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.add_assign.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.add_assign.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.add_assign.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.add_assign.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.add_assign.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.add_assign.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.add_assign.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.add_assign.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.add_assign.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.add_assign.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.add_assign.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.add_assign.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.add_assign.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.add_assign.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.add_assign.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.add_assign.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.sub_assign.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.sub_assign.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.mul_assign.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.mul_assign.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.mul_assign.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.mul_assign.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.mul_assign.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.mul_assign.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.mul_assign.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.mul_assign.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.mul_assign.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.mul_assign.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.mul_assign.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.mul_assign.highp_mat4_float_fragment
+dEQP-VK.glsl.matrix.div_assign.mediump_mat2_float_vertex
+dEQP-VK.glsl.matrix.div_assign.mediump_mat2_float_fragment
+dEQP-VK.glsl.matrix.div_assign.highp_mat2_float_vertex
+dEQP-VK.glsl.matrix.div_assign.highp_mat2_float_fragment
+dEQP-VK.glsl.matrix.div_assign.mediump_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.div_assign.mediump_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.div_assign.highp_mat2x3_float_vertex
+dEQP-VK.glsl.matrix.div_assign.highp_mat2x3_float_fragment
+dEQP-VK.glsl.matrix.div_assign.mediump_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.div_assign.mediump_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.div_assign.highp_mat2x4_float_vertex
+dEQP-VK.glsl.matrix.div_assign.highp_mat2x4_float_fragment
+dEQP-VK.glsl.matrix.div_assign.mediump_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.div_assign.mediump_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.div_assign.highp_mat3x2_float_vertex
+dEQP-VK.glsl.matrix.div_assign.highp_mat3x2_float_fragment
+dEQP-VK.glsl.matrix.div_assign.mediump_mat3_float_vertex
+dEQP-VK.glsl.matrix.div_assign.mediump_mat3_float_fragment
+dEQP-VK.glsl.matrix.div_assign.highp_mat3_float_vertex
+dEQP-VK.glsl.matrix.div_assign.highp_mat3_float_fragment
+dEQP-VK.glsl.matrix.div_assign.mediump_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.div_assign.mediump_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.div_assign.highp_mat3x4_float_vertex
+dEQP-VK.glsl.matrix.div_assign.highp_mat3x4_float_fragment
+dEQP-VK.glsl.matrix.div_assign.mediump_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.div_assign.mediump_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.div_assign.highp_mat4x2_float_vertex
+dEQP-VK.glsl.matrix.div_assign.highp_mat4x2_float_fragment
+dEQP-VK.glsl.matrix.div_assign.mediump_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.div_assign.mediump_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.div_assign.highp_mat4x3_float_vertex
+dEQP-VK.glsl.matrix.div_assign.highp_mat4x3_float_fragment
+dEQP-VK.glsl.matrix.div_assign.mediump_mat4_float_vertex
+dEQP-VK.glsl.matrix.div_assign.mediump_mat4_float_fragment
+dEQP-VK.glsl.matrix.div_assign.highp_mat4_float_vertex
+dEQP-VK.glsl.matrix.div_assign.highp_mat4_float_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_float_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_float_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.highp_float_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.highp_float_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.highp_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.highp_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.highp_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.highp_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.highp_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.highp_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_int_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_int_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.highp_int_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.highp_int_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.highp_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.highp_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.highp_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.highp_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.highp_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.highp_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.highp_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.highp_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.highp_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.highp_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.highp_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.highp_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.plus.highp_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.plus.highp_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_float_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_float_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.highp_float_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.highp_float_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.highp_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.highp_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.highp_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.highp_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.highp_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.highp_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_int_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_int_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.highp_int_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.highp_int_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.highp_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.highp_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.highp_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.highp_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.highp_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.highp_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.highp_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.highp_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.highp_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.highp_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.highp_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.highp_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.minus.highp_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.minus.highp_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.not.bool_vertex
+dEQP-VK.glsl.operator.unary_operator.not.bool_fragment
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_int_vertex
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_int_fragment
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.bitwise_not.highp_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_float_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_float_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_float_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_float_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_int_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_int_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_float_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_float_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_float_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_float_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_int_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_int_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_float_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_float_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_float_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_float_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_int_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_int_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_float_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_float_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_float_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_float_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_int_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_int_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_float_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_float_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_float_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_float_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_int_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_int_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_int_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_int_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_increment_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_float_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_float_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_float_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_float_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_int_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_int_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_int_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_int_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.pre_decrement_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_float_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_float_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_float_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_float_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_int_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_int_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_int_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_int_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_increment_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_float_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_float_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_float_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_float_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_vec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_vec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_vec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_vec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_vec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_vec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_int_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_int_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_int_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_int_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_uint_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_uint_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.unary_operator.post_decrement_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_float_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_float_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_float_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_float_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_float_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_float_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_float_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_float_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_float_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_float_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_float_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_float_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add.mediump_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add.highp_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add.highp_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_float_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_float_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_float_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_float_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_float_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_float_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_float_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_float_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_float_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_float_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_float_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_float_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.mediump_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub.highp_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_float_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_float_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_float_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_float_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_float_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_float_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_float_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_float_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_float_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_float_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_float_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_float_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.mediump_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul.highp_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_float_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_float_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_float_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_float_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_float_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_float_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_float_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_float_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_float_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_float_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_float_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_float_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div.mediump_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div.highp_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div.highp_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.mediump_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod.highp_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.mediump_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and.highp_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.mediump_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or.highp_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_int_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_int_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_int_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_int_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_int_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_int_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.mediump_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uint_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uint_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uint_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uint_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uint_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor.highp_uint_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_int_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_int_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec2_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec2_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec3_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec3_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec4_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec4_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_int_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_int_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec2_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec2_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec3_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec3_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec4_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec4_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uint_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uint_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec2_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec2_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec3_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec3_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec4_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec4_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uint_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uint_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec2_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec2_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec3_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec3_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec4_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec4_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.mediump_uvec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift.highp_uvec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_int_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_int_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec2_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec2_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec3_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec3_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec4_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec4_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_int_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_int_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec2_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec2_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec3_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec3_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec4_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec4_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uint_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uint_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec2_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec2_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec3_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec3_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec4_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec4_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uint_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uint_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec2_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec2_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec3_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec3_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec4_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec4_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.mediump_uvec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift.highp_uvec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_effect.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_effect.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_effect.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_effect.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_effect.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_effect.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_effect.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_effect.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_int_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_int_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec2_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec2_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec3_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec3_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec4_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec4_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_int_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_int_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec2_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec2_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec3_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec3_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec4_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec4_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uint_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uint_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec2_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec2_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec3_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec3_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec4_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec4_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uint_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uint_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec2_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec2_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec3_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec3_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec4_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec4_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.mediump_uvec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_effect.highp_uvec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_int_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_int_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec2_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec2_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec3_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec3_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec4_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec4_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_int_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_int_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec2_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec2_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec3_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec3_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec4_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec4_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uint_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uint_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec2_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec2_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec3_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec3_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec4_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec4_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uint_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uint_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec2_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec2_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec3_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec3_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec4_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec4_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.mediump_uvec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_effect.highp_uvec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.add_assign_result.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.sub_assign_result.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mul_assign_result.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.div_assign_result.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.mod_assign_result.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_and_assign_result.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_or_assign_result.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.bitwise_xor_assign_result.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_int_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_int_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec2_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec2_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec3_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec3_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec4_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec4_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_int_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_int_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec2_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec2_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec3_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec3_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec4_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec4_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uint_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uint_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec2_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec2_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec3_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec3_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec4_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec4_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uint_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uint_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec2_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec2_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec3_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec3_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec4_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec4_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.mediump_uvec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.left_shift_assign_result.highp_uvec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_int_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_int_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec2_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec2_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec3_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec3_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec4_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec4_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_int_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_int_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec2_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec2_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec3_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec3_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec4_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec4_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uint_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uint_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec2_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec2_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec3_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec3_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec4_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec4_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uint_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uint_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec2_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec2_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec3_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec3_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec4_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec4_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.mediump_uvec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec2_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec2_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec3_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec3_int_fragment
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec4_int_vertex
+dEQP-VK.glsl.operator.binary_operator.right_shift_assign_result.highp_uvec4_int_fragment
+dEQP-VK.glsl.operator.binary_operator.less.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.less.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.less.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.less.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.less.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.less.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.less.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.less.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.less.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.less.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.less.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.less.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.less_or_equal.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.less_or_equal.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.less_or_equal.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.less_or_equal.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.less_or_equal.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.less_or_equal.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.less_or_equal.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.less_or_equal.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.less_or_equal.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.less_or_equal.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.less_or_equal.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.less_or_equal.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.greater.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.greater.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.greater.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.greater.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.greater.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.greater.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.greater.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.greater.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.greater.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.greater.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.greater.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.greater.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.greater_or_equal.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.greater_or_equal.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.greater_or_equal.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.greater_or_equal.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.greater_or_equal.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.greater_or_equal.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.greater_or_equal.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.greater_or_equal.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.greater_or_equal.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.greater_or_equal.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.greater_or_equal.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.greater_or_equal.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.bool_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.bool_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.bvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.bvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.bvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.bvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.equal.bvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.equal.bvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_float_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_float_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_float_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_float_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_vec2_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_vec2_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_vec3_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_vec3_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_vec4_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_vec4_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_int_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_int_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_int_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_int_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_ivec2_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_ivec2_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_ivec3_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_ivec3_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_ivec4_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_ivec4_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_uint_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_uint_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_uvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_uvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_uvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_uvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_uvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.highp_uvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.bool_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.bool_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.bvec2_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.bvec2_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.bvec3_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.bvec3_fragment
+dEQP-VK.glsl.operator.binary_operator.not_equal.bvec4_vertex
+dEQP-VK.glsl.operator.binary_operator.not_equal.bvec4_fragment
+dEQP-VK.glsl.operator.binary_operator.logical_and.bool_vertex
+dEQP-VK.glsl.operator.binary_operator.logical_and.bool_fragment
+dEQP-VK.glsl.operator.binary_operator.logical_or.bool_vertex
+dEQP-VK.glsl.operator.binary_operator.logical_or.bool_fragment
+dEQP-VK.glsl.operator.binary_operator.logical_xor.bool_vertex
+dEQP-VK.glsl.operator.binary_operator.logical_xor.bool_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.radians.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.degrees.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin2.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin2.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin2.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin2.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin2.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin2.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin2.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sin2.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos2.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos2.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos2.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos2.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos2.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos2.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos2.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cos2.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan2.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan2.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan2.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan2.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan2.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan2.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan2.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tan2.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asin.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acos.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atan2.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh2.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh2.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh2.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh2.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh2.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh2.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh2.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.sinh2.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh2.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh2.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh2.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh2.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh2.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh2.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh2.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.cosh2.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh2.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh2.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh2.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh2.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh2.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh2.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh2.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.tanh2.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.asinh.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.acosh.highp_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.mediump_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.mediump_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.highp_float_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.highp_float_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.mediump_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.mediump_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.highp_vec2_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.highp_vec2_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.mediump_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.mediump_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.highp_vec3_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.highp_vec3_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.mediump_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.mediump_vec4_fragment
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.highp_vec4_vertex
+dEQP-VK.glsl.operator.angle_and_trigonometry.atanh.highp_vec4_fragment
+dEQP-VK.glsl.operator.exponential.pow.mediump_float_vertex
+dEQP-VK.glsl.operator.exponential.pow.mediump_float_fragment
+dEQP-VK.glsl.operator.exponential.pow.highp_float_vertex
+dEQP-VK.glsl.operator.exponential.pow.highp_float_fragment
+dEQP-VK.glsl.operator.exponential.pow.mediump_vec2_vertex
+dEQP-VK.glsl.operator.exponential.pow.mediump_vec2_fragment
+dEQP-VK.glsl.operator.exponential.pow.highp_vec2_vertex
+dEQP-VK.glsl.operator.exponential.pow.highp_vec2_fragment
+dEQP-VK.glsl.operator.exponential.pow.mediump_vec3_vertex
+dEQP-VK.glsl.operator.exponential.pow.mediump_vec3_fragment
+dEQP-VK.glsl.operator.exponential.pow.highp_vec3_vertex
+dEQP-VK.glsl.operator.exponential.pow.highp_vec3_fragment
+dEQP-VK.glsl.operator.exponential.pow.mediump_vec4_vertex
+dEQP-VK.glsl.operator.exponential.pow.mediump_vec4_fragment
+dEQP-VK.glsl.operator.exponential.pow.highp_vec4_vertex
+dEQP-VK.glsl.operator.exponential.pow.highp_vec4_fragment
+dEQP-VK.glsl.operator.exponential.exp.mediump_float_vertex
+dEQP-VK.glsl.operator.exponential.exp.mediump_float_fragment
+dEQP-VK.glsl.operator.exponential.exp.highp_float_vertex
+dEQP-VK.glsl.operator.exponential.exp.highp_float_fragment
+dEQP-VK.glsl.operator.exponential.exp.mediump_vec2_vertex
+dEQP-VK.glsl.operator.exponential.exp.mediump_vec2_fragment
+dEQP-VK.glsl.operator.exponential.exp.highp_vec2_vertex
+dEQP-VK.glsl.operator.exponential.exp.highp_vec2_fragment
+dEQP-VK.glsl.operator.exponential.exp.mediump_vec3_vertex
+dEQP-VK.glsl.operator.exponential.exp.mediump_vec3_fragment
+dEQP-VK.glsl.operator.exponential.exp.highp_vec3_vertex
+dEQP-VK.glsl.operator.exponential.exp.highp_vec3_fragment
+dEQP-VK.glsl.operator.exponential.exp.mediump_vec4_vertex
+dEQP-VK.glsl.operator.exponential.exp.mediump_vec4_fragment
+dEQP-VK.glsl.operator.exponential.exp.highp_vec4_vertex
+dEQP-VK.glsl.operator.exponential.exp.highp_vec4_fragment
+dEQP-VK.glsl.operator.exponential.log.mediump_float_vertex
+dEQP-VK.glsl.operator.exponential.log.mediump_float_fragment
+dEQP-VK.glsl.operator.exponential.log.highp_float_vertex
+dEQP-VK.glsl.operator.exponential.log.highp_float_fragment
+dEQP-VK.glsl.operator.exponential.log.mediump_vec2_vertex
+dEQP-VK.glsl.operator.exponential.log.mediump_vec2_fragment
+dEQP-VK.glsl.operator.exponential.log.highp_vec2_vertex
+dEQP-VK.glsl.operator.exponential.log.highp_vec2_fragment
+dEQP-VK.glsl.operator.exponential.log.mediump_vec3_vertex
+dEQP-VK.glsl.operator.exponential.log.mediump_vec3_fragment
+dEQP-VK.glsl.operator.exponential.log.highp_vec3_vertex
+dEQP-VK.glsl.operator.exponential.log.highp_vec3_fragment
+dEQP-VK.glsl.operator.exponential.log.mediump_vec4_vertex
+dEQP-VK.glsl.operator.exponential.log.mediump_vec4_fragment
+dEQP-VK.glsl.operator.exponential.log.highp_vec4_vertex
+dEQP-VK.glsl.operator.exponential.log.highp_vec4_fragment
+dEQP-VK.glsl.operator.exponential.exp2.mediump_float_vertex
+dEQP-VK.glsl.operator.exponential.exp2.mediump_float_fragment
+dEQP-VK.glsl.operator.exponential.exp2.highp_float_vertex
+dEQP-VK.glsl.operator.exponential.exp2.highp_float_fragment
+dEQP-VK.glsl.operator.exponential.exp2.mediump_vec2_vertex
+dEQP-VK.glsl.operator.exponential.exp2.mediump_vec2_fragment
+dEQP-VK.glsl.operator.exponential.exp2.highp_vec2_vertex
+dEQP-VK.glsl.operator.exponential.exp2.highp_vec2_fragment
+dEQP-VK.glsl.operator.exponential.exp2.mediump_vec3_vertex
+dEQP-VK.glsl.operator.exponential.exp2.mediump_vec3_fragment
+dEQP-VK.glsl.operator.exponential.exp2.highp_vec3_vertex
+dEQP-VK.glsl.operator.exponential.exp2.highp_vec3_fragment
+dEQP-VK.glsl.operator.exponential.exp2.mediump_vec4_vertex
+dEQP-VK.glsl.operator.exponential.exp2.mediump_vec4_fragment
+dEQP-VK.glsl.operator.exponential.exp2.highp_vec4_vertex
+dEQP-VK.glsl.operator.exponential.exp2.highp_vec4_fragment
+dEQP-VK.glsl.operator.exponential.log2.mediump_float_vertex
+dEQP-VK.glsl.operator.exponential.log2.mediump_float_fragment
+dEQP-VK.glsl.operator.exponential.log2.highp_float_vertex
+dEQP-VK.glsl.operator.exponential.log2.highp_float_fragment
+dEQP-VK.glsl.operator.exponential.log2.mediump_vec2_vertex
+dEQP-VK.glsl.operator.exponential.log2.mediump_vec2_fragment
+dEQP-VK.glsl.operator.exponential.log2.highp_vec2_vertex
+dEQP-VK.glsl.operator.exponential.log2.highp_vec2_fragment
+dEQP-VK.glsl.operator.exponential.log2.mediump_vec3_vertex
+dEQP-VK.glsl.operator.exponential.log2.mediump_vec3_fragment
+dEQP-VK.glsl.operator.exponential.log2.highp_vec3_vertex
+dEQP-VK.glsl.operator.exponential.log2.highp_vec3_fragment
+dEQP-VK.glsl.operator.exponential.log2.mediump_vec4_vertex
+dEQP-VK.glsl.operator.exponential.log2.mediump_vec4_fragment
+dEQP-VK.glsl.operator.exponential.log2.highp_vec4_vertex
+dEQP-VK.glsl.operator.exponential.log2.highp_vec4_fragment
+dEQP-VK.glsl.operator.exponential.sqrt.mediump_float_vertex
+dEQP-VK.glsl.operator.exponential.sqrt.mediump_float_fragment
+dEQP-VK.glsl.operator.exponential.sqrt.highp_float_vertex
+dEQP-VK.glsl.operator.exponential.sqrt.highp_float_fragment
+dEQP-VK.glsl.operator.exponential.sqrt.mediump_vec2_vertex
+dEQP-VK.glsl.operator.exponential.sqrt.mediump_vec2_fragment
+dEQP-VK.glsl.operator.exponential.sqrt.highp_vec2_vertex
+dEQP-VK.glsl.operator.exponential.sqrt.highp_vec2_fragment
+dEQP-VK.glsl.operator.exponential.sqrt.mediump_vec3_vertex
+dEQP-VK.glsl.operator.exponential.sqrt.mediump_vec3_fragment
+dEQP-VK.glsl.operator.exponential.sqrt.highp_vec3_vertex
+dEQP-VK.glsl.operator.exponential.sqrt.highp_vec3_fragment
+dEQP-VK.glsl.operator.exponential.sqrt.mediump_vec4_vertex
+dEQP-VK.glsl.operator.exponential.sqrt.mediump_vec4_fragment
+dEQP-VK.glsl.operator.exponential.sqrt.highp_vec4_vertex
+dEQP-VK.glsl.operator.exponential.sqrt.highp_vec4_fragment
+dEQP-VK.glsl.operator.exponential.inversesqrt.mediump_float_vertex
+dEQP-VK.glsl.operator.exponential.inversesqrt.mediump_float_fragment
+dEQP-VK.glsl.operator.exponential.inversesqrt.highp_float_vertex
+dEQP-VK.glsl.operator.exponential.inversesqrt.highp_float_fragment
+dEQP-VK.glsl.operator.exponential.inversesqrt.mediump_vec2_vertex
+dEQP-VK.glsl.operator.exponential.inversesqrt.mediump_vec2_fragment
+dEQP-VK.glsl.operator.exponential.inversesqrt.highp_vec2_vertex
+dEQP-VK.glsl.operator.exponential.inversesqrt.highp_vec2_fragment
+dEQP-VK.glsl.operator.exponential.inversesqrt.mediump_vec3_vertex
+dEQP-VK.glsl.operator.exponential.inversesqrt.mediump_vec3_fragment
+dEQP-VK.glsl.operator.exponential.inversesqrt.highp_vec3_vertex
+dEQP-VK.glsl.operator.exponential.inversesqrt.highp_vec3_fragment
+dEQP-VK.glsl.operator.exponential.inversesqrt.mediump_vec4_vertex
+dEQP-VK.glsl.operator.exponential.inversesqrt.mediump_vec4_fragment
+dEQP-VK.glsl.operator.exponential.inversesqrt.highp_vec4_vertex
+dEQP-VK.glsl.operator.exponential.inversesqrt.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.abs.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.abs.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.abs.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.abs.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.abs.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.abs.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.abs.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.abs.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.abs.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.abs.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.abs.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.abs.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.abs.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.abs.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.abs.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.abs.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.sign.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.sign.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.sign.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.sign.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.sign.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.sign.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.sign.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.sign.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.sign.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.sign.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.sign.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.sign.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.sign.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.sign.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.sign.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.sign.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.floor.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.floor.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.floor.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.floor.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.floor.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.floor.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.floor.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.floor.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.floor.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.floor.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.floor.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.floor.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.floor.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.floor.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.floor.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.floor.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.trunc.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.trunc.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.trunc.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.trunc.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.trunc.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.trunc.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.trunc.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.trunc.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.trunc.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.trunc.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.trunc.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.trunc.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.trunc.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.trunc.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.trunc.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.trunc.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.round.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.round.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.round.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.round.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.round.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.round.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.round.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.round.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.round.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.round.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.round.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.round.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.round.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.round.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.round.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.round.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.roundEven.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.roundEven.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.roundEven.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.roundEven.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.roundEven.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.roundEven.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.roundEven.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.roundEven.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.roundEven.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.roundEven.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.roundEven.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.roundEven.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.roundEven.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.roundEven.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.roundEven.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.roundEven.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.ceil.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.ceil.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.ceil.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.ceil.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.ceil.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.ceil.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.ceil.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.ceil.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.ceil.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.ceil.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.ceil.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.ceil.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.ceil.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.ceil.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.ceil.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.ceil.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.fract.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.fract.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.fract.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.fract.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.fract.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.fract.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.fract.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.fract.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.fract.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.fract.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.fract.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.fract.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.fract.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.fract.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.fract.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.fract.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.mod.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.mod.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.mod.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.mod.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.mod.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.mod.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.mod.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.mod.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.mod.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.mod.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.mod.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.mod.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.mod.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.mod.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.mod.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.mod.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.mod.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.common_functions.mod.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.common_functions.mod.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.common_functions.mod.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.common_functions.mod.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.common_functions.mod.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.common_functions.mod.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.common_functions.mod.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.common_functions.mod.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.common_functions.mod.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.common_functions.mod.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.common_functions.mod.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_int_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_int_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_int_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_int_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_ivec2_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_ivec2_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_ivec3_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_ivec3_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_ivec4_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_ivec4_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_uint_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_uint_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_uint_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_uint_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_uvec2_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_uvec2_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_uvec3_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_uvec3_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_uvec4_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_uvec4_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.common_functions.min.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.common_functions.min.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.common_functions.min.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.common_functions.min.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_int_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_int_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_int_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_int_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_ivec2_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_ivec2_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_ivec3_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_ivec3_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_ivec4_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_ivec4_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_uint_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_uint_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_uint_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_uint_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_uvec2_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_uvec2_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_uvec3_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_uvec3_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_uvec4_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_uvec4_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.common_functions.max.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.common_functions.max.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.common_functions.max.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.common_functions.max.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_int_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_int_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_int_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_int_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_ivec2_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_ivec2_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_ivec3_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_ivec3_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_ivec4_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_ivec4_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_ivec2_int_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_ivec2_int_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_ivec2_int_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_ivec2_int_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_ivec3_int_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_ivec3_int_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_ivec3_int_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_ivec3_int_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_ivec4_int_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_ivec4_int_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_ivec4_int_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_ivec4_int_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uint_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uint_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uint_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uint_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uvec2_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uvec2_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uvec3_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uvec3_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uvec4_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uvec4_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uvec2_uint_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uvec2_uint_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uvec2_uint_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uvec2_uint_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uvec3_uint_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uvec3_uint_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uvec3_uint_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uvec3_uint_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uvec4_uint_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.mediump_uvec4_uint_fragment
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uvec4_uint_vertex
+dEQP-VK.glsl.operator.common_functions.clamp.highp_uvec4_uint_fragment
+dEQP-VK.glsl.operator.common_functions.mix.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.mix.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.mix.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.mix.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.mix.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.mix.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.mix.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.mix.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.mix.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.mix.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.mix.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.mix.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.mix.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.mix.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.mix.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.mix.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.mix.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.common_functions.mix.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.common_functions.mix.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.common_functions.mix.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.common_functions.mix.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.common_functions.mix.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.common_functions.mix.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.common_functions.mix.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.common_functions.mix.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.common_functions.mix.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.common_functions.mix.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.common_functions.mix.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.common_functions.step.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.step.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.step.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.step.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.step.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.step.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.step.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.step.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.step.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.step.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.step.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.step.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.step.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.step.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.step.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.step.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.step.mediump_float_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.step.mediump_float_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.step.highp_float_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.step.highp_float_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.step.mediump_float_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.step.mediump_float_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.step.highp_float_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.step.highp_float_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.step.mediump_float_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.step.mediump_float_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.step.highp_float_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.step.highp_float_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_float_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_float_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_float_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_float_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_float_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_float_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_float_vec2_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_float_vec2_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_float_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_float_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_float_vec3_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_float_vec3_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_float_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.mediump_float_vec4_fragment
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_float_vec4_vertex
+dEQP-VK.glsl.operator.common_functions.smoothstep.highp_float_vec4_fragment
+dEQP-VK.glsl.operator.geometric.length.mediump_float_vertex
+dEQP-VK.glsl.operator.geometric.length.mediump_float_fragment
+dEQP-VK.glsl.operator.geometric.length.highp_float_vertex
+dEQP-VK.glsl.operator.geometric.length.highp_float_fragment
+dEQP-VK.glsl.operator.geometric.length.mediump_vec2_vertex
+dEQP-VK.glsl.operator.geometric.length.mediump_vec2_fragment
+dEQP-VK.glsl.operator.geometric.length.highp_vec2_vertex
+dEQP-VK.glsl.operator.geometric.length.highp_vec2_fragment
+dEQP-VK.glsl.operator.geometric.length.mediump_vec3_vertex
+dEQP-VK.glsl.operator.geometric.length.mediump_vec3_fragment
+dEQP-VK.glsl.operator.geometric.length.highp_vec3_vertex
+dEQP-VK.glsl.operator.geometric.length.highp_vec3_fragment
+dEQP-VK.glsl.operator.geometric.length.mediump_vec4_vertex
+dEQP-VK.glsl.operator.geometric.length.mediump_vec4_fragment
+dEQP-VK.glsl.operator.geometric.length.highp_vec4_vertex
+dEQP-VK.glsl.operator.geometric.length.highp_vec4_fragment
+dEQP-VK.glsl.operator.geometric.distance.mediump_float_vertex
+dEQP-VK.glsl.operator.geometric.distance.mediump_float_fragment
+dEQP-VK.glsl.operator.geometric.distance.highp_float_vertex
+dEQP-VK.glsl.operator.geometric.distance.highp_float_fragment
+dEQP-VK.glsl.operator.geometric.distance.mediump_vec2_vertex
+dEQP-VK.glsl.operator.geometric.distance.mediump_vec2_fragment
+dEQP-VK.glsl.operator.geometric.distance.highp_vec2_vertex
+dEQP-VK.glsl.operator.geometric.distance.highp_vec2_fragment
+dEQP-VK.glsl.operator.geometric.distance.mediump_vec3_vertex
+dEQP-VK.glsl.operator.geometric.distance.mediump_vec3_fragment
+dEQP-VK.glsl.operator.geometric.distance.highp_vec3_vertex
+dEQP-VK.glsl.operator.geometric.distance.highp_vec3_fragment
+dEQP-VK.glsl.operator.geometric.distance.mediump_vec4_vertex
+dEQP-VK.glsl.operator.geometric.distance.mediump_vec4_fragment
+dEQP-VK.glsl.operator.geometric.distance.highp_vec4_vertex
+dEQP-VK.glsl.operator.geometric.distance.highp_vec4_fragment
+dEQP-VK.glsl.operator.geometric.dot.mediump_float_vertex
+dEQP-VK.glsl.operator.geometric.dot.mediump_float_fragment
+dEQP-VK.glsl.operator.geometric.dot.highp_float_vertex
+dEQP-VK.glsl.operator.geometric.dot.highp_float_fragment
+dEQP-VK.glsl.operator.geometric.dot.mediump_vec2_vertex
+dEQP-VK.glsl.operator.geometric.dot.mediump_vec2_fragment
+dEQP-VK.glsl.operator.geometric.dot.highp_vec2_vertex
+dEQP-VK.glsl.operator.geometric.dot.highp_vec2_fragment
+dEQP-VK.glsl.operator.geometric.dot.mediump_vec3_vertex
+dEQP-VK.glsl.operator.geometric.dot.mediump_vec3_fragment
+dEQP-VK.glsl.operator.geometric.dot.highp_vec3_vertex
+dEQP-VK.glsl.operator.geometric.dot.highp_vec3_fragment
+dEQP-VK.glsl.operator.geometric.dot.mediump_vec4_vertex
+dEQP-VK.glsl.operator.geometric.dot.mediump_vec4_fragment
+dEQP-VK.glsl.operator.geometric.dot.highp_vec4_vertex
+dEQP-VK.glsl.operator.geometric.dot.highp_vec4_fragment
+dEQP-VK.glsl.operator.geometric.cross.mediump_vec3_vertex
+dEQP-VK.glsl.operator.geometric.cross.mediump_vec3_fragment
+dEQP-VK.glsl.operator.geometric.cross.highp_vec3_vertex
+dEQP-VK.glsl.operator.geometric.cross.highp_vec3_fragment
+dEQP-VK.glsl.operator.geometric.normalize.mediump_float_vertex
+dEQP-VK.glsl.operator.geometric.normalize.mediump_float_fragment
+dEQP-VK.glsl.operator.geometric.normalize.highp_float_vertex
+dEQP-VK.glsl.operator.geometric.normalize.highp_float_fragment
+dEQP-VK.glsl.operator.geometric.normalize.mediump_vec2_vertex
+dEQP-VK.glsl.operator.geometric.normalize.mediump_vec2_fragment
+dEQP-VK.glsl.operator.geometric.normalize.highp_vec2_vertex
+dEQP-VK.glsl.operator.geometric.normalize.highp_vec2_fragment
+dEQP-VK.glsl.operator.geometric.normalize.mediump_vec3_vertex
+dEQP-VK.glsl.operator.geometric.normalize.mediump_vec3_fragment
+dEQP-VK.glsl.operator.geometric.normalize.highp_vec3_vertex
+dEQP-VK.glsl.operator.geometric.normalize.highp_vec3_fragment
+dEQP-VK.glsl.operator.geometric.normalize.mediump_vec4_vertex
+dEQP-VK.glsl.operator.geometric.normalize.mediump_vec4_fragment
+dEQP-VK.glsl.operator.geometric.normalize.highp_vec4_vertex
+dEQP-VK.glsl.operator.geometric.normalize.highp_vec4_fragment
+dEQP-VK.glsl.operator.geometric.faceforward.mediump_float_vertex
+dEQP-VK.glsl.operator.geometric.faceforward.mediump_float_fragment
+dEQP-VK.glsl.operator.geometric.faceforward.highp_float_vertex
+dEQP-VK.glsl.operator.geometric.faceforward.highp_float_fragment
+dEQP-VK.glsl.operator.geometric.faceforward.mediump_vec2_vertex
+dEQP-VK.glsl.operator.geometric.faceforward.mediump_vec2_fragment
+dEQP-VK.glsl.operator.geometric.faceforward.highp_vec2_vertex
+dEQP-VK.glsl.operator.geometric.faceforward.highp_vec2_fragment
+dEQP-VK.glsl.operator.geometric.faceforward.mediump_vec3_vertex
+dEQP-VK.glsl.operator.geometric.faceforward.mediump_vec3_fragment
+dEQP-VK.glsl.operator.geometric.faceforward.highp_vec3_vertex
+dEQP-VK.glsl.operator.geometric.faceforward.highp_vec3_fragment
+dEQP-VK.glsl.operator.geometric.faceforward.mediump_vec4_vertex
+dEQP-VK.glsl.operator.geometric.faceforward.mediump_vec4_fragment
+dEQP-VK.glsl.operator.geometric.faceforward.highp_vec4_vertex
+dEQP-VK.glsl.operator.geometric.faceforward.highp_vec4_fragment
+dEQP-VK.glsl.operator.geometric.reflect.mediump_float_vertex
+dEQP-VK.glsl.operator.geometric.reflect.mediump_float_fragment
+dEQP-VK.glsl.operator.geometric.reflect.highp_float_vertex
+dEQP-VK.glsl.operator.geometric.reflect.highp_float_fragment
+dEQP-VK.glsl.operator.geometric.reflect.mediump_vec2_vertex
+dEQP-VK.glsl.operator.geometric.reflect.mediump_vec2_fragment
+dEQP-VK.glsl.operator.geometric.reflect.highp_vec2_vertex
+dEQP-VK.glsl.operator.geometric.reflect.highp_vec2_fragment
+dEQP-VK.glsl.operator.geometric.reflect.mediump_vec3_vertex
+dEQP-VK.glsl.operator.geometric.reflect.mediump_vec3_fragment
+dEQP-VK.glsl.operator.geometric.reflect.highp_vec3_vertex
+dEQP-VK.glsl.operator.geometric.reflect.highp_vec3_fragment
+dEQP-VK.glsl.operator.geometric.reflect.mediump_vec4_vertex
+dEQP-VK.glsl.operator.geometric.reflect.mediump_vec4_fragment
+dEQP-VK.glsl.operator.geometric.reflect.highp_vec4_vertex
+dEQP-VK.glsl.operator.geometric.reflect.highp_vec4_fragment
+dEQP-VK.glsl.operator.geometric.refract.mediump_float_vertex
+dEQP-VK.glsl.operator.geometric.refract.mediump_float_fragment
+dEQP-VK.glsl.operator.geometric.refract.highp_float_vertex
+dEQP-VK.glsl.operator.geometric.refract.highp_float_fragment
+dEQP-VK.glsl.operator.geometric.refract.mediump_vec2_float_vertex
+dEQP-VK.glsl.operator.geometric.refract.mediump_vec2_float_fragment
+dEQP-VK.glsl.operator.geometric.refract.highp_vec2_float_vertex
+dEQP-VK.glsl.operator.geometric.refract.highp_vec2_float_fragment
+dEQP-VK.glsl.operator.geometric.refract.mediump_vec3_float_vertex
+dEQP-VK.glsl.operator.geometric.refract.mediump_vec3_float_fragment
+dEQP-VK.glsl.operator.geometric.refract.highp_vec3_float_vertex
+dEQP-VK.glsl.operator.geometric.refract.highp_vec3_float_fragment
+dEQP-VK.glsl.operator.geometric.refract.mediump_vec4_float_vertex
+dEQP-VK.glsl.operator.geometric.refract.mediump_vec4_float_fragment
+dEQP-VK.glsl.operator.geometric.refract.highp_vec4_float_vertex
+dEQP-VK.glsl.operator.geometric.refract.highp_vec4_float_fragment
+dEQP-VK.glsl.operator.float_compare.lessThan.mediump_vec2_vertex
+dEQP-VK.glsl.operator.float_compare.lessThan.mediump_vec2_fragment
+dEQP-VK.glsl.operator.float_compare.lessThan.highp_vec2_vertex
+dEQP-VK.glsl.operator.float_compare.lessThan.highp_vec2_fragment
+dEQP-VK.glsl.operator.float_compare.lessThan.mediump_vec3_vertex
+dEQP-VK.glsl.operator.float_compare.lessThan.mediump_vec3_fragment
+dEQP-VK.glsl.operator.float_compare.lessThan.highp_vec3_vertex
+dEQP-VK.glsl.operator.float_compare.lessThan.highp_vec3_fragment
+dEQP-VK.glsl.operator.float_compare.lessThan.mediump_vec4_vertex
+dEQP-VK.glsl.operator.float_compare.lessThan.mediump_vec4_fragment
+dEQP-VK.glsl.operator.float_compare.lessThan.highp_vec4_vertex
+dEQP-VK.glsl.operator.float_compare.lessThan.highp_vec4_fragment
+dEQP-VK.glsl.operator.float_compare.lessThanEqual.mediump_vec2_vertex
+dEQP-VK.glsl.operator.float_compare.lessThanEqual.mediump_vec2_fragment
+dEQP-VK.glsl.operator.float_compare.lessThanEqual.highp_vec2_vertex
+dEQP-VK.glsl.operator.float_compare.lessThanEqual.highp_vec2_fragment
+dEQP-VK.glsl.operator.float_compare.lessThanEqual.mediump_vec3_vertex
+dEQP-VK.glsl.operator.float_compare.lessThanEqual.mediump_vec3_fragment
+dEQP-VK.glsl.operator.float_compare.lessThanEqual.highp_vec3_vertex
+dEQP-VK.glsl.operator.float_compare.lessThanEqual.highp_vec3_fragment
+dEQP-VK.glsl.operator.float_compare.lessThanEqual.mediump_vec4_vertex
+dEQP-VK.glsl.operator.float_compare.lessThanEqual.mediump_vec4_fragment
+dEQP-VK.glsl.operator.float_compare.lessThanEqual.highp_vec4_vertex
+dEQP-VK.glsl.operator.float_compare.lessThanEqual.highp_vec4_fragment
+dEQP-VK.glsl.operator.float_compare.greaterThan.mediump_vec2_vertex
+dEQP-VK.glsl.operator.float_compare.greaterThan.mediump_vec2_fragment
+dEQP-VK.glsl.operator.float_compare.greaterThan.highp_vec2_vertex
+dEQP-VK.glsl.operator.float_compare.greaterThan.highp_vec2_fragment
+dEQP-VK.glsl.operator.float_compare.greaterThan.mediump_vec3_vertex
+dEQP-VK.glsl.operator.float_compare.greaterThan.mediump_vec3_fragment
+dEQP-VK.glsl.operator.float_compare.greaterThan.highp_vec3_vertex
+dEQP-VK.glsl.operator.float_compare.greaterThan.highp_vec3_fragment
+dEQP-VK.glsl.operator.float_compare.greaterThan.mediump_vec4_vertex
+dEQP-VK.glsl.operator.float_compare.greaterThan.mediump_vec4_fragment
+dEQP-VK.glsl.operator.float_compare.greaterThan.highp_vec4_vertex
+dEQP-VK.glsl.operator.float_compare.greaterThan.highp_vec4_fragment
+dEQP-VK.glsl.operator.float_compare.greaterThanEqual.mediump_vec2_vertex
+dEQP-VK.glsl.operator.float_compare.greaterThanEqual.mediump_vec2_fragment
+dEQP-VK.glsl.operator.float_compare.greaterThanEqual.highp_vec2_vertex
+dEQP-VK.glsl.operator.float_compare.greaterThanEqual.highp_vec2_fragment
+dEQP-VK.glsl.operator.float_compare.greaterThanEqual.mediump_vec3_vertex
+dEQP-VK.glsl.operator.float_compare.greaterThanEqual.mediump_vec3_fragment
+dEQP-VK.glsl.operator.float_compare.greaterThanEqual.highp_vec3_vertex
+dEQP-VK.glsl.operator.float_compare.greaterThanEqual.highp_vec3_fragment
+dEQP-VK.glsl.operator.float_compare.greaterThanEqual.mediump_vec4_vertex
+dEQP-VK.glsl.operator.float_compare.greaterThanEqual.mediump_vec4_fragment
+dEQP-VK.glsl.operator.float_compare.greaterThanEqual.highp_vec4_vertex
+dEQP-VK.glsl.operator.float_compare.greaterThanEqual.highp_vec4_fragment
+dEQP-VK.glsl.operator.float_compare.equal.mediump_vec2_vertex
+dEQP-VK.glsl.operator.float_compare.equal.mediump_vec2_fragment
+dEQP-VK.glsl.operator.float_compare.equal.highp_vec2_vertex
+dEQP-VK.glsl.operator.float_compare.equal.highp_vec2_fragment
+dEQP-VK.glsl.operator.float_compare.equal.mediump_vec3_vertex
+dEQP-VK.glsl.operator.float_compare.equal.mediump_vec3_fragment
+dEQP-VK.glsl.operator.float_compare.equal.highp_vec3_vertex
+dEQP-VK.glsl.operator.float_compare.equal.highp_vec3_fragment
+dEQP-VK.glsl.operator.float_compare.equal.mediump_vec4_vertex
+dEQP-VK.glsl.operator.float_compare.equal.mediump_vec4_fragment
+dEQP-VK.glsl.operator.float_compare.equal.highp_vec4_vertex
+dEQP-VK.glsl.operator.float_compare.equal.highp_vec4_fragment
+dEQP-VK.glsl.operator.float_compare.notEqual.mediump_vec2_vertex
+dEQP-VK.glsl.operator.float_compare.notEqual.mediump_vec2_fragment
+dEQP-VK.glsl.operator.float_compare.notEqual.highp_vec2_vertex
+dEQP-VK.glsl.operator.float_compare.notEqual.highp_vec2_fragment
+dEQP-VK.glsl.operator.float_compare.notEqual.mediump_vec3_vertex
+dEQP-VK.glsl.operator.float_compare.notEqual.mediump_vec3_fragment
+dEQP-VK.glsl.operator.float_compare.notEqual.highp_vec3_vertex
+dEQP-VK.glsl.operator.float_compare.notEqual.highp_vec3_fragment
+dEQP-VK.glsl.operator.float_compare.notEqual.mediump_vec4_vertex
+dEQP-VK.glsl.operator.float_compare.notEqual.mediump_vec4_fragment
+dEQP-VK.glsl.operator.float_compare.notEqual.highp_vec4_vertex
+dEQP-VK.glsl.operator.float_compare.notEqual.highp_vec4_fragment
+dEQP-VK.glsl.operator.int_compare.lessThan.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.int_compare.lessThan.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.int_compare.lessThan.highp_ivec2_vertex
+dEQP-VK.glsl.operator.int_compare.lessThan.highp_ivec2_fragment
+dEQP-VK.glsl.operator.int_compare.lessThan.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.int_compare.lessThan.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.int_compare.lessThan.highp_ivec3_vertex
+dEQP-VK.glsl.operator.int_compare.lessThan.highp_ivec3_fragment
+dEQP-VK.glsl.operator.int_compare.lessThan.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.int_compare.lessThan.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.int_compare.lessThan.highp_ivec4_vertex
+dEQP-VK.glsl.operator.int_compare.lessThan.highp_ivec4_fragment
+dEQP-VK.glsl.operator.int_compare.lessThanEqual.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.int_compare.lessThanEqual.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.int_compare.lessThanEqual.highp_ivec2_vertex
+dEQP-VK.glsl.operator.int_compare.lessThanEqual.highp_ivec2_fragment
+dEQP-VK.glsl.operator.int_compare.lessThanEqual.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.int_compare.lessThanEqual.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.int_compare.lessThanEqual.highp_ivec3_vertex
+dEQP-VK.glsl.operator.int_compare.lessThanEqual.highp_ivec3_fragment
+dEQP-VK.glsl.operator.int_compare.lessThanEqual.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.int_compare.lessThanEqual.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.int_compare.lessThanEqual.highp_ivec4_vertex
+dEQP-VK.glsl.operator.int_compare.lessThanEqual.highp_ivec4_fragment
+dEQP-VK.glsl.operator.int_compare.greaterThan.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.int_compare.greaterThan.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.int_compare.greaterThan.highp_ivec2_vertex
+dEQP-VK.glsl.operator.int_compare.greaterThan.highp_ivec2_fragment
+dEQP-VK.glsl.operator.int_compare.greaterThan.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.int_compare.greaterThan.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.int_compare.greaterThan.highp_ivec3_vertex
+dEQP-VK.glsl.operator.int_compare.greaterThan.highp_ivec3_fragment
+dEQP-VK.glsl.operator.int_compare.greaterThan.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.int_compare.greaterThan.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.int_compare.greaterThan.highp_ivec4_vertex
+dEQP-VK.glsl.operator.int_compare.greaterThan.highp_ivec4_fragment
+dEQP-VK.glsl.operator.int_compare.greaterThanEqual.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.int_compare.greaterThanEqual.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.int_compare.greaterThanEqual.highp_ivec2_vertex
+dEQP-VK.glsl.operator.int_compare.greaterThanEqual.highp_ivec2_fragment
+dEQP-VK.glsl.operator.int_compare.greaterThanEqual.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.int_compare.greaterThanEqual.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.int_compare.greaterThanEqual.highp_ivec3_vertex
+dEQP-VK.glsl.operator.int_compare.greaterThanEqual.highp_ivec3_fragment
+dEQP-VK.glsl.operator.int_compare.greaterThanEqual.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.int_compare.greaterThanEqual.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.int_compare.greaterThanEqual.highp_ivec4_vertex
+dEQP-VK.glsl.operator.int_compare.greaterThanEqual.highp_ivec4_fragment
+dEQP-VK.glsl.operator.int_compare.equal.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.int_compare.equal.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.int_compare.equal.highp_ivec2_vertex
+dEQP-VK.glsl.operator.int_compare.equal.highp_ivec2_fragment
+dEQP-VK.glsl.operator.int_compare.equal.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.int_compare.equal.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.int_compare.equal.highp_ivec3_vertex
+dEQP-VK.glsl.operator.int_compare.equal.highp_ivec3_fragment
+dEQP-VK.glsl.operator.int_compare.equal.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.int_compare.equal.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.int_compare.equal.highp_ivec4_vertex
+dEQP-VK.glsl.operator.int_compare.equal.highp_ivec4_fragment
+dEQP-VK.glsl.operator.int_compare.notEqual.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.int_compare.notEqual.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.int_compare.notEqual.highp_ivec2_vertex
+dEQP-VK.glsl.operator.int_compare.notEqual.highp_ivec2_fragment
+dEQP-VK.glsl.operator.int_compare.notEqual.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.int_compare.notEqual.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.int_compare.notEqual.highp_ivec3_vertex
+dEQP-VK.glsl.operator.int_compare.notEqual.highp_ivec3_fragment
+dEQP-VK.glsl.operator.int_compare.notEqual.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.int_compare.notEqual.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.int_compare.notEqual.highp_ivec4_vertex
+dEQP-VK.glsl.operator.int_compare.notEqual.highp_ivec4_fragment
+dEQP-VK.glsl.operator.bool_compare.equal.bvec2_vertex
+dEQP-VK.glsl.operator.bool_compare.equal.bvec2_fragment
+dEQP-VK.glsl.operator.bool_compare.equal.bvec3_vertex
+dEQP-VK.glsl.operator.bool_compare.equal.bvec3_fragment
+dEQP-VK.glsl.operator.bool_compare.equal.bvec4_vertex
+dEQP-VK.glsl.operator.bool_compare.equal.bvec4_fragment
+dEQP-VK.glsl.operator.bool_compare.notEqual.bvec2_vertex
+dEQP-VK.glsl.operator.bool_compare.notEqual.bvec2_fragment
+dEQP-VK.glsl.operator.bool_compare.notEqual.bvec3_vertex
+dEQP-VK.glsl.operator.bool_compare.notEqual.bvec3_fragment
+dEQP-VK.glsl.operator.bool_compare.notEqual.bvec4_vertex
+dEQP-VK.glsl.operator.bool_compare.notEqual.bvec4_fragment
+dEQP-VK.glsl.operator.bool_compare.any.bvec2_vertex
+dEQP-VK.glsl.operator.bool_compare.any.bvec2_fragment
+dEQP-VK.glsl.operator.bool_compare.any.bvec3_vertex
+dEQP-VK.glsl.operator.bool_compare.any.bvec3_fragment
+dEQP-VK.glsl.operator.bool_compare.any.bvec4_vertex
+dEQP-VK.glsl.operator.bool_compare.any.bvec4_fragment
+dEQP-VK.glsl.operator.bool_compare.all.bvec2_vertex
+dEQP-VK.glsl.operator.bool_compare.all.bvec2_fragment
+dEQP-VK.glsl.operator.bool_compare.all.bvec3_vertex
+dEQP-VK.glsl.operator.bool_compare.all.bvec3_fragment
+dEQP-VK.glsl.operator.bool_compare.all.bvec4_vertex
+dEQP-VK.glsl.operator.bool_compare.all.bvec4_fragment
+dEQP-VK.glsl.operator.bool_compare.not.bvec2_vertex
+dEQP-VK.glsl.operator.bool_compare.not.bvec2_fragment
+dEQP-VK.glsl.operator.bool_compare.not.bvec3_vertex
+dEQP-VK.glsl.operator.bool_compare.not.bvec3_fragment
+dEQP-VK.glsl.operator.bool_compare.not.bvec4_vertex
+dEQP-VK.glsl.operator.bool_compare.not.bvec4_fragment
+dEQP-VK.glsl.operator.selection.mediump_float_vertex
+dEQP-VK.glsl.operator.selection.mediump_float_fragment
+dEQP-VK.glsl.operator.selection.highp_float_vertex
+dEQP-VK.glsl.operator.selection.highp_float_fragment
+dEQP-VK.glsl.operator.selection.mediump_vec2_vertex
+dEQP-VK.glsl.operator.selection.mediump_vec2_fragment
+dEQP-VK.glsl.operator.selection.highp_vec2_vertex
+dEQP-VK.glsl.operator.selection.highp_vec2_fragment
+dEQP-VK.glsl.operator.selection.mediump_vec3_vertex
+dEQP-VK.glsl.operator.selection.mediump_vec3_fragment
+dEQP-VK.glsl.operator.selection.highp_vec3_vertex
+dEQP-VK.glsl.operator.selection.highp_vec3_fragment
+dEQP-VK.glsl.operator.selection.mediump_vec4_vertex
+dEQP-VK.glsl.operator.selection.mediump_vec4_fragment
+dEQP-VK.glsl.operator.selection.highp_vec4_vertex
+dEQP-VK.glsl.operator.selection.highp_vec4_fragment
+dEQP-VK.glsl.operator.selection.mediump_int_vertex
+dEQP-VK.glsl.operator.selection.mediump_int_fragment
+dEQP-VK.glsl.operator.selection.highp_int_vertex
+dEQP-VK.glsl.operator.selection.highp_int_fragment
+dEQP-VK.glsl.operator.selection.mediump_ivec2_vertex
+dEQP-VK.glsl.operator.selection.mediump_ivec2_fragment
+dEQP-VK.glsl.operator.selection.highp_ivec2_vertex
+dEQP-VK.glsl.operator.selection.highp_ivec2_fragment
+dEQP-VK.glsl.operator.selection.mediump_ivec3_vertex
+dEQP-VK.glsl.operator.selection.mediump_ivec3_fragment
+dEQP-VK.glsl.operator.selection.highp_ivec3_vertex
+dEQP-VK.glsl.operator.selection.highp_ivec3_fragment
+dEQP-VK.glsl.operator.selection.mediump_ivec4_vertex
+dEQP-VK.glsl.operator.selection.mediump_ivec4_fragment
+dEQP-VK.glsl.operator.selection.highp_ivec4_vertex
+dEQP-VK.glsl.operator.selection.highp_ivec4_fragment
+dEQP-VK.glsl.operator.selection.mediump_uint_vertex
+dEQP-VK.glsl.operator.selection.mediump_uint_fragment
+dEQP-VK.glsl.operator.selection.highp_uint_vertex
+dEQP-VK.glsl.operator.selection.highp_uint_fragment
+dEQP-VK.glsl.operator.selection.mediump_uvec2_vertex
+dEQP-VK.glsl.operator.selection.mediump_uvec2_fragment
+dEQP-VK.glsl.operator.selection.highp_uvec2_vertex
+dEQP-VK.glsl.operator.selection.highp_uvec2_fragment
+dEQP-VK.glsl.operator.selection.mediump_uvec3_vertex
+dEQP-VK.glsl.operator.selection.mediump_uvec3_fragment
+dEQP-VK.glsl.operator.selection.highp_uvec3_vertex
+dEQP-VK.glsl.operator.selection.highp_uvec3_fragment
+dEQP-VK.glsl.operator.selection.mediump_uvec4_vertex
+dEQP-VK.glsl.operator.selection.mediump_uvec4_fragment
+dEQP-VK.glsl.operator.selection.highp_uvec4_vertex
+dEQP-VK.glsl.operator.selection.highp_uvec4_fragment
+dEQP-VK.glsl.operator.selection.bool_vertex
+dEQP-VK.glsl.operator.selection.bool_fragment
+dEQP-VK.glsl.operator.selection.bvec2_vertex
+dEQP-VK.glsl.operator.selection.bvec2_fragment
+dEQP-VK.glsl.operator.selection.bvec3_vertex
+dEQP-VK.glsl.operator.selection.bvec3_fragment
+dEQP-VK.glsl.operator.selection.bvec4_vertex
+dEQP-VK.glsl.operator.selection.bvec4_fragment
+dEQP-VK.glsl.operator.sequence.no_side_effects.mediump_vec4_vertex
+dEQP-VK.glsl.operator.sequence.no_side_effects.mediump_vec4_fragment
+dEQP-VK.glsl.operator.sequence.no_side_effects.highp_vec4_vertex
+dEQP-VK.glsl.operator.sequence.no_side_effects.highp_vec4_fragment
+dEQP-VK.glsl.operator.sequence.no_side_effects.mediump_float_uint_vertex
+dEQP-VK.glsl.operator.sequence.no_side_effects.mediump_float_uint_fragment
+dEQP-VK.glsl.operator.sequence.no_side_effects.highp_float_uint_vertex
+dEQP-VK.glsl.operator.sequence.no_side_effects.highp_float_uint_fragment
+dEQP-VK.glsl.operator.sequence.no_side_effects.mediump_bool_vec2_vertex
+dEQP-VK.glsl.operator.sequence.no_side_effects.mediump_bool_vec2_fragment
+dEQP-VK.glsl.operator.sequence.no_side_effects.highp_bool_vec2_vertex
+dEQP-VK.glsl.operator.sequence.no_side_effects.highp_bool_vec2_fragment
+dEQP-VK.glsl.operator.sequence.no_side_effects.mediump_vec4_ivec4_bvec4_vertex
+dEQP-VK.glsl.operator.sequence.no_side_effects.mediump_vec4_ivec4_bvec4_fragment
+dEQP-VK.glsl.operator.sequence.no_side_effects.highp_vec4_ivec4_bvec4_vertex
+dEQP-VK.glsl.operator.sequence.no_side_effects.highp_vec4_ivec4_bvec4_fragment
+dEQP-VK.glsl.operator.sequence.side_effects.mediump_vec4_vertex
+dEQP-VK.glsl.operator.sequence.side_effects.mediump_vec4_fragment
+dEQP-VK.glsl.operator.sequence.side_effects.highp_vec4_vertex
+dEQP-VK.glsl.operator.sequence.side_effects.highp_vec4_fragment
+dEQP-VK.glsl.operator.sequence.side_effects.mediump_float_uint_vertex
+dEQP-VK.glsl.operator.sequence.side_effects.mediump_float_uint_fragment
+dEQP-VK.glsl.operator.sequence.side_effects.highp_float_uint_vertex
+dEQP-VK.glsl.operator.sequence.side_effects.highp_float_uint_fragment
+dEQP-VK.glsl.operator.sequence.side_effects.mediump_bool_vec2_vertex
+dEQP-VK.glsl.operator.sequence.side_effects.mediump_bool_vec2_fragment
+dEQP-VK.glsl.operator.sequence.side_effects.highp_bool_vec2_vertex
+dEQP-VK.glsl.operator.sequence.side_effects.highp_bool_vec2_fragment
+dEQP-VK.glsl.operator.sequence.side_effects.mediump_vec4_ivec4_bvec4_vertex
+dEQP-VK.glsl.operator.sequence.side_effects.mediump_vec4_ivec4_bvec4_fragment
+dEQP-VK.glsl.operator.sequence.side_effects.highp_vec4_ivec4_bvec4_vertex
+dEQP-VK.glsl.operator.sequence.side_effects.highp_vec4_ivec4_bvec4_fragment
+dEQP-VK.glsl.return.single_return_vertex
+dEQP-VK.glsl.return.single_return_fragment
+dEQP-VK.glsl.return.conditional_return_always_vertex
+dEQP-VK.glsl.return.conditional_return_always_fragment
+dEQP-VK.glsl.return.conditional_return_never_vertex
+dEQP-VK.glsl.return.conditional_return_never_fragment
+dEQP-VK.glsl.return.conditional_return_dynamic_vertex
+dEQP-VK.glsl.return.conditional_return_dynamic_fragment
+dEQP-VK.glsl.return.double_return_vertex
+dEQP-VK.glsl.return.double_return_fragment
+dEQP-VK.glsl.return.last_statement_in_main_vertex
+dEQP-VK.glsl.return.last_statement_in_main_fragment
+dEQP-VK.glsl.return.output_write_always_vertex
+dEQP-VK.glsl.return.output_write_always_fragment
+dEQP-VK.glsl.return.output_write_never_vertex
+dEQP-VK.glsl.return.output_write_never_fragment
+dEQP-VK.glsl.return.output_write_dynamic_vertex
+dEQP-VK.glsl.return.output_write_dynamic_fragment
+dEQP-VK.glsl.return.output_write_in_func_always_vertex
+dEQP-VK.glsl.return.output_write_in_func_always_fragment
+dEQP-VK.glsl.return.output_write_in_func_never_vertex
+dEQP-VK.glsl.return.output_write_in_func_never_fragment
+dEQP-VK.glsl.return.output_write_in_func_dynamic_vertex
+dEQP-VK.glsl.return.output_write_in_func_dynamic_fragment
+dEQP-VK.glsl.return.return_in_static_loop_always_vertex
+dEQP-VK.glsl.return.return_in_static_loop_always_fragment
+dEQP-VK.glsl.return.return_in_static_loop_never_vertex
+dEQP-VK.glsl.return.return_in_static_loop_never_fragment
+dEQP-VK.glsl.return.return_in_static_loop_dynamic_vertex
+dEQP-VK.glsl.return.return_in_static_loop_dynamic_fragment
+dEQP-VK.glsl.return.return_in_dynamic_loop_always_vertex
+dEQP-VK.glsl.return.return_in_dynamic_loop_always_fragment
+dEQP-VK.glsl.return.return_in_dynamic_loop_never_vertex
+dEQP-VK.glsl.return.return_in_dynamic_loop_never_fragment
+dEQP-VK.glsl.return.return_in_dynamic_loop_dynamic_vertex
+dEQP-VK.glsl.return.return_in_dynamic_loop_dynamic_fragment
+dEQP-VK.glsl.return.return_in_infinite_loop_vertex
+dEQP-VK.glsl.return.return_in_infinite_loop_fragment
+dEQP-VK.glsl.struct.local.basic_vertex
+dEQP-VK.glsl.struct.local.basic_fragment
+dEQP-VK.glsl.struct.local.nested_vertex
+dEQP-VK.glsl.struct.local.nested_fragment
+dEQP-VK.glsl.struct.local.array_member_vertex
+dEQP-VK.glsl.struct.local.array_member_fragment
+dEQP-VK.glsl.struct.local.array_member_dynamic_index_vertex
+dEQP-VK.glsl.struct.local.array_member_dynamic_index_fragment
+dEQP-VK.glsl.struct.local.struct_array_vertex
+dEQP-VK.glsl.struct.local.struct_array_fragment
+dEQP-VK.glsl.struct.local.struct_array_dynamic_index_vertex
+dEQP-VK.glsl.struct.local.struct_array_dynamic_index_fragment
+dEQP-VK.glsl.struct.local.nested_struct_array_vertex
+dEQP-VK.glsl.struct.local.nested_struct_array_fragment
+dEQP-VK.glsl.struct.local.nested_struct_array_dynamic_index_vertex
+dEQP-VK.glsl.struct.local.nested_struct_array_dynamic_index_fragment
+dEQP-VK.glsl.struct.local.parameter_vertex
+dEQP-VK.glsl.struct.local.parameter_fragment
+dEQP-VK.glsl.struct.local.parameter_nested_vertex
+dEQP-VK.glsl.struct.local.parameter_nested_fragment
+dEQP-VK.glsl.struct.local.return_vertex
+dEQP-VK.glsl.struct.local.return_fragment
+dEQP-VK.glsl.struct.local.return_nested_vertex
+dEQP-VK.glsl.struct.local.return_nested_fragment
+dEQP-VK.glsl.struct.local.conditional_assignment_vertex
+dEQP-VK.glsl.struct.local.conditional_assignment_fragment
+dEQP-VK.glsl.struct.local.loop_assignment_vertex
+dEQP-VK.glsl.struct.local.loop_assignment_fragment
+dEQP-VK.glsl.struct.local.dynamic_loop_assignment_vertex
+dEQP-VK.glsl.struct.local.dynamic_loop_assignment_fragment
+dEQP-VK.glsl.struct.local.nested_conditional_assignment_vertex
+dEQP-VK.glsl.struct.local.nested_conditional_assignment_fragment
+dEQP-VK.glsl.struct.local.nested_loop_assignment_vertex
+dEQP-VK.glsl.struct.local.nested_loop_assignment_fragment
+dEQP-VK.glsl.struct.local.nested_dynamic_loop_assignment_vertex
+dEQP-VK.glsl.struct.local.nested_dynamic_loop_assignment_fragment
+dEQP-VK.glsl.struct.local.loop_struct_array_vertex
+dEQP-VK.glsl.struct.local.loop_struct_array_fragment
+dEQP-VK.glsl.struct.local.loop_nested_struct_array_vertex
+dEQP-VK.glsl.struct.local.loop_nested_struct_array_fragment
+dEQP-VK.glsl.struct.local.dynamic_loop_struct_array_vertex
+dEQP-VK.glsl.struct.local.dynamic_loop_struct_array_fragment
+dEQP-VK.glsl.struct.local.dynamic_loop_nested_struct_array_vertex
+dEQP-VK.glsl.struct.local.dynamic_loop_nested_struct_array_fragment
+dEQP-VK.glsl.struct.local.basic_equal_vertex
+dEQP-VK.glsl.struct.local.basic_equal_fragment
+dEQP-VK.glsl.struct.local.basic_not_equal_vertex
+dEQP-VK.glsl.struct.local.basic_not_equal_fragment
+dEQP-VK.glsl.struct.local.nested_equal_vertex
+dEQP-VK.glsl.struct.local.nested_equal_fragment
+dEQP-VK.glsl.struct.local.nested_not_equal_vertex
+dEQP-VK.glsl.struct.local.nested_not_equal_fragment
+dEQP-VK.glsl.struct.uniform.basic_vertex
+dEQP-VK.glsl.struct.uniform.basic_fragment
+dEQP-VK.glsl.struct.uniform.nested_vertex
+dEQP-VK.glsl.struct.uniform.nested_fragment
+dEQP-VK.glsl.struct.uniform.array_member_vertex
+dEQP-VK.glsl.struct.uniform.array_member_fragment
+dEQP-VK.glsl.struct.uniform.array_member_dynamic_index_vertex
+dEQP-VK.glsl.struct.uniform.array_member_dynamic_index_fragment
+dEQP-VK.glsl.struct.uniform.struct_array_vertex
+dEQP-VK.glsl.struct.uniform.struct_array_fragment
+dEQP-VK.glsl.struct.uniform.struct_array_dynamic_index_vertex
+dEQP-VK.glsl.struct.uniform.struct_array_dynamic_index_fragment
+dEQP-VK.glsl.struct.uniform.nested_struct_array_vertex
+dEQP-VK.glsl.struct.uniform.nested_struct_array_fragment
+dEQP-VK.glsl.struct.uniform.nested_struct_array_dynamic_index_vertex
+dEQP-VK.glsl.struct.uniform.nested_struct_array_dynamic_index_fragment
+dEQP-VK.glsl.struct.uniform.loop_struct_array_vertex
+dEQP-VK.glsl.struct.uniform.loop_struct_array_fragment
+dEQP-VK.glsl.struct.uniform.loop_nested_struct_array_vertex
+dEQP-VK.glsl.struct.uniform.loop_nested_struct_array_fragment
+dEQP-VK.glsl.struct.uniform.dynamic_loop_struct_array_vertex
+dEQP-VK.glsl.struct.uniform.dynamic_loop_struct_array_fragment
+dEQP-VK.glsl.struct.uniform.dynamic_loop_nested_struct_array_vertex
+dEQP-VK.glsl.struct.uniform.dynamic_loop_nested_struct_array_fragment
+dEQP-VK.glsl.struct.uniform.equal_vertex
+dEQP-VK.glsl.struct.uniform.equal_fragment
+dEQP-VK.glsl.struct.uniform.not_equal_vertex
+dEQP-VK.glsl.struct.uniform.not_equal_fragment
+dEQP-VK.glsl.switch.basic_static_vertex
+dEQP-VK.glsl.switch.basic_static_fragment
+dEQP-VK.glsl.switch.basic_uniform_vertex
+dEQP-VK.glsl.switch.basic_uniform_fragment
+dEQP-VK.glsl.switch.basic_dynamic_vertex
+dEQP-VK.glsl.switch.basic_dynamic_fragment
+dEQP-VK.glsl.switch.const_expr_in_label_static_vertex
+dEQP-VK.glsl.switch.const_expr_in_label_static_fragment
+dEQP-VK.glsl.switch.const_expr_in_label_uniform_vertex
+dEQP-VK.glsl.switch.const_expr_in_label_uniform_fragment
+dEQP-VK.glsl.switch.const_expr_in_label_dynamic_vertex
+dEQP-VK.glsl.switch.const_expr_in_label_dynamic_fragment
+dEQP-VK.glsl.switch.default_label_static_vertex
+dEQP-VK.glsl.switch.default_label_static_fragment
+dEQP-VK.glsl.switch.default_label_uniform_vertex
+dEQP-VK.glsl.switch.default_label_uniform_fragment
+dEQP-VK.glsl.switch.default_label_dynamic_vertex
+dEQP-VK.glsl.switch.default_label_dynamic_fragment
+dEQP-VK.glsl.switch.default_not_last_static_vertex
+dEQP-VK.glsl.switch.default_not_last_static_fragment
+dEQP-VK.glsl.switch.default_not_last_uniform_vertex
+dEQP-VK.glsl.switch.default_not_last_uniform_fragment
+dEQP-VK.glsl.switch.default_not_last_dynamic_vertex
+dEQP-VK.glsl.switch.default_not_last_dynamic_fragment
+dEQP-VK.glsl.switch.no_default_label_static_vertex
+dEQP-VK.glsl.switch.no_default_label_static_fragment
+dEQP-VK.glsl.switch.no_default_label_uniform_vertex
+dEQP-VK.glsl.switch.no_default_label_uniform_fragment
+dEQP-VK.glsl.switch.no_default_label_dynamic_vertex
+dEQP-VK.glsl.switch.no_default_label_dynamic_fragment
+dEQP-VK.glsl.switch.fall_through_static_vertex
+dEQP-VK.glsl.switch.fall_through_static_fragment
+dEQP-VK.glsl.switch.fall_through_uniform_vertex
+dEQP-VK.glsl.switch.fall_through_uniform_fragment
+dEQP-VK.glsl.switch.fall_through_dynamic_vertex
+dEQP-VK.glsl.switch.fall_through_dynamic_fragment
+dEQP-VK.glsl.switch.fall_through_default_static_vertex
+dEQP-VK.glsl.switch.fall_through_default_static_fragment
+dEQP-VK.glsl.switch.fall_through_default_uniform_vertex
+dEQP-VK.glsl.switch.fall_through_default_uniform_fragment
+dEQP-VK.glsl.switch.fall_through_default_dynamic_vertex
+dEQP-VK.glsl.switch.fall_through_default_dynamic_fragment
+dEQP-VK.glsl.switch.conditional_fall_through_static_vertex
+dEQP-VK.glsl.switch.conditional_fall_through_static_fragment
+dEQP-VK.glsl.switch.conditional_fall_through_uniform_vertex
+dEQP-VK.glsl.switch.conditional_fall_through_uniform_fragment
+dEQP-VK.glsl.switch.conditional_fall_through_dynamic_vertex
+dEQP-VK.glsl.switch.conditional_fall_through_dynamic_fragment
+dEQP-VK.glsl.switch.conditional_fall_through_2_static_vertex
+dEQP-VK.glsl.switch.conditional_fall_through_2_static_fragment
+dEQP-VK.glsl.switch.conditional_fall_through_2_uniform_vertex
+dEQP-VK.glsl.switch.conditional_fall_through_2_uniform_fragment
+dEQP-VK.glsl.switch.conditional_fall_through_2_dynamic_vertex
+dEQP-VK.glsl.switch.conditional_fall_through_2_dynamic_fragment
+dEQP-VK.glsl.switch.scope_static_vertex
+dEQP-VK.glsl.switch.scope_static_fragment
+dEQP-VK.glsl.switch.scope_uniform_vertex
+dEQP-VK.glsl.switch.scope_uniform_fragment
+dEQP-VK.glsl.switch.scope_dynamic_vertex
+dEQP-VK.glsl.switch.scope_dynamic_fragment
+dEQP-VK.glsl.switch.switch_in_if_static_vertex
+dEQP-VK.glsl.switch.switch_in_if_static_fragment
+dEQP-VK.glsl.switch.switch_in_if_uniform_vertex
+dEQP-VK.glsl.switch.switch_in_if_uniform_fragment
+dEQP-VK.glsl.switch.switch_in_if_dynamic_vertex
+dEQP-VK.glsl.switch.switch_in_if_dynamic_fragment
+dEQP-VK.glsl.switch.switch_in_for_loop_static_vertex
+dEQP-VK.glsl.switch.switch_in_for_loop_static_fragment
+dEQP-VK.glsl.switch.switch_in_for_loop_uniform_vertex
+dEQP-VK.glsl.switch.switch_in_for_loop_uniform_fragment
+dEQP-VK.glsl.switch.switch_in_for_loop_dynamic_vertex
+dEQP-VK.glsl.switch.switch_in_for_loop_dynamic_fragment
+dEQP-VK.glsl.switch.switch_in_while_loop_static_vertex
+dEQP-VK.glsl.switch.switch_in_while_loop_static_fragment
+dEQP-VK.glsl.switch.switch_in_while_loop_uniform_vertex
+dEQP-VK.glsl.switch.switch_in_while_loop_uniform_fragment
+dEQP-VK.glsl.switch.switch_in_while_loop_dynamic_vertex
+dEQP-VK.glsl.switch.switch_in_while_loop_dynamic_fragment
+dEQP-VK.glsl.switch.switch_in_do_while_loop_static_vertex
+dEQP-VK.glsl.switch.switch_in_do_while_loop_static_fragment
+dEQP-VK.glsl.switch.switch_in_do_while_loop_uniform_vertex
+dEQP-VK.glsl.switch.switch_in_do_while_loop_uniform_fragment
+dEQP-VK.glsl.switch.switch_in_do_while_loop_dynamic_vertex
+dEQP-VK.glsl.switch.switch_in_do_while_loop_dynamic_fragment
+dEQP-VK.glsl.switch.if_in_switch_static_vertex
+dEQP-VK.glsl.switch.if_in_switch_static_fragment
+dEQP-VK.glsl.switch.if_in_switch_uniform_vertex
+dEQP-VK.glsl.switch.if_in_switch_uniform_fragment
+dEQP-VK.glsl.switch.if_in_switch_dynamic_vertex
+dEQP-VK.glsl.switch.if_in_switch_dynamic_fragment
+dEQP-VK.glsl.switch.for_loop_in_switch_static_vertex
+dEQP-VK.glsl.switch.for_loop_in_switch_static_fragment
+dEQP-VK.glsl.switch.for_loop_in_switch_uniform_vertex
+dEQP-VK.glsl.switch.for_loop_in_switch_uniform_fragment
+dEQP-VK.glsl.switch.for_loop_in_switch_dynamic_vertex
+dEQP-VK.glsl.switch.for_loop_in_switch_dynamic_fragment
+dEQP-VK.glsl.switch.while_loop_in_switch_static_vertex
+dEQP-VK.glsl.switch.while_loop_in_switch_static_fragment
+dEQP-VK.glsl.switch.while_loop_in_switch_uniform_vertex
+dEQP-VK.glsl.switch.while_loop_in_switch_uniform_fragment
+dEQP-VK.glsl.switch.while_loop_in_switch_dynamic_vertex
+dEQP-VK.glsl.switch.while_loop_in_switch_dynamic_fragment
+dEQP-VK.glsl.switch.do_while_loop_in_switch_static_vertex
+dEQP-VK.glsl.switch.do_while_loop_in_switch_static_fragment
+dEQP-VK.glsl.switch.do_while_loop_in_switch_uniform_vertex
+dEQP-VK.glsl.switch.do_while_loop_in_switch_uniform_fragment
+dEQP-VK.glsl.switch.do_while_loop_in_switch_dynamic_vertex
+dEQP-VK.glsl.switch.do_while_loop_in_switch_dynamic_fragment
+dEQP-VK.glsl.switch.switch_in_switch_static_vertex
+dEQP-VK.glsl.switch.switch_in_switch_static_fragment
+dEQP-VK.glsl.switch.switch_in_switch_uniform_vertex
+dEQP-VK.glsl.switch.switch_in_switch_uniform_fragment
+dEQP-VK.glsl.switch.switch_in_switch_dynamic_vertex
+dEQP-VK.glsl.switch.switch_in_switch_dynamic_fragment
+dEQP-VK.glsl.builtin.function.common.abs.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.abs.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.abs.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.abs.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.abs.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.abs.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.abs.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.abs.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.abs.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.abs.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.abs.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.abs.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.abs.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.abs.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.abs.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.abs.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.abs.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.abs.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.abs.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.abs.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.abs.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.abs.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.abs.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.abs.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.abs.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.abs.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.abs.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.abs.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.abs.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.abs.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.abs.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.abs.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.abs.int_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.abs.int_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.abs.int_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.abs.int_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.int_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.int_mediump_compute
+dEQP-VK.glsl.builtin.function.common.abs.int_highp_vertex
+dEQP-VK.glsl.builtin.function.common.abs.int_highp_fragment
+dEQP-VK.glsl.builtin.function.common.abs.int_highp_geometry
+dEQP-VK.glsl.builtin.function.common.abs.int_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.int_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.int_highp_compute
+dEQP-VK.glsl.builtin.function.common.abs.ivec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.abs.ivec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.abs.ivec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.abs.ivec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.ivec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.ivec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.abs.ivec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.abs.ivec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.abs.ivec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.abs.ivec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.ivec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.ivec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.abs.ivec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.abs.ivec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.abs.ivec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.abs.ivec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.ivec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.ivec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.abs.ivec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.abs.ivec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.abs.ivec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.abs.ivec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.ivec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.ivec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.abs.ivec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.abs.ivec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.abs.ivec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.abs.ivec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.ivec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.ivec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.abs.ivec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.abs.ivec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.abs.ivec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.abs.ivec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.abs.ivec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.abs.ivec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.sign.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.sign.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.sign.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.sign.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.sign.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.sign.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.sign.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.sign.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.sign.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.sign.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.sign.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.sign.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.sign.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.sign.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.sign.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.sign.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.sign.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.sign.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.sign.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.sign.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.sign.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.sign.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.sign.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.sign.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.sign.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.sign.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.sign.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.sign.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.sign.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.sign.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.sign.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.sign.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.sign.int_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.sign.int_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.sign.int_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.sign.int_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.int_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.int_mediump_compute
+dEQP-VK.glsl.builtin.function.common.sign.int_highp_vertex
+dEQP-VK.glsl.builtin.function.common.sign.int_highp_fragment
+dEQP-VK.glsl.builtin.function.common.sign.int_highp_geometry
+dEQP-VK.glsl.builtin.function.common.sign.int_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.int_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.int_highp_compute
+dEQP-VK.glsl.builtin.function.common.sign.ivec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.sign.ivec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.sign.ivec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.sign.ivec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.ivec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.ivec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.sign.ivec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.sign.ivec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.sign.ivec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.sign.ivec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.ivec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.ivec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.sign.ivec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.sign.ivec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.sign.ivec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.sign.ivec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.ivec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.ivec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.sign.ivec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.sign.ivec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.sign.ivec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.sign.ivec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.ivec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.ivec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.sign.ivec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.sign.ivec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.sign.ivec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.sign.ivec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.ivec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.ivec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.sign.ivec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.sign.ivec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.sign.ivec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.sign.ivec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.sign.ivec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.sign.ivec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.floor.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.floor.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.floor.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.floor.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.floor.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.floor.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.floor.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.floor.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.floor.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.floor.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.floor.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.floor.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.floor.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.floor.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.floor.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.floor.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.floor.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.floor.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.floor.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.floor.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.floor.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.floor.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.floor.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.floor.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.floor.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.floor.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.floor.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.floor.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.floor.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.floor.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.floor.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.floor.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.floor.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.floor.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.floor.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.floor.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.floor.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.floor.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.floor.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.floor.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.floor.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.floor.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.floor.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.floor.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.floor.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.floor.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.floor.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.floor.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.trunc.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.trunc.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.trunc.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.trunc.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.trunc.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.trunc.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.trunc.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.trunc.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.trunc.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.trunc.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.trunc.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.trunc.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.trunc.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.trunc.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.trunc.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.trunc.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.trunc.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.trunc.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.trunc.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.trunc.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.trunc.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.trunc.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.trunc.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.trunc.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.trunc.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.trunc.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.trunc.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.trunc.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.trunc.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.trunc.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.trunc.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.trunc.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.trunc.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.trunc.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.trunc.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.trunc.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.trunc.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.trunc.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.trunc.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.trunc.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.trunc.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.trunc.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.trunc.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.trunc.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.trunc.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.trunc.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.trunc.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.trunc.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.round.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.round.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.round.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.round.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.round.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.round.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.round.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.round.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.round.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.round.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.round.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.round.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.round.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.round.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.round.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.round.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.round.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.round.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.round.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.round.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.round.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.round.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.round.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.round.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.round.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.round.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.round.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.round.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.round.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.round.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.round.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.round.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.round.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.round.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.round.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.round.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.round.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.round.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.round.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.round.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.round.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.round.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.round.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.round.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.round.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.round.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.round.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.round.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.roundeven.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.roundeven.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.roundeven.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.roundeven.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.roundeven.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.roundeven.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.roundeven.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.roundeven.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.roundeven.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.roundeven.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.roundeven.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.roundeven.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.roundeven.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.roundeven.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.roundeven.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.roundeven.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.roundeven.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.roundeven.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.roundeven.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.roundeven.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.roundeven.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.roundeven.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.roundeven.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.roundeven.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.roundeven.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.roundeven.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.roundeven.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.roundeven.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.roundeven.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.roundeven.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.roundeven.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.roundeven.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.roundeven.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.roundeven.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.roundeven.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.roundeven.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.roundeven.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.roundeven.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.roundeven.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.roundeven.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.roundeven.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.roundeven.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.roundeven.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.roundeven.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.roundeven.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.roundeven.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.roundeven.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.roundeven.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.ceil.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.ceil.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.ceil.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.ceil.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.ceil.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.ceil.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.ceil.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.ceil.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.ceil.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.ceil.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.ceil.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.ceil.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.ceil.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.ceil.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.ceil.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.ceil.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.ceil.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.ceil.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.ceil.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.ceil.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.ceil.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.ceil.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.ceil.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.ceil.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.ceil.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.ceil.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.ceil.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.ceil.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.ceil.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.ceil.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.ceil.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.ceil.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.ceil.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.ceil.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.ceil.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.ceil.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.ceil.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.ceil.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.ceil.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.ceil.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.ceil.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.ceil.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.ceil.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.ceil.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.ceil.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.ceil.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.ceil.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.ceil.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.fract.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.fract.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.fract.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.fract.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.fract.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.fract.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.fract.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.fract.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.fract.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.fract.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.fract.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.fract.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.fract.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.fract.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.fract.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.fract.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.fract.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.fract.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.fract.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.fract.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.fract.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.fract.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.fract.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.fract.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.fract.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.fract.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.fract.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.fract.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.fract.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.fract.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.fract.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.fract.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.fract.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.fract.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.fract.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.fract.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.fract.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.fract.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.fract.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.fract.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.fract.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.fract.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.fract.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.fract.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.fract.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.fract.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.fract.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.fract.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.modf.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.modf.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.modf.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.modf.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.modf.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.modf.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.modf.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.modf.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.modf.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.modf.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.modf.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.modf.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.modf.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.modf.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.modf.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.modf.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.modf.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.modf.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.modf.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.modf.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.modf.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.modf.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.modf.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.modf.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.modf.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.modf.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.modf.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.modf.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.modf.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.modf.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.modf.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.modf.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.modf.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.modf.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.modf.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.modf.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.modf.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.modf.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.modf.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.modf.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.modf.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.modf.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.modf.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.modf.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.modf.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.modf.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.modf.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.modf.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.isnan.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.isnan.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.isnan.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.isnan.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.isnan.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.isnan.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.isnan.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.isnan.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.isnan.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.isnan.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.isnan.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.isnan.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.isnan.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.isnan.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.isnan.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.isnan.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.isnan.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.isnan.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.isnan.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.isnan.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.isnan.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.isnan.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.isnan.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.isnan.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.isnan.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.isnan.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.isnan.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.isnan.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.isnan.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.isnan.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.isnan.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.isnan.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.isnan.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.isnan.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.isnan.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.isnan.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.isnan.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.isnan.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.isnan.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.isnan.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.isnan.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.isnan.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.isnan.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.isnan.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.isnan.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.isnan.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.isnan.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.isnan.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.isinf.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.isinf.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.isinf.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.isinf.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.isinf.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.isinf.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.isinf.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.isinf.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.isinf.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.isinf.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.isinf.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.isinf.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.isinf.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.isinf.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.isinf.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.isinf.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.isinf.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.isinf.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.isinf.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.isinf.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.isinf.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.isinf.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.isinf.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.isinf.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.isinf.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.isinf.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.isinf.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.isinf.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.isinf.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.isinf.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.isinf.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.isinf.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.isinf.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.isinf.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.isinf.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.isinf.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.isinf.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.isinf.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.isinf.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.isinf.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.isinf.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.isinf.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.isinf.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.isinf.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.isinf.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.isinf.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.isinf.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.isinf.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstoint.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.floatbitstouint.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.frexp.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.frexp.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.frexp.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.frexp.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.frexp.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.frexp.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.frexp.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.frexp.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.frexp.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.frexp.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.frexp.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.frexp.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.frexp.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.frexp.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.frexp.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.frexp.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.frexp.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.frexp.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.frexp.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.frexp.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.frexp.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.frexp.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.frexp.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.frexp.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.frexp.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.frexp.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.frexp.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.frexp.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.frexp.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.frexp.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.frexp.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.frexp.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.frexp.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.frexp.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.frexp.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.frexp.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.frexp.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.frexp.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.frexp.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.frexp.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.frexp.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.frexp.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.frexp.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.frexp.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.ldexp.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.ldexp.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.ldexp.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.ldexp.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.ldexp.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.ldexp.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.ldexp.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.ldexp.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.ldexp.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.ldexp.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.ldexp.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.ldexp.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.ldexp.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.ldexp.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.ldexp.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.ldexp.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.ldexp.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.ldexp.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.ldexp.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.ldexp.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.ldexp.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.ldexp.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.ldexp.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.ldexp.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.ldexp.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.ldexp.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.ldexp.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.ldexp.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.ldexp.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.ldexp.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.ldexp.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.ldexp.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.ldexp.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.ldexp.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.ldexp.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.ldexp.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.ldexp.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.ldexp.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.ldexp.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.ldexp.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.ldexp.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.ldexp.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.ldexp.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.ldexp.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.ldexp.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.ldexp.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.ldexp.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.ldexp.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.fma.float_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.fma.float_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.fma.float_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.fma.float_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.fma.float_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.fma.float_mediump_compute
+dEQP-VK.glsl.builtin.function.common.fma.float_highp_vertex
+dEQP-VK.glsl.builtin.function.common.fma.float_highp_fragment
+dEQP-VK.glsl.builtin.function.common.fma.float_highp_geometry
+dEQP-VK.glsl.builtin.function.common.fma.float_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.fma.float_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.fma.float_highp_compute
+dEQP-VK.glsl.builtin.function.common.fma.vec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.fma.vec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.fma.vec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.fma.vec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.fma.vec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.fma.vec2_mediump_compute
+dEQP-VK.glsl.builtin.function.common.fma.vec2_highp_vertex
+dEQP-VK.glsl.builtin.function.common.fma.vec2_highp_fragment
+dEQP-VK.glsl.builtin.function.common.fma.vec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.fma.vec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.fma.vec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.fma.vec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.fma.vec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.fma.vec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.fma.vec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.fma.vec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.fma.vec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.fma.vec3_mediump_compute
+dEQP-VK.glsl.builtin.function.common.fma.vec3_highp_vertex
+dEQP-VK.glsl.builtin.function.common.fma.vec3_highp_fragment
+dEQP-VK.glsl.builtin.function.common.fma.vec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.fma.vec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.fma.vec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.fma.vec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.fma.vec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.common.fma.vec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.common.fma.vec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.common.fma.vec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.common.fma.vec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.common.fma.vec4_mediump_compute
+dEQP-VK.glsl.builtin.function.common.fma.vec4_highp_vertex
+dEQP-VK.glsl.builtin.function.common.fma.vec4_highp_fragment
+dEQP-VK.glsl.builtin.function.common.fma.vec4_highp_geometry
+dEQP-VK.glsl.builtin.function.common.fma.vec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.fma.vec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.fma.vec4_highp_compute
+dEQP-VK.glsl.builtin.function.common.intbitstofloat.int_highp_geometry
+dEQP-VK.glsl.builtin.function.common.intbitstofloat.int_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.intbitstofloat.int_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.intbitstofloat.int_highp_compute
+dEQP-VK.glsl.builtin.function.common.intbitstofloat.ivec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.intbitstofloat.ivec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.intbitstofloat.ivec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.intbitstofloat.ivec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.intbitstofloat.ivec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.intbitstofloat.ivec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.intbitstofloat.ivec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.intbitstofloat.ivec3_highp_compute
+dEQP-VK.glsl.builtin.function.common.uintbitstofloat.uint_highp_geometry
+dEQP-VK.glsl.builtin.function.common.uintbitstofloat.uint_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.uintbitstofloat.uint_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.uintbitstofloat.uint_highp_compute
+dEQP-VK.glsl.builtin.function.common.uintbitstofloat.uvec2_highp_geometry
+dEQP-VK.glsl.builtin.function.common.uintbitstofloat.uvec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.uintbitstofloat.uvec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.uintbitstofloat.uvec2_highp_compute
+dEQP-VK.glsl.builtin.function.common.uintbitstofloat.uvec3_highp_geometry
+dEQP-VK.glsl.builtin.function.common.uintbitstofloat.uvec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.common.uintbitstofloat.uvec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.common.uintbitstofloat.uvec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uint_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uint_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uint_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uint_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uint_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uint_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uint_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uint_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uint_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uint_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uint_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uint_highp_compute
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.uaddcarry.uvec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uint_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uint_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uint_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uint_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uint_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uint_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uint_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uint_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uint_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uint_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uint_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uint_highp_compute
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.usubborrow.uvec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.umulextended.uint_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.umulextended.uint_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.umulextended.uint_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.umulextended.uint_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.umulextended.uint_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.umulextended.uint_highp_compute
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.umulextended.uvec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.imulextended.int_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.imulextended.int_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.imulextended.int_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.imulextended.int_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.imulextended.int_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.imulextended.int_highp_compute
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.imulextended.ivec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.int_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.int_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.int_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.int_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.int_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.int_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.int_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.int_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.int_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.int_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.int_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.int_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.ivec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uint_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uint_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uint_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uint_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uint_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uint_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uint_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uint_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uint_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uint_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uint_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uint_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldextract.uvec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.int_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.int_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.int_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.int_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.int_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.int_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.int_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.int_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.int_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.int_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.int_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.int_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.ivec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uint_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uint_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uint_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uint_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uint_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uint_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uint_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uint_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uint_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uint_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uint_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uint_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldinsert.uvec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.int_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.int_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.int_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.int_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.int_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.int_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.int_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.int_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.int_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.int_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.int_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.int_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.ivec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uint_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uint_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uint_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uint_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uint_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uint_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uint_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uint_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uint_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uint_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uint_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uint_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitfieldreverse.uvec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.int_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.int_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.int_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.int_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.int_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.int_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.int_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.int_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.int_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.int_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.int_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.int_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.ivec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.uint_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.uint_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.uint_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.uint_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.uint_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.uint_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.uint_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.uint_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.uint_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.uint_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.uint_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.uint_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.bitcount.uvec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.int_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.int_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.int_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.int_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.int_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.int_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.int_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.int_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.int_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.int_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.int_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.int_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.ivec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.uint_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.uint_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.uint_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.uint_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.uint_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.uint_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.uint_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.uint_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.uint_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.uint_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.uint_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.uint_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findlsb.uvec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.int_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.int_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.int_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.int_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.int_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.int_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.int_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.int_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.int_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.int_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.int_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.int_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.ivec4_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.uint_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.uint_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.uint_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.uint_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.uint_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.uint_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.uint_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.uint_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.uint_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.uint_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.uint_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.uint_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec2_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec2_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec2_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec2_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec2_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec2_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec2_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec2_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec2_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec2_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec2_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec2_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec3_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec3_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec3_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec3_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec3_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec3_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec3_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec3_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec3_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec3_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec3_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec3_highp_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec4_mediump_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec4_mediump_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec4_mediump_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec4_mediump_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec4_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec4_mediump_compute
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec4_highp_vertex
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec4_highp_fragment
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec4_highp_geometry
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec4_highp_tess_control
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec4_highp_tess_eval
+dEQP-VK.glsl.builtin.function.integer.findMSB.uvec4_highp_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm4x8_mediump_vertex
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm4x8_mediump_tess_control
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm4x8_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm4x8_mediump_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm4x8_mediump_fragment
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm4x8_mediump_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm4x8_highp_vertex
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm4x8_highp_tess_control
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm4x8_highp_tess_eval
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm4x8_highp_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm4x8_highp_fragment
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm4x8_highp_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.unpacksnorm4x8_vertex
+dEQP-VK.glsl.builtin.function.pack_unpack.unpacksnorm4x8_tess_control
+dEQP-VK.glsl.builtin.function.pack_unpack.unpacksnorm4x8_tess_eval
+dEQP-VK.glsl.builtin.function.pack_unpack.unpacksnorm4x8_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.unpacksnorm4x8_fragment
+dEQP-VK.glsl.builtin.function.pack_unpack.unpacksnorm4x8_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm4x8_mediump_vertex
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm4x8_mediump_tess_control
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm4x8_mediump_tess_eval
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm4x8_mediump_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm4x8_mediump_fragment
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm4x8_mediump_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm4x8_highp_vertex
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm4x8_highp_tess_control
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm4x8_highp_tess_eval
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm4x8_highp_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm4x8_highp_fragment
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm4x8_highp_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.unpackunorm4x8_vertex
+dEQP-VK.glsl.builtin.function.pack_unpack.unpackunorm4x8_tess_control
+dEQP-VK.glsl.builtin.function.pack_unpack.unpackunorm4x8_tess_eval
+dEQP-VK.glsl.builtin.function.pack_unpack.unpackunorm4x8_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.unpackunorm4x8_fragment
+dEQP-VK.glsl.builtin.function.pack_unpack.unpackunorm4x8_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm2x16_mediump_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm2x16_mediump_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm2x16_highp_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.packsnorm2x16_highp_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.unpacksnorm2x16_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.unpacksnorm2x16_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm2x16_mediump_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm2x16_mediump_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm2x16_highp_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.packunorm2x16_highp_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.unpackunorm2x16_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.unpackunorm2x16_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.packhalf2x16_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.packhalf2x16_compute
+dEQP-VK.glsl.builtin.function.pack_unpack.unpackhalf2x16_geometry
+dEQP-VK.glsl.builtin.function.pack_unpack.unpackhalf2x16_compute
+dEQP-VK.glsl.builtin.precision.add.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.add.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.add.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.add.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.add.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.add.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.add.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.add.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.sub.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.sub.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.sub.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.sub.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.sub.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.sub.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.sub.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.sub.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.mul.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.mul.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.mul.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.mul.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.mul.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.mul.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.mul.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.mul.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.div.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.div.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.div.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.div.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.radians.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.radians.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.radians.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.radians.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.radians.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.radians.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.radians.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.radians.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.degrees.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.degrees.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.degrees.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.degrees.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.degrees.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.degrees.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.degrees.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.degrees.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.sin.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.sin.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.sin.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.sin.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.cos.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.cos.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.cos.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.cos.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.tan.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.tan.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.tan.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.tan.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.asin.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.asin.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.asin.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.asin.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.asin.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.asin.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.asin.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.asin.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.acos.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.acos.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.acos.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.acos.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.atan.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.atan.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.atan.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.atan.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.sinh.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.sinh.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.sinh.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.sinh.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.sinh.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.sinh.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.sinh.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.sinh.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.cosh.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.cosh.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.cosh.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.cosh.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.cosh.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.cosh.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.cosh.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.cosh.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.tanh.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.tanh.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.tanh.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.tanh.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.tanh.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.tanh.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.tanh.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.tanh.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.asinh.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.asinh.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.asinh.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.asinh.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.asinh.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.asinh.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.asinh.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.asinh.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.acosh.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.acosh.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.acosh.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.acosh.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.pow.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.pow.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.pow.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.pow.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.pow.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.pow.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.pow.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.pow.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.exp.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.exp.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.exp.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.exp.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.exp.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.exp.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.exp.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.exp.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.log.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.log.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.log.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.log.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.log.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.log.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.log.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.log.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.exp2.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.exp2.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.exp2.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.exp2.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.exp2.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.exp2.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.exp2.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.exp2.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.log2.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.log2.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.log2.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.log2.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.log2.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.log2.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.log2.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.log2.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.sqrt.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.sqrt.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.sqrt.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.sqrt.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.sqrt.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.sqrt.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.sqrt.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.sqrt.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.inversesqrt.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.inversesqrt.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.inversesqrt.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.inversesqrt.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.inversesqrt.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.inversesqrt.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.inversesqrt.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.inversesqrt.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.abs.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.abs.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.abs.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.abs.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.abs.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.abs.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.abs.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.abs.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.sign.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.sign.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.sign.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.sign.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.sign.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.sign.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.sign.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.sign.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.floor.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.floor.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.floor.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.floor.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.floor.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.floor.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.floor.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.floor.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.trunc.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.trunc.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.trunc.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.trunc.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.trunc.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.trunc.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.trunc.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.trunc.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.round.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.round.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.round.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.round.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.round.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.round.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.round.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.round.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.roundeven.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.roundeven.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.roundeven.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.roundeven.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.roundeven.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.roundeven.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.roundeven.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.roundeven.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.ceil.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.ceil.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.ceil.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.ceil.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.ceil.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.ceil.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.ceil.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.ceil.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.fract.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.fract.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.fract.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.fract.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.fract.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.fract.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.fract.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.fract.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.mod.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.mod.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.mod.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.mod.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.modf.mediump_compute
+dEQP-VK.glsl.builtin.precision.modf.highp_compute
+dEQP-VK.glsl.builtin.precision.min.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.min.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.min.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.min.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.max.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.max.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.max.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.max.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.clamp.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.clamp.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.clamp.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.clamp.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.mix.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.mix.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.mix.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.mix.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.mix.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.mix.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.mix.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.mix.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.step.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.step.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.step.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.step.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.step.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.step.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.step.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.step.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.length.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.length.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.length.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.length.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.length.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.length.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.length.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.length.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.distance.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.distance.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.distance.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.distance.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.distance.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.distance.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.distance.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.distance.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.dot.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.dot.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.dot.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.dot.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.cross.mediump_compute
+dEQP-VK.glsl.builtin.precision.cross.highp_compute
+dEQP-VK.glsl.builtin.precision.normalize.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.normalize.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.normalize.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.normalize.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.normalize.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.normalize.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.normalize.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.normalize.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.faceforward.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.faceforward.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.faceforward.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.faceforward.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.faceforward.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.faceforward.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.faceforward.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.faceforward.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.reflect.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.reflect.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.reflect.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.reflect.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.reflect.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.matrixcompmult.mediump_compute.mat2
+dEQP-VK.glsl.builtin.precision.matrixcompmult.mediump_compute.mat2x3
+dEQP-VK.glsl.builtin.precision.matrixcompmult.mediump_compute.mat2x4
+dEQP-VK.glsl.builtin.precision.matrixcompmult.mediump_compute.mat3x2
+dEQP-VK.glsl.builtin.precision.matrixcompmult.mediump_compute.mat3
+dEQP-VK.glsl.builtin.precision.matrixcompmult.mediump_compute.mat3x4
+dEQP-VK.glsl.builtin.precision.matrixcompmult.mediump_compute.mat4x2
+dEQP-VK.glsl.builtin.precision.matrixcompmult.mediump_compute.mat4x3
+dEQP-VK.glsl.builtin.precision.matrixcompmult.mediump_compute.mat4
+dEQP-VK.glsl.builtin.precision.matrixcompmult.highp_compute.mat2
+dEQP-VK.glsl.builtin.precision.matrixcompmult.highp_compute.mat2x3
+dEQP-VK.glsl.builtin.precision.matrixcompmult.highp_compute.mat2x4
+dEQP-VK.glsl.builtin.precision.matrixcompmult.highp_compute.mat3x2
+dEQP-VK.glsl.builtin.precision.matrixcompmult.highp_compute.mat3
+dEQP-VK.glsl.builtin.precision.matrixcompmult.highp_compute.mat3x4
+dEQP-VK.glsl.builtin.precision.matrixcompmult.highp_compute.mat4x2
+dEQP-VK.glsl.builtin.precision.matrixcompmult.highp_compute.mat4x3
+dEQP-VK.glsl.builtin.precision.matrixcompmult.highp_compute.mat4
+dEQP-VK.glsl.builtin.precision.outerproduct.mediump_compute.mat2
+dEQP-VK.glsl.builtin.precision.outerproduct.mediump_compute.mat2x3
+dEQP-VK.glsl.builtin.precision.outerproduct.mediump_compute.mat2x4
+dEQP-VK.glsl.builtin.precision.outerproduct.mediump_compute.mat3x2
+dEQP-VK.glsl.builtin.precision.outerproduct.mediump_compute.mat3
+dEQP-VK.glsl.builtin.precision.outerproduct.mediump_compute.mat3x4
+dEQP-VK.glsl.builtin.precision.outerproduct.mediump_compute.mat4x2
+dEQP-VK.glsl.builtin.precision.outerproduct.mediump_compute.mat4x3
+dEQP-VK.glsl.builtin.precision.outerproduct.mediump_compute.mat4
+dEQP-VK.glsl.builtin.precision.outerproduct.highp_compute.mat2
+dEQP-VK.glsl.builtin.precision.outerproduct.highp_compute.mat2x3
+dEQP-VK.glsl.builtin.precision.outerproduct.highp_compute.mat2x4
+dEQP-VK.glsl.builtin.precision.outerproduct.highp_compute.mat3x2
+dEQP-VK.glsl.builtin.precision.outerproduct.highp_compute.mat3
+dEQP-VK.glsl.builtin.precision.outerproduct.highp_compute.mat3x4
+dEQP-VK.glsl.builtin.precision.outerproduct.highp_compute.mat4x2
+dEQP-VK.glsl.builtin.precision.outerproduct.highp_compute.mat4x3
+dEQP-VK.glsl.builtin.precision.outerproduct.highp_compute.mat4
+dEQP-VK.glsl.builtin.precision.transpose.mediump_compute.mat2
+dEQP-VK.glsl.builtin.precision.transpose.mediump_compute.mat2x3
+dEQP-VK.glsl.builtin.precision.transpose.mediump_compute.mat2x4
+dEQP-VK.glsl.builtin.precision.transpose.mediump_compute.mat3x2
+dEQP-VK.glsl.builtin.precision.transpose.mediump_compute.mat3
+dEQP-VK.glsl.builtin.precision.transpose.mediump_compute.mat3x4
+dEQP-VK.glsl.builtin.precision.transpose.mediump_compute.mat4x2
+dEQP-VK.glsl.builtin.precision.transpose.mediump_compute.mat4x3
+dEQP-VK.glsl.builtin.precision.transpose.mediump_compute.mat4
+dEQP-VK.glsl.builtin.precision.transpose.highp_compute.mat2
+dEQP-VK.glsl.builtin.precision.transpose.highp_compute.mat2x3
+dEQP-VK.glsl.builtin.precision.transpose.highp_compute.mat2x4
+dEQP-VK.glsl.builtin.precision.transpose.highp_compute.mat3x2
+dEQP-VK.glsl.builtin.precision.transpose.highp_compute.mat3
+dEQP-VK.glsl.builtin.precision.transpose.highp_compute.mat3x4
+dEQP-VK.glsl.builtin.precision.transpose.highp_compute.mat4x2
+dEQP-VK.glsl.builtin.precision.transpose.highp_compute.mat4x3
+dEQP-VK.glsl.builtin.precision.transpose.highp_compute.mat4
+dEQP-VK.glsl.builtin.precision.determinant.mediump_compute.mat2
+dEQP-VK.glsl.builtin.precision.determinant.highp_compute.mat2
+dEQP-VK.glsl.builtin.precision.inverse.mediump_compute.mat2
+dEQP-VK.glsl.builtin.precision.frexp.mediump_vertex.scalar
+dEQP-VK.glsl.builtin.precision.frexp.mediump_vertex.vec2
+dEQP-VK.glsl.builtin.precision.frexp.mediump_vertex.vec3
+dEQP-VK.glsl.builtin.precision.frexp.mediump_vertex.vec4
+dEQP-VK.glsl.builtin.precision.frexp.mediump_fragment.scalar
+dEQP-VK.glsl.builtin.precision.frexp.mediump_fragment.vec2
+dEQP-VK.glsl.builtin.precision.frexp.mediump_fragment.vec3
+dEQP-VK.glsl.builtin.precision.frexp.mediump_fragment.vec4
+dEQP-VK.glsl.builtin.precision.frexp.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.frexp.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.frexp.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.frexp.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.frexp.highp_vertex.scalar
+dEQP-VK.glsl.builtin.precision.frexp.highp_vertex.vec2
+dEQP-VK.glsl.builtin.precision.frexp.highp_vertex.vec3
+dEQP-VK.glsl.builtin.precision.frexp.highp_vertex.vec4
+dEQP-VK.glsl.builtin.precision.frexp.highp_fragment.scalar
+dEQP-VK.glsl.builtin.precision.frexp.highp_fragment.vec2
+dEQP-VK.glsl.builtin.precision.frexp.highp_fragment.vec3
+dEQP-VK.glsl.builtin.precision.frexp.highp_fragment.vec4
+dEQP-VK.glsl.builtin.precision.frexp.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.frexp.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.frexp.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.frexp.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.ldexp.mediump_vertex.scalar
+dEQP-VK.glsl.builtin.precision.ldexp.mediump_vertex.vec2
+dEQP-VK.glsl.builtin.precision.ldexp.mediump_vertex.vec3
+dEQP-VK.glsl.builtin.precision.ldexp.mediump_vertex.vec4
+dEQP-VK.glsl.builtin.precision.ldexp.mediump_fragment.scalar
+dEQP-VK.glsl.builtin.precision.ldexp.mediump_fragment.vec2
+dEQP-VK.glsl.builtin.precision.ldexp.mediump_fragment.vec3
+dEQP-VK.glsl.builtin.precision.ldexp.mediump_fragment.vec4
+dEQP-VK.glsl.builtin.precision.ldexp.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.ldexp.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.ldexp.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.ldexp.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.ldexp.highp_vertex.scalar
+dEQP-VK.glsl.builtin.precision.ldexp.highp_vertex.vec2
+dEQP-VK.glsl.builtin.precision.ldexp.highp_vertex.vec3
+dEQP-VK.glsl.builtin.precision.ldexp.highp_vertex.vec4
+dEQP-VK.glsl.builtin.precision.ldexp.highp_fragment.scalar
+dEQP-VK.glsl.builtin.precision.ldexp.highp_fragment.vec2
+dEQP-VK.glsl.builtin.precision.ldexp.highp_fragment.vec3
+dEQP-VK.glsl.builtin.precision.ldexp.highp_fragment.vec4
+dEQP-VK.glsl.builtin.precision.ldexp.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.ldexp.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.ldexp.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.ldexp.highp_compute.vec4
+dEQP-VK.glsl.builtin.precision.fma.mediump_vertex.scalar
+dEQP-VK.glsl.builtin.precision.fma.mediump_vertex.vec2
+dEQP-VK.glsl.builtin.precision.fma.mediump_vertex.vec3
+dEQP-VK.glsl.builtin.precision.fma.mediump_vertex.vec4
+dEQP-VK.glsl.builtin.precision.fma.mediump_fragment.scalar
+dEQP-VK.glsl.builtin.precision.fma.mediump_fragment.vec2
+dEQP-VK.glsl.builtin.precision.fma.mediump_fragment.vec3
+dEQP-VK.glsl.builtin.precision.fma.mediump_fragment.vec4
+dEQP-VK.glsl.builtin.precision.fma.mediump_compute.scalar
+dEQP-VK.glsl.builtin.precision.fma.mediump_compute.vec2
+dEQP-VK.glsl.builtin.precision.fma.mediump_compute.vec3
+dEQP-VK.glsl.builtin.precision.fma.mediump_compute.vec4
+dEQP-VK.glsl.builtin.precision.fma.highp_vertex.scalar
+dEQP-VK.glsl.builtin.precision.fma.highp_vertex.vec2
+dEQP-VK.glsl.builtin.precision.fma.highp_vertex.vec3
+dEQP-VK.glsl.builtin.precision.fma.highp_vertex.vec4
+dEQP-VK.glsl.builtin.precision.fma.highp_fragment.scalar
+dEQP-VK.glsl.builtin.precision.fma.highp_fragment.vec2
+dEQP-VK.glsl.builtin.precision.fma.highp_fragment.vec3
+dEQP-VK.glsl.builtin.precision.fma.highp_fragment.vec4
+dEQP-VK.glsl.builtin.precision.fma.highp_compute.scalar
+dEQP-VK.glsl.builtin.precision.fma.highp_compute.vec2
+dEQP-VK.glsl.builtin.precision.fma.highp_compute.vec3
+dEQP-VK.glsl.builtin.precision.fma.highp_compute.vec4
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.sampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.samplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.sampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.sampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.sampler2dshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.samplercubeshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.sampler2darrayshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.isampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.isamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.isampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.isampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.usampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.usamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.usampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.vertex.usampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.sampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.samplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.sampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.sampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.sampler2dshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.samplercubeshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.sampler2darrayshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.isampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.isamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.isampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.isampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.usampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.usamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.usampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.fragment.usampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.sampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.samplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.sampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.sampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.sampler2dshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.samplercubeshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.sampler2darrayshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.isampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.isamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.isampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.isampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.usampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.usamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.usampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_literal.compute.usampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.sampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.samplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.sampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.sampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.sampler2dshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.samplercubeshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.sampler2darrayshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.isampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.isamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.isampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.isampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.usampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.usamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.usampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.vertex.usampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.sampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.samplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.sampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.sampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.sampler2dshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.samplercubeshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.sampler2darrayshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.isampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.isamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.isampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.isampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.usampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.usamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.usampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.fragment.usampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.sampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.samplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.sampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.sampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.sampler2dshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.samplercubeshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.sampler2darrayshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.isampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.isamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.isampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.isampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.usampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.usamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.usampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.const_expression.compute.usampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.sampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.samplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.sampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.sampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.sampler2dshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.samplercubeshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.sampler2darrayshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.isampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.isamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.isampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.isampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.usampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.usamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.usampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.vertex.usampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.sampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.samplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.sampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.sampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.sampler2dshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.samplercubeshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.sampler2darrayshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.isampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.isamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.isampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.isampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.usampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.usamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.usampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.fragment.usampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.sampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.samplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.sampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.sampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.sampler2dshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.samplercubeshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.sampler2darrayshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.isampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.isamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.isampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.isampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.usampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.usamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.usampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.uniform.compute.usampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.sampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.samplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.sampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.sampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.sampler2dshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.samplercubeshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.sampler2darrayshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.isampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.isamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.isampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.isampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.usampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.usamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.usampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.vertex.usampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.sampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.samplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.sampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.sampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.sampler2dshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.samplercubeshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.sampler2darrayshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.isampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.isamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.isampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.isampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.usampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.usamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.usampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.fragment.usampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.sampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.samplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.sampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.sampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.sampler2dshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.samplercubeshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.sampler2darrayshadow
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.isampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.isamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.isampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.isampler3d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.usampler2d
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.usamplercube
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.usampler2darray
+dEQP-VK.glsl.opaque_type_indexing.sampler.dynamically_uniform.compute.usampler3d
+dEQP-VK.glsl.opaque_type_indexing.ubo.const_literal_vertex
+dEQP-VK.glsl.opaque_type_indexing.ubo.const_literal_fragment
+dEQP-VK.glsl.opaque_type_indexing.ubo.const_literal_compute
+dEQP-VK.glsl.opaque_type_indexing.ubo.const_expression_vertex
+dEQP-VK.glsl.opaque_type_indexing.ubo.const_expression_fragment
+dEQP-VK.glsl.opaque_type_indexing.ubo.const_expression_compute
+dEQP-VK.glsl.opaque_type_indexing.ubo.uniform_vertex
+dEQP-VK.glsl.opaque_type_indexing.ubo.uniform_fragment
+dEQP-VK.glsl.opaque_type_indexing.ubo.uniform_compute
+dEQP-VK.glsl.opaque_type_indexing.ubo.dynamically_uniform_vertex
+dEQP-VK.glsl.opaque_type_indexing.ubo.dynamically_uniform_fragment
+dEQP-VK.glsl.opaque_type_indexing.ubo.dynamically_uniform_compute
+dEQP-VK.glsl.opaque_type_indexing.ssbo.const_literal_vertex
+dEQP-VK.glsl.opaque_type_indexing.ssbo.const_literal_fragment
+dEQP-VK.glsl.opaque_type_indexing.ssbo.const_literal_compute
+dEQP-VK.glsl.opaque_type_indexing.ssbo.const_expression_vertex
+dEQP-VK.glsl.opaque_type_indexing.ssbo.const_expression_fragment
+dEQP-VK.glsl.opaque_type_indexing.ssbo.const_expression_compute
+dEQP-VK.glsl.opaque_type_indexing.atomic_counter.const_literal_vertex
+dEQP-VK.glsl.opaque_type_indexing.atomic_counter.const_literal_fragment
+dEQP-VK.glsl.opaque_type_indexing.atomic_counter.const_literal_compute
+dEQP-VK.glsl.opaque_type_indexing.atomic_counter.const_expression_vertex
+dEQP-VK.glsl.opaque_type_indexing.atomic_counter.const_expression_fragment
+dEQP-VK.glsl.opaque_type_indexing.atomic_counter.const_expression_compute
+dEQP-VK.glsl.opaque_type_indexing.atomic_counter.uniform_vertex
+dEQP-VK.glsl.opaque_type_indexing.atomic_counter.uniform_fragment
+dEQP-VK.glsl.opaque_type_indexing.atomic_counter.uniform_compute
+dEQP-VK.glsl.opaque_type_indexing.atomic_counter.dynamically_uniform_vertex
+dEQP-VK.glsl.opaque_type_indexing.atomic_counter.dynamically_uniform_fragment
+dEQP-VK.glsl.opaque_type_indexing.atomic_counter.dynamically_uniform_compute
+dEQP-VK.renderpass.simple.color
+dEQP-VK.renderpass.simple.depth
+dEQP-VK.renderpass.simple.stencil
+dEQP-VK.renderpass.simple.depth_stencil
+dEQP-VK.renderpass.simple.color_depth
+dEQP-VK.renderpass.simple.color_stencil
+dEQP-VK.renderpass.simple.color_depth_stencil
+dEQP-VK.renderpass.formats.r5g6b5_unorm_pack16.clear.clear
+dEQP-VK.renderpass.formats.r5g6b5_unorm_pack16.clear.draw
+dEQP-VK.renderpass.formats.r5g6b5_unorm_pack16.clear.clear_draw
+dEQP-VK.renderpass.formats.r5g6b5_unorm_pack16.load.clear
+dEQP-VK.renderpass.formats.r5g6b5_unorm_pack16.load.draw
+dEQP-VK.renderpass.formats.r5g6b5_unorm_pack16.load.clear_draw
+dEQP-VK.renderpass.formats.r5g6b5_unorm_pack16.dont_care.clear
+dEQP-VK.renderpass.formats.r5g6b5_unorm_pack16.dont_care.draw
+dEQP-VK.renderpass.formats.r5g6b5_unorm_pack16.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r8_unorm.clear.clear
+dEQP-VK.renderpass.formats.r8_unorm.clear.draw
+dEQP-VK.renderpass.formats.r8_unorm.clear.clear_draw
+dEQP-VK.renderpass.formats.r8_unorm.load.clear
+dEQP-VK.renderpass.formats.r8_unorm.load.draw
+dEQP-VK.renderpass.formats.r8_unorm.load.clear_draw
+dEQP-VK.renderpass.formats.r8_unorm.dont_care.clear
+dEQP-VK.renderpass.formats.r8_unorm.dont_care.draw
+dEQP-VK.renderpass.formats.r8_unorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r8_snorm.clear.clear
+dEQP-VK.renderpass.formats.r8_snorm.clear.draw
+dEQP-VK.renderpass.formats.r8_snorm.clear.clear_draw
+dEQP-VK.renderpass.formats.r8_snorm.load.clear
+dEQP-VK.renderpass.formats.r8_snorm.load.draw
+dEQP-VK.renderpass.formats.r8_snorm.load.clear_draw
+dEQP-VK.renderpass.formats.r8_snorm.dont_care.clear
+dEQP-VK.renderpass.formats.r8_snorm.dont_care.draw
+dEQP-VK.renderpass.formats.r8_snorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r8_uint.clear.clear
+dEQP-VK.renderpass.formats.r8_uint.clear.draw
+dEQP-VK.renderpass.formats.r8_uint.clear.clear_draw
+dEQP-VK.renderpass.formats.r8_uint.load.clear
+dEQP-VK.renderpass.formats.r8_uint.load.draw
+dEQP-VK.renderpass.formats.r8_uint.load.clear_draw
+dEQP-VK.renderpass.formats.r8_uint.dont_care.clear
+dEQP-VK.renderpass.formats.r8_uint.dont_care.draw
+dEQP-VK.renderpass.formats.r8_uint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r8_sint.clear.clear
+dEQP-VK.renderpass.formats.r8_sint.clear.draw
+dEQP-VK.renderpass.formats.r8_sint.clear.clear_draw
+dEQP-VK.renderpass.formats.r8_sint.load.clear
+dEQP-VK.renderpass.formats.r8_sint.load.draw
+dEQP-VK.renderpass.formats.r8_sint.load.clear_draw
+dEQP-VK.renderpass.formats.r8_sint.dont_care.clear
+dEQP-VK.renderpass.formats.r8_sint.dont_care.draw
+dEQP-VK.renderpass.formats.r8_sint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r8g8_unorm.clear.clear
+dEQP-VK.renderpass.formats.r8g8_unorm.clear.draw
+dEQP-VK.renderpass.formats.r8g8_unorm.clear.clear_draw
+dEQP-VK.renderpass.formats.r8g8_unorm.load.clear
+dEQP-VK.renderpass.formats.r8g8_unorm.load.draw
+dEQP-VK.renderpass.formats.r8g8_unorm.load.clear_draw
+dEQP-VK.renderpass.formats.r8g8_unorm.dont_care.clear
+dEQP-VK.renderpass.formats.r8g8_unorm.dont_care.draw
+dEQP-VK.renderpass.formats.r8g8_unorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r8g8_snorm.clear.clear
+dEQP-VK.renderpass.formats.r8g8_snorm.clear.draw
+dEQP-VK.renderpass.formats.r8g8_snorm.clear.clear_draw
+dEQP-VK.renderpass.formats.r8g8_snorm.load.clear
+dEQP-VK.renderpass.formats.r8g8_snorm.load.draw
+dEQP-VK.renderpass.formats.r8g8_snorm.load.clear_draw
+dEQP-VK.renderpass.formats.r8g8_snorm.dont_care.clear
+dEQP-VK.renderpass.formats.r8g8_snorm.dont_care.draw
+dEQP-VK.renderpass.formats.r8g8_snorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r8g8_uint.clear.clear
+dEQP-VK.renderpass.formats.r8g8_uint.clear.draw
+dEQP-VK.renderpass.formats.r8g8_uint.clear.clear_draw
+dEQP-VK.renderpass.formats.r8g8_uint.load.clear
+dEQP-VK.renderpass.formats.r8g8_uint.load.draw
+dEQP-VK.renderpass.formats.r8g8_uint.load.clear_draw
+dEQP-VK.renderpass.formats.r8g8_uint.dont_care.clear
+dEQP-VK.renderpass.formats.r8g8_uint.dont_care.draw
+dEQP-VK.renderpass.formats.r8g8_uint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r8g8_sint.clear.clear
+dEQP-VK.renderpass.formats.r8g8_sint.clear.draw
+dEQP-VK.renderpass.formats.r8g8_sint.clear.clear_draw
+dEQP-VK.renderpass.formats.r8g8_sint.load.clear
+dEQP-VK.renderpass.formats.r8g8_sint.load.draw
+dEQP-VK.renderpass.formats.r8g8_sint.load.clear_draw
+dEQP-VK.renderpass.formats.r8g8_sint.dont_care.clear
+dEQP-VK.renderpass.formats.r8g8_sint.dont_care.draw
+dEQP-VK.renderpass.formats.r8g8_sint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_unorm.clear.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_unorm.clear.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_unorm.clear.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_unorm.load.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_unorm.load.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_unorm.load.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_unorm.dont_care.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_unorm.dont_care.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_unorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_snorm.clear.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_snorm.clear.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_snorm.clear.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_snorm.load.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_snorm.load.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_snorm.load.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_snorm.dont_care.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_snorm.dont_care.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_snorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_uint.clear.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_uint.clear.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_uint.clear.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_uint.load.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_uint.load.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_uint.load.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_uint.dont_care.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_uint.dont_care.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_uint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_sint.clear.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_sint.clear.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_sint.clear.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_sint.load.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_sint.load.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_sint.load.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_sint.dont_care.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_sint.dont_care.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_sint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_srgb.clear.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_srgb.clear.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_srgb.clear.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_srgb.load.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_srgb.load.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_srgb.load.clear_draw
+dEQP-VK.renderpass.formats.r8g8b8a8_srgb.dont_care.clear
+dEQP-VK.renderpass.formats.r8g8b8a8_srgb.dont_care.draw
+dEQP-VK.renderpass.formats.r8g8b8a8_srgb.dont_care.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_unorm_pack32.clear.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_unorm_pack32.clear.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_unorm_pack32.clear.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_unorm_pack32.load.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_unorm_pack32.load.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_unorm_pack32.load.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_unorm_pack32.dont_care.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_unorm_pack32.dont_care.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_unorm_pack32.dont_care.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_snorm_pack32.clear.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_snorm_pack32.clear.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_snorm_pack32.clear.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_snorm_pack32.load.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_snorm_pack32.load.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_snorm_pack32.load.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_snorm_pack32.dont_care.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_snorm_pack32.dont_care.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_snorm_pack32.dont_care.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_uint_pack32.clear.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_uint_pack32.clear.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_uint_pack32.clear.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_uint_pack32.load.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_uint_pack32.load.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_uint_pack32.load.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_uint_pack32.dont_care.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_uint_pack32.dont_care.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_uint_pack32.dont_care.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_sint_pack32.clear.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_sint_pack32.clear.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_sint_pack32.clear.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_sint_pack32.load.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_sint_pack32.load.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_sint_pack32.load.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_sint_pack32.dont_care.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_sint_pack32.dont_care.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_sint_pack32.dont_care.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_srgb_pack32.clear.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_srgb_pack32.clear.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_srgb_pack32.clear.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_srgb_pack32.load.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_srgb_pack32.load.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_srgb_pack32.load.clear_draw
+dEQP-VK.renderpass.formats.a8b8g8r8_srgb_pack32.dont_care.clear
+dEQP-VK.renderpass.formats.a8b8g8r8_srgb_pack32.dont_care.draw
+dEQP-VK.renderpass.formats.a8b8g8r8_srgb_pack32.dont_care.clear_draw
+dEQP-VK.renderpass.formats.b8g8r8a8_unorm.clear.clear
+dEQP-VK.renderpass.formats.b8g8r8a8_unorm.clear.draw
+dEQP-VK.renderpass.formats.b8g8r8a8_unorm.clear.clear_draw
+dEQP-VK.renderpass.formats.b8g8r8a8_unorm.load.clear
+dEQP-VK.renderpass.formats.b8g8r8a8_unorm.load.draw
+dEQP-VK.renderpass.formats.b8g8r8a8_unorm.load.clear_draw
+dEQP-VK.renderpass.formats.b8g8r8a8_unorm.dont_care.clear
+dEQP-VK.renderpass.formats.b8g8r8a8_unorm.dont_care.draw
+dEQP-VK.renderpass.formats.b8g8r8a8_unorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.b8g8r8a8_srgb.clear.clear
+dEQP-VK.renderpass.formats.b8g8r8a8_srgb.clear.draw
+dEQP-VK.renderpass.formats.b8g8r8a8_srgb.clear.clear_draw
+dEQP-VK.renderpass.formats.b8g8r8a8_srgb.load.clear
+dEQP-VK.renderpass.formats.b8g8r8a8_srgb.load.draw
+dEQP-VK.renderpass.formats.b8g8r8a8_srgb.load.clear_draw
+dEQP-VK.renderpass.formats.b8g8r8a8_srgb.dont_care.clear
+dEQP-VK.renderpass.formats.b8g8r8a8_srgb.dont_care.draw
+dEQP-VK.renderpass.formats.b8g8r8a8_srgb.dont_care.clear_draw
+dEQP-VK.renderpass.formats.a2r10g10b10_unorm_pack32.clear.clear
+dEQP-VK.renderpass.formats.a2r10g10b10_unorm_pack32.clear.draw
+dEQP-VK.renderpass.formats.a2r10g10b10_unorm_pack32.clear.clear_draw
+dEQP-VK.renderpass.formats.a2r10g10b10_unorm_pack32.load.clear
+dEQP-VK.renderpass.formats.a2r10g10b10_unorm_pack32.load.draw
+dEQP-VK.renderpass.formats.a2r10g10b10_unorm_pack32.load.clear_draw
+dEQP-VK.renderpass.formats.a2r10g10b10_unorm_pack32.dont_care.clear
+dEQP-VK.renderpass.formats.a2r10g10b10_unorm_pack32.dont_care.draw
+dEQP-VK.renderpass.formats.a2r10g10b10_unorm_pack32.dont_care.clear_draw
+dEQP-VK.renderpass.formats.a2b10g10r10_unorm_pack32.clear.clear
+dEQP-VK.renderpass.formats.a2b10g10r10_unorm_pack32.clear.draw
+dEQP-VK.renderpass.formats.a2b10g10r10_unorm_pack32.clear.clear_draw
+dEQP-VK.renderpass.formats.a2b10g10r10_unorm_pack32.load.clear
+dEQP-VK.renderpass.formats.a2b10g10r10_unorm_pack32.load.draw
+dEQP-VK.renderpass.formats.a2b10g10r10_unorm_pack32.load.clear_draw
+dEQP-VK.renderpass.formats.a2b10g10r10_unorm_pack32.dont_care.clear
+dEQP-VK.renderpass.formats.a2b10g10r10_unorm_pack32.dont_care.draw
+dEQP-VK.renderpass.formats.a2b10g10r10_unorm_pack32.dont_care.clear_draw
+dEQP-VK.renderpass.formats.a2b10g10r10_uint_pack32.clear.clear
+dEQP-VK.renderpass.formats.a2b10g10r10_uint_pack32.clear.draw
+dEQP-VK.renderpass.formats.a2b10g10r10_uint_pack32.clear.clear_draw
+dEQP-VK.renderpass.formats.a2b10g10r10_uint_pack32.load.clear
+dEQP-VK.renderpass.formats.a2b10g10r10_uint_pack32.load.draw
+dEQP-VK.renderpass.formats.a2b10g10r10_uint_pack32.load.clear_draw
+dEQP-VK.renderpass.formats.a2b10g10r10_uint_pack32.dont_care.clear
+dEQP-VK.renderpass.formats.a2b10g10r10_uint_pack32.dont_care.draw
+dEQP-VK.renderpass.formats.a2b10g10r10_uint_pack32.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16_unorm.clear.clear
+dEQP-VK.renderpass.formats.r16_unorm.clear.draw
+dEQP-VK.renderpass.formats.r16_unorm.clear.clear_draw
+dEQP-VK.renderpass.formats.r16_unorm.load.clear
+dEQP-VK.renderpass.formats.r16_unorm.load.draw
+dEQP-VK.renderpass.formats.r16_unorm.load.clear_draw
+dEQP-VK.renderpass.formats.r16_unorm.dont_care.clear
+dEQP-VK.renderpass.formats.r16_unorm.dont_care.draw
+dEQP-VK.renderpass.formats.r16_unorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16_snorm.clear.clear
+dEQP-VK.renderpass.formats.r16_snorm.clear.draw
+dEQP-VK.renderpass.formats.r16_snorm.clear.clear_draw
+dEQP-VK.renderpass.formats.r16_snorm.load.clear
+dEQP-VK.renderpass.formats.r16_snorm.load.draw
+dEQP-VK.renderpass.formats.r16_snorm.load.clear_draw
+dEQP-VK.renderpass.formats.r16_snorm.dont_care.clear
+dEQP-VK.renderpass.formats.r16_snorm.dont_care.draw
+dEQP-VK.renderpass.formats.r16_snorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16_uint.clear.clear
+dEQP-VK.renderpass.formats.r16_uint.clear.draw
+dEQP-VK.renderpass.formats.r16_uint.clear.clear_draw
+dEQP-VK.renderpass.formats.r16_uint.load.clear
+dEQP-VK.renderpass.formats.r16_uint.load.draw
+dEQP-VK.renderpass.formats.r16_uint.load.clear_draw
+dEQP-VK.renderpass.formats.r16_uint.dont_care.clear
+dEQP-VK.renderpass.formats.r16_uint.dont_care.draw
+dEQP-VK.renderpass.formats.r16_uint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16_sint.clear.clear
+dEQP-VK.renderpass.formats.r16_sint.clear.draw
+dEQP-VK.renderpass.formats.r16_sint.clear.clear_draw
+dEQP-VK.renderpass.formats.r16_sint.load.clear
+dEQP-VK.renderpass.formats.r16_sint.load.draw
+dEQP-VK.renderpass.formats.r16_sint.load.clear_draw
+dEQP-VK.renderpass.formats.r16_sint.dont_care.clear
+dEQP-VK.renderpass.formats.r16_sint.dont_care.draw
+dEQP-VK.renderpass.formats.r16_sint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16_sfloat.clear.clear
+dEQP-VK.renderpass.formats.r16_sfloat.clear.draw
+dEQP-VK.renderpass.formats.r16_sfloat.clear.clear_draw
+dEQP-VK.renderpass.formats.r16_sfloat.load.clear
+dEQP-VK.renderpass.formats.r16_sfloat.load.draw
+dEQP-VK.renderpass.formats.r16_sfloat.load.clear_draw
+dEQP-VK.renderpass.formats.r16_sfloat.dont_care.clear
+dEQP-VK.renderpass.formats.r16_sfloat.dont_care.draw
+dEQP-VK.renderpass.formats.r16_sfloat.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16g16_unorm.clear.clear
+dEQP-VK.renderpass.formats.r16g16_unorm.clear.draw
+dEQP-VK.renderpass.formats.r16g16_unorm.clear.clear_draw
+dEQP-VK.renderpass.formats.r16g16_unorm.load.clear
+dEQP-VK.renderpass.formats.r16g16_unorm.load.draw
+dEQP-VK.renderpass.formats.r16g16_unorm.load.clear_draw
+dEQP-VK.renderpass.formats.r16g16_unorm.dont_care.clear
+dEQP-VK.renderpass.formats.r16g16_unorm.dont_care.draw
+dEQP-VK.renderpass.formats.r16g16_unorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16g16_snorm.clear.clear
+dEQP-VK.renderpass.formats.r16g16_snorm.clear.draw
+dEQP-VK.renderpass.formats.r16g16_snorm.clear.clear_draw
+dEQP-VK.renderpass.formats.r16g16_snorm.load.clear
+dEQP-VK.renderpass.formats.r16g16_snorm.load.draw
+dEQP-VK.renderpass.formats.r16g16_snorm.load.clear_draw
+dEQP-VK.renderpass.formats.r16g16_snorm.dont_care.clear
+dEQP-VK.renderpass.formats.r16g16_snorm.dont_care.draw
+dEQP-VK.renderpass.formats.r16g16_snorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16g16_uint.clear.clear
+dEQP-VK.renderpass.formats.r16g16_uint.clear.draw
+dEQP-VK.renderpass.formats.r16g16_uint.clear.clear_draw
+dEQP-VK.renderpass.formats.r16g16_uint.load.clear
+dEQP-VK.renderpass.formats.r16g16_uint.load.draw
+dEQP-VK.renderpass.formats.r16g16_uint.load.clear_draw
+dEQP-VK.renderpass.formats.r16g16_uint.dont_care.clear
+dEQP-VK.renderpass.formats.r16g16_uint.dont_care.draw
+dEQP-VK.renderpass.formats.r16g16_uint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16g16_sint.clear.clear
+dEQP-VK.renderpass.formats.r16g16_sint.clear.draw
+dEQP-VK.renderpass.formats.r16g16_sint.clear.clear_draw
+dEQP-VK.renderpass.formats.r16g16_sint.load.clear
+dEQP-VK.renderpass.formats.r16g16_sint.load.draw
+dEQP-VK.renderpass.formats.r16g16_sint.load.clear_draw
+dEQP-VK.renderpass.formats.r16g16_sint.dont_care.clear
+dEQP-VK.renderpass.formats.r16g16_sint.dont_care.draw
+dEQP-VK.renderpass.formats.r16g16_sint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16g16_sfloat.clear.clear
+dEQP-VK.renderpass.formats.r16g16_sfloat.clear.draw
+dEQP-VK.renderpass.formats.r16g16_sfloat.clear.clear_draw
+dEQP-VK.renderpass.formats.r16g16_sfloat.load.clear
+dEQP-VK.renderpass.formats.r16g16_sfloat.load.draw
+dEQP-VK.renderpass.formats.r16g16_sfloat.load.clear_draw
+dEQP-VK.renderpass.formats.r16g16_sfloat.dont_care.clear
+dEQP-VK.renderpass.formats.r16g16_sfloat.dont_care.draw
+dEQP-VK.renderpass.formats.r16g16_sfloat.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_unorm.clear.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_unorm.clear.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_unorm.clear.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_unorm.load.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_unorm.load.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_unorm.load.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_unorm.dont_care.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_unorm.dont_care.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_unorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_snorm.clear.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_snorm.clear.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_snorm.clear.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_snorm.load.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_snorm.load.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_snorm.load.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_snorm.dont_care.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_snorm.dont_care.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_snorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_uint.clear.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_uint.clear.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_uint.clear.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_uint.load.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_uint.load.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_uint.load.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_uint.dont_care.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_uint.dont_care.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_uint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_sint.clear.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_sint.clear.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_sint.clear.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_sint.load.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_sint.load.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_sint.load.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_sint.dont_care.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_sint.dont_care.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_sint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_sfloat.clear.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_sfloat.clear.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_sfloat.clear.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_sfloat.load.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_sfloat.load.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_sfloat.load.clear_draw
+dEQP-VK.renderpass.formats.r16g16b16a16_sfloat.dont_care.clear
+dEQP-VK.renderpass.formats.r16g16b16a16_sfloat.dont_care.draw
+dEQP-VK.renderpass.formats.r16g16b16a16_sfloat.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r32_uint.clear.clear
+dEQP-VK.renderpass.formats.r32_uint.clear.draw
+dEQP-VK.renderpass.formats.r32_uint.clear.clear_draw
+dEQP-VK.renderpass.formats.r32_uint.load.clear
+dEQP-VK.renderpass.formats.r32_uint.load.draw
+dEQP-VK.renderpass.formats.r32_uint.load.clear_draw
+dEQP-VK.renderpass.formats.r32_uint.dont_care.clear
+dEQP-VK.renderpass.formats.r32_uint.dont_care.draw
+dEQP-VK.renderpass.formats.r32_uint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r32_sint.clear.clear
+dEQP-VK.renderpass.formats.r32_sint.clear.draw
+dEQP-VK.renderpass.formats.r32_sint.clear.clear_draw
+dEQP-VK.renderpass.formats.r32_sint.load.clear
+dEQP-VK.renderpass.formats.r32_sint.load.draw
+dEQP-VK.renderpass.formats.r32_sint.load.clear_draw
+dEQP-VK.renderpass.formats.r32_sint.dont_care.clear
+dEQP-VK.renderpass.formats.r32_sint.dont_care.draw
+dEQP-VK.renderpass.formats.r32_sint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r32_sfloat.clear.clear
+dEQP-VK.renderpass.formats.r32_sfloat.clear.draw
+dEQP-VK.renderpass.formats.r32_sfloat.clear.clear_draw
+dEQP-VK.renderpass.formats.r32_sfloat.load.clear
+dEQP-VK.renderpass.formats.r32_sfloat.load.draw
+dEQP-VK.renderpass.formats.r32_sfloat.load.clear_draw
+dEQP-VK.renderpass.formats.r32_sfloat.dont_care.clear
+dEQP-VK.renderpass.formats.r32_sfloat.dont_care.draw
+dEQP-VK.renderpass.formats.r32_sfloat.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r32g32_uint.clear.clear
+dEQP-VK.renderpass.formats.r32g32_uint.clear.draw
+dEQP-VK.renderpass.formats.r32g32_uint.clear.clear_draw
+dEQP-VK.renderpass.formats.r32g32_uint.load.clear
+dEQP-VK.renderpass.formats.r32g32_uint.load.draw
+dEQP-VK.renderpass.formats.r32g32_uint.load.clear_draw
+dEQP-VK.renderpass.formats.r32g32_uint.dont_care.clear
+dEQP-VK.renderpass.formats.r32g32_uint.dont_care.draw
+dEQP-VK.renderpass.formats.r32g32_uint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r32g32_sint.clear.clear
+dEQP-VK.renderpass.formats.r32g32_sint.clear.draw
+dEQP-VK.renderpass.formats.r32g32_sint.clear.clear_draw
+dEQP-VK.renderpass.formats.r32g32_sint.load.clear
+dEQP-VK.renderpass.formats.r32g32_sint.load.draw
+dEQP-VK.renderpass.formats.r32g32_sint.load.clear_draw
+dEQP-VK.renderpass.formats.r32g32_sint.dont_care.clear
+dEQP-VK.renderpass.formats.r32g32_sint.dont_care.draw
+dEQP-VK.renderpass.formats.r32g32_sint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r32g32_sfloat.clear.clear
+dEQP-VK.renderpass.formats.r32g32_sfloat.clear.draw
+dEQP-VK.renderpass.formats.r32g32_sfloat.clear.clear_draw
+dEQP-VK.renderpass.formats.r32g32_sfloat.load.clear
+dEQP-VK.renderpass.formats.r32g32_sfloat.load.draw
+dEQP-VK.renderpass.formats.r32g32_sfloat.load.clear_draw
+dEQP-VK.renderpass.formats.r32g32_sfloat.dont_care.clear
+dEQP-VK.renderpass.formats.r32g32_sfloat.dont_care.draw
+dEQP-VK.renderpass.formats.r32g32_sfloat.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r32g32b32a32_uint.clear.clear
+dEQP-VK.renderpass.formats.r32g32b32a32_uint.clear.draw
+dEQP-VK.renderpass.formats.r32g32b32a32_uint.clear.clear_draw
+dEQP-VK.renderpass.formats.r32g32b32a32_uint.load.clear
+dEQP-VK.renderpass.formats.r32g32b32a32_uint.load.draw
+dEQP-VK.renderpass.formats.r32g32b32a32_uint.load.clear_draw
+dEQP-VK.renderpass.formats.r32g32b32a32_uint.dont_care.clear
+dEQP-VK.renderpass.formats.r32g32b32a32_uint.dont_care.draw
+dEQP-VK.renderpass.formats.r32g32b32a32_uint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r32g32b32a32_sint.clear.clear
+dEQP-VK.renderpass.formats.r32g32b32a32_sint.clear.draw
+dEQP-VK.renderpass.formats.r32g32b32a32_sint.clear.clear_draw
+dEQP-VK.renderpass.formats.r32g32b32a32_sint.load.clear
+dEQP-VK.renderpass.formats.r32g32b32a32_sint.load.draw
+dEQP-VK.renderpass.formats.r32g32b32a32_sint.load.clear_draw
+dEQP-VK.renderpass.formats.r32g32b32a32_sint.dont_care.clear
+dEQP-VK.renderpass.formats.r32g32b32a32_sint.dont_care.draw
+dEQP-VK.renderpass.formats.r32g32b32a32_sint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.r32g32b32a32_sfloat.clear.clear
+dEQP-VK.renderpass.formats.r32g32b32a32_sfloat.clear.draw
+dEQP-VK.renderpass.formats.r32g32b32a32_sfloat.clear.clear_draw
+dEQP-VK.renderpass.formats.r32g32b32a32_sfloat.load.clear
+dEQP-VK.renderpass.formats.r32g32b32a32_sfloat.load.draw
+dEQP-VK.renderpass.formats.r32g32b32a32_sfloat.load.clear_draw
+dEQP-VK.renderpass.formats.r32g32b32a32_sfloat.dont_care.clear
+dEQP-VK.renderpass.formats.r32g32b32a32_sfloat.dont_care.draw
+dEQP-VK.renderpass.formats.r32g32b32a32_sfloat.dont_care.clear_draw
+dEQP-VK.renderpass.formats.d16_unorm.clear.clear
+dEQP-VK.renderpass.formats.d16_unorm.clear.draw
+dEQP-VK.renderpass.formats.d16_unorm.clear.clear_draw
+dEQP-VK.renderpass.formats.d16_unorm.load.clear
+dEQP-VK.renderpass.formats.d16_unorm.load.draw
+dEQP-VK.renderpass.formats.d16_unorm.load.clear_draw
+dEQP-VK.renderpass.formats.d16_unorm.dont_care.clear
+dEQP-VK.renderpass.formats.d16_unorm.dont_care.draw
+dEQP-VK.renderpass.formats.d16_unorm.dont_care.clear_draw
+dEQP-VK.renderpass.formats.x8_d24_unorm_pack32.clear.clear
+dEQP-VK.renderpass.formats.x8_d24_unorm_pack32.clear.draw
+dEQP-VK.renderpass.formats.x8_d24_unorm_pack32.clear.clear_draw
+dEQP-VK.renderpass.formats.x8_d24_unorm_pack32.load.clear
+dEQP-VK.renderpass.formats.x8_d24_unorm_pack32.load.draw
+dEQP-VK.renderpass.formats.x8_d24_unorm_pack32.load.clear_draw
+dEQP-VK.renderpass.formats.x8_d24_unorm_pack32.dont_care.clear
+dEQP-VK.renderpass.formats.x8_d24_unorm_pack32.dont_care.draw
+dEQP-VK.renderpass.formats.x8_d24_unorm_pack32.dont_care.clear_draw
+dEQP-VK.renderpass.formats.d32_sfloat.clear.clear
+dEQP-VK.renderpass.formats.d32_sfloat.clear.draw
+dEQP-VK.renderpass.formats.d32_sfloat.clear.clear_draw
+dEQP-VK.renderpass.formats.d32_sfloat.load.clear
+dEQP-VK.renderpass.formats.d32_sfloat.load.draw
+dEQP-VK.renderpass.formats.d32_sfloat.load.clear_draw
+dEQP-VK.renderpass.formats.d32_sfloat.dont_care.clear
+dEQP-VK.renderpass.formats.d32_sfloat.dont_care.draw
+dEQP-VK.renderpass.formats.d32_sfloat.dont_care.clear_draw
+dEQP-VK.renderpass.formats.d24_unorm_s8_uint.clear.clear
+dEQP-VK.renderpass.formats.d24_unorm_s8_uint.clear.draw
+dEQP-VK.renderpass.formats.d24_unorm_s8_uint.clear.clear_draw
+dEQP-VK.renderpass.formats.d24_unorm_s8_uint.load.clear
+dEQP-VK.renderpass.formats.d24_unorm_s8_uint.load.draw
+dEQP-VK.renderpass.formats.d24_unorm_s8_uint.load.clear_draw
+dEQP-VK.renderpass.formats.d24_unorm_s8_uint.dont_care.clear
+dEQP-VK.renderpass.formats.d24_unorm_s8_uint.dont_care.draw
+dEQP-VK.renderpass.formats.d24_unorm_s8_uint.dont_care.clear_draw
+dEQP-VK.renderpass.formats.d32_sfloat_s8_uint.clear.clear
+dEQP-VK.renderpass.formats.d32_sfloat_s8_uint.clear.draw
+dEQP-VK.renderpass.formats.d32_sfloat_s8_uint.clear.clear_draw
+dEQP-VK.renderpass.formats.d32_sfloat_s8_uint.load.clear
+dEQP-VK.renderpass.formats.d32_sfloat_s8_uint.load.draw
+dEQP-VK.renderpass.formats.d32_sfloat_s8_uint.load.clear_draw
+dEQP-VK.renderpass.formats.d32_sfloat_s8_uint.dont_care.clear
+dEQP-VK.renderpass.formats.d32_sfloat_s8_uint.dont_care.draw
+dEQP-VK.renderpass.formats.d32_sfloat_s8_uint.dont_care.clear_draw
+dEQP-VK.renderpass.attachment.1.0
+dEQP-VK.renderpass.attachment.1.1
+dEQP-VK.renderpass.attachment.1.2
+dEQP-VK.renderpass.attachment.1.3
+dEQP-VK.renderpass.attachment.1.4
+dEQP-VK.renderpass.attachment.1.5
+dEQP-VK.renderpass.attachment.1.6
+dEQP-VK.renderpass.attachment.1.7
+dEQP-VK.renderpass.attachment.1.8
+dEQP-VK.renderpass.attachment.1.9
+dEQP-VK.renderpass.attachment.1.10
+dEQP-VK.renderpass.attachment.1.11
+dEQP-VK.renderpass.attachment.1.12
+dEQP-VK.renderpass.attachment.1.13
+dEQP-VK.renderpass.attachment.1.14
+dEQP-VK.renderpass.attachment.1.15
+dEQP-VK.renderpass.attachment.1.16
+dEQP-VK.renderpass.attachment.1.17
+dEQP-VK.renderpass.attachment.1.18
+dEQP-VK.renderpass.attachment.1.19
+dEQP-VK.renderpass.attachment.1.20
+dEQP-VK.renderpass.attachment.1.21
+dEQP-VK.renderpass.attachment.1.22
+dEQP-VK.renderpass.attachment.1.23
+dEQP-VK.renderpass.attachment.1.24
+dEQP-VK.renderpass.attachment.1.25
+dEQP-VK.renderpass.attachment.1.26
+dEQP-VK.renderpass.attachment.1.27
+dEQP-VK.renderpass.attachment.1.28
+dEQP-VK.renderpass.attachment.1.29
+dEQP-VK.renderpass.attachment.1.30
+dEQP-VK.renderpass.attachment.1.31
+dEQP-VK.renderpass.attachment.1.32
+dEQP-VK.renderpass.attachment.1.33
+dEQP-VK.renderpass.attachment.1.34
+dEQP-VK.renderpass.attachment.1.35
+dEQP-VK.renderpass.attachment.1.36
+dEQP-VK.renderpass.attachment.1.37
+dEQP-VK.renderpass.attachment.1.38
+dEQP-VK.renderpass.attachment.1.39
+dEQP-VK.renderpass.attachment.1.40
+dEQP-VK.renderpass.attachment.1.41
+dEQP-VK.renderpass.attachment.1.42
+dEQP-VK.renderpass.attachment.1.43
+dEQP-VK.renderpass.attachment.1.44
+dEQP-VK.renderpass.attachment.1.45
+dEQP-VK.renderpass.attachment.1.46
+dEQP-VK.renderpass.attachment.1.47
+dEQP-VK.renderpass.attachment.1.48
+dEQP-VK.renderpass.attachment.1.49
+dEQP-VK.renderpass.attachment.1.50
+dEQP-VK.renderpass.attachment.1.51
+dEQP-VK.renderpass.attachment.1.52
+dEQP-VK.renderpass.attachment.1.53
+dEQP-VK.renderpass.attachment.1.54
+dEQP-VK.renderpass.attachment.1.55
+dEQP-VK.renderpass.attachment.1.56
+dEQP-VK.renderpass.attachment.1.57
+dEQP-VK.renderpass.attachment.1.58
+dEQP-VK.renderpass.attachment.1.59
+dEQP-VK.renderpass.attachment.1.60
+dEQP-VK.renderpass.attachment.1.61
+dEQP-VK.renderpass.attachment.1.62
+dEQP-VK.renderpass.attachment.1.63
+dEQP-VK.renderpass.attachment.1.64
+dEQP-VK.renderpass.attachment.1.65
+dEQP-VK.renderpass.attachment.1.66
+dEQP-VK.renderpass.attachment.1.67
+dEQP-VK.renderpass.attachment.1.68
+dEQP-VK.renderpass.attachment.1.69
+dEQP-VK.renderpass.attachment.1.70
+dEQP-VK.renderpass.attachment.1.71
+dEQP-VK.renderpass.attachment.1.72
+dEQP-VK.renderpass.attachment.1.73
+dEQP-VK.renderpass.attachment.1.74
+dEQP-VK.renderpass.attachment.1.75
+dEQP-VK.renderpass.attachment.1.76
+dEQP-VK.renderpass.attachment.1.77
+dEQP-VK.renderpass.attachment.1.78
+dEQP-VK.renderpass.attachment.1.79
+dEQP-VK.renderpass.attachment.1.80
+dEQP-VK.renderpass.attachment.1.81
+dEQP-VK.renderpass.attachment.1.82
+dEQP-VK.renderpass.attachment.1.83
+dEQP-VK.renderpass.attachment.1.84
+dEQP-VK.renderpass.attachment.1.85
+dEQP-VK.renderpass.attachment.1.86
+dEQP-VK.renderpass.attachment.1.87
+dEQP-VK.renderpass.attachment.1.88
+dEQP-VK.renderpass.attachment.1.89
+dEQP-VK.renderpass.attachment.1.90
+dEQP-VK.renderpass.attachment.1.91
+dEQP-VK.renderpass.attachment.1.92
+dEQP-VK.renderpass.attachment.1.93
+dEQP-VK.renderpass.attachment.1.94
+dEQP-VK.renderpass.attachment.1.95
+dEQP-VK.renderpass.attachment.1.96
+dEQP-VK.renderpass.attachment.1.97
+dEQP-VK.renderpass.attachment.1.98
+dEQP-VK.renderpass.attachment.1.99
+dEQP-VK.renderpass.attachment.3.200
+dEQP-VK.renderpass.attachment.3.201
+dEQP-VK.renderpass.attachment.3.202
+dEQP-VK.renderpass.attachment.3.203
+dEQP-VK.renderpass.attachment.3.204
+dEQP-VK.renderpass.attachment.3.205
+dEQP-VK.renderpass.attachment.3.206
+dEQP-VK.renderpass.attachment.3.207
+dEQP-VK.renderpass.attachment.3.208
+dEQP-VK.renderpass.attachment.3.209
+dEQP-VK.renderpass.attachment.3.210
+dEQP-VK.renderpass.attachment.3.211
+dEQP-VK.renderpass.attachment.3.212
+dEQP-VK.renderpass.attachment.3.213
+dEQP-VK.renderpass.attachment.3.214
+dEQP-VK.renderpass.attachment.3.215
+dEQP-VK.renderpass.attachment.3.216
+dEQP-VK.renderpass.attachment.3.217
+dEQP-VK.renderpass.attachment.3.218
+dEQP-VK.renderpass.attachment.3.219
+dEQP-VK.renderpass.attachment.3.220
+dEQP-VK.renderpass.attachment.3.221
+dEQP-VK.renderpass.attachment.3.222
+dEQP-VK.renderpass.attachment.3.223
+dEQP-VK.renderpass.attachment.3.224
+dEQP-VK.renderpass.attachment.3.225
+dEQP-VK.renderpass.attachment.3.226
+dEQP-VK.renderpass.attachment.3.227
+dEQP-VK.renderpass.attachment.3.228
+dEQP-VK.renderpass.attachment.3.229
+dEQP-VK.renderpass.attachment.3.230
+dEQP-VK.renderpass.attachment.3.231
+dEQP-VK.renderpass.attachment.3.232
+dEQP-VK.renderpass.attachment.3.233
+dEQP-VK.renderpass.attachment.3.234
+dEQP-VK.renderpass.attachment.3.235
+dEQP-VK.renderpass.attachment.3.236
+dEQP-VK.renderpass.attachment.3.237
+dEQP-VK.renderpass.attachment.3.238
+dEQP-VK.renderpass.attachment.3.239
+dEQP-VK.renderpass.attachment.3.240
+dEQP-VK.renderpass.attachment.3.241
+dEQP-VK.renderpass.attachment.3.242
+dEQP-VK.renderpass.attachment.3.243
+dEQP-VK.renderpass.attachment.3.244
+dEQP-VK.renderpass.attachment.3.245
+dEQP-VK.renderpass.attachment.3.246
+dEQP-VK.renderpass.attachment.3.247
+dEQP-VK.renderpass.attachment.3.248
+dEQP-VK.renderpass.attachment.3.249
+dEQP-VK.renderpass.attachment.3.250
+dEQP-VK.renderpass.attachment.3.251
+dEQP-VK.renderpass.attachment.3.252
+dEQP-VK.renderpass.attachment.3.253
+dEQP-VK.renderpass.attachment.3.254
+dEQP-VK.renderpass.attachment.3.255
+dEQP-VK.renderpass.attachment.3.256
+dEQP-VK.renderpass.attachment.3.257
+dEQP-VK.renderpass.attachment.3.258
+dEQP-VK.renderpass.attachment.3.259
+dEQP-VK.renderpass.attachment.3.260
+dEQP-VK.renderpass.attachment.3.261
+dEQP-VK.renderpass.attachment.3.262
+dEQP-VK.renderpass.attachment.3.263
+dEQP-VK.renderpass.attachment.3.264
+dEQP-VK.renderpass.attachment.3.265
+dEQP-VK.renderpass.attachment.3.266
+dEQP-VK.renderpass.attachment.3.267
+dEQP-VK.renderpass.attachment.3.268
+dEQP-VK.renderpass.attachment.3.269
+dEQP-VK.renderpass.attachment.3.270
+dEQP-VK.renderpass.attachment.3.271
+dEQP-VK.renderpass.attachment.3.272
+dEQP-VK.renderpass.attachment.3.273
+dEQP-VK.renderpass.attachment.3.274
+dEQP-VK.renderpass.attachment.3.275
+dEQP-VK.renderpass.attachment.3.276
+dEQP-VK.renderpass.attachment.3.277
+dEQP-VK.renderpass.attachment.3.278
+dEQP-VK.renderpass.attachment.3.279
+dEQP-VK.renderpass.attachment.3.280
+dEQP-VK.renderpass.attachment.3.281
+dEQP-VK.renderpass.attachment.3.282
+dEQP-VK.renderpass.attachment.3.283
+dEQP-VK.renderpass.attachment.3.284
+dEQP-VK.renderpass.attachment.3.285
+dEQP-VK.renderpass.attachment.3.286
+dEQP-VK.renderpass.attachment.3.287
+dEQP-VK.renderpass.attachment.3.288
+dEQP-VK.renderpass.attachment.3.289
+dEQP-VK.renderpass.attachment.3.290
+dEQP-VK.renderpass.attachment.3.291
+dEQP-VK.renderpass.attachment.3.292
+dEQP-VK.renderpass.attachment.3.293
+dEQP-VK.renderpass.attachment.3.294
+dEQP-VK.renderpass.attachment.3.295
+dEQP-VK.renderpass.attachment.3.296
+dEQP-VK.renderpass.attachment.3.297
+dEQP-VK.renderpass.attachment.3.298
+dEQP-VK.renderpass.attachment.3.299
+dEQP-VK.renderpass.attachment.3.300
+dEQP-VK.renderpass.attachment.3.301
+dEQP-VK.renderpass.attachment.3.302
+dEQP-VK.renderpass.attachment.3.303
+dEQP-VK.renderpass.attachment.3.304
+dEQP-VK.renderpass.attachment.3.305
+dEQP-VK.renderpass.attachment.3.306
+dEQP-VK.renderpass.attachment.3.307
+dEQP-VK.renderpass.attachment.3.308
+dEQP-VK.renderpass.attachment.3.309
+dEQP-VK.renderpass.attachment.3.310
+dEQP-VK.renderpass.attachment.3.311
+dEQP-VK.renderpass.attachment.3.312
+dEQP-VK.renderpass.attachment.3.313
+dEQP-VK.renderpass.attachment.3.314
+dEQP-VK.renderpass.attachment.3.315
+dEQP-VK.renderpass.attachment.3.316
+dEQP-VK.renderpass.attachment.3.317
+dEQP-VK.renderpass.attachment.3.318
+dEQP-VK.renderpass.attachment.3.319
+dEQP-VK.renderpass.attachment.3.320
+dEQP-VK.renderpass.attachment.3.321
+dEQP-VK.renderpass.attachment.3.322
+dEQP-VK.renderpass.attachment.3.323
+dEQP-VK.renderpass.attachment.3.324
+dEQP-VK.renderpass.attachment.3.325
+dEQP-VK.renderpass.attachment.3.326
+dEQP-VK.renderpass.attachment.3.327
+dEQP-VK.renderpass.attachment.3.328
+dEQP-VK.renderpass.attachment.3.329
+dEQP-VK.renderpass.attachment.3.330
+dEQP-VK.renderpass.attachment.3.331
+dEQP-VK.renderpass.attachment.3.332
+dEQP-VK.renderpass.attachment.3.333
+dEQP-VK.renderpass.attachment.3.334
+dEQP-VK.renderpass.attachment.3.335
+dEQP-VK.renderpass.attachment.3.336
+dEQP-VK.renderpass.attachment.3.337
+dEQP-VK.renderpass.attachment.3.338
+dEQP-VK.renderpass.attachment.3.339
+dEQP-VK.renderpass.attachment.3.340
+dEQP-VK.renderpass.attachment.3.341
+dEQP-VK.renderpass.attachment.3.342
+dEQP-VK.renderpass.attachment.3.343
+dEQP-VK.renderpass.attachment.3.344
+dEQP-VK.renderpass.attachment.3.345
+dEQP-VK.renderpass.attachment.3.346
+dEQP-VK.renderpass.attachment.3.347
+dEQP-VK.renderpass.attachment.3.348
+dEQP-VK.renderpass.attachment.3.349
+dEQP-VK.renderpass.attachment.3.350
+dEQP-VK.renderpass.attachment.3.351
+dEQP-VK.renderpass.attachment.3.352
+dEQP-VK.renderpass.attachment.3.353
+dEQP-VK.renderpass.attachment.3.354
+dEQP-VK.renderpass.attachment.3.355
+dEQP-VK.renderpass.attachment.3.356
+dEQP-VK.renderpass.attachment.3.357
+dEQP-VK.renderpass.attachment.3.358
+dEQP-VK.renderpass.attachment.3.359
+dEQP-VK.renderpass.attachment.3.360
+dEQP-VK.renderpass.attachment.3.361
+dEQP-VK.renderpass.attachment.3.362
+dEQP-VK.renderpass.attachment.3.363
+dEQP-VK.renderpass.attachment.3.364
+dEQP-VK.renderpass.attachment.3.365
+dEQP-VK.renderpass.attachment.3.366
+dEQP-VK.renderpass.attachment.3.367
+dEQP-VK.renderpass.attachment.3.368
+dEQP-VK.renderpass.attachment.3.369
+dEQP-VK.renderpass.attachment.3.370
+dEQP-VK.renderpass.attachment.3.371
+dEQP-VK.renderpass.attachment.3.372
+dEQP-VK.renderpass.attachment.3.373
+dEQP-VK.renderpass.attachment.3.374
+dEQP-VK.renderpass.attachment.3.375
+dEQP-VK.renderpass.attachment.3.376
+dEQP-VK.renderpass.attachment.3.377
+dEQP-VK.renderpass.attachment.3.378
+dEQP-VK.renderpass.attachment.3.379
+dEQP-VK.renderpass.attachment.3.380
+dEQP-VK.renderpass.attachment.3.381
+dEQP-VK.renderpass.attachment.3.382
+dEQP-VK.renderpass.attachment.3.383
+dEQP-VK.renderpass.attachment.3.384
+dEQP-VK.renderpass.attachment.3.385
+dEQP-VK.renderpass.attachment.3.386
+dEQP-VK.renderpass.attachment.3.387
+dEQP-VK.renderpass.attachment.3.388
+dEQP-VK.renderpass.attachment.3.389
+dEQP-VK.renderpass.attachment.3.390
+dEQP-VK.renderpass.attachment.3.391
+dEQP-VK.renderpass.attachment.3.392
+dEQP-VK.renderpass.attachment.3.393
+dEQP-VK.renderpass.attachment.3.394
+dEQP-VK.renderpass.attachment.3.395
+dEQP-VK.renderpass.attachment.3.396
+dEQP-VK.renderpass.attachment.3.397
+dEQP-VK.renderpass.attachment.3.398
+dEQP-VK.renderpass.attachment.3.399
+dEQP-VK.renderpass.attachment.4.400
+dEQP-VK.renderpass.attachment.4.401
+dEQP-VK.renderpass.attachment.4.402
+dEQP-VK.renderpass.attachment.4.403
+dEQP-VK.renderpass.attachment.4.404
+dEQP-VK.renderpass.attachment.4.405
+dEQP-VK.renderpass.attachment.4.406
+dEQP-VK.renderpass.attachment.4.407
+dEQP-VK.renderpass.attachment.4.408
+dEQP-VK.renderpass.attachment.4.409
+dEQP-VK.renderpass.attachment.4.410
+dEQP-VK.renderpass.attachment.4.411
+dEQP-VK.renderpass.attachment.4.412
+dEQP-VK.renderpass.attachment.4.413
+dEQP-VK.renderpass.attachment.4.414
+dEQP-VK.renderpass.attachment.4.415
+dEQP-VK.renderpass.attachment.4.416
+dEQP-VK.renderpass.attachment.4.417
+dEQP-VK.renderpass.attachment.4.418
+dEQP-VK.renderpass.attachment.4.419
+dEQP-VK.renderpass.attachment.4.420
+dEQP-VK.renderpass.attachment.4.421
+dEQP-VK.renderpass.attachment.4.422
+dEQP-VK.renderpass.attachment.4.423
+dEQP-VK.renderpass.attachment.4.424
+dEQP-VK.renderpass.attachment.4.425
+dEQP-VK.renderpass.attachment.4.426
+dEQP-VK.renderpass.attachment.4.427
+dEQP-VK.renderpass.attachment.4.428
+dEQP-VK.renderpass.attachment.4.429
+dEQP-VK.renderpass.attachment.4.430
+dEQP-VK.renderpass.attachment.4.431
+dEQP-VK.renderpass.attachment.4.432
+dEQP-VK.renderpass.attachment.4.433
+dEQP-VK.renderpass.attachment.4.434
+dEQP-VK.renderpass.attachment.4.435
+dEQP-VK.renderpass.attachment.4.436
+dEQP-VK.renderpass.attachment.4.437
+dEQP-VK.renderpass.attachment.4.438
+dEQP-VK.renderpass.attachment.4.439
+dEQP-VK.renderpass.attachment.4.440
+dEQP-VK.renderpass.attachment.4.441
+dEQP-VK.renderpass.attachment.4.442
+dEQP-VK.renderpass.attachment.4.443
+dEQP-VK.renderpass.attachment.4.444
+dEQP-VK.renderpass.attachment.4.445
+dEQP-VK.renderpass.attachment.4.446
+dEQP-VK.renderpass.attachment.4.447
+dEQP-VK.renderpass.attachment.4.448
+dEQP-VK.renderpass.attachment.4.449
+dEQP-VK.renderpass.attachment.4.450
+dEQP-VK.renderpass.attachment.4.451
+dEQP-VK.renderpass.attachment.4.452
+dEQP-VK.renderpass.attachment.4.453
+dEQP-VK.renderpass.attachment.4.454
+dEQP-VK.renderpass.attachment.4.455
+dEQP-VK.renderpass.attachment.4.456
+dEQP-VK.renderpass.attachment.4.457
+dEQP-VK.renderpass.attachment.4.458
+dEQP-VK.renderpass.attachment.4.459
+dEQP-VK.renderpass.attachment.4.460
+dEQP-VK.renderpass.attachment.4.461
+dEQP-VK.renderpass.attachment.4.462
+dEQP-VK.renderpass.attachment.4.463
+dEQP-VK.renderpass.attachment.4.464
+dEQP-VK.renderpass.attachment.4.465
+dEQP-VK.renderpass.attachment.4.466
+dEQP-VK.renderpass.attachment.4.467
+dEQP-VK.renderpass.attachment.4.468
+dEQP-VK.renderpass.attachment.4.469
+dEQP-VK.renderpass.attachment.4.470
+dEQP-VK.renderpass.attachment.4.471
+dEQP-VK.renderpass.attachment.4.472
+dEQP-VK.renderpass.attachment.4.473
+dEQP-VK.renderpass.attachment.4.474
+dEQP-VK.renderpass.attachment.4.475
+dEQP-VK.renderpass.attachment.4.476
+dEQP-VK.renderpass.attachment.4.477
+dEQP-VK.renderpass.attachment.4.478
+dEQP-VK.renderpass.attachment.4.479
+dEQP-VK.renderpass.attachment.4.480
+dEQP-VK.renderpass.attachment.4.481
+dEQP-VK.renderpass.attachment.4.482
+dEQP-VK.renderpass.attachment.4.483
+dEQP-VK.renderpass.attachment.4.484
+dEQP-VK.renderpass.attachment.4.485
+dEQP-VK.renderpass.attachment.4.486
+dEQP-VK.renderpass.attachment.4.487
+dEQP-VK.renderpass.attachment.4.488
+dEQP-VK.renderpass.attachment.4.489
+dEQP-VK.renderpass.attachment.4.490
+dEQP-VK.renderpass.attachment.4.491
+dEQP-VK.renderpass.attachment.4.492
+dEQP-VK.renderpass.attachment.4.493
+dEQP-VK.renderpass.attachment.4.494
+dEQP-VK.renderpass.attachment.4.495
+dEQP-VK.renderpass.attachment.4.496
+dEQP-VK.renderpass.attachment.4.497
+dEQP-VK.renderpass.attachment.4.498
+dEQP-VK.renderpass.attachment.4.499
+dEQP-VK.renderpass.attachment.4.500
+dEQP-VK.renderpass.attachment.4.501
+dEQP-VK.renderpass.attachment.4.502
+dEQP-VK.renderpass.attachment.4.503
+dEQP-VK.renderpass.attachment.4.504
+dEQP-VK.renderpass.attachment.4.505
+dEQP-VK.renderpass.attachment.4.506
+dEQP-VK.renderpass.attachment.4.507
+dEQP-VK.renderpass.attachment.4.508
+dEQP-VK.renderpass.attachment.4.509
+dEQP-VK.renderpass.attachment.4.510
+dEQP-VK.renderpass.attachment.4.511
+dEQP-VK.renderpass.attachment.4.512
+dEQP-VK.renderpass.attachment.4.513
+dEQP-VK.renderpass.attachment.4.514
+dEQP-VK.renderpass.attachment.4.515
+dEQP-VK.renderpass.attachment.4.516
+dEQP-VK.renderpass.attachment.4.517
+dEQP-VK.renderpass.attachment.4.518
+dEQP-VK.renderpass.attachment.4.519
+dEQP-VK.renderpass.attachment.4.520
+dEQP-VK.renderpass.attachment.4.521
+dEQP-VK.renderpass.attachment.4.522
+dEQP-VK.renderpass.attachment.4.523
+dEQP-VK.renderpass.attachment.4.524
+dEQP-VK.renderpass.attachment.4.525
+dEQP-VK.renderpass.attachment.4.526
+dEQP-VK.renderpass.attachment.4.527
+dEQP-VK.renderpass.attachment.4.528
+dEQP-VK.renderpass.attachment.4.529
+dEQP-VK.renderpass.attachment.4.530
+dEQP-VK.renderpass.attachment.4.531
+dEQP-VK.renderpass.attachment.4.532
+dEQP-VK.renderpass.attachment.4.533
+dEQP-VK.renderpass.attachment.4.534
+dEQP-VK.renderpass.attachment.4.535
+dEQP-VK.renderpass.attachment.4.536
+dEQP-VK.renderpass.attachment.4.537
+dEQP-VK.renderpass.attachment.4.538
+dEQP-VK.renderpass.attachment.4.539
+dEQP-VK.renderpass.attachment.4.540
+dEQP-VK.renderpass.attachment.4.541
+dEQP-VK.renderpass.attachment.4.542
+dEQP-VK.renderpass.attachment.4.543
+dEQP-VK.renderpass.attachment.4.544
+dEQP-VK.renderpass.attachment.4.545
+dEQP-VK.renderpass.attachment.4.546
+dEQP-VK.renderpass.attachment.4.547
+dEQP-VK.renderpass.attachment.4.548
+dEQP-VK.renderpass.attachment.4.549
+dEQP-VK.renderpass.attachment.4.550
+dEQP-VK.renderpass.attachment.4.551
+dEQP-VK.renderpass.attachment.4.552
+dEQP-VK.renderpass.attachment.4.553
+dEQP-VK.renderpass.attachment.4.554
+dEQP-VK.renderpass.attachment.4.555
+dEQP-VK.renderpass.attachment.4.556
+dEQP-VK.renderpass.attachment.4.557
+dEQP-VK.renderpass.attachment.4.558
+dEQP-VK.renderpass.attachment.4.559
+dEQP-VK.renderpass.attachment.4.560
+dEQP-VK.renderpass.attachment.4.561
+dEQP-VK.renderpass.attachment.4.562
+dEQP-VK.renderpass.attachment.4.563
+dEQP-VK.renderpass.attachment.4.564
+dEQP-VK.renderpass.attachment.4.565
+dEQP-VK.renderpass.attachment.4.566
+dEQP-VK.renderpass.attachment.4.567
+dEQP-VK.renderpass.attachment.4.568
+dEQP-VK.renderpass.attachment.4.569
+dEQP-VK.renderpass.attachment.4.570
+dEQP-VK.renderpass.attachment.4.571
+dEQP-VK.renderpass.attachment.4.572
+dEQP-VK.renderpass.attachment.4.573
+dEQP-VK.renderpass.attachment.4.574
+dEQP-VK.renderpass.attachment.4.575
+dEQP-VK.renderpass.attachment.4.576
+dEQP-VK.renderpass.attachment.4.577
+dEQP-VK.renderpass.attachment.4.578
+dEQP-VK.renderpass.attachment.4.579
+dEQP-VK.renderpass.attachment.4.580
+dEQP-VK.renderpass.attachment.4.581
+dEQP-VK.renderpass.attachment.4.582
+dEQP-VK.renderpass.attachment.4.583
+dEQP-VK.renderpass.attachment.4.584
+dEQP-VK.renderpass.attachment.4.585
+dEQP-VK.renderpass.attachment.4.586
+dEQP-VK.renderpass.attachment.4.587
+dEQP-VK.renderpass.attachment.4.588
+dEQP-VK.renderpass.attachment.4.589
+dEQP-VK.renderpass.attachment.4.590
+dEQP-VK.renderpass.attachment.4.591
+dEQP-VK.renderpass.attachment.4.592
+dEQP-VK.renderpass.attachment.4.593
+dEQP-VK.renderpass.attachment.4.594
+dEQP-VK.renderpass.attachment.4.595
+dEQP-VK.renderpass.attachment.4.596
+dEQP-VK.renderpass.attachment.4.597
+dEQP-VK.renderpass.attachment.4.598
+dEQP-VK.renderpass.attachment.4.599
+dEQP-VK.renderpass.attachment.8.600
+dEQP-VK.renderpass.attachment.8.601
+dEQP-VK.renderpass.attachment.8.602
+dEQP-VK.renderpass.attachment.8.603
+dEQP-VK.renderpass.attachment.8.604
+dEQP-VK.renderpass.attachment.8.605
+dEQP-VK.renderpass.attachment.8.606
+dEQP-VK.renderpass.attachment.8.607
+dEQP-VK.renderpass.attachment.8.608
+dEQP-VK.renderpass.attachment.8.609
+dEQP-VK.renderpass.attachment.8.610
+dEQP-VK.renderpass.attachment.8.611
+dEQP-VK.renderpass.attachment.8.612
+dEQP-VK.renderpass.attachment.8.613
+dEQP-VK.renderpass.attachment.8.614
+dEQP-VK.renderpass.attachment.8.615
+dEQP-VK.renderpass.attachment.8.616
+dEQP-VK.renderpass.attachment.8.617
+dEQP-VK.renderpass.attachment.8.618
+dEQP-VK.renderpass.attachment.8.619
+dEQP-VK.renderpass.attachment.8.620
+dEQP-VK.renderpass.attachment.8.621
+dEQP-VK.renderpass.attachment.8.622
+dEQP-VK.renderpass.attachment.8.623
+dEQP-VK.renderpass.attachment.8.624
+dEQP-VK.renderpass.attachment.8.625
+dEQP-VK.renderpass.attachment.8.626
+dEQP-VK.renderpass.attachment.8.627
+dEQP-VK.renderpass.attachment.8.628
+dEQP-VK.renderpass.attachment.8.629
+dEQP-VK.renderpass.attachment.8.630
+dEQP-VK.renderpass.attachment.8.631
+dEQP-VK.renderpass.attachment.8.632
+dEQP-VK.renderpass.attachment.8.633
+dEQP-VK.renderpass.attachment.8.634
+dEQP-VK.renderpass.attachment.8.635
+dEQP-VK.renderpass.attachment.8.636
+dEQP-VK.renderpass.attachment.8.637
+dEQP-VK.renderpass.attachment.8.638
+dEQP-VK.renderpass.attachment.8.639
+dEQP-VK.renderpass.attachment.8.640
+dEQP-VK.renderpass.attachment.8.641
+dEQP-VK.renderpass.attachment.8.642
+dEQP-VK.renderpass.attachment.8.643
+dEQP-VK.renderpass.attachment.8.644
+dEQP-VK.renderpass.attachment.8.645
+dEQP-VK.renderpass.attachment.8.646
+dEQP-VK.renderpass.attachment.8.647
+dEQP-VK.renderpass.attachment.8.648
+dEQP-VK.renderpass.attachment.8.649
+dEQP-VK.renderpass.attachment.8.650
+dEQP-VK.renderpass.attachment.8.651
+dEQP-VK.renderpass.attachment.8.652
+dEQP-VK.renderpass.attachment.8.653
+dEQP-VK.renderpass.attachment.8.654
+dEQP-VK.renderpass.attachment.8.655
+dEQP-VK.renderpass.attachment.8.656
+dEQP-VK.renderpass.attachment.8.657
+dEQP-VK.renderpass.attachment.8.658
+dEQP-VK.renderpass.attachment.8.659
+dEQP-VK.renderpass.attachment.8.660
+dEQP-VK.renderpass.attachment.8.661
+dEQP-VK.renderpass.attachment.8.662
+dEQP-VK.renderpass.attachment.8.663
+dEQP-VK.renderpass.attachment.8.664
+dEQP-VK.renderpass.attachment.8.665
+dEQP-VK.renderpass.attachment.8.666
+dEQP-VK.renderpass.attachment.8.667
+dEQP-VK.renderpass.attachment.8.668
+dEQP-VK.renderpass.attachment.8.669
+dEQP-VK.renderpass.attachment.8.670
+dEQP-VK.renderpass.attachment.8.671
+dEQP-VK.renderpass.attachment.8.672
+dEQP-VK.renderpass.attachment.8.673
+dEQP-VK.renderpass.attachment.8.674
+dEQP-VK.renderpass.attachment.8.675
+dEQP-VK.renderpass.attachment.8.676
+dEQP-VK.renderpass.attachment.8.677
+dEQP-VK.renderpass.attachment.8.678
+dEQP-VK.renderpass.attachment.8.679
+dEQP-VK.renderpass.attachment.8.680
+dEQP-VK.renderpass.attachment.8.681
+dEQP-VK.renderpass.attachment.8.682
+dEQP-VK.renderpass.attachment.8.683
+dEQP-VK.renderpass.attachment.8.684
+dEQP-VK.renderpass.attachment.8.685
+dEQP-VK.renderpass.attachment.8.686
+dEQP-VK.renderpass.attachment.8.687
+dEQP-VK.renderpass.attachment.8.688
+dEQP-VK.renderpass.attachment.8.689
+dEQP-VK.renderpass.attachment.8.690
+dEQP-VK.renderpass.attachment.8.691
+dEQP-VK.renderpass.attachment.8.692
+dEQP-VK.renderpass.attachment.8.693
+dEQP-VK.renderpass.attachment.8.694
+dEQP-VK.renderpass.attachment.8.695
+dEQP-VK.renderpass.attachment.8.696
+dEQP-VK.renderpass.attachment.8.697
+dEQP-VK.renderpass.attachment.8.698
+dEQP-VK.renderpass.attachment.8.699
+dEQP-VK.renderpass.attachment.8.700
+dEQP-VK.renderpass.attachment.8.701
+dEQP-VK.renderpass.attachment.8.702
+dEQP-VK.renderpass.attachment.8.703
+dEQP-VK.renderpass.attachment.8.704
+dEQP-VK.renderpass.attachment.8.705
+dEQP-VK.renderpass.attachment.8.706
+dEQP-VK.renderpass.attachment.8.707
+dEQP-VK.renderpass.attachment.8.708
+dEQP-VK.renderpass.attachment.8.709
+dEQP-VK.renderpass.attachment.8.710
+dEQP-VK.renderpass.attachment.8.711
+dEQP-VK.renderpass.attachment.8.712
+dEQP-VK.renderpass.attachment.8.713
+dEQP-VK.renderpass.attachment.8.714
+dEQP-VK.renderpass.attachment.8.715
+dEQP-VK.renderpass.attachment.8.716
+dEQP-VK.renderpass.attachment.8.717
+dEQP-VK.renderpass.attachment.8.718
+dEQP-VK.renderpass.attachment.8.719
+dEQP-VK.renderpass.attachment.8.720
+dEQP-VK.renderpass.attachment.8.721
+dEQP-VK.renderpass.attachment.8.722
+dEQP-VK.renderpass.attachment.8.723
+dEQP-VK.renderpass.attachment.8.724
+dEQP-VK.renderpass.attachment.8.725
+dEQP-VK.renderpass.attachment.8.726
+dEQP-VK.renderpass.attachment.8.727
+dEQP-VK.renderpass.attachment.8.728
+dEQP-VK.renderpass.attachment.8.729
+dEQP-VK.renderpass.attachment.8.730
+dEQP-VK.renderpass.attachment.8.731
+dEQP-VK.renderpass.attachment.8.732
+dEQP-VK.renderpass.attachment.8.733
+dEQP-VK.renderpass.attachment.8.734
+dEQP-VK.renderpass.attachment.8.735
+dEQP-VK.renderpass.attachment.8.736
+dEQP-VK.renderpass.attachment.8.737
+dEQP-VK.renderpass.attachment.8.738
+dEQP-VK.renderpass.attachment.8.739
+dEQP-VK.renderpass.attachment.8.740
+dEQP-VK.renderpass.attachment.8.741
+dEQP-VK.renderpass.attachment.8.742
+dEQP-VK.renderpass.attachment.8.743
+dEQP-VK.renderpass.attachment.8.744
+dEQP-VK.renderpass.attachment.8.745
+dEQP-VK.renderpass.attachment.8.746
+dEQP-VK.renderpass.attachment.8.747
+dEQP-VK.renderpass.attachment.8.748
+dEQP-VK.renderpass.attachment.8.749
+dEQP-VK.renderpass.attachment.8.750
+dEQP-VK.renderpass.attachment.8.751
+dEQP-VK.renderpass.attachment.8.752
+dEQP-VK.renderpass.attachment.8.753
+dEQP-VK.renderpass.attachment.8.754
+dEQP-VK.renderpass.attachment.8.755
+dEQP-VK.renderpass.attachment.8.756
+dEQP-VK.renderpass.attachment.8.757
+dEQP-VK.renderpass.attachment.8.758
+dEQP-VK.renderpass.attachment.8.759
+dEQP-VK.renderpass.attachment.8.760
+dEQP-VK.renderpass.attachment.8.761
+dEQP-VK.renderpass.attachment.8.762
+dEQP-VK.renderpass.attachment.8.763
+dEQP-VK.renderpass.attachment.8.764
+dEQP-VK.renderpass.attachment.8.765
+dEQP-VK.renderpass.attachment.8.766
+dEQP-VK.renderpass.attachment.8.767
+dEQP-VK.renderpass.attachment.8.768
+dEQP-VK.renderpass.attachment.8.769
+dEQP-VK.renderpass.attachment.8.770
+dEQP-VK.renderpass.attachment.8.771
+dEQP-VK.renderpass.attachment.8.772
+dEQP-VK.renderpass.attachment.8.773
+dEQP-VK.renderpass.attachment.8.774
+dEQP-VK.renderpass.attachment.8.775
+dEQP-VK.renderpass.attachment.8.776
+dEQP-VK.renderpass.attachment.8.777
+dEQP-VK.renderpass.attachment.8.778
+dEQP-VK.renderpass.attachment.8.779
+dEQP-VK.renderpass.attachment.8.780
+dEQP-VK.renderpass.attachment.8.781
+dEQP-VK.renderpass.attachment.8.782
+dEQP-VK.renderpass.attachment.8.783
+dEQP-VK.renderpass.attachment.8.784
+dEQP-VK.renderpass.attachment.8.785
+dEQP-VK.renderpass.attachment.8.786
+dEQP-VK.renderpass.attachment.8.787
+dEQP-VK.renderpass.attachment.8.788
+dEQP-VK.renderpass.attachment.8.789
+dEQP-VK.renderpass.attachment.8.790
+dEQP-VK.renderpass.attachment.8.791
+dEQP-VK.renderpass.attachment.8.792
+dEQP-VK.renderpass.attachment.8.793
+dEQP-VK.renderpass.attachment.8.794
+dEQP-VK.renderpass.attachment.8.795
+dEQP-VK.renderpass.attachment.8.796
+dEQP-VK.renderpass.attachment.8.797
+dEQP-VK.renderpass.attachment.8.798
+dEQP-VK.renderpass.attachment.8.799
+dEQP-VK.renderpass.attachment_allocation.grow.0
+dEQP-VK.renderpass.attachment_allocation.grow.1
+dEQP-VK.renderpass.attachment_allocation.grow.2
+dEQP-VK.renderpass.attachment_allocation.grow.3
+dEQP-VK.renderpass.attachment_allocation.grow.4
+dEQP-VK.renderpass.attachment_allocation.grow.5
+dEQP-VK.renderpass.attachment_allocation.grow.6
+dEQP-VK.renderpass.attachment_allocation.grow.7
+dEQP-VK.renderpass.attachment_allocation.grow.8
+dEQP-VK.renderpass.attachment_allocation.grow.9
+dEQP-VK.renderpass.attachment_allocation.grow.10
+dEQP-VK.renderpass.attachment_allocation.grow.11
+dEQP-VK.renderpass.attachment_allocation.grow.12
+dEQP-VK.renderpass.attachment_allocation.grow.13
+dEQP-VK.renderpass.attachment_allocation.grow.14
+dEQP-VK.renderpass.attachment_allocation.grow.15
+dEQP-VK.renderpass.attachment_allocation.grow.16
+dEQP-VK.renderpass.attachment_allocation.grow.17
+dEQP-VK.renderpass.attachment_allocation.grow.18
+dEQP-VK.renderpass.attachment_allocation.grow.19
+dEQP-VK.renderpass.attachment_allocation.grow.20
+dEQP-VK.renderpass.attachment_allocation.grow.21
+dEQP-VK.renderpass.attachment_allocation.grow.22
+dEQP-VK.renderpass.attachment_allocation.grow.23
+dEQP-VK.renderpass.attachment_allocation.grow.24
+dEQP-VK.renderpass.attachment_allocation.grow.25
+dEQP-VK.renderpass.attachment_allocation.grow.26
+dEQP-VK.renderpass.attachment_allocation.grow.27
+dEQP-VK.renderpass.attachment_allocation.grow.28
+dEQP-VK.renderpass.attachment_allocation.grow.29
+dEQP-VK.renderpass.attachment_allocation.grow.30
+dEQP-VK.renderpass.attachment_allocation.grow.31
+dEQP-VK.renderpass.attachment_allocation.grow.32
+dEQP-VK.renderpass.attachment_allocation.grow.33
+dEQP-VK.renderpass.attachment_allocation.grow.34
+dEQP-VK.renderpass.attachment_allocation.grow.35
+dEQP-VK.renderpass.attachment_allocation.grow.36
+dEQP-VK.renderpass.attachment_allocation.grow.37
+dEQP-VK.renderpass.attachment_allocation.grow.38
+dEQP-VK.renderpass.attachment_allocation.grow.39
+dEQP-VK.renderpass.attachment_allocation.grow.40
+dEQP-VK.renderpass.attachment_allocation.grow.41
+dEQP-VK.renderpass.attachment_allocation.grow.42
+dEQP-VK.renderpass.attachment_allocation.grow.43
+dEQP-VK.renderpass.attachment_allocation.grow.44
+dEQP-VK.renderpass.attachment_allocation.grow.45
+dEQP-VK.renderpass.attachment_allocation.grow.46
+dEQP-VK.renderpass.attachment_allocation.grow.47
+dEQP-VK.renderpass.attachment_allocation.grow.48
+dEQP-VK.renderpass.attachment_allocation.grow.49
+dEQP-VK.renderpass.attachment_allocation.grow.50
+dEQP-VK.renderpass.attachment_allocation.grow.51
+dEQP-VK.renderpass.attachment_allocation.grow.52
+dEQP-VK.renderpass.attachment_allocation.grow.53
+dEQP-VK.renderpass.attachment_allocation.grow.54
+dEQP-VK.renderpass.attachment_allocation.grow.55
+dEQP-VK.renderpass.attachment_allocation.grow.56
+dEQP-VK.renderpass.attachment_allocation.grow.57
+dEQP-VK.renderpass.attachment_allocation.grow.58
+dEQP-VK.renderpass.attachment_allocation.grow.59
+dEQP-VK.renderpass.attachment_allocation.grow.60
+dEQP-VK.renderpass.attachment_allocation.grow.61
+dEQP-VK.renderpass.attachment_allocation.grow.62
+dEQP-VK.renderpass.attachment_allocation.grow.63
+dEQP-VK.renderpass.attachment_allocation.grow.64
+dEQP-VK.renderpass.attachment_allocation.grow.65
+dEQP-VK.renderpass.attachment_allocation.grow.66
+dEQP-VK.renderpass.attachment_allocation.grow.67
+dEQP-VK.renderpass.attachment_allocation.grow.68
+dEQP-VK.renderpass.attachment_allocation.grow.69
+dEQP-VK.renderpass.attachment_allocation.grow.70
+dEQP-VK.renderpass.attachment_allocation.grow.71
+dEQP-VK.renderpass.attachment_allocation.grow.72
+dEQP-VK.renderpass.attachment_allocation.grow.73
+dEQP-VK.renderpass.attachment_allocation.grow.74
+dEQP-VK.renderpass.attachment_allocation.grow.75
+dEQP-VK.renderpass.attachment_allocation.grow.76
+dEQP-VK.renderpass.attachment_allocation.grow.77
+dEQP-VK.renderpass.attachment_allocation.grow.78
+dEQP-VK.renderpass.attachment_allocation.grow.79
+dEQP-VK.renderpass.attachment_allocation.grow.80
+dEQP-VK.renderpass.attachment_allocation.grow.81
+dEQP-VK.renderpass.attachment_allocation.grow.82
+dEQP-VK.renderpass.attachment_allocation.grow.83
+dEQP-VK.renderpass.attachment_allocation.grow.84
+dEQP-VK.renderpass.attachment_allocation.grow.85
+dEQP-VK.renderpass.attachment_allocation.grow.86
+dEQP-VK.renderpass.attachment_allocation.grow.87
+dEQP-VK.renderpass.attachment_allocation.grow.88
+dEQP-VK.renderpass.attachment_allocation.grow.89
+dEQP-VK.renderpass.attachment_allocation.grow.90
+dEQP-VK.renderpass.attachment_allocation.grow.91
+dEQP-VK.renderpass.attachment_allocation.grow.92
+dEQP-VK.renderpass.attachment_allocation.grow.93
+dEQP-VK.renderpass.attachment_allocation.grow.94
+dEQP-VK.renderpass.attachment_allocation.grow.95
+dEQP-VK.renderpass.attachment_allocation.grow.96
+dEQP-VK.renderpass.attachment_allocation.grow.97
+dEQP-VK.renderpass.attachment_allocation.grow.98
+dEQP-VK.renderpass.attachment_allocation.grow.99
+dEQP-VK.renderpass.attachment_allocation.shrink.0
+dEQP-VK.renderpass.attachment_allocation.shrink.1
+dEQP-VK.renderpass.attachment_allocation.shrink.2
+dEQP-VK.renderpass.attachment_allocation.shrink.3
+dEQP-VK.renderpass.attachment_allocation.shrink.4
+dEQP-VK.renderpass.attachment_allocation.shrink.5
+dEQP-VK.renderpass.attachment_allocation.shrink.6
+dEQP-VK.renderpass.attachment_allocation.shrink.7
+dEQP-VK.renderpass.attachment_allocation.shrink.8
+dEQP-VK.renderpass.attachment_allocation.shrink.9
+dEQP-VK.renderpass.attachment_allocation.shrink.10
+dEQP-VK.renderpass.attachment_allocation.shrink.11
+dEQP-VK.renderpass.attachment_allocation.shrink.12
+dEQP-VK.renderpass.attachment_allocation.shrink.13
+dEQP-VK.renderpass.attachment_allocation.shrink.14
+dEQP-VK.renderpass.attachment_allocation.shrink.15
+dEQP-VK.renderpass.attachment_allocation.shrink.16
+dEQP-VK.renderpass.attachment_allocation.shrink.17
+dEQP-VK.renderpass.attachment_allocation.shrink.18
+dEQP-VK.renderpass.attachment_allocation.shrink.19
+dEQP-VK.renderpass.attachment_allocation.shrink.20
+dEQP-VK.renderpass.attachment_allocation.shrink.21
+dEQP-VK.renderpass.attachment_allocation.shrink.22
+dEQP-VK.renderpass.attachment_allocation.shrink.23
+dEQP-VK.renderpass.attachment_allocation.shrink.24
+dEQP-VK.renderpass.attachment_allocation.shrink.25
+dEQP-VK.renderpass.attachment_allocation.shrink.26
+dEQP-VK.renderpass.attachment_allocation.shrink.27
+dEQP-VK.renderpass.attachment_allocation.shrink.28
+dEQP-VK.renderpass.attachment_allocation.shrink.29
+dEQP-VK.renderpass.attachment_allocation.shrink.30
+dEQP-VK.renderpass.attachment_allocation.shrink.31
+dEQP-VK.renderpass.attachment_allocation.shrink.32
+dEQP-VK.renderpass.attachment_allocation.shrink.33
+dEQP-VK.renderpass.attachment_allocation.shrink.34
+dEQP-VK.renderpass.attachment_allocation.shrink.35
+dEQP-VK.renderpass.attachment_allocation.shrink.36
+dEQP-VK.renderpass.attachment_allocation.shrink.37
+dEQP-VK.renderpass.attachment_allocation.shrink.38
+dEQP-VK.renderpass.attachment_allocation.shrink.39
+dEQP-VK.renderpass.attachment_allocation.shrink.40
+dEQP-VK.renderpass.attachment_allocation.shrink.41
+dEQP-VK.renderpass.attachment_allocation.shrink.42
+dEQP-VK.renderpass.attachment_allocation.shrink.43
+dEQP-VK.renderpass.attachment_allocation.shrink.44
+dEQP-VK.renderpass.attachment_allocation.shrink.45
+dEQP-VK.renderpass.attachment_allocation.shrink.46
+dEQP-VK.renderpass.attachment_allocation.shrink.47
+dEQP-VK.renderpass.attachment_allocation.shrink.48
+dEQP-VK.renderpass.attachment_allocation.shrink.49
+dEQP-VK.renderpass.attachment_allocation.shrink.50
+dEQP-VK.renderpass.attachment_allocation.shrink.51
+dEQP-VK.renderpass.attachment_allocation.shrink.52
+dEQP-VK.renderpass.attachment_allocation.shrink.53
+dEQP-VK.renderpass.attachment_allocation.shrink.54
+dEQP-VK.renderpass.attachment_allocation.shrink.55
+dEQP-VK.renderpass.attachment_allocation.shrink.56
+dEQP-VK.renderpass.attachment_allocation.shrink.57
+dEQP-VK.renderpass.attachment_allocation.shrink.58
+dEQP-VK.renderpass.attachment_allocation.shrink.59
+dEQP-VK.renderpass.attachment_allocation.shrink.60
+dEQP-VK.renderpass.attachment_allocation.shrink.61
+dEQP-VK.renderpass.attachment_allocation.shrink.62
+dEQP-VK.renderpass.attachment_allocation.shrink.63
+dEQP-VK.renderpass.attachment_allocation.shrink.64
+dEQP-VK.renderpass.attachment_allocation.shrink.65
+dEQP-VK.renderpass.attachment_allocation.shrink.66
+dEQP-VK.renderpass.attachment_allocation.shrink.67
+dEQP-VK.renderpass.attachment_allocation.shrink.68
+dEQP-VK.renderpass.attachment_allocation.shrink.69
+dEQP-VK.renderpass.attachment_allocation.shrink.70
+dEQP-VK.renderpass.attachment_allocation.shrink.71
+dEQP-VK.renderpass.attachment_allocation.shrink.72
+dEQP-VK.renderpass.attachment_allocation.shrink.73
+dEQP-VK.renderpass.attachment_allocation.shrink.74
+dEQP-VK.renderpass.attachment_allocation.shrink.75
+dEQP-VK.renderpass.attachment_allocation.shrink.76
+dEQP-VK.renderpass.attachment_allocation.shrink.77
+dEQP-VK.renderpass.attachment_allocation.shrink.78
+dEQP-VK.renderpass.attachment_allocation.shrink.79
+dEQP-VK.renderpass.attachment_allocation.shrink.80
+dEQP-VK.renderpass.attachment_allocation.shrink.81
+dEQP-VK.renderpass.attachment_allocation.shrink.82
+dEQP-VK.renderpass.attachment_allocation.shrink.83
+dEQP-VK.renderpass.attachment_allocation.shrink.84
+dEQP-VK.renderpass.attachment_allocation.shrink.85
+dEQP-VK.renderpass.attachment_allocation.shrink.86
+dEQP-VK.renderpass.attachment_allocation.shrink.87
+dEQP-VK.renderpass.attachment_allocation.shrink.88
+dEQP-VK.renderpass.attachment_allocation.shrink.89
+dEQP-VK.renderpass.attachment_allocation.shrink.90
+dEQP-VK.renderpass.attachment_allocation.shrink.91
+dEQP-VK.renderpass.attachment_allocation.shrink.92
+dEQP-VK.renderpass.attachment_allocation.shrink.93
+dEQP-VK.renderpass.attachment_allocation.shrink.94
+dEQP-VK.renderpass.attachment_allocation.shrink.95
+dEQP-VK.renderpass.attachment_allocation.shrink.96
+dEQP-VK.renderpass.attachment_allocation.shrink.97
+dEQP-VK.renderpass.attachment_allocation.shrink.98
+dEQP-VK.renderpass.attachment_allocation.shrink.99
+dEQP-VK.renderpass.attachment_allocation.roll.0
+dEQP-VK.renderpass.attachment_allocation.roll.1
+dEQP-VK.renderpass.attachment_allocation.roll.2
+dEQP-VK.renderpass.attachment_allocation.roll.3
+dEQP-VK.renderpass.attachment_allocation.roll.4
+dEQP-VK.renderpass.attachment_allocation.roll.5
+dEQP-VK.renderpass.attachment_allocation.roll.6
+dEQP-VK.renderpass.attachment_allocation.roll.7
+dEQP-VK.renderpass.attachment_allocation.roll.8
+dEQP-VK.renderpass.attachment_allocation.roll.9
+dEQP-VK.renderpass.attachment_allocation.roll.10
+dEQP-VK.renderpass.attachment_allocation.roll.11
+dEQP-VK.renderpass.attachment_allocation.roll.12
+dEQP-VK.renderpass.attachment_allocation.roll.13
+dEQP-VK.renderpass.attachment_allocation.roll.14
+dEQP-VK.renderpass.attachment_allocation.roll.15
+dEQP-VK.renderpass.attachment_allocation.roll.16
+dEQP-VK.renderpass.attachment_allocation.roll.17
+dEQP-VK.renderpass.attachment_allocation.roll.18
+dEQP-VK.renderpass.attachment_allocation.roll.19
+dEQP-VK.renderpass.attachment_allocation.roll.20
+dEQP-VK.renderpass.attachment_allocation.roll.21
+dEQP-VK.renderpass.attachment_allocation.roll.22
+dEQP-VK.renderpass.attachment_allocation.roll.23
+dEQP-VK.renderpass.attachment_allocation.roll.24
+dEQP-VK.renderpass.attachment_allocation.roll.25
+dEQP-VK.renderpass.attachment_allocation.roll.26
+dEQP-VK.renderpass.attachment_allocation.roll.27
+dEQP-VK.renderpass.attachment_allocation.roll.28
+dEQP-VK.renderpass.attachment_allocation.roll.29
+dEQP-VK.renderpass.attachment_allocation.roll.30
+dEQP-VK.renderpass.attachment_allocation.roll.31
+dEQP-VK.renderpass.attachment_allocation.roll.32
+dEQP-VK.renderpass.attachment_allocation.roll.33
+dEQP-VK.renderpass.attachment_allocation.roll.34
+dEQP-VK.renderpass.attachment_allocation.roll.35
+dEQP-VK.renderpass.attachment_allocation.roll.36
+dEQP-VK.renderpass.attachment_allocation.roll.37
+dEQP-VK.renderpass.attachment_allocation.roll.38
+dEQP-VK.renderpass.attachment_allocation.roll.39
+dEQP-VK.renderpass.attachment_allocation.roll.40
+dEQP-VK.renderpass.attachment_allocation.roll.41
+dEQP-VK.renderpass.attachment_allocation.roll.42
+dEQP-VK.renderpass.attachment_allocation.roll.43
+dEQP-VK.renderpass.attachment_allocation.roll.44
+dEQP-VK.renderpass.attachment_allocation.roll.45
+dEQP-VK.renderpass.attachment_allocation.roll.46
+dEQP-VK.renderpass.attachment_allocation.roll.47
+dEQP-VK.renderpass.attachment_allocation.roll.48
+dEQP-VK.renderpass.attachment_allocation.roll.49
+dEQP-VK.renderpass.attachment_allocation.roll.50
+dEQP-VK.renderpass.attachment_allocation.roll.51
+dEQP-VK.renderpass.attachment_allocation.roll.52
+dEQP-VK.renderpass.attachment_allocation.roll.53
+dEQP-VK.renderpass.attachment_allocation.roll.54
+dEQP-VK.renderpass.attachment_allocation.roll.55
+dEQP-VK.renderpass.attachment_allocation.roll.56
+dEQP-VK.renderpass.attachment_allocation.roll.57
+dEQP-VK.renderpass.attachment_allocation.roll.58
+dEQP-VK.renderpass.attachment_allocation.roll.59
+dEQP-VK.renderpass.attachment_allocation.roll.60
+dEQP-VK.renderpass.attachment_allocation.roll.61
+dEQP-VK.renderpass.attachment_allocation.roll.62
+dEQP-VK.renderpass.attachment_allocation.roll.63
+dEQP-VK.renderpass.attachment_allocation.roll.64
+dEQP-VK.renderpass.attachment_allocation.roll.65
+dEQP-VK.renderpass.attachment_allocation.roll.66
+dEQP-VK.renderpass.attachment_allocation.roll.67
+dEQP-VK.renderpass.attachment_allocation.roll.68
+dEQP-VK.renderpass.attachment_allocation.roll.69
+dEQP-VK.renderpass.attachment_allocation.roll.70
+dEQP-VK.renderpass.attachment_allocation.roll.71
+dEQP-VK.renderpass.attachment_allocation.roll.72
+dEQP-VK.renderpass.attachment_allocation.roll.73
+dEQP-VK.renderpass.attachment_allocation.roll.74
+dEQP-VK.renderpass.attachment_allocation.roll.75
+dEQP-VK.renderpass.attachment_allocation.roll.76
+dEQP-VK.renderpass.attachment_allocation.roll.77
+dEQP-VK.renderpass.attachment_allocation.roll.78
+dEQP-VK.renderpass.attachment_allocation.roll.79
+dEQP-VK.renderpass.attachment_allocation.roll.80
+dEQP-VK.renderpass.attachment_allocation.roll.81
+dEQP-VK.renderpass.attachment_allocation.roll.82
+dEQP-VK.renderpass.attachment_allocation.roll.83
+dEQP-VK.renderpass.attachment_allocation.roll.84
+dEQP-VK.renderpass.attachment_allocation.roll.85
+dEQP-VK.renderpass.attachment_allocation.roll.86
+dEQP-VK.renderpass.attachment_allocation.roll.87
+dEQP-VK.renderpass.attachment_allocation.roll.88
+dEQP-VK.renderpass.attachment_allocation.roll.89
+dEQP-VK.renderpass.attachment_allocation.roll.90
+dEQP-VK.renderpass.attachment_allocation.roll.91
+dEQP-VK.renderpass.attachment_allocation.roll.92
+dEQP-VK.renderpass.attachment_allocation.roll.93
+dEQP-VK.renderpass.attachment_allocation.roll.94
+dEQP-VK.renderpass.attachment_allocation.roll.95
+dEQP-VK.renderpass.attachment_allocation.roll.96
+dEQP-VK.renderpass.attachment_allocation.roll.97
+dEQP-VK.renderpass.attachment_allocation.roll.98
+dEQP-VK.renderpass.attachment_allocation.roll.99
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.0
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.1
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.2
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.3
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.4
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.5
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.6
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.7
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.8
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.9
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.10
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.11
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.12
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.13
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.14
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.15
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.16
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.17
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.18
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.19
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.20
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.21
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.22
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.23
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.24
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.25
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.26
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.27
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.28
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.29
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.30
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.31
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.32
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.33
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.34
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.35
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.36
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.37
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.38
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.39
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.40
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.41
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.42
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.43
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.44
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.45
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.46
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.47
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.48
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.49
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.50
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.51
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.52
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.53
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.54
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.55
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.56
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.57
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.58
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.59
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.60
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.61
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.62
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.63
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.64
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.65
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.66
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.67
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.68
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.69
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.70
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.71
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.72
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.73
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.74
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.75
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.76
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.77
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.78
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.79
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.80
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.81
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.82
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.83
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.84
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.85
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.86
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.87
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.88
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.89
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.90
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.91
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.92
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.93
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.94
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.95
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.96
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.97
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.98
+dEQP-VK.renderpass.attachment_allocation.grow_shrink.99
+dEQP-VK.memory.allocation.basic.size_64.forward.count_1
+dEQP-VK.memory.allocation.basic.size_64.forward.count_10
+dEQP-VK.memory.allocation.basic.size_64.forward.count_100
+dEQP-VK.memory.allocation.basic.size_64.forward.count_1000
+dEQP-VK.memory.allocation.basic.size_64.reverse.count_1
+dEQP-VK.memory.allocation.basic.size_64.reverse.count_10
+dEQP-VK.memory.allocation.basic.size_64.reverse.count_100
+dEQP-VK.memory.allocation.basic.size_64.reverse.count_1000
+dEQP-VK.memory.allocation.basic.size_64.mixed.count_1
+dEQP-VK.memory.allocation.basic.size_64.mixed.count_10
+dEQP-VK.memory.allocation.basic.size_64.mixed.count_100
+dEQP-VK.memory.allocation.basic.size_64.mixed.count_1000
+dEQP-VK.memory.allocation.basic.size_128.forward.count_1
+dEQP-VK.memory.allocation.basic.size_128.forward.count_10
+dEQP-VK.memory.allocation.basic.size_128.forward.count_100
+dEQP-VK.memory.allocation.basic.size_128.forward.count_1000
+dEQP-VK.memory.allocation.basic.size_128.reverse.count_1
+dEQP-VK.memory.allocation.basic.size_128.reverse.count_10
+dEQP-VK.memory.allocation.basic.size_128.reverse.count_100
+dEQP-VK.memory.allocation.basic.size_128.reverse.count_1000
+dEQP-VK.memory.allocation.basic.size_128.mixed.count_1
+dEQP-VK.memory.allocation.basic.size_128.mixed.count_10
+dEQP-VK.memory.allocation.basic.size_128.mixed.count_100
+dEQP-VK.memory.allocation.basic.size_128.mixed.count_1000
+dEQP-VK.memory.allocation.basic.size_256.forward.count_1
+dEQP-VK.memory.allocation.basic.size_256.forward.count_10
+dEQP-VK.memory.allocation.basic.size_256.forward.count_100
+dEQP-VK.memory.allocation.basic.size_256.forward.count_1000
+dEQP-VK.memory.allocation.basic.size_256.reverse.count_1
+dEQP-VK.memory.allocation.basic.size_256.reverse.count_10
+dEQP-VK.memory.allocation.basic.size_256.reverse.count_100
+dEQP-VK.memory.allocation.basic.size_256.reverse.count_1000
+dEQP-VK.memory.allocation.basic.size_256.mixed.count_1
+dEQP-VK.memory.allocation.basic.size_256.mixed.count_10
+dEQP-VK.memory.allocation.basic.size_256.mixed.count_100
+dEQP-VK.memory.allocation.basic.size_256.mixed.count_1000
+dEQP-VK.memory.allocation.basic.size_512.forward.count_1
+dEQP-VK.memory.allocation.basic.size_512.forward.count_10
+dEQP-VK.memory.allocation.basic.size_512.forward.count_100
+dEQP-VK.memory.allocation.basic.size_512.forward.count_1000
+dEQP-VK.memory.allocation.basic.size_512.reverse.count_1
+dEQP-VK.memory.allocation.basic.size_512.reverse.count_10
+dEQP-VK.memory.allocation.basic.size_512.reverse.count_100
+dEQP-VK.memory.allocation.basic.size_512.reverse.count_1000
+dEQP-VK.memory.allocation.basic.size_512.mixed.count_1
+dEQP-VK.memory.allocation.basic.size_512.mixed.count_10
+dEQP-VK.memory.allocation.basic.size_512.mixed.count_100
+dEQP-VK.memory.allocation.basic.size_512.mixed.count_1000
+dEQP-VK.memory.allocation.basic.size_1KiB.forward.count_1
+dEQP-VK.memory.allocation.basic.size_1KiB.forward.count_10
+dEQP-VK.memory.allocation.basic.size_1KiB.forward.count_100
+dEQP-VK.memory.allocation.basic.size_1KiB.forward.count_1000
+dEQP-VK.memory.allocation.basic.size_1KiB.reverse.count_1
+dEQP-VK.memory.allocation.basic.size_1KiB.reverse.count_10
+dEQP-VK.memory.allocation.basic.size_1KiB.reverse.count_100
+dEQP-VK.memory.allocation.basic.size_1KiB.reverse.count_1000
+dEQP-VK.memory.allocation.basic.size_1KiB.mixed.count_1
+dEQP-VK.memory.allocation.basic.size_1KiB.mixed.count_10
+dEQP-VK.memory.allocation.basic.size_1KiB.mixed.count_100
+dEQP-VK.memory.allocation.basic.size_1KiB.mixed.count_1000
+dEQP-VK.memory.allocation.basic.size_4KiB.forward.count_1
+dEQP-VK.memory.allocation.basic.size_4KiB.forward.count_10
+dEQP-VK.memory.allocation.basic.size_4KiB.forward.count_100
+dEQP-VK.memory.allocation.basic.size_4KiB.forward.count_1000
+dEQP-VK.memory.allocation.basic.size_4KiB.forward.count_4000
+dEQP-VK.memory.allocation.basic.size_4KiB.reverse.count_1
+dEQP-VK.memory.allocation.basic.size_4KiB.reverse.count_10
+dEQP-VK.memory.allocation.basic.size_4KiB.reverse.count_100
+dEQP-VK.memory.allocation.basic.size_4KiB.reverse.count_1000
+dEQP-VK.memory.allocation.basic.size_4KiB.reverse.count_4000
+dEQP-VK.memory.allocation.basic.size_4KiB.mixed.count_1
+dEQP-VK.memory.allocation.basic.size_4KiB.mixed.count_10
+dEQP-VK.memory.allocation.basic.size_4KiB.mixed.count_100
+dEQP-VK.memory.allocation.basic.size_4KiB.mixed.count_1000
+dEQP-VK.memory.allocation.basic.size_4KiB.mixed.count_4000
+dEQP-VK.memory.allocation.basic.size_8KiB.forward.count_1
+dEQP-VK.memory.allocation.basic.size_8KiB.forward.count_10
+dEQP-VK.memory.allocation.basic.size_8KiB.forward.count_100
+dEQP-VK.memory.allocation.basic.size_8KiB.forward.count_1000
+dEQP-VK.memory.allocation.basic.size_8KiB.forward.count_4000
+dEQP-VK.memory.allocation.basic.size_8KiB.reverse.count_1
+dEQP-VK.memory.allocation.basic.size_8KiB.reverse.count_10
+dEQP-VK.memory.allocation.basic.size_8KiB.reverse.count_100
+dEQP-VK.memory.allocation.basic.size_8KiB.reverse.count_1000
+dEQP-VK.memory.allocation.basic.size_8KiB.reverse.count_4000
+dEQP-VK.memory.allocation.basic.size_8KiB.mixed.count_1
+dEQP-VK.memory.allocation.basic.size_8KiB.mixed.count_10
+dEQP-VK.memory.allocation.basic.size_8KiB.mixed.count_100
+dEQP-VK.memory.allocation.basic.size_8KiB.mixed.count_1000
+dEQP-VK.memory.allocation.basic.size_8KiB.mixed.count_4000
+dEQP-VK.memory.allocation.basic.size_1MiB.forward.count_1
+dEQP-VK.memory.allocation.basic.size_1MiB.forward.count_10
+dEQP-VK.memory.allocation.basic.size_1MiB.forward.count_50
+dEQP-VK.memory.allocation.basic.size_1MiB.reverse.count_1
+dEQP-VK.memory.allocation.basic.size_1MiB.reverse.count_10
+dEQP-VK.memory.allocation.basic.size_1MiB.reverse.count_50
+dEQP-VK.memory.allocation.basic.size_1MiB.mixed.count_1
+dEQP-VK.memory.allocation.basic.size_1MiB.mixed.count_10
+dEQP-VK.memory.allocation.basic.size_1MiB.mixed.count_50
+dEQP-VK.memory.allocation.basic.percent_1.forward.count_12
+dEQP-VK.memory.allocation.basic.percent_1.reverse.count_12
+dEQP-VK.memory.allocation.basic.percent_1.mixed.count_12
+dEQP-VK.memory.allocation.random.0
+dEQP-VK.memory.allocation.random.1
+dEQP-VK.memory.allocation.random.2
+dEQP-VK.memory.allocation.random.3
+dEQP-VK.memory.allocation.random.4
+dEQP-VK.memory.allocation.random.5
+dEQP-VK.memory.allocation.random.6
+dEQP-VK.memory.allocation.random.7
+dEQP-VK.memory.allocation.random.8
+dEQP-VK.memory.allocation.random.9
+dEQP-VK.memory.allocation.random.10
+dEQP-VK.memory.allocation.random.11
+dEQP-VK.memory.allocation.random.12
+dEQP-VK.memory.allocation.random.13
+dEQP-VK.memory.allocation.random.14
+dEQP-VK.memory.allocation.random.15
+dEQP-VK.memory.allocation.random.16
+dEQP-VK.memory.allocation.random.17
+dEQP-VK.memory.allocation.random.18
+dEQP-VK.memory.allocation.random.19
+dEQP-VK.memory.allocation.random.20
+dEQP-VK.memory.allocation.random.21
+dEQP-VK.memory.allocation.random.22
+dEQP-VK.memory.allocation.random.23
+dEQP-VK.memory.allocation.random.24
+dEQP-VK.memory.allocation.random.25
+dEQP-VK.memory.allocation.random.26
+dEQP-VK.memory.allocation.random.27
+dEQP-VK.memory.allocation.random.28
+dEQP-VK.memory.allocation.random.29
+dEQP-VK.memory.allocation.random.30
+dEQP-VK.memory.allocation.random.31
+dEQP-VK.memory.allocation.random.32
+dEQP-VK.memory.allocation.random.33
+dEQP-VK.memory.allocation.random.34
+dEQP-VK.memory.allocation.random.35
+dEQP-VK.memory.allocation.random.36
+dEQP-VK.memory.allocation.random.37
+dEQP-VK.memory.allocation.random.38
+dEQP-VK.memory.allocation.random.39
+dEQP-VK.memory.allocation.random.40
+dEQP-VK.memory.allocation.random.41
+dEQP-VK.memory.allocation.random.42
+dEQP-VK.memory.allocation.random.43
+dEQP-VK.memory.allocation.random.44
+dEQP-VK.memory.allocation.random.45
+dEQP-VK.memory.allocation.random.46
+dEQP-VK.memory.allocation.random.47
+dEQP-VK.memory.allocation.random.48
+dEQP-VK.memory.allocation.random.49
+dEQP-VK.memory.allocation.random.50
+dEQP-VK.memory.allocation.random.51
+dEQP-VK.memory.allocation.random.52
+dEQP-VK.memory.allocation.random.53
+dEQP-VK.memory.allocation.random.54
+dEQP-VK.memory.allocation.random.55
+dEQP-VK.memory.allocation.random.56
+dEQP-VK.memory.allocation.random.57
+dEQP-VK.memory.allocation.random.58
+dEQP-VK.memory.allocation.random.59
+dEQP-VK.memory.allocation.random.60
+dEQP-VK.memory.allocation.random.61
+dEQP-VK.memory.allocation.random.62
+dEQP-VK.memory.allocation.random.63
+dEQP-VK.memory.allocation.random.64
+dEQP-VK.memory.allocation.random.65
+dEQP-VK.memory.allocation.random.66
+dEQP-VK.memory.allocation.random.67
+dEQP-VK.memory.allocation.random.68
+dEQP-VK.memory.allocation.random.69
+dEQP-VK.memory.allocation.random.70
+dEQP-VK.memory.allocation.random.71
+dEQP-VK.memory.allocation.random.72
+dEQP-VK.memory.allocation.random.73
+dEQP-VK.memory.allocation.random.74
+dEQP-VK.memory.allocation.random.75
+dEQP-VK.memory.allocation.random.76
+dEQP-VK.memory.allocation.random.77
+dEQP-VK.memory.allocation.random.78
+dEQP-VK.memory.allocation.random.79
+dEQP-VK.memory.allocation.random.80
+dEQP-VK.memory.allocation.random.81
+dEQP-VK.memory.allocation.random.82
+dEQP-VK.memory.allocation.random.83
+dEQP-VK.memory.allocation.random.84
+dEQP-VK.memory.allocation.random.85
+dEQP-VK.memory.allocation.random.86
+dEQP-VK.memory.allocation.random.87
+dEQP-VK.memory.allocation.random.88
+dEQP-VK.memory.allocation.random.89
+dEQP-VK.memory.allocation.random.90
+dEQP-VK.memory.allocation.random.91
+dEQP-VK.memory.allocation.random.92
+dEQP-VK.memory.allocation.random.93
+dEQP-VK.memory.allocation.random.94
+dEQP-VK.memory.allocation.random.95
+dEQP-VK.memory.allocation.random.96
+dEQP-VK.memory.allocation.random.97
+dEQP-VK.memory.allocation.random.98
+dEQP-VK.memory.allocation.random.99
+dEQP-VK.memory.mapping.full.33.simple
+dEQP-VK.memory.mapping.full.33.remap
+dEQP-VK.memory.mapping.full.33.flush
+dEQP-VK.memory.mapping.full.33.subflush
+dEQP-VK.memory.mapping.full.33.subflush_separate
+dEQP-VK.memory.mapping.full.33.subflush_overlapping
+dEQP-VK.memory.mapping.full.33.invalidate
+dEQP-VK.memory.mapping.full.33.subinvalidate
+dEQP-VK.memory.mapping.full.33.subinvalidate_separate
+dEQP-VK.memory.mapping.full.33.subinvalidate_overlapping
+dEQP-VK.memory.mapping.full.257.simple
+dEQP-VK.memory.mapping.full.257.remap
+dEQP-VK.memory.mapping.full.257.flush
+dEQP-VK.memory.mapping.full.257.subflush
+dEQP-VK.memory.mapping.full.257.subflush_separate
+dEQP-VK.memory.mapping.full.257.subflush_overlapping
+dEQP-VK.memory.mapping.full.257.invalidate
+dEQP-VK.memory.mapping.full.257.subinvalidate
+dEQP-VK.memory.mapping.full.257.subinvalidate_separate
+dEQP-VK.memory.mapping.full.257.subinvalidate_overlapping
+dEQP-VK.memory.mapping.full.4087.simple
+dEQP-VK.memory.mapping.full.4087.remap
+dEQP-VK.memory.mapping.full.4087.flush
+dEQP-VK.memory.mapping.full.4087.subflush
+dEQP-VK.memory.mapping.full.4087.subflush_separate
+dEQP-VK.memory.mapping.full.4087.subflush_overlapping
+dEQP-VK.memory.mapping.full.4087.invalidate
+dEQP-VK.memory.mapping.full.4087.subinvalidate
+dEQP-VK.memory.mapping.full.4087.subinvalidate_separate
+dEQP-VK.memory.mapping.full.4087.subinvalidate_overlapping
+dEQP-VK.memory.mapping.full.8095.simple
+dEQP-VK.memory.mapping.full.8095.remap
+dEQP-VK.memory.mapping.full.8095.flush
+dEQP-VK.memory.mapping.full.8095.subflush
+dEQP-VK.memory.mapping.full.8095.subflush_separate
+dEQP-VK.memory.mapping.full.8095.subflush_overlapping
+dEQP-VK.memory.mapping.full.8095.invalidate
+dEQP-VK.memory.mapping.full.8095.subinvalidate
+dEQP-VK.memory.mapping.full.8095.subinvalidate_separate
+dEQP-VK.memory.mapping.full.8095.subinvalidate_overlapping
+dEQP-VK.memory.mapping.full.1048577.simple
+dEQP-VK.memory.mapping.full.1048577.remap
+dEQP-VK.memory.mapping.full.1048577.flush
+dEQP-VK.memory.mapping.full.1048577.subflush
+dEQP-VK.memory.mapping.full.1048577.subflush_separate
+dEQP-VK.memory.mapping.full.1048577.subflush_overlapping
+dEQP-VK.memory.mapping.full.1048577.invalidate
+dEQP-VK.memory.mapping.full.1048577.subinvalidate
+dEQP-VK.memory.mapping.full.1048577.subinvalidate_separate
+dEQP-VK.memory.mapping.full.1048577.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.33.offset_0.size_31.simple
+dEQP-VK.memory.mapping.sub.33.offset_0.size_31.remap
+dEQP-VK.memory.mapping.sub.33.offset_0.size_31.flush
+dEQP-VK.memory.mapping.sub.33.offset_0.size_31.subflush
+dEQP-VK.memory.mapping.sub.33.offset_0.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.33.offset_0.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.33.offset_0.size_31.invalidate
+dEQP-VK.memory.mapping.sub.33.offset_0.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.33.offset_0.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.33.offset_0.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.257.offset_0.size_31.simple
+dEQP-VK.memory.mapping.sub.257.offset_0.size_31.remap
+dEQP-VK.memory.mapping.sub.257.offset_0.size_31.flush
+dEQP-VK.memory.mapping.sub.257.offset_0.size_31.subflush
+dEQP-VK.memory.mapping.sub.257.offset_0.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.257.offset_0.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.257.offset_0.size_31.invalidate
+dEQP-VK.memory.mapping.sub.257.offset_0.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.257.offset_0.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.257.offset_0.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.257.offset_0.size_255.simple
+dEQP-VK.memory.mapping.sub.257.offset_0.size_255.remap
+dEQP-VK.memory.mapping.sub.257.offset_0.size_255.flush
+dEQP-VK.memory.mapping.sub.257.offset_0.size_255.subflush
+dEQP-VK.memory.mapping.sub.257.offset_0.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.257.offset_0.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.257.offset_0.size_255.invalidate
+dEQP-VK.memory.mapping.sub.257.offset_0.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.257.offset_0.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.257.offset_0.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.257.offset_17.size_31.simple
+dEQP-VK.memory.mapping.sub.257.offset_17.size_31.remap
+dEQP-VK.memory.mapping.sub.257.offset_17.size_31.flush
+dEQP-VK.memory.mapping.sub.257.offset_17.size_31.subflush
+dEQP-VK.memory.mapping.sub.257.offset_17.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.257.offset_17.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.257.offset_17.size_31.invalidate
+dEQP-VK.memory.mapping.sub.257.offset_17.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.257.offset_17.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.257.offset_17.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.257.offset_129.size_31.simple
+dEQP-VK.memory.mapping.sub.257.offset_129.size_31.remap
+dEQP-VK.memory.mapping.sub.257.offset_129.size_31.flush
+dEQP-VK.memory.mapping.sub.257.offset_129.size_31.subflush
+dEQP-VK.memory.mapping.sub.257.offset_129.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.257.offset_129.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.257.offset_129.size_31.invalidate
+dEQP-VK.memory.mapping.sub.257.offset_129.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.257.offset_129.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.257.offset_129.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_31.simple
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_31.remap
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_31.flush
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_31.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_31.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_255.simple
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_255.remap
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_255.flush
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_255.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_255.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_1025.simple
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_1025.remap
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_1025.flush
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_1025.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_4085.simple
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_4085.remap
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_4085.flush
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_4085.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_4085.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_4085.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_4085.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_4085.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_4085.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_0.size_4085.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_31.simple
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_31.remap
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_31.flush
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_31.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_31.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_255.simple
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_255.remap
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_255.flush
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_255.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_255.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_1025.simple
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_1025.remap
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_1025.flush
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_1025.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_17.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_31.simple
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_31.remap
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_31.flush
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_31.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_31.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_255.simple
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_255.remap
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_255.flush
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_255.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_255.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_1025.simple
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_1025.remap
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_1025.flush
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_1025.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_129.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_31.simple
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_31.remap
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_31.flush
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_31.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_31.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_255.simple
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_255.remap
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_255.flush
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_255.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_255.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_1025.simple
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_1025.remap
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_1025.flush
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_1025.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_255.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_31.simple
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_31.remap
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_31.flush
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_31.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_31.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_255.simple
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_255.remap
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_255.flush
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_255.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_255.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_1025.simple
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_1025.remap
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_1025.flush
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_1025.subflush
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.4087.offset_1025.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_31.simple
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_31.remap
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_31.flush
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_31.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_31.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_255.simple
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_255.remap
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_255.flush
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_255.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_255.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_1025.simple
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_1025.remap
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_1025.flush
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_1025.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_4085.simple
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_4085.remap
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_4085.flush
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_4085.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_4085.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_4085.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_4085.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_4085.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_4085.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_0.size_4085.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_31.simple
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_31.remap
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_31.flush
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_31.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_31.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_255.simple
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_255.remap
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_255.flush
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_255.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_255.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_1025.simple
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_1025.remap
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_1025.flush
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_1025.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_4085.simple
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_4085.remap
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_4085.flush
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_4085.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_4085.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_4085.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_4085.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_4085.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_4085.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_17.size_4085.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_31.simple
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_31.remap
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_31.flush
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_31.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_31.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_255.simple
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_255.remap
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_255.flush
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_255.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_255.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_1025.simple
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_1025.remap
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_1025.flush
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_1025.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_4085.simple
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_4085.remap
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_4085.flush
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_4085.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_4085.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_4085.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_4085.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_4085.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_4085.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_129.size_4085.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_31.simple
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_31.remap
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_31.flush
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_31.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_31.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_255.simple
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_255.remap
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_255.flush
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_255.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_255.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_1025.simple
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_1025.remap
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_1025.flush
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_1025.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_4085.simple
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_4085.remap
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_4085.flush
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_4085.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_4085.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_4085.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_4085.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_4085.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_4085.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_255.size_4085.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_31.simple
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_31.remap
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_31.flush
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_31.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_31.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_255.simple
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_255.remap
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_255.flush
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_255.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_255.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_1025.simple
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_1025.remap
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_1025.flush
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_1025.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_4085.simple
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_4085.remap
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_4085.flush
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_4085.subflush
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_4085.subflush_separate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_4085.subflush_overlapping
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_4085.invalidate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_4085.subinvalidate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_4085.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.8095.offset_1025.size_4085.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_31.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_31.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_31.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_31.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_31.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_255.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_255.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_255.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_255.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_255.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1025.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1025.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1025.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1025.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_4085.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_4085.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_4085.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_4085.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_4085.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_4085.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_4085.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_4085.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_4085.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_4085.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1048575.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1048575.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1048575.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1048575.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1048575.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1048575.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1048575.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1048575.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1048575.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_0.size_1048575.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_31.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_31.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_31.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_31.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_31.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_255.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_255.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_255.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_255.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_255.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_1025.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_1025.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_1025.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_1025.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_4085.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_4085.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_4085.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_4085.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_4085.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_4085.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_4085.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_4085.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_4085.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_17.size_4085.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_31.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_31.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_31.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_31.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_31.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_255.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_255.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_255.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_255.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_255.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_1025.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_1025.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_1025.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_1025.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_4085.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_4085.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_4085.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_4085.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_4085.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_4085.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_4085.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_4085.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_4085.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_129.size_4085.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_31.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_31.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_31.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_31.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_31.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_255.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_255.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_255.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_255.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_255.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_1025.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_1025.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_1025.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_1025.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_4085.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_4085.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_4085.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_4085.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_4085.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_4085.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_4085.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_4085.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_4085.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_255.size_4085.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_31.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_31.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_31.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_31.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_31.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_255.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_255.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_255.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_255.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_255.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_1025.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_1025.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_1025.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_1025.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_4085.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_4085.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_4085.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_4085.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_4085.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_4085.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_4085.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_4085.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_4085.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_1025.size_4085.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_31.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_31.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_31.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_31.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_31.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_31.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_31.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_31.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_31.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_31.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_255.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_255.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_255.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_255.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_255.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_255.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_255.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_255.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_255.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_255.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_1025.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_1025.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_1025.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_1025.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_1025.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_1025.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_1025.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_1025.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_1025.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_1025.subinvalidate_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_4085.simple
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_4085.remap
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_4085.flush
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_4085.subflush
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_4085.subflush_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_4085.subflush_overlapping
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_4085.invalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_4085.subinvalidate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_4085.subinvalidate_separate
+dEQP-VK.memory.mapping.sub.1048577.offset_32769.size_4085.subinvalidate_overlapping
+dEQP-VK.memory.mapping.random.0
+dEQP-VK.memory.mapping.random.1
+dEQP-VK.memory.mapping.random.2
+dEQP-VK.memory.mapping.random.3
+dEQP-VK.memory.mapping.random.4
+dEQP-VK.memory.mapping.random.5
+dEQP-VK.memory.mapping.random.6
+dEQP-VK.memory.mapping.random.7
+dEQP-VK.memory.mapping.random.8
+dEQP-VK.memory.mapping.random.9
+dEQP-VK.memory.mapping.random.10
+dEQP-VK.memory.mapping.random.11
+dEQP-VK.memory.mapping.random.12
+dEQP-VK.memory.mapping.random.13
+dEQP-VK.memory.mapping.random.14
+dEQP-VK.memory.mapping.random.15
+dEQP-VK.memory.mapping.random.16
+dEQP-VK.memory.mapping.random.17
+dEQP-VK.memory.mapping.random.18
+dEQP-VK.memory.mapping.random.19
+dEQP-VK.memory.mapping.random.20
+dEQP-VK.memory.mapping.random.21
+dEQP-VK.memory.mapping.random.22
+dEQP-VK.memory.mapping.random.23
+dEQP-VK.memory.mapping.random.24
+dEQP-VK.memory.mapping.random.25
+dEQP-VK.memory.mapping.random.26
+dEQP-VK.memory.mapping.random.27
+dEQP-VK.memory.mapping.random.28
+dEQP-VK.memory.mapping.random.29
+dEQP-VK.memory.mapping.random.30
+dEQP-VK.memory.mapping.random.31
+dEQP-VK.memory.mapping.random.32
+dEQP-VK.memory.mapping.random.33
+dEQP-VK.memory.mapping.random.34
+dEQP-VK.memory.mapping.random.35
+dEQP-VK.memory.mapping.random.36
+dEQP-VK.memory.mapping.random.37
+dEQP-VK.memory.mapping.random.38
+dEQP-VK.memory.mapping.random.39
+dEQP-VK.memory.mapping.random.40
+dEQP-VK.memory.mapping.random.41
+dEQP-VK.memory.mapping.random.42
+dEQP-VK.memory.mapping.random.43
+dEQP-VK.memory.mapping.random.44
+dEQP-VK.memory.mapping.random.45
+dEQP-VK.memory.mapping.random.46
+dEQP-VK.memory.mapping.random.47
+dEQP-VK.memory.mapping.random.48
+dEQP-VK.memory.mapping.random.49
+dEQP-VK.memory.mapping.random.50
+dEQP-VK.memory.mapping.random.51
+dEQP-VK.memory.mapping.random.52
+dEQP-VK.memory.mapping.random.53
+dEQP-VK.memory.mapping.random.54
+dEQP-VK.memory.mapping.random.55
+dEQP-VK.memory.mapping.random.56
+dEQP-VK.memory.mapping.random.57
+dEQP-VK.memory.mapping.random.58
+dEQP-VK.memory.mapping.random.59
+dEQP-VK.memory.mapping.random.60
+dEQP-VK.memory.mapping.random.61
+dEQP-VK.memory.mapping.random.62
+dEQP-VK.memory.mapping.random.63
+dEQP-VK.memory.mapping.random.64
+dEQP-VK.memory.mapping.random.65
+dEQP-VK.memory.mapping.random.66
+dEQP-VK.memory.mapping.random.67
+dEQP-VK.memory.mapping.random.68
+dEQP-VK.memory.mapping.random.69
+dEQP-VK.memory.mapping.random.70
+dEQP-VK.memory.mapping.random.71
+dEQP-VK.memory.mapping.random.72
+dEQP-VK.memory.mapping.random.73
+dEQP-VK.memory.mapping.random.74
+dEQP-VK.memory.mapping.random.75
+dEQP-VK.memory.mapping.random.76
+dEQP-VK.memory.mapping.random.77
+dEQP-VK.memory.mapping.random.78
+dEQP-VK.memory.mapping.random.79
+dEQP-VK.memory.mapping.random.80
+dEQP-VK.memory.mapping.random.81
+dEQP-VK.memory.mapping.random.82
+dEQP-VK.memory.mapping.random.83
+dEQP-VK.memory.mapping.random.84
+dEQP-VK.memory.mapping.random.85
+dEQP-VK.memory.mapping.random.86
+dEQP-VK.memory.mapping.random.87
+dEQP-VK.memory.mapping.random.88
+dEQP-VK.memory.mapping.random.89
+dEQP-VK.memory.mapping.random.90
+dEQP-VK.memory.mapping.random.91
+dEQP-VK.memory.mapping.random.92
+dEQP-VK.memory.mapping.random.93
+dEQP-VK.memory.mapping.random.94
+dEQP-VK.memory.mapping.random.95
+dEQP-VK.memory.mapping.random.96
+dEQP-VK.memory.mapping.random.97
+dEQP-VK.memory.mapping.random.98
+dEQP-VK.memory.mapping.random.99
+dEQP-VK.memory.pipeline_barrier.host_read_host_write.1024
+dEQP-VK.memory.pipeline_barrier.host_read_host_write.8192
+dEQP-VK.memory.pipeline_barrier.host_read_host_write.65536
+dEQP-VK.memory.pipeline_barrier.host_read_host_write.1048576
+dEQP-VK.memory.pipeline_barrier.host_write_transfer_src.1024
+dEQP-VK.memory.pipeline_barrier.host_write_transfer_src.8192
+dEQP-VK.memory.pipeline_barrier.host_write_transfer_src.65536
+dEQP-VK.memory.pipeline_barrier.host_write_transfer_src.1048576
+dEQP-VK.memory.pipeline_barrier.host_write_vertex_buffer.1024
+dEQP-VK.memory.pipeline_barrier.host_write_vertex_buffer.8192
+dEQP-VK.memory.pipeline_barrier.host_write_vertex_buffer.65536
+dEQP-VK.memory.pipeline_barrier.host_write_vertex_buffer.1048576
+dEQP-VK.memory.pipeline_barrier.host_write_index_buffer.1024
+dEQP-VK.memory.pipeline_barrier.host_write_index_buffer.8192
+dEQP-VK.memory.pipeline_barrier.host_write_index_buffer.65536
+dEQP-VK.memory.pipeline_barrier.host_write_index_buffer.1048576
+dEQP-VK.memory.pipeline_barrier.host_read_transfer_dst.1024
+dEQP-VK.memory.pipeline_barrier.host_read_transfer_dst.8192
+dEQP-VK.memory.pipeline_barrier.host_read_transfer_dst.65536
+dEQP-VK.memory.pipeline_barrier.host_read_transfer_dst.1048576
+dEQP-VK.memory.pipeline_barrier.transfer_src_transfer_dst.1024
+dEQP-VK.memory.pipeline_barrier.transfer_src_transfer_dst.8192
+dEQP-VK.memory.pipeline_barrier.transfer_src_transfer_dst.65536
+dEQP-VK.memory.pipeline_barrier.transfer_src_transfer_dst.1048576
+dEQP-VK.memory.pipeline_barrier.transfer_dst_vertex_buffer.1024
+dEQP-VK.memory.pipeline_barrier.transfer_dst_vertex_buffer.8192
+dEQP-VK.memory.pipeline_barrier.transfer_dst_vertex_buffer.65536
+dEQP-VK.memory.pipeline_barrier.transfer_dst_vertex_buffer.1048576
+dEQP-VK.memory.pipeline_barrier.transfer_dst_index_buffer.1024
+dEQP-VK.memory.pipeline_barrier.transfer_dst_index_buffer.8192
+dEQP-VK.memory.pipeline_barrier.transfer_dst_index_buffer.65536
+dEQP-VK.memory.pipeline_barrier.transfer_dst_index_buffer.1048576
+dEQP-VK.memory.pipeline_barrier.all.1024
+dEQP-VK.memory.pipeline_barrier.all.8192
+dEQP-VK.memory.pipeline_barrier.all.65536
+dEQP-VK.memory.pipeline_barrier.all.1048576
+dEQP-VK.memory.pipeline_barrier.all_device.1024
+dEQP-VK.memory.pipeline_barrier.all_device.8192
+dEQP-VK.memory.pipeline_barrier.all_device.65536
+dEQP-VK.memory.pipeline_barrier.all_device.1048576
+dEQP-VK.ubo.2_level_array.std140.float_vertex
+dEQP-VK.ubo.2_level_array.std140.float_fragment
+dEQP-VK.ubo.2_level_array.std140.float_both
+dEQP-VK.ubo.2_level_array.std140.vec2_vertex
+dEQP-VK.ubo.2_level_array.std140.vec2_fragment
+dEQP-VK.ubo.2_level_array.std140.vec2_both
+dEQP-VK.ubo.2_level_array.std140.vec3_vertex
+dEQP-VK.ubo.2_level_array.std140.vec3_fragment
+dEQP-VK.ubo.2_level_array.std140.vec3_both
+dEQP-VK.ubo.2_level_array.std140.vec4_vertex
+dEQP-VK.ubo.2_level_array.std140.vec4_fragment
+dEQP-VK.ubo.2_level_array.std140.vec4_both
+dEQP-VK.ubo.2_level_array.std140.int_vertex
+dEQP-VK.ubo.2_level_array.std140.int_fragment
+dEQP-VK.ubo.2_level_array.std140.int_both
+dEQP-VK.ubo.2_level_array.std140.ivec2_vertex
+dEQP-VK.ubo.2_level_array.std140.ivec2_fragment
+dEQP-VK.ubo.2_level_array.std140.ivec2_both
+dEQP-VK.ubo.2_level_array.std140.ivec3_vertex
+dEQP-VK.ubo.2_level_array.std140.ivec3_fragment
+dEQP-VK.ubo.2_level_array.std140.ivec3_both
+dEQP-VK.ubo.2_level_array.std140.ivec4_vertex
+dEQP-VK.ubo.2_level_array.std140.ivec4_fragment
+dEQP-VK.ubo.2_level_array.std140.ivec4_both
+dEQP-VK.ubo.2_level_array.std140.uint_vertex
+dEQP-VK.ubo.2_level_array.std140.uint_fragment
+dEQP-VK.ubo.2_level_array.std140.uint_both
+dEQP-VK.ubo.2_level_array.std140.uvec2_vertex
+dEQP-VK.ubo.2_level_array.std140.uvec2_fragment
+dEQP-VK.ubo.2_level_array.std140.uvec2_both
+dEQP-VK.ubo.2_level_array.std140.uvec3_vertex
+dEQP-VK.ubo.2_level_array.std140.uvec3_fragment
+dEQP-VK.ubo.2_level_array.std140.uvec3_both
+dEQP-VK.ubo.2_level_array.std140.uvec4_vertex
+dEQP-VK.ubo.2_level_array.std140.uvec4_fragment
+dEQP-VK.ubo.2_level_array.std140.uvec4_both
+dEQP-VK.ubo.2_level_array.std140.bool_vertex
+dEQP-VK.ubo.2_level_array.std140.bool_fragment
+dEQP-VK.ubo.2_level_array.std140.bool_both
+dEQP-VK.ubo.2_level_array.std140.bvec2_vertex
+dEQP-VK.ubo.2_level_array.std140.bvec2_fragment
+dEQP-VK.ubo.2_level_array.std140.bvec2_both
+dEQP-VK.ubo.2_level_array.std140.bvec3_vertex
+dEQP-VK.ubo.2_level_array.std140.bvec3_fragment
+dEQP-VK.ubo.2_level_array.std140.bvec3_both
+dEQP-VK.ubo.2_level_array.std140.bvec4_vertex
+dEQP-VK.ubo.2_level_array.std140.bvec4_fragment
+dEQP-VK.ubo.2_level_array.std140.bvec4_both
+dEQP-VK.ubo.2_level_array.std140.mat2_vertex
+dEQP-VK.ubo.2_level_array.std140.mat2_fragment
+dEQP-VK.ubo.2_level_array.std140.mat2_both
+dEQP-VK.ubo.2_level_array.std140.row_major_mat2_vertex
+dEQP-VK.ubo.2_level_array.std140.row_major_mat2_fragment
+dEQP-VK.ubo.2_level_array.std140.row_major_mat2_both
+dEQP-VK.ubo.2_level_array.std140.column_major_mat2_vertex
+dEQP-VK.ubo.2_level_array.std140.column_major_mat2_fragment
+dEQP-VK.ubo.2_level_array.std140.column_major_mat2_both
+dEQP-VK.ubo.2_level_array.std140.mat3_vertex
+dEQP-VK.ubo.2_level_array.std140.mat3_fragment
+dEQP-VK.ubo.2_level_array.std140.mat3_both
+dEQP-VK.ubo.2_level_array.std140.row_major_mat3_vertex
+dEQP-VK.ubo.2_level_array.std140.row_major_mat3_fragment
+dEQP-VK.ubo.2_level_array.std140.row_major_mat3_both
+dEQP-VK.ubo.2_level_array.std140.column_major_mat3_vertex
+dEQP-VK.ubo.2_level_array.std140.column_major_mat3_fragment
+dEQP-VK.ubo.2_level_array.std140.column_major_mat3_both
+dEQP-VK.ubo.2_level_array.std140.mat4_vertex
+dEQP-VK.ubo.2_level_array.std140.mat4_fragment
+dEQP-VK.ubo.2_level_array.std140.mat4_both
+dEQP-VK.ubo.2_level_array.std140.row_major_mat4_vertex
+dEQP-VK.ubo.2_level_array.std140.row_major_mat4_fragment
+dEQP-VK.ubo.2_level_array.std140.row_major_mat4_both
+dEQP-VK.ubo.2_level_array.std140.column_major_mat4_vertex
+dEQP-VK.ubo.2_level_array.std140.column_major_mat4_fragment
+dEQP-VK.ubo.2_level_array.std140.column_major_mat4_both
+dEQP-VK.ubo.2_level_array.std140.mat2x3_vertex
+dEQP-VK.ubo.2_level_array.std140.mat2x3_fragment
+dEQP-VK.ubo.2_level_array.std140.mat2x3_both
+dEQP-VK.ubo.2_level_array.std140.row_major_mat2x3_vertex
+dEQP-VK.ubo.2_level_array.std140.row_major_mat2x3_fragment
+dEQP-VK.ubo.2_level_array.std140.row_major_mat2x3_both
+dEQP-VK.ubo.2_level_array.std140.column_major_mat2x3_vertex
+dEQP-VK.ubo.2_level_array.std140.column_major_mat2x3_fragment
+dEQP-VK.ubo.2_level_array.std140.column_major_mat2x3_both
+dEQP-VK.ubo.2_level_array.std140.mat2x4_vertex
+dEQP-VK.ubo.2_level_array.std140.mat2x4_fragment
+dEQP-VK.ubo.2_level_array.std140.mat2x4_both
+dEQP-VK.ubo.2_level_array.std140.row_major_mat2x4_vertex
+dEQP-VK.ubo.2_level_array.std140.row_major_mat2x4_fragment
+dEQP-VK.ubo.2_level_array.std140.row_major_mat2x4_both
+dEQP-VK.ubo.2_level_array.std140.column_major_mat2x4_vertex
+dEQP-VK.ubo.2_level_array.std140.column_major_mat2x4_fragment
+dEQP-VK.ubo.2_level_array.std140.column_major_mat2x4_both
+dEQP-VK.ubo.2_level_array.std140.mat3x2_vertex
+dEQP-VK.ubo.2_level_array.std140.mat3x2_fragment
+dEQP-VK.ubo.2_level_array.std140.mat3x2_both
+dEQP-VK.ubo.2_level_array.std140.row_major_mat3x2_vertex
+dEQP-VK.ubo.2_level_array.std140.row_major_mat3x2_fragment
+dEQP-VK.ubo.2_level_array.std140.row_major_mat3x2_both
+dEQP-VK.ubo.2_level_array.std140.column_major_mat3x2_vertex
+dEQP-VK.ubo.2_level_array.std140.column_major_mat3x2_fragment
+dEQP-VK.ubo.2_level_array.std140.column_major_mat3x2_both
+dEQP-VK.ubo.2_level_array.std140.mat3x4_vertex
+dEQP-VK.ubo.2_level_array.std140.mat3x4_fragment
+dEQP-VK.ubo.2_level_array.std140.mat3x4_both
+dEQP-VK.ubo.2_level_array.std140.row_major_mat3x4_vertex
+dEQP-VK.ubo.2_level_array.std140.row_major_mat3x4_fragment
+dEQP-VK.ubo.2_level_array.std140.row_major_mat3x4_both
+dEQP-VK.ubo.2_level_array.std140.column_major_mat3x4_vertex
+dEQP-VK.ubo.2_level_array.std140.column_major_mat3x4_fragment
+dEQP-VK.ubo.2_level_array.std140.column_major_mat3x4_both
+dEQP-VK.ubo.2_level_array.std140.mat4x2_vertex
+dEQP-VK.ubo.2_level_array.std140.mat4x2_fragment
+dEQP-VK.ubo.2_level_array.std140.mat4x2_both
+dEQP-VK.ubo.2_level_array.std140.row_major_mat4x2_vertex
+dEQP-VK.ubo.2_level_array.std140.row_major_mat4x2_fragment
+dEQP-VK.ubo.2_level_array.std140.row_major_mat4x2_both
+dEQP-VK.ubo.2_level_array.std140.column_major_mat4x2_vertex
+dEQP-VK.ubo.2_level_array.std140.column_major_mat4x2_fragment
+dEQP-VK.ubo.2_level_array.std140.column_major_mat4x2_both
+dEQP-VK.ubo.2_level_array.std140.mat4x3_vertex
+dEQP-VK.ubo.2_level_array.std140.mat4x3_fragment
+dEQP-VK.ubo.2_level_array.std140.mat4x3_both
+dEQP-VK.ubo.2_level_array.std140.row_major_mat4x3_vertex
+dEQP-VK.ubo.2_level_array.std140.row_major_mat4x3_fragment
+dEQP-VK.ubo.2_level_array.std140.row_major_mat4x3_both
+dEQP-VK.ubo.2_level_array.std140.column_major_mat4x3_vertex
+dEQP-VK.ubo.2_level_array.std140.column_major_mat4x3_fragment
+dEQP-VK.ubo.2_level_array.std140.column_major_mat4x3_both
+dEQP-VK.ubo.3_level_array.std140.float_vertex
+dEQP-VK.ubo.3_level_array.std140.float_fragment
+dEQP-VK.ubo.3_level_array.std140.float_both
+dEQP-VK.ubo.3_level_array.std140.vec2_vertex
+dEQP-VK.ubo.3_level_array.std140.vec2_fragment
+dEQP-VK.ubo.3_level_array.std140.vec2_both
+dEQP-VK.ubo.3_level_array.std140.vec3_vertex
+dEQP-VK.ubo.3_level_array.std140.vec3_fragment
+dEQP-VK.ubo.3_level_array.std140.vec3_both
+dEQP-VK.ubo.3_level_array.std140.vec4_vertex
+dEQP-VK.ubo.3_level_array.std140.vec4_fragment
+dEQP-VK.ubo.3_level_array.std140.vec4_both
+dEQP-VK.ubo.3_level_array.std140.int_vertex
+dEQP-VK.ubo.3_level_array.std140.int_fragment
+dEQP-VK.ubo.3_level_array.std140.int_both
+dEQP-VK.ubo.3_level_array.std140.ivec2_vertex
+dEQP-VK.ubo.3_level_array.std140.ivec2_fragment
+dEQP-VK.ubo.3_level_array.std140.ivec2_both
+dEQP-VK.ubo.3_level_array.std140.ivec3_vertex
+dEQP-VK.ubo.3_level_array.std140.ivec3_fragment
+dEQP-VK.ubo.3_level_array.std140.ivec3_both
+dEQP-VK.ubo.3_level_array.std140.ivec4_vertex
+dEQP-VK.ubo.3_level_array.std140.ivec4_fragment
+dEQP-VK.ubo.3_level_array.std140.ivec4_both
+dEQP-VK.ubo.3_level_array.std140.uint_vertex
+dEQP-VK.ubo.3_level_array.std140.uint_fragment
+dEQP-VK.ubo.3_level_array.std140.uint_both
+dEQP-VK.ubo.3_level_array.std140.uvec2_vertex
+dEQP-VK.ubo.3_level_array.std140.uvec2_fragment
+dEQP-VK.ubo.3_level_array.std140.uvec2_both
+dEQP-VK.ubo.3_level_array.std140.uvec3_vertex
+dEQP-VK.ubo.3_level_array.std140.uvec3_fragment
+dEQP-VK.ubo.3_level_array.std140.uvec3_both
+dEQP-VK.ubo.3_level_array.std140.uvec4_vertex
+dEQP-VK.ubo.3_level_array.std140.uvec4_fragment
+dEQP-VK.ubo.3_level_array.std140.uvec4_both
+dEQP-VK.ubo.3_level_array.std140.bool_vertex
+dEQP-VK.ubo.3_level_array.std140.bool_fragment
+dEQP-VK.ubo.3_level_array.std140.bool_both
+dEQP-VK.ubo.3_level_array.std140.bvec2_vertex
+dEQP-VK.ubo.3_level_array.std140.bvec2_fragment
+dEQP-VK.ubo.3_level_array.std140.bvec2_both
+dEQP-VK.ubo.3_level_array.std140.bvec3_vertex
+dEQP-VK.ubo.3_level_array.std140.bvec3_fragment
+dEQP-VK.ubo.3_level_array.std140.bvec3_both
+dEQP-VK.ubo.3_level_array.std140.bvec4_vertex
+dEQP-VK.ubo.3_level_array.std140.bvec4_fragment
+dEQP-VK.ubo.3_level_array.std140.bvec4_both
+dEQP-VK.ubo.3_level_array.std140.mat2_vertex
+dEQP-VK.ubo.3_level_array.std140.mat2_fragment
+dEQP-VK.ubo.3_level_array.std140.mat2_both
+dEQP-VK.ubo.3_level_array.std140.row_major_mat2_vertex
+dEQP-VK.ubo.3_level_array.std140.row_major_mat2_fragment
+dEQP-VK.ubo.3_level_array.std140.row_major_mat2_both
+dEQP-VK.ubo.3_level_array.std140.column_major_mat2_vertex
+dEQP-VK.ubo.3_level_array.std140.column_major_mat2_fragment
+dEQP-VK.ubo.3_level_array.std140.column_major_mat2_both
+dEQP-VK.ubo.3_level_array.std140.mat3_vertex
+dEQP-VK.ubo.3_level_array.std140.mat3_fragment
+dEQP-VK.ubo.3_level_array.std140.mat3_both
+dEQP-VK.ubo.3_level_array.std140.row_major_mat3_vertex
+dEQP-VK.ubo.3_level_array.std140.row_major_mat3_fragment
+dEQP-VK.ubo.3_level_array.std140.row_major_mat3_both
+dEQP-VK.ubo.3_level_array.std140.column_major_mat3_vertex
+dEQP-VK.ubo.3_level_array.std140.column_major_mat3_fragment
+dEQP-VK.ubo.3_level_array.std140.column_major_mat3_both
+dEQP-VK.ubo.3_level_array.std140.mat4_vertex
+dEQP-VK.ubo.3_level_array.std140.mat4_fragment
+dEQP-VK.ubo.3_level_array.std140.mat4_both
+dEQP-VK.ubo.3_level_array.std140.row_major_mat4_vertex
+dEQP-VK.ubo.3_level_array.std140.row_major_mat4_fragment
+dEQP-VK.ubo.3_level_array.std140.row_major_mat4_both
+dEQP-VK.ubo.3_level_array.std140.column_major_mat4_vertex
+dEQP-VK.ubo.3_level_array.std140.column_major_mat4_fragment
+dEQP-VK.ubo.3_level_array.std140.column_major_mat4_both
+dEQP-VK.ubo.3_level_array.std140.mat2x3_vertex
+dEQP-VK.ubo.3_level_array.std140.mat2x3_fragment
+dEQP-VK.ubo.3_level_array.std140.mat2x3_both
+dEQP-VK.ubo.3_level_array.std140.row_major_mat2x3_vertex
+dEQP-VK.ubo.3_level_array.std140.row_major_mat2x3_fragment
+dEQP-VK.ubo.3_level_array.std140.row_major_mat2x3_both
+dEQP-VK.ubo.3_level_array.std140.column_major_mat2x3_vertex
+dEQP-VK.ubo.3_level_array.std140.column_major_mat2x3_fragment
+dEQP-VK.ubo.3_level_array.std140.column_major_mat2x3_both
+dEQP-VK.ubo.3_level_array.std140.mat2x4_vertex
+dEQP-VK.ubo.3_level_array.std140.mat2x4_fragment
+dEQP-VK.ubo.3_level_array.std140.mat2x4_both
+dEQP-VK.ubo.3_level_array.std140.row_major_mat2x4_vertex
+dEQP-VK.ubo.3_level_array.std140.row_major_mat2x4_fragment
+dEQP-VK.ubo.3_level_array.std140.row_major_mat2x4_both
+dEQP-VK.ubo.3_level_array.std140.column_major_mat2x4_vertex
+dEQP-VK.ubo.3_level_array.std140.column_major_mat2x4_fragment
+dEQP-VK.ubo.3_level_array.std140.column_major_mat2x4_both
+dEQP-VK.ubo.3_level_array.std140.mat3x2_vertex
+dEQP-VK.ubo.3_level_array.std140.mat3x2_fragment
+dEQP-VK.ubo.3_level_array.std140.mat3x2_both
+dEQP-VK.ubo.3_level_array.std140.row_major_mat3x2_vertex
+dEQP-VK.ubo.3_level_array.std140.row_major_mat3x2_fragment
+dEQP-VK.ubo.3_level_array.std140.row_major_mat3x2_both
+dEQP-VK.ubo.3_level_array.std140.column_major_mat3x2_vertex
+dEQP-VK.ubo.3_level_array.std140.column_major_mat3x2_fragment
+dEQP-VK.ubo.3_level_array.std140.column_major_mat3x2_both
+dEQP-VK.ubo.3_level_array.std140.mat3x4_vertex
+dEQP-VK.ubo.3_level_array.std140.mat3x4_fragment
+dEQP-VK.ubo.3_level_array.std140.mat3x4_both
+dEQP-VK.ubo.3_level_array.std140.row_major_mat3x4_vertex
+dEQP-VK.ubo.3_level_array.std140.row_major_mat3x4_fragment
+dEQP-VK.ubo.3_level_array.std140.row_major_mat3x4_both
+dEQP-VK.ubo.3_level_array.std140.column_major_mat3x4_vertex
+dEQP-VK.ubo.3_level_array.std140.column_major_mat3x4_fragment
+dEQP-VK.ubo.3_level_array.std140.column_major_mat3x4_both
+dEQP-VK.ubo.3_level_array.std140.mat4x2_vertex
+dEQP-VK.ubo.3_level_array.std140.mat4x2_fragment
+dEQP-VK.ubo.3_level_array.std140.mat4x2_both
+dEQP-VK.ubo.3_level_array.std140.row_major_mat4x2_vertex
+dEQP-VK.ubo.3_level_array.std140.row_major_mat4x2_fragment
+dEQP-VK.ubo.3_level_array.std140.row_major_mat4x2_both
+dEQP-VK.ubo.3_level_array.std140.column_major_mat4x2_vertex
+dEQP-VK.ubo.3_level_array.std140.column_major_mat4x2_fragment
+dEQP-VK.ubo.3_level_array.std140.column_major_mat4x2_both
+dEQP-VK.ubo.3_level_array.std140.mat4x3_vertex
+dEQP-VK.ubo.3_level_array.std140.mat4x3_fragment
+dEQP-VK.ubo.3_level_array.std140.mat4x3_both
+dEQP-VK.ubo.3_level_array.std140.row_major_mat4x3_vertex
+dEQP-VK.ubo.3_level_array.std140.row_major_mat4x3_fragment
+dEQP-VK.ubo.3_level_array.std140.row_major_mat4x3_both
+dEQP-VK.ubo.3_level_array.std140.column_major_mat4x3_vertex
+dEQP-VK.ubo.3_level_array.std140.column_major_mat4x3_fragment
+dEQP-VK.ubo.3_level_array.std140.column_major_mat4x3_both
+dEQP-VK.ubo.2_level_struct_array.per_block_buffer.std140_vertex
+dEQP-VK.ubo.2_level_struct_array.per_block_buffer.std140_fragment
+dEQP-VK.ubo.2_level_struct_array.per_block_buffer.std140_both
+dEQP-VK.ubo.2_level_struct_array.per_block_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.2_level_struct_array.per_block_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.2_level_struct_array.per_block_buffer.std140_instance_array_both
+dEQP-VK.ubo.2_level_struct_array.single_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.2_level_struct_array.single_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.2_level_struct_array.single_buffer.std140_instance_array_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_float_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_float_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_float_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_float_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_float_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_float_both
+dEQP-VK.ubo.single_basic_type.std140.highp_float_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_float_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_float_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_vec2_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_vec2_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_vec2_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_vec2_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_vec2_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_vec2_both
+dEQP-VK.ubo.single_basic_type.std140.highp_vec2_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_vec2_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_vec2_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_vec3_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_vec3_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_vec3_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_vec3_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_vec3_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_vec3_both
+dEQP-VK.ubo.single_basic_type.std140.highp_vec3_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_vec3_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_vec3_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_vec4_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_vec4_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_vec4_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_vec4_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_vec4_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_vec4_both
+dEQP-VK.ubo.single_basic_type.std140.highp_vec4_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_vec4_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_vec4_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_int_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_int_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_int_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_int_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_int_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_int_both
+dEQP-VK.ubo.single_basic_type.std140.highp_int_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_int_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_int_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_ivec2_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_ivec2_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_ivec2_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_ivec2_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_ivec2_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_ivec2_both
+dEQP-VK.ubo.single_basic_type.std140.highp_ivec2_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_ivec2_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_ivec2_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_ivec3_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_ivec3_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_ivec3_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_ivec3_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_ivec3_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_ivec3_both
+dEQP-VK.ubo.single_basic_type.std140.highp_ivec3_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_ivec3_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_ivec3_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_ivec4_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_ivec4_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_ivec4_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_ivec4_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_ivec4_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_ivec4_both
+dEQP-VK.ubo.single_basic_type.std140.highp_ivec4_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_ivec4_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_ivec4_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_uint_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_uint_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_uint_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_uint_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_uint_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_uint_both
+dEQP-VK.ubo.single_basic_type.std140.highp_uint_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_uint_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_uint_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_uvec2_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_uvec2_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_uvec2_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_uvec2_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_uvec2_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_uvec2_both
+dEQP-VK.ubo.single_basic_type.std140.highp_uvec2_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_uvec2_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_uvec2_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_uvec3_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_uvec3_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_uvec3_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_uvec3_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_uvec3_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_uvec3_both
+dEQP-VK.ubo.single_basic_type.std140.highp_uvec3_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_uvec3_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_uvec3_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_uvec4_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_uvec4_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_uvec4_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_uvec4_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_uvec4_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_uvec4_both
+dEQP-VK.ubo.single_basic_type.std140.highp_uvec4_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_uvec4_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_uvec4_both
+dEQP-VK.ubo.single_basic_type.std140.bool_vertex
+dEQP-VK.ubo.single_basic_type.std140.bool_fragment
+dEQP-VK.ubo.single_basic_type.std140.bool_both
+dEQP-VK.ubo.single_basic_type.std140.bvec2_vertex
+dEQP-VK.ubo.single_basic_type.std140.bvec2_fragment
+dEQP-VK.ubo.single_basic_type.std140.bvec2_both
+dEQP-VK.ubo.single_basic_type.std140.bvec3_vertex
+dEQP-VK.ubo.single_basic_type.std140.bvec3_fragment
+dEQP-VK.ubo.single_basic_type.std140.bvec3_both
+dEQP-VK.ubo.single_basic_type.std140.bvec4_vertex
+dEQP-VK.ubo.single_basic_type.std140.bvec4_fragment
+dEQP-VK.ubo.single_basic_type.std140.bvec4_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat2_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat2_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat2_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat2_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat2_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat2_both
+dEQP-VK.ubo.single_basic_type.std140.highp_mat2_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_mat2_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_mat2_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat2_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat2_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat2_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat2_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat2_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat2_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat2_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat2_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat2_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat2_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat2_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat2_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat2_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat2_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat2_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat2_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat2_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat2_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat3_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat3_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat3_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat3_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat3_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat3_both
+dEQP-VK.ubo.single_basic_type.std140.highp_mat3_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_mat3_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_mat3_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat3_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat3_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat3_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat3_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat3_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat3_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat3_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat3_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat3_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat3_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat3_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat3_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat3_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat3_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat3_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat3_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat3_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat3_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat4_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat4_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat4_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat4_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat4_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat4_both
+dEQP-VK.ubo.single_basic_type.std140.highp_mat4_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_mat4_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_mat4_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat4_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat4_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat4_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat4_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat4_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat4_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat4_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat4_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat4_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat4_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat4_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat4_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat4_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat4_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat4_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat4_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat4_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat4_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat2x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat2x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat2x3_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat2x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat2x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat2x3_both
+dEQP-VK.ubo.single_basic_type.std140.highp_mat2x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_mat2x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_mat2x3_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat2x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat2x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat2x3_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat2x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat2x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat2x3_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat2x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat2x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat2x3_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat2x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat2x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat2x3_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat2x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat2x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat2x3_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat2x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat2x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat2x3_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat2x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat2x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat2x4_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat2x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat2x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat2x4_both
+dEQP-VK.ubo.single_basic_type.std140.highp_mat2x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_mat2x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_mat2x4_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat2x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat2x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat2x4_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat2x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat2x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat2x4_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat2x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat2x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat2x4_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat2x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat2x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat2x4_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat2x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat2x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat2x4_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat2x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat2x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat2x4_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat3x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat3x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat3x2_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat3x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat3x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat3x2_both
+dEQP-VK.ubo.single_basic_type.std140.highp_mat3x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_mat3x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_mat3x2_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat3x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat3x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat3x2_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat3x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat3x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat3x2_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat3x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat3x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat3x2_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat3x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat3x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat3x2_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat3x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat3x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat3x2_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat3x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat3x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat3x2_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat3x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat3x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat3x4_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat3x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat3x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat3x4_both
+dEQP-VK.ubo.single_basic_type.std140.highp_mat3x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_mat3x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_mat3x4_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat3x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat3x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat3x4_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat3x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat3x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat3x4_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat3x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat3x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat3x4_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat3x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat3x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat3x4_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat3x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat3x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat3x4_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat3x4_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat3x4_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat3x4_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat4x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat4x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat4x2_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat4x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat4x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat4x2_both
+dEQP-VK.ubo.single_basic_type.std140.highp_mat4x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_mat4x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_mat4x2_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat4x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat4x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat4x2_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat4x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat4x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat4x2_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat4x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat4x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat4x2_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat4x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat4x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat4x2_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat4x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat4x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat4x2_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat4x2_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat4x2_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat4x2_both
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat4x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat4x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.lowp_mat4x3_both
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat4x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat4x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.mediump_mat4x3_both
+dEQP-VK.ubo.single_basic_type.std140.highp_mat4x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.highp_mat4x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.highp_mat4x3_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat4x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat4x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_lowp_mat4x3_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat4x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat4x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_mediump_mat4x3_both
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat4x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat4x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.row_major_highp_mat4x3_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat4x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat4x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_lowp_mat4x3_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat4x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat4x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_mediump_mat4x3_both
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat4x3_vertex
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat4x3_fragment
+dEQP-VK.ubo.single_basic_type.std140.column_major_highp_mat4x3_both
+dEQP-VK.ubo.single_basic_array.std140.float_vertex
+dEQP-VK.ubo.single_basic_array.std140.float_fragment
+dEQP-VK.ubo.single_basic_array.std140.float_both
+dEQP-VK.ubo.single_basic_array.std140.vec2_vertex
+dEQP-VK.ubo.single_basic_array.std140.vec2_fragment
+dEQP-VK.ubo.single_basic_array.std140.vec2_both
+dEQP-VK.ubo.single_basic_array.std140.vec3_vertex
+dEQP-VK.ubo.single_basic_array.std140.vec3_fragment
+dEQP-VK.ubo.single_basic_array.std140.vec3_both
+dEQP-VK.ubo.single_basic_array.std140.vec4_vertex
+dEQP-VK.ubo.single_basic_array.std140.vec4_fragment
+dEQP-VK.ubo.single_basic_array.std140.vec4_both
+dEQP-VK.ubo.single_basic_array.std140.int_vertex
+dEQP-VK.ubo.single_basic_array.std140.int_fragment
+dEQP-VK.ubo.single_basic_array.std140.int_both
+dEQP-VK.ubo.single_basic_array.std140.ivec2_vertex
+dEQP-VK.ubo.single_basic_array.std140.ivec2_fragment
+dEQP-VK.ubo.single_basic_array.std140.ivec2_both
+dEQP-VK.ubo.single_basic_array.std140.ivec3_vertex
+dEQP-VK.ubo.single_basic_array.std140.ivec3_fragment
+dEQP-VK.ubo.single_basic_array.std140.ivec3_both
+dEQP-VK.ubo.single_basic_array.std140.ivec4_vertex
+dEQP-VK.ubo.single_basic_array.std140.ivec4_fragment
+dEQP-VK.ubo.single_basic_array.std140.ivec4_both
+dEQP-VK.ubo.single_basic_array.std140.uint_vertex
+dEQP-VK.ubo.single_basic_array.std140.uint_fragment
+dEQP-VK.ubo.single_basic_array.std140.uint_both
+dEQP-VK.ubo.single_basic_array.std140.uvec2_vertex
+dEQP-VK.ubo.single_basic_array.std140.uvec2_fragment
+dEQP-VK.ubo.single_basic_array.std140.uvec2_both
+dEQP-VK.ubo.single_basic_array.std140.uvec3_vertex
+dEQP-VK.ubo.single_basic_array.std140.uvec3_fragment
+dEQP-VK.ubo.single_basic_array.std140.uvec3_both
+dEQP-VK.ubo.single_basic_array.std140.uvec4_vertex
+dEQP-VK.ubo.single_basic_array.std140.uvec4_fragment
+dEQP-VK.ubo.single_basic_array.std140.uvec4_both
+dEQP-VK.ubo.single_basic_array.std140.bool_vertex
+dEQP-VK.ubo.single_basic_array.std140.bool_fragment
+dEQP-VK.ubo.single_basic_array.std140.bool_both
+dEQP-VK.ubo.single_basic_array.std140.bvec2_vertex
+dEQP-VK.ubo.single_basic_array.std140.bvec2_fragment
+dEQP-VK.ubo.single_basic_array.std140.bvec2_both
+dEQP-VK.ubo.single_basic_array.std140.bvec3_vertex
+dEQP-VK.ubo.single_basic_array.std140.bvec3_fragment
+dEQP-VK.ubo.single_basic_array.std140.bvec3_both
+dEQP-VK.ubo.single_basic_array.std140.bvec4_vertex
+dEQP-VK.ubo.single_basic_array.std140.bvec4_fragment
+dEQP-VK.ubo.single_basic_array.std140.bvec4_both
+dEQP-VK.ubo.single_basic_array.std140.mat2_vertex
+dEQP-VK.ubo.single_basic_array.std140.mat2_fragment
+dEQP-VK.ubo.single_basic_array.std140.mat2_both
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat2_vertex
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat2_fragment
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat2_both
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat2_vertex
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat2_fragment
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat2_both
+dEQP-VK.ubo.single_basic_array.std140.mat3_vertex
+dEQP-VK.ubo.single_basic_array.std140.mat3_fragment
+dEQP-VK.ubo.single_basic_array.std140.mat3_both
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat3_vertex
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat3_fragment
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat3_both
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat3_vertex
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat3_fragment
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat3_both
+dEQP-VK.ubo.single_basic_array.std140.mat4_vertex
+dEQP-VK.ubo.single_basic_array.std140.mat4_fragment
+dEQP-VK.ubo.single_basic_array.std140.mat4_both
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat4_vertex
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat4_fragment
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat4_both
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat4_vertex
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat4_fragment
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat4_both
+dEQP-VK.ubo.single_basic_array.std140.mat2x3_vertex
+dEQP-VK.ubo.single_basic_array.std140.mat2x3_fragment
+dEQP-VK.ubo.single_basic_array.std140.mat2x3_both
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat2x3_vertex
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat2x3_fragment
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat2x3_both
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat2x3_vertex
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat2x3_fragment
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat2x3_both
+dEQP-VK.ubo.single_basic_array.std140.mat2x4_vertex
+dEQP-VK.ubo.single_basic_array.std140.mat2x4_fragment
+dEQP-VK.ubo.single_basic_array.std140.mat2x4_both
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat2x4_vertex
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat2x4_fragment
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat2x4_both
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat2x4_vertex
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat2x4_fragment
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat2x4_both
+dEQP-VK.ubo.single_basic_array.std140.mat3x2_vertex
+dEQP-VK.ubo.single_basic_array.std140.mat3x2_fragment
+dEQP-VK.ubo.single_basic_array.std140.mat3x2_both
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat3x2_vertex
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat3x2_fragment
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat3x2_both
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat3x2_vertex
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat3x2_fragment
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat3x2_both
+dEQP-VK.ubo.single_basic_array.std140.mat3x4_vertex
+dEQP-VK.ubo.single_basic_array.std140.mat3x4_fragment
+dEQP-VK.ubo.single_basic_array.std140.mat3x4_both
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat3x4_vertex
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat3x4_fragment
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat3x4_both
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat3x4_vertex
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat3x4_fragment
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat3x4_both
+dEQP-VK.ubo.single_basic_array.std140.mat4x2_vertex
+dEQP-VK.ubo.single_basic_array.std140.mat4x2_fragment
+dEQP-VK.ubo.single_basic_array.std140.mat4x2_both
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat4x2_vertex
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat4x2_fragment
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat4x2_both
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat4x2_vertex
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat4x2_fragment
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat4x2_both
+dEQP-VK.ubo.single_basic_array.std140.mat4x3_vertex
+dEQP-VK.ubo.single_basic_array.std140.mat4x3_fragment
+dEQP-VK.ubo.single_basic_array.std140.mat4x3_both
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat4x3_vertex
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat4x3_fragment
+dEQP-VK.ubo.single_basic_array.std140.row_major_mat4x3_both
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat4x3_vertex
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat4x3_fragment
+dEQP-VK.ubo.single_basic_array.std140.column_major_mat4x3_both
+dEQP-VK.ubo.single_struct.per_block_buffer.std140_vertex
+dEQP-VK.ubo.single_struct.per_block_buffer.std140_fragment
+dEQP-VK.ubo.single_struct.per_block_buffer.std140_both
+dEQP-VK.ubo.single_struct.per_block_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.single_struct.per_block_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.single_struct.per_block_buffer.std140_instance_array_both
+dEQP-VK.ubo.single_struct.single_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.single_struct.single_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.single_struct.single_buffer.std140_instance_array_both
+dEQP-VK.ubo.single_struct_array.per_block_buffer.std140_vertex
+dEQP-VK.ubo.single_struct_array.per_block_buffer.std140_fragment
+dEQP-VK.ubo.single_struct_array.per_block_buffer.std140_both
+dEQP-VK.ubo.single_struct_array.per_block_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.single_struct_array.per_block_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.single_struct_array.per_block_buffer.std140_instance_array_both
+dEQP-VK.ubo.single_struct_array.single_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.single_struct_array.single_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.single_struct_array.single_buffer.std140_instance_array_both
+dEQP-VK.ubo.single_nested_struct.per_block_buffer.std140_vertex
+dEQP-VK.ubo.single_nested_struct.per_block_buffer.std140_fragment
+dEQP-VK.ubo.single_nested_struct.per_block_buffer.std140_both
+dEQP-VK.ubo.single_nested_struct.per_block_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.single_nested_struct.per_block_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.single_nested_struct.per_block_buffer.std140_instance_array_both
+dEQP-VK.ubo.single_nested_struct.single_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.single_nested_struct.single_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.single_nested_struct.single_buffer.std140_instance_array_both
+dEQP-VK.ubo.single_nested_struct_array.per_block_buffer.std140_vertex
+dEQP-VK.ubo.single_nested_struct_array.per_block_buffer.std140_fragment
+dEQP-VK.ubo.single_nested_struct_array.per_block_buffer.std140_both
+dEQP-VK.ubo.single_nested_struct_array.per_block_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.single_nested_struct_array.per_block_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.single_nested_struct_array.per_block_buffer.std140_instance_array_both
+dEQP-VK.ubo.single_nested_struct_array.single_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.single_nested_struct_array.single_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.single_nested_struct_array.single_buffer.std140_instance_array_both
+dEQP-VK.ubo.instance_array_basic_type.std140.float_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.float_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.float_both
+dEQP-VK.ubo.instance_array_basic_type.std140.vec2_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.vec2_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.vec2_both
+dEQP-VK.ubo.instance_array_basic_type.std140.vec3_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.vec3_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.vec3_both
+dEQP-VK.ubo.instance_array_basic_type.std140.vec4_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.vec4_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.vec4_both
+dEQP-VK.ubo.instance_array_basic_type.std140.int_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.int_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.int_both
+dEQP-VK.ubo.instance_array_basic_type.std140.ivec2_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.ivec2_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.ivec2_both
+dEQP-VK.ubo.instance_array_basic_type.std140.ivec3_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.ivec3_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.ivec3_both
+dEQP-VK.ubo.instance_array_basic_type.std140.ivec4_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.ivec4_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.ivec4_both
+dEQP-VK.ubo.instance_array_basic_type.std140.uint_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.uint_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.uint_both
+dEQP-VK.ubo.instance_array_basic_type.std140.uvec2_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.uvec2_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.uvec2_both
+dEQP-VK.ubo.instance_array_basic_type.std140.uvec3_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.uvec3_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.uvec3_both
+dEQP-VK.ubo.instance_array_basic_type.std140.uvec4_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.uvec4_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.uvec4_both
+dEQP-VK.ubo.instance_array_basic_type.std140.bool_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.bool_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.bool_both
+dEQP-VK.ubo.instance_array_basic_type.std140.bvec2_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.bvec2_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.bvec2_both
+dEQP-VK.ubo.instance_array_basic_type.std140.bvec3_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.bvec3_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.bvec3_both
+dEQP-VK.ubo.instance_array_basic_type.std140.bvec4_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.bvec4_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.bvec4_both
+dEQP-VK.ubo.instance_array_basic_type.std140.mat2_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.mat2_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.mat2_both
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat2_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat2_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat2_both
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat2_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat2_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat2_both
+dEQP-VK.ubo.instance_array_basic_type.std140.mat3_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.mat3_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.mat3_both
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat3_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat3_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat3_both
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat3_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat3_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat3_both
+dEQP-VK.ubo.instance_array_basic_type.std140.mat4_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.mat4_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.mat4_both
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat4_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat4_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat4_both
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat4_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat4_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat4_both
+dEQP-VK.ubo.instance_array_basic_type.std140.mat2x3_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.mat2x3_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.mat2x3_both
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat2x3_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat2x3_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat2x3_both
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat2x3_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat2x3_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat2x3_both
+dEQP-VK.ubo.instance_array_basic_type.std140.mat2x4_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.mat2x4_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.mat2x4_both
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat2x4_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat2x4_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat2x4_both
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat2x4_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat2x4_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat2x4_both
+dEQP-VK.ubo.instance_array_basic_type.std140.mat3x2_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.mat3x2_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.mat3x2_both
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat3x2_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat3x2_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat3x2_both
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat3x2_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat3x2_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat3x2_both
+dEQP-VK.ubo.instance_array_basic_type.std140.mat3x4_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.mat3x4_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.mat3x4_both
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat3x4_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat3x4_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat3x4_both
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat3x4_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat3x4_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat3x4_both
+dEQP-VK.ubo.instance_array_basic_type.std140.mat4x2_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.mat4x2_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.mat4x2_both
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat4x2_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat4x2_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat4x2_both
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat4x2_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat4x2_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat4x2_both
+dEQP-VK.ubo.instance_array_basic_type.std140.mat4x3_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.mat4x3_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.mat4x3_both
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat4x3_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat4x3_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.row_major_mat4x3_both
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat4x3_vertex
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat4x3_fragment
+dEQP-VK.ubo.instance_array_basic_type.std140.column_major_mat4x3_both
+dEQP-VK.ubo.multi_basic_types.per_block_buffer.std140_vertex
+dEQP-VK.ubo.multi_basic_types.per_block_buffer.std140_fragment
+dEQP-VK.ubo.multi_basic_types.per_block_buffer.std140_both
+dEQP-VK.ubo.multi_basic_types.per_block_buffer.std140_mixed
+dEQP-VK.ubo.multi_basic_types.per_block_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.multi_basic_types.per_block_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.multi_basic_types.per_block_buffer.std140_instance_array_both
+dEQP-VK.ubo.multi_basic_types.per_block_buffer.std140_instance_array_mixed
+dEQP-VK.ubo.multi_basic_types.single_buffer.std140_vertex
+dEQP-VK.ubo.multi_basic_types.single_buffer.std140_fragment
+dEQP-VK.ubo.multi_basic_types.single_buffer.std140_both
+dEQP-VK.ubo.multi_basic_types.single_buffer.std140_mixed
+dEQP-VK.ubo.multi_basic_types.single_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.multi_basic_types.single_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.multi_basic_types.single_buffer.std140_instance_array_both
+dEQP-VK.ubo.multi_basic_types.single_buffer.std140_instance_array_mixed
+dEQP-VK.ubo.multi_nested_struct.per_block_buffer.std140_vertex
+dEQP-VK.ubo.multi_nested_struct.per_block_buffer.std140_fragment
+dEQP-VK.ubo.multi_nested_struct.per_block_buffer.std140_both
+dEQP-VK.ubo.multi_nested_struct.per_block_buffer.std140_mixed
+dEQP-VK.ubo.multi_nested_struct.per_block_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.multi_nested_struct.per_block_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.multi_nested_struct.per_block_buffer.std140_instance_array_both
+dEQP-VK.ubo.multi_nested_struct.per_block_buffer.std140_instance_array_mixed
+dEQP-VK.ubo.multi_nested_struct.single_buffer.std140_vertex
+dEQP-VK.ubo.multi_nested_struct.single_buffer.std140_fragment
+dEQP-VK.ubo.multi_nested_struct.single_buffer.std140_both
+dEQP-VK.ubo.multi_nested_struct.single_buffer.std140_mixed
+dEQP-VK.ubo.multi_nested_struct.single_buffer.std140_instance_array_vertex
+dEQP-VK.ubo.multi_nested_struct.single_buffer.std140_instance_array_fragment
+dEQP-VK.ubo.multi_nested_struct.single_buffer.std140_instance_array_both
+dEQP-VK.ubo.multi_nested_struct.single_buffer.std140_instance_array_mixed
+dEQP-VK.ubo.random.scalar_types.0
+dEQP-VK.ubo.random.scalar_types.1
+dEQP-VK.ubo.random.scalar_types.2
+dEQP-VK.ubo.random.scalar_types.3
+dEQP-VK.ubo.random.scalar_types.4
+dEQP-VK.ubo.random.scalar_types.5
+dEQP-VK.ubo.random.scalar_types.6
+dEQP-VK.ubo.random.scalar_types.7
+dEQP-VK.ubo.random.scalar_types.8
+dEQP-VK.ubo.random.scalar_types.9
+dEQP-VK.ubo.random.scalar_types.10
+dEQP-VK.ubo.random.scalar_types.11
+dEQP-VK.ubo.random.scalar_types.12
+dEQP-VK.ubo.random.scalar_types.13
+dEQP-VK.ubo.random.scalar_types.14
+dEQP-VK.ubo.random.scalar_types.15
+dEQP-VK.ubo.random.scalar_types.16
+dEQP-VK.ubo.random.scalar_types.17
+dEQP-VK.ubo.random.scalar_types.18
+dEQP-VK.ubo.random.scalar_types.19
+dEQP-VK.ubo.random.scalar_types.20
+dEQP-VK.ubo.random.scalar_types.21
+dEQP-VK.ubo.random.scalar_types.22
+dEQP-VK.ubo.random.scalar_types.23
+dEQP-VK.ubo.random.scalar_types.24
+dEQP-VK.ubo.random.vector_types.0
+dEQP-VK.ubo.random.vector_types.1
+dEQP-VK.ubo.random.vector_types.2
+dEQP-VK.ubo.random.vector_types.3
+dEQP-VK.ubo.random.vector_types.4
+dEQP-VK.ubo.random.vector_types.5
+dEQP-VK.ubo.random.vector_types.6
+dEQP-VK.ubo.random.vector_types.7
+dEQP-VK.ubo.random.vector_types.8
+dEQP-VK.ubo.random.vector_types.9
+dEQP-VK.ubo.random.vector_types.10
+dEQP-VK.ubo.random.vector_types.11
+dEQP-VK.ubo.random.vector_types.12
+dEQP-VK.ubo.random.vector_types.13
+dEQP-VK.ubo.random.vector_types.14
+dEQP-VK.ubo.random.vector_types.15
+dEQP-VK.ubo.random.vector_types.16
+dEQP-VK.ubo.random.vector_types.17
+dEQP-VK.ubo.random.vector_types.18
+dEQP-VK.ubo.random.vector_types.19
+dEQP-VK.ubo.random.vector_types.20
+dEQP-VK.ubo.random.vector_types.21
+dEQP-VK.ubo.random.vector_types.22
+dEQP-VK.ubo.random.vector_types.23
+dEQP-VK.ubo.random.vector_types.24
+dEQP-VK.ubo.random.basic_types.0
+dEQP-VK.ubo.random.basic_types.1
+dEQP-VK.ubo.random.basic_types.2
+dEQP-VK.ubo.random.basic_types.3
+dEQP-VK.ubo.random.basic_types.4
+dEQP-VK.ubo.random.basic_types.5
+dEQP-VK.ubo.random.basic_types.6
+dEQP-VK.ubo.random.basic_types.7
+dEQP-VK.ubo.random.basic_types.8
+dEQP-VK.ubo.random.basic_types.9
+dEQP-VK.ubo.random.basic_types.10
+dEQP-VK.ubo.random.basic_types.11
+dEQP-VK.ubo.random.basic_types.12
+dEQP-VK.ubo.random.basic_types.13
+dEQP-VK.ubo.random.basic_types.14
+dEQP-VK.ubo.random.basic_types.15
+dEQP-VK.ubo.random.basic_types.16
+dEQP-VK.ubo.random.basic_types.17
+dEQP-VK.ubo.random.basic_types.18
+dEQP-VK.ubo.random.basic_types.19
+dEQP-VK.ubo.random.basic_types.20
+dEQP-VK.ubo.random.basic_types.21
+dEQP-VK.ubo.random.basic_types.22
+dEQP-VK.ubo.random.basic_types.23
+dEQP-VK.ubo.random.basic_types.24
+dEQP-VK.ubo.random.basic_arrays.0
+dEQP-VK.ubo.random.basic_arrays.1
+dEQP-VK.ubo.random.basic_arrays.2
+dEQP-VK.ubo.random.basic_arrays.3
+dEQP-VK.ubo.random.basic_arrays.4
+dEQP-VK.ubo.random.basic_arrays.5
+dEQP-VK.ubo.random.basic_arrays.6
+dEQP-VK.ubo.random.basic_arrays.7
+dEQP-VK.ubo.random.basic_arrays.8
+dEQP-VK.ubo.random.basic_arrays.9
+dEQP-VK.ubo.random.basic_arrays.10
+dEQP-VK.ubo.random.basic_arrays.11
+dEQP-VK.ubo.random.basic_arrays.12
+dEQP-VK.ubo.random.basic_arrays.13
+dEQP-VK.ubo.random.basic_arrays.14
+dEQP-VK.ubo.random.basic_arrays.15
+dEQP-VK.ubo.random.basic_arrays.16
+dEQP-VK.ubo.random.basic_arrays.17
+dEQP-VK.ubo.random.basic_arrays.18
+dEQP-VK.ubo.random.basic_arrays.19
+dEQP-VK.ubo.random.basic_arrays.20
+dEQP-VK.ubo.random.basic_arrays.21
+dEQP-VK.ubo.random.basic_arrays.22
+dEQP-VK.ubo.random.basic_arrays.23
+dEQP-VK.ubo.random.basic_arrays.24
+dEQP-VK.ubo.random.basic_instance_arrays.0
+dEQP-VK.ubo.random.basic_instance_arrays.1
+dEQP-VK.ubo.random.basic_instance_arrays.2
+dEQP-VK.ubo.random.basic_instance_arrays.3
+dEQP-VK.ubo.random.basic_instance_arrays.4
+dEQP-VK.ubo.random.basic_instance_arrays.5
+dEQP-VK.ubo.random.basic_instance_arrays.6
+dEQP-VK.ubo.random.basic_instance_arrays.7
+dEQP-VK.ubo.random.basic_instance_arrays.8
+dEQP-VK.ubo.random.basic_instance_arrays.9
+dEQP-VK.ubo.random.basic_instance_arrays.10
+dEQP-VK.ubo.random.basic_instance_arrays.11
+dEQP-VK.ubo.random.basic_instance_arrays.12
+dEQP-VK.ubo.random.basic_instance_arrays.13
+dEQP-VK.ubo.random.basic_instance_arrays.14
+dEQP-VK.ubo.random.basic_instance_arrays.15
+dEQP-VK.ubo.random.basic_instance_arrays.16
+dEQP-VK.ubo.random.basic_instance_arrays.17
+dEQP-VK.ubo.random.basic_instance_arrays.18
+dEQP-VK.ubo.random.basic_instance_arrays.19
+dEQP-VK.ubo.random.basic_instance_arrays.20
+dEQP-VK.ubo.random.basic_instance_arrays.21
+dEQP-VK.ubo.random.basic_instance_arrays.22
+dEQP-VK.ubo.random.basic_instance_arrays.23
+dEQP-VK.ubo.random.basic_instance_arrays.24
+dEQP-VK.ubo.random.nested_structs.0
+dEQP-VK.ubo.random.nested_structs.1
+dEQP-VK.ubo.random.nested_structs.2
+dEQP-VK.ubo.random.nested_structs.3
+dEQP-VK.ubo.random.nested_structs.4
+dEQP-VK.ubo.random.nested_structs.5
+dEQP-VK.ubo.random.nested_structs.6
+dEQP-VK.ubo.random.nested_structs.7
+dEQP-VK.ubo.random.nested_structs.8
+dEQP-VK.ubo.random.nested_structs.9
+dEQP-VK.ubo.random.nested_structs.10
+dEQP-VK.ubo.random.nested_structs.11
+dEQP-VK.ubo.random.nested_structs.12
+dEQP-VK.ubo.random.nested_structs.13
+dEQP-VK.ubo.random.nested_structs.14
+dEQP-VK.ubo.random.nested_structs.15
+dEQP-VK.ubo.random.nested_structs.16
+dEQP-VK.ubo.random.nested_structs.17
+dEQP-VK.ubo.random.nested_structs.18
+dEQP-VK.ubo.random.nested_structs.19
+dEQP-VK.ubo.random.nested_structs.20
+dEQP-VK.ubo.random.nested_structs.21
+dEQP-VK.ubo.random.nested_structs.22
+dEQP-VK.ubo.random.nested_structs.23
+dEQP-VK.ubo.random.nested_structs.24
+dEQP-VK.ubo.random.nested_structs_arrays.0
+dEQP-VK.ubo.random.nested_structs_arrays.1
+dEQP-VK.ubo.random.nested_structs_arrays.2
+dEQP-VK.ubo.random.nested_structs_arrays.3
+dEQP-VK.ubo.random.nested_structs_arrays.4
+dEQP-VK.ubo.random.nested_structs_arrays.5
+dEQP-VK.ubo.random.nested_structs_arrays.6
+dEQP-VK.ubo.random.nested_structs_arrays.7
+dEQP-VK.ubo.random.nested_structs_arrays.8
+dEQP-VK.ubo.random.nested_structs_arrays.9
+dEQP-VK.ubo.random.nested_structs_arrays.10
+dEQP-VK.ubo.random.nested_structs_arrays.11
+dEQP-VK.ubo.random.nested_structs_arrays.12
+dEQP-VK.ubo.random.nested_structs_arrays.13
+dEQP-VK.ubo.random.nested_structs_arrays.14
+dEQP-VK.ubo.random.nested_structs_arrays.15
+dEQP-VK.ubo.random.nested_structs_arrays.16
+dEQP-VK.ubo.random.nested_structs_arrays.17
+dEQP-VK.ubo.random.nested_structs_arrays.18
+dEQP-VK.ubo.random.nested_structs_arrays.19
+dEQP-VK.ubo.random.nested_structs_arrays.20
+dEQP-VK.ubo.random.nested_structs_arrays.21
+dEQP-VK.ubo.random.nested_structs_arrays.22
+dEQP-VK.ubo.random.nested_structs_arrays.23
+dEQP-VK.ubo.random.nested_structs_arrays.24
+dEQP-VK.ubo.random.nested_structs_instance_arrays.0
+dEQP-VK.ubo.random.nested_structs_instance_arrays.1
+dEQP-VK.ubo.random.nested_structs_instance_arrays.2
+dEQP-VK.ubo.random.nested_structs_instance_arrays.3
+dEQP-VK.ubo.random.nested_structs_instance_arrays.4
+dEQP-VK.ubo.random.nested_structs_instance_arrays.5
+dEQP-VK.ubo.random.nested_structs_instance_arrays.6
+dEQP-VK.ubo.random.nested_structs_instance_arrays.7
+dEQP-VK.ubo.random.nested_structs_instance_arrays.8
+dEQP-VK.ubo.random.nested_structs_instance_arrays.9
+dEQP-VK.ubo.random.nested_structs_instance_arrays.10
+dEQP-VK.ubo.random.nested_structs_instance_arrays.11
+dEQP-VK.ubo.random.nested_structs_instance_arrays.12
+dEQP-VK.ubo.random.nested_structs_instance_arrays.13
+dEQP-VK.ubo.random.nested_structs_instance_arrays.14
+dEQP-VK.ubo.random.nested_structs_instance_arrays.15
+dEQP-VK.ubo.random.nested_structs_instance_arrays.16
+dEQP-VK.ubo.random.nested_structs_instance_arrays.17
+dEQP-VK.ubo.random.nested_structs_instance_arrays.18
+dEQP-VK.ubo.random.nested_structs_instance_arrays.19
+dEQP-VK.ubo.random.nested_structs_instance_arrays.20
+dEQP-VK.ubo.random.nested_structs_instance_arrays.21
+dEQP-VK.ubo.random.nested_structs_instance_arrays.22
+dEQP-VK.ubo.random.nested_structs_instance_arrays.23
+dEQP-VK.ubo.random.nested_structs_instance_arrays.24
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.0
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.1
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.2
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.3
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.4
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.5
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.6
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.7
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.8
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.9
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.10
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.11
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.12
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.13
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.14
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.15
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.16
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.17
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.18
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.19
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.20
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.21
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.22
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.23
+dEQP-VK.ubo.random.nested_structs_arrays_instance_arrays.24
+dEQP-VK.ubo.random.all_per_block_buffers.0
+dEQP-VK.ubo.random.all_per_block_buffers.1
+dEQP-VK.ubo.random.all_per_block_buffers.2
+dEQP-VK.ubo.random.all_per_block_buffers.3
+dEQP-VK.ubo.random.all_per_block_buffers.4
+dEQP-VK.ubo.random.all_per_block_buffers.5
+dEQP-VK.ubo.random.all_per_block_buffers.6
+dEQP-VK.ubo.random.all_per_block_buffers.7
+dEQP-VK.ubo.random.all_per_block_buffers.8
+dEQP-VK.ubo.random.all_per_block_buffers.9
+dEQP-VK.ubo.random.all_per_block_buffers.10
+dEQP-VK.ubo.random.all_per_block_buffers.11
+dEQP-VK.ubo.random.all_per_block_buffers.12
+dEQP-VK.ubo.random.all_per_block_buffers.13
+dEQP-VK.ubo.random.all_per_block_buffers.14
+dEQP-VK.ubo.random.all_per_block_buffers.15
+dEQP-VK.ubo.random.all_per_block_buffers.16
+dEQP-VK.ubo.random.all_per_block_buffers.17
+dEQP-VK.ubo.random.all_per_block_buffers.18
+dEQP-VK.ubo.random.all_per_block_buffers.19
+dEQP-VK.ubo.random.all_per_block_buffers.20
+dEQP-VK.ubo.random.all_per_block_buffers.21
+dEQP-VK.ubo.random.all_per_block_buffers.22
+dEQP-VK.ubo.random.all_per_block_buffers.23
+dEQP-VK.ubo.random.all_per_block_buffers.24
+dEQP-VK.ubo.random.all_per_block_buffers.25
+dEQP-VK.ubo.random.all_per_block_buffers.26
+dEQP-VK.ubo.random.all_per_block_buffers.27
+dEQP-VK.ubo.random.all_per_block_buffers.28
+dEQP-VK.ubo.random.all_per_block_buffers.29
+dEQP-VK.ubo.random.all_per_block_buffers.30
+dEQP-VK.ubo.random.all_per_block_buffers.31
+dEQP-VK.ubo.random.all_per_block_buffers.32
+dEQP-VK.ubo.random.all_per_block_buffers.33
+dEQP-VK.ubo.random.all_per_block_buffers.34
+dEQP-VK.ubo.random.all_per_block_buffers.35
+dEQP-VK.ubo.random.all_per_block_buffers.36
+dEQP-VK.ubo.random.all_per_block_buffers.37
+dEQP-VK.ubo.random.all_per_block_buffers.38
+dEQP-VK.ubo.random.all_per_block_buffers.39
+dEQP-VK.ubo.random.all_per_block_buffers.40
+dEQP-VK.ubo.random.all_per_block_buffers.41
+dEQP-VK.ubo.random.all_per_block_buffers.42
+dEQP-VK.ubo.random.all_per_block_buffers.43
+dEQP-VK.ubo.random.all_per_block_buffers.44
+dEQP-VK.ubo.random.all_per_block_buffers.45
+dEQP-VK.ubo.random.all_per_block_buffers.46
+dEQP-VK.ubo.random.all_per_block_buffers.47
+dEQP-VK.ubo.random.all_per_block_buffers.48
+dEQP-VK.ubo.random.all_per_block_buffers.49
+dEQP-VK.ubo.random.all_shared_buffer.0
+dEQP-VK.ubo.random.all_shared_buffer.1
+dEQP-VK.ubo.random.all_shared_buffer.2
+dEQP-VK.ubo.random.all_shared_buffer.3
+dEQP-VK.ubo.random.all_shared_buffer.4
+dEQP-VK.ubo.random.all_shared_buffer.5
+dEQP-VK.ubo.random.all_shared_buffer.6
+dEQP-VK.ubo.random.all_shared_buffer.7
+dEQP-VK.ubo.random.all_shared_buffer.8
+dEQP-VK.ubo.random.all_shared_buffer.9
+dEQP-VK.ubo.random.all_shared_buffer.10
+dEQP-VK.ubo.random.all_shared_buffer.11
+dEQP-VK.ubo.random.all_shared_buffer.12
+dEQP-VK.ubo.random.all_shared_buffer.13
+dEQP-VK.ubo.random.all_shared_buffer.14
+dEQP-VK.ubo.random.all_shared_buffer.15
+dEQP-VK.ubo.random.all_shared_buffer.16
+dEQP-VK.ubo.random.all_shared_buffer.17
+dEQP-VK.ubo.random.all_shared_buffer.18
+dEQP-VK.ubo.random.all_shared_buffer.19
+dEQP-VK.ubo.random.all_shared_buffer.20
+dEQP-VK.ubo.random.all_shared_buffer.21
+dEQP-VK.ubo.random.all_shared_buffer.22
+dEQP-VK.ubo.random.all_shared_buffer.23
+dEQP-VK.ubo.random.all_shared_buffer.24
+dEQP-VK.ubo.random.all_shared_buffer.25
+dEQP-VK.ubo.random.all_shared_buffer.26
+dEQP-VK.ubo.random.all_shared_buffer.27
+dEQP-VK.ubo.random.all_shared_buffer.28
+dEQP-VK.ubo.random.all_shared_buffer.29
+dEQP-VK.ubo.random.all_shared_buffer.30
+dEQP-VK.ubo.random.all_shared_buffer.31
+dEQP-VK.ubo.random.all_shared_buffer.32
+dEQP-VK.ubo.random.all_shared_buffer.33
+dEQP-VK.ubo.random.all_shared_buffer.34
+dEQP-VK.ubo.random.all_shared_buffer.35
+dEQP-VK.ubo.random.all_shared_buffer.36
+dEQP-VK.ubo.random.all_shared_buffer.37
+dEQP-VK.ubo.random.all_shared_buffer.38
+dEQP-VK.ubo.random.all_shared_buffer.39
+dEQP-VK.ubo.random.all_shared_buffer.40
+dEQP-VK.ubo.random.all_shared_buffer.41
+dEQP-VK.ubo.random.all_shared_buffer.42
+dEQP-VK.ubo.random.all_shared_buffer.43
+dEQP-VK.ubo.random.all_shared_buffer.44
+dEQP-VK.ubo.random.all_shared_buffer.45
+dEQP-VK.ubo.random.all_shared_buffer.46
+dEQP-VK.ubo.random.all_shared_buffer.47
+dEQP-VK.ubo.random.all_shared_buffer.48
+dEQP-VK.ubo.random.all_shared_buffer.49
+dEQP-VK.dynamic_state.vp_state.viewport
+dEQP-VK.dynamic_state.vp_state.scissor
+dEQP-VK.dynamic_state.vp_state.viewport_array
+dEQP-VK.dynamic_state.rs_state.depth_bias
+dEQP-VK.dynamic_state.rs_state.depth_bias_clamp
+dEQP-VK.dynamic_state.rs_state.line_width
+dEQP-VK.dynamic_state.cb_state.blend_constants
+dEQP-VK.dynamic_state.ds_state.depth_bounds
+dEQP-VK.dynamic_state.ds_state.stencil_params_basic_1
+dEQP-VK.dynamic_state.ds_state.stencil_params_basic_2
+dEQP-VK.dynamic_state.ds_state.stencil_params_advanced
+dEQP-VK.dynamic_state.general_state.state_switch
+dEQP-VK.dynamic_state.general_state.bind_order
+dEQP-VK.dynamic_state.general_state.state_persistence
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_float
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_float
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_float
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_vec2
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_vec2
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_vec2
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_vec3
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_vec3
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_vec3
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_vec4
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_vec4
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_vec4
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_int
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_int
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_int
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_ivec2
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_ivec2
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_ivec2
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_ivec3
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_ivec3
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_ivec3
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_ivec4
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_ivec4
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_ivec4
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_uint
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_uint
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_uint
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_uvec2
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_uvec2
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_uvec2
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_uvec3
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_uvec3
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_uvec3
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_uvec4
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_uvec4
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_uvec4
+dEQP-VK.ssbo.layout.single_basic_type.std140.bool
+dEQP-VK.ssbo.layout.single_basic_type.std140.bvec2
+dEQP-VK.ssbo.layout.single_basic_type.std140.bvec3
+dEQP-VK.ssbo.layout.single_basic_type.std140.bvec4
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_lowp_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_mediump_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_highp_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_lowp_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_mediump_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_highp_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_lowp_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_mediump_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_highp_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_lowp_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_mediump_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_highp_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_lowp_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_mediump_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_highp_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_lowp_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_mediump_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_highp_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_lowp_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_mediump_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_highp_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_lowp_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_mediump_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_highp_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_lowp_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_mediump_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_highp_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_lowp_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_mediump_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_highp_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_lowp_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_mediump_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_highp_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_lowp_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_mediump_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_highp_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_lowp_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_mediump_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_highp_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_lowp_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_mediump_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_highp_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_lowp_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_mediump_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_highp_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_lowp_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_mediump_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_highp_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std140.lowp_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.mediump_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.highp_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_lowp_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_mediump_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.row_major_highp_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_lowp_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_mediump_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std140.column_major_highp_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_float
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_float
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_float
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_vec2
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_vec2
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_vec2
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_vec3
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_vec3
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_vec3
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_vec4
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_vec4
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_vec4
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_int
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_int
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_int
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_ivec2
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_ivec2
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_ivec2
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_ivec3
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_ivec3
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_ivec3
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_ivec4
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_ivec4
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_ivec4
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_uint
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_uint
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_uint
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_uvec2
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_uvec2
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_uvec2
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_uvec3
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_uvec3
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_uvec3
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_uvec4
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_uvec4
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_uvec4
+dEQP-VK.ssbo.layout.single_basic_type.std430.bool
+dEQP-VK.ssbo.layout.single_basic_type.std430.bvec2
+dEQP-VK.ssbo.layout.single_basic_type.std430.bvec3
+dEQP-VK.ssbo.layout.single_basic_type.std430.bvec4
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_lowp_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_mediump_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_highp_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_lowp_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_mediump_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_highp_mat2
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_lowp_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_mediump_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_highp_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_lowp_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_mediump_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_highp_mat3
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_lowp_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_mediump_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_highp_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_lowp_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_mediump_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_highp_mat4
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_lowp_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_mediump_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_highp_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_lowp_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_mediump_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_highp_mat2x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_lowp_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_mediump_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_highp_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_lowp_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_mediump_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_highp_mat2x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_lowp_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_mediump_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_highp_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_lowp_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_mediump_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_highp_mat3x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_lowp_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_mediump_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_highp_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_lowp_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_mediump_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_highp_mat3x4
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_lowp_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_mediump_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_highp_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_lowp_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_mediump_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_highp_mat4x2
+dEQP-VK.ssbo.layout.single_basic_type.std430.lowp_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.mediump_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.highp_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_lowp_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_mediump_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.row_major_highp_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_lowp_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_mediump_mat4x3
+dEQP-VK.ssbo.layout.single_basic_type.std430.column_major_highp_mat4x3
+dEQP-VK.ssbo.layout.single_basic_array.std140.float
+dEQP-VK.ssbo.layout.single_basic_array.std140.vec2
+dEQP-VK.ssbo.layout.single_basic_array.std140.vec3
+dEQP-VK.ssbo.layout.single_basic_array.std140.vec4
+dEQP-VK.ssbo.layout.single_basic_array.std140.int
+dEQP-VK.ssbo.layout.single_basic_array.std140.ivec2
+dEQP-VK.ssbo.layout.single_basic_array.std140.ivec3
+dEQP-VK.ssbo.layout.single_basic_array.std140.ivec4
+dEQP-VK.ssbo.layout.single_basic_array.std140.uint
+dEQP-VK.ssbo.layout.single_basic_array.std140.uvec2
+dEQP-VK.ssbo.layout.single_basic_array.std140.uvec3
+dEQP-VK.ssbo.layout.single_basic_array.std140.uvec4
+dEQP-VK.ssbo.layout.single_basic_array.std140.bool
+dEQP-VK.ssbo.layout.single_basic_array.std140.bvec2
+dEQP-VK.ssbo.layout.single_basic_array.std140.bvec3
+dEQP-VK.ssbo.layout.single_basic_array.std140.bvec4
+dEQP-VK.ssbo.layout.single_basic_array.std140.mat2
+dEQP-VK.ssbo.layout.single_basic_array.std140.row_major_mat2
+dEQP-VK.ssbo.layout.single_basic_array.std140.column_major_mat2
+dEQP-VK.ssbo.layout.single_basic_array.std140.mat3
+dEQP-VK.ssbo.layout.single_basic_array.std140.row_major_mat3
+dEQP-VK.ssbo.layout.single_basic_array.std140.column_major_mat3
+dEQP-VK.ssbo.layout.single_basic_array.std140.mat4
+dEQP-VK.ssbo.layout.single_basic_array.std140.row_major_mat4
+dEQP-VK.ssbo.layout.single_basic_array.std140.column_major_mat4
+dEQP-VK.ssbo.layout.single_basic_array.std140.mat2x3
+dEQP-VK.ssbo.layout.single_basic_array.std140.row_major_mat2x3
+dEQP-VK.ssbo.layout.single_basic_array.std140.column_major_mat2x3
+dEQP-VK.ssbo.layout.single_basic_array.std140.mat2x4
+dEQP-VK.ssbo.layout.single_basic_array.std140.row_major_mat2x4
+dEQP-VK.ssbo.layout.single_basic_array.std140.column_major_mat2x4
+dEQP-VK.ssbo.layout.single_basic_array.std140.mat3x2
+dEQP-VK.ssbo.layout.single_basic_array.std140.row_major_mat3x2
+dEQP-VK.ssbo.layout.single_basic_array.std140.column_major_mat3x2
+dEQP-VK.ssbo.layout.single_basic_array.std140.mat3x4
+dEQP-VK.ssbo.layout.single_basic_array.std140.row_major_mat3x4
+dEQP-VK.ssbo.layout.single_basic_array.std140.column_major_mat3x4
+dEQP-VK.ssbo.layout.single_basic_array.std140.mat4x2
+dEQP-VK.ssbo.layout.single_basic_array.std140.row_major_mat4x2
+dEQP-VK.ssbo.layout.single_basic_array.std140.column_major_mat4x2
+dEQP-VK.ssbo.layout.single_basic_array.std140.mat4x3
+dEQP-VK.ssbo.layout.single_basic_array.std140.row_major_mat4x3
+dEQP-VK.ssbo.layout.single_basic_array.std140.column_major_mat4x3
+dEQP-VK.ssbo.layout.single_basic_array.std430.float
+dEQP-VK.ssbo.layout.single_basic_array.std430.vec2
+dEQP-VK.ssbo.layout.single_basic_array.std430.vec3
+dEQP-VK.ssbo.layout.single_basic_array.std430.vec4
+dEQP-VK.ssbo.layout.single_basic_array.std430.int
+dEQP-VK.ssbo.layout.single_basic_array.std430.ivec2
+dEQP-VK.ssbo.layout.single_basic_array.std430.ivec3
+dEQP-VK.ssbo.layout.single_basic_array.std430.ivec4
+dEQP-VK.ssbo.layout.single_basic_array.std430.uint
+dEQP-VK.ssbo.layout.single_basic_array.std430.uvec2
+dEQP-VK.ssbo.layout.single_basic_array.std430.uvec3
+dEQP-VK.ssbo.layout.single_basic_array.std430.uvec4
+dEQP-VK.ssbo.layout.single_basic_array.std430.bool
+dEQP-VK.ssbo.layout.single_basic_array.std430.bvec2
+dEQP-VK.ssbo.layout.single_basic_array.std430.bvec3
+dEQP-VK.ssbo.layout.single_basic_array.std430.bvec4
+dEQP-VK.ssbo.layout.single_basic_array.std430.mat2
+dEQP-VK.ssbo.layout.single_basic_array.std430.row_major_mat2
+dEQP-VK.ssbo.layout.single_basic_array.std430.column_major_mat2
+dEQP-VK.ssbo.layout.single_basic_array.std430.mat3
+dEQP-VK.ssbo.layout.single_basic_array.std430.row_major_mat3
+dEQP-VK.ssbo.layout.single_basic_array.std430.column_major_mat3
+dEQP-VK.ssbo.layout.single_basic_array.std430.mat4
+dEQP-VK.ssbo.layout.single_basic_array.std430.row_major_mat4
+dEQP-VK.ssbo.layout.single_basic_array.std430.column_major_mat4
+dEQP-VK.ssbo.layout.single_basic_array.std430.mat2x3
+dEQP-VK.ssbo.layout.single_basic_array.std430.row_major_mat2x3
+dEQP-VK.ssbo.layout.single_basic_array.std430.column_major_mat2x3
+dEQP-VK.ssbo.layout.single_basic_array.std430.mat2x4
+dEQP-VK.ssbo.layout.single_basic_array.std430.row_major_mat2x4
+dEQP-VK.ssbo.layout.single_basic_array.std430.column_major_mat2x4
+dEQP-VK.ssbo.layout.single_basic_array.std430.mat3x2
+dEQP-VK.ssbo.layout.single_basic_array.std430.row_major_mat3x2
+dEQP-VK.ssbo.layout.single_basic_array.std430.column_major_mat3x2
+dEQP-VK.ssbo.layout.single_basic_array.std430.mat3x4
+dEQP-VK.ssbo.layout.single_basic_array.std430.row_major_mat3x4
+dEQP-VK.ssbo.layout.single_basic_array.std430.column_major_mat3x4
+dEQP-VK.ssbo.layout.single_basic_array.std430.mat4x2
+dEQP-VK.ssbo.layout.single_basic_array.std430.row_major_mat4x2
+dEQP-VK.ssbo.layout.single_basic_array.std430.column_major_mat4x2
+dEQP-VK.ssbo.layout.single_basic_array.std430.mat4x3
+dEQP-VK.ssbo.layout.single_basic_array.std430.row_major_mat4x3
+dEQP-VK.ssbo.layout.single_basic_array.std430.column_major_mat4x3
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.float
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.vec2
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.vec3
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.vec4
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.int
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.ivec2
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.ivec3
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.ivec4
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.uint
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.uvec2
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.uvec3
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.uvec4
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.bool
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.bvec2
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.bvec3
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.bvec4
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.mat2
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.row_major_mat2
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.column_major_mat2
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.mat3
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.row_major_mat3
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.column_major_mat3
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.mat4
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.row_major_mat4
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.column_major_mat4
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.mat2x3
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.row_major_mat2x3
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.column_major_mat2x3
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.mat2x4
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.row_major_mat2x4
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.column_major_mat2x4
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.mat3x2
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.row_major_mat3x2
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.column_major_mat3x2
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.mat3x4
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.row_major_mat3x4
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.column_major_mat3x4
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.mat4x2
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.row_major_mat4x2
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.column_major_mat4x2
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.mat4x3
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.row_major_mat4x3
+dEQP-VK.ssbo.layout.basic_unsized_array.std140.column_major_mat4x3
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.float
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.vec2
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.vec3
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.vec4
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.int
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.ivec2
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.ivec3
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.ivec4
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.uint
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.uvec2
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.uvec3
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.uvec4
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.bool
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.bvec2
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.bvec3
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.bvec4
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.mat2
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.row_major_mat2
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.column_major_mat2
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.mat3
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.row_major_mat3
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.column_major_mat3
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.mat4
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.row_major_mat4
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.column_major_mat4
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.mat2x3
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.row_major_mat2x3
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.column_major_mat2x3
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.mat2x4
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.row_major_mat2x4
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.column_major_mat2x4
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.mat3x2
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.row_major_mat3x2
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.column_major_mat3x2
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.mat3x4
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.row_major_mat3x4
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.column_major_mat3x4
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.mat4x2
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.row_major_mat4x2
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.column_major_mat4x2
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.mat4x3
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.row_major_mat4x3
+dEQP-VK.ssbo.layout.basic_unsized_array.std430.column_major_mat4x3
+dEQP-VK.ssbo.layout.2_level_array.std140.float
+dEQP-VK.ssbo.layout.2_level_array.std140.vec2
+dEQP-VK.ssbo.layout.2_level_array.std140.vec3
+dEQP-VK.ssbo.layout.2_level_array.std140.vec4
+dEQP-VK.ssbo.layout.2_level_array.std140.int
+dEQP-VK.ssbo.layout.2_level_array.std140.ivec2
+dEQP-VK.ssbo.layout.2_level_array.std140.ivec3
+dEQP-VK.ssbo.layout.2_level_array.std140.ivec4
+dEQP-VK.ssbo.layout.2_level_array.std140.uint
+dEQP-VK.ssbo.layout.2_level_array.std140.uvec2
+dEQP-VK.ssbo.layout.2_level_array.std140.uvec3
+dEQP-VK.ssbo.layout.2_level_array.std140.uvec4
+dEQP-VK.ssbo.layout.2_level_array.std140.bool
+dEQP-VK.ssbo.layout.2_level_array.std140.bvec2
+dEQP-VK.ssbo.layout.2_level_array.std140.bvec3
+dEQP-VK.ssbo.layout.2_level_array.std140.bvec4
+dEQP-VK.ssbo.layout.2_level_array.std140.mat2
+dEQP-VK.ssbo.layout.2_level_array.std140.row_major_mat2
+dEQP-VK.ssbo.layout.2_level_array.std140.column_major_mat2
+dEQP-VK.ssbo.layout.2_level_array.std140.mat3
+dEQP-VK.ssbo.layout.2_level_array.std140.row_major_mat3
+dEQP-VK.ssbo.layout.2_level_array.std140.column_major_mat3
+dEQP-VK.ssbo.layout.2_level_array.std140.mat4
+dEQP-VK.ssbo.layout.2_level_array.std140.row_major_mat4
+dEQP-VK.ssbo.layout.2_level_array.std140.column_major_mat4
+dEQP-VK.ssbo.layout.2_level_array.std140.mat2x3
+dEQP-VK.ssbo.layout.2_level_array.std140.row_major_mat2x3
+dEQP-VK.ssbo.layout.2_level_array.std140.column_major_mat2x3
+dEQP-VK.ssbo.layout.2_level_array.std140.mat2x4
+dEQP-VK.ssbo.layout.2_level_array.std140.row_major_mat2x4
+dEQP-VK.ssbo.layout.2_level_array.std140.column_major_mat2x4
+dEQP-VK.ssbo.layout.2_level_array.std140.mat3x2
+dEQP-VK.ssbo.layout.2_level_array.std140.row_major_mat3x2
+dEQP-VK.ssbo.layout.2_level_array.std140.column_major_mat3x2
+dEQP-VK.ssbo.layout.2_level_array.std140.mat3x4
+dEQP-VK.ssbo.layout.2_level_array.std140.row_major_mat3x4
+dEQP-VK.ssbo.layout.2_level_array.std140.column_major_mat3x4
+dEQP-VK.ssbo.layout.2_level_array.std140.mat4x2
+dEQP-VK.ssbo.layout.2_level_array.std140.row_major_mat4x2
+dEQP-VK.ssbo.layout.2_level_array.std140.column_major_mat4x2
+dEQP-VK.ssbo.layout.2_level_array.std140.mat4x3
+dEQP-VK.ssbo.layout.2_level_array.std140.row_major_mat4x3
+dEQP-VK.ssbo.layout.2_level_array.std140.column_major_mat4x3
+dEQP-VK.ssbo.layout.2_level_array.std430.float
+dEQP-VK.ssbo.layout.2_level_array.std430.vec2
+dEQP-VK.ssbo.layout.2_level_array.std430.vec3
+dEQP-VK.ssbo.layout.2_level_array.std430.vec4
+dEQP-VK.ssbo.layout.2_level_array.std430.int
+dEQP-VK.ssbo.layout.2_level_array.std430.ivec2
+dEQP-VK.ssbo.layout.2_level_array.std430.ivec3
+dEQP-VK.ssbo.layout.2_level_array.std430.ivec4
+dEQP-VK.ssbo.layout.2_level_array.std430.uint
+dEQP-VK.ssbo.layout.2_level_array.std430.uvec2
+dEQP-VK.ssbo.layout.2_level_array.std430.uvec3
+dEQP-VK.ssbo.layout.2_level_array.std430.uvec4
+dEQP-VK.ssbo.layout.2_level_array.std430.bool
+dEQP-VK.ssbo.layout.2_level_array.std430.bvec2
+dEQP-VK.ssbo.layout.2_level_array.std430.bvec3
+dEQP-VK.ssbo.layout.2_level_array.std430.bvec4
+dEQP-VK.ssbo.layout.2_level_array.std430.mat2
+dEQP-VK.ssbo.layout.2_level_array.std430.row_major_mat2
+dEQP-VK.ssbo.layout.2_level_array.std430.column_major_mat2
+dEQP-VK.ssbo.layout.2_level_array.std430.mat3
+dEQP-VK.ssbo.layout.2_level_array.std430.row_major_mat3
+dEQP-VK.ssbo.layout.2_level_array.std430.column_major_mat3
+dEQP-VK.ssbo.layout.2_level_array.std430.mat4
+dEQP-VK.ssbo.layout.2_level_array.std430.row_major_mat4
+dEQP-VK.ssbo.layout.2_level_array.std430.column_major_mat4
+dEQP-VK.ssbo.layout.2_level_array.std430.mat2x3
+dEQP-VK.ssbo.layout.2_level_array.std430.row_major_mat2x3
+dEQP-VK.ssbo.layout.2_level_array.std430.column_major_mat2x3
+dEQP-VK.ssbo.layout.2_level_array.std430.mat2x4
+dEQP-VK.ssbo.layout.2_level_array.std430.row_major_mat2x4
+dEQP-VK.ssbo.layout.2_level_array.std430.column_major_mat2x4
+dEQP-VK.ssbo.layout.2_level_array.std430.mat3x2
+dEQP-VK.ssbo.layout.2_level_array.std430.row_major_mat3x2
+dEQP-VK.ssbo.layout.2_level_array.std430.column_major_mat3x2
+dEQP-VK.ssbo.layout.2_level_array.std430.mat3x4
+dEQP-VK.ssbo.layout.2_level_array.std430.row_major_mat3x4
+dEQP-VK.ssbo.layout.2_level_array.std430.column_major_mat3x4
+dEQP-VK.ssbo.layout.2_level_array.std430.mat4x2
+dEQP-VK.ssbo.layout.2_level_array.std430.row_major_mat4x2
+dEQP-VK.ssbo.layout.2_level_array.std430.column_major_mat4x2
+dEQP-VK.ssbo.layout.2_level_array.std430.mat4x3
+dEQP-VK.ssbo.layout.2_level_array.std430.row_major_mat4x3
+dEQP-VK.ssbo.layout.2_level_array.std430.column_major_mat4x3
+dEQP-VK.ssbo.layout.3_level_array.std140.float
+dEQP-VK.ssbo.layout.3_level_array.std140.vec2
+dEQP-VK.ssbo.layout.3_level_array.std140.vec3
+dEQP-VK.ssbo.layout.3_level_array.std140.vec4
+dEQP-VK.ssbo.layout.3_level_array.std140.int
+dEQP-VK.ssbo.layout.3_level_array.std140.ivec2
+dEQP-VK.ssbo.layout.3_level_array.std140.ivec3
+dEQP-VK.ssbo.layout.3_level_array.std140.ivec4
+dEQP-VK.ssbo.layout.3_level_array.std140.uint
+dEQP-VK.ssbo.layout.3_level_array.std140.uvec2
+dEQP-VK.ssbo.layout.3_level_array.std140.uvec3
+dEQP-VK.ssbo.layout.3_level_array.std140.uvec4
+dEQP-VK.ssbo.layout.3_level_array.std140.bool
+dEQP-VK.ssbo.layout.3_level_array.std140.bvec2
+dEQP-VK.ssbo.layout.3_level_array.std140.bvec3
+dEQP-VK.ssbo.layout.3_level_array.std140.bvec4
+dEQP-VK.ssbo.layout.3_level_array.std140.mat2
+dEQP-VK.ssbo.layout.3_level_array.std140.row_major_mat2
+dEQP-VK.ssbo.layout.3_level_array.std140.column_major_mat2
+dEQP-VK.ssbo.layout.3_level_array.std140.mat3
+dEQP-VK.ssbo.layout.3_level_array.std140.row_major_mat3
+dEQP-VK.ssbo.layout.3_level_array.std140.column_major_mat3
+dEQP-VK.ssbo.layout.3_level_array.std140.mat4
+dEQP-VK.ssbo.layout.3_level_array.std140.row_major_mat4
+dEQP-VK.ssbo.layout.3_level_array.std140.column_major_mat4
+dEQP-VK.ssbo.layout.3_level_array.std140.mat2x3
+dEQP-VK.ssbo.layout.3_level_array.std140.row_major_mat2x3
+dEQP-VK.ssbo.layout.3_level_array.std140.column_major_mat2x3
+dEQP-VK.ssbo.layout.3_level_array.std140.mat2x4
+dEQP-VK.ssbo.layout.3_level_array.std140.row_major_mat2x4
+dEQP-VK.ssbo.layout.3_level_array.std140.column_major_mat2x4
+dEQP-VK.ssbo.layout.3_level_array.std140.mat3x2
+dEQP-VK.ssbo.layout.3_level_array.std140.row_major_mat3x2
+dEQP-VK.ssbo.layout.3_level_array.std140.column_major_mat3x2
+dEQP-VK.ssbo.layout.3_level_array.std140.mat3x4
+dEQP-VK.ssbo.layout.3_level_array.std140.row_major_mat3x4
+dEQP-VK.ssbo.layout.3_level_array.std140.column_major_mat3x4
+dEQP-VK.ssbo.layout.3_level_array.std140.mat4x2
+dEQP-VK.ssbo.layout.3_level_array.std140.row_major_mat4x2
+dEQP-VK.ssbo.layout.3_level_array.std140.column_major_mat4x2
+dEQP-VK.ssbo.layout.3_level_array.std140.mat4x3
+dEQP-VK.ssbo.layout.3_level_array.std140.row_major_mat4x3
+dEQP-VK.ssbo.layout.3_level_array.std140.column_major_mat4x3
+dEQP-VK.ssbo.layout.3_level_array.std430.float
+dEQP-VK.ssbo.layout.3_level_array.std430.vec2
+dEQP-VK.ssbo.layout.3_level_array.std430.vec3
+dEQP-VK.ssbo.layout.3_level_array.std430.vec4
+dEQP-VK.ssbo.layout.3_level_array.std430.int
+dEQP-VK.ssbo.layout.3_level_array.std430.ivec2
+dEQP-VK.ssbo.layout.3_level_array.std430.ivec3
+dEQP-VK.ssbo.layout.3_level_array.std430.ivec4
+dEQP-VK.ssbo.layout.3_level_array.std430.uint
+dEQP-VK.ssbo.layout.3_level_array.std430.uvec2
+dEQP-VK.ssbo.layout.3_level_array.std430.uvec3
+dEQP-VK.ssbo.layout.3_level_array.std430.uvec4
+dEQP-VK.ssbo.layout.3_level_array.std430.bool
+dEQP-VK.ssbo.layout.3_level_array.std430.bvec2
+dEQP-VK.ssbo.layout.3_level_array.std430.bvec3
+dEQP-VK.ssbo.layout.3_level_array.std430.bvec4
+dEQP-VK.ssbo.layout.3_level_array.std430.mat2
+dEQP-VK.ssbo.layout.3_level_array.std430.row_major_mat2
+dEQP-VK.ssbo.layout.3_level_array.std430.column_major_mat2
+dEQP-VK.ssbo.layout.3_level_array.std430.mat3
+dEQP-VK.ssbo.layout.3_level_array.std430.row_major_mat3
+dEQP-VK.ssbo.layout.3_level_array.std430.column_major_mat3
+dEQP-VK.ssbo.layout.3_level_array.std430.mat4
+dEQP-VK.ssbo.layout.3_level_array.std430.row_major_mat4
+dEQP-VK.ssbo.layout.3_level_array.std430.column_major_mat4
+dEQP-VK.ssbo.layout.3_level_array.std430.mat2x3
+dEQP-VK.ssbo.layout.3_level_array.std430.row_major_mat2x3
+dEQP-VK.ssbo.layout.3_level_array.std430.column_major_mat2x3
+dEQP-VK.ssbo.layout.3_level_array.std430.mat2x4
+dEQP-VK.ssbo.layout.3_level_array.std430.row_major_mat2x4
+dEQP-VK.ssbo.layout.3_level_array.std430.column_major_mat2x4
+dEQP-VK.ssbo.layout.3_level_array.std430.mat3x2
+dEQP-VK.ssbo.layout.3_level_array.std430.row_major_mat3x2
+dEQP-VK.ssbo.layout.3_level_array.std430.column_major_mat3x2
+dEQP-VK.ssbo.layout.3_level_array.std430.mat3x4
+dEQP-VK.ssbo.layout.3_level_array.std430.row_major_mat3x4
+dEQP-VK.ssbo.layout.3_level_array.std430.column_major_mat3x4
+dEQP-VK.ssbo.layout.3_level_array.std430.mat4x2
+dEQP-VK.ssbo.layout.3_level_array.std430.row_major_mat4x2
+dEQP-VK.ssbo.layout.3_level_array.std430.column_major_mat4x2
+dEQP-VK.ssbo.layout.3_level_array.std430.mat4x3
+dEQP-VK.ssbo.layout.3_level_array.std430.row_major_mat4x3
+dEQP-VK.ssbo.layout.3_level_array.std430.column_major_mat4x3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.float
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.vec2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.vec3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.vec4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.int
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.ivec2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.ivec3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.ivec4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.uint
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.uvec2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.uvec3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.uvec4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.bool
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.bvec2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.bvec3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.bvec4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.mat2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.row_major_mat2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.column_major_mat2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.mat3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.row_major_mat3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.column_major_mat3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.mat4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.row_major_mat4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.column_major_mat4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.mat2x3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.row_major_mat2x3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.column_major_mat2x3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.mat2x4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.row_major_mat2x4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.column_major_mat2x4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.mat3x2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.row_major_mat3x2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.column_major_mat3x2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.mat3x4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.row_major_mat3x4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.column_major_mat3x4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.mat4x2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.row_major_mat4x2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.column_major_mat4x2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.mat4x3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.row_major_mat4x3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std140.column_major_mat4x3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.float
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.vec2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.vec3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.vec4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.int
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.ivec2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.ivec3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.ivec4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.uint
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.uvec2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.uvec3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.uvec4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.bool
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.bvec2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.bvec3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.bvec4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.mat2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.row_major_mat2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.column_major_mat2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.mat3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.row_major_mat3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.column_major_mat3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.mat4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.row_major_mat4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.column_major_mat4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.mat2x3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.row_major_mat2x3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.column_major_mat2x3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.mat2x4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.row_major_mat2x4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.column_major_mat2x4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.mat3x2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.row_major_mat3x2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.column_major_mat3x2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.mat3x4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.row_major_mat3x4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.column_major_mat3x4
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.mat4x2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.row_major_mat4x2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.column_major_mat4x2
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.mat4x3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.row_major_mat4x3
+dEQP-VK.ssbo.layout.3_level_unsized_array.std430.column_major_mat4x3
+dEQP-VK.ssbo.layout.single_struct.per_block_buffer.std140
+dEQP-VK.ssbo.layout.single_struct.per_block_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.single_struct.per_block_buffer.std430
+dEQP-VK.ssbo.layout.single_struct.per_block_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.single_struct.single_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.single_struct.single_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.single_struct_array.per_block_buffer.std140
+dEQP-VK.ssbo.layout.single_struct_array.per_block_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.single_struct_array.per_block_buffer.std430
+dEQP-VK.ssbo.layout.single_struct_array.per_block_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.single_struct_array.single_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.single_struct_array.single_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.single_nested_struct.per_block_buffer.std140
+dEQP-VK.ssbo.layout.single_nested_struct.per_block_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.single_nested_struct.per_block_buffer.std430
+dEQP-VK.ssbo.layout.single_nested_struct.per_block_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.single_nested_struct.single_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.single_nested_struct.single_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.single_nested_struct_array.per_block_buffer.std140
+dEQP-VK.ssbo.layout.single_nested_struct_array.per_block_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.single_nested_struct_array.per_block_buffer.std430
+dEQP-VK.ssbo.layout.single_nested_struct_array.per_block_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.single_nested_struct_array.single_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.single_nested_struct_array.single_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.unsized_struct_array.per_block_buffer.std140
+dEQP-VK.ssbo.layout.unsized_struct_array.per_block_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.unsized_struct_array.per_block_buffer.std430
+dEQP-VK.ssbo.layout.unsized_struct_array.per_block_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.unsized_struct_array.single_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.unsized_struct_array.single_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.2_level_unsized_struct_array.per_block_buffer.std140
+dEQP-VK.ssbo.layout.2_level_unsized_struct_array.per_block_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.2_level_unsized_struct_array.per_block_buffer.std430
+dEQP-VK.ssbo.layout.2_level_unsized_struct_array.per_block_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.2_level_unsized_struct_array.single_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.2_level_unsized_struct_array.single_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.unsized_nested_struct_array.per_block_buffer.std140
+dEQP-VK.ssbo.layout.unsized_nested_struct_array.per_block_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.unsized_nested_struct_array.per_block_buffer.std430
+dEQP-VK.ssbo.layout.unsized_nested_struct_array.per_block_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.unsized_nested_struct_array.single_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.unsized_nested_struct_array.single_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.float
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.vec2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.vec3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.vec4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.int
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.ivec2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.ivec3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.ivec4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.uint
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.uvec2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.uvec3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.uvec4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.bool
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.bvec2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.bvec3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.bvec4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.mat2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.row_major_mat2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.column_major_mat2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.mat3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.row_major_mat3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.column_major_mat3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.mat4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.row_major_mat4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.column_major_mat4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.mat2x3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.row_major_mat2x3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.column_major_mat2x3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.mat2x4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.row_major_mat2x4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.column_major_mat2x4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.mat3x2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.row_major_mat3x2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.column_major_mat3x2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.mat3x4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.row_major_mat3x4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.column_major_mat3x4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.mat4x2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.row_major_mat4x2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.column_major_mat4x2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.mat4x3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.row_major_mat4x3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std140.column_major_mat4x3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.float
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.vec2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.vec3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.vec4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.int
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.ivec2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.ivec3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.ivec4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.uint
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.uvec2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.uvec3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.uvec4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.bool
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.bvec2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.bvec3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.bvec4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.mat2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.row_major_mat2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.column_major_mat2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.mat3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.row_major_mat3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.column_major_mat3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.mat4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.row_major_mat4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.column_major_mat4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.mat2x3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.row_major_mat2x3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.column_major_mat2x3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.mat2x4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.row_major_mat2x4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.column_major_mat2x4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.mat3x2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.row_major_mat3x2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.column_major_mat3x2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.mat3x4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.row_major_mat3x4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.column_major_mat3x4
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.mat4x2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.row_major_mat4x2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.column_major_mat4x2
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.mat4x3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.row_major_mat4x3
+dEQP-VK.ssbo.layout.instance_array_basic_type.std430.column_major_mat4x3
+dEQP-VK.ssbo.layout.multi_basic_types.per_block_buffer.std140
+dEQP-VK.ssbo.layout.multi_basic_types.per_block_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.multi_basic_types.per_block_buffer.std430
+dEQP-VK.ssbo.layout.multi_basic_types.per_block_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.multi_basic_types.single_buffer.std140
+dEQP-VK.ssbo.layout.multi_basic_types.single_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.multi_basic_types.single_buffer.std430
+dEQP-VK.ssbo.layout.multi_basic_types.single_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.multi_nested_struct.per_block_buffer.std140
+dEQP-VK.ssbo.layout.multi_nested_struct.per_block_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.multi_nested_struct.per_block_buffer.std430
+dEQP-VK.ssbo.layout.multi_nested_struct.per_block_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.multi_nested_struct.single_buffer.std140
+dEQP-VK.ssbo.layout.multi_nested_struct.single_buffer.std140_instance_array
+dEQP-VK.ssbo.layout.multi_nested_struct.single_buffer.std430
+dEQP-VK.ssbo.layout.multi_nested_struct.single_buffer.std430_instance_array
+dEQP-VK.ssbo.layout.random.scalar_types.0
+dEQP-VK.ssbo.layout.random.scalar_types.1
+dEQP-VK.ssbo.layout.random.scalar_types.2
+dEQP-VK.ssbo.layout.random.scalar_types.3
+dEQP-VK.ssbo.layout.random.scalar_types.4
+dEQP-VK.ssbo.layout.random.scalar_types.5
+dEQP-VK.ssbo.layout.random.scalar_types.6
+dEQP-VK.ssbo.layout.random.scalar_types.7
+dEQP-VK.ssbo.layout.random.scalar_types.8
+dEQP-VK.ssbo.layout.random.scalar_types.9
+dEQP-VK.ssbo.layout.random.scalar_types.10
+dEQP-VK.ssbo.layout.random.scalar_types.11
+dEQP-VK.ssbo.layout.random.scalar_types.12
+dEQP-VK.ssbo.layout.random.scalar_types.13
+dEQP-VK.ssbo.layout.random.scalar_types.14
+dEQP-VK.ssbo.layout.random.scalar_types.15
+dEQP-VK.ssbo.layout.random.scalar_types.16
+dEQP-VK.ssbo.layout.random.scalar_types.17
+dEQP-VK.ssbo.layout.random.scalar_types.18
+dEQP-VK.ssbo.layout.random.scalar_types.19
+dEQP-VK.ssbo.layout.random.scalar_types.20
+dEQP-VK.ssbo.layout.random.scalar_types.21
+dEQP-VK.ssbo.layout.random.scalar_types.22
+dEQP-VK.ssbo.layout.random.scalar_types.23
+dEQP-VK.ssbo.layout.random.scalar_types.24
+dEQP-VK.ssbo.layout.random.vector_types.0
+dEQP-VK.ssbo.layout.random.vector_types.1
+dEQP-VK.ssbo.layout.random.vector_types.2
+dEQP-VK.ssbo.layout.random.vector_types.3
+dEQP-VK.ssbo.layout.random.vector_types.4
+dEQP-VK.ssbo.layout.random.vector_types.5
+dEQP-VK.ssbo.layout.random.vector_types.6
+dEQP-VK.ssbo.layout.random.vector_types.7
+dEQP-VK.ssbo.layout.random.vector_types.8
+dEQP-VK.ssbo.layout.random.vector_types.9
+dEQP-VK.ssbo.layout.random.vector_types.10
+dEQP-VK.ssbo.layout.random.vector_types.11
+dEQP-VK.ssbo.layout.random.vector_types.12
+dEQP-VK.ssbo.layout.random.vector_types.13
+dEQP-VK.ssbo.layout.random.vector_types.14
+dEQP-VK.ssbo.layout.random.vector_types.15
+dEQP-VK.ssbo.layout.random.vector_types.16
+dEQP-VK.ssbo.layout.random.vector_types.17
+dEQP-VK.ssbo.layout.random.vector_types.18
+dEQP-VK.ssbo.layout.random.vector_types.19
+dEQP-VK.ssbo.layout.random.vector_types.20
+dEQP-VK.ssbo.layout.random.vector_types.21
+dEQP-VK.ssbo.layout.random.vector_types.22
+dEQP-VK.ssbo.layout.random.vector_types.23
+dEQP-VK.ssbo.layout.random.vector_types.24
+dEQP-VK.ssbo.layout.random.basic_types.0
+dEQP-VK.ssbo.layout.random.basic_types.1
+dEQP-VK.ssbo.layout.random.basic_types.2
+dEQP-VK.ssbo.layout.random.basic_types.3
+dEQP-VK.ssbo.layout.random.basic_types.4
+dEQP-VK.ssbo.layout.random.basic_types.5
+dEQP-VK.ssbo.layout.random.basic_types.6
+dEQP-VK.ssbo.layout.random.basic_types.7
+dEQP-VK.ssbo.layout.random.basic_types.8
+dEQP-VK.ssbo.layout.random.basic_types.9
+dEQP-VK.ssbo.layout.random.basic_types.10
+dEQP-VK.ssbo.layout.random.basic_types.11
+dEQP-VK.ssbo.layout.random.basic_types.12
+dEQP-VK.ssbo.layout.random.basic_types.13
+dEQP-VK.ssbo.layout.random.basic_types.14
+dEQP-VK.ssbo.layout.random.basic_types.15
+dEQP-VK.ssbo.layout.random.basic_types.16
+dEQP-VK.ssbo.layout.random.basic_types.17
+dEQP-VK.ssbo.layout.random.basic_types.18
+dEQP-VK.ssbo.layout.random.basic_types.19
+dEQP-VK.ssbo.layout.random.basic_types.20
+dEQP-VK.ssbo.layout.random.basic_types.21
+dEQP-VK.ssbo.layout.random.basic_types.22
+dEQP-VK.ssbo.layout.random.basic_types.23
+dEQP-VK.ssbo.layout.random.basic_types.24
+dEQP-VK.ssbo.layout.random.basic_arrays.0
+dEQP-VK.ssbo.layout.random.basic_arrays.1
+dEQP-VK.ssbo.layout.random.basic_arrays.2
+dEQP-VK.ssbo.layout.random.basic_arrays.3
+dEQP-VK.ssbo.layout.random.basic_arrays.4
+dEQP-VK.ssbo.layout.random.basic_arrays.5
+dEQP-VK.ssbo.layout.random.basic_arrays.6
+dEQP-VK.ssbo.layout.random.basic_arrays.7
+dEQP-VK.ssbo.layout.random.basic_arrays.8
+dEQP-VK.ssbo.layout.random.basic_arrays.9
+dEQP-VK.ssbo.layout.random.basic_arrays.10
+dEQP-VK.ssbo.layout.random.basic_arrays.11
+dEQP-VK.ssbo.layout.random.basic_arrays.12
+dEQP-VK.ssbo.layout.random.basic_arrays.13
+dEQP-VK.ssbo.layout.random.basic_arrays.14
+dEQP-VK.ssbo.layout.random.basic_arrays.15
+dEQP-VK.ssbo.layout.random.basic_arrays.16
+dEQP-VK.ssbo.layout.random.basic_arrays.17
+dEQP-VK.ssbo.layout.random.basic_arrays.18
+dEQP-VK.ssbo.layout.random.basic_arrays.19
+dEQP-VK.ssbo.layout.random.basic_arrays.20
+dEQP-VK.ssbo.layout.random.basic_arrays.21
+dEQP-VK.ssbo.layout.random.basic_arrays.22
+dEQP-VK.ssbo.layout.random.basic_arrays.23
+dEQP-VK.ssbo.layout.random.basic_arrays.24
+dEQP-VK.ssbo.layout.random.unsized_arrays.0
+dEQP-VK.ssbo.layout.random.unsized_arrays.1
+dEQP-VK.ssbo.layout.random.unsized_arrays.2
+dEQP-VK.ssbo.layout.random.unsized_arrays.3
+dEQP-VK.ssbo.layout.random.unsized_arrays.4
+dEQP-VK.ssbo.layout.random.unsized_arrays.5
+dEQP-VK.ssbo.layout.random.unsized_arrays.6
+dEQP-VK.ssbo.layout.random.unsized_arrays.7
+dEQP-VK.ssbo.layout.random.unsized_arrays.8
+dEQP-VK.ssbo.layout.random.unsized_arrays.9
+dEQP-VK.ssbo.layout.random.unsized_arrays.10
+dEQP-VK.ssbo.layout.random.unsized_arrays.11
+dEQP-VK.ssbo.layout.random.unsized_arrays.12
+dEQP-VK.ssbo.layout.random.unsized_arrays.13
+dEQP-VK.ssbo.layout.random.unsized_arrays.14
+dEQP-VK.ssbo.layout.random.unsized_arrays.15
+dEQP-VK.ssbo.layout.random.unsized_arrays.16
+dEQP-VK.ssbo.layout.random.unsized_arrays.17
+dEQP-VK.ssbo.layout.random.unsized_arrays.18
+dEQP-VK.ssbo.layout.random.unsized_arrays.19
+dEQP-VK.ssbo.layout.random.unsized_arrays.20
+dEQP-VK.ssbo.layout.random.unsized_arrays.21
+dEQP-VK.ssbo.layout.random.unsized_arrays.22
+dEQP-VK.ssbo.layout.random.unsized_arrays.23
+dEQP-VK.ssbo.layout.random.unsized_arrays.24
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.0
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.1
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.2
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.3
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.4
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.5
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.6
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.7
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.8
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.9
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.10
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.11
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.12
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.13
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.14
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.15
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.16
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.17
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.18
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.19
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.20
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.21
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.22
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.23
+dEQP-VK.ssbo.layout.random.arrays_of_arrays.24
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.0
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.1
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.2
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.3
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.4
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.5
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.6
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.7
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.8
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.9
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.10
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.11
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.12
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.13
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.14
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.15
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.16
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.17
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.18
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.19
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.20
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.21
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.22
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.23
+dEQP-VK.ssbo.layout.random.basic_instance_arrays.24
+dEQP-VK.ssbo.layout.random.nested_structs.0
+dEQP-VK.ssbo.layout.random.nested_structs.1
+dEQP-VK.ssbo.layout.random.nested_structs.2
+dEQP-VK.ssbo.layout.random.nested_structs.3
+dEQP-VK.ssbo.layout.random.nested_structs.4
+dEQP-VK.ssbo.layout.random.nested_structs.5
+dEQP-VK.ssbo.layout.random.nested_structs.6
+dEQP-VK.ssbo.layout.random.nested_structs.7
+dEQP-VK.ssbo.layout.random.nested_structs.8
+dEQP-VK.ssbo.layout.random.nested_structs.9
+dEQP-VK.ssbo.layout.random.nested_structs.10
+dEQP-VK.ssbo.layout.random.nested_structs.11
+dEQP-VK.ssbo.layout.random.nested_structs.12
+dEQP-VK.ssbo.layout.random.nested_structs.13
+dEQP-VK.ssbo.layout.random.nested_structs.14
+dEQP-VK.ssbo.layout.random.nested_structs.15
+dEQP-VK.ssbo.layout.random.nested_structs.16
+dEQP-VK.ssbo.layout.random.nested_structs.17
+dEQP-VK.ssbo.layout.random.nested_structs.18
+dEQP-VK.ssbo.layout.random.nested_structs.19
+dEQP-VK.ssbo.layout.random.nested_structs.20
+dEQP-VK.ssbo.layout.random.nested_structs.21
+dEQP-VK.ssbo.layout.random.nested_structs.22
+dEQP-VK.ssbo.layout.random.nested_structs.23
+dEQP-VK.ssbo.layout.random.nested_structs.24
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.0
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.1
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.2
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.3
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.4
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.5
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.6
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.7
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.8
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.9
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.10
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.11
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.12
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.13
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.14
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.15
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.16
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.17
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.18
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.19
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.20
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.21
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.22
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.23
+dEQP-VK.ssbo.layout.random.nested_structs_arrays.24
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.0
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.1
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.2
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.3
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.4
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.5
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.6
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.7
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.8
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.9
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.10
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.11
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.12
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.13
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.14
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.15
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.16
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.17
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.18
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.19
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.20
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.21
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.22
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.23
+dEQP-VK.ssbo.layout.random.nested_structs_instance_arrays.24
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.0
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.1
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.2
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.3
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.4
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.5
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.6
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.7
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.8
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.9
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.10
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.11
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.12
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.13
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.14
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.15
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.16
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.17
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.18
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.19
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.20
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.21
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.22
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.23
+dEQP-VK.ssbo.layout.random.nested_structs_arrays_instance_arrays.24
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.0
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.1
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.2
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.3
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.4
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.5
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.6
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.7
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.8
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.9
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.10
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.11
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.12
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.13
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.14
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.15
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.16
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.17
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.18
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.19
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.20
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.21
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.22
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.23
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.24
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.25
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.26
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.27
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.28
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.29
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.30
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.31
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.32
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.33
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.34
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.35
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.36
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.37
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.38
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.39
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.40
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.41
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.42
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.43
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.44
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.45
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.46
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.47
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.48
+dEQP-VK.ssbo.layout.random.all_per_block_buffers.49
+dEQP-VK.ssbo.layout.random.all_shared_buffer.0
+dEQP-VK.ssbo.layout.random.all_shared_buffer.1
+dEQP-VK.ssbo.layout.random.all_shared_buffer.2
+dEQP-VK.ssbo.layout.random.all_shared_buffer.3
+dEQP-VK.ssbo.layout.random.all_shared_buffer.4
+dEQP-VK.ssbo.layout.random.all_shared_buffer.5
+dEQP-VK.ssbo.layout.random.all_shared_buffer.6
+dEQP-VK.ssbo.layout.random.all_shared_buffer.7
+dEQP-VK.ssbo.layout.random.all_shared_buffer.8
+dEQP-VK.ssbo.layout.random.all_shared_buffer.9
+dEQP-VK.ssbo.layout.random.all_shared_buffer.10
+dEQP-VK.ssbo.layout.random.all_shared_buffer.11
+dEQP-VK.ssbo.layout.random.all_shared_buffer.12
+dEQP-VK.ssbo.layout.random.all_shared_buffer.13
+dEQP-VK.ssbo.layout.random.all_shared_buffer.14
+dEQP-VK.ssbo.layout.random.all_shared_buffer.15
+dEQP-VK.ssbo.layout.random.all_shared_buffer.16
+dEQP-VK.ssbo.layout.random.all_shared_buffer.17
+dEQP-VK.ssbo.layout.random.all_shared_buffer.18
+dEQP-VK.ssbo.layout.random.all_shared_buffer.19
+dEQP-VK.ssbo.layout.random.all_shared_buffer.20
+dEQP-VK.ssbo.layout.random.all_shared_buffer.21
+dEQP-VK.ssbo.layout.random.all_shared_buffer.22
+dEQP-VK.ssbo.layout.random.all_shared_buffer.23
+dEQP-VK.ssbo.layout.random.all_shared_buffer.24
+dEQP-VK.ssbo.layout.random.all_shared_buffer.25
+dEQP-VK.ssbo.layout.random.all_shared_buffer.26
+dEQP-VK.ssbo.layout.random.all_shared_buffer.27
+dEQP-VK.ssbo.layout.random.all_shared_buffer.28
+dEQP-VK.ssbo.layout.random.all_shared_buffer.29
+dEQP-VK.ssbo.layout.random.all_shared_buffer.30
+dEQP-VK.ssbo.layout.random.all_shared_buffer.31
+dEQP-VK.ssbo.layout.random.all_shared_buffer.32
+dEQP-VK.ssbo.layout.random.all_shared_buffer.33
+dEQP-VK.ssbo.layout.random.all_shared_buffer.34
+dEQP-VK.ssbo.layout.random.all_shared_buffer.35
+dEQP-VK.ssbo.layout.random.all_shared_buffer.36
+dEQP-VK.ssbo.layout.random.all_shared_buffer.37
+dEQP-VK.ssbo.layout.random.all_shared_buffer.38
+dEQP-VK.ssbo.layout.random.all_shared_buffer.39
+dEQP-VK.ssbo.layout.random.all_shared_buffer.40
+dEQP-VK.ssbo.layout.random.all_shared_buffer.41
+dEQP-VK.ssbo.layout.random.all_shared_buffer.42
+dEQP-VK.ssbo.layout.random.all_shared_buffer.43
+dEQP-VK.ssbo.layout.random.all_shared_buffer.44
+dEQP-VK.ssbo.layout.random.all_shared_buffer.45
+dEQP-VK.ssbo.layout.random.all_shared_buffer.46
+dEQP-VK.ssbo.layout.random.all_shared_buffer.47
+dEQP-VK.ssbo.layout.random.all_shared_buffer.48
+dEQP-VK.ssbo.layout.random.all_shared_buffer.49
+dEQP-VK.query_pool.occlusion_query.basic_conservative
+dEQP-VK.query_pool.occlusion_query.basic_precise
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_32_wait_queue_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_32_wait_queue_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_32_wait_queue_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_32_wait_queue_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_32_wait_query_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_32_wait_query_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_32_wait_query_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_32_wait_query_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_64_wait_queue_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_64_wait_queue_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_64_wait_queue_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_64_wait_queue_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_64_wait_query_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_64_wait_query_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_64_wait_query_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_64_wait_query_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_32_wait_queue_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_32_wait_queue_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_32_wait_queue_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_32_wait_queue_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_32_wait_query_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_32_wait_query_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_32_wait_query_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_32_wait_query_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_64_wait_queue_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_64_wait_queue_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_64_wait_queue_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_64_wait_queue_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_64_wait_query_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_conservative_size_64_wait_query_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_64_wait_query_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_conservative_size_64_wait_query_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_32_wait_queue_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_32_wait_queue_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_32_wait_queue_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_32_wait_queue_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_32_wait_query_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_32_wait_query_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_32_wait_query_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_32_wait_query_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_64_wait_queue_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_64_wait_queue_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_64_wait_queue_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_64_wait_queue_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_64_wait_query_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_64_wait_query_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_64_wait_query_without_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_64_wait_query_with_availability_draw_points
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_32_wait_queue_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_32_wait_queue_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_32_wait_queue_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_32_wait_queue_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_32_wait_query_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_32_wait_query_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_32_wait_query_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_32_wait_query_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_64_wait_queue_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_64_wait_queue_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_64_wait_queue_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_64_wait_queue_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_64_wait_query_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_precise_size_64_wait_query_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_64_wait_query_without_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.copy_results_precise_size_64_wait_query_with_availability_draw_triangles
+dEQP-VK.query_pool.occlusion_query.get_results_size_32_stride_4_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_32_stride_8_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_32_stride_12_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_32_stride_16_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_32_stride_20_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_32_stride_52_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_32_stride_4096_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_64_stride_8_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_64_stride_16_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_64_stride_24_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_64_stride_32_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_64_stride_40_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_64_stride_104_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_64_stride_8192_without_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_32_stride_8_with_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_32_stride_12_with_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_32_stride_16_with_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_32_stride_20_with_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_32_stride_52_with_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_32_stride_4096_with_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_64_stride_16_with_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_64_stride_24_with_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_64_stride_32_with_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_64_stride_40_with_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_64_stride_104_with_availability
+dEQP-VK.query_pool.occlusion_query.get_results_size_64_stride_8192_with_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_4_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_8_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_12_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_16_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_20_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_52_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_4096_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_8_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_16_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_24_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_32_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_40_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_104_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_8192_without_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_8_with_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_12_with_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_16_with_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_20_with_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_52_with_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_32_stride_4096_with_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_16_with_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_24_with_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_32_with_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_40_with_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_104_with_availability
+dEQP-VK.query_pool.occlusion_query.copy_results_size_64_stride_8192_with_availability
+dEQP-VK.draw.simple_draw.simple_draw_triangle_list
+dEQP-VK.draw.simple_draw.simple_draw_triangle_strip
+dEQP-VK.draw.simple_draw.simple_draw_instanced_triangle_list
+dEQP-VK.draw.simple_draw.simple_draw_instanced_triangle_strip
+dEQP-VK.draw.indexed_draw.draw_indexed_triangle_list
+dEQP-VK.draw.indexed_draw.draw_indexed_triangle_strip
+dEQP-VK.draw.indexed_draw.draw_instanced_indexed_triangle_list
+dEQP-VK.draw.indexed_draw.draw_instanced_indexed_triangle_strip
+dEQP-VK.draw.indirect_draw.indirect_draw_triangle_list
+dEQP-VK.draw.indirect_draw.indirect_draw_triangle_strip
+dEQP-VK.draw.indirect_draw.indirect_draw_instanced_triangle_list
+dEQP-VK.draw.indirect_draw.indirect_draw_instanced_triangle_strip
+dEQP-VK.compute.basic.empty_shader
+dEQP-VK.compute.basic.ubo_to_ssbo_single_invocation
+dEQP-VK.compute.basic.ubo_to_ssbo_single_group
+dEQP-VK.compute.basic.ubo_to_ssbo_multiple_invocations
+dEQP-VK.compute.basic.ubo_to_ssbo_multiple_groups
+dEQP-VK.compute.basic.copy_ssbo_single_invocation
+dEQP-VK.compute.basic.copy_ssbo_multiple_invocations
+dEQP-VK.compute.basic.copy_ssbo_multiple_groups
+dEQP-VK.compute.basic.ssbo_rw_single_invocation
+dEQP-VK.compute.basic.ssbo_rw_multiple_groups
+dEQP-VK.compute.basic.ssbo_unsized_arr_single_invocation
+dEQP-VK.compute.basic.ssbo_unsized_arr_multiple_groups
+dEQP-VK.compute.basic.write_multiple_arr_single_invocation
+dEQP-VK.compute.basic.write_multiple_arr_multiple_groups
+dEQP-VK.compute.basic.write_multiple_unsized_arr_single_invocation
+dEQP-VK.compute.basic.write_multiple_unsized_arr_multiple_groups
+dEQP-VK.compute.basic.ssbo_local_barrier_single_invocation
+dEQP-VK.compute.basic.ssbo_local_barrier_single_group
+dEQP-VK.compute.basic.ssbo_local_barrier_multiple_groups
+dEQP-VK.compute.basic.ssbo_cmd_barrier_single
+dEQP-VK.compute.basic.ssbo_cmd_barrier_multiple
+dEQP-VK.compute.basic.shared_var_single_invocation
+dEQP-VK.compute.basic.shared_var_single_group
+dEQP-VK.compute.basic.shared_var_multiple_invocations
+dEQP-VK.compute.basic.shared_var_multiple_groups
+dEQP-VK.compute.basic.shared_atomic_op_single_invocation
+dEQP-VK.compute.basic.shared_atomic_op_single_group
+dEQP-VK.compute.basic.shared_atomic_op_multiple_invocations
+dEQP-VK.compute.basic.shared_atomic_op_multiple_groups
+dEQP-VK.compute.basic.copy_image_to_ssbo_small
+dEQP-VK.compute.basic.copy_image_to_ssbo_large
+dEQP-VK.compute.basic.copy_ssbo_to_image_small
+dEQP-VK.compute.basic.copy_ssbo_to_image_large
+dEQP-VK.compute.basic.image_atomic_op_local_size_1
+dEQP-VK.compute.basic.image_atomic_op_local_size_8
+dEQP-VK.compute.basic.image_barrier_single
+dEQP-VK.compute.basic.image_barrier_multiple
+dEQP-VK.compute.indirect_dispatch.upload_buffer.single_invocation
+dEQP-VK.compute.indirect_dispatch.upload_buffer.multiple_groups
+dEQP-VK.compute.indirect_dispatch.upload_buffer.multiple_groups_multiple_invocations
+dEQP-VK.compute.indirect_dispatch.upload_buffer.small_offset
+dEQP-VK.compute.indirect_dispatch.upload_buffer.large_offset
+dEQP-VK.compute.indirect_dispatch.upload_buffer.large_offset_multiple_invocations
+dEQP-VK.compute.indirect_dispatch.upload_buffer.empty_command
+dEQP-VK.compute.indirect_dispatch.upload_buffer.multi_dispatch
+dEQP-VK.compute.indirect_dispatch.upload_buffer.multi_dispatch_reuse_command
+dEQP-VK.compute.indirect_dispatch.gen_in_compute.single_invocation
+dEQP-VK.compute.indirect_dispatch.gen_in_compute.multiple_groups
+dEQP-VK.compute.indirect_dispatch.gen_in_compute.multiple_groups_multiple_invocations
+dEQP-VK.compute.indirect_dispatch.gen_in_compute.small_offset
+dEQP-VK.compute.indirect_dispatch.gen_in_compute.large_offset
+dEQP-VK.compute.indirect_dispatch.gen_in_compute.large_offset_multiple_invocations
+dEQP-VK.compute.indirect_dispatch.gen_in_compute.empty_command
+dEQP-VK.compute.indirect_dispatch.gen_in_compute.multi_dispatch
+dEQP-VK.compute.indirect_dispatch.gen_in_compute.multi_dispatch_reuse_command
+dEQP-VK.compute.builtin_var.num_work_groups
+dEQP-VK.compute.builtin_var.work_group_size
+dEQP-VK.compute.builtin_var.work_group_id
+dEQP-VK.compute.builtin_var.local_invocation_id
+dEQP-VK.compute.builtin_var.global_invocation_id
+dEQP-VK.compute.builtin_var.local_invocation_index
+dEQP-VK.image.store.1d.r32g32b32a32_sfloat
+dEQP-VK.image.store.1d.r16g16b16a16_sfloat
+dEQP-VK.image.store.1d.r32_sfloat
+dEQP-VK.image.store.1d.r32g32b32a32_uint
+dEQP-VK.image.store.1d.r16g16b16a16_uint
+dEQP-VK.image.store.1d.r8g8b8a8_uint
+dEQP-VK.image.store.1d.r32_uint
+dEQP-VK.image.store.1d.r32g32b32a32_sint
+dEQP-VK.image.store.1d.r16g16b16a16_sint
+dEQP-VK.image.store.1d.r8g8b8a8_sint
+dEQP-VK.image.store.1d.r32_sint
+dEQP-VK.image.store.1d.r8g8b8a8_unorm
+dEQP-VK.image.store.1d.r8g8b8a8_snorm
+dEQP-VK.image.store.1d_array.r32g32b32a32_sfloat
+dEQP-VK.image.store.1d_array.r32g32b32a32_sfloat_single_layer
+dEQP-VK.image.store.1d_array.r16g16b16a16_sfloat
+dEQP-VK.image.store.1d_array.r16g16b16a16_sfloat_single_layer
+dEQP-VK.image.store.1d_array.r32_sfloat
+dEQP-VK.image.store.1d_array.r32_sfloat_single_layer
+dEQP-VK.image.store.1d_array.r32g32b32a32_uint
+dEQP-VK.image.store.1d_array.r32g32b32a32_uint_single_layer
+dEQP-VK.image.store.1d_array.r16g16b16a16_uint
+dEQP-VK.image.store.1d_array.r16g16b16a16_uint_single_layer
+dEQP-VK.image.store.1d_array.r8g8b8a8_uint
+dEQP-VK.image.store.1d_array.r8g8b8a8_uint_single_layer
+dEQP-VK.image.store.1d_array.r32_uint
+dEQP-VK.image.store.1d_array.r32_uint_single_layer
+dEQP-VK.image.store.1d_array.r32g32b32a32_sint
+dEQP-VK.image.store.1d_array.r32g32b32a32_sint_single_layer
+dEQP-VK.image.store.1d_array.r16g16b16a16_sint
+dEQP-VK.image.store.1d_array.r16g16b16a16_sint_single_layer
+dEQP-VK.image.store.1d_array.r8g8b8a8_sint
+dEQP-VK.image.store.1d_array.r8g8b8a8_sint_single_layer
+dEQP-VK.image.store.1d_array.r32_sint
+dEQP-VK.image.store.1d_array.r32_sint_single_layer
+dEQP-VK.image.store.1d_array.r8g8b8a8_unorm
+dEQP-VK.image.store.1d_array.r8g8b8a8_unorm_single_layer
+dEQP-VK.image.store.1d_array.r8g8b8a8_snorm
+dEQP-VK.image.store.1d_array.r8g8b8a8_snorm_single_layer
+dEQP-VK.image.store.2d.r32g32b32a32_sfloat
+dEQP-VK.image.store.2d.r16g16b16a16_sfloat
+dEQP-VK.image.store.2d.r32_sfloat
+dEQP-VK.image.store.2d.r32g32b32a32_uint
+dEQP-VK.image.store.2d.r16g16b16a16_uint
+dEQP-VK.image.store.2d.r8g8b8a8_uint
+dEQP-VK.image.store.2d.r32_uint
+dEQP-VK.image.store.2d.r32g32b32a32_sint
+dEQP-VK.image.store.2d.r16g16b16a16_sint
+dEQP-VK.image.store.2d.r8g8b8a8_sint
+dEQP-VK.image.store.2d.r32_sint
+dEQP-VK.image.store.2d.r8g8b8a8_unorm
+dEQP-VK.image.store.2d.r8g8b8a8_snorm
+dEQP-VK.image.store.2d_array.r32g32b32a32_sfloat
+dEQP-VK.image.store.2d_array.r32g32b32a32_sfloat_single_layer
+dEQP-VK.image.store.2d_array.r16g16b16a16_sfloat
+dEQP-VK.image.store.2d_array.r16g16b16a16_sfloat_single_layer
+dEQP-VK.image.store.2d_array.r32_sfloat
+dEQP-VK.image.store.2d_array.r32_sfloat_single_layer
+dEQP-VK.image.store.2d_array.r32g32b32a32_uint
+dEQP-VK.image.store.2d_array.r32g32b32a32_uint_single_layer
+dEQP-VK.image.store.2d_array.r16g16b16a16_uint
+dEQP-VK.image.store.2d_array.r16g16b16a16_uint_single_layer
+dEQP-VK.image.store.2d_array.r8g8b8a8_uint
+dEQP-VK.image.store.2d_array.r8g8b8a8_uint_single_layer
+dEQP-VK.image.store.2d_array.r32_uint
+dEQP-VK.image.store.2d_array.r32_uint_single_layer
+dEQP-VK.image.store.2d_array.r32g32b32a32_sint
+dEQP-VK.image.store.2d_array.r32g32b32a32_sint_single_layer
+dEQP-VK.image.store.2d_array.r16g16b16a16_sint
+dEQP-VK.image.store.2d_array.r16g16b16a16_sint_single_layer
+dEQP-VK.image.store.2d_array.r8g8b8a8_sint
+dEQP-VK.image.store.2d_array.r8g8b8a8_sint_single_layer
+dEQP-VK.image.store.2d_array.r32_sint
+dEQP-VK.image.store.2d_array.r32_sint_single_layer
+dEQP-VK.image.store.2d_array.r8g8b8a8_unorm
+dEQP-VK.image.store.2d_array.r8g8b8a8_unorm_single_layer
+dEQP-VK.image.store.2d_array.r8g8b8a8_snorm
+dEQP-VK.image.store.2d_array.r8g8b8a8_snorm_single_layer
+dEQP-VK.image.store.3d.r32g32b32a32_sfloat
+dEQP-VK.image.store.3d.r16g16b16a16_sfloat
+dEQP-VK.image.store.3d.r32_sfloat
+dEQP-VK.image.store.3d.r32g32b32a32_uint
+dEQP-VK.image.store.3d.r16g16b16a16_uint
+dEQP-VK.image.store.3d.r8g8b8a8_uint
+dEQP-VK.image.store.3d.r32_uint
+dEQP-VK.image.store.3d.r32g32b32a32_sint
+dEQP-VK.image.store.3d.r16g16b16a16_sint
+dEQP-VK.image.store.3d.r8g8b8a8_sint
+dEQP-VK.image.store.3d.r32_sint
+dEQP-VK.image.store.3d.r8g8b8a8_unorm
+dEQP-VK.image.store.3d.r8g8b8a8_snorm
+dEQP-VK.image.store.cube.r32g32b32a32_sfloat
+dEQP-VK.image.store.cube.r32g32b32a32_sfloat_single_layer
+dEQP-VK.image.store.cube.r16g16b16a16_sfloat
+dEQP-VK.image.store.cube.r16g16b16a16_sfloat_single_layer
+dEQP-VK.image.store.cube.r32_sfloat
+dEQP-VK.image.store.cube.r32_sfloat_single_layer
+dEQP-VK.image.store.cube.r32g32b32a32_uint
+dEQP-VK.image.store.cube.r32g32b32a32_uint_single_layer
+dEQP-VK.image.store.cube.r16g16b16a16_uint
+dEQP-VK.image.store.cube.r16g16b16a16_uint_single_layer
+dEQP-VK.image.store.cube.r8g8b8a8_uint
+dEQP-VK.image.store.cube.r8g8b8a8_uint_single_layer
+dEQP-VK.image.store.cube.r32_uint
+dEQP-VK.image.store.cube.r32_uint_single_layer
+dEQP-VK.image.store.cube.r32g32b32a32_sint
+dEQP-VK.image.store.cube.r32g32b32a32_sint_single_layer
+dEQP-VK.image.store.cube.r16g16b16a16_sint
+dEQP-VK.image.store.cube.r16g16b16a16_sint_single_layer
+dEQP-VK.image.store.cube.r8g8b8a8_sint
+dEQP-VK.image.store.cube.r8g8b8a8_sint_single_layer
+dEQP-VK.image.store.cube.r32_sint
+dEQP-VK.image.store.cube.r32_sint_single_layer
+dEQP-VK.image.store.cube.r8g8b8a8_unorm
+dEQP-VK.image.store.cube.r8g8b8a8_unorm_single_layer
+dEQP-VK.image.store.cube.r8g8b8a8_snorm
+dEQP-VK.image.store.cube.r8g8b8a8_snorm_single_layer
+dEQP-VK.image.store.cube_array.r32g32b32a32_sfloat
+dEQP-VK.image.store.cube_array.r32g32b32a32_sfloat_single_layer
+dEQP-VK.image.store.cube_array.r16g16b16a16_sfloat
+dEQP-VK.image.store.cube_array.r16g16b16a16_sfloat_single_layer
+dEQP-VK.image.store.cube_array.r32_sfloat
+dEQP-VK.image.store.cube_array.r32_sfloat_single_layer
+dEQP-VK.image.store.cube_array.r32g32b32a32_uint
+dEQP-VK.image.store.cube_array.r32g32b32a32_uint_single_layer
+dEQP-VK.image.store.cube_array.r16g16b16a16_uint
+dEQP-VK.image.store.cube_array.r16g16b16a16_uint_single_layer
+dEQP-VK.image.store.cube_array.r8g8b8a8_uint
+dEQP-VK.image.store.cube_array.r8g8b8a8_uint_single_layer
+dEQP-VK.image.store.cube_array.r32_uint
+dEQP-VK.image.store.cube_array.r32_uint_single_layer
+dEQP-VK.image.store.cube_array.r32g32b32a32_sint
+dEQP-VK.image.store.cube_array.r32g32b32a32_sint_single_layer
+dEQP-VK.image.store.cube_array.r16g16b16a16_sint
+dEQP-VK.image.store.cube_array.r16g16b16a16_sint_single_layer
+dEQP-VK.image.store.cube_array.r8g8b8a8_sint
+dEQP-VK.image.store.cube_array.r8g8b8a8_sint_single_layer
+dEQP-VK.image.store.cube_array.r32_sint
+dEQP-VK.image.store.cube_array.r32_sint_single_layer
+dEQP-VK.image.store.cube_array.r8g8b8a8_unorm
+dEQP-VK.image.store.cube_array.r8g8b8a8_unorm_single_layer
+dEQP-VK.image.store.cube_array.r8g8b8a8_snorm
+dEQP-VK.image.store.cube_array.r8g8b8a8_snorm_single_layer
+dEQP-VK.image.store.buffer.r32g32b32a32_sfloat
+dEQP-VK.image.store.buffer.r16g16b16a16_sfloat
+dEQP-VK.image.store.buffer.r32_sfloat
+dEQP-VK.image.store.buffer.r32g32b32a32_uint
+dEQP-VK.image.store.buffer.r16g16b16a16_uint
+dEQP-VK.image.store.buffer.r8g8b8a8_uint
+dEQP-VK.image.store.buffer.r32_uint
+dEQP-VK.image.store.buffer.r32g32b32a32_sint
+dEQP-VK.image.store.buffer.r16g16b16a16_sint
+dEQP-VK.image.store.buffer.r8g8b8a8_sint
+dEQP-VK.image.store.buffer.r32_sint
+dEQP-VK.image.store.buffer.r8g8b8a8_unorm
+dEQP-VK.image.store.buffer.r8g8b8a8_snorm
+dEQP-VK.image.load_store.1d.r32g32b32a32_sfloat
+dEQP-VK.image.load_store.1d.r16g16b16a16_sfloat
+dEQP-VK.image.load_store.1d.r32_sfloat
+dEQP-VK.image.load_store.1d.r32g32b32a32_uint
+dEQP-VK.image.load_store.1d.r16g16b16a16_uint
+dEQP-VK.image.load_store.1d.r8g8b8a8_uint
+dEQP-VK.image.load_store.1d.r32_uint
+dEQP-VK.image.load_store.1d.r32g32b32a32_sint
+dEQP-VK.image.load_store.1d.r16g16b16a16_sint
+dEQP-VK.image.load_store.1d.r8g8b8a8_sint
+dEQP-VK.image.load_store.1d.r32_sint
+dEQP-VK.image.load_store.1d.r8g8b8a8_unorm
+dEQP-VK.image.load_store.1d.r8g8b8a8_snorm
+dEQP-VK.image.load_store.1d_array.r32g32b32a32_sfloat
+dEQP-VK.image.load_store.1d_array.r32g32b32a32_sfloat_single_layer
+dEQP-VK.image.load_store.1d_array.r16g16b16a16_sfloat
+dEQP-VK.image.load_store.1d_array.r16g16b16a16_sfloat_single_layer
+dEQP-VK.image.load_store.1d_array.r32_sfloat
+dEQP-VK.image.load_store.1d_array.r32_sfloat_single_layer
+dEQP-VK.image.load_store.1d_array.r32g32b32a32_uint
+dEQP-VK.image.load_store.1d_array.r32g32b32a32_uint_single_layer
+dEQP-VK.image.load_store.1d_array.r16g16b16a16_uint
+dEQP-VK.image.load_store.1d_array.r16g16b16a16_uint_single_layer
+dEQP-VK.image.load_store.1d_array.r8g8b8a8_uint
+dEQP-VK.image.load_store.1d_array.r8g8b8a8_uint_single_layer
+dEQP-VK.image.load_store.1d_array.r32_uint
+dEQP-VK.image.load_store.1d_array.r32_uint_single_layer
+dEQP-VK.image.load_store.1d_array.r32g32b32a32_sint
+dEQP-VK.image.load_store.1d_array.r32g32b32a32_sint_single_layer
+dEQP-VK.image.load_store.1d_array.r16g16b16a16_sint
+dEQP-VK.image.load_store.1d_array.r16g16b16a16_sint_single_layer
+dEQP-VK.image.load_store.1d_array.r8g8b8a8_sint
+dEQP-VK.image.load_store.1d_array.r8g8b8a8_sint_single_layer
+dEQP-VK.image.load_store.1d_array.r32_sint
+dEQP-VK.image.load_store.1d_array.r32_sint_single_layer
+dEQP-VK.image.load_store.1d_array.r8g8b8a8_unorm
+dEQP-VK.image.load_store.1d_array.r8g8b8a8_unorm_single_layer
+dEQP-VK.image.load_store.1d_array.r8g8b8a8_snorm
+dEQP-VK.image.load_store.1d_array.r8g8b8a8_snorm_single_layer
+dEQP-VK.image.load_store.2d.r32g32b32a32_sfloat
+dEQP-VK.image.load_store.2d.r16g16b16a16_sfloat
+dEQP-VK.image.load_store.2d.r32_sfloat
+dEQP-VK.image.load_store.2d.r32g32b32a32_uint
+dEQP-VK.image.load_store.2d.r16g16b16a16_uint
+dEQP-VK.image.load_store.2d.r8g8b8a8_uint
+dEQP-VK.image.load_store.2d.r32_uint
+dEQP-VK.image.load_store.2d.r32g32b32a32_sint
+dEQP-VK.image.load_store.2d.r16g16b16a16_sint
+dEQP-VK.image.load_store.2d.r8g8b8a8_sint
+dEQP-VK.image.load_store.2d.r32_sint
+dEQP-VK.image.load_store.2d.r8g8b8a8_unorm
+dEQP-VK.image.load_store.2d.r8g8b8a8_snorm
+dEQP-VK.image.load_store.2d_array.r32g32b32a32_sfloat
+dEQP-VK.image.load_store.2d_array.r32g32b32a32_sfloat_single_layer
+dEQP-VK.image.load_store.2d_array.r16g16b16a16_sfloat
+dEQP-VK.image.load_store.2d_array.r16g16b16a16_sfloat_single_layer
+dEQP-VK.image.load_store.2d_array.r32_sfloat
+dEQP-VK.image.load_store.2d_array.r32_sfloat_single_layer
+dEQP-VK.image.load_store.2d_array.r32g32b32a32_uint
+dEQP-VK.image.load_store.2d_array.r32g32b32a32_uint_single_layer
+dEQP-VK.image.load_store.2d_array.r16g16b16a16_uint
+dEQP-VK.image.load_store.2d_array.r16g16b16a16_uint_single_layer
+dEQP-VK.image.load_store.2d_array.r8g8b8a8_uint
+dEQP-VK.image.load_store.2d_array.r8g8b8a8_uint_single_layer
+dEQP-VK.image.load_store.2d_array.r32_uint
+dEQP-VK.image.load_store.2d_array.r32_uint_single_layer
+dEQP-VK.image.load_store.2d_array.r32g32b32a32_sint
+dEQP-VK.image.load_store.2d_array.r32g32b32a32_sint_single_layer
+dEQP-VK.image.load_store.2d_array.r16g16b16a16_sint
+dEQP-VK.image.load_store.2d_array.r16g16b16a16_sint_single_layer
+dEQP-VK.image.load_store.2d_array.r8g8b8a8_sint
+dEQP-VK.image.load_store.2d_array.r8g8b8a8_sint_single_layer
+dEQP-VK.image.load_store.2d_array.r32_sint
+dEQP-VK.image.load_store.2d_array.r32_sint_single_layer
+dEQP-VK.image.load_store.2d_array.r8g8b8a8_unorm
+dEQP-VK.image.load_store.2d_array.r8g8b8a8_unorm_single_layer
+dEQP-VK.image.load_store.2d_array.r8g8b8a8_snorm
+dEQP-VK.image.load_store.2d_array.r8g8b8a8_snorm_single_layer
+dEQP-VK.image.load_store.3d.r32g32b32a32_sfloat
+dEQP-VK.image.load_store.3d.r16g16b16a16_sfloat
+dEQP-VK.image.load_store.3d.r32_sfloat
+dEQP-VK.image.load_store.3d.r32g32b32a32_uint
+dEQP-VK.image.load_store.3d.r16g16b16a16_uint
+dEQP-VK.image.load_store.3d.r8g8b8a8_uint
+dEQP-VK.image.load_store.3d.r32_uint
+dEQP-VK.image.load_store.3d.r32g32b32a32_sint
+dEQP-VK.image.load_store.3d.r16g16b16a16_sint
+dEQP-VK.image.load_store.3d.r8g8b8a8_sint
+dEQP-VK.image.load_store.3d.r32_sint
+dEQP-VK.image.load_store.3d.r8g8b8a8_unorm
+dEQP-VK.image.load_store.3d.r8g8b8a8_snorm
+dEQP-VK.image.load_store.cube.r32g32b32a32_sfloat
+dEQP-VK.image.load_store.cube.r32g32b32a32_sfloat_single_layer
+dEQP-VK.image.load_store.cube.r16g16b16a16_sfloat
+dEQP-VK.image.load_store.cube.r16g16b16a16_sfloat_single_layer
+dEQP-VK.image.load_store.cube.r32_sfloat
+dEQP-VK.image.load_store.cube.r32_sfloat_single_layer
+dEQP-VK.image.load_store.cube.r32g32b32a32_uint
+dEQP-VK.image.load_store.cube.r32g32b32a32_uint_single_layer
+dEQP-VK.image.load_store.cube.r16g16b16a16_uint
+dEQP-VK.image.load_store.cube.r16g16b16a16_uint_single_layer
+dEQP-VK.image.load_store.cube.r8g8b8a8_uint
+dEQP-VK.image.load_store.cube.r8g8b8a8_uint_single_layer
+dEQP-VK.image.load_store.cube.r32_uint
+dEQP-VK.image.load_store.cube.r32_uint_single_layer
+dEQP-VK.image.load_store.cube.r32g32b32a32_sint
+dEQP-VK.image.load_store.cube.r32g32b32a32_sint_single_layer
+dEQP-VK.image.load_store.cube.r16g16b16a16_sint
+dEQP-VK.image.load_store.cube.r16g16b16a16_sint_single_layer
+dEQP-VK.image.load_store.cube.r8g8b8a8_sint
+dEQP-VK.image.load_store.cube.r8g8b8a8_sint_single_layer
+dEQP-VK.image.load_store.cube.r32_sint
+dEQP-VK.image.load_store.cube.r32_sint_single_layer
+dEQP-VK.image.load_store.cube.r8g8b8a8_unorm
+dEQP-VK.image.load_store.cube.r8g8b8a8_unorm_single_layer
+dEQP-VK.image.load_store.cube.r8g8b8a8_snorm
+dEQP-VK.image.load_store.cube.r8g8b8a8_snorm_single_layer
+dEQP-VK.image.load_store.cube_array.r32g32b32a32_sfloat
+dEQP-VK.image.load_store.cube_array.r32g32b32a32_sfloat_single_layer
+dEQP-VK.image.load_store.cube_array.r16g16b16a16_sfloat
+dEQP-VK.image.load_store.cube_array.r16g16b16a16_sfloat_single_layer
+dEQP-VK.image.load_store.cube_array.r32_sfloat
+dEQP-VK.image.load_store.cube_array.r32_sfloat_single_layer
+dEQP-VK.image.load_store.cube_array.r32g32b32a32_uint
+dEQP-VK.image.load_store.cube_array.r32g32b32a32_uint_single_layer
+dEQP-VK.image.load_store.cube_array.r16g16b16a16_uint
+dEQP-VK.image.load_store.cube_array.r16g16b16a16_uint_single_layer
+dEQP-VK.image.load_store.cube_array.r8g8b8a8_uint
+dEQP-VK.image.load_store.cube_array.r8g8b8a8_uint_single_layer
+dEQP-VK.image.load_store.cube_array.r32_uint
+dEQP-VK.image.load_store.cube_array.r32_uint_single_layer
+dEQP-VK.image.load_store.cube_array.r32g32b32a32_sint
+dEQP-VK.image.load_store.cube_array.r32g32b32a32_sint_single_layer
+dEQP-VK.image.load_store.cube_array.r16g16b16a16_sint
+dEQP-VK.image.load_store.cube_array.r16g16b16a16_sint_single_layer
+dEQP-VK.image.load_store.cube_array.r8g8b8a8_sint
+dEQP-VK.image.load_store.cube_array.r8g8b8a8_sint_single_layer
+dEQP-VK.image.load_store.cube_array.r32_sint
+dEQP-VK.image.load_store.cube_array.r32_sint_single_layer
+dEQP-VK.image.load_store.cube_array.r8g8b8a8_unorm
+dEQP-VK.image.load_store.cube_array.r8g8b8a8_unorm_single_layer
+dEQP-VK.image.load_store.cube_array.r8g8b8a8_snorm
+dEQP-VK.image.load_store.cube_array.r8g8b8a8_snorm_single_layer
+dEQP-VK.image.load_store.buffer.r32g32b32a32_sfloat
+dEQP-VK.image.load_store.buffer.r16g16b16a16_sfloat
+dEQP-VK.image.load_store.buffer.r32_sfloat
+dEQP-VK.image.load_store.buffer.r32g32b32a32_uint
+dEQP-VK.image.load_store.buffer.r16g16b16a16_uint
+dEQP-VK.image.load_store.buffer.r8g8b8a8_uint
+dEQP-VK.image.load_store.buffer.r32_uint
+dEQP-VK.image.load_store.buffer.r32g32b32a32_sint
+dEQP-VK.image.load_store.buffer.r16g16b16a16_sint
+dEQP-VK.image.load_store.buffer.r8g8b8a8_sint
+dEQP-VK.image.load_store.buffer.r32_sint
+dEQP-VK.image.load_store.buffer.r8g8b8a8_unorm
+dEQP-VK.image.load_store.buffer.r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.1d.r32g32b32a32_sfloat_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.1d.r32g32b32a32_sfloat_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.1d.r16g16b16a16_sfloat_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.1d.r16g16b16a16_sfloat_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.1d.r32_sfloat_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.1d.r32_sfloat_r32_uint
+dEQP-VK.image.format_reinterpret.1d.r32_sfloat_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.1d.r32_sfloat_r32_sint
+dEQP-VK.image.format_reinterpret.1d.r32_sfloat_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.1d.r32_sfloat_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.1d.r32g32b32a32_uint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.1d.r32g32b32a32_uint_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.1d.r16g16b16a16_uint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.1d.r16g16b16a16_uint_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_uint_r32_uint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_uint_r32_sint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.1d.r32_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.1d.r32_uint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.1d.r32_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.1d.r32_uint_r32_sint
+dEQP-VK.image.format_reinterpret.1d.r32_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.1d.r32_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.1d.r32g32b32a32_sint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.1d.r32g32b32a32_sint_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.1d.r16g16b16a16_sint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.1d.r16g16b16a16_sint_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_sint_r32_uint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_sint_r32_sint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.1d.r32_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.1d.r32_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.1d.r32_sint_r32_uint
+dEQP-VK.image.format_reinterpret.1d.r32_sint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.1d.r32_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.1d.r32_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_unorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_unorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_unorm_r32_uint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_unorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_unorm_r32_sint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_unorm_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_snorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_snorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_snorm_r32_uint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_snorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_snorm_r32_sint
+dEQP-VK.image.format_reinterpret.1d.r8g8b8a8_snorm_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.1d_array.r32g32b32a32_sfloat_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.1d_array.r32g32b32a32_sfloat_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.1d_array.r16g16b16a16_sfloat_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.1d_array.r16g16b16a16_sfloat_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.1d_array.r32_sfloat_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.1d_array.r32_sfloat_r32_uint
+dEQP-VK.image.format_reinterpret.1d_array.r32_sfloat_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.1d_array.r32_sfloat_r32_sint
+dEQP-VK.image.format_reinterpret.1d_array.r32_sfloat_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.1d_array.r32_sfloat_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.1d_array.r32g32b32a32_uint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.1d_array.r32g32b32a32_uint_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.1d_array.r16g16b16a16_uint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.1d_array.r16g16b16a16_uint_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_uint_r32_uint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_uint_r32_sint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.1d_array.r32_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.1d_array.r32_uint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.1d_array.r32_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.1d_array.r32_uint_r32_sint
+dEQP-VK.image.format_reinterpret.1d_array.r32_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.1d_array.r32_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.1d_array.r32g32b32a32_sint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.1d_array.r32g32b32a32_sint_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.1d_array.r16g16b16a16_sint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.1d_array.r16g16b16a16_sint_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_sint_r32_uint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_sint_r32_sint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.1d_array.r32_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.1d_array.r32_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.1d_array.r32_sint_r32_uint
+dEQP-VK.image.format_reinterpret.1d_array.r32_sint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.1d_array.r32_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.1d_array.r32_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_unorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_unorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_unorm_r32_uint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_unorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_unorm_r32_sint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_unorm_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_snorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_snorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_snorm_r32_uint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_snorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_snorm_r32_sint
+dEQP-VK.image.format_reinterpret.1d_array.r8g8b8a8_snorm_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.2d.r32g32b32a32_sfloat_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.2d.r32g32b32a32_sfloat_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.2d.r16g16b16a16_sfloat_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.2d.r16g16b16a16_sfloat_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.2d.r32_sfloat_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.2d.r32_sfloat_r32_uint
+dEQP-VK.image.format_reinterpret.2d.r32_sfloat_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.2d.r32_sfloat_r32_sint
+dEQP-VK.image.format_reinterpret.2d.r32_sfloat_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.2d.r32_sfloat_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.2d.r32g32b32a32_uint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.2d.r32g32b32a32_uint_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.2d.r16g16b16a16_uint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.2d.r16g16b16a16_uint_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_uint_r32_uint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_uint_r32_sint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.2d.r32_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.2d.r32_uint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.2d.r32_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.2d.r32_uint_r32_sint
+dEQP-VK.image.format_reinterpret.2d.r32_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.2d.r32_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.2d.r32g32b32a32_sint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.2d.r32g32b32a32_sint_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.2d.r16g16b16a16_sint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.2d.r16g16b16a16_sint_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_sint_r32_uint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_sint_r32_sint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.2d.r32_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.2d.r32_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.2d.r32_sint_r32_uint
+dEQP-VK.image.format_reinterpret.2d.r32_sint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.2d.r32_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.2d.r32_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_unorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_unorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_unorm_r32_uint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_unorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_unorm_r32_sint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_unorm_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_snorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_snorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_snorm_r32_uint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_snorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_snorm_r32_sint
+dEQP-VK.image.format_reinterpret.2d.r8g8b8a8_snorm_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.2d_array.r32g32b32a32_sfloat_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.2d_array.r32g32b32a32_sfloat_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.2d_array.r16g16b16a16_sfloat_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.2d_array.r16g16b16a16_sfloat_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.2d_array.r32_sfloat_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.2d_array.r32_sfloat_r32_uint
+dEQP-VK.image.format_reinterpret.2d_array.r32_sfloat_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.2d_array.r32_sfloat_r32_sint
+dEQP-VK.image.format_reinterpret.2d_array.r32_sfloat_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.2d_array.r32_sfloat_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.2d_array.r32g32b32a32_uint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.2d_array.r32g32b32a32_uint_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.2d_array.r16g16b16a16_uint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.2d_array.r16g16b16a16_uint_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_uint_r32_uint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_uint_r32_sint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.2d_array.r32_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.2d_array.r32_uint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.2d_array.r32_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.2d_array.r32_uint_r32_sint
+dEQP-VK.image.format_reinterpret.2d_array.r32_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.2d_array.r32_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.2d_array.r32g32b32a32_sint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.2d_array.r32g32b32a32_sint_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.2d_array.r16g16b16a16_sint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.2d_array.r16g16b16a16_sint_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_sint_r32_uint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_sint_r32_sint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.2d_array.r32_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.2d_array.r32_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.2d_array.r32_sint_r32_uint
+dEQP-VK.image.format_reinterpret.2d_array.r32_sint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.2d_array.r32_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.2d_array.r32_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_unorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_unorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_unorm_r32_uint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_unorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_unorm_r32_sint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_unorm_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_snorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_snorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_snorm_r32_uint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_snorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_snorm_r32_sint
+dEQP-VK.image.format_reinterpret.2d_array.r8g8b8a8_snorm_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.3d.r32g32b32a32_sfloat_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.3d.r32g32b32a32_sfloat_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.3d.r16g16b16a16_sfloat_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.3d.r16g16b16a16_sfloat_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.3d.r32_sfloat_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.3d.r32_sfloat_r32_uint
+dEQP-VK.image.format_reinterpret.3d.r32_sfloat_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.3d.r32_sfloat_r32_sint
+dEQP-VK.image.format_reinterpret.3d.r32_sfloat_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.3d.r32_sfloat_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.3d.r32g32b32a32_uint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.3d.r32g32b32a32_uint_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.3d.r16g16b16a16_uint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.3d.r16g16b16a16_uint_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_uint_r32_uint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_uint_r32_sint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.3d.r32_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.3d.r32_uint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.3d.r32_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.3d.r32_uint_r32_sint
+dEQP-VK.image.format_reinterpret.3d.r32_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.3d.r32_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.3d.r32g32b32a32_sint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.3d.r32g32b32a32_sint_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.3d.r16g16b16a16_sint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.3d.r16g16b16a16_sint_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_sint_r32_uint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_sint_r32_sint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.3d.r32_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.3d.r32_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.3d.r32_sint_r32_uint
+dEQP-VK.image.format_reinterpret.3d.r32_sint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.3d.r32_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.3d.r32_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_unorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_unorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_unorm_r32_uint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_unorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_unorm_r32_sint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_unorm_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_snorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_snorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_snorm_r32_uint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_snorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_snorm_r32_sint
+dEQP-VK.image.format_reinterpret.3d.r8g8b8a8_snorm_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.cube.r32g32b32a32_sfloat_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.cube.r32g32b32a32_sfloat_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.cube.r16g16b16a16_sfloat_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.cube.r16g16b16a16_sfloat_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.cube.r32_sfloat_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.cube.r32_sfloat_r32_uint
+dEQP-VK.image.format_reinterpret.cube.r32_sfloat_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.cube.r32_sfloat_r32_sint
+dEQP-VK.image.format_reinterpret.cube.r32_sfloat_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.cube.r32_sfloat_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.cube.r32g32b32a32_uint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.cube.r32g32b32a32_uint_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.cube.r16g16b16a16_uint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.cube.r16g16b16a16_uint_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_uint_r32_uint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_uint_r32_sint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.cube.r32_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.cube.r32_uint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.cube.r32_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.cube.r32_uint_r32_sint
+dEQP-VK.image.format_reinterpret.cube.r32_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.cube.r32_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.cube.r32g32b32a32_sint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.cube.r32g32b32a32_sint_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.cube.r16g16b16a16_sint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.cube.r16g16b16a16_sint_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_sint_r32_uint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_sint_r32_sint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.cube.r32_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.cube.r32_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.cube.r32_sint_r32_uint
+dEQP-VK.image.format_reinterpret.cube.r32_sint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.cube.r32_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.cube.r32_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_unorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_unorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_unorm_r32_uint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_unorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_unorm_r32_sint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_unorm_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_snorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_snorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_snorm_r32_uint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_snorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_snorm_r32_sint
+dEQP-VK.image.format_reinterpret.cube.r8g8b8a8_snorm_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.cube_array.r32g32b32a32_sfloat_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.cube_array.r32g32b32a32_sfloat_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.cube_array.r16g16b16a16_sfloat_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.cube_array.r16g16b16a16_sfloat_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.cube_array.r32_sfloat_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.cube_array.r32_sfloat_r32_uint
+dEQP-VK.image.format_reinterpret.cube_array.r32_sfloat_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.cube_array.r32_sfloat_r32_sint
+dEQP-VK.image.format_reinterpret.cube_array.r32_sfloat_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.cube_array.r32_sfloat_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.cube_array.r32g32b32a32_uint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.cube_array.r32g32b32a32_uint_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.cube_array.r16g16b16a16_uint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.cube_array.r16g16b16a16_uint_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_uint_r32_uint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_uint_r32_sint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.cube_array.r32_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.cube_array.r32_uint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.cube_array.r32_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.cube_array.r32_uint_r32_sint
+dEQP-VK.image.format_reinterpret.cube_array.r32_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.cube_array.r32_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.cube_array.r32g32b32a32_sint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.cube_array.r32g32b32a32_sint_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.cube_array.r16g16b16a16_sint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.cube_array.r16g16b16a16_sint_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_sint_r32_uint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_sint_r32_sint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.cube_array.r32_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.cube_array.r32_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.cube_array.r32_sint_r32_uint
+dEQP-VK.image.format_reinterpret.cube_array.r32_sint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.cube_array.r32_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.cube_array.r32_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_unorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_unorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_unorm_r32_uint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_unorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_unorm_r32_sint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_unorm_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_snorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_snorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_snorm_r32_uint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_snorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_snorm_r32_sint
+dEQP-VK.image.format_reinterpret.cube_array.r8g8b8a8_snorm_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.buffer.r32g32b32a32_sfloat_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.buffer.r32g32b32a32_sfloat_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.buffer.r16g16b16a16_sfloat_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.buffer.r16g16b16a16_sfloat_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.buffer.r32_sfloat_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.buffer.r32_sfloat_r32_uint
+dEQP-VK.image.format_reinterpret.buffer.r32_sfloat_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.buffer.r32_sfloat_r32_sint
+dEQP-VK.image.format_reinterpret.buffer.r32_sfloat_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.buffer.r32_sfloat_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.buffer.r32g32b32a32_uint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.buffer.r32g32b32a32_uint_r32g32b32a32_sint
+dEQP-VK.image.format_reinterpret.buffer.r16g16b16a16_uint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.buffer.r16g16b16a16_uint_r16g16b16a16_sint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_uint_r32_uint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_uint_r32_sint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.buffer.r32_uint_r32_sfloat
+dEQP-VK.image.format_reinterpret.buffer.r32_uint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.buffer.r32_uint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.buffer.r32_uint_r32_sint
+dEQP-VK.image.format_reinterpret.buffer.r32_uint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.buffer.r32_uint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.buffer.r32g32b32a32_sint_r32g32b32a32_sfloat
+dEQP-VK.image.format_reinterpret.buffer.r32g32b32a32_sint_r32g32b32a32_uint
+dEQP-VK.image.format_reinterpret.buffer.r16g16b16a16_sint_r16g16b16a16_sfloat
+dEQP-VK.image.format_reinterpret.buffer.r16g16b16a16_sint_r16g16b16a16_uint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_sint_r32_uint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_sint_r32_sint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.buffer.r32_sint_r32_sfloat
+dEQP-VK.image.format_reinterpret.buffer.r32_sint_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.buffer.r32_sint_r32_uint
+dEQP-VK.image.format_reinterpret.buffer.r32_sint_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.buffer.r32_sint_r8g8b8a8_unorm
+dEQP-VK.image.format_reinterpret.buffer.r32_sint_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_unorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_unorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_unorm_r32_uint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_unorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_unorm_r32_sint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_unorm_r8g8b8a8_snorm
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_snorm_r32_sfloat
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_snorm_r8g8b8a8_uint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_snorm_r32_uint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_snorm_r8g8b8a8_sint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_snorm_r32_sint
+dEQP-VK.image.format_reinterpret.buffer.r8g8b8a8_snorm_r8g8b8a8_unorm
+dEQP-VK.image.qualifiers.coherent.1d_r32f
+dEQP-VK.image.qualifiers.coherent.1d_r32ui
+dEQP-VK.image.qualifiers.coherent.1d_r32i
+dEQP-VK.image.qualifiers.coherent.1d_array_r32f
+dEQP-VK.image.qualifiers.coherent.1d_array_r32ui
+dEQP-VK.image.qualifiers.coherent.1d_array_r32i
+dEQP-VK.image.qualifiers.coherent.2d_r32f
+dEQP-VK.image.qualifiers.coherent.2d_r32ui
+dEQP-VK.image.qualifiers.coherent.2d_r32i
+dEQP-VK.image.qualifiers.coherent.2d_array_r32f
+dEQP-VK.image.qualifiers.coherent.2d_array_r32ui
+dEQP-VK.image.qualifiers.coherent.2d_array_r32i
+dEQP-VK.image.qualifiers.coherent.3d_r32f
+dEQP-VK.image.qualifiers.coherent.3d_r32ui
+dEQP-VK.image.qualifiers.coherent.3d_r32i
+dEQP-VK.image.qualifiers.coherent.cube_r32f
+dEQP-VK.image.qualifiers.coherent.cube_r32ui
+dEQP-VK.image.qualifiers.coherent.cube_r32i
+dEQP-VK.image.qualifiers.coherent.cube_array_r32f
+dEQP-VK.image.qualifiers.coherent.cube_array_r32ui
+dEQP-VK.image.qualifiers.coherent.cube_array_r32i
+dEQP-VK.image.qualifiers.coherent.buffer_r32f
+dEQP-VK.image.qualifiers.coherent.buffer_r32ui
+dEQP-VK.image.qualifiers.coherent.buffer_r32i
+dEQP-VK.image.qualifiers.volatile.1d_r32f
+dEQP-VK.image.qualifiers.volatile.1d_r32ui
+dEQP-VK.image.qualifiers.volatile.1d_r32i
+dEQP-VK.image.qualifiers.volatile.1d_array_r32f
+dEQP-VK.image.qualifiers.volatile.1d_array_r32ui
+dEQP-VK.image.qualifiers.volatile.1d_array_r32i
+dEQP-VK.image.qualifiers.volatile.2d_r32f
+dEQP-VK.image.qualifiers.volatile.2d_r32ui
+dEQP-VK.image.qualifiers.volatile.2d_r32i
+dEQP-VK.image.qualifiers.volatile.2d_array_r32f
+dEQP-VK.image.qualifiers.volatile.2d_array_r32ui
+dEQP-VK.image.qualifiers.volatile.2d_array_r32i
+dEQP-VK.image.qualifiers.volatile.3d_r32f
+dEQP-VK.image.qualifiers.volatile.3d_r32ui
+dEQP-VK.image.qualifiers.volatile.3d_r32i
+dEQP-VK.image.qualifiers.volatile.cube_r32f
+dEQP-VK.image.qualifiers.volatile.cube_r32ui
+dEQP-VK.image.qualifiers.volatile.cube_r32i
+dEQP-VK.image.qualifiers.volatile.cube_array_r32f
+dEQP-VK.image.qualifiers.volatile.cube_array_r32ui
+dEQP-VK.image.qualifiers.volatile.cube_array_r32i
+dEQP-VK.image.qualifiers.volatile.buffer_r32f
+dEQP-VK.image.qualifiers.volatile.buffer_r32ui
+dEQP-VK.image.qualifiers.volatile.buffer_r32i
+dEQP-VK.image.qualifiers.restrict.1d
+dEQP-VK.image.qualifiers.restrict.1d_array
+dEQP-VK.image.qualifiers.restrict.2d
+dEQP-VK.image.qualifiers.restrict.2d_array
+dEQP-VK.image.qualifiers.restrict.3d
+dEQP-VK.image.qualifiers.restrict.cube
+dEQP-VK.image.qualifiers.restrict.cube_array
+dEQP-VK.image.qualifiers.restrict.buffer
+dEQP-VK.image.image_size.1d.readonly_32
+dEQP-VK.image.image_size.1d.readonly_12
+dEQP-VK.image.image_size.1d.readonly_1
+dEQP-VK.image.image_size.1d.readonly_7
+dEQP-VK.image.image_size.1d.writeonly_32
+dEQP-VK.image.image_size.1d.writeonly_12
+dEQP-VK.image.image_size.1d.writeonly_1
+dEQP-VK.image.image_size.1d.writeonly_7
+dEQP-VK.image.image_size.1d.readonly_writeonly_32
+dEQP-VK.image.image_size.1d.readonly_writeonly_12
+dEQP-VK.image.image_size.1d.readonly_writeonly_1
+dEQP-VK.image.image_size.1d.readonly_writeonly_7
+dEQP-VK.image.image_size.1d_array.readonly_32x32
+dEQP-VK.image.image_size.1d_array.readonly_12x34
+dEQP-VK.image.image_size.1d_array.readonly_1x1
+dEQP-VK.image.image_size.1d_array.readonly_7x1
+dEQP-VK.image.image_size.1d_array.writeonly_32x32
+dEQP-VK.image.image_size.1d_array.writeonly_12x34
+dEQP-VK.image.image_size.1d_array.writeonly_1x1
+dEQP-VK.image.image_size.1d_array.writeonly_7x1
+dEQP-VK.image.image_size.1d_array.readonly_writeonly_32x32
+dEQP-VK.image.image_size.1d_array.readonly_writeonly_12x34
+dEQP-VK.image.image_size.1d_array.readonly_writeonly_1x1
+dEQP-VK.image.image_size.1d_array.readonly_writeonly_7x1
+dEQP-VK.image.image_size.2d.readonly_32x32
+dEQP-VK.image.image_size.2d.readonly_12x34
+dEQP-VK.image.image_size.2d.readonly_1x1
+dEQP-VK.image.image_size.2d.readonly_7x1
+dEQP-VK.image.image_size.2d.writeonly_32x32
+dEQP-VK.image.image_size.2d.writeonly_12x34
+dEQP-VK.image.image_size.2d.writeonly_1x1
+dEQP-VK.image.image_size.2d.writeonly_7x1
+dEQP-VK.image.image_size.2d.readonly_writeonly_32x32
+dEQP-VK.image.image_size.2d.readonly_writeonly_12x34
+dEQP-VK.image.image_size.2d.readonly_writeonly_1x1
+dEQP-VK.image.image_size.2d.readonly_writeonly_7x1
+dEQP-VK.image.image_size.2d_array.readonly_32x32x32
+dEQP-VK.image.image_size.2d_array.readonly_12x34x56
+dEQP-VK.image.image_size.2d_array.readonly_1x1x1
+dEQP-VK.image.image_size.2d_array.readonly_7x1x1
+dEQP-VK.image.image_size.2d_array.writeonly_32x32x32
+dEQP-VK.image.image_size.2d_array.writeonly_12x34x56
+dEQP-VK.image.image_size.2d_array.writeonly_1x1x1
+dEQP-VK.image.image_size.2d_array.writeonly_7x1x1
+dEQP-VK.image.image_size.2d_array.readonly_writeonly_32x32x32
+dEQP-VK.image.image_size.2d_array.readonly_writeonly_12x34x56
+dEQP-VK.image.image_size.2d_array.readonly_writeonly_1x1x1
+dEQP-VK.image.image_size.2d_array.readonly_writeonly_7x1x1
+dEQP-VK.image.image_size.3d.readonly_32x32x32
+dEQP-VK.image.image_size.3d.readonly_12x34x56
+dEQP-VK.image.image_size.3d.readonly_1x1x1
+dEQP-VK.image.image_size.3d.readonly_7x1x1
+dEQP-VK.image.image_size.3d.writeonly_32x32x32
+dEQP-VK.image.image_size.3d.writeonly_12x34x56
+dEQP-VK.image.image_size.3d.writeonly_1x1x1
+dEQP-VK.image.image_size.3d.writeonly_7x1x1
+dEQP-VK.image.image_size.3d.readonly_writeonly_32x32x32
+dEQP-VK.image.image_size.3d.readonly_writeonly_12x34x56
+dEQP-VK.image.image_size.3d.readonly_writeonly_1x1x1
+dEQP-VK.image.image_size.3d.readonly_writeonly_7x1x1
+dEQP-VK.image.image_size.cube.readonly_32x32x6
+dEQP-VK.image.image_size.cube.readonly_12x12x6
+dEQP-VK.image.image_size.cube.readonly_1x1x6
+dEQP-VK.image.image_size.cube.readonly_7x7x6
+dEQP-VK.image.image_size.cube.writeonly_32x32x6
+dEQP-VK.image.image_size.cube.writeonly_12x12x6
+dEQP-VK.image.image_size.cube.writeonly_1x1x6
+dEQP-VK.image.image_size.cube.writeonly_7x7x6
+dEQP-VK.image.image_size.cube.readonly_writeonly_32x32x6
+dEQP-VK.image.image_size.cube.readonly_writeonly_12x12x6
+dEQP-VK.image.image_size.cube.readonly_writeonly_1x1x6
+dEQP-VK.image.image_size.cube.readonly_writeonly_7x7x6
+dEQP-VK.image.image_size.cube_array.readonly_32x32x12
+dEQP-VK.image.image_size.cube_array.readonly_12x12x12
+dEQP-VK.image.image_size.cube_array.readonly_1x1x12
+dEQP-VK.image.image_size.cube_array.readonly_7x7x12
+dEQP-VK.image.image_size.cube_array.writeonly_32x32x12
+dEQP-VK.image.image_size.cube_array.writeonly_12x12x12
+dEQP-VK.image.image_size.cube_array.writeonly_1x1x12
+dEQP-VK.image.image_size.cube_array.writeonly_7x7x12
+dEQP-VK.image.image_size.cube_array.readonly_writeonly_32x32x12
+dEQP-VK.image.image_size.cube_array.readonly_writeonly_12x12x12
+dEQP-VK.image.image_size.cube_array.readonly_writeonly_1x1x12
+dEQP-VK.image.image_size.cube_array.readonly_writeonly_7x7x12
+dEQP-VK.image.image_size.buffer.readonly_32
+dEQP-VK.image.image_size.buffer.readonly_12
+dEQP-VK.image.image_size.buffer.readonly_1
+dEQP-VK.image.image_size.buffer.readonly_7
+dEQP-VK.image.image_size.buffer.writeonly_32
+dEQP-VK.image.image_size.buffer.writeonly_12
+dEQP-VK.image.image_size.buffer.writeonly_1
+dEQP-VK.image.image_size.buffer.writeonly_7
+dEQP-VK.image.image_size.buffer.readonly_writeonly_32
+dEQP-VK.image.image_size.buffer.readonly_writeonly_12
+dEQP-VK.image.image_size.buffer.readonly_writeonly_1
+dEQP-VK.image.image_size.buffer.readonly_writeonly_7
diff --git a/external/vulkancts/verify_submission.py b/external/vulkancts/verify_submission.py
new file mode 100644
index 0000000..418eeee
--- /dev/null
+++ b/external/vulkancts/verify_submission.py
@@ -0,0 +1,312 @@
+# -*- coding: utf-8 -*-
+
+#-------------------------------------------------------------------------
+# Vulkan CTS
+# ----------
+#
+# Copyright (c) 2016 Google Inc.
+#
+# Permission is hereby granted, free of charge, to any person obtaining a
+# copy of this software and/or associated documentation files (the
+# "Materials"), to deal in the Materials without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Materials, and to
+# permit persons to whom the Materials are furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice(s) and this permission notice shall be
+# included in all copies or substantial portions of the Materials.
+#
+# The Materials are Confidential Information as defined by the
+# Khronos Membership Agreement until designated non-confidential by
+# Khronos, at which point this condition clause shall be removed.
+#
+# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+# MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+#
+#-------------------------------------------------------------------------
+
+import os
+import re
+import sys
+
+from fnmatch import fnmatch
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", "scripts"))
+sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", "scripts", "log"))
+
+from build.common import readFile
+from log_parser import StatusCode, BatchResultParser
+
+ALLOWED_STATUS_CODES = set([
+		StatusCode.PASS,
+		StatusCode.NOT_SUPPORTED,
+		StatusCode.QUALITY_WARNING,
+		StatusCode.COMPATIBILITY_WARNING
+	])
+
+STATEMENT_PATTERN	= "STATEMENT-*"
+TEST_LOG_PATTERN	= "*.qpa"
+GIT_STATUS_PATTERN	= "git-status.txt"
+GIT_LOG_PATTERN		= "git-log.txt"
+PATCH_PATTERN		= "*.patch"
+
+class PackageDescription:
+	def __init__ (self, basePath, statement, testLogs, gitStatus, gitLog, patches, otherItems):
+		self.basePath		= basePath
+		self.statement		= statement
+		self.testLogs		= testLogs
+		self.gitStatus		= gitStatus
+		self.gitLog			= gitLog
+		self.patches		= patches
+		self.otherItems		= otherItems
+
+class ValidationMessage:
+	TYPE_ERROR		= 0
+	TYPE_WARNING	= 1
+
+	def __init__ (self, type, filename, message):
+		self.type		= type
+		self.filename	= filename
+		self.message	= message
+
+	def __str__ (self):
+		prefix = {self.TYPE_ERROR: "ERROR: ", self.TYPE_WARNING: "WARNING: "}
+		return prefix[self.type] + os.path.basename(self.filename) + ": " + self.message
+
+def error (filename, message):
+	return ValidationMessage(ValidationMessage.TYPE_ERROR, filename, message)
+
+def warning (filename, message):
+	return ValidationMessage(ValidationMessage.TYPE_WARNING, filename, message)
+
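+# Sort the contents of an extracted submission package into the categories used below
+# (conformance statement, test logs, git status/log, patches, anything else).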
+def getPackageDescription (packagePath):
+	allItems	= os.listdir(packagePath)
+	statement	= None
+	testLogs	= []
+	gitStatus	= None
+	gitLog		= None
+	patches		= []
+	otherItems	= []
+
+	for item in allItems:
+		if fnmatch(item, STATEMENT_PATTERN):
+			assert statement == None
+			statement = item
+		elif fnmatch(item, TEST_LOG_PATTERN):
+			testLogs.append(item)
+		elif fnmatch(item, GIT_STATUS_PATTERN):
+			assert gitStatus == None
+			gitStatus = item
+		elif fnmatch(item, GIT_LOG_PATTERN):
+			assert gitLog == None
+			gitLog = item
+		elif fnmatch(item, PATCH_PATTERN):
+			patches.append(item)
+		else:
+			otherItems.append(item)
+
+	return PackageDescription(packagePath, statement, testLogs, gitStatus, gitLog, patches, otherItems)
+
+def readMustpass (filename):
+	f = open(filename, 'rb')
+	cases = []
+	for line in f:
+		s = line.strip()
+		if len(s) > 0:
+			cases.append(s)
+	return cases
+
+def readTestLog (filename):
+	parser = BatchResultParser()
+	return parser.parseFile(filename)
+
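+# Verify a single test log against the mustpass list: every case must appear exactly once,
+# in mustpass order, with an allowed status code.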
+def verifyTestLog (filename, mustpass):
+	results			= readTestLog(filename)
+	messages			= []
+	resultOrderOk	= True
+
+	# Mustpass case names must be unique
+	assert len(mustpass) == len(set(mustpass))
+
+	# Verify number of results
+	if len(results) != len(mustpass):
+		messages.append(error(filename, "Wrong number of test results, expected %d, found %d" % (len(mustpass), len(results))))
+
+	caseNameToResultNdx = {}
+	for ndx in xrange(len(results)):
+		result = results[ndx]
+		if not result.name in caseNameToResultNdx:
+			caseNameToResultNdx[result.name] = ndx
+		else:
+			messages.append(error(filename, "Multiple results for " + result.name))
+
+	# Verify that all results are present and valid
+	for ndx in xrange(len(mustpass)):
+		caseName = mustpass[ndx]
+
+		if caseName in caseNameToResultNdx:
+			resultNdx	= caseNameToResultNdx[caseName]
+			result		= results[resultNdx]
+
+			if resultNdx != ndx:
+				resultOrderOk = False
+
+			if not result.statusCode in ALLOWED_STATUS_CODES:
+				messages.append(error(filename, result.name + ": " + result.statusCode))
+		else:
+			messages.append(error(filename, "Missing result for " + caseName))
+
+	if len(results) == len(mustpass) and not resultOrderOk:
+		messages.append(error(filename, "Results are not in the expected order"))
+
+	return messages
+
+def beginsWith (str, prefix):
+	return str[:len(prefix)] == prefix
+
+def verifyStatement (package):
+	messages	= []
+
+	if package.statement != None:
+		statementPath	= os.path.join(package.basePath, package.statement)
+		statement		= readFile(statementPath)
+		hasVersion		= False
+		hasProduct		= False
+		hasCpu			= False
+		hasOs			= False
+
+		for line in statement.splitlines():
+			if beginsWith(line, "CONFORM_VERSION:"):
+				if hasVersion:
+					messages.append(error(statementPath, "Multiple CONFORM_VERSIONs"))
+				else:
+					hasVersion = True
+			elif beginsWith(line, "PRODUCT:"):
+				hasProduct = True # Multiple products allowed
+			elif beginsWith(line, "CPU:"):
+				if hasCpu:
+					messages.append(error(statementPath, "Multiple PRODUCTs"))
+				else:
+					hasCpu = True
+			elif beginsWith(line, "OS:"):
+				if hasOs:
+					messages.append(error(statementPath, "Multiple OSes"))
+				else:
+					hasOs = True
+
+		if not hasVersion:
+			messages.append(error(statementPath, "No CONFORM_VERSION"))
+		if not hasProduct:
+			messages.append(error(statementPath, "No PRODUCT"))
+		if not hasCpu:
+			messages.append(error(statementPath, "No CPU"))
+		if not hasOs:
+			messages.append(error(statementPath, "No OS"))
+	else:
+		messages.append(error(package.basePath, "Missing conformance statement file"))
+
+	return messages
+
+def verifyGitStatus (package):
+	messages = []
+
+	if package.gitStatus != None:
+		statusPath	= os.path.join(package.basePath, package.gitStatus)
+		status		= readFile(statusPath)
+
+		if status.find("nothing to commit, working directory clean") < 0:
+			messages.append(error(package.basePath, "Working directory is not clean"))
+	else:
+		messages.append(error(package.basePath, "Missing git-status.txt"))
+
+	return messages
+
+def isGitLogEmpty (package):
+	assert package.gitLog != None
+
+	logPath	= os.path.join(package.basePath, package.gitLog)
+	log		= readFile(logPath)
+
+	return len(log.strip()) == 0
+
+def verifyGitLog (package):
+	messages = []
+
+	if package.gitLog != None:
+		if not isGitLogEmpty(package):
+			messages.append(warning(os.path.join(package.basePath, package.gitLog), "Log is not empty"))
+	else:
+		messages.append(error(package.basePath, "Missing git-log.txt"))
+
+	return messages
+
+def verifyPatches (package):
+	messages	= []
+	hasPatches	= len(package.patches)
+	logEmpty	= package.gitLog and isGitLogEmpty(package)
+
+	if hasPatches and logEmpty:
+		messages.append(error(package.basePath, "Package includes patches but log is empty"))
+	elif not hasPatches and not logEmpty:
+		messages.append(error(package.basePath, "Test log is not empty but package doesn't contain patches"))
+
+	return messages
+
+def verifyTestLogs (package, mustpass):
+	messages	= []
+
+	for testLogFile in package.testLogs:
+		messages += verifyTestLog(os.path.join(package.basePath, testLogFile), mustpass)
+
+	if len(package.testLogs) == 0:
+		messages.append(error(package.basePath, "No test log files found"))
+
+	return messages
+
+def verifyPackage (package, mustpass):
+	messages = []
+
+	messages += verifyStatement(package)
+	messages += verifyGitStatus(package)
+	messages += verifyGitLog(package)
+	messages += verifyPatches(package)
+	messages += verifyTestLogs(package, mustpass)
+
+	for item in package.otherItems:
+		messages.append(warning(os.path.join(package.basePath, item), "Unknown file"))
+
+	return messages
+
+if __name__ == "__main__":
+	if len(sys.argv) != 3:
+		print "%s: [extracted submission package] [mustpass]" % sys.argv[0]
+		sys.exit(-1)
+
+	packagePath		= os.path.normpath(sys.argv[1])
+	mustpassPath	= sys.argv[2]
+	package			= getPackageDescription(packagePath)
+	mustpass		= readMustpass(mustpassPath)
+	messages		= verifyPackage(package, mustpass)
+
+	errors			= [m for m in messages if m.type == ValidationMessage.TYPE_ERROR]
+	warnings		= [m for m in messages if m.type == ValidationMessage.TYPE_WARNING]
+
+	for message in messages:
+		print str(message)
+
+	print ""
+
+	if len(errors) > 0:
+		print "Found %d validation errors and %d warnings!" % (len(errors), len(warnings))
+		sys.exit(-2)
+	elif len(warnings) > 0:
+		print "Found %d warnings, manual review required" % len(warnings)
+		sys.exit(-1)
+	else:
+		print "All validation checks passed"
diff --git a/framework/common/CMakeLists.txt b/framework/common/CMakeLists.txt
index 9529b56..61f0399 100644
--- a/framework/common/CMakeLists.txt
+++ b/framework/common/CMakeLists.txt
@@ -87,6 +87,8 @@
 	tcuTestHierarchyIterator.hpp
 	tcuTestHierarchyUtil.cpp
 	tcuTestHierarchyUtil.hpp
+	tcuAstcUtil.cpp
+	tcuAstcUtil.hpp
 	)
 
 set(TCUTIL_LIBS
diff --git a/framework/common/tcuAstcUtil.cpp b/framework/common/tcuAstcUtil.cpp
new file mode 100644
index 0000000..0146cc6
--- /dev/null
+++ b/framework/common/tcuAstcUtil.cpp
@@ -0,0 +1,3012 @@
+/*-------------------------------------------------------------------------
+ * drawElements Quality Program Tester Core
+ * ----------------------------------------
+ *
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *//*!
+ * \file
+ * \brief ASTC Utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuAstcUtil.hpp"
+#include "deFloat16.h"
+#include "deRandom.hpp"
+#include "deMeta.hpp"
+
+#include <algorithm>
+
+namespace tcu
+{
+namespace astc
+{
+
+using std::vector;
+
+namespace
+{
+
+// Common utilities
+
+enum
+{
+	MAX_BLOCK_WIDTH		= 12,
+	MAX_BLOCK_HEIGHT	= 12
+};
+
+inline deUint32 getBit (deUint32 src, int ndx)
+{
+	DE_ASSERT(de::inBounds(ndx, 0, 32));
+	return (src >> ndx) & 1;
+}
+
+inline deUint32 getBits (deUint32 src, int low, int high)
+{
+	const int numBits = (high-low) + 1;
+
+	DE_ASSERT(de::inRange(numBits, 1, 32));
+
+	if (numBits < 32)
+		return (deUint32)((src >> low) & ((1u<<numBits)-1));
+	else
+		return (deUint32)((src >> low) & 0xFFFFFFFFu);
+}
+
+inline bool isBitSet (deUint32 src, int ndx)
+{
+	return getBit(src, ndx) != 0;
+}
+
+inline deUint32 reverseBits (deUint32 src, int numBits)
+{
+	DE_ASSERT(de::inRange(numBits, 0, 32));
+	deUint32 result = 0;
+	for (int i = 0; i < numBits; i++)
+		result |= ((src >> i) & 1) << (numBits-1-i);
+	return result;
+}
+
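+// Expand a numSrcBits-wide value to numDstBits bits by replicating its bit pattern from the most significant bit downwards.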
+inline deUint32 bitReplicationScale (deUint32 src, int numSrcBits, int numDstBits)
+{
+	DE_ASSERT(numSrcBits <= numDstBits);
+	DE_ASSERT((src & ((1<<numSrcBits)-1)) == src);
+	deUint32 dst = 0;
+	for (int shift = numDstBits-numSrcBits; shift > -numSrcBits; shift -= numSrcBits)
+		dst |= shift >= 0 ? src << shift : src >> -shift;
+	return dst;
+}
+
+inline deInt32 signExtend (deInt32 src, int numSrcBits)
+{
+	DE_ASSERT(de::inRange(numSrcBits, 2, 31));
+	const bool negative = (src & (1 << (numSrcBits-1))) != 0;
+	return src | (negative ? ~((1 << numSrcBits) - 1) : 0);
+}
+
+inline bool isFloat16InfOrNan (deFloat16 v)
+{
+	return getBits(v, 10, 14) == 31;
+}
+
+enum ISEMode
+{
+	ISEMODE_TRIT = 0,
+	ISEMODE_QUINT,
+	ISEMODE_PLAIN_BIT,
+
+	ISEMODE_LAST
+};
+
+struct ISEParams
+{
+	ISEMode		mode;
+	int			numBits;
+
+	ISEParams (ISEMode mode_, int numBits_) : mode(mode_), numBits(numBits_) {}
+};
+
+inline int computeNumRequiredBits (const ISEParams& iseParams, int numValues)
+{
+	switch (iseParams.mode)
+	{
+		case ISEMODE_TRIT:			return deDivRoundUp32(numValues*8, 5) + numValues*iseParams.numBits;
+		case ISEMODE_QUINT:			return deDivRoundUp32(numValues*7, 3) + numValues*iseParams.numBits;
+		case ISEMODE_PLAIN_BIT:		return numValues*iseParams.numBits;
+		default:
+			DE_ASSERT(false);
+			return -1;
+	}
+}
+
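+// Choose the ISE mode (trit, quint or plain bit) and bit count giving the largest value range
+// that still fits numValuesInSequence encoded values into numAvailableBits.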
+ISEParams computeMaximumRangeISEParams (int numAvailableBits, int numValuesInSequence)
+{
+	int curBitsForTritMode		= 6;
+	int curBitsForQuintMode		= 5;
+	int curBitsForPlainBitMode	= 8;
+
+	while (true)
+	{
+		DE_ASSERT(curBitsForTritMode > 0 || curBitsForQuintMode > 0 || curBitsForPlainBitMode > 0);
+
+		const int tritRange			= curBitsForTritMode > 0		? (3 << curBitsForTritMode) - 1			: -1;
+		const int quintRange		= curBitsForQuintMode > 0		? (5 << curBitsForQuintMode) - 1		: -1;
+		const int plainBitRange		= curBitsForPlainBitMode > 0	? (1 << curBitsForPlainBitMode) - 1		: -1;
+		const int maxRange			= de::max(de::max(tritRange, quintRange), plainBitRange);
+
+		if (maxRange == tritRange)
+		{
+			const ISEParams params(ISEMODE_TRIT, curBitsForTritMode);
+			if (computeNumRequiredBits(params, numValuesInSequence) <= numAvailableBits)
+				return ISEParams(ISEMODE_TRIT, curBitsForTritMode);
+			curBitsForTritMode--;
+		}
+		else if (maxRange == quintRange)
+		{
+			const ISEParams params(ISEMODE_QUINT, curBitsForQuintMode);
+			if (computeNumRequiredBits(params, numValuesInSequence) <= numAvailableBits)
+				return ISEParams(ISEMODE_QUINT, curBitsForQuintMode);
+			curBitsForQuintMode--;
+		}
+		else
+		{
+			const ISEParams params(ISEMODE_PLAIN_BIT, curBitsForPlainBitMode);
+			DE_ASSERT(maxRange == plainBitRange);
+			if (computeNumRequiredBits(params, numValuesInSequence) <= numAvailableBits)
+				return ISEParams(ISEMODE_PLAIN_BIT, curBitsForPlainBitMode);
+			curBitsForPlainBitMode--;
+		}
+	}
+}
+
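+// Number of integer values consumed by a color endpoint mode: modes 0-3 need 2 values, 4-7 need 4, 8-11 need 6 and 12-15 need 8.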
+inline int computeNumColorEndpointValues (deUint32 endpointMode)
+{
+	DE_ASSERT(endpointMode < 16);
+	return (endpointMode/4 + 1) * 2;
+}
+
+// Decompression utilities
+
+enum DecompressResult
+{
+	DECOMPRESS_RESULT_VALID_BLOCK	= 0,	//!< Decompressed valid block
+	DECOMPRESS_RESULT_ERROR,				//!< Encountered error while decompressing, error color written
+
+	DECOMPRESS_RESULT_LAST
+};
+
+// A helper for getting bits from a 128-bit block.
+class Block128
+{
+private:
+	typedef deUint64 Word;
+
+	enum
+	{
+		WORD_BYTES	= sizeof(Word),
+		WORD_BITS	= 8*WORD_BYTES,
+		NUM_WORDS	= 128 / WORD_BITS
+	};
+
+	DE_STATIC_ASSERT(128 % WORD_BITS == 0);
+
+public:
+	Block128 (const deUint8* src)
+	{
+		for (int wordNdx = 0; wordNdx < NUM_WORDS; wordNdx++)
+		{
+			m_words[wordNdx] = 0;
+			for (int byteNdx = 0; byteNdx < WORD_BYTES; byteNdx++)
+				m_words[wordNdx] |= (Word)src[wordNdx*WORD_BYTES + byteNdx] << (8*byteNdx);
+		}
+	}
+
+	deUint32 getBit (int ndx) const
+	{
+		DE_ASSERT(de::inBounds(ndx, 0, 128));
+		return (m_words[ndx / WORD_BITS] >> (ndx % WORD_BITS)) & 1;
+	}
+
+	deUint32 getBits (int low, int high) const
+	{
+		DE_ASSERT(de::inBounds(low, 0, 128));
+		DE_ASSERT(de::inBounds(high, 0, 128));
+		DE_ASSERT(de::inRange(high-low+1, 0, 32));
+
+		if (high-low+1 == 0)
+			return 0;
+
+		const int word0Ndx = low / WORD_BITS;
+		const int word1Ndx = high / WORD_BITS;
+
+		// \note "foo << bar << 1" done instead of "foo << (bar+1)" to avoid overflow, i.e. shift amount being too big.
+
+		if (word0Ndx == word1Ndx)
+			return (deUint32)((m_words[word0Ndx] & ((((Word)1 << high%WORD_BITS << 1) - 1))) >> ((Word)low % WORD_BITS));
+		else
+		{
+			DE_ASSERT(word1Ndx == word0Ndx + 1);
+
+			return (deUint32)(m_words[word0Ndx] >> (low%WORD_BITS)) |
+				   (deUint32)((m_words[word1Ndx] & (((Word)1 << high%WORD_BITS << 1) - 1)) << (high-low - high%WORD_BITS));
+		}
+	}
+
+	bool isBitSet (int ndx) const
+	{
+		DE_ASSERT(de::inBounds(ndx, 0, 128));
+		return getBit(ndx) != 0;
+	}
+
+private:
+	Word m_words[NUM_WORDS];
+};
+
+// A helper for sequential access into a Block128.
+class BitAccessStream
+{
+public:
+	BitAccessStream (const Block128& src, int startNdxInSrc, int length, bool forward)
+		: m_src				(src)
+		, m_startNdxInSrc	(startNdxInSrc)
+		, m_length			(length)
+		, m_forward			(forward)
+		, m_ndx				(0)
+	{
+	}
+
+	// Get the next num bits. Bits at positions greater than or equal to m_length are zeros.
+	deUint32 getNext (int num)
+	{
+		if (num == 0 || m_ndx >= m_length)
+			return 0;
+
+		const int end				= m_ndx + num;
+		const int numBitsFromSrc	= de::max(0, de::min(m_length, end) - m_ndx);
+		const int low				= m_ndx;
+		const int high				= m_ndx + numBitsFromSrc - 1;
+
+		m_ndx += num;
+
+		return m_forward ?			   m_src.getBits(m_startNdxInSrc + low,  m_startNdxInSrc + high)
+						 : reverseBits(m_src.getBits(m_startNdxInSrc - high, m_startNdxInSrc - low), numBitsFromSrc);
+	}
+
+private:
+	const Block128&		m_src;
+	const int			m_startNdxInSrc;
+	const int			m_length;
+	const bool			m_forward;
+
+	int					m_ndx;
+};
+
+struct ISEDecodedResult
+{
+	deUint32 m;
+	deUint32 tq; //!< Trit or quint value, depending on ISE mode.
+	deUint32 v;
+};
+
+// Data from an ASTC block's "block mode" part (i.e. bits [0,10]).
+struct ASTCBlockMode
+{
+	bool		isError;
+	// \note Following fields only relevant if !isError.
+	bool		isVoidExtent;
+	// \note Following fields only relevant if !isVoidExtent.
+	bool		isDualPlane;
+	int			weightGridWidth;
+	int			weightGridHeight;
+	ISEParams	weightISEParams;
+
+	ASTCBlockMode (void)
+		: isError			(true)
+		, isVoidExtent		(true)
+		, isDualPlane		(true)
+		, weightGridWidth	(-1)
+		, weightGridHeight	(-1)
+		, weightISEParams	(ISEMODE_LAST, -1)
+	{
+	}
+};
+
+inline int computeNumWeights (const ASTCBlockMode& mode)
+{
+	return mode.weightGridWidth * mode.weightGridHeight * (mode.isDualPlane ? 2 : 1);
+}
+
+struct ColorEndpointPair
+{
+	UVec4 e0;
+	UVec4 e1;
+};
+
+struct TexelWeightPair
+{
+	deUint32 w[2];
+};
+
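+// Decode the block mode field (bits [0,10]): void-extent flag, weight grid dimensions, dual-plane flag
+// and weight ISE parameters. Reserved encodings are reported via isError.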
+ASTCBlockMode getASTCBlockMode (deUint32 blockModeData)
+{
+	ASTCBlockMode blockMode;
+	blockMode.isError = true; // \note Set to false later, if not error.
+
+	blockMode.isVoidExtent = getBits(blockModeData, 0, 8) == 0x1fc;
+
+	if (!blockMode.isVoidExtent)
+	{
+		if ((getBits(blockModeData, 0, 1) == 0 && getBits(blockModeData, 6, 8) == 7) || getBits(blockModeData, 0, 3) == 0)
+			return blockMode; // Invalid ("reserved").
+
+		deUint32 r = (deUint32)-1; // \note Set in the following branches.
+
+		if (getBits(blockModeData, 0, 1) == 0)
+		{
+			const deUint32 r0	= getBit(blockModeData, 4);
+			const deUint32 r1	= getBit(blockModeData, 2);
+			const deUint32 r2	= getBit(blockModeData, 3);
+			const deUint32 i78	= getBits(blockModeData, 7, 8);
+
+			r = (r2 << 2) | (r1 << 1) | (r0 << 0);
+
+			if (i78 == 3)
+			{
+				const bool i5 = isBitSet(blockModeData, 5);
+				blockMode.weightGridWidth	= i5 ? 10 : 6;
+				blockMode.weightGridHeight	= i5 ? 6  : 10;
+			}
+			else
+			{
+				const deUint32 a = getBits(blockModeData, 5, 6);
+				switch (i78)
+				{
+					case 0:		blockMode.weightGridWidth = 12;		blockMode.weightGridHeight = a + 2;									break;
+					case 1:		blockMode.weightGridWidth = a + 2;	blockMode.weightGridHeight = 12;									break;
+					case 2:		blockMode.weightGridWidth = a + 6;	blockMode.weightGridHeight = getBits(blockModeData, 9, 10) + 6;		break;
+					default: DE_ASSERT(false);
+				}
+			}
+		}
+		else
+		{
+			const deUint32 r0	= getBit(blockModeData, 4);
+			const deUint32 r1	= getBit(blockModeData, 0);
+			const deUint32 r2	= getBit(blockModeData, 1);
+			const deUint32 i23	= getBits(blockModeData, 2, 3);
+			const deUint32 a	= getBits(blockModeData, 5, 6);
+
+			r = (r2 << 2) | (r1 << 1) | (r0 << 0);
+
+			if (i23 == 3)
+			{
+				const deUint32	b	= getBit(blockModeData, 7);
+				const bool		i8	= isBitSet(blockModeData, 8);
+				blockMode.weightGridWidth	= i8 ? b+2 : a+2;
+				blockMode.weightGridHeight	= i8 ? a+2 : b+6;
+			}
+			else
+			{
+				const deUint32 b = getBits(blockModeData, 7, 8);
+
+				switch (i23)
+				{
+					case 0:		blockMode.weightGridWidth = b + 4;	blockMode.weightGridHeight = a + 2;	break;
+					case 1:		blockMode.weightGridWidth = b + 8;	blockMode.weightGridHeight = a + 2;	break;
+					case 2:		blockMode.weightGridWidth = a + 2;	blockMode.weightGridHeight = b + 8;	break;
+					default: DE_ASSERT(false);
+				}
+			}
+		}
+
+		const bool	zeroDH		= getBits(blockModeData, 0, 1) == 0 && getBits(blockModeData, 7, 8) == 2;
+		const bool	h			= zeroDH ? false : isBitSet(blockModeData, 9);
+		blockMode.isDualPlane	= zeroDH ? false : isBitSet(blockModeData, 10);
+
+		{
+			ISEMode&	m	= blockMode.weightISEParams.mode;
+			int&		b	= blockMode.weightISEParams.numBits;
+			m = ISEMODE_PLAIN_BIT;
+			b = 0;
+
+			if (h)
+			{
+				switch (r)
+				{
+					case 2:							m = ISEMODE_QUINT;	b = 1;	break;
+					case 3:		m = ISEMODE_TRIT;						b = 2;	break;
+					case 4:												b = 4;	break;
+					case 5:							m = ISEMODE_QUINT;	b = 2;	break;
+					case 6:		m = ISEMODE_TRIT;						b = 3;	break;
+					case 7:												b = 5;	break;
+					default:	DE_ASSERT(false);
+				}
+			}
+			else
+			{
+				switch (r)
+				{
+					case 2:												b = 1;	break;
+					case 3:		m = ISEMODE_TRIT;								break;
+					case 4:												b = 2;	break;
+					case 5:							m = ISEMODE_QUINT;			break;
+					case 6:		m = ISEMODE_TRIT;						b = 1;	break;
+					case 7:												b = 3;	break;
+					default:	DE_ASSERT(false);
+				}
+			}
+		}
+	}
+
+	blockMode.isError = false;
+	return blockMode;
+}
+
+inline void setASTCErrorColorBlock (void* dst, int blockWidth, int blockHeight, bool isSRGB)
+{
+	if (isSRGB)
+	{
+		deUint8* const dstU = (deUint8*)dst;
+
+		for (int i = 0; i < blockWidth*blockHeight; i++)
+		{
+			dstU[4*i + 0] = 0xff;
+			dstU[4*i + 1] = 0;
+			dstU[4*i + 2] = 0xff;
+			dstU[4*i + 3] = 0xff;
+		}
+	}
+	else
+	{
+		float* const dstF = (float*)dst;
+
+		for (int i = 0; i < blockWidth*blockHeight; i++)
+		{
+			dstF[4*i + 0] = 1.0f;
+			dstF[4*i + 1] = 0.0f;
+			dstF[4*i + 2] = 1.0f;
+			dstF[4*i + 3] = 1.0f;
+		}
+	}
+}
+
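+// Decode a void-extent block: the whole block is a single constant color, stored as four 16-bit channel values in bits [64,127].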
+DecompressResult decodeVoidExtentBlock (void* dst, const Block128& blockData, int blockWidth, int blockHeight, bool isSRGB, bool isLDRMode)
+{
+	const deUint32	minSExtent			= blockData.getBits(12, 24);
+	const deUint32	maxSExtent			= blockData.getBits(25, 37);
+	const deUint32	minTExtent			= blockData.getBits(38, 50);
+	const deUint32	maxTExtent			= blockData.getBits(51, 63);
+	const bool		allExtentsAllOnes	= minSExtent == 0x1fff && maxSExtent == 0x1fff && minTExtent == 0x1fff && maxTExtent == 0x1fff;
+	const bool		isHDRBlock			= blockData.isBitSet(9);
+
+	if ((isLDRMode && isHDRBlock) || (!allExtentsAllOnes && (minSExtent >= maxSExtent || minTExtent >= maxTExtent)))
+	{
+		setASTCErrorColorBlock(dst, blockWidth, blockHeight, isSRGB);
+		return DECOMPRESS_RESULT_ERROR;
+	}
+
+	const deUint32 rgba[4] =
+	{
+		blockData.getBits(64,  79),
+		blockData.getBits(80,  95),
+		blockData.getBits(96,  111),
+		blockData.getBits(112, 127)
+	};
+
+	if (isSRGB)
+	{
+		deUint8* const dstU = (deUint8*)dst;
+		for (int i = 0; i < blockWidth*blockHeight; i++)
+		for (int c = 0; c < 4; c++)
+			dstU[i*4 + c] = (deUint8)((rgba[c] & 0xff00) >> 8);
+	}
+	else
+	{
+		float* const dstF = (float*)dst;
+
+		if (isHDRBlock)
+		{
+			for (int c = 0; c < 4; c++)
+			{
+				if (isFloat16InfOrNan((deFloat16)rgba[c]))
+					throw InternalError("Infinity or NaN color component in HDR void extent block in ASTC texture (behavior undefined by ASTC specification)");
+			}
+
+			for (int i = 0; i < blockWidth*blockHeight; i++)
+			for (int c = 0; c < 4; c++)
+				dstF[i*4 + c] = deFloat16To32((deFloat16)rgba[c]);
+		}
+		else
+		{
+			for (int i = 0; i < blockWidth*blockHeight; i++)
+			for (int c = 0; c < 4; c++)
+				dstF[i*4 + c] = rgba[c] == 65535 ? 1.0f : (float)rgba[c] / 65536.0f;
+		}
+	}
+
+	return DECOMPRESS_RESULT_VALID_BLOCK;
+}
+
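+// Decode the color endpoint mode of each partition: either a single 4-bit mode shared by all partitions,
+// or per-partition modes assembled from the class selector and the extra CEM bits.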
+void decodeColorEndpointModes (deUint32* endpointModesDst, const Block128& blockData, int numPartitions, int extraCemBitsStart)
+{
+	if (numPartitions == 1)
+		endpointModesDst[0] = blockData.getBits(13, 16);
+	else
+	{
+		const deUint32 highLevelSelector = blockData.getBits(23, 24);
+
+		if (highLevelSelector == 0)
+		{
+			const deUint32 mode = blockData.getBits(25, 28);
+			for (int i = 0; i < numPartitions; i++)
+				endpointModesDst[i] = mode;
+		}
+		else
+		{
+			for (int partNdx = 0; partNdx < numPartitions; partNdx++)
+			{
+				const deUint32 cemClass		= highLevelSelector - (blockData.isBitSet(25 + partNdx) ? 0 : 1);
+				const deUint32 lowBit0Ndx	= numPartitions + 2*partNdx;
+				const deUint32 lowBit1Ndx	= numPartitions + 2*partNdx + 1;
+				const deUint32 lowBit0		= blockData.getBit(lowBit0Ndx < 4 ? 25+lowBit0Ndx : extraCemBitsStart+lowBit0Ndx-4);
+				const deUint32 lowBit1		= blockData.getBit(lowBit1Ndx < 4 ? 25+lowBit1Ndx : extraCemBitsStart+lowBit1Ndx-4);
+
+				endpointModesDst[partNdx] = (cemClass << 2) | (lowBit1 << 1) | lowBit0;
+			}
+		}
+	}
+}
+
+int computeNumColorEndpointValues (const deUint32* endpointModes, int numPartitions)
+{
+	int result = 0;
+	for (int i = 0; i < numPartitions; i++)
+		result += computeNumColorEndpointValues(endpointModes[i]);
+	return result;
+}
+
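+// Decode one trit block of the integer sequence encoding: up to 5 values share an 8-bit packed trit field T,
+// which is expanded via the tritsFromT table; each decoded value is (trit << numBits) + m.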
+void decodeISETritBlock (ISEDecodedResult* dst, int numValues, BitAccessStream& data, int numBits)
+{
+	DE_ASSERT(de::inRange(numValues, 1, 5));
+
+	deUint32 m[5];
+
+	m[0]			= data.getNext(numBits);
+	deUint32 T01	= data.getNext(2);
+	m[1]			= data.getNext(numBits);
+	deUint32 T23	= data.getNext(2);
+	m[2]			= data.getNext(numBits);
+	deUint32 T4		= data.getNext(1);
+	m[3]			= data.getNext(numBits);
+	deUint32 T56	= data.getNext(2);
+	m[4]			= data.getNext(numBits);
+	deUint32 T7		= data.getNext(1);
+
+	switch (numValues)
+	{
+		// \note Fall-throughs.
+		case 1: T23		= 0;
+		case 2: T4		= 0;
+		case 3: T56		= 0;
+		case 4: T7		= 0;
+		case 5: break;
+		default:
+			DE_ASSERT(false);
+	}
+
+	const deUint32 T = (T7 << 7) | (T56 << 5) | (T4 << 4) | (T23 << 2) | (T01 << 0);
+
+	static const deUint32 tritsFromT[256][5] =
+	{
+		{ 0,0,0,0,0 }, { 1,0,0,0,0 }, { 2,0,0,0,0 }, { 0,0,2,0,0 }, { 0,1,0,0,0 }, { 1,1,0,0,0 }, { 2,1,0,0,0 }, { 1,0,2,0,0 }, { 0,2,0,0,0 }, { 1,2,0,0,0 }, { 2,2,0,0,0 }, { 2,0,2,0,0 }, { 0,2,2,0,0 }, { 1,2,2,0,0 }, { 2,2,2,0,0 }, { 2,0,2,0,0 },
+		{ 0,0,1,0,0 }, { 1,0,1,0,0 }, { 2,0,1,0,0 }, { 0,1,2,0,0 }, { 0,1,1,0,0 }, { 1,1,1,0,0 }, { 2,1,1,0,0 }, { 1,1,2,0,0 }, { 0,2,1,0,0 }, { 1,2,1,0,0 }, { 2,2,1,0,0 }, { 2,1,2,0,0 }, { 0,0,0,2,2 }, { 1,0,0,2,2 }, { 2,0,0,2,2 }, { 0,0,2,2,2 },
+		{ 0,0,0,1,0 }, { 1,0,0,1,0 }, { 2,0,0,1,0 }, { 0,0,2,1,0 }, { 0,1,0,1,0 }, { 1,1,0,1,0 }, { 2,1,0,1,0 }, { 1,0,2,1,0 }, { 0,2,0,1,0 }, { 1,2,0,1,0 }, { 2,2,0,1,0 }, { 2,0,2,1,0 }, { 0,2,2,1,0 }, { 1,2,2,1,0 }, { 2,2,2,1,0 }, { 2,0,2,1,0 },
+		{ 0,0,1,1,0 }, { 1,0,1,1,0 }, { 2,0,1,1,0 }, { 0,1,2,1,0 }, { 0,1,1,1,0 }, { 1,1,1,1,0 }, { 2,1,1,1,0 }, { 1,1,2,1,0 }, { 0,2,1,1,0 }, { 1,2,1,1,0 }, { 2,2,1,1,0 }, { 2,1,2,1,0 }, { 0,1,0,2,2 }, { 1,1,0,2,2 }, { 2,1,0,2,2 }, { 1,0,2,2,2 },
+		{ 0,0,0,2,0 }, { 1,0,0,2,0 }, { 2,0,0,2,0 }, { 0,0,2,2,0 }, { 0,1,0,2,0 }, { 1,1,0,2,0 }, { 2,1,0,2,0 }, { 1,0,2,2,0 }, { 0,2,0,2,0 }, { 1,2,0,2,0 }, { 2,2,0,2,0 }, { 2,0,2,2,0 }, { 0,2,2,2,0 }, { 1,2,2,2,0 }, { 2,2,2,2,0 }, { 2,0,2,2,0 },
+		{ 0,0,1,2,0 }, { 1,0,1,2,0 }, { 2,0,1,2,0 }, { 0,1,2,2,0 }, { 0,1,1,2,0 }, { 1,1,1,2,0 }, { 2,1,1,2,0 }, { 1,1,2,2,0 }, { 0,2,1,2,0 }, { 1,2,1,2,0 }, { 2,2,1,2,0 }, { 2,1,2,2,0 }, { 0,2,0,2,2 }, { 1,2,0,2,2 }, { 2,2,0,2,2 }, { 2,0,2,2,2 },
+		{ 0,0,0,0,2 }, { 1,0,0,0,2 }, { 2,0,0,0,2 }, { 0,0,2,0,2 }, { 0,1,0,0,2 }, { 1,1,0,0,2 }, { 2,1,0,0,2 }, { 1,0,2,0,2 }, { 0,2,0,0,2 }, { 1,2,0,0,2 }, { 2,2,0,0,2 }, { 2,0,2,0,2 }, { 0,2,2,0,2 }, { 1,2,2,0,2 }, { 2,2,2,0,2 }, { 2,0,2,0,2 },
+		{ 0,0,1,0,2 }, { 1,0,1,0,2 }, { 2,0,1,0,2 }, { 0,1,2,0,2 }, { 0,1,1,0,2 }, { 1,1,1,0,2 }, { 2,1,1,0,2 }, { 1,1,2,0,2 }, { 0,2,1,0,2 }, { 1,2,1,0,2 }, { 2,2,1,0,2 }, { 2,1,2,0,2 }, { 0,2,2,2,2 }, { 1,2,2,2,2 }, { 2,2,2,2,2 }, { 2,0,2,2,2 },
+		{ 0,0,0,0,1 }, { 1,0,0,0,1 }, { 2,0,0,0,1 }, { 0,0,2,0,1 }, { 0,1,0,0,1 }, { 1,1,0,0,1 }, { 2,1,0,0,1 }, { 1,0,2,0,1 }, { 0,2,0,0,1 }, { 1,2,0,0,1 }, { 2,2,0,0,1 }, { 2,0,2,0,1 }, { 0,2,2,0,1 }, { 1,2,2,0,1 }, { 2,2,2,0,1 }, { 2,0,2,0,1 },
+		{ 0,0,1,0,1 }, { 1,0,1,0,1 }, { 2,0,1,0,1 }, { 0,1,2,0,1 }, { 0,1,1,0,1 }, { 1,1,1,0,1 }, { 2,1,1,0,1 }, { 1,1,2,0,1 }, { 0,2,1,0,1 }, { 1,2,1,0,1 }, { 2,2,1,0,1 }, { 2,1,2,0,1 }, { 0,0,1,2,2 }, { 1,0,1,2,2 }, { 2,0,1,2,2 }, { 0,1,2,2,2 },
+		{ 0,0,0,1,1 }, { 1,0,0,1,1 }, { 2,0,0,1,1 }, { 0,0,2,1,1 }, { 0,1,0,1,1 }, { 1,1,0,1,1 }, { 2,1,0,1,1 }, { 1,0,2,1,1 }, { 0,2,0,1,1 }, { 1,2,0,1,1 }, { 2,2,0,1,1 }, { 2,0,2,1,1 }, { 0,2,2,1,1 }, { 1,2,2,1,1 }, { 2,2,2,1,1 }, { 2,0,2,1,1 },
+		{ 0,0,1,1,1 }, { 1,0,1,1,1 }, { 2,0,1,1,1 }, { 0,1,2,1,1 }, { 0,1,1,1,1 }, { 1,1,1,1,1 }, { 2,1,1,1,1 }, { 1,1,2,1,1 }, { 0,2,1,1,1 }, { 1,2,1,1,1 }, { 2,2,1,1,1 }, { 2,1,2,1,1 }, { 0,1,1,2,2 }, { 1,1,1,2,2 }, { 2,1,1,2,2 }, { 1,1,2,2,2 },
+		{ 0,0,0,2,1 }, { 1,0,0,2,1 }, { 2,0,0,2,1 }, { 0,0,2,2,1 }, { 0,1,0,2,1 }, { 1,1,0,2,1 }, { 2,1,0,2,1 }, { 1,0,2,2,1 }, { 0,2,0,2,1 }, { 1,2,0,2,1 }, { 2,2,0,2,1 }, { 2,0,2,2,1 }, { 0,2,2,2,1 }, { 1,2,2,2,1 }, { 2,2,2,2,1 }, { 2,0,2,2,1 },
+		{ 0,0,1,2,1 }, { 1,0,1,2,1 }, { 2,0,1,2,1 }, { 0,1,2,2,1 }, { 0,1,1,2,1 }, { 1,1,1,2,1 }, { 2,1,1,2,1 }, { 1,1,2,2,1 }, { 0,2,1,2,1 }, { 1,2,1,2,1 }, { 2,2,1,2,1 }, { 2,1,2,2,1 }, { 0,2,1,2,2 }, { 1,2,1,2,2 }, { 2,2,1,2,2 }, { 2,1,2,2,2 },
+		{ 0,0,0,1,2 }, { 1,0,0,1,2 }, { 2,0,0,1,2 }, { 0,0,2,1,2 }, { 0,1,0,1,2 }, { 1,1,0,1,2 }, { 2,1,0,1,2 }, { 1,0,2,1,2 }, { 0,2,0,1,2 }, { 1,2,0,1,2 }, { 2,2,0,1,2 }, { 2,0,2,1,2 }, { 0,2,2,1,2 }, { 1,2,2,1,2 }, { 2,2,2,1,2 }, { 2,0,2,1,2 },
+		{ 0,0,1,1,2 }, { 1,0,1,1,2 }, { 2,0,1,1,2 }, { 0,1,2,1,2 }, { 0,1,1,1,2 }, { 1,1,1,1,2 }, { 2,1,1,1,2 }, { 1,1,2,1,2 }, { 0,2,1,1,2 }, { 1,2,1,1,2 }, { 2,2,1,1,2 }, { 2,1,2,1,2 }, { 0,2,2,2,2 }, { 1,2,2,2,2 }, { 2,2,2,2,2 }, { 2,1,2,2,2 }
+	};
+
+	const deUint32 (& trits)[5] = tritsFromT[T];
+
+	for (int i = 0; i < numValues; i++)
+	{
+		dst[i].m	= m[i];
+		dst[i].tq	= trits[i];
+		dst[i].v	= (trits[i] << numBits) + m[i];
+	}
+}
+
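+// Decode a single ISE quint block: up to three values, each consisting of a quint (base-5 digit)
+// unpacked from the shared 7-bit Q field plus numBits of plain low-order bits.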
+void decodeISEQuintBlock (ISEDecodedResult* dst, int numValues, BitAccessStream& data, int numBits)
+{
+	DE_ASSERT(de::inRange(numValues, 1, 3));
+
+	deUint32 m[3];
+
+	m[0]			= data.getNext(numBits);
+	deUint32 Q012	= data.getNext(3);
+	m[1]			= data.getNext(numBits);
+	deUint32 Q34	= data.getNext(2);
+	m[2]			= data.getNext(numBits);
+	deUint32 Q56	= data.getNext(2);
+
+	switch (numValues)
+	{
+		// \note Fall-throughs.
+		case 1: Q34		= 0;
+		case 2: Q56		= 0;
+		case 3: break;
+		default:
+			DE_ASSERT(false);
+	}
+
+	const deUint32 Q = (Q56 << 5) | (Q34 << 3) | (Q012 << 0);
+
+	static const deUint32 quintsFromQ[256][3] =
+	{
+		{ 0,0,0 }, { 1,0,0 }, { 2,0,0 }, { 3,0,0 }, { 4,0,0 }, { 0,4,0 }, { 4,4,0 }, { 4,4,4 }, { 0,1,0 }, { 1,1,0 }, { 2,1,0 }, { 3,1,0 }, { 4,1,0 }, { 1,4,0 }, { 4,4,1 }, { 4,4,4 },
+		{ 0,2,0 }, { 1,2,0 }, { 2,2,0 }, { 3,2,0 }, { 4,2,0 }, { 2,4,0 }, { 4,4,2 }, { 4,4,4 }, { 0,3,0 }, { 1,3,0 }, { 2,3,0 }, { 3,3,0 }, { 4,3,0 }, { 3,4,0 }, { 4,4,3 }, { 4,4,4 },
+		{ 0,0,1 }, { 1,0,1 }, { 2,0,1 }, { 3,0,1 }, { 4,0,1 }, { 0,4,1 }, { 4,0,4 }, { 0,4,4 }, { 0,1,1 }, { 1,1,1 }, { 2,1,1 }, { 3,1,1 }, { 4,1,1 }, { 1,4,1 }, { 4,1,4 }, { 1,4,4 },
+		{ 0,2,1 }, { 1,2,1 }, { 2,2,1 }, { 3,2,1 }, { 4,2,1 }, { 2,4,1 }, { 4,2,4 }, { 2,4,4 }, { 0,3,1 }, { 1,3,1 }, { 2,3,1 }, { 3,3,1 }, { 4,3,1 }, { 3,4,1 }, { 4,3,4 }, { 3,4,4 },
+		{ 0,0,2 }, { 1,0,2 }, { 2,0,2 }, { 3,0,2 }, { 4,0,2 }, { 0,4,2 }, { 2,0,4 }, { 3,0,4 }, { 0,1,2 }, { 1,1,2 }, { 2,1,2 }, { 3,1,2 }, { 4,1,2 }, { 1,4,2 }, { 2,1,4 }, { 3,1,4 },
+		{ 0,2,2 }, { 1,2,2 }, { 2,2,2 }, { 3,2,2 }, { 4,2,2 }, { 2,4,2 }, { 2,2,4 }, { 3,2,4 }, { 0,3,2 }, { 1,3,2 }, { 2,3,2 }, { 3,3,2 }, { 4,3,2 }, { 3,4,2 }, { 2,3,4 }, { 3,3,4 },
+		{ 0,0,3 }, { 1,0,3 }, { 2,0,3 }, { 3,0,3 }, { 4,0,3 }, { 0,4,3 }, { 0,0,4 }, { 1,0,4 }, { 0,1,3 }, { 1,1,3 }, { 2,1,3 }, { 3,1,3 }, { 4,1,3 }, { 1,4,3 }, { 0,1,4 }, { 1,1,4 },
+		{ 0,2,3 }, { 1,2,3 }, { 2,2,3 }, { 3,2,3 }, { 4,2,3 }, { 2,4,3 }, { 0,2,4 }, { 1,2,4 }, { 0,3,3 }, { 1,3,3 }, { 2,3,3 }, { 3,3,3 }, { 4,3,3 }, { 3,4,3 }, { 0,3,4 }, { 1,3,4 }
+	};
+
+	const deUint32 (& quints)[3] = quintsFromQ[Q];
+
+	for (int i = 0; i < numValues; i++)
+	{
+		dst[i].m	= m[i];
+		dst[i].tq	= quints[i];
+		dst[i].v	= (quints[i] << numBits) + m[i];
+	}
+}
+
+inline void decodeISEBitBlock (ISEDecodedResult* dst, BitAccessStream& data, int numBits)
+{
+	dst[0].m = data.getNext(numBits);
+	dst[0].v = dst[0].m;
+}
+
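+// Decode an entire integer sequence: trit mode in groups of five values, quint mode in groups of three, plain-bit mode one value at a time.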
+void decodeISE (ISEDecodedResult* dst, int numValues, BitAccessStream& data, const ISEParams& params)
+{
+	if (params.mode == ISEMODE_TRIT)
+	{
+		const int numBlocks = deDivRoundUp32(numValues, 5);
+		for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+		{
+			const int numValuesInBlock = blockNdx == numBlocks-1 ? numValues - 5*(numBlocks-1) : 5;
+			decodeISETritBlock(&dst[5*blockNdx], numValuesInBlock, data, params.numBits);
+		}
+	}
+	else if (params.mode == ISEMODE_QUINT)
+	{
+		const int numBlocks = deDivRoundUp32(numValues, 3);
+		for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+		{
+			const int numValuesInBlock = blockNdx == numBlocks-1 ? numValues - 3*(numBlocks-1) : 3;
+			decodeISEQuintBlock(&dst[3*blockNdx], numValuesInBlock, data, params.numBits);
+		}
+	}
+	else
+	{
+		DE_ASSERT(params.mode == ISEMODE_PLAIN_BIT);
+		for (int i = 0; i < numValues; i++)
+			decodeISEBitBlock(&dst[i], data, params.numBits);
+	}
+}
+
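+// Unquantize ISE-decoded color endpoint values to the range [0, 255].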
+void unquantizeColorEndpoints (deUint32* dst, const ISEDecodedResult* iseResults, int numEndpoints, const ISEParams& iseParams)
+{
+	if (iseParams.mode == ISEMODE_TRIT || iseParams.mode == ISEMODE_QUINT)
+	{
+		const int rangeCase				= iseParams.numBits*2 - (iseParams.mode == ISEMODE_TRIT ? 2 : 1);
+		DE_ASSERT(de::inRange(rangeCase, 0, 10));
+		static const deUint32	Ca[11]	= { 204, 113, 93, 54, 44, 26, 22, 13, 11, 6, 5 };
+		const deUint32			C		= Ca[rangeCase];
+
+		for (int endpointNdx = 0; endpointNdx < numEndpoints; endpointNdx++)
+		{
+			const deUint32 a = getBit(iseResults[endpointNdx].m, 0);
+			const deUint32 b = getBit(iseResults[endpointNdx].m, 1);
+			const deUint32 c = getBit(iseResults[endpointNdx].m, 2);
+			const deUint32 d = getBit(iseResults[endpointNdx].m, 3);
+			const deUint32 e = getBit(iseResults[endpointNdx].m, 4);
+			const deUint32 f = getBit(iseResults[endpointNdx].m, 5);
+
+			const deUint32 A = a == 0 ? 0 : (1<<9)-1;
+			const deUint32 B = rangeCase == 0	? 0
+							 : rangeCase == 1	? 0
+							 : rangeCase == 2	? (b << 8) |									(b << 4) |				(b << 2) |	(b << 1)
+							 : rangeCase == 3	? (b << 8) |												(b << 3) |	(b << 2)
+							 : rangeCase == 4	? (c << 8) | (b << 7) |										(c << 3) |	(b << 2) |	(c << 1) |	(b << 0)
+							 : rangeCase == 5	? (c << 8) | (b << 7) |													(c << 2) |	(b << 1) |	(c << 0)
+							 : rangeCase == 6	? (d << 8) | (c << 7) | (b << 6) |										(d << 2) |	(c << 1) |	(b << 0)
+							 : rangeCase == 7	? (d << 8) | (c << 7) | (b << 6) |													(d << 1) |	(c << 0)
+							 : rangeCase == 8	? (e << 8) | (d << 7) | (c << 6) | (b << 5) |										(e << 1) |	(d << 0)
+							 : rangeCase == 9	? (e << 8) | (d << 7) | (c << 6) | (b << 5) |													(e << 0)
+							 : rangeCase == 10	? (f << 8) | (e << 7) | (d << 6) | (c << 5) |	(b << 4) |										(f << 0)
+							 : (deUint32)-1;
+			DE_ASSERT(B != (deUint32)-1);
+
+			dst[endpointNdx] = (((iseResults[endpointNdx].tq*C + B) ^ A) >> 2) | (A & 0x80);
+		}
+	}
+	else
+	{
+		DE_ASSERT(iseParams.mode == ISEMODE_PLAIN_BIT);
+
+		for (int endpointNdx = 0; endpointNdx < numEndpoints; endpointNdx++)
+			dst[endpointNdx] = bitReplicationScale(iseResults[endpointNdx].v, iseParams.numBits, 8);
+	}
+}
+
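+// The bit_transfer_signed() operation of the ASTC spec: the top bit of a is moved into the top bit of b,
+// and a is reduced to a sign-extended 6-bit value.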
+inline void bitTransferSigned (deInt32& a, deInt32& b)
+{
+	b >>= 1;
+	b |= a & 0x80;
+	a >>= 1;
+	a &= 0x3f;
+	if (isBitSet(a, 5))
+		a -= 0x40;
+}
+
+inline UVec4 clampedRGBA (const IVec4& rgba)
+{
+	return UVec4(de::clamp(rgba.x(), 0, 0xff),
+				 de::clamp(rgba.y(), 0, 0xff),
+				 de::clamp(rgba.z(), 0, 0xff),
+				 de::clamp(rgba.w(), 0, 0xff));
+}
+
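+// The blue_contract() transform of the ASTC spec: red and green are averaged with blue.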
+inline IVec4 blueContract (int r, int g, int b, int a)
+{
+	return IVec4((r+b)>>1, (g+b)>>1, b, a);
+}
+
+inline bool isColorEndpointModeHDR (deUint32 mode)
+{
+	return mode == 2	||
+		   mode == 3	||
+		   mode == 7	||
+		   mode == 11	||
+		   mode == 14	||
+		   mode == 15;
+}
+
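+// Decode endpoints for color endpoint mode 7 (HDR RGB, base+scale).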
+void decodeHDREndpointMode7 (UVec4& e0, UVec4& e1, deUint32 v0, deUint32 v1, deUint32 v2, deUint32 v3)
+{
+	const deUint32 m10		= getBit(v1, 7) | (getBit(v2, 7) << 1);
+	const deUint32 m23		= getBits(v0, 6, 7);
+	const deUint32 majComp	= m10 != 3	? m10
+							: m23 != 3	? m23
+							:			  0;
+	const deUint32 mode		= m10 != 3	? m23
+							: m23 != 3	? 4
+							:			  5;
+
+	deInt32			red		= (deInt32)getBits(v0, 0, 5);
+	deInt32			green	= (deInt32)getBits(v1, 0, 4);
+	deInt32			blue	= (deInt32)getBits(v2, 0, 4);
+	deInt32			scale	= (deInt32)getBits(v3, 0, 4);
+
+	{
+#define SHOR(DST_VAR, SHIFT, BIT_VAR) (DST_VAR) |= (BIT_VAR) << (SHIFT)
+#define ASSIGN_X_BITS(V0,S0, V1,S1, V2,S2, V3,S3, V4,S4, V5,S5, V6,S6) do { SHOR(V0,S0,x0); SHOR(V1,S1,x1); SHOR(V2,S2,x2); SHOR(V3,S3,x3); SHOR(V4,S4,x4); SHOR(V5,S5,x5); SHOR(V6,S6,x6); } while (false)
+
+		const deUint32	x0	= getBit(v1, 6);
+		const deUint32	x1	= getBit(v1, 5);
+		const deUint32	x2	= getBit(v2, 6);
+		const deUint32	x3	= getBit(v2, 5);
+		const deUint32	x4	= getBit(v3, 7);
+		const deUint32	x5	= getBit(v3, 6);
+		const deUint32	x6	= getBit(v3, 5);
+
+		deInt32&		R	= red;
+		deInt32&		G	= green;
+		deInt32&		B	= blue;
+		deInt32&		S	= scale;
+
+		switch (mode)
+		{
+			case 0: ASSIGN_X_BITS(R,9,  R,8,  R,7,  R,10,  R,6,  S,6,   S,5); break;
+			case 1: ASSIGN_X_BITS(R,8,  G,5,  R,7,  B,5,   R,6,  R,10,  R,9); break;
+			case 2: ASSIGN_X_BITS(R,9,  R,8,  R,7,  R,6,   S,7,  S,6,   S,5); break;
+			case 3: ASSIGN_X_BITS(R,8,  G,5,  R,7,  B,5,   R,6,  S,6,   S,5); break;
+			case 4: ASSIGN_X_BITS(G,6,  G,5,  B,6,  B,5,   R,6,  R,7,   S,5); break;
+			case 5: ASSIGN_X_BITS(G,6,  G,5,  B,6,  B,5,   R,6,  S,6,   S,5); break;
+			default:
+				DE_ASSERT(false);
+		}
+
+#undef ASSIGN_X_BITS
+#undef SHOR
+	}
+
+	static const int shiftAmounts[] = { 1, 1, 2, 3, 4, 5 };
+	DE_ASSERT(mode < DE_LENGTH_OF_ARRAY(shiftAmounts));
+
+	red		<<= shiftAmounts[mode];
+	green	<<= shiftAmounts[mode];
+	blue	<<= shiftAmounts[mode];
+	scale	<<= shiftAmounts[mode];
+
+	if (mode != 5)
+	{
+		green	= red - green;
+		blue	= red - blue;
+	}
+
+	if (majComp == 1)
+		std::swap(red, green);
+	else if (majComp == 2)
+		std::swap(red, blue);
+
+	e0 = UVec4(de::clamp(red	- scale,	0, 0xfff),
+			   de::clamp(green	- scale,	0, 0xfff),
+			   de::clamp(blue	- scale,	0, 0xfff),
+			   0x780);
+
+	e1 = UVec4(de::clamp(red,				0, 0xfff),
+			   de::clamp(green,				0, 0xfff),
+			   de::clamp(blue,				0, 0xfff),
+			   0x780);
+}
+
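+// Decode endpoints for color endpoint mode 11 (HDR RGB, direct).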
+void decodeHDREndpointMode11 (UVec4& e0, UVec4& e1, deUint32 v0, deUint32 v1, deUint32 v2, deUint32 v3, deUint32 v4, deUint32 v5)
+{
+	const deUint32 major = (getBit(v5, 7) << 1) | getBit(v4, 7);
+
+	if (major == 3)
+	{
+		e0 = UVec4(v0<<4, v2<<4, getBits(v4,0,6)<<5, 0x780);
+		e1 = UVec4(v1<<4, v3<<4, getBits(v5,0,6)<<5, 0x780);
+	}
+	else
+	{
+		const deUint32 mode = (getBit(v3, 7) << 2) | (getBit(v2, 7) << 1) | getBit(v1, 7);
+
+		deInt32 a	= (deInt32)((getBit(v1, 6) << 8) | v0);
+		deInt32 c	= (deInt32)(getBits(v1, 0, 5));
+		deInt32 b0	= (deInt32)(getBits(v2, 0, 5));
+		deInt32 b1	= (deInt32)(getBits(v3, 0, 5));
+		deInt32 d0	= (deInt32)(getBits(v4, 0, 4));
+		deInt32 d1	= (deInt32)(getBits(v5, 0, 4));
+
+		{
+#define SHOR(DST_VAR, SHIFT, BIT_VAR) (DST_VAR) |= (BIT_VAR) << (SHIFT)
+#define ASSIGN_X_BITS(V0,S0, V1,S1, V2,S2, V3,S3, V4,S4, V5,S5) do { SHOR(V0,S0,x0); SHOR(V1,S1,x1); SHOR(V2,S2,x2); SHOR(V3,S3,x3); SHOR(V4,S4,x4); SHOR(V5,S5,x5); } while (false)
+
+			const deUint32 x0 = getBit(v2, 6);
+			const deUint32 x1 = getBit(v3, 6);
+			const deUint32 x2 = getBit(v4, 6);
+			const deUint32 x3 = getBit(v5, 6);
+			const deUint32 x4 = getBit(v4, 5);
+			const deUint32 x5 = getBit(v5, 5);
+
+			switch (mode)
+			{
+				case 0: ASSIGN_X_BITS(b0,6,  b1,6,   d0,6,  d1,6,  d0,5,  d1,5); break;
+				case 1: ASSIGN_X_BITS(b0,6,  b1,6,   b0,7,  b1,7,  d0,5,  d1,5); break;
+				case 2: ASSIGN_X_BITS(a,9,   c,6,    d0,6,  d1,6,  d0,5,  d1,5); break;
+				case 3: ASSIGN_X_BITS(b0,6,  b1,6,   a,9,   c,6,   d0,5,  d1,5); break;
+				case 4: ASSIGN_X_BITS(b0,6,  b1,6,   b0,7,  b1,7,  a,9,   a,10); break;
+				case 5: ASSIGN_X_BITS(a,9,   a,10,   c,7,   c,6,   d0,5,  d1,5); break;
+				case 6: ASSIGN_X_BITS(b0,6,  b1,6,   a,11,  c,6,   a,9,   a,10); break;
+				case 7: ASSIGN_X_BITS(a,9,   a,10,   a,11,  c,6,   d0,5,  d1,5); break;
+				default:
+					DE_ASSERT(false);
+			}
+
+#undef ASSIGN_X_BITS
+#undef SHOR
+		}
+
+		static const int numDBits[] = { 7, 6, 7, 6, 5, 6, 5, 6 };
+		DE_ASSERT(mode < DE_LENGTH_OF_ARRAY(numDBits));
+
+		d0 = signExtend(d0, numDBits[mode]);
+		d1 = signExtend(d1, numDBits[mode]);
+
+		const int shiftAmount = (mode >> 1) ^ 3;
+		a	<<= shiftAmount;
+		c	<<= shiftAmount;
+		b0	<<= shiftAmount;
+		b1	<<= shiftAmount;
+		d0	<<= shiftAmount;
+		d1	<<= shiftAmount;
+
+		e0 = UVec4(de::clamp(a-c,			0, 0xfff),
+				   de::clamp(a-b0-c-d0,		0, 0xfff),
+				   de::clamp(a-b1-c-d1,		0, 0xfff),
+				   0x780);
+
+		e1 = UVec4(de::clamp(a,				0, 0xfff),
+				   de::clamp(a-b0,			0, 0xfff),
+				   de::clamp(a-b1,			0, 0xfff),
+				   0x780);
+
+		if (major == 1)
+		{
+			std::swap(e0.x(), e0.y());
+			std::swap(e1.x(), e1.y());
+		}
+		else if (major == 2)
+		{
+			std::swap(e0.x(), e0.z());
+			std::swap(e1.x(), e1.z());
+		}
+	}
+}
+
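+// Decode endpoints for color endpoint mode 15 (HDR RGB + HDR alpha); the RGB part is decoded as in mode 11.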
+void decodeHDREndpointMode15 (UVec4& e0, UVec4& e1, deUint32 v0, deUint32 v1, deUint32 v2, deUint32 v3, deUint32 v4, deUint32 v5, deUint32 v6In, deUint32 v7In)
+{
+	decodeHDREndpointMode11(e0, e1, v0, v1, v2, v3, v4, v5);
+
+	const deUint32	mode	= (getBit(v7In, 7) << 1) | getBit(v6In, 7);
+	deInt32			v6		= (deInt32)getBits(v6In, 0, 6);
+	deInt32			v7		= (deInt32)getBits(v7In, 0, 6);
+
+	if (mode == 3)
+	{
+		e0.w() = v6 << 5;
+		e1.w() = v7 << 5;
+	}
+	else
+	{
+		v6 |= (v7 << (mode+1)) & 0x780;
+		v7 &= (0x3f >> mode);
+		v7 ^= 0x20 >> mode;
+		v7 -= 0x20 >> mode;
+		v6 <<= 4-mode;
+		v7 <<= 4-mode;
+
+		v7 += v6;
+		v7 = de::clamp(v7, 0, 0xfff);
+		e0.w() = v6;
+		e1.w() = v7;
+	}
+}
+
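+// Decode the unquantized endpoint values into an (e0, e1) color pair per partition, according to each partition's color endpoint mode.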
+void decodeColorEndpoints (ColorEndpointPair* dst, const deUint32* unquantizedEndpoints, const deUint32* endpointModes, int numPartitions)
+{
+	int unquantizedNdx = 0;
+
+	for (int partitionNdx = 0; partitionNdx < numPartitions; partitionNdx++)
+	{
+		const deUint32		endpointMode	= endpointModes[partitionNdx];
+		const deUint32*		v				= &unquantizedEndpoints[unquantizedNdx];
+		UVec4&				e0				= dst[partitionNdx].e0;
+		UVec4&				e1				= dst[partitionNdx].e1;
+
+		unquantizedNdx += computeNumColorEndpointValues(endpointMode);
+
+		switch (endpointMode)
+		{
+			case 0:
+				e0 = UVec4(v[0], v[0], v[0], 0xff);
+				e1 = UVec4(v[1], v[1], v[1], 0xff);
+				break;
+
+			case 1:
+			{
+				const deUint32 L0 = (v[0] >> 2) | (getBits(v[1], 6, 7) << 6);
+				const deUint32 L1 = de::min(0xffu, L0 + getBits(v[1], 0, 5));
+				e0 = UVec4(L0, L0, L0, 0xff);
+				e1 = UVec4(L1, L1, L1, 0xff);
+				break;
+			}
+
+			case 2:
+			{
+				const deUint32 v1Gr		= v[1] >= v[0];
+				const deUint32 y0		= v1Gr ? v[0]<<4 : (v[1]<<4) + 8;
+				const deUint32 y1		= v1Gr ? v[1]<<4 : (v[0]<<4) - 8;
+
+				e0 = UVec4(y0, y0, y0, 0x780);
+				e1 = UVec4(y1, y1, y1, 0x780);
+				break;
+			}
+
+			case 3:
+			{
+				const bool		m	= isBitSet(v[0], 7);
+				const deUint32	y0	= m ? (getBits(v[1], 5, 7) << 9) | (getBits(v[0], 0, 6) << 2)
+										: (getBits(v[1], 4, 7) << 8) | (getBits(v[0], 0, 6) << 1);
+				const deUint32	d	= m ? getBits(v[1], 0, 4) << 2
+										: getBits(v[1], 0, 3) << 1;
+				const deUint32	y1	= de::min(0xfffu, y0+d);
+
+				e0 = UVec4(y0, y0, y0, 0x780);
+				e1 = UVec4(y1, y1, y1, 0x780);
+				break;
+			}
+
+			case 4:
+				e0 = UVec4(v[0], v[0], v[0], v[2]);
+				e1 = UVec4(v[1], v[1], v[1], v[3]);
+				break;
+
+			case 5:
+			{
+				deInt32 v0 = (deInt32)v[0];
+				deInt32 v1 = (deInt32)v[1];
+				deInt32 v2 = (deInt32)v[2];
+				deInt32 v3 = (deInt32)v[3];
+				bitTransferSigned(v1, v0);
+				bitTransferSigned(v3, v2);
+
+				e0 = clampedRGBA(IVec4(v0,		v0,		v0,		v2));
+				e1 = clampedRGBA(IVec4(v0+v1,	v0+v1,	v0+v1,	v2+v3));
+				break;
+			}
+
+			case 6:
+				e0 = UVec4((v[0]*v[3]) >> 8,	(v[1]*v[3]) >> 8,	(v[2]*v[3]) >> 8,	0xff);
+				e1 = UVec4(v[0],				v[1],				v[2],				0xff);
+				break;
+
+			case 7:
+				decodeHDREndpointMode7(e0, e1, v[0], v[1], v[2], v[3]);
+				break;
+
+			case 8:
+				if (v[1]+v[3]+v[5] >= v[0]+v[2]+v[4])
+				{
+					e0 = UVec4(v[0], v[2], v[4], 0xff);
+					e1 = UVec4(v[1], v[3], v[5], 0xff);
+				}
+				else
+				{
+					e0 = blueContract(v[1], v[3], v[5], 0xff).asUint();
+					e1 = blueContract(v[0], v[2], v[4], 0xff).asUint();
+				}
+				break;
+
+			case 9:
+			{
+				deInt32 v0 = (deInt32)v[0];
+				deInt32 v1 = (deInt32)v[1];
+				deInt32 v2 = (deInt32)v[2];
+				deInt32 v3 = (deInt32)v[3];
+				deInt32 v4 = (deInt32)v[4];
+				deInt32 v5 = (deInt32)v[5];
+				bitTransferSigned(v1, v0);
+				bitTransferSigned(v3, v2);
+				bitTransferSigned(v5, v4);
+
+				if (v1+v3+v5 >= 0)
+				{
+					e0 = clampedRGBA(IVec4(v0,		v2,		v4,		0xff));
+					e1 = clampedRGBA(IVec4(v0+v1,	v2+v3,	v4+v5,	0xff));
+				}
+				else
+				{
+					e0 = clampedRGBA(blueContract(v0+v1,	v2+v3,	v4+v5,	0xff));
+					e1 = clampedRGBA(blueContract(v0,		v2,		v4,		0xff));
+				}
+				break;
+			}
+
+			case 10:
+				e0 = UVec4((v[0]*v[3]) >> 8,	(v[1]*v[3]) >> 8,	(v[2]*v[3]) >> 8,	v[4]);
+				e1 = UVec4(v[0],				v[1],				v[2],				v[5]);
+				break;
+
+			case 11:
+				decodeHDREndpointMode11(e0, e1, v[0], v[1], v[2], v[3], v[4], v[5]);
+				break;
+
+			case 12:
+				if (v[1]+v[3]+v[5] >= v[0]+v[2]+v[4])
+				{
+					e0 = UVec4(v[0], v[2], v[4], v[6]);
+					e1 = UVec4(v[1], v[3], v[5], v[7]);
+				}
+				else
+				{
+					e0 = clampedRGBA(blueContract(v[1], v[3], v[5], v[7]));
+					e1 = clampedRGBA(blueContract(v[0], v[2], v[4], v[6]));
+				}
+				break;
+
+			case 13:
+			{
+				deInt32 v0 = (deInt32)v[0];
+				deInt32 v1 = (deInt32)v[1];
+				deInt32 v2 = (deInt32)v[2];
+				deInt32 v3 = (deInt32)v[3];
+				deInt32 v4 = (deInt32)v[4];
+				deInt32 v5 = (deInt32)v[5];
+				deInt32 v6 = (deInt32)v[6];
+				deInt32 v7 = (deInt32)v[7];
+				bitTransferSigned(v1, v0);
+				bitTransferSigned(v3, v2);
+				bitTransferSigned(v5, v4);
+				bitTransferSigned(v7, v6);
+
+				if (v1+v3+v5 >= 0)
+				{
+					e0 = clampedRGBA(IVec4(v0,		v2,		v4,		v6));
+					e1 = clampedRGBA(IVec4(v0+v1,	v2+v3,	v4+v5,	v6+v7));
+				}
+				else
+				{
+					e0 = clampedRGBA(blueContract(v0+v1,	v2+v3,	v4+v5,	v6+v7));
+					e1 = clampedRGBA(blueContract(v0,		v2,		v4,		v6));
+				}
+
+				break;
+			}
+
+			case 14:
+				decodeHDREndpointMode11(e0, e1, v[0], v[1], v[2], v[3], v[4], v[5]);
+				e0.w() = v[6];
+				e1.w() = v[7];
+				break;
+
+			case 15:
+				decodeHDREndpointMode15(e0, e1, v[0], v[1], v[2], v[3], v[4], v[5], v[6], v[7]);
+				break;
+
+			default:
+				DE_ASSERT(false);
+		}
+	}
+}
+
+void computeColorEndpoints (ColorEndpointPair* dst, const Block128& blockData, const deUint32* endpointModes, int numPartitions, int numColorEndpointValues, const ISEParams& iseParams, int numBitsAvailable)
+{
+	const int			colorEndpointDataStart = numPartitions == 1 ? 17 : 29;
+	ISEDecodedResult	colorEndpointData[18];
+
+	{
+		BitAccessStream dataStream(blockData, colorEndpointDataStart, numBitsAvailable, true);
+		decodeISE(&colorEndpointData[0], numColorEndpointValues, dataStream, iseParams);
+	}
+
+	{
+		deUint32 unquantizedEndpoints[18];
+		unquantizeColorEndpoints(&unquantizedEndpoints[0], &colorEndpointData[0], numColorEndpointValues, iseParams);
+		decodeColorEndpoints(dst, &unquantizedEndpoints[0], &endpointModes[0], numPartitions);
+	}
+}
+
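+// Unquantize ISE-decoded texel weights to the range [0, 64].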
+void unquantizeWeights (deUint32 dst[64], const ISEDecodedResult* weightGrid, const ASTCBlockMode& blockMode)
+{
+	const int			numWeights	= computeNumWeights(blockMode);
+	const ISEParams&	iseParams	= blockMode.weightISEParams;
+
+	if (iseParams.mode == ISEMODE_TRIT || iseParams.mode == ISEMODE_QUINT)
+	{
+		const int rangeCase = iseParams.numBits*2 + (iseParams.mode == ISEMODE_QUINT ? 1 : 0);
+
+		if (rangeCase == 0 || rangeCase == 1)
+		{
+			static const deUint32 map0[3]	= { 0, 32, 63 };
+			static const deUint32 map1[5]	= { 0, 16, 32, 47, 63 };
+			const deUint32* const map		= rangeCase == 0 ? &map0[0] : &map1[0];
+			for (int i = 0; i < numWeights; i++)
+			{
+				DE_ASSERT(weightGrid[i].v < (rangeCase == 0 ? 3u : 5u));
+				dst[i] = map[weightGrid[i].v];
+			}
+		}
+		else
+		{
+			DE_ASSERT(rangeCase <= 6);
+			static const deUint32	Ca[5]	= { 50, 28, 23, 13, 11 };
+			const deUint32			C		= Ca[rangeCase-2];
+
+			for (int weightNdx = 0; weightNdx < numWeights; weightNdx++)
+			{
+				const deUint32 a = getBit(weightGrid[weightNdx].m, 0);
+				const deUint32 b = getBit(weightGrid[weightNdx].m, 1);
+				const deUint32 c = getBit(weightGrid[weightNdx].m, 2);
+
+				const deUint32 A = a == 0 ? 0 : (1<<7)-1;
+				const deUint32 B = rangeCase == 2 ? 0
+								 : rangeCase == 3 ? 0
+								 : rangeCase == 4 ? (b << 6) |					(b << 2) |				(b << 0)
+								 : rangeCase == 5 ? (b << 6) |								(b << 1)
+								 : rangeCase == 6 ? (c << 6) | (b << 5) |					(c << 1) |	(b << 0)
+								 : (deUint32)-1;
+
+				dst[weightNdx] = (((weightGrid[weightNdx].tq*C + B) ^ A) >> 2) | (A & 0x20);
+			}
+		}
+	}
+	else
+	{
+		DE_ASSERT(iseParams.mode == ISEMODE_PLAIN_BIT);
+
+		for (int weightNdx = 0; weightNdx < numWeights; weightNdx++)
+			dst[weightNdx] = bitReplicationScale(weightGrid[weightNdx].v, iseParams.numBits, 6);
+	}
+
+	for (int weightNdx = 0; weightNdx < numWeights; weightNdx++)
+		dst[weightNdx] += dst[weightNdx] > 32 ? 1 : 0;
+
+	// Initialize nonexistent weights to poison values
+	for (int weightNdx = numWeights; weightNdx < 64; weightNdx++)
+		dst[weightNdx] = ~0u;
+
+}
+
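+// Bilinearly interpolate the decoded weight grid onto the full block of texels (weight grid infill).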
+void interpolateWeights (TexelWeightPair* dst, const deUint32* unquantizedWeights, int blockWidth, int blockHeight, const ASTCBlockMode& blockMode)
+{
+	const int		numWeightsPerTexel	= blockMode.isDualPlane ? 2 : 1;
+	const deUint32	scaleX				= (1024 + blockWidth/2) / (blockWidth-1);
+	const deUint32	scaleY				= (1024 + blockHeight/2) / (blockHeight-1);
+
+	for (int texelY = 0; texelY < blockHeight; texelY++)
+	{
+		for (int texelX = 0; texelX < blockWidth; texelX++)
+		{
+			const deUint32 gX	= (scaleX*texelX*(blockMode.weightGridWidth-1) + 32) >> 6;
+			const deUint32 gY	= (scaleY*texelY*(blockMode.weightGridHeight-1) + 32) >> 6;
+			const deUint32 jX	= gX >> 4;
+			const deUint32 jY	= gY >> 4;
+			const deUint32 fX	= gX & 0xf;
+			const deUint32 fY	= gY & 0xf;
+			const deUint32 w11	= (fX*fY + 8) >> 4;
+			const deUint32 w10	= fY - w11;
+			const deUint32 w01	= fX - w11;
+			const deUint32 w00	= 16 - fX - fY + w11;
+			const deUint32 v0	= jY*blockMode.weightGridWidth + jX;
+
+			for (int texelWeightNdx = 0; texelWeightNdx < numWeightsPerTexel; texelWeightNdx++)
+			{
+				const deUint32 p00	= unquantizedWeights[(v0)									* numWeightsPerTexel + texelWeightNdx];
+				const deUint32 p01	= unquantizedWeights[(v0 + 1)								* numWeightsPerTexel + texelWeightNdx];
+				const deUint32 p10	= unquantizedWeights[(v0 + blockMode.weightGridWidth)		* numWeightsPerTexel + texelWeightNdx];
+				const deUint32 p11	= unquantizedWeights[(v0 + blockMode.weightGridWidth + 1)	* numWeightsPerTexel + texelWeightNdx];
+
+				dst[texelY*blockWidth + texelX].w[texelWeightNdx] = (p00*w00 + p01*w01 + p10*w10 + p11*w11 + 8) >> 4;
+			}
+		}
+	}
+}
+
+void computeTexelWeights (TexelWeightPair* dst, const Block128& blockData, int blockWidth, int blockHeight, const ASTCBlockMode& blockMode)
+{
+	ISEDecodedResult weightGrid[64];
+
+	{
+		BitAccessStream dataStream(blockData, 127, computeNumRequiredBits(blockMode.weightISEParams, computeNumWeights(blockMode)), false);
+		decodeISE(&weightGrid[0], computeNumWeights(blockMode), dataStream, blockMode.weightISEParams);
+	}
+
+	{
+		deUint32 unquantizedWeights[64];
+		unquantizeWeights(&unquantizedWeights[0], &weightGrid[0], blockMode);
+		interpolateWeights(dst, &unquantizedWeights[0], blockWidth, blockHeight, blockMode);
+	}
+}
+
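+// Pseudo-random hash used by the partition pattern generator.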
+inline deUint32 hash52 (deUint32 v)
+{
+	deUint32 p = v;
+	p ^= p >> 15;	p -= p << 17;	p += p << 7;	p += p << 4;
+	p ^= p >>  5;	p += p << 16;	p ^= p >> 7;	p ^= p >> 3;
+	p ^= p <<  6;	p ^= p >> 17;
+	return p;
+}
+
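+// Compute which partition the texel at (x, y, z) belongs to, given the partition seed (the ASTC partition pattern generator).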
+int computeTexelPartition (deUint32 seedIn, deUint32 xIn, deUint32 yIn, deUint32 zIn, int numPartitions, bool smallBlock)
+{
+	DE_ASSERT(zIn == 0);
+	const deUint32	x		= smallBlock ? xIn << 1 : xIn;
+	const deUint32	y		= smallBlock ? yIn << 1 : yIn;
+	const deUint32	z		= smallBlock ? zIn << 1 : zIn;
+	const deUint32	seed	= seedIn + 1024*(numPartitions-1);
+	const deUint32	rnum	= hash52(seed);
+	deUint8			seed1	= (deUint8)( rnum							& 0xf);
+	deUint8			seed2	= (deUint8)((rnum >>  4)					& 0xf);
+	deUint8			seed3	= (deUint8)((rnum >>  8)					& 0xf);
+	deUint8			seed4	= (deUint8)((rnum >> 12)					& 0xf);
+	deUint8			seed5	= (deUint8)((rnum >> 16)					& 0xf);
+	deUint8			seed6	= (deUint8)((rnum >> 20)					& 0xf);
+	deUint8			seed7	= (deUint8)((rnum >> 24)					& 0xf);
+	deUint8			seed8	= (deUint8)((rnum >> 28)					& 0xf);
+	deUint8			seed9	= (deUint8)((rnum >> 18)					& 0xf);
+	deUint8			seed10	= (deUint8)((rnum >> 22)					& 0xf);
+	deUint8			seed11	= (deUint8)((rnum >> 26)					& 0xf);
+	deUint8			seed12	= (deUint8)(((rnum >> 30) | (rnum << 2))	& 0xf);
+
+	seed1  = (deUint8)(seed1  * seed1 );
+	seed2  = (deUint8)(seed2  * seed2 );
+	seed3  = (deUint8)(seed3  * seed3 );
+	seed4  = (deUint8)(seed4  * seed4 );
+	seed5  = (deUint8)(seed5  * seed5 );
+	seed6  = (deUint8)(seed6  * seed6 );
+	seed7  = (deUint8)(seed7  * seed7 );
+	seed8  = (deUint8)(seed8  * seed8 );
+	seed9  = (deUint8)(seed9  * seed9 );
+	seed10 = (deUint8)(seed10 * seed10);
+	seed11 = (deUint8)(seed11 * seed11);
+	seed12 = (deUint8)(seed12 * seed12);
+
+	const int shA = (seed & 2) != 0		? 4		: 5;
+	const int shB = numPartitions == 3	? 6		: 5;
+	const int sh1 = (seed & 1) != 0		? shA	: shB;
+	const int sh2 = (seed & 1) != 0		? shB	: shA;
+	const int sh3 = (seed & 0x10) != 0	? sh1	: sh2;
+
+	seed1  = (deUint8)(seed1  >> sh1);
+	seed2  = (deUint8)(seed2  >> sh2);
+	seed3  = (deUint8)(seed3  >> sh1);
+	seed4  = (deUint8)(seed4  >> sh2);
+	seed5  = (deUint8)(seed5  >> sh1);
+	seed6  = (deUint8)(seed6  >> sh2);
+	seed7  = (deUint8)(seed7  >> sh1);
+	seed8  = (deUint8)(seed8  >> sh2);
+	seed9  = (deUint8)(seed9  >> sh3);
+	seed10 = (deUint8)(seed10 >> sh3);
+	seed11 = (deUint8)(seed11 >> sh3);
+	seed12 = (deUint8)(seed12 >> sh3);
+
+	const int a =						0x3f & (seed1*x + seed2*y + seed11*z + (rnum >> 14));
+	const int b =						0x3f & (seed3*x + seed4*y + seed12*z + (rnum >> 10));
+	const int c = numPartitions >= 3 ?	0x3f & (seed5*x + seed6*y + seed9*z  + (rnum >>  6))	: 0;
+	const int d = numPartitions >= 4 ?	0x3f & (seed7*x + seed8*y + seed10*z + (rnum >>  2))	: 0;
+
+	return a >= b && a >= c && a >= d	? 0
+		 : b >= c && b >= d				? 1
+		 : c >= d						? 2
+		 :								  3;
+}
+
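+// Compute final texel colors by interpolating between each texel's endpoint pair; LDR-mode blocks that use HDR endpoint modes produce the error color instead.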
+DecompressResult setTexelColors (void* dst, ColorEndpointPair* colorEndpoints, TexelWeightPair* texelWeights, int ccs, deUint32 partitionIndexSeed,
+								 int numPartitions, int blockWidth, int blockHeight, bool isSRGB, bool isLDRMode, const deUint32* colorEndpointModes)
+{
+	const bool			smallBlock	= blockWidth*blockHeight < 31;
+	DecompressResult	result		= DECOMPRESS_RESULT_VALID_BLOCK;
+	bool				isHDREndpoint[4];
+
+	for (int i = 0; i < numPartitions; i++)
+		isHDREndpoint[i] = isColorEndpointModeHDR(colorEndpointModes[i]);
+
+	for (int texelY = 0; texelY < blockHeight; texelY++)
+	for (int texelX = 0; texelX < blockWidth; texelX++)
+	{
+		const int				texelNdx			= texelY*blockWidth + texelX;
+		const int				colorEndpointNdx	= numPartitions == 1 ? 0 : computeTexelPartition(partitionIndexSeed, texelX, texelY, 0, numPartitions, smallBlock);
+		DE_ASSERT(colorEndpointNdx < numPartitions);
+		const UVec4&			e0					= colorEndpoints[colorEndpointNdx].e0;
+		const UVec4&			e1					= colorEndpoints[colorEndpointNdx].e1;
+		const TexelWeightPair&	weight				= texelWeights[texelNdx];
+
+		if (isLDRMode && isHDREndpoint[colorEndpointNdx])
+		{
+			if (isSRGB)
+			{
+				((deUint8*)dst)[texelNdx*4 + 0] = 0xff;
+				((deUint8*)dst)[texelNdx*4 + 1] = 0;
+				((deUint8*)dst)[texelNdx*4 + 2] = 0xff;
+				((deUint8*)dst)[texelNdx*4 + 3] = 0xff;
+			}
+			else
+			{
+				((float*)dst)[texelNdx*4 + 0] = 1.0f;
+				((float*)dst)[texelNdx*4 + 1] = 0;
+				((float*)dst)[texelNdx*4 + 2] = 1.0f;
+				((float*)dst)[texelNdx*4 + 3] = 1.0f;
+			}
+
+			result = DECOMPRESS_RESULT_ERROR;
+		}
+		else
+		{
+			for (int channelNdx = 0; channelNdx < 4; channelNdx++)
+			{
+				if (!isHDREndpoint[colorEndpointNdx] || (channelNdx == 3 && colorEndpointModes[colorEndpointNdx] == 14)) // \note Alpha for mode 14 is treated the same as LDR.
+				{
+					const deUint32 c0	= (e0[channelNdx] << 8) | (isSRGB ? 0x80 : e0[channelNdx]);
+					const deUint32 c1	= (e1[channelNdx] << 8) | (isSRGB ? 0x80 : e1[channelNdx]);
+					const deUint32 w	= weight.w[ccs == channelNdx ? 1 : 0];
+					const deUint32 c	= (c0*(64-w) + c1*w + 32) / 64;
+
+					if (isSRGB)
+						((deUint8*)dst)[texelNdx*4 + channelNdx] = (deUint8)((c & 0xff00) >> 8);
+					else
+						((float*)dst)[texelNdx*4 + channelNdx] = c == 65535 ? 1.0f : (float)c / 65536.0f;
+				}
+				else
+				{
+					DE_STATIC_ASSERT((de::meta::TypesSame<deFloat16, deUint16>::Value));
+					const deUint32		c0	= e0[channelNdx] << 4;
+					const deUint32		c1	= e1[channelNdx] << 4;
+					const deUint32		w	= weight.w[ccs == channelNdx ? 1 : 0];
+					const deUint32		c	= (c0*(64-w) + c1*w + 32) / 64;
+					const deUint32		e	= getBits(c, 11, 15);
+					const deUint32		m	= getBits(c, 0, 10);
+					const deUint32		mt	= m < 512		? 3*m
+											: m >= 1536		? 5*m - 2048
+											:				  4*m - 512;
+					const deFloat16		cf	= (deFloat16)((e << 10) + (mt >> 3));
+
+					((float*)dst)[texelNdx*4 + channelNdx] = deFloat16To32(isFloat16InfOrNan(cf) ? 0x7bff : cf);
+				}
+			}
+		}
+	}
+
+	return result;
+}
+
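+// Decompress a single ASTC block: decode the block mode, color endpoints and texel weights, then derive the final texel colors.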
+DecompressResult decompressBlock (void* dst, const Block128& blockData, int blockWidth, int blockHeight, bool isSRGB, bool isLDR)
+{
+	DE_ASSERT(isLDR || !isSRGB);
+
+	// Decode block mode.
+
+	const ASTCBlockMode blockMode = getASTCBlockMode(blockData.getBits(0, 10));
+
+	// Check for block mode errors.
+
+	if (blockMode.isError)
+	{
+		setASTCErrorColorBlock(dst, blockWidth, blockHeight, isSRGB);
+		return DECOMPRESS_RESULT_ERROR;
+	}
+
+	// Separate path for void-extent.
+
+	if (blockMode.isVoidExtent)
+		return decodeVoidExtentBlock(dst, blockData, blockWidth, blockHeight, isSRGB, isLDR);
+
+	// Compute weight grid values.
+
+	const int numWeights			= computeNumWeights(blockMode);
+	const int numWeightDataBits		= computeNumRequiredBits(blockMode.weightISEParams, numWeights);
+	const int numPartitions			= (int)blockData.getBits(11, 12) + 1;
+
+	// Check for errors in weight grid, partition and dual-plane parameters.
+
+	if (numWeights > 64								||
+		numWeightDataBits > 96						||
+		numWeightDataBits < 24						||
+		blockMode.weightGridWidth > blockWidth		||
+		blockMode.weightGridHeight > blockHeight	||
+		(numPartitions == 4 && blockMode.isDualPlane))
+	{
+		setASTCErrorColorBlock(dst, blockWidth, blockHeight, isSRGB);
+		return DECOMPRESS_RESULT_ERROR;
+	}
+
+	// Compute number of bits available for color endpoint data.
+
+	const bool	isSingleUniqueCem			= numPartitions == 1 || blockData.getBits(23, 24) == 0;
+	const int	numConfigDataBits			= (numPartitions == 1 ? 17 : isSingleUniqueCem ? 29 : 25 + 3*numPartitions) +
+											  (blockMode.isDualPlane ? 2 : 0);
+	const int	numBitsForColorEndpoints	= 128 - numWeightDataBits - numConfigDataBits;
+	const int	extraCemBitsStart			= 127 - numWeightDataBits - (isSingleUniqueCem		? -1
+																		: numPartitions == 4	? 7
+																		: numPartitions == 3	? 4
+																		: numPartitions == 2	? 1
+																		: 0);
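+	// \note The extra CEM bits (those that do not fit in bits 25..28) are stored immediately below the weight data; extraCemBitsStart is the index of the lowest such bit.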
+	// Decode color endpoint modes.
+
+	deUint32 colorEndpointModes[4];
+	decodeColorEndpointModes(&colorEndpointModes[0], blockData, numPartitions, extraCemBitsStart);
+
+	const int numColorEndpointValues = computeNumColorEndpointValues(colorEndpointModes, numPartitions);
+
+	// Check for errors in color endpoint value count.
+
+	if (numColorEndpointValues > 18 || numBitsForColorEndpoints < deDivRoundUp32(13*numColorEndpointValues, 5))
+	{
+		setASTCErrorColorBlock(dst, blockWidth, blockHeight, isSRGB);
+		return DECOMPRESS_RESULT_ERROR;
+	}
+
+	// Compute color endpoints.
+
+	ColorEndpointPair colorEndpoints[4];
+	computeColorEndpoints(&colorEndpoints[0], blockData, &colorEndpointModes[0], numPartitions, numColorEndpointValues,
+						  computeMaximumRangeISEParams(numBitsForColorEndpoints, numColorEndpointValues), numBitsForColorEndpoints);
+
+	// Compute texel weights.
+
+	TexelWeightPair texelWeights[MAX_BLOCK_WIDTH*MAX_BLOCK_HEIGHT];
+	computeTexelWeights(&texelWeights[0], blockData, blockWidth, blockHeight, blockMode);
+
+	// Set texel colors.
+
+	const int		ccs						= blockMode.isDualPlane ? (int)blockData.getBits(extraCemBitsStart-2, extraCemBitsStart-1) : -1;
+	const deUint32	partitionIndexSeed		= numPartitions > 1 ? blockData.getBits(13, 22) : (deUint32)-1;
+
+	return setTexelColors(dst, &colorEndpoints[0], &texelWeights[0], ccs, partitionIndexSeed, numPartitions, blockWidth, blockHeight, isSRGB, isLDR, &colorEndpointModes[0]);
+}
+
+void decompress (const PixelBufferAccess& dst, const deUint8* data, bool isSRGB, bool isLDR)
+{
+	DE_ASSERT(isLDR || !isSRGB);
+
+	const int blockWidth = dst.getWidth();
+	const int blockHeight = dst.getHeight();
+
+	union
+	{
+		deUint8		sRGB[MAX_BLOCK_WIDTH*MAX_BLOCK_HEIGHT*4];
+		float		linear[MAX_BLOCK_WIDTH*MAX_BLOCK_HEIGHT*4];
+	} decompressedBuffer;
+
+	const Block128 blockData(data);
+	decompressBlock(isSRGB ? (void*)&decompressedBuffer.sRGB[0] : (void*)&decompressedBuffer.linear[0],
+					blockData, dst.getWidth(), dst.getHeight(), isSRGB, isLDR);
+
+	if (isSRGB)
+	{
+		for (int i = 0; i < blockHeight; i++)
+		for (int j = 0; j < blockWidth; j++)
+		{
+			dst.setPixel(IVec4(decompressedBuffer.sRGB[(i*blockWidth + j) * 4 + 0],
+							   decompressedBuffer.sRGB[(i*blockWidth + j) * 4 + 1],
+							   decompressedBuffer.sRGB[(i*blockWidth + j) * 4 + 2],
+							   decompressedBuffer.sRGB[(i*blockWidth + j) * 4 + 3]), j, i);
+		}
+	}
+	else
+	{
+		for (int i = 0; i < blockHeight; i++)
+		for (int j = 0; j < blockWidth; j++)
+		{
+			dst.setPixel(Vec4(decompressedBuffer.linear[(i*blockWidth + j) * 4 + 0],
+							  decompressedBuffer.linear[(i*blockWidth + j) * 4 + 1],
+							  decompressedBuffer.linear[(i*blockWidth + j) * 4 + 2],
+							  decompressedBuffer.linear[(i*blockWidth + j) * 4 + 3]), j, i);
+		}
+	}
+}
+
+// Helper class for setting bits in a 128-bit block.
+class AssignBlock128
+{
+private:
+	typedef deUint64 Word;
+
+	enum
+	{
+		WORD_BYTES	= sizeof(Word),
+		WORD_BITS	= 8*WORD_BYTES,
+		NUM_WORDS	= 128 / WORD_BITS
+	};
+
+	DE_STATIC_ASSERT(128 % WORD_BITS == 0);
+
+public:
+	AssignBlock128 (void)
+	{
+		for (int wordNdx = 0; wordNdx < NUM_WORDS; wordNdx++)
+			m_words[wordNdx] = 0;
+	}
+
+	void setBit (int ndx, deUint32 val)
+	{
+		DE_ASSERT(de::inBounds(ndx, 0, 128));
+		DE_ASSERT((val & 1) == val);
+		const int wordNdx	= ndx / WORD_BITS;
+		const int bitNdx	= ndx % WORD_BITS;
+		m_words[wordNdx] = (m_words[wordNdx] & ~((Word)1 << bitNdx)) | ((Word)val << bitNdx);
+	}
+
+	void setBits (int low, int high, deUint32 bits)
+	{
+		DE_ASSERT(de::inBounds(low, 0, 128));
+		DE_ASSERT(de::inBounds(high, 0, 128));
+		DE_ASSERT(de::inRange(high-low+1, 0, 32));
+		DE_ASSERT((bits & (((Word)1 << (high-low+1)) - 1)) == bits);
+
+		if (high-low+1 == 0)
+			return;
+
+		const int word0Ndx		= low / WORD_BITS;
+		const int word1Ndx		= high / WORD_BITS;
+		const int lowNdxInW0	= low % WORD_BITS;
+
+		if (word0Ndx == word1Ndx)
+			m_words[word0Ndx] = (m_words[word0Ndx] & ~((((Word)1 << (high-low+1)) - 1) << lowNdxInW0)) | ((Word)bits << lowNdxInW0);
+		else
+		{
+			DE_ASSERT(word1Ndx == word0Ndx + 1);
+
+			const int	highNdxInW1			= high % WORD_BITS;
+			const int	numBitsToSetInW0	= WORD_BITS - lowNdxInW0;
+			const Word	bitsLowMask			= ((Word)1 << numBitsToSetInW0) - 1;
+
+			m_words[word0Ndx] = (m_words[word0Ndx] & (((Word)1 << lowNdxInW0) - 1))			| (((Word)bits & bitsLowMask) << lowNdxInW0);
+			m_words[word1Ndx] = (m_words[word1Ndx] & ~(((Word)1 << (highNdxInW1+1)) - 1))	| (((Word)bits & ~bitsLowMask) >> numBitsToSetInW0);
+		}
+	}
+
+	void assignToMemory (deUint8* dst) const
+	{
+		for (int wordNdx = 0; wordNdx < NUM_WORDS; wordNdx++)
+		{
+			for (int byteNdx = 0; byteNdx < WORD_BYTES; byteNdx++)
+				dst[wordNdx*WORD_BYTES + byteNdx] = (deUint8)((m_words[wordNdx] >> (8*byteNdx)) & 0xff);
+		}
+	}
+
+	void pushBytesToVector (vector<deUint8>& dst) const
+	{
+		const int assignStartIndex = (int)dst.size();
+		dst.resize(dst.size() + BLOCK_SIZE_BYTES);
+		assignToMemory(&dst[assignStartIndex]);
+	}
+
+private:
+	Word m_words[NUM_WORDS];
+};
+
+// A helper for sequential access into an AssignBlock128.
+class BitAssignAccessStream
+{
+public:
+	BitAssignAccessStream (AssignBlock128& dst, int startNdxInSrc, int length, bool forward)
+		: m_dst				(dst)
+		, m_startNdxInSrc	(startNdxInSrc)
+		, m_length			(length)
+		, m_forward			(forward)
+		, m_ndx				(0)
+	{
+	}
+
+	// Set the next num bits. Bits at positions greater than or equal to m_length are not touched.
+	void setNext (int num, deUint32 bits)
+	{
+		DE_ASSERT((bits & (((deUint64)1 << num) - 1)) == bits);
+
+		if (num == 0 || m_ndx >= m_length)
+			return;
+
+		const int		end				= m_ndx + num;
+		const int		numBitsToDst	= de::max(0, de::min(m_length, end) - m_ndx);
+		const int		low				= m_ndx;
+		const int		high			= m_ndx + numBitsToDst - 1;
+		const deUint32	actualBits		= getBits(bits, 0, numBitsToDst-1);
+
+		m_ndx += num;
+
+		return m_forward ? m_dst.setBits(m_startNdxInSrc + low,  m_startNdxInSrc + high, actualBits)
+						 : m_dst.setBits(m_startNdxInSrc - high, m_startNdxInSrc - low, reverseBits(actualBits, numBitsToDst));
+	}
+
+private:
+	AssignBlock128&		m_dst;
+	const int			m_startNdxInSrc;
+	const int			m_length;
+	const bool			m_forward;
+
+	int					m_ndx;
+};
+
+struct VoidExtentParams
+{
+	DE_STATIC_ASSERT((de::meta::TypesSame<deFloat16, deUint16>::Value));
+	bool		isHDR;
+	deUint16	r;
+	deUint16	g;
+	deUint16	b;
+	deUint16	a;
+	// \note Currently extent coordinates are all set to all-ones.
+
+	VoidExtentParams (bool isHDR_, deUint16 r_, deUint16 g_, deUint16 b_, deUint16 a_) : isHDR(isHDR_), r(r_), g(g_), b(b_), a(a_) {}
+};
+
+static AssignBlock128 generateVoidExtentBlock (const VoidExtentParams& params)
+{
+	AssignBlock128 block;
+
+	block.setBits(0, 8, 0x1fc); // \note Marks void-extent block.
+	block.setBit(9, params.isHDR);
+	block.setBits(10, 11, 3); // \note Spec shows that these bits are both set, although they serve no purpose.
+
+	// Extent coordinates - currently all-ones.
+	block.setBits(12, 24, 0x1fff);
+	block.setBits(25, 37, 0x1fff);
+	block.setBits(38, 50, 0x1fff);
+	block.setBits(51, 63, 0x1fff);
+
+	DE_ASSERT(!params.isHDR || (!isFloat16InfOrNan(params.r) &&
+								!isFloat16InfOrNan(params.g) &&
+								!isFloat16InfOrNan(params.b) &&
+								!isFloat16InfOrNan(params.a)));
+
+	block.setBits(64,  79,  params.r);
+	block.setBits(80,  95,  params.g);
+	block.setBits(96,  111, params.b);
+	block.setBits(112, 127, params.a);
+
+	return block;
+}
+
+// Input for the ISE encoder, covering an entire ASTC block. Values can be given either as plain values in the
+// range [0, maximum value of the ISE range] or as explicit block value specifications. The latter is needed
+// so we can test all possible values of T and Q in a block, since multiple T or Q values may map
+// to the same set of decoded values.
+struct ISEInput
+{
+	struct Block
+	{
+		deUint32 tOrQValue; //!< The 8-bit T or 7-bit Q in a trit or quint ISE block.
+		deUint32 bitValues[5];
+	};
+
+	bool isGivenInBlockForm;
+	union
+	{
+		//!< \note 64 comes from the maximum number of weight values in an ASTC block.
+		deUint32	plain[64];
+		Block		block[64];
+	} value;
+
+	ISEInput (void)
+		: isGivenInBlockForm (false)
+	{
+	}
+};
+
+static inline deUint32 computeISERangeMax (const ISEParams& iseParams)
+{
+	switch (iseParams.mode)
+	{
+		case ISEMODE_TRIT:			return (1u << iseParams.numBits) * 3 - 1;
+		case ISEMODE_QUINT:			return (1u << iseParams.numBits) * 5 - 1;
+		case ISEMODE_PLAIN_BIT:		return (1u << iseParams.numBits)     - 1;
+		default:
+			DE_ASSERT(false);
+			return (deUint32)-1;
+	}
+}
+
+struct NormalBlockParams
+{
+	int					weightGridWidth;
+	int					weightGridHeight;
+	ISEParams			weightISEParams;
+	bool				isDualPlane;
+	deUint32			ccs; //! \note Irrelevant if !isDualPlane.
+	int					numPartitions;
+	deUint32			colorEndpointModes[4];
+	// \note Below members are irrelevant if numPartitions == 1.
+	bool				isMultiPartSingleCemMode; //! \note If true, the single CEM is at colorEndpointModes[0].
+	deUint32			partitionSeed;
+
+	NormalBlockParams (void)
+		: weightGridWidth			(-1)
+		, weightGridHeight			(-1)
+		, weightISEParams			(ISEMODE_LAST, -1)
+		, isDualPlane				(true)
+		, ccs						((deUint32)-1)
+		, numPartitions				(-1)
+		, isMultiPartSingleCemMode	(false)
+		, partitionSeed				((deUint32)-1)
+	{
+		colorEndpointModes[0] = 0;
+		colorEndpointModes[1] = 0;
+		colorEndpointModes[2] = 0;
+		colorEndpointModes[3] = 0;
+	}
+};
+
+struct NormalBlockISEInputs
+{
+	ISEInput weight;
+	ISEInput endpoint;
+
+	NormalBlockISEInputs (void)
+		: weight	()
+		, endpoint	()
+	{
+	}
+};
+
+static inline int computeNumWeights (const NormalBlockParams& params)
+{
+	return params.weightGridWidth * params.weightGridHeight * (params.isDualPlane ? 2 : 1);
+}
+
+static inline int computeNumBitsForColorEndpoints (const NormalBlockParams& params)
+{
+	const int numWeightBits			= computeNumRequiredBits(params.weightISEParams, computeNumWeights(params));
+	const int numConfigDataBits		= (params.numPartitions == 1 ? 17 : params.isMultiPartSingleCemMode ? 29 : 25 + 3*params.numPartitions) +
+									  (params.isDualPlane ? 2 : 0);
+
+	return 128 - numWeightBits - numConfigDataBits;
+}
+
+static inline int computeNumColorEndpointValues (const deUint32* endpointModes, int numPartitions, bool isMultiPartSingleCemMode)
+{
+	if (isMultiPartSingleCemMode)
+		return numPartitions * computeNumColorEndpointValues(endpointModes[0]);
+	else
+	{
+		int result = 0;
+		for (int i = 0; i < numPartitions; i++)
+			result += computeNumColorEndpointValues(endpointModes[i]);
+		return result;
+	}
+}
+
+static inline bool isValidBlockParams (const NormalBlockParams& params, int blockWidth, int blockHeight)
+{
+	const int numWeights				= computeNumWeights(params);
+	const int numWeightBits				= computeNumRequiredBits(params.weightISEParams, numWeights);
+	const int numColorEndpointValues	= computeNumColorEndpointValues(&params.colorEndpointModes[0], params.numPartitions, params.isMultiPartSingleCemMode);
+	const int numBitsForColorEndpoints	= computeNumBitsForColorEndpoints(params);
+
+	return numWeights <= 64										&&
+		   de::inRange(numWeightBits, 24, 96)					&&
+		   params.weightGridWidth <= blockWidth					&&
+		   params.weightGridHeight <= blockHeight				&&
+		   !(params.numPartitions == 4 && params.isDualPlane)	&&
+		   numColorEndpointValues <= 18							&&
+		   numBitsForColorEndpoints >= deDivRoundUp32(13*numColorEndpointValues, 5);
+}
+
+// Write bits 0 to 10 of an ASTC block.
+static void writeBlockMode (AssignBlock128& dst, const NormalBlockParams& blockParams)
+{
+	const deUint32	d = blockParams.isDualPlane != 0;
+	// r and h are initialized in the switch below.
+	deUint32		r;
+	deUint32		h;
+	// a, b and blockModeLayoutNdx are initialized in the block mode layout detection loop below.
+	deUint32		a = (deUint32)-1;
+	deUint32		b = (deUint32)-1;
+	int				blockModeLayoutNdx;
+
+	// Find the values of r and h (ISE range).
+	switch (computeISERangeMax(blockParams.weightISEParams))
+	{
+		case 1:		r = 2; h = 0;	break;
+		case 2:		r = 3; h = 0;	break;
+		case 3:		r = 4; h = 0;	break;
+		case 4:		r = 5; h = 0;	break;
+		case 5:		r = 6; h = 0;	break;
+		case 7:		r = 7; h = 0;	break;
+
+		case 9:		r = 2; h = 1;	break;
+		case 11:	r = 3; h = 1;	break;
+		case 15:	r = 4; h = 1;	break;
+		case 19:	r = 5; h = 1;	break;
+		case 23:	r = 6; h = 1;	break;
+		case 31:	r = 7; h = 1;	break;
+
+		default:
+			DE_ASSERT(false);
+			r = (deUint32)-1;
+			h = (deUint32)-1;
+	}
+
+	// Find the block mode layout index, i.e. the appropriate row in the "2d block mode layout" table of the ASTC spec.
+
+	{
+		enum BlockModeLayoutABVariable { Z=0, A=1, B=2 };
+
+		static const struct BlockModeLayout
+		{
+			int							aNumBits;
+			int							bNumBits;
+			BlockModeLayoutABVariable	gridWidthVariableTerm;
+			int							gridWidthConstantTerm;
+			BlockModeLayoutABVariable	gridHeightVariableTerm;
+			int							gridHeightConstantTerm;
+		} blockModeLayouts[] =
+		{
+			{ 2, 2,   B,  4,   A,  2},
+			{ 2, 2,   B,  8,   A,  2},
+			{ 2, 2,   A,  2,   B,  8},
+			{ 2, 1,   A,  2,   B,  6},
+			{ 2, 1,   B,  2,   A,  2},
+			{ 2, 0,   Z, 12,   A,  2},
+			{ 2, 0,   A,  2,   Z, 12},
+			{ 0, 0,   Z,  6,   Z, 10},
+			{ 0, 0,   Z, 10,   Z,  6},
+			{ 2, 2,   A,  6,   B,  6}
+		};
+
+		for (blockModeLayoutNdx = 0; blockModeLayoutNdx < DE_LENGTH_OF_ARRAY(blockModeLayouts); blockModeLayoutNdx++)
+		{
+			const BlockModeLayout&	layout					= blockModeLayouts[blockModeLayoutNdx];
+			const int				aMax					= (1 << layout.aNumBits) - 1;
+			const int				bMax					= (1 << layout.bNumBits) - 1;
+			const int				variableOffsetsMax[3]	= { 0, aMax, bMax };
+			const int				widthMin				= layout.gridWidthConstantTerm;
+			const int				heightMin				= layout.gridHeightConstantTerm;
+			const int				widthMax				= widthMin  + variableOffsetsMax[layout.gridWidthVariableTerm];
+			const int				heightMax				= heightMin + variableOffsetsMax[layout.gridHeightVariableTerm];
+
+			DE_ASSERT(layout.gridWidthVariableTerm != layout.gridHeightVariableTerm || layout.gridWidthVariableTerm == Z);
+
+			if (de::inRange(blockParams.weightGridWidth, widthMin, widthMax) &&
+				de::inRange(blockParams.weightGridHeight, heightMin, heightMax))
+			{
+				deUint32	dummy			= 0;
+				deUint32&	widthVariable	= layout.gridWidthVariableTerm == A  ? a : layout.gridWidthVariableTerm == B  ? b : dummy;
+				deUint32&	heightVariable	= layout.gridHeightVariableTerm == A ? a : layout.gridHeightVariableTerm == B ? b : dummy;
+
+				widthVariable	= blockParams.weightGridWidth  - layout.gridWidthConstantTerm;
+				heightVariable	= blockParams.weightGridHeight - layout.gridHeightConstantTerm;
+
+				break;
+			}
+		}
+	}
+
+	// Set block mode bits.
+
+	const deUint32 a0 = getBit(a, 0);
+	const deUint32 a1 = getBit(a, 1);
+	const deUint32 b0 = getBit(b, 0);
+	const deUint32 b1 = getBit(b, 1);
+	const deUint32 r0 = getBit(r, 0);
+	const deUint32 r1 = getBit(r, 1);
+	const deUint32 r2 = getBit(r, 2);
+
+#define SB(NDX, VAL) dst.setBit((NDX), (VAL))
+#define ASSIGN_BITS(B10, B9, B8, B7, B6, B5, B4, B3, B2, B1, B0) do { SB(10,(B10)); SB(9,(B9)); SB(8,(B8)); SB(7,(B7)); SB(6,(B6)); SB(5,(B5)); SB(4,(B4)); SB(3,(B3)); SB(2,(B2)); SB(1,(B1)); SB(0,(B0)); } while (false)
+
+	switch (blockModeLayoutNdx)
+	{
+		case 0: ASSIGN_BITS(d,  h,  b1, b0, a1, a0, r0, 0,  0,  r2, r1);									break;
+		case 1: ASSIGN_BITS(d,  h,  b1, b0, a1, a0, r0, 0,  1,  r2, r1);									break;
+		case 2: ASSIGN_BITS(d,  h,  b1, b0, a1, a0, r0, 1,  0,  r2, r1);									break;
+		case 3: ASSIGN_BITS(d,  h,   0,  b, a1, a0, r0, 1,  1,  r2, r1);									break;
+		case 4: ASSIGN_BITS(d,  h,   1,  b, a1, a0, r0, 1,  1,  r2, r1);									break;
+		case 5: ASSIGN_BITS(d,  h,   0,  0, a1, a0, r0, r2, r1,  0,  0);									break;
+		case 6: ASSIGN_BITS(d,  h,   0,  1, a1, a0, r0, r2, r1,  0,  0);									break;
+		case 7: ASSIGN_BITS(d,  h,   1,  1,  0,  0, r0, r2, r1,  0,  0);									break;
+		case 8: ASSIGN_BITS(d,  h,   1,  1,  0,  1, r0, r2, r1,  0,  0);									break;
+		case 9: ASSIGN_BITS(b1, b0,  1,  0, a1, a0, r0, r2, r1,  0,  0); DE_ASSERT(d == 0 && h == 0);		break;
+		default:
+			DE_ASSERT(false);
+	}
+
+#undef ASSIGN_BITS
+#undef SB
+}
+
+// Write color endpoint mode data of an ASTC block.
+static void writeColorEndpointModes (AssignBlock128& dst, const deUint32* colorEndpointModes, bool isMultiPartSingleCemMode, int numPartitions, int extraCemBitsStart)
+{
+	if (numPartitions == 1)
+		dst.setBits(13, 16, colorEndpointModes[0]);
+	else
+	{
+		if (isMultiPartSingleCemMode)
+		{
+			dst.setBits(23, 24, 0);
+			dst.setBits(25, 28, colorEndpointModes[0]);
+		}
+		else
+		{
+			DE_ASSERT(numPartitions > 0);
+			const deUint32 minCem				= *std::min_element(&colorEndpointModes[0], &colorEndpointModes[numPartitions]);
+			const deUint32 maxCem				= *std::max_element(&colorEndpointModes[0], &colorEndpointModes[numPartitions]);
+			const deUint32 minCemClass			= minCem/4;
+			const deUint32 maxCemClass			= maxCem/4;
+			DE_ASSERT(maxCemClass - minCemClass <= 1);
+			DE_UNREF(minCemClass); // \note For non-debug builds.
+			const deUint32 highLevelSelector	= de::max(1u, maxCemClass);
+
+			dst.setBits(23, 24, highLevelSelector);
+
+			for (int partNdx = 0; partNdx < numPartitions; partNdx++)
+			{
+				const deUint32 c			= colorEndpointModes[partNdx] / 4 == highLevelSelector ? 1 : 0;
+				const deUint32 m			= colorEndpointModes[partNdx] % 4;
+				const deUint32 lowMBit0Ndx	= numPartitions + 2*partNdx;
+				const deUint32 lowMBit1Ndx	= numPartitions + 2*partNdx + 1;
+				dst.setBit(25 + partNdx, c);
+				dst.setBit(lowMBit0Ndx < 4 ? 25+lowMBit0Ndx : extraCemBitsStart+lowMBit0Ndx-4, getBit(m, 0));
+				dst.setBit(lowMBit1Ndx < 4 ? 25+lowMBit1Ndx : extraCemBitsStart+lowMBit1Ndx-4, getBit(m, 1));
+			}
+		}
+	}
+}
+
+static void encodeISETritBlock (BitAssignAccessStream& dst, int numBits, bool fromExplicitInputBlock, const ISEInput::Block& blockInput, const deUint32* nonBlockInput, int numValues)
+{
+	// tritBlockTValue[t0][t1][t2][t3][t4] is a value of T (not necessarily the only one) that will yield the given trits when decoded.
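+	// For example, trits (0,0,1,0,0) can be encoded with T = tritBlockTValue[0][0][1][0][0] = 16; the decoder's tritsFromT[16] is { 0,0,1,0,0 }.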
+	static const deUint32 tritBlockTValue[3][3][3][3][3] =
+	{
+		{
+			{{{0, 128, 96}, {32, 160, 224}, {64, 192, 28}}, {{16, 144, 112}, {48, 176, 240}, {80, 208, 156}}, {{3, 131, 99}, {35, 163, 227}, {67, 195, 31}}},
+			{{{4, 132, 100}, {36, 164, 228}, {68, 196, 60}}, {{20, 148, 116}, {52, 180, 244}, {84, 212, 188}}, {{19, 147, 115}, {51, 179, 243}, {83, 211, 159}}},
+			{{{8, 136, 104}, {40, 168, 232}, {72, 200, 92}}, {{24, 152, 120}, {56, 184, 248}, {88, 216, 220}}, {{12, 140, 108}, {44, 172, 236}, {76, 204, 124}}}
+		},
+		{
+			{{{1, 129, 97}, {33, 161, 225}, {65, 193, 29}}, {{17, 145, 113}, {49, 177, 241}, {81, 209, 157}}, {{7, 135, 103}, {39, 167, 231}, {71, 199, 63}}},
+			{{{5, 133, 101}, {37, 165, 229}, {69, 197, 61}}, {{21, 149, 117}, {53, 181, 245}, {85, 213, 189}}, {{23, 151, 119}, {55, 183, 247}, {87, 215, 191}}},
+			{{{9, 137, 105}, {41, 169, 233}, {73, 201, 93}}, {{25, 153, 121}, {57, 185, 249}, {89, 217, 221}}, {{13, 141, 109}, {45, 173, 237}, {77, 205, 125}}}
+		},
+		{
+			{{{2, 130, 98}, {34, 162, 226}, {66, 194, 30}}, {{18, 146, 114}, {50, 178, 242}, {82, 210, 158}}, {{11, 139, 107}, {43, 171, 235}, {75, 203, 95}}},
+			{{{6, 134, 102}, {38, 166, 230}, {70, 198, 62}}, {{22, 150, 118}, {54, 182, 246}, {86, 214, 190}}, {{27, 155, 123}, {59, 187, 251}, {91, 219, 223}}},
+			{{{10, 138, 106}, {42, 170, 234}, {74, 202, 94}}, {{26, 154, 122}, {58, 186, 250}, {90, 218, 222}}, {{14, 142, 110}, {46, 174, 238}, {78, 206, 126}}}
+		}
+	};
+
+	DE_ASSERT(de::inRange(numValues, 1, 5));
+
+	deUint32 tritParts[5];
+	deUint32 bitParts[5];
+
+	for (int i = 0; i < 5; i++)
+	{
+		if (i < numValues)
+		{
+			if (fromExplicitInputBlock)
+			{
+				bitParts[i]		= blockInput.bitValues[i];
+				tritParts[i]	= -1; // \note Won't be used, but silences warning.
+			}
+			else
+			{
+				// \todo [2016-01-20 pyry] numBits = 0 doesn't make sense
+				bitParts[i]		= numBits > 0 ? getBits(nonBlockInput[i], 0, numBits-1) : 0;
+				tritParts[i]	= nonBlockInput[i] >> numBits;
+			}
+		}
+		else
+		{
+			bitParts[i]		= 0;
+			tritParts[i]	= 0;
+		}
+	}
+
+	const deUint32 T = fromExplicitInputBlock ? blockInput.tOrQValue : tritBlockTValue[tritParts[0]]
+																					  [tritParts[1]]
+																					  [tritParts[2]]
+																					  [tritParts[3]]
+																					  [tritParts[4]];
+
+	dst.setNext(numBits,	bitParts[0]);
+	dst.setNext(2,			getBits(T, 0, 1));
+	dst.setNext(numBits,	bitParts[1]);
+	dst.setNext(2,			getBits(T, 2, 3));
+	dst.setNext(numBits,	bitParts[2]);
+	dst.setNext(1,			getBit(T, 4));
+	dst.setNext(numBits,	bitParts[3]);
+	dst.setNext(2,			getBits(T, 5, 6));
+	dst.setNext(numBits,	bitParts[4]);
+	dst.setNext(1,			getBit(T, 7));
+}
+
+static void encodeISEQuintBlock (BitAssignAccessStream& dst, int numBits, bool fromExplicitInputBlock, const ISEInput::Block& blockInput, const deUint32* nonBlockInput, int numValues)
+{
+	// quintBlockQValue[q0][q1][q2] is a value of Q (not necessarily the only one) that will yield the given quints when decoded.
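+	// For example, quints (1,0,0) can be encoded with Q = quintBlockQValue[1][0][0] = 1; the decoder's quintsFromQ[1] is { 1,0,0 }.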
+	static const deUint32 quintBlockQValue[5][5][5] =
+	{
+		{{0, 32, 64, 96, 102}, {8, 40, 72, 104, 110}, {16, 48, 80, 112, 118}, {24, 56, 88, 120, 126}, {5, 37, 69, 101, 39}},
+		{{1, 33, 65, 97, 103}, {9, 41, 73, 105, 111}, {17, 49, 81, 113, 119}, {25, 57, 89, 121, 127}, {13, 45, 77, 109, 47}},
+		{{2, 34, 66, 98, 70}, {10, 42, 74, 106, 78}, {18, 50, 82, 114, 86}, {26, 58, 90, 122, 94}, {21, 53, 85, 117, 55}},
+		{{3, 35, 67, 99, 71}, {11, 43, 75, 107, 79}, {19, 51, 83, 115, 87}, {27, 59, 91, 123, 95}, {29, 61, 93, 125, 63}},
+		{{4, 36, 68, 100, 38}, {12, 44, 76, 108, 46}, {20, 52, 84, 116, 54}, {28, 60, 92, 124, 62}, {6, 14, 22, 30, 7}}
+	};
+
+	DE_ASSERT(de::inRange(numValues, 1, 3));
+
+	deUint32 quintParts[3];
+	deUint32 bitParts[3];
+
+	for (int i = 0; i < 3; i++)
+	{
+		if (i < numValues)
+		{
+			if (fromExplicitInputBlock)
+			{
+				bitParts[i]		= blockInput.bitValues[i];
+				quintParts[i]	= -1; // \note Won't be used, but silences warning.
+			}
+			else
+			{
+				// \todo [2016-01-20 pyry] numBits = 0 doesn't make sense
+				bitParts[i]		= numBits > 0 ? getBits(nonBlockInput[i], 0, numBits-1) : 0;
+				quintParts[i]	= nonBlockInput[i] >> numBits;
+			}
+		}
+		else
+		{
+			bitParts[i]		= 0;
+			quintParts[i]	= 0;
+		}
+	}
+
+	const deUint32 Q = fromExplicitInputBlock ? blockInput.tOrQValue : quintBlockQValue[quintParts[0]]
+																					   [quintParts[1]]
+																					   [quintParts[2]];
+
+	dst.setNext(numBits,	bitParts[0]);
+	dst.setNext(3,			getBits(Q, 0, 2));
+	dst.setNext(numBits,	bitParts[1]);
+	dst.setNext(2,			getBits(Q, 3, 4));
+	dst.setNext(numBits,	bitParts[2]);
+	dst.setNext(2,			getBits(Q, 5, 6));
+}
+
+static void encodeISEBitBlock (BitAssignAccessStream& dst, int numBits, deUint32 value)
+{
+	DE_ASSERT(de::inRange(value, 0u, (1u<<numBits)-1));
+	dst.setNext(numBits, value);
+}
+
+static void encodeISE (BitAssignAccessStream& dst, const ISEParams& params, const ISEInput& input, int numValues)
+{
+	if (params.mode == ISEMODE_TRIT)
+	{
+		const int numBlocks = deDivRoundUp32(numValues, 5);
+		for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+		{
+			const int numValuesInBlock = blockNdx == numBlocks-1 ? numValues - 5*(numBlocks-1) : 5;
+			encodeISETritBlock(dst, params.numBits, input.isGivenInBlockForm,
+							   input.isGivenInBlockForm ? input.value.block[blockNdx]	: ISEInput::Block(),
+							   input.isGivenInBlockForm ? DE_NULL						: &input.value.plain[5*blockNdx],
+							   numValuesInBlock);
+		}
+	}
+	else if (params.mode == ISEMODE_QUINT)
+	{
+		const int numBlocks = deDivRoundUp32(numValues, 3);
+		for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+		{
+			const int numValuesInBlock = blockNdx == numBlocks-1 ? numValues - 3*(numBlocks-1) : 3;
+			encodeISEQuintBlock(dst, params.numBits, input.isGivenInBlockForm,
+								input.isGivenInBlockForm ? input.value.block[blockNdx]	: ISEInput::Block(),
+								input.isGivenInBlockForm ? DE_NULL						: &input.value.plain[3*blockNdx],
+								numValuesInBlock);
+		}
+	}
+	else
+	{
+		DE_ASSERT(params.mode == ISEMODE_PLAIN_BIT);
+		for (int i = 0; i < numValues; i++)
+			encodeISEBitBlock(dst, params.numBits, input.isGivenInBlockForm ? input.value.block[i].bitValues[0] : input.value.plain[i]);
+	}
+}
+
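+// Weight data occupies the top of the block: it is written in reverse order starting from bit 127,
+// mirroring the order in which the decoder reads it. Color endpoint data (below) is written forwards
+// from its start index.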
+static void writeWeightData (AssignBlock128& dst, const ISEParams& iseParams, const ISEInput& input, int numWeights)
+{
+	const int				numWeightBits	= computeNumRequiredBits(iseParams, numWeights);
+	BitAssignAccessStream	access			(dst, 127, numWeightBits, false);
+	encodeISE(access, iseParams, input, numWeights);
+}
+
+static void writeColorEndpointData (AssignBlock128& dst, const ISEParams& iseParams, const ISEInput& input, int numEndpoints, int numBitsForColorEndpoints, int colorEndpointDataStartNdx)
+{
+	BitAssignAccessStream access(dst, colorEndpointDataStartNdx, numBitsForColorEndpoints, true);
+	encodeISE(access, iseParams, input, numEndpoints);
+}
+
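+// Assemble a complete non-void-extent block. The layout written here is: block mode in bits [0,10],
+// partition count minus one in bits [11,12], partition seed in bits [13,22] (multi-partition only),
+// color endpoint modes (with any extra CEM bits and the optional CCS field just below the weight
+// data), weight data packed downwards from bit 127, and color endpoint data starting at bit 17
+// (single partition) or bit 29 (multi-partition).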
+static AssignBlock128 generateNormalBlock (const NormalBlockParams& blockParams, int blockWidth, int blockHeight, const NormalBlockISEInputs& iseInputs)
+{
+	DE_ASSERT(isValidBlockParams(blockParams, blockWidth, blockHeight));
+	DE_UNREF(blockWidth);	// \note For non-debug builds.
+	DE_UNREF(blockHeight);	// \note For non-debug builds.
+
+	AssignBlock128	block;
+	const int		numWeights		= computeNumWeights(blockParams);
+	const int		numWeightBits	= computeNumRequiredBits(blockParams.weightISEParams, numWeights);
+
+	writeBlockMode(block, blockParams);
+
+	block.setBits(11, 12, blockParams.numPartitions - 1);
+	if (blockParams.numPartitions > 1)
+		block.setBits(13, 22, blockParams.partitionSeed);
+
+	{
+		const int extraCemBitsStart = 127 - numWeightBits - (blockParams.numPartitions == 1 || blockParams.isMultiPartSingleCemMode		? -1
+															: blockParams.numPartitions == 4											? 7
+															: blockParams.numPartitions == 3											? 4
+															: blockParams.numPartitions == 2											? 1
+															: 0);
+
+		writeColorEndpointModes(block, &blockParams.colorEndpointModes[0], blockParams.isMultiPartSingleCemMode, blockParams.numPartitions, extraCemBitsStart);
+
+		if (blockParams.isDualPlane)
+			block.setBits(extraCemBitsStart-2, extraCemBitsStart-1, blockParams.ccs);
+	}
+
+	writeWeightData(block, blockParams.weightISEParams, iseInputs.weight, numWeights);
+
+	{
+		const int			numColorEndpointValues		= computeNumColorEndpointValues(&blockParams.colorEndpointModes[0], blockParams.numPartitions, blockParams.isMultiPartSingleCemMode);
+		const int			numBitsForColorEndpoints	= computeNumBitsForColorEndpoints(blockParams);
+		const int			colorEndpointDataStartNdx	= blockParams.numPartitions == 1 ? 17 : 29;
+		const ISEParams&	colorEndpointISEParams		= computeMaximumRangeISEParams(numBitsForColorEndpoints, numColorEndpointValues);
+
+		writeColorEndpointData(block, colorEndpointISEParams, iseInputs.endpoint, numColorEndpointValues, numBitsForColorEndpoints, colorEndpointDataStartNdx);
+	}
+
+	return block;
+}
+
+// Generate default ISE inputs for weight and endpoint data - gradient-ish values.
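+// For example, five single-plane weights with a range maximum of 7 produce the ramp 0, 2, 4, 5, 7;
+// in dual-plane mode the odd-indexed (second-plane) weights get the inverted ramp instead.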
+static NormalBlockISEInputs generateDefaultISEInputs (const NormalBlockParams& blockParams)
+{
+	NormalBlockISEInputs result;
+
+	{
+		result.weight.isGivenInBlockForm = false;
+
+		const int numWeights		= computeNumWeights(blockParams);
+		const int weightRangeMax	= computeISERangeMax(blockParams.weightISEParams);
+
+		if (blockParams.isDualPlane)
+		{
+			for (int i = 0; i < numWeights; i += 2)
+				result.weight.value.plain[i] = (i*weightRangeMax + (numWeights-1)/2) / (numWeights-1);
+
+			for (int i = 1; i < numWeights; i += 2)
+				result.weight.value.plain[i] = weightRangeMax - (i*weightRangeMax + (numWeights-1)/2) / (numWeights-1);
+		}
+		else
+		{
+			for (int i = 0; i < numWeights; i++)
+				result.weight.value.plain[i] = (i*weightRangeMax + (numWeights-1)/2) / (numWeights-1);
+		}
+	}
+
+	{
+		result.endpoint.isGivenInBlockForm = false;
+
+		const int			numColorEndpointValues		= computeNumColorEndpointValues(&blockParams.colorEndpointModes[0], blockParams.numPartitions, blockParams.isMultiPartSingleCemMode);
+		const int			numBitsForColorEndpoints	= computeNumBitsForColorEndpoints(blockParams);
+		const ISEParams&	colorEndpointISEParams		= computeMaximumRangeISEParams(numBitsForColorEndpoints, numColorEndpointValues);
+		const int			colorEndpointRangeMax		= computeISERangeMax(colorEndpointISEParams);
+
+		for (int i = 0; i < numColorEndpointValues; i++)
+			result.endpoint.value.plain[i] = (i*colorEndpointRangeMax + (numColorEndpointValues-1)/2) / (numColorEndpointValues-1);
+	}
+
+	return result;
+}
+
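+// Weight ISE parameter candidates in order of increasing range; the corresponding range maxima are
+// 1, 2, 3, 4, 5, 7, 9, 11, 15, 19, 23 and 31, i.e. the weight ranges permitted by the block mode field.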
+static const ISEParams s_weightISEParamsCandidates[] =
+{
+	ISEParams(ISEMODE_PLAIN_BIT,	1),
+	ISEParams(ISEMODE_TRIT,			0),
+	ISEParams(ISEMODE_PLAIN_BIT,	2),
+	ISEParams(ISEMODE_QUINT,		0),
+	ISEParams(ISEMODE_TRIT,			1),
+	ISEParams(ISEMODE_PLAIN_BIT,	3),
+	ISEParams(ISEMODE_QUINT,		1),
+	ISEParams(ISEMODE_TRIT,			2),
+	ISEParams(ISEMODE_PLAIN_BIT,	4),
+	ISEParams(ISEMODE_QUINT,		2),
+	ISEParams(ISEMODE_TRIT,			3),
+	ISEParams(ISEMODE_PLAIN_BIT,	5)
+};
+
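+// Generate a single random block: roughly 10% are void-extent blocks, the rest are normal blocks with
+// randomized (but structurally valid) parameters and ISE inputs. The result is not guaranteed to
+// decode as a valid block.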
+void generateRandomBlock (deUint8* dst, const IVec3& blockSize, de::Random& rnd)
+{
+	DE_ASSERT(blockSize.z() == 1);
+
+	if (rnd.getFloat() < 0.1f)
+	{
+		// Void extent block.
+		const bool		isVoidExtentHDR		= rnd.getBool();
+		const deUint16	r					= isVoidExtentHDR ? deFloat32To16(rnd.getFloat(0.0f, 1.0f)) : (deUint16)rnd.getInt(0, 0xffff);
+		const deUint16	g					= isVoidExtentHDR ? deFloat32To16(rnd.getFloat(0.0f, 1.0f)) : (deUint16)rnd.getInt(0, 0xffff);
+		const deUint16	b					= isVoidExtentHDR ? deFloat32To16(rnd.getFloat(0.0f, 1.0f)) : (deUint16)rnd.getInt(0, 0xffff);
+		const deUint16	a					= isVoidExtentHDR ? deFloat32To16(rnd.getFloat(0.0f, 1.0f)) : (deUint16)rnd.getInt(0, 0xffff);
+		generateVoidExtentBlock(VoidExtentParams(isVoidExtentHDR, r, g, b, a)).assignToMemory(dst);
+	}
+	else
+	{
+		// Not void extent block.
+
+		// Generate block params.
+
+		NormalBlockParams blockParams;
+
+		do
+		{
+			blockParams.weightGridWidth				= rnd.getInt(2, blockSize.x());
+			blockParams.weightGridHeight			= rnd.getInt(2, blockSize.y());
+			blockParams.weightISEParams				= s_weightISEParamsCandidates[rnd.getInt(0, DE_LENGTH_OF_ARRAY(s_weightISEParamsCandidates)-1)];
+			blockParams.numPartitions				= rnd.getInt(1, 4);
+			blockParams.isMultiPartSingleCemMode	= rnd.getFloat() < 0.25f;
+			blockParams.isDualPlane					= blockParams.numPartitions != 4 && rnd.getBool();
+			blockParams.ccs							= rnd.getInt(0, 3);
+			blockParams.partitionSeed				= rnd.getInt(0, 1023);
+
+			blockParams.colorEndpointModes[0] = rnd.getInt(0, 15);
+
+			{
+				const int cemDiff = blockParams.isMultiPartSingleCemMode		? 0
+									: blockParams.colorEndpointModes[0] == 0	? 1
+									: blockParams.colorEndpointModes[0] == 15	? -1
+									: rnd.getBool()								? 1 : -1;
+
+				for (int i = 1; i < blockParams.numPartitions; i++)
+					blockParams.colorEndpointModes[i] = blockParams.colorEndpointModes[0] + (cemDiff == -1 ? rnd.getInt(-1, 0) : cemDiff == 1 ? rnd.getInt(0, 1) : 0);
+			}
+		} while (!isValidBlockParams(blockParams, blockSize.x(), blockSize.y()));
+
+		// Generate ISE inputs for both weight and endpoint data.
+
+		NormalBlockISEInputs iseInputs;
+
+		for (int weightOrEndpoints = 0; weightOrEndpoints <= 1; weightOrEndpoints++)
+		{
+			const bool			setWeights	= weightOrEndpoints == 0;
+			const int			numValues	= setWeights ? computeNumWeights(blockParams) :
+												computeNumColorEndpointValues(&blockParams.colorEndpointModes[0], blockParams.numPartitions, blockParams.isMultiPartSingleCemMode);
+			const ISEParams		iseParams	= setWeights ? blockParams.weightISEParams : computeMaximumRangeISEParams(computeNumBitsForColorEndpoints(blockParams), numValues);
+			ISEInput&			iseInput	= setWeights ? iseInputs.weight : iseInputs.endpoint;
+
+			iseInput.isGivenInBlockForm = rnd.getBool();
+
+			if (iseInput.isGivenInBlockForm)
+			{
+				const int numValuesPerISEBlock	= iseParams.mode == ISEMODE_TRIT	? 5
+												: iseParams.mode == ISEMODE_QUINT	? 3
+												:									  1;
+				const int iseBitMax				= (1 << iseParams.numBits) - 1;
+				const int numISEBlocks			= deDivRoundUp32(numValues, numValuesPerISEBlock);
+
+				for (int iseBlockNdx = 0; iseBlockNdx < numISEBlocks; iseBlockNdx++)
+				{
+					iseInput.value.block[iseBlockNdx].tOrQValue = rnd.getInt(0, 255);
+					for (int i = 0; i < numValuesPerISEBlock; i++)
+						iseInput.value.block[iseBlockNdx].bitValues[i] = rnd.getInt(0, iseBitMax);
+				}
+			}
+			else
+			{
+				const int rangeMax = computeISERangeMax(iseParams);
+
+				for (int valueNdx = 0; valueNdx < numValues; valueNdx++)
+					iseInput.value.plain[valueNdx] = rnd.getInt(0, rangeMax);
+			}
+		}
+
+		generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), iseInputs).assignToMemory(dst);
+	}
+}
+
+} // anonymous
+
+// Generate block data for a given BlockTestType and format.
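+//
+// Example usage (illustrative):
+//   std::vector<deUint8> data;
+//   generateBlockCaseTestData(data, COMPRESSEDTEXFORMAT_ASTC_8x8_RGBA, BLOCK_TEST_TYPE_WEIGHT_GRID);
+//   // data.size() is then a multiple of BLOCK_SIZE_BYTES (16 bytes per generated block).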
+void generateBlockCaseTestData (vector<deUint8>& dst, CompressedTexFormat format, BlockTestType testType)
+{
+	DE_ASSERT(isAstcFormat(format));
+	DE_ASSERT(!(isAstcSRGBFormat(format) && isBlockTestTypeHDROnly(testType)));
+
+	const IVec3 blockSize = getBlockPixelSize(format);
+	DE_ASSERT(blockSize.z() == 1);
+
+	switch (testType)
+	{
+		case BLOCK_TEST_TYPE_VOID_EXTENT_LDR:
+		// Generate a gradient-like set of LDR void-extent blocks.
+		{
+			const int			numBlocks	= 1<<13;
+			const deUint32		numValues	= 1<<16;
+			dst.reserve(numBlocks*BLOCK_SIZE_BYTES);
+
+			for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+			{
+				const deUint32 baseValue	= blockNdx*(numValues-1) / (numBlocks-1);
+				const deUint16 r			= (deUint16)((baseValue + numValues*0/4) % numValues);
+				const deUint16 g			= (deUint16)((baseValue + numValues*1/4) % numValues);
+				const deUint16 b			= (deUint16)((baseValue + numValues*2/4) % numValues);
+				const deUint16 a			= (deUint16)((baseValue + numValues*3/4) % numValues);
+
+				generateVoidExtentBlock(VoidExtentParams(false, r, g, b, a)).pushBytesToVector(dst);
+			}
+
+			break;
+		}
+
+		case BLOCK_TEST_TYPE_VOID_EXTENT_HDR:
+		// Generate a gradient-like set of HDR void-extent blocks, with values ranging from the largest finite negative fp16 value (-65504) to the largest finite positive one (+65504).
+		{
+			const float		minValue	= -65504.0f;
+			const float		maxValue	= +65504.0f;
+			const int		numBlocks	= 1<<13;
+			dst.reserve(numBlocks*BLOCK_SIZE_BYTES);
+
+			for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+			{
+				const int			rNdx	= (blockNdx + numBlocks*0/4) % numBlocks;
+				const int			gNdx	= (blockNdx + numBlocks*1/4) % numBlocks;
+				const int			bNdx	= (blockNdx + numBlocks*2/4) % numBlocks;
+				const int			aNdx	= (blockNdx + numBlocks*3/4) % numBlocks;
+				const deFloat16		r		= deFloat32To16(minValue + (float)rNdx * (maxValue - minValue) / (float)(numBlocks-1));
+				const deFloat16		g		= deFloat32To16(minValue + (float)gNdx * (maxValue - minValue) / (float)(numBlocks-1));
+				const deFloat16		b		= deFloat32To16(minValue + (float)bNdx * (maxValue - minValue) / (float)(numBlocks-1));
+				const deFloat16		a		= deFloat32To16(minValue + (float)aNdx * (maxValue - minValue) / (float)(numBlocks-1));
+
+				generateVoidExtentBlock(VoidExtentParams(true, r, g, b, a)).pushBytesToVector(dst);
+			}
+
+			break;
+		}
+
+		case BLOCK_TEST_TYPE_WEIGHT_GRID:
+		// Generate different combinations of plane count, weight ISE params, and grid size.
+		{
+			for (int isDualPlane = 0;		isDualPlane <= 1;												isDualPlane++)
+			for (int iseParamsNdx = 0;		iseParamsNdx < DE_LENGTH_OF_ARRAY(s_weightISEParamsCandidates);	iseParamsNdx++)
+			for (int weightGridWidth = 2;	weightGridWidth <= 12;											weightGridWidth++)
+			for (int weightGridHeight = 2;	weightGridHeight <= 12;											weightGridHeight++)
+			{
+				NormalBlockParams		blockParams;
+				NormalBlockISEInputs	iseInputs;
+
+				blockParams.weightGridWidth			= weightGridWidth;
+				blockParams.weightGridHeight		= weightGridHeight;
+				blockParams.isDualPlane				= isDualPlane != 0;
+				blockParams.weightISEParams			= s_weightISEParamsCandidates[iseParamsNdx];
+				blockParams.ccs						= 0;
+				blockParams.numPartitions			= 1;
+				blockParams.colorEndpointModes[0]	= 0;
+
+				if (isValidBlockParams(blockParams, blockSize.x(), blockSize.y()))
+					generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), generateDefaultISEInputs(blockParams)).pushBytesToVector(dst);
+			}
+
+			break;
+		}
+
+		case BLOCK_TEST_TYPE_WEIGHT_ISE:
+		// For each weight ISE param set, generate blocks that cover:
+		// - each single value of the ISE's range, at each position inside an ISE block
+		// - for trit and quint ISEs, each single T or Q value of an ISE block
+		{
+			for (int iseParamsNdx = 0;	iseParamsNdx < DE_LENGTH_OF_ARRAY(s_weightISEParamsCandidates);	iseParamsNdx++)
+			{
+				const ISEParams&	iseParams = s_weightISEParamsCandidates[iseParamsNdx];
+				NormalBlockParams	blockParams;
+
+				blockParams.weightGridWidth			= 4;
+				blockParams.weightGridHeight		= 4;
+				blockParams.weightISEParams			= iseParams;
+				blockParams.numPartitions			= 1;
+				blockParams.isDualPlane				= blockParams.weightGridWidth * blockParams.weightGridHeight < 24;
+				blockParams.ccs						= 0;
+				blockParams.colorEndpointModes[0]	= 0;
+
+				while (!isValidBlockParams(blockParams, blockSize.x(), blockSize.y()))
+				{
+					blockParams.weightGridWidth--;
+					blockParams.weightGridHeight--;
+				}
+
+				const int numValuesInISEBlock	= iseParams.mode == ISEMODE_TRIT ? 5 : iseParams.mode == ISEMODE_QUINT ? 3 : 1;
+				const int numWeights			= computeNumWeights(blockParams);
+
+				{
+					const int				numWeightValues		= (int)computeISERangeMax(iseParams) + 1;
+					const int				numBlocks			= deDivRoundUp32(numWeightValues, numWeights);
+					NormalBlockISEInputs	iseInputs			= generateDefaultISEInputs(blockParams);
+					iseInputs.weight.isGivenInBlockForm = false;
+
+					for (int offset = 0;	offset < numValuesInISEBlock;	offset++)
+					for (int blockNdx = 0;	blockNdx < numBlocks;			blockNdx++)
+					{
+						for (int weightNdx = 0; weightNdx < numWeights; weightNdx++)
+							iseInputs.weight.value.plain[weightNdx] = (blockNdx*numWeights + weightNdx + offset) % numWeightValues;
+
+						generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), iseInputs).pushBytesToVector(dst);
+					}
+				}
+
+				if (iseParams.mode == ISEMODE_TRIT || iseParams.mode == ISEMODE_QUINT)
+				{
+					NormalBlockISEInputs iseInputs = generateDefaultISEInputs(blockParams);
+					iseInputs.weight.isGivenInBlockForm = true;
+
+					const int numTQValues			= 1 << (iseParams.mode == ISEMODE_TRIT ? 8 : 7);
+					const int numISEBlocksPerBlock	= deDivRoundUp32(numWeights, numValuesInISEBlock);
+					const int numBlocks				= deDivRoundUp32(numTQValues, numISEBlocksPerBlock);
+
+					for (int offset = 0;	offset < numValuesInISEBlock;	offset++)
+					for (int blockNdx = 0;	blockNdx < numBlocks;			blockNdx++)
+					{
+						for (int iseBlockNdx = 0; iseBlockNdx < numISEBlocksPerBlock; iseBlockNdx++)
+						{
+							for (int i = 0; i < numValuesInISEBlock; i++)
+								iseInputs.weight.value.block[iseBlockNdx].bitValues[i] = 0;
+							iseInputs.weight.value.block[iseBlockNdx].tOrQValue = (blockNdx*numISEBlocksPerBlock + iseBlockNdx + offset) % numTQValues;
+						}
+
+						generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), iseInputs).pushBytesToVector(dst);
+					}
+				}
+			}
+
+			break;
+		}
+
+		case BLOCK_TEST_TYPE_CEMS:
+		// For each plane count & partition count combination, generate all color endpoint mode combinations.
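+		// (In separate-CEM mode the combinations are restricted so that all CEMs in a block fall within
+		// two adjacent CEM classes, matching what the CEM encoding can represent; see the class check below.)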
+		{
+			for (int isDualPlane = 0;		isDualPlane <= 1;								isDualPlane++)
+			for (int numPartitions = 1;		numPartitions <= (isDualPlane != 0 ? 3 : 4);	numPartitions++)
+			{
+				// Multi-partition, single-CEM mode.
+				if (numPartitions > 1)
+				{
+					for (deUint32 singleCem = 0; singleCem < 16; singleCem++)
+					{
+						NormalBlockParams blockParams;
+						blockParams.weightGridWidth				= 4;
+						blockParams.weightGridHeight			= 4;
+						blockParams.isDualPlane					= isDualPlane != 0;
+						blockParams.ccs							= 0;
+						blockParams.numPartitions				= numPartitions;
+						blockParams.isMultiPartSingleCemMode	= true;
+						blockParams.colorEndpointModes[0]		= singleCem;
+						blockParams.partitionSeed				= 634;
+
+						for (int iseParamsNdx = 0; iseParamsNdx < DE_LENGTH_OF_ARRAY(s_weightISEParamsCandidates); iseParamsNdx++)
+						{
+							blockParams.weightISEParams = s_weightISEParamsCandidates[iseParamsNdx];
+							if (isValidBlockParams(blockParams, blockSize.x(), blockSize.y()))
+							{
+								generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), generateDefaultISEInputs(blockParams)).pushBytesToVector(dst);
+								break;
+							}
+						}
+					}
+				}
+
+				// Separate-CEM mode.
+				for (deUint32 cem0 = 0; cem0 < 16; cem0++)
+				for (deUint32 cem1 = 0; cem1 < (numPartitions >= 2 ? 16u : 1u); cem1++)
+				for (deUint32 cem2 = 0; cem2 < (numPartitions >= 3 ? 16u : 1u); cem2++)
+				for (deUint32 cem3 = 0; cem3 < (numPartitions >= 4 ? 16u : 1u); cem3++)
+				{
+					NormalBlockParams blockParams;
+					blockParams.weightGridWidth				= 4;
+					blockParams.weightGridHeight			= 4;
+					blockParams.isDualPlane					= isDualPlane != 0;
+					blockParams.ccs							= 0;
+					blockParams.numPartitions				= numPartitions;
+					blockParams.isMultiPartSingleCemMode	= false;
+					blockParams.colorEndpointModes[0]		= cem0;
+					blockParams.colorEndpointModes[1]		= cem1;
+					blockParams.colorEndpointModes[2]		= cem2;
+					blockParams.colorEndpointModes[3]		= cem3;
+					blockParams.partitionSeed				= 634;
+
+					{
+						const deUint32 minCem		= *std::min_element(&blockParams.colorEndpointModes[0], &blockParams.colorEndpointModes[numPartitions]);
+						const deUint32 maxCem		= *std::max_element(&blockParams.colorEndpointModes[0], &blockParams.colorEndpointModes[numPartitions]);
+						const deUint32 minCemClass	= minCem/4;
+						const deUint32 maxCemClass	= maxCem/4;
+
+						if (maxCemClass - minCemClass > 1)
+							continue;
+					}
+
+					for (int iseParamsNdx = 0; iseParamsNdx < DE_LENGTH_OF_ARRAY(s_weightISEParamsCandidates); iseParamsNdx++)
+					{
+						blockParams.weightISEParams = s_weightISEParamsCandidates[iseParamsNdx];
+						if (isValidBlockParams(blockParams, blockSize.x(), blockSize.y()))
+						{
+							generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), generateDefaultISEInputs(blockParams)).pushBytesToVector(dst);
+							break;
+						}
+					}
+				}
+			}
+
+			break;
+		}
+
+		case BLOCK_TEST_TYPE_PARTITION_SEED:
+		// Test all partition seeds ("partition pattern indices").
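+		// (Partition counts 2..4 with 1024 seeds each, i.e. 3072 blocks in total.)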
+		{
+			for (int		numPartitions = 2;	numPartitions <= 4;		numPartitions++)
+			for (deUint32	partitionSeed = 0;	partitionSeed < 1<<10;	partitionSeed++)
+			{
+				NormalBlockParams blockParams;
+				blockParams.weightGridWidth				= 4;
+				blockParams.weightGridHeight			= 4;
+				blockParams.weightISEParams				= ISEParams(ISEMODE_PLAIN_BIT, 2);
+				blockParams.isDualPlane					= false;
+				blockParams.numPartitions				= numPartitions;
+				blockParams.isMultiPartSingleCemMode	= true;
+				blockParams.colorEndpointModes[0]		= 0;
+				blockParams.partitionSeed				= partitionSeed;
+
+				generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), generateDefaultISEInputs(blockParams)).pushBytesToVector(dst);
+			}
+
+			break;
+		}
+
+		// \note Fall-through.
+		case BLOCK_TEST_TYPE_ENDPOINT_VALUE_LDR:
+		case BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_NO_15:
+		case BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_15:
+		// For each endpoint mode, for each pair of components in the endpoint value, test 10x10 combinations of values for that pair.
+		// \note Separate modes for HDR and mode 15 due to different color scales and biases.
+		{
+			for (deUint32 cem = 0; cem < 16; cem++)
+			{
+				const bool isHDRCem = cem == 2		||
+									  cem == 3		||
+									  cem == 7		||
+									  cem == 11		||
+									  cem == 14		||
+									  cem == 15;
+
+				if ((testType == BLOCK_TEST_TYPE_ENDPOINT_VALUE_LDR			&& isHDRCem)					||
+					(testType == BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_NO_15		&& (!isHDRCem || cem == 15))	||
+					(testType == BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_15		&& cem != 15))
+					continue;
+
+				NormalBlockParams blockParams;
+				blockParams.weightGridWidth			= 3;
+				blockParams.weightGridHeight		= 4;
+				blockParams.weightISEParams			= ISEParams(ISEMODE_PLAIN_BIT, 2);
+				blockParams.isDualPlane				= false;
+				blockParams.numPartitions			= 1;
+				blockParams.colorEndpointModes[0]	= cem;
+
+				{
+					const int			numBitsForEndpoints		= computeNumBitsForColorEndpoints(blockParams);
+					const int			numEndpointParts		= computeNumColorEndpointValues(cem);
+					const ISEParams		endpointISE				= computeMaximumRangeISEParams(numBitsForEndpoints, numEndpointParts);
+					const int			endpointISERangeMax		= computeISERangeMax(endpointISE);
+
+					for (int endpointPartNdx0 = 0;						endpointPartNdx0 < numEndpointParts; endpointPartNdx0++)
+					for (int endpointPartNdx1 = endpointPartNdx0+1;		endpointPartNdx1 < numEndpointParts; endpointPartNdx1++)
+					{
+						NormalBlockISEInputs	iseInputs			= generateDefaultISEInputs(blockParams);
+						const int				numEndpointValues	= de::min(10, endpointISERangeMax+1);
+
+						for (int endpointValueNdx0 = 0; endpointValueNdx0 < numEndpointValues; endpointValueNdx0++)
+						for (int endpointValueNdx1 = 0; endpointValueNdx1 < numEndpointValues; endpointValueNdx1++)
+						{
+							const int endpointValue0 = endpointValueNdx0 * endpointISERangeMax / (numEndpointValues-1);
+							const int endpointValue1 = endpointValueNdx1 * endpointISERangeMax / (numEndpointValues-1);
+
+							iseInputs.endpoint.value.plain[endpointPartNdx0] = endpointValue0;
+							iseInputs.endpoint.value.plain[endpointPartNdx1] = endpointValue1;
+
+							generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), iseInputs).pushBytesToVector(dst);
+						}
+					}
+				}
+			}
+
+			break;
+		}
+
+		case BLOCK_TEST_TYPE_ENDPOINT_ISE:
+		// Similar to BLOCK_TEST_TYPE_WEIGHT_ISE, see above.
+		{
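+			// \note These are the range maxima of trit endpoint ISEs with 1..6 bits and quint endpoint ISEs with 1..5 bits.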
+			static const deUint32 endpointRangeMaximums[] = { 5, 9, 11, 19, 23, 39, 47, 79, 95, 159, 191 };
+
+			for (int endpointRangeNdx = 0; endpointRangeNdx < DE_LENGTH_OF_ARRAY(endpointRangeMaximums); endpointRangeNdx++)
+			{
+				bool validCaseGenerated = false;
+
+				for (int numPartitions = 1;			!validCaseGenerated && numPartitions <= 4;														numPartitions++)
+				for (int isDual = 0;				!validCaseGenerated && isDual <= 1;																isDual++)
+				for (int weightISEParamsNdx = 0;	!validCaseGenerated && weightISEParamsNdx < DE_LENGTH_OF_ARRAY(s_weightISEParamsCandidates);	weightISEParamsNdx++)
+				for (int weightGridWidth = 2;		!validCaseGenerated && weightGridWidth <= 12;													weightGridWidth++)
+				for (int weightGridHeight = 2;		!validCaseGenerated && weightGridHeight <= 12;													weightGridHeight++)
+				{
+					NormalBlockParams blockParams;
+					blockParams.weightGridWidth				= weightGridWidth;
+					blockParams.weightGridHeight			= weightGridHeight;
+					blockParams.weightISEParams				= s_weightISEParamsCandidates[weightISEParamsNdx];
+					blockParams.isDualPlane					= isDual != 0;
+					blockParams.ccs							= 0;
+					blockParams.numPartitions				= numPartitions;
+					blockParams.isMultiPartSingleCemMode	= true;
+					blockParams.colorEndpointModes[0]		= 12;
+					blockParams.partitionSeed				= 634;
+
+					if (isValidBlockParams(blockParams, blockSize.x(), blockSize.y()))
+					{
+						const ISEParams endpointISEParams = computeMaximumRangeISEParams(computeNumBitsForColorEndpoints(blockParams),
+																						 computeNumColorEndpointValues(&blockParams.colorEndpointModes[0], numPartitions, true));
+
+						if (computeISERangeMax(endpointISEParams) == endpointRangeMaximums[endpointRangeNdx])
+						{
+							validCaseGenerated = true;
+
+							const int numColorEndpoints		= computeNumColorEndpointValues(&blockParams.colorEndpointModes[0], numPartitions, blockParams.isMultiPartSingleCemMode);
+							const int numValuesInISEBlock	= endpointISEParams.mode == ISEMODE_TRIT ? 5 : endpointISEParams.mode == ISEMODE_QUINT ? 3 : 1;
+
+							{
+								const int				numColorEndpointValues	= (int)computeISERangeMax(endpointISEParams) + 1;
+								const int				numBlocks				= deDivRoundUp32(numColorEndpointValues, numColorEndpoints);
+								NormalBlockISEInputs	iseInputs				= generateDefaultISEInputs(blockParams);
+								iseInputs.endpoint.isGivenInBlockForm = false;
+
+								for (int offset = 0;	offset < numValuesInISEBlock;	offset++)
+								for (int blockNdx = 0;	blockNdx < numBlocks;			blockNdx++)
+								{
+									for (int endpointNdx = 0; endpointNdx < numColorEndpoints; endpointNdx++)
+										iseInputs.endpoint.value.plain[endpointNdx] = (blockNdx*numColorEndpoints + endpointNdx + offset) % numColorEndpointValues;
+
+									generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), iseInputs).pushBytesToVector(dst);
+								}
+							}
+
+							if (endpointISEParams.mode == ISEMODE_TRIT || endpointISEParams.mode == ISEMODE_QUINT)
+							{
+								NormalBlockISEInputs iseInputs = generateDefaultISEInputs(blockParams);
+								iseInputs.endpoint.isGivenInBlockForm = true;
+
+								const int numTQValues			= 1 << (endpointISEParams.mode == ISEMODE_TRIT ? 8 : 7);
+								const int numISEBlocksPerBlock	= deDivRoundUp32(numColorEndpoints, numValuesInISEBlock);
+								const int numBlocks				= deDivRoundUp32(numTQValues, numISEBlocksPerBlock);
+
+								for (int offset = 0;	offset < numValuesInISEBlock;	offset++)
+								for (int blockNdx = 0;	blockNdx < numBlocks;			blockNdx++)
+								{
+									for (int iseBlockNdx = 0; iseBlockNdx < numISEBlocksPerBlock; iseBlockNdx++)
+									{
+										for (int i = 0; i < numValuesInISEBlock; i++)
+											iseInputs.endpoint.value.block[iseBlockNdx].bitValues[i] = 0;
+										iseInputs.endpoint.value.block[iseBlockNdx].tOrQValue = (blockNdx*numISEBlocksPerBlock + iseBlockNdx + offset) % numTQValues;
+									}
+
+									generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), iseInputs).pushBytesToVector(dst);
+								}
+							}
+						}
+					}
+				}
+
+				DE_ASSERT(validCaseGenerated);
+			}
+
+			break;
+		}
+
+		case BLOCK_TEST_TYPE_CCS:
+		// For all partition counts usable with dual-plane mode (1 to 3), test all values of the CCS (color component selector).
+		{
+			for (int		numPartitions = 1;		numPartitions <= 3;		numPartitions++)
+			for (deUint32	ccs = 0;				ccs < 4;				ccs++)
+			{
+				NormalBlockParams blockParams;
+				blockParams.weightGridWidth				= 3;
+				blockParams.weightGridHeight			= 3;
+				blockParams.weightISEParams				= ISEParams(ISEMODE_PLAIN_BIT, 2);
+				blockParams.isDualPlane					= true;
+				blockParams.ccs							= ccs;
+				blockParams.numPartitions				= numPartitions;
+				blockParams.isMultiPartSingleCemMode	= true;
+				blockParams.colorEndpointModes[0]		= 8;
+				blockParams.partitionSeed				= 634;
+
+				generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), generateDefaultISEInputs(blockParams)).pushBytesToVector(dst);
+			}
+
+			break;
+		}
+
+		case BLOCK_TEST_TYPE_RANDOM:
+		// Generate a number of random (including invalid) blocks.
+		{
+			const int		numBlocks	= 16384;
+			const deUint32	seed		= 1;
+
+			dst.resize(numBlocks*BLOCK_SIZE_BYTES);
+
+			generateRandomBlocks(&dst[0], numBlocks, format, seed);
+
+			break;
+		}
+
+		default:
+			DE_ASSERT(false);
+	}
+}
+
+void generateRandomBlocks (deUint8* dst, size_t numBlocks, CompressedTexFormat format, deUint32 seed)
+{
+	const IVec3		blockSize			= getBlockPixelSize(format);
+	de::Random		rnd					(seed);
+	size_t			numBlocksGenerated	= 0;
+
+	DE_ASSERT(isAstcFormat(format));
+	DE_ASSERT(blockSize.z() == 1);
+
+	for (numBlocksGenerated = 0; numBlocksGenerated < numBlocks; numBlocksGenerated++)
+	{
+		deUint8* const	curBlockPtr		= dst + numBlocksGenerated*BLOCK_SIZE_BYTES;
+
+		generateRandomBlock(curBlockPtr, blockSize, rnd);
+	}
+}
+
+void generateRandomValidBlocks (deUint8* dst, size_t numBlocks, CompressedTexFormat format, TexDecompressionParams::AstcMode mode, deUint32 seed)
+{
+	const IVec3		blockSize			= getBlockPixelSize(format);
+	de::Random		rnd					(seed);
+	size_t			numBlocksGenerated	= 0;
+
+	DE_ASSERT(isAstcFormat(format));
+	DE_ASSERT(blockSize.z() == 1);
+
+	for (numBlocksGenerated = 0; numBlocksGenerated < numBlocks; numBlocksGenerated++)
+	{
+		deUint8* const	curBlockPtr		= dst + numBlocksGenerated*BLOCK_SIZE_BYTES;
+
+		do
+		{
+			generateRandomBlock(curBlockPtr, blockSize, rnd);
+		} while (!isValidBlock(curBlockPtr, format, mode));
+	}
+}
+
+// Generate a number of trivial dummy blocks to fill unneeded space in a texture.
+void generateDummyVoidExtentBlocks (deUint8* dst, size_t numBlocks)
+{
+	AssignBlock128 block = generateVoidExtentBlock(VoidExtentParams(false, 0, 0, 0, 0));
+	for (size_t ndx = 0; ndx < numBlocks; ndx++)
+		block.assignToMemory(&dst[ndx * BLOCK_SIZE_BYTES]);
+}
+
+void generateDummyNormalBlocks (deUint8* dst, size_t numBlocks, int blockWidth, int blockHeight)
+{
+	NormalBlockParams blockParams;
+
+	blockParams.weightGridWidth			= 3;
+	blockParams.weightGridHeight		= 3;
+	blockParams.weightISEParams			= ISEParams(ISEMODE_PLAIN_BIT, 5);
+	blockParams.isDualPlane				= false;
+	blockParams.numPartitions			= 1;
+	blockParams.colorEndpointModes[0]	= 8;
+
+	NormalBlockISEInputs iseInputs = generateDefaultISEInputs(blockParams);
+	iseInputs.weight.isGivenInBlockForm = false;
+
+	const int numWeights		= computeNumWeights(blockParams);
+	const int weightRangeMax	= computeISERangeMax(blockParams.weightISEParams);
+
+	for (size_t blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+	{
+		for (int weightNdx = 0; weightNdx < numWeights; weightNdx++)
+			iseInputs.weight.value.plain[weightNdx] = (deUint32)((blockNdx*numWeights + weightNdx) * weightRangeMax / (numBlocks*numWeights-1));
+
+		generateNormalBlock(blockParams, blockWidth, blockHeight, iseInputs).assignToMemory(dst + blockNdx*BLOCK_SIZE_BYTES);
+	}
+}
+
+bool isValidBlock (const deUint8* data, CompressedTexFormat format, TexDecompressionParams::AstcMode mode)
+{
+	const tcu::IVec3		blockPixelSize	= getBlockPixelSize(format);
+	const bool				isSRGB			= isAstcSRGBFormat(format);
+	const bool				isLDR			= isSRGB || mode == TexDecompressionParams::ASTCMODE_LDR;
+	union
+	{
+		deUint8		sRGB[MAX_BLOCK_WIDTH*MAX_BLOCK_HEIGHT*4];
+		float		linear[MAX_BLOCK_WIDTH*MAX_BLOCK_HEIGHT*4];
+	} tmpBuffer;
+	const Block128			blockData		(data);
+	const DecompressResult	result			= decompressBlock((isSRGB ? (void*)&tmpBuffer.sRGB[0] : (void*)&tmpBuffer.linear[0]),
+															  blockData, blockPixelSize.x(), blockPixelSize.y(), isSRGB, isLDR);
+
+	return result == DECOMPRESS_RESULT_VALID_BLOCK;
+}
+
+void decompress (const PixelBufferAccess& dst, const deUint8* data, CompressedTexFormat format, TexDecompressionParams::AstcMode mode)
+{
+	const bool			isSRGBFormat	= isAstcSRGBFormat(format);
+
+#if defined(DE_DEBUG)
+	const tcu::IVec3	blockPixelSize	= getBlockPixelSize(format);
+
+	DE_ASSERT(dst.getWidth()	== blockPixelSize.x() &&
+			  dst.getHeight()	== blockPixelSize.y() &&
+			  dst.getDepth()	== blockPixelSize.z());
+	DE_ASSERT(mode == TexDecompressionParams::ASTCMODE_LDR || mode == TexDecompressionParams::ASTCMODE_HDR);
+#endif
+
+	decompress(dst, data, isSRGBFormat, isSRGBFormat || mode == TexDecompressionParams::ASTCMODE_LDR);
+}
+
+const char* getBlockTestTypeName (BlockTestType testType)
+{
+	switch (testType)
+	{
+		case BLOCK_TEST_TYPE_VOID_EXTENT_LDR:			return "void_extent_ldr";
+		case BLOCK_TEST_TYPE_VOID_EXTENT_HDR:			return "void_extent_hdr";
+		case BLOCK_TEST_TYPE_WEIGHT_GRID:				return "weight_grid";
+		case BLOCK_TEST_TYPE_WEIGHT_ISE:				return "weight_ise";
+		case BLOCK_TEST_TYPE_CEMS:						return "color_endpoint_modes";
+		case BLOCK_TEST_TYPE_PARTITION_SEED:			return "partition_pattern_index";
+		case BLOCK_TEST_TYPE_ENDPOINT_VALUE_LDR:		return "endpoint_value_ldr";
+		case BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_NO_15:	return "endpoint_value_hdr_cem_not_15";
+		case BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_15:		return "endpoint_value_hdr_cem_15";
+		case BLOCK_TEST_TYPE_ENDPOINT_ISE:				return "endpoint_ise";
+		case BLOCK_TEST_TYPE_CCS:						return "color_component_selector";
+		case BLOCK_TEST_TYPE_RANDOM:					return "random";
+		default:
+			DE_ASSERT(false);
+			return DE_NULL;
+	}
+}
+
+const char* getBlockTestTypeDescription (BlockTestType testType)
+{
+	switch (testType)
+	{
+		case BLOCK_TEST_TYPE_VOID_EXTENT_LDR:			return "Test void extent block, LDR mode";
+		case BLOCK_TEST_TYPE_VOID_EXTENT_HDR:			return "Test void extent block, HDR mode";
+		case BLOCK_TEST_TYPE_WEIGHT_GRID:				return "Test combinations of plane count, weight integer sequence encoding parameters, and weight grid size";
+		case BLOCK_TEST_TYPE_WEIGHT_ISE:				return "Test different integer sequence encoding block values for weight grid";
+		case BLOCK_TEST_TYPE_CEMS:						return "Test different color endpoint mode combinations, combined with different plane and partition counts";
+		case BLOCK_TEST_TYPE_PARTITION_SEED:			return "Test different partition pattern indices";
+		case BLOCK_TEST_TYPE_ENDPOINT_VALUE_LDR:		return "Test various combinations of each pair of color endpoint values, for each LDR color endpoint mode";
+		case BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_NO_15:	return "Test various combinations of each pair of color endpoint values, for each HDR color endpoint mode other than mode 15";
+		case BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_15:		return "Test various combinations of each pair of color endpoint values, HDR color endpoint mode 15";
+		case BLOCK_TEST_TYPE_ENDPOINT_ISE:				return "Test different integer sequence encoding block values for color endpoints";
+		case BLOCK_TEST_TYPE_CCS:						return "Test color component selector, for different partition counts";
+		case BLOCK_TEST_TYPE_RANDOM:					return "Random block test";
+		default:
+			DE_ASSERT(false);
+			return DE_NULL;
+	}
+}
+
+bool isBlockTestTypeHDROnly (BlockTestType testType)
+{
+	return testType == BLOCK_TEST_TYPE_VOID_EXTENT_HDR			||
+		   testType == BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_NO_15	||
+		   testType == BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_15;
+}
+
+Vec4 getBlockTestTypeColorScale (BlockTestType testType)
+{
+	switch (testType)
+	{
+		case tcu::astc::BLOCK_TEST_TYPE_VOID_EXTENT_HDR:			return Vec4(0.5f/65504.0f);
+		case tcu::astc::BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_NO_15:	return Vec4(1.0f/65504.0f, 1.0f/65504.0f, 1.0f/65504.0f, 1.0f);
+		case tcu::astc::BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_15:		return Vec4(1.0f/65504.0f);
+		default:													return Vec4(1.0f);
+	}
+}
+
+Vec4 getBlockTestTypeColorBias (BlockTestType testType)
+{
+	switch (testType)
+	{
+		case tcu::astc::BLOCK_TEST_TYPE_VOID_EXTENT_HDR:	return Vec4(0.5f);
+		default:											return Vec4(0.0f);
+	}
+}
+
+} // astc
+} // tcu
diff --git a/framework/common/tcuAstcUtil.hpp b/framework/common/tcuAstcUtil.hpp
new file mode 100644
index 0000000..620e492
--- /dev/null
+++ b/framework/common/tcuAstcUtil.hpp
@@ -0,0 +1,80 @@
+#ifndef _TCUASTCUTIL_HPP
+#define _TCUASTCUTIL_HPP
+/*-------------------------------------------------------------------------
+ * drawElements Quality Program Tester Core
+ * ----------------------------------------
+ *
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *//*!
+ * \file
+ * \brief ASTC Utilities.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuCompressedTexture.hpp"
+
+#include <vector>
+
+namespace tcu
+{
+namespace astc
+{
+
+enum BlockTestType
+{
+	BLOCK_TEST_TYPE_VOID_EXTENT_LDR = 0,
+	BLOCK_TEST_TYPE_VOID_EXTENT_HDR,
+	BLOCK_TEST_TYPE_WEIGHT_GRID,
+	BLOCK_TEST_TYPE_WEIGHT_ISE,
+	BLOCK_TEST_TYPE_CEMS,
+	BLOCK_TEST_TYPE_PARTITION_SEED,
+	BLOCK_TEST_TYPE_ENDPOINT_VALUE_LDR,
+	BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_NO_15,
+	BLOCK_TEST_TYPE_ENDPOINT_VALUE_HDR_15,
+	BLOCK_TEST_TYPE_ENDPOINT_ISE,
+	BLOCK_TEST_TYPE_CCS,
+	BLOCK_TEST_TYPE_RANDOM,
+
+	BLOCK_TEST_TYPE_LAST
+};
+
+enum
+{
+	BLOCK_SIZE_BYTES = 128/8,
+};
+
+const char*		getBlockTestTypeName			(BlockTestType testType);
+const char*		getBlockTestTypeDescription		(BlockTestType testType);
+bool			isBlockTestTypeHDROnly			(BlockTestType testType);
+Vec4			getBlockTestTypeColorScale		(BlockTestType testType);
+Vec4			getBlockTestTypeColorBias		(BlockTestType testType);
+
+void			generateBlockCaseTestData		(std::vector<deUint8>& dst, CompressedTexFormat format, BlockTestType testType);
+
+void			generateRandomBlocks			(deUint8* dst, size_t numBlocks, CompressedTexFormat format, deUint32 seed);
+void			generateRandomValidBlocks		(deUint8* dst, size_t numBlocks, CompressedTexFormat format, TexDecompressionParams::AstcMode mode, deUint32 seed);
+
+void			generateDummyVoidExtentBlocks	(deUint8* dst, size_t numBlocks);
+void			generateDummyNormalBlocks		(deUint8* dst, size_t numBlocks, int blockWidth, int blockHeight);
+
+bool			isValidBlock					(const deUint8* data, CompressedTexFormat format, TexDecompressionParams::AstcMode mode);
+
+void			decompress						(const PixelBufferAccess& dst, const deUint8* data, CompressedTexFormat format, TexDecompressionParams::AstcMode mode);
+
+} // astc
+} // tcu
+
+#endif // _TCUASTCUTIL_HPP
diff --git a/framework/common/tcuCommandLine.cpp b/framework/common/tcuCommandLine.cpp
index c660e63..b41ef7b 100644
--- a/framework/common/tcuCommandLine.cpp
+++ b/framework/common/tcuCommandLine.cpp
@@ -81,7 +81,9 @@
 DE_DECLARE_COMMAND_LINE_OPT(EGLWindowType,				std::string);
 DE_DECLARE_COMMAND_LINE_OPT(EGLPixmapType,				std::string);
 DE_DECLARE_COMMAND_LINE_OPT(LogImages,					bool);
+DE_DECLARE_COMMAND_LINE_OPT(LogShaderSources,			bool);
 DE_DECLARE_COMMAND_LINE_OPT(TestOOM,					bool);
+DE_DECLARE_COMMAND_LINE_OPT(VKDeviceID,					int);
 
 static void parseIntList (const char* src, std::vector<int>* dst)
 {
@@ -163,7 +165,9 @@
 		<< Option<EGLDisplayType>		(DE_NULL,	"deqp-egl-display-type",		"EGL native display type")
 		<< Option<EGLWindowType>		(DE_NULL,	"deqp-egl-window-type",			"EGL native window type")
 		<< Option<EGLPixmapType>		(DE_NULL,	"deqp-egl-pixmap-type",			"EGL native pixmap type")
+		<< Option<VKDeviceID>			(DE_NULL,	"deqp-vk-device-id",			"Vulkan device ID (IDs start from 1)",									"1")
 		<< Option<LogImages>			(DE_NULL,	"deqp-log-images",				"Enable or disable logging of result images",		s_enableNames,		"enable")
+		<< Option<LogShaderSources>		(DE_NULL,	"deqp-log-shader-sources",		"Enable or disable logging of shader sources",		s_enableNames,		"enable")
 		<< Option<TestOOM>				(DE_NULL,	"deqp-test-oom",				"Run tests that exhaust memory on purpose",			s_enableNames,		TEST_OOM_DEFAULT);
 }
 
@@ -718,6 +722,9 @@
 	if (!m_cmdLine.getOption<opt::LogImages>())
 		m_logFlags |= QP_TEST_LOG_EXCLUDE_IMAGES;
 
+	if (!m_cmdLine.getOption<opt::LogShaderSources>())
+		m_logFlags |= QP_TEST_LOG_EXCLUDE_SHADER_SOURCES;
+
 	if ((m_cmdLine.hasOption<opt::CasePath>()?1:0) +
 		(m_cmdLine.hasOption<opt::CaseList>()?1:0) +
 		(m_cmdLine.hasOption<opt::CaseListFile>()?1:0) +
@@ -788,23 +795,24 @@
 	return isOk;
 }
 
-const char*				CommandLine::getLogFileName				(void) const	{ return m_cmdLine.getOption<opt::LogFilename>().c_str();		   }
-deUint32				CommandLine::getLogFlags				(void) const	{ return m_logFlags;											   }
-RunMode					CommandLine::getRunMode					(void) const	{ return m_cmdLine.getOption<opt::RunMode>();					   }
-const char*				CommandLine::getCaseListExportFile		(void) const	{ return m_cmdLine.getOption<opt::ExportFilenamePattern>().c_str();}
-WindowVisibility		CommandLine::getVisibility				(void) const	{ return m_cmdLine.getOption<opt::Visibility>();				   }
-bool					CommandLine::isWatchDogEnabled			(void) const	{ return m_cmdLine.getOption<opt::WatchDog>();					   }
-bool					CommandLine::isCrashHandlingEnabled		(void) const	{ return m_cmdLine.getOption<opt::CrashHandler>();				   }
-int						CommandLine::getBaseSeed				(void) const	{ return m_cmdLine.getOption<opt::BaseSeed>();					   }
-int						CommandLine::getTestIterationCount		(void) const	{ return m_cmdLine.getOption<opt::TestIterationCount>();		   }
-int						CommandLine::getSurfaceWidth			(void) const	{ return m_cmdLine.getOption<opt::SurfaceWidth>();				   }
-int						CommandLine::getSurfaceHeight			(void) const	{ return m_cmdLine.getOption<opt::SurfaceHeight>();				   }
-SurfaceType				CommandLine::getSurfaceType				(void) const	{ return m_cmdLine.getOption<opt::SurfaceType>();				   }
-ScreenRotation			CommandLine::getScreenRotation			(void) const	{ return m_cmdLine.getOption<opt::ScreenRotation>();			   }
-int						CommandLine::getGLConfigId				(void) const	{ return m_cmdLine.getOption<opt::GLConfigID>();				   }
-int						CommandLine::getCLPlatformId			(void) const	{ return m_cmdLine.getOption<opt::CLPlatformID>();				   }
-const std::vector<int>&	CommandLine::getCLDeviceIds				(void) const	{ return m_cmdLine.getOption<opt::CLDeviceIDs>();				   }
-bool					CommandLine::isOutOfMemoryTestEnabled	(void) const	{ return m_cmdLine.getOption<opt::TestOOM>();					   }
+const char*				CommandLine::getLogFileName				(void) const	{ return m_cmdLine.getOption<opt::LogFilename>().c_str();			}
+deUint32				CommandLine::getLogFlags				(void) const	{ return m_logFlags;												}
+RunMode					CommandLine::getRunMode					(void) const	{ return m_cmdLine.getOption<opt::RunMode>();						}
+const char*				CommandLine::getCaseListExportFile		(void) const	{ return m_cmdLine.getOption<opt::ExportFilenamePattern>().c_str();	}
+WindowVisibility		CommandLine::getVisibility				(void) const	{ return m_cmdLine.getOption<opt::Visibility>();					}
+bool					CommandLine::isWatchDogEnabled			(void) const	{ return m_cmdLine.getOption<opt::WatchDog>();						}
+bool					CommandLine::isCrashHandlingEnabled		(void) const	{ return m_cmdLine.getOption<opt::CrashHandler>();					}
+int						CommandLine::getBaseSeed				(void) const	{ return m_cmdLine.getOption<opt::BaseSeed>();						}
+int						CommandLine::getTestIterationCount		(void) const	{ return m_cmdLine.getOption<opt::TestIterationCount>();			}
+int						CommandLine::getSurfaceWidth			(void) const	{ return m_cmdLine.getOption<opt::SurfaceWidth>();					}
+int						CommandLine::getSurfaceHeight			(void) const	{ return m_cmdLine.getOption<opt::SurfaceHeight>();					}
+SurfaceType				CommandLine::getSurfaceType				(void) const	{ return m_cmdLine.getOption<opt::SurfaceType>();					}
+ScreenRotation			CommandLine::getScreenRotation			(void) const	{ return m_cmdLine.getOption<opt::ScreenRotation>();				}
+int						CommandLine::getGLConfigId				(void) const	{ return m_cmdLine.getOption<opt::GLConfigID>();					}
+int						CommandLine::getCLPlatformId			(void) const	{ return m_cmdLine.getOption<opt::CLPlatformID>();					}
+const std::vector<int>&	CommandLine::getCLDeviceIds				(void) const	{ return m_cmdLine.getOption<opt::CLDeviceIDs>();					}
+int						CommandLine::getVKDeviceId				(void) const	{ return m_cmdLine.getOption<opt::VKDeviceID>();					}
+bool					CommandLine::isOutOfMemoryTestEnabled	(void) const	{ return m_cmdLine.getOption<opt::TestOOM>();						}
 
 const char* CommandLine::getGLContextType (void) const
 {
diff --git a/framework/common/tcuCommandLine.hpp b/framework/common/tcuCommandLine.hpp
index 37a79a7..6195bec 100644
--- a/framework/common/tcuCommandLine.hpp
+++ b/framework/common/tcuCommandLine.hpp
@@ -176,6 +176,9 @@
 	//! Get EGL native pixmap factory (--deqp-egl-pixmap-type)
 	const char*						getEGLPixmapType			(void) const;
 
+	//! Get Vulkan device ID (--deqp-vk-device-id)
+	int								getVKDeviceId				(void) const;
+
 	//! Should we run tests that exhaust memory (--deqp-test-oom)
 	bool							isOutOfMemoryTestEnabled(void) const;
 
diff --git a/framework/common/tcuCompressedTexture.cpp b/framework/common/tcuCompressedTexture.cpp
index 7d0073b..4521a08 100644
--- a/framework/common/tcuCompressedTexture.cpp
+++ b/framework/common/tcuCompressedTexture.cpp
@@ -23,6 +23,7 @@
 
 #include "tcuCompressedTexture.hpp"
 #include "tcuTextureUtil.hpp"
+#include "tcuAstcUtil.hpp"
 
 #include "deStringUtil.hpp"
 #include "deFloat16.h"
@@ -32,23 +33,11 @@
 namespace tcu
 {
 
-namespace
-{
-
-enum { ASTC_BLOCK_SIZE_BYTES = 128/8 };
-
-template <typename T, typename Y>
-struct isSameType			{ enum { V = 0 }; };
-template <typename T>
-struct isSameType<T, T>		{ enum { V = 1 }; };
-
-} // anonymous
-
 int getBlockSize (CompressedTexFormat format)
 {
 	if (isAstcFormat(format))
 	{
-		return ASTC_BLOCK_SIZE_BYTES;
+		return astc::BLOCK_SIZE_BYTES;
 	}
 	else if (isEtcFormat(format))
 	{
@@ -271,11 +260,6 @@
 namespace
 {
 
-inline int divRoundUp (int a, int b)
-{
-	return a/b + ((a%b) ? 1 : 0);
-}
-
 // \todo [2013-08-06 nuutti] ETC and ASTC decompression codes are rather unrelated, and are already in their own "private" namespaces - should this be split to multiple files?
 
 namespace EtcDecompressInternal
@@ -998,1539 +982,6 @@
 	}
 }
 
-namespace ASTCDecompressInternal
-{
-
-enum
-{
-	ASTC_MAX_BLOCK_WIDTH	= 12,
-	ASTC_MAX_BLOCK_HEIGHT	= 12
-};
-
-inline deUint32 getBit (deUint32 src, int ndx)
-{
-	DE_ASSERT(de::inBounds(ndx, 0, 32));
-	return (src >> ndx) & 1;
-}
-
-inline deUint32 getBits (deUint32 src, int low, int high)
-{
-	const int numBits = (high-low) + 1;
-	DE_ASSERT(de::inRange(numBits, 1, 32));
-	return (src >> low) & ((1u<<numBits)-1);
-}
-
-inline bool isBitSet (deUint32 src, int ndx)
-{
-	return getBit(src, ndx) != 0;
-}
-
-inline deUint32 reverseBits (deUint32 src, int numBits)
-{
-	DE_ASSERT(de::inRange(numBits, 0, 32));
-	deUint32 result = 0;
-	for (int i = 0; i < numBits; i++)
-		result |= ((src >> i) & 1) << (numBits-1-i);
-	return result;
-}
-
-inline deUint32 bitReplicationScale (deUint32 src, int numSrcBits, int numDstBits)
-{
-	DE_ASSERT(numSrcBits <= numDstBits);
-	DE_ASSERT((src & ((1<<numSrcBits)-1)) == src);
-	deUint32 dst = 0;
-	for (int shift = numDstBits-numSrcBits; shift > -numSrcBits; shift -= numSrcBits)
-		dst |= shift >= 0 ? src << shift : src >> -shift;
-	return dst;
-}
-
-inline deInt32 signExtend (deInt32 src, int numSrcBits)
-{
-	DE_ASSERT(de::inRange(numSrcBits, 2, 31));
-	const bool negative = (src & (1 << (numSrcBits-1))) != 0;
-	return src | (negative ? ~((1 << numSrcBits) - 1) : 0);
-}
-
-inline bool isFloat16InfOrNan (deFloat16 v)
-{
-	return getBits(v, 10, 14) == 31;
-}
-
-// A helper for getting bits from a 128-bit block.
-class Block128
-{
-private:
-	typedef deUint64 Word;
-
-	enum
-	{
-		WORD_BYTES	= sizeof(Word),
-		WORD_BITS	= 8*WORD_BYTES,
-		NUM_WORDS	= 128 / WORD_BITS
-	};
-
-	DE_STATIC_ASSERT(128 % WORD_BITS == 0);
-
-public:
-	Block128 (const deUint8* src)
-	{
-		for (int wordNdx = 0; wordNdx < NUM_WORDS; wordNdx++)
-		{
-			m_words[wordNdx] = 0;
-			for (int byteNdx = 0; byteNdx < WORD_BYTES; byteNdx++)
-				m_words[wordNdx] |= (Word)src[wordNdx*WORD_BYTES + byteNdx] << (8*byteNdx);
-		}
-	}
-
-	deUint32 getBit (int ndx) const
-	{
-		DE_ASSERT(de::inBounds(ndx, 0, 128));
-		return (m_words[ndx / WORD_BITS] >> (ndx % WORD_BITS)) & 1;
-	}
-
-	deUint32 getBits (int low, int high) const
-	{
-		DE_ASSERT(de::inBounds(low, 0, 128));
-		DE_ASSERT(de::inBounds(high, 0, 128));
-		DE_ASSERT(de::inRange(high-low+1, 0, 32));
-
-		if (high-low+1 == 0)
-			return 0;
-
-		const int word0Ndx = low / WORD_BITS;
-		const int word1Ndx = high / WORD_BITS;
-
-		// \note "foo << bar << 1" done instead of "foo << (bar+1)" to avoid overflow, i.e. shift amount being too big.
-
-		if (word0Ndx == word1Ndx)
-			return (deUint32)((m_words[word0Ndx] & ((((Word)1 << high%WORD_BITS << 1) - 1))) >> ((Word)low % WORD_BITS));
-		else
-		{
-			DE_ASSERT(word1Ndx == word0Ndx + 1);
-
-			return (deUint32)(m_words[word0Ndx] >> (low%WORD_BITS)) |
-				   (deUint32)((m_words[word1Ndx] & (((Word)1 << high%WORD_BITS << 1) - 1)) << (high-low - high%WORD_BITS));
-		}
-	}
-
-	bool isBitSet (int ndx) const
-	{
-		DE_ASSERT(de::inBounds(ndx, 0, 128));
-		return getBit(ndx) != 0;
-	}
-
-private:
-	Word m_words[NUM_WORDS];
-};
-
-// A helper for sequential access into a Block128.
-class BitAccessStream
-{
-public:
-	BitAccessStream (const Block128& src, int startNdxInSrc, int length, bool forward)
-		: m_src				(src)
-		, m_startNdxInSrc	(startNdxInSrc)
-		, m_length			(length)
-		, m_forward			(forward)
-		, m_ndx				(0)
-	{
-	}
-
-	// Get the next num bits. Bits at positions greater than or equal to m_length are zeros.
-	deUint32 getNext (int num)
-	{
-		if (num == 0 || m_ndx >= m_length)
-			return 0;
-
-		const int end				= m_ndx + num;
-		const int numBitsFromSrc	= de::max(0, de::min(m_length, end) - m_ndx);
-		const int low				= m_ndx;
-		const int high				= m_ndx + numBitsFromSrc - 1;
-
-		m_ndx += num;
-
-		return m_forward ?			   m_src.getBits(m_startNdxInSrc + low,  m_startNdxInSrc + high)
-						 : reverseBits(m_src.getBits(m_startNdxInSrc - high, m_startNdxInSrc - low), numBitsFromSrc);
-	}
-
-private:
-	const Block128&		m_src;
-	const int			m_startNdxInSrc;
-	const int			m_length;
-	const bool			m_forward;
-
-	int					m_ndx;
-};
-
-enum ISEMode
-{
-	ISEMODE_TRIT = 0,
-	ISEMODE_QUINT,
-	ISEMODE_PLAIN_BIT,
-
-	ISEMODE_LAST
-};
-
-struct ISEParams
-{
-	ISEMode		mode;
-	int			numBits;
-
-	ISEParams (ISEMode mode_, int numBits_) : mode(mode_), numBits(numBits_) {}
-};
-
-inline int computeNumRequiredBits (const ISEParams& iseParams, int numValues)
-{
-	switch (iseParams.mode)
-	{
-		case ISEMODE_TRIT:			return divRoundUp(numValues*8, 5) + numValues*iseParams.numBits;
-		case ISEMODE_QUINT:			return divRoundUp(numValues*7, 3) + numValues*iseParams.numBits;
-		case ISEMODE_PLAIN_BIT:		return numValues*iseParams.numBits;
-		default:
-			DE_ASSERT(false);
-			return -1;
-	}
-}
-
-struct ISEDecodedResult
-{
-	deUint32 m;
-	deUint32 tq; //!< Trit or quint value, depending on ISE mode.
-	deUint32 v;
-};
-
-// Data from an ASTC block's "block mode" part (i.e. bits [0,10]).
-struct ASTCBlockMode
-{
-	bool		isError;
-	// \note Following fields only relevant if !isError.
-	bool		isVoidExtent;
-	// \note Following fields only relevant if !isVoidExtent.
-	bool		isDualPlane;
-	int			weightGridWidth;
-	int			weightGridHeight;
-	ISEParams	weightISEParams;
-
-	ASTCBlockMode (void)
-		: isError			(true)
-		, isVoidExtent		(true)
-		, isDualPlane		(true)
-		, weightGridWidth	(-1)
-		, weightGridHeight	(-1)
-		, weightISEParams	(ISEMODE_LAST, -1)
-	{
-	}
-};
-
-inline int computeNumWeights (const ASTCBlockMode& mode)
-{
-	return mode.weightGridWidth * mode.weightGridHeight * (mode.isDualPlane ? 2 : 1);
-}
-
-struct ColorEndpointPair
-{
-	UVec4 e0;
-	UVec4 e1;
-};
-
-struct TexelWeightPair
-{
-	deUint32 w[2];
-};
-
-ASTCBlockMode getASTCBlockMode (deUint32 blockModeData)
-{
-	ASTCBlockMode blockMode;
-	blockMode.isError = true; // \note Set to false later, if not error.
-
-	blockMode.isVoidExtent = getBits(blockModeData, 0, 8) == 0x1fc;
-
-	if (!blockMode.isVoidExtent)
-	{
-		if ((getBits(blockModeData, 0, 1) == 0 && getBits(blockModeData, 6, 8) == 7) || getBits(blockModeData, 0, 3) == 0)
-			return blockMode; // Invalid ("reserved").
-
-		deUint32 r = (deUint32)-1; // \note Set in the following branches.
-
-		if (getBits(blockModeData, 0, 1) == 0)
-		{
-			const deUint32 r0	= getBit(blockModeData, 4);
-			const deUint32 r1	= getBit(blockModeData, 2);
-			const deUint32 r2	= getBit(blockModeData, 3);
-			const deUint32 i78	= getBits(blockModeData, 7, 8);
-
-			r = (r2 << 2) | (r1 << 1) | (r0 << 0);
-
-			if (i78 == 3)
-			{
-				const bool i5 = isBitSet(blockModeData, 5);
-				blockMode.weightGridWidth	= i5 ? 10 : 6;
-				blockMode.weightGridHeight	= i5 ? 6  : 10;
-			}
-			else
-			{
-				const deUint32 a = getBits(blockModeData, 5, 6);
-				switch (i78)
-				{
-					case 0:		blockMode.weightGridWidth = 12;		blockMode.weightGridHeight = a + 2;									break;
-					case 1:		blockMode.weightGridWidth = a + 2;	blockMode.weightGridHeight = 12;									break;
-					case 2:		blockMode.weightGridWidth = a + 6;	blockMode.weightGridHeight = getBits(blockModeData, 9, 10) + 6;		break;
-					default: DE_ASSERT(false);
-				}
-			}
-		}
-		else
-		{
-			const deUint32 r0	= getBit(blockModeData, 4);
-			const deUint32 r1	= getBit(blockModeData, 0);
-			const deUint32 r2	= getBit(blockModeData, 1);
-			const deUint32 i23	= getBits(blockModeData, 2, 3);
-			const deUint32 a	= getBits(blockModeData, 5, 6);
-
-			r = (r2 << 2) | (r1 << 1) | (r0 << 0);
-
-			if (i23 == 3)
-			{
-				const deUint32	b	= getBit(blockModeData, 7);
-				const bool		i8	= isBitSet(blockModeData, 8);
-				blockMode.weightGridWidth	= i8 ? b+2 : a+2;
-				blockMode.weightGridHeight	= i8 ? a+2 : b+6;
-			}
-			else
-			{
-				const deUint32 b = getBits(blockModeData, 7, 8);
-
-				switch (i23)
-				{
-					case 0:		blockMode.weightGridWidth = b + 4;	blockMode.weightGridHeight = a + 2;	break;
-					case 1:		blockMode.weightGridWidth = b + 8;	blockMode.weightGridHeight = a + 2;	break;
-					case 2:		blockMode.weightGridWidth = a + 2;	blockMode.weightGridHeight = b + 8;	break;
-					default: DE_ASSERT(false);
-				}
-			}
-		}
-
-		const bool	zeroDH		= getBits(blockModeData, 0, 1) == 0 && getBits(blockModeData, 7, 8) == 2;
-		const bool	h			= zeroDH ? 0 : isBitSet(blockModeData, 9);
-		blockMode.isDualPlane	= zeroDH ? 0 : isBitSet(blockModeData, 10);
-
-		{
-			ISEMode&	m	= blockMode.weightISEParams.mode;
-			int&		b	= blockMode.weightISEParams.numBits;
-			m = ISEMODE_PLAIN_BIT;
-			b = 0;
-
-			if (h)
-			{
-				switch (r)
-				{
-					case 2:							m = ISEMODE_QUINT;	b = 1;	break;
-					case 3:		m = ISEMODE_TRIT;						b = 2;	break;
-					case 4:												b = 4;	break;
-					case 5:							m = ISEMODE_QUINT;	b = 2;	break;
-					case 6:		m = ISEMODE_TRIT;						b = 3;	break;
-					case 7:												b = 5;	break;
-					default: DE_ASSERT(false);
-				}
-			}
-			else
-			{
-				switch (r)
-				{
-					case 2: 											b = 1;	break;
-					case 3: 	m = ISEMODE_TRIT;								break;
-					case 4: 											b = 2;	break;
-					case 5: 						m = ISEMODE_QUINT;			break;
-					case 6: 	m = ISEMODE_TRIT;						b = 1;	break;
-					case 7: 											b = 3;	break;
-					default: DE_ASSERT(false);
-				}
-			}
-		}
-	}
-
-	blockMode.isError = false;
-	return blockMode;
-}
-
-inline void setASTCErrorColorBlock (void* dst, int blockWidth, int blockHeight, bool isSRGB)
-{
-	if (isSRGB)
-	{
-		deUint8* const dstU = (deUint8*)dst;
-
-		for (int i = 0; i < blockWidth*blockHeight; i++)
-		{
-			dstU[4*i + 0] = 0xff;
-			dstU[4*i + 1] = 0;
-			dstU[4*i + 2] = 0xff;
-			dstU[4*i + 3] = 0xff;
-		}
-	}
-	else
-	{
-		float* const dstF = (float*)dst;
-
-		for (int i = 0; i < blockWidth*blockHeight; i++)
-		{
-			dstF[4*i + 0] = 1.0f;
-			dstF[4*i + 1] = 0.0f;
-			dstF[4*i + 2] = 1.0f;
-			dstF[4*i + 3] = 1.0f;
-		}
-	}
-}
-
-void decodeVoidExtentBlock (void* dst, const Block128& blockData, int blockWidth, int blockHeight, bool isSRGB, bool isLDRMode)
-{
-	const deUint32	minSExtent			= blockData.getBits(12, 24);
-	const deUint32	maxSExtent			= blockData.getBits(25, 37);
-	const deUint32	minTExtent			= blockData.getBits(38, 50);
-	const deUint32	maxTExtent			= blockData.getBits(51, 63);
-	const bool		allExtentsAllOnes	= minSExtent == 0x1fff && maxSExtent == 0x1fff && minTExtent == 0x1fff && maxTExtent == 0x1fff;
-	const bool		isHDRBlock			= blockData.isBitSet(9);
-
-	if ((isLDRMode && isHDRBlock) || (!allExtentsAllOnes && (minSExtent >= maxSExtent || minTExtent >= maxTExtent)))
-	{
-		setASTCErrorColorBlock(dst, blockWidth, blockHeight, isSRGB);
-		return;
-	}
-
-	const deUint32 rgba[4] =
-	{
-		blockData.getBits(64,  79),
-		blockData.getBits(80,  95),
-		blockData.getBits(96,  111),
-		blockData.getBits(112, 127)
-	};
-
-	if (isSRGB)
-	{
-		deUint8* const dstU = (deUint8*)dst;
-		for (int i = 0; i < blockWidth*blockHeight; i++)
-		for (int c = 0; c < 4; c++)
-			dstU[i*4 + c] = (deUint8)((rgba[c] & 0xff00) >> 8);
-	}
-	else
-	{
-		float* const dstF = (float*)dst;
-
-		if (isHDRBlock)
-		{
-			for (int c = 0; c < 4; c++)
-			{
-				if (isFloat16InfOrNan((deFloat16)rgba[c]))
-					throw InternalError("Infinity or NaN color component in HDR void extent block in ASTC texture (behavior undefined by ASTC specification)");
-			}
-
-			for (int i = 0; i < blockWidth*blockHeight; i++)
-			for (int c = 0; c < 4; c++)
-				dstF[i*4 + c] = deFloat16To32((deFloat16)rgba[c]);
-		}
-		else
-		{
-			for (int i = 0; i < blockWidth*blockHeight; i++)
-			for (int c = 0; c < 4; c++)
-				dstF[i*4 + c] = rgba[c] == 65535 ? 1.0f : (float)rgba[c] / 65536.0f;
-		}
-	}
-
-	return;
-}
-
-void decodeColorEndpointModes (deUint32* endpointModesDst, const Block128& blockData, int numPartitions, int extraCemBitsStart)
-{
-	if (numPartitions == 1)
-		endpointModesDst[0] = blockData.getBits(13, 16);
-	else
-	{
-		const deUint32 highLevelSelector = blockData.getBits(23, 24);
-
-		if (highLevelSelector == 0)
-		{
-			const deUint32 mode = blockData.getBits(25, 28);
-			for (int i = 0; i < numPartitions; i++)
-				endpointModesDst[i] = mode;
-		}
-		else
-		{
-			for (int partNdx = 0; partNdx < numPartitions; partNdx++)
-			{
-				const deUint32 cemClass		= highLevelSelector - (blockData.isBitSet(25 + partNdx) ? 0 : 1);
-				const deUint32 lowBit0Ndx	= numPartitions + 2*partNdx;
-				const deUint32 lowBit1Ndx	= numPartitions + 2*partNdx + 1;
-				const deUint32 lowBit0		= blockData.getBit(lowBit0Ndx < 4 ? 25+lowBit0Ndx : extraCemBitsStart+lowBit0Ndx-4);
-				const deUint32 lowBit1		= blockData.getBit(lowBit1Ndx < 4 ? 25+lowBit1Ndx : extraCemBitsStart+lowBit1Ndx-4);
-
-				endpointModesDst[partNdx] = (cemClass << 2) | (lowBit1 << 1) | lowBit0;
-			}
-		}
-	}
-}
-
-inline int computeNumColorEndpointValues (deUint32 endpointMode)
-{
-	DE_ASSERT(endpointMode < 16);
-	return (endpointMode/4 + 1) * 2;
-}
-
-int computeNumColorEndpointValues (const deUint32* endpointModes, int numPartitions)
-{
-	int result = 0;
-	for (int i = 0; i < numPartitions; i++)
-		result += computeNumColorEndpointValues(endpointModes[i]);
-	return result;
-}
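
For reference (illustrative sketch, not part of this change): the endpoint-value count decoded above grows by two per CEM class, which the cases of decodeColorEndpoints() below rely on. A minimal standalone restatement of the same formula:

#include <cassert>

// Sketch of the formula in computeNumColorEndpointValues() above:
// CEM 0..3 decode 2 values, 4..7 decode 4, 8..11 decode 6, 12..15 decode 8.
static int numEndpointValuesSketch (unsigned endpointMode)
{
	assert(endpointMode < 16u);
	return (int)(endpointMode/4u + 1u) * 2;
}

int main (void)
{
	assert(numEndpointValuesSketch(0u)  == 2);	// LDR luminance (case 0 below)
	assert(numEndpointValuesSketch(8u)  == 6);	// LDR RGB direct (case 8 below)
	assert(numEndpointValuesSketch(12u) == 8);	// LDR RGBA direct (case 12 below)
	return 0;
}
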
-
-void decodeISETritBlock (ISEDecodedResult* dst, int numValues, BitAccessStream& data, int numBits)
-{
-	DE_ASSERT(de::inRange(numValues, 1, 5));
-
-	deUint32 m[5];
-
-	m[0]			= data.getNext(numBits);
-	deUint32 T01	= data.getNext(2);
-	m[1]			= data.getNext(numBits);
-	deUint32 T23	= data.getNext(2);
-	m[2]			= data.getNext(numBits);
-	deUint32 T4		= data.getNext(1);
-	m[3]			= data.getNext(numBits);
-	deUint32 T56	= data.getNext(2);
-	m[4]			= data.getNext(numBits);
-	deUint32 T7		= data.getNext(1);
-
-	switch (numValues)
-	{
-		// \note Fall-throughs.
-		case 1: T23		= 0;
-		case 2: T4		= 0;
-		case 3: T56		= 0;
-		case 4: T7		= 0;
-		case 5: break;
-		default:
-			DE_ASSERT(false);
-	}
-
-	const deUint32 T = (T7 << 7) | (T56 << 5) | (T4 << 4) | (T23 << 2) | (T01 << 0);
-
-	static const deUint32 tritsFromT[256][5] =
-	{
-		{ 0,0,0,0,0 }, { 1,0,0,0,0 }, { 2,0,0,0,0 }, { 0,0,2,0,0 }, { 0,1,0,0,0 }, { 1,1,0,0,0 }, { 2,1,0,0,0 }, { 1,0,2,0,0 }, { 0,2,0,0,0 }, { 1,2,0,0,0 }, { 2,2,0,0,0 }, { 2,0,2,0,0 }, { 0,2,2,0,0 }, { 1,2,2,0,0 }, { 2,2,2,0,0 }, { 2,0,2,0,0 },
-		{ 0,0,1,0,0 }, { 1,0,1,0,0 }, { 2,0,1,0,0 }, { 0,1,2,0,0 }, { 0,1,1,0,0 }, { 1,1,1,0,0 }, { 2,1,1,0,0 }, { 1,1,2,0,0 }, { 0,2,1,0,0 }, { 1,2,1,0,0 }, { 2,2,1,0,0 }, { 2,1,2,0,0 }, { 0,0,0,2,2 }, { 1,0,0,2,2 }, { 2,0,0,2,2 }, { 0,0,2,2,2 },
-		{ 0,0,0,1,0 }, { 1,0,0,1,0 }, { 2,0,0,1,0 }, { 0,0,2,1,0 }, { 0,1,0,1,0 }, { 1,1,0,1,0 }, { 2,1,0,1,0 }, { 1,0,2,1,0 }, { 0,2,0,1,0 }, { 1,2,0,1,0 }, { 2,2,0,1,0 }, { 2,0,2,1,0 }, { 0,2,2,1,0 }, { 1,2,2,1,0 }, { 2,2,2,1,0 }, { 2,0,2,1,0 },
-		{ 0,0,1,1,0 }, { 1,0,1,1,0 }, { 2,0,1,1,0 }, { 0,1,2,1,0 }, { 0,1,1,1,0 }, { 1,1,1,1,0 }, { 2,1,1,1,0 }, { 1,1,2,1,0 }, { 0,2,1,1,0 }, { 1,2,1,1,0 }, { 2,2,1,1,0 }, { 2,1,2,1,0 }, { 0,1,0,2,2 }, { 1,1,0,2,2 }, { 2,1,0,2,2 }, { 1,0,2,2,2 },
-		{ 0,0,0,2,0 }, { 1,0,0,2,0 }, { 2,0,0,2,0 }, { 0,0,2,2,0 }, { 0,1,0,2,0 }, { 1,1,0,2,0 }, { 2,1,0,2,0 }, { 1,0,2,2,0 }, { 0,2,0,2,0 }, { 1,2,0,2,0 }, { 2,2,0,2,0 }, { 2,0,2,2,0 }, { 0,2,2,2,0 }, { 1,2,2,2,0 }, { 2,2,2,2,0 }, { 2,0,2,2,0 },
-		{ 0,0,1,2,0 }, { 1,0,1,2,0 }, { 2,0,1,2,0 }, { 0,1,2,2,0 }, { 0,1,1,2,0 }, { 1,1,1,2,0 }, { 2,1,1,2,0 }, { 1,1,2,2,0 }, { 0,2,1,2,0 }, { 1,2,1,2,0 }, { 2,2,1,2,0 }, { 2,1,2,2,0 }, { 0,2,0,2,2 }, { 1,2,0,2,2 }, { 2,2,0,2,2 }, { 2,0,2,2,2 },
-		{ 0,0,0,0,2 }, { 1,0,0,0,2 }, { 2,0,0,0,2 }, { 0,0,2,0,2 }, { 0,1,0,0,2 }, { 1,1,0,0,2 }, { 2,1,0,0,2 }, { 1,0,2,0,2 }, { 0,2,0,0,2 }, { 1,2,0,0,2 }, { 2,2,0,0,2 }, { 2,0,2,0,2 }, { 0,2,2,0,2 }, { 1,2,2,0,2 }, { 2,2,2,0,2 }, { 2,0,2,0,2 },
-		{ 0,0,1,0,2 }, { 1,0,1,0,2 }, { 2,0,1,0,2 }, { 0,1,2,0,2 }, { 0,1,1,0,2 }, { 1,1,1,0,2 }, { 2,1,1,0,2 }, { 1,1,2,0,2 }, { 0,2,1,0,2 }, { 1,2,1,0,2 }, { 2,2,1,0,2 }, { 2,1,2,0,2 }, { 0,2,2,2,2 }, { 1,2,2,2,2 }, { 2,2,2,2,2 }, { 2,0,2,2,2 },
-		{ 0,0,0,0,1 }, { 1,0,0,0,1 }, { 2,0,0,0,1 }, { 0,0,2,0,1 }, { 0,1,0,0,1 }, { 1,1,0,0,1 }, { 2,1,0,0,1 }, { 1,0,2,0,1 }, { 0,2,0,0,1 }, { 1,2,0,0,1 }, { 2,2,0,0,1 }, { 2,0,2,0,1 }, { 0,2,2,0,1 }, { 1,2,2,0,1 }, { 2,2,2,0,1 }, { 2,0,2,0,1 },
-		{ 0,0,1,0,1 }, { 1,0,1,0,1 }, { 2,0,1,0,1 }, { 0,1,2,0,1 }, { 0,1,1,0,1 }, { 1,1,1,0,1 }, { 2,1,1,0,1 }, { 1,1,2,0,1 }, { 0,2,1,0,1 }, { 1,2,1,0,1 }, { 2,2,1,0,1 }, { 2,1,2,0,1 }, { 0,0,1,2,2 }, { 1,0,1,2,2 }, { 2,0,1,2,2 }, { 0,1,2,2,2 },
-		{ 0,0,0,1,1 }, { 1,0,0,1,1 }, { 2,0,0,1,1 }, { 0,0,2,1,1 }, { 0,1,0,1,1 }, { 1,1,0,1,1 }, { 2,1,0,1,1 }, { 1,0,2,1,1 }, { 0,2,0,1,1 }, { 1,2,0,1,1 }, { 2,2,0,1,1 }, { 2,0,2,1,1 }, { 0,2,2,1,1 }, { 1,2,2,1,1 }, { 2,2,2,1,1 }, { 2,0,2,1,1 },
-		{ 0,0,1,1,1 }, { 1,0,1,1,1 }, { 2,0,1,1,1 }, { 0,1,2,1,1 }, { 0,1,1,1,1 }, { 1,1,1,1,1 }, { 2,1,1,1,1 }, { 1,1,2,1,1 }, { 0,2,1,1,1 }, { 1,2,1,1,1 }, { 2,2,1,1,1 }, { 2,1,2,1,1 }, { 0,1,1,2,2 }, { 1,1,1,2,2 }, { 2,1,1,2,2 }, { 1,1,2,2,2 },
-		{ 0,0,0,2,1 }, { 1,0,0,2,1 }, { 2,0,0,2,1 }, { 0,0,2,2,1 }, { 0,1,0,2,1 }, { 1,1,0,2,1 }, { 2,1,0,2,1 }, { 1,0,2,2,1 }, { 0,2,0,2,1 }, { 1,2,0,2,1 }, { 2,2,0,2,1 }, { 2,0,2,2,1 }, { 0,2,2,2,1 }, { 1,2,2,2,1 }, { 2,2,2,2,1 }, { 2,0,2,2,1 },
-		{ 0,0,1,2,1 }, { 1,0,1,2,1 }, { 2,0,1,2,1 }, { 0,1,2,2,1 }, { 0,1,1,2,1 }, { 1,1,1,2,1 }, { 2,1,1,2,1 }, { 1,1,2,2,1 }, { 0,2,1,2,1 }, { 1,2,1,2,1 }, { 2,2,1,2,1 }, { 2,1,2,2,1 }, { 0,2,1,2,2 }, { 1,2,1,2,2 }, { 2,2,1,2,2 }, { 2,1,2,2,2 },
-		{ 0,0,0,1,2 }, { 1,0,0,1,2 }, { 2,0,0,1,2 }, { 0,0,2,1,2 }, { 0,1,0,1,2 }, { 1,1,0,1,2 }, { 2,1,0,1,2 }, { 1,0,2,1,2 }, { 0,2,0,1,2 }, { 1,2,0,1,2 }, { 2,2,0,1,2 }, { 2,0,2,1,2 }, { 0,2,2,1,2 }, { 1,2,2,1,2 }, { 2,2,2,1,2 }, { 2,0,2,1,2 },
-		{ 0,0,1,1,2 }, { 1,0,1,1,2 }, { 2,0,1,1,2 }, { 0,1,2,1,2 }, { 0,1,1,1,2 }, { 1,1,1,1,2 }, { 2,1,1,1,2 }, { 1,1,2,1,2 }, { 0,2,1,1,2 }, { 1,2,1,1,2 }, { 2,2,1,1,2 }, { 2,1,2,1,2 }, { 0,2,2,2,2 }, { 1,2,2,2,2 }, { 2,2,2,2,2 }, { 2,1,2,2,2 }
-	};
-
-	const deUint32 (& trits)[5] = tritsFromT[T];
-
-	for (int i = 0; i < numValues; i++)
-	{
-		dst[i].m	= m[i];
-		dst[i].tq	= trits[i];
-		dst[i].v	= (trits[i] << numBits) + m[i];
-	}
-}
-
-void decodeISEQuintBlock (ISEDecodedResult* dst, int numValues, BitAccessStream& data, int numBits)
-{
-	DE_ASSERT(de::inRange(numValues, 1, 3));
-
-	deUint32 m[3];
-
-	m[0]			= data.getNext(numBits);
-	deUint32 Q012	= data.getNext(3);
-	m[1]			= data.getNext(numBits);
-	deUint32 Q34	= data.getNext(2);
-	m[2]			= data.getNext(numBits);
-	deUint32 Q56	= data.getNext(2);
-
-	switch (numValues)
-	{
-		// \note Fall-throughs.
-		case 1: Q34		= 0;
-		case 2: Q56		= 0;
-		case 3: break;
-		default:
-			DE_ASSERT(false);
-	}
-
-	const deUint32 Q = (Q56 << 5) | (Q34 << 3) | (Q012 << 0);
-
-	static const deUint32 quintsFromQ[256][3] =
-	{
-		{ 0,0,0 }, { 1,0,0 }, { 2,0,0 }, { 3,0,0 }, { 4,0,0 }, { 0,4,0 }, { 4,4,0 }, { 4,4,4 }, { 0,1,0 }, { 1,1,0 }, { 2,1,0 }, { 3,1,0 }, { 4,1,0 }, { 1,4,0 }, { 4,4,1 }, { 4,4,4 },
-		{ 0,2,0 }, { 1,2,0 }, { 2,2,0 }, { 3,2,0 }, { 4,2,0 }, { 2,4,0 }, { 4,4,2 }, { 4,4,4 }, { 0,3,0 }, { 1,3,0 }, { 2,3,0 }, { 3,3,0 }, { 4,3,0 }, { 3,4,0 }, { 4,4,3 }, { 4,4,4 },
-		{ 0,0,1 }, { 1,0,1 }, { 2,0,1 }, { 3,0,1 }, { 4,0,1 }, { 0,4,1 }, { 4,0,4 }, { 0,4,4 }, { 0,1,1 }, { 1,1,1 }, { 2,1,1 }, { 3,1,1 }, { 4,1,1 }, { 1,4,1 }, { 4,1,4 }, { 1,4,4 },
-		{ 0,2,1 }, { 1,2,1 }, { 2,2,1 }, { 3,2,1 }, { 4,2,1 }, { 2,4,1 }, { 4,2,4 }, { 2,4,4 }, { 0,3,1 }, { 1,3,1 }, { 2,3,1 }, { 3,3,1 }, { 4,3,1 }, { 3,4,1 }, { 4,3,4 }, { 3,4,4 },
-		{ 0,0,2 }, { 1,0,2 }, { 2,0,2 }, { 3,0,2 }, { 4,0,2 }, { 0,4,2 }, { 2,0,4 }, { 3,0,4 }, { 0,1,2 }, { 1,1,2 }, { 2,1,2 }, { 3,1,2 }, { 4,1,2 }, { 1,4,2 }, { 2,1,4 }, { 3,1,4 },
-		{ 0,2,2 }, { 1,2,2 }, { 2,2,2 }, { 3,2,2 }, { 4,2,2 }, { 2,4,2 }, { 2,2,4 }, { 3,2,4 }, { 0,3,2 }, { 1,3,2 }, { 2,3,2 }, { 3,3,2 }, { 4,3,2 }, { 3,4,2 }, { 2,3,4 }, { 3,3,4 },
-		{ 0,0,3 }, { 1,0,3 }, { 2,0,3 }, { 3,0,3 }, { 4,0,3 }, { 0,4,3 }, { 0,0,4 }, { 1,0,4 }, { 0,1,3 }, { 1,1,3 }, { 2,1,3 }, { 3,1,3 }, { 4,1,3 }, { 1,4,3 }, { 0,1,4 }, { 1,1,4 },
-		{ 0,2,3 }, { 1,2,3 }, { 2,2,3 }, { 3,2,3 }, { 4,2,3 }, { 2,4,3 }, { 0,2,4 }, { 1,2,4 }, { 0,3,3 }, { 1,3,3 }, { 2,3,3 }, { 3,3,3 }, { 4,3,3 }, { 3,4,3 }, { 0,3,4 }, { 1,3,4 }
-	};
-
-	const deUint32 (& quints)[3] = quintsFromQ[Q];
-
-	for (int i = 0; i < numValues; i++)
-	{
-		dst[i].m	= m[i];
-		dst[i].tq	= quints[i];
-		dst[i].v	= (quints[i] << numBits) + m[i];
-	}
-}
-
-inline void decodeISEBitBlock (ISEDecodedResult* dst, BitAccessStream& data, int numBits)
-{
-	dst[0].m = data.getNext(numBits);
-	dst[0].v = dst[0].m;
-}
-
-void decodeISE (ISEDecodedResult* dst, int numValues, BitAccessStream& data, const ISEParams& params)
-{
-	if (params.mode == ISEMODE_TRIT)
-	{
-		const int numBlocks = divRoundUp(numValues, 5);
-		for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
-		{
-			const int numValuesInBlock = blockNdx == numBlocks-1 ? numValues - 5*(numBlocks-1) : 5;
-			decodeISETritBlock(&dst[5*blockNdx], numValuesInBlock, data, params.numBits);
-		}
-	}
-	else if (params.mode == ISEMODE_QUINT)
-	{
-		const int numBlocks = divRoundUp(numValues, 3);
-		for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
-		{
-			const int numValuesInBlock = blockNdx == numBlocks-1 ? numValues - 3*(numBlocks-1) : 3;
-			decodeISEQuintBlock(&dst[3*blockNdx], numValuesInBlock, data, params.numBits);
-		}
-	}
-	else
-	{
-		DE_ASSERT(params.mode == ISEMODE_PLAIN_BIT);
-		for (int i = 0; i < numValues; i++)
-			decodeISEBitBlock(&dst[i], data, params.numBits);
-	}
-}
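
The bit budget consumed by decodeISE() follows directly from the getNext() calls above: a full trit block reads five numBits-wide values plus 8 bits of packed trit data, a full quint block reads three values plus 7 bits, and plain-bit mode reads numBits per value. A hedged sketch of that accounting (intended to match the in-tree computeNumRequiredBits() helper defined earlier in this file, restated here as an assumption):

// Illustrative sketch, not part of this change.
enum IseModeSketch { MODE_PLAIN_BIT, MODE_TRIT, MODE_QUINT };

static int iseBitCostSketch (IseModeSketch mode, int numBits, int numValues)
{
	if (mode == MODE_TRIT)
		return numValues*numBits + (numValues*8 + 4) / 5;	// ceil(8n/5) trit bits (T01+T23+T4+T56+T7 per 5 values)
	if (mode == MODE_QUINT)
		return numValues*numBits + (numValues*7 + 2) / 3;	// ceil(7n/3) quint bits (Q012+Q34+Q56 per 3 values)
	return numValues*numBits;							// plain-bit mode
}
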
-
-ISEParams computeMaximumRangeISEParams (int numAvailableBits, int numValuesInSequence)
-{
-	int curBitsForTritMode		= 6;
-	int curBitsForQuintMode		= 5;
-	int curBitsForPlainBitMode	= 8;
-
-	while (true)
-	{
-		DE_ASSERT(curBitsForTritMode > 0 || curBitsForQuintMode > 0 || curBitsForPlainBitMode > 0);
-
-		const int tritRange			= curBitsForTritMode > 0		? (3 << curBitsForTritMode) - 1			: -1;
-		const int quintRange		= curBitsForQuintMode > 0		? (5 << curBitsForQuintMode) - 1		: -1;
-		const int plainBitRange		= curBitsForPlainBitMode > 0	? (1 << curBitsForPlainBitMode) - 1		: -1;
-		const int maxRange			= de::max(de::max(tritRange, quintRange), plainBitRange);
-
-		if (maxRange == tritRange)
-		{
-			const ISEParams params(ISEMODE_TRIT, curBitsForTritMode);
-			if (computeNumRequiredBits(params, numValuesInSequence) <= numAvailableBits)
-				return ISEParams(ISEMODE_TRIT, curBitsForTritMode);
-			curBitsForTritMode--;
-		}
-		else if (maxRange == quintRange)
-		{
-			const ISEParams params(ISEMODE_QUINT, curBitsForQuintMode);
-			if (computeNumRequiredBits(params, numValuesInSequence) <= numAvailableBits)
-				return ISEParams(ISEMODE_QUINT, curBitsForQuintMode);
-			curBitsForQuintMode--;
-		}
-		else
-		{
-			const ISEParams params(ISEMODE_PLAIN_BIT, curBitsForPlainBitMode);
-			DE_ASSERT(maxRange == plainBitRange);
-			if (computeNumRequiredBits(params, numValuesInSequence) <= numAvailableBits)
-				return ISEParams(ISEMODE_PLAIN_BIT, curBitsForPlainBitMode);
-			curBitsForPlainBitMode--;
-		}
-	}
-}
-
-void unquantizeColorEndpoints (deUint32* dst, const ISEDecodedResult* iseResults, int numEndpoints, const ISEParams& iseParams)
-{
-	if (iseParams.mode == ISEMODE_TRIT || iseParams.mode == ISEMODE_QUINT)
-	{
-		const int rangeCase				= iseParams.numBits*2 - (iseParams.mode == ISEMODE_TRIT ? 2 : 1);
-		DE_ASSERT(de::inRange(rangeCase, 0, 10));
-		static const deUint32	Ca[11]	= { 204, 113, 93, 54, 44, 26, 22, 13, 11, 6, 5 };
-		const deUint32			C		= Ca[rangeCase];
-
-		for (int endpointNdx = 0; endpointNdx < numEndpoints; endpointNdx++)
-		{
-			const deUint32 a = getBit(iseResults[endpointNdx].m, 0);
-			const deUint32 b = getBit(iseResults[endpointNdx].m, 1);
-			const deUint32 c = getBit(iseResults[endpointNdx].m, 2);
-			const deUint32 d = getBit(iseResults[endpointNdx].m, 3);
-			const deUint32 e = getBit(iseResults[endpointNdx].m, 4);
-			const deUint32 f = getBit(iseResults[endpointNdx].m, 5);
-
-			const deUint32 A = a == 0 ? 0 : (1<<9)-1;
-			const deUint32 B = rangeCase == 0	? 0
-							 : rangeCase == 1	? 0
-							 : rangeCase == 2	? (b << 8) |									(b << 4) |				(b << 2) |	(b << 1)
-							 : rangeCase == 3	? (b << 8) |												(b << 3) |	(b << 2)
-							 : rangeCase == 4	? (c << 8) | (b << 7) |										(c << 3) |	(b << 2) |	(c << 1) |	(b << 0)
-							 : rangeCase == 5	? (c << 8) | (b << 7) |													(c << 2) |	(b << 1) |	(c << 0)
-							 : rangeCase == 6	? (d << 8) | (c << 7) | (b << 6) |										(d << 2) |	(c << 1) |	(b << 0)
-							 : rangeCase == 7	? (d << 8) | (c << 7) | (b << 6) |													(d << 1) |	(c << 0)
-							 : rangeCase == 8	? (e << 8) | (d << 7) | (c << 6) | (b << 5) |										(e << 1) |	(d << 0)
-							 : rangeCase == 9	? (e << 8) | (d << 7) | (c << 6) | (b << 5) |													(e << 0)
-							 : rangeCase == 10	? (f << 8) | (e << 7) | (d << 6) | (c << 5) |	(b << 4) |										(f << 0)
-							 : (deUint32)-1;
-			DE_ASSERT(B != (deUint32)-1);
-
-			dst[endpointNdx] = (((iseResults[endpointNdx].tq*C + B) ^ A) >> 2) | (A & 0x80);
-		}
-	}
-	else
-	{
-		DE_ASSERT(iseParams.mode == ISEMODE_PLAIN_BIT);
-
-		for (int endpointNdx = 0; endpointNdx < numEndpoints; endpointNdx++)
-			dst[endpointNdx] = bitReplicationScale(iseResults[endpointNdx].v, iseParams.numBits, 8);
-	}
-}
-
-inline void bitTransferSigned (deInt32& a, deInt32& b)
-{
-	b >>= 1;
-	b |= a & 0x80;
-	a >>= 1;
-	a &= 0x3f;
-	if (isBitSet(a, 5))
-		a -= 0x40;
-}
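
A worked trace of bitTransferSigned() above may help: the top bit of 'a' is moved into bit 7 of 'b', and 'a' is then reinterpreted as a signed 6-bit quantity. Illustrative sketch, not part of this change:

#include <cassert>
#include <cstdint>

static void bitTransferSignedSketch (int32_t& a, int32_t& b)
{
	b >>= 1;
	b |= a & 0x80;		// transfer a's bit 7 into b
	a >>= 1;
	a &= 0x3f;
	if (a & 0x20)		// sign-extend the remaining 6-bit value
		a -= 0x40;
}

int main (void)
{
	int32_t a = 0xe5, b = 0x64;
	bitTransferSignedSketch(a, b);
	assert(a == -14);					// 0x32 sign-extended from 6 bits
	assert(b == ((0x64 >> 1) | 0x80));	// 0xb2: top bit came from a
	return 0;
}
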
-
-inline UVec4 clampedRGBA (const IVec4& rgba)
-{
-	return UVec4(de::clamp(rgba.x(), 0, 0xff),
-				 de::clamp(rgba.y(), 0, 0xff),
-				 de::clamp(rgba.z(), 0, 0xff),
-				 de::clamp(rgba.w(), 0, 0xff));
-}
-
-inline IVec4 blueContract (int r, int g, int b, int a)
-{
-	return IVec4((r+b)>>1, (g+b)>>1, b, a);
-}
-
-inline bool isColorEndpointModeHDR (deUint32 mode)
-{
-	return mode == 2	||
-		   mode == 3	||
-		   mode == 7	||
-		   mode == 11	||
-		   mode == 14	||
-		   mode == 15;
-}
-
-void decodeHDREndpointMode7 (UVec4& e0, UVec4& e1, deUint32 v0, deUint32 v1, deUint32 v2, deUint32 v3)
-{
-	const deUint32 m10		= getBit(v1, 7) | (getBit(v2, 7) << 1);
-	const deUint32 m23		= getBits(v0, 6, 7);
-	const deUint32 majComp	= m10 != 3	? m10
-							: m23 != 3	? m23
-							:			  0;
-	const deUint32 mode		= m10 != 3	? m23
-							: m23 != 3	? 4
-							:			  5;
-
-	deInt32			red		= (deInt32)getBits(v0, 0, 5);
-	deInt32			green	= (deInt32)getBits(v1, 0, 4);
-	deInt32			blue	= (deInt32)getBits(v2, 0, 4);
-	deInt32			scale	= (deInt32)getBits(v3, 0, 4);
-
-	{
-#define SHOR(DST_VAR, SHIFT, BIT_VAR) (DST_VAR) |= (BIT_VAR) << (SHIFT)
-#define ASSIGN_X_BITS(V0,S0, V1,S1, V2,S2, V3,S3, V4,S4, V5,S5, V6,S6) do { SHOR(V0,S0,x0); SHOR(V1,S1,x1); SHOR(V2,S2,x2); SHOR(V3,S3,x3); SHOR(V4,S4,x4); SHOR(V5,S5,x5); SHOR(V6,S6,x6); } while (false)
-
-		const deUint32	x0	= getBit(v1, 6);
-		const deUint32	x1	= getBit(v1, 5);
-		const deUint32	x2	= getBit(v2, 6);
-		const deUint32	x3	= getBit(v2, 5);
-		const deUint32	x4	= getBit(v3, 7);
-		const deUint32	x5	= getBit(v3, 6);
-		const deUint32	x6	= getBit(v3, 5);
-
-		deInt32&		R	= red;
-		deInt32&		G	= green;
-		deInt32&		B	= blue;
-		deInt32&		S	= scale;
-
-		switch (mode)
-		{
-			case 0: ASSIGN_X_BITS(R,9,  R,8,  R,7,  R,10,  R,6,  S,6,   S,5); break;
-			case 1: ASSIGN_X_BITS(R,8,  G,5,  R,7,  B,5,   R,6,  R,10,  R,9); break;
-			case 2: ASSIGN_X_BITS(R,9,  R,8,  R,7,  R,6,   S,7,  S,6,   S,5); break;
-			case 3: ASSIGN_X_BITS(R,8,  G,5,  R,7,  B,5,   R,6,  S,6,   S,5); break;
-			case 4: ASSIGN_X_BITS(G,6,  G,5,  B,6,  B,5,   R,6,  R,7,   S,5); break;
-			case 5: ASSIGN_X_BITS(G,6,  G,5,  B,6,  B,5,   R,6,  S,6,   S,5); break;
-			default:
-				DE_ASSERT(false);
-		}
-
-#undef ASSIGN_X_BITS
-#undef SHOR
-	}
-
-	static const int shiftAmounts[] = { 1, 1, 2, 3, 4, 5 };
-	DE_ASSERT(mode < DE_LENGTH_OF_ARRAY(shiftAmounts));
-
-	red		<<= shiftAmounts[mode];
-	green	<<= shiftAmounts[mode];
-	blue	<<= shiftAmounts[mode];
-	scale	<<= shiftAmounts[mode];
-
-	if (mode != 5)
-	{
-		green	= red - green;
-		blue	= red - blue;
-	}
-
-	if (majComp == 1)
-		std::swap(red, green);
-	else if (majComp == 2)
-		std::swap(red, blue);
-
-	e0 = UVec4(de::clamp(red	- scale,	0, 0xfff),
-			   de::clamp(green	- scale,	0, 0xfff),
-			   de::clamp(blue	- scale,	0, 0xfff),
-			   0x780);
-
-	e1 = UVec4(de::clamp(red,				0, 0xfff),
-			   de::clamp(green,				0, 0xfff),
-			   de::clamp(blue,				0, 0xfff),
-			   0x780);
-}
-
-void decodeHDREndpointMode11 (UVec4& e0, UVec4& e1, deUint32 v0, deUint32 v1, deUint32 v2, deUint32 v3, deUint32 v4, deUint32 v5)
-{
-	const deUint32 major = (getBit(v5, 7) << 1) | getBit(v4, 7);
-
-	if (major == 3)
-	{
-		e0 = UVec4(v0<<4, v2<<4, getBits(v4,0,6)<<5, 0x780);
-		e1 = UVec4(v1<<4, v3<<4, getBits(v5,0,6)<<5, 0x780);
-	}
-	else
-	{
-		const deUint32 mode = (getBit(v3, 7) << 2) | (getBit(v2, 7) << 1) | getBit(v1, 7);
-
-		deInt32 a	= (deInt32)((getBit(v1, 6) << 8) | v0);
-		deInt32 c	= (deInt32)(getBits(v1, 0, 5));
-		deInt32 b0	= (deInt32)(getBits(v2, 0, 5));
-		deInt32 b1	= (deInt32)(getBits(v3, 0, 5));
-		deInt32 d0	= (deInt32)(getBits(v4, 0, 4));
-		deInt32 d1	= (deInt32)(getBits(v5, 0, 4));
-
-		{
-#define SHOR(DST_VAR, SHIFT, BIT_VAR) (DST_VAR) |= (BIT_VAR) << (SHIFT)
-#define ASSIGN_X_BITS(V0,S0, V1,S1, V2,S2, V3,S3, V4,S4, V5,S5) do { SHOR(V0,S0,x0); SHOR(V1,S1,x1); SHOR(V2,S2,x2); SHOR(V3,S3,x3); SHOR(V4,S4,x4); SHOR(V5,S5,x5); } while (false)
-
-			const deUint32 x0 = getBit(v2, 6);
-			const deUint32 x1 = getBit(v3, 6);
-			const deUint32 x2 = getBit(v4, 6);
-			const deUint32 x3 = getBit(v5, 6);
-			const deUint32 x4 = getBit(v4, 5);
-			const deUint32 x5 = getBit(v5, 5);
-
-			switch (mode)
-			{
-				case 0: ASSIGN_X_BITS(b0,6,  b1,6,   d0,6,  d1,6,  d0,5,  d1,5); break;
-				case 1: ASSIGN_X_BITS(b0,6,  b1,6,   b0,7,  b1,7,  d0,5,  d1,5); break;
-				case 2: ASSIGN_X_BITS(a,9,   c,6,    d0,6,  d1,6,  d0,5,  d1,5); break;
-				case 3: ASSIGN_X_BITS(b0,6,  b1,6,   a,9,   c,6,   d0,5,  d1,5); break;
-				case 4: ASSIGN_X_BITS(b0,6,  b1,6,   b0,7,  b1,7,  a,9,   a,10); break;
-				case 5: ASSIGN_X_BITS(a,9,   a,10,   c,7,   c,6,   d0,5,  d1,5); break;
-				case 6: ASSIGN_X_BITS(b0,6,  b1,6,   a,11,  c,6,   a,9,   a,10); break;
-				case 7: ASSIGN_X_BITS(a,9,   a,10,   a,11,  c,6,   d0,5,  d1,5); break;
-				default:
-					DE_ASSERT(false);
-			}
-
-#undef ASSIGN_X_BITS
-#undef SHOR
-		}
-
-		static const int numDBits[] = { 7, 6, 7, 6, 5, 6, 5, 6 };
-		DE_ASSERT(mode < DE_LENGTH_OF_ARRAY(numDBits));
-
-		d0 = signExtend(d0, numDBits[mode]);
-		d1 = signExtend(d1, numDBits[mode]);
-
-		const int shiftAmount = (mode >> 1) ^ 3;
-		a	<<= shiftAmount;
-		c	<<= shiftAmount;
-		b0	<<= shiftAmount;
-		b1	<<= shiftAmount;
-		d0	<<= shiftAmount;
-		d1	<<= shiftAmount;
-
-		e0 = UVec4(de::clamp(a-c,			0, 0xfff),
-				   de::clamp(a-b0-c-d0,		0, 0xfff),
-				   de::clamp(a-b1-c-d1,		0, 0xfff),
-				   0x780);
-
-		e1 = UVec4(de::clamp(a,				0, 0xfff),
-				   de::clamp(a-b0,			0, 0xfff),
-				   de::clamp(a-b1,			0, 0xfff),
-				   0x780);
-
-		if (major == 1)
-		{
-			std::swap(e0.x(), e0.y());
-			std::swap(e1.x(), e1.y());
-		}
-		else if (major == 2)
-		{
-			std::swap(e0.x(), e0.z());
-			std::swap(e1.x(), e1.z());
-		}
-	}
-}
-
-void decodeHDREndpointMode15(UVec4& e0, UVec4& e1, deUint32 v0, deUint32 v1, deUint32 v2, deUint32 v3, deUint32 v4, deUint32 v5, deUint32 v6In, deUint32 v7In)
-{
-	decodeHDREndpointMode11(e0, e1, v0, v1, v2, v3, v4, v5);
-
-	const deUint32	mode	= (getBit(v7In, 7) << 1) | getBit(v6In, 7);
-	deInt32			v6		= (deInt32)getBits(v6In, 0, 6);
-	deInt32			v7		= (deInt32)getBits(v7In, 0, 6);
-
-	if (mode == 3)
-	{
-		e0.w() = v6 << 5;
-		e1.w() = v7 << 5;
-	}
-	else
-	{
-		v6 |= (v7 << (mode+1)) & 0x780;
-		v7 &= (0x3f >> mode);
-		v7 ^= 0x20 >> mode;
-		v7 -= 0x20 >> mode;
-		v6 <<= 4-mode;
-		v7 <<= 4-mode;
-
-		v7 += v6;
-		v7 = de::clamp(v7, 0, 0xfff);
-		e0.w() = v6;
-		e1.w() = v7;
-	}
-}
-
-void decodeColorEndpoints (ColorEndpointPair* dst, const deUint32* unquantizedEndpoints, const deUint32* endpointModes, int numPartitions)
-{
-	int unquantizedNdx = 0;
-
-	for (int partitionNdx = 0; partitionNdx < numPartitions; partitionNdx++)
-	{
-		const deUint32		endpointMode	= endpointModes[partitionNdx];
-		const deUint32*		v				= &unquantizedEndpoints[unquantizedNdx];
-		UVec4&				e0				= dst[partitionNdx].e0;
-		UVec4&				e1				= dst[partitionNdx].e1;
-
-		unquantizedNdx += computeNumColorEndpointValues(endpointMode);
-
-		switch (endpointMode)
-		{
-			case 0:
-				e0 = UVec4(v[0], v[0], v[0], 0xff);
-				e1 = UVec4(v[1], v[1], v[1], 0xff);
-				break;
-
-			case 1:
-			{
-				const deUint32 L0 = (v[0] >> 2) | (getBits(v[1], 6, 7) << 6);
-				const deUint32 L1 = de::min(0xffu, L0 + getBits(v[1], 0, 5));
-				e0 = UVec4(L0, L0, L0, 0xff);
-				e1 = UVec4(L1, L1, L1, 0xff);
-				break;
-			}
-
-			case 2:
-			{
-				const deUint32 v1Gr		= v[1] >= v[0];
-				const deUint32 y0		= v1Gr ? v[0]<<4 : (v[1]<<4) + 8;
-				const deUint32 y1		= v1Gr ? v[1]<<4 : (v[0]<<4) - 8;
-
-				e0 = UVec4(y0, y0, y0, 0x780);
-				e1 = UVec4(y1, y1, y1, 0x780);
-				break;
-			}
-
-			case 3:
-			{
-				const bool		m	= isBitSet(v[0], 7);
-				const deUint32	y0	= m ? (getBits(v[1], 5, 7) << 9) | (getBits(v[0], 0, 6) << 2)
-										: (getBits(v[1], 4, 7) << 8) | (getBits(v[0], 0, 6) << 1);
-				const deUint32	d	= m ? getBits(v[1], 0, 4) << 2
-										: getBits(v[1], 0, 3) << 1;
-				const deUint32	y1	= de::min(0xfffu, y0+d);
-
-				e0 = UVec4(y0, y0, y0, 0x780);
-				e1 = UVec4(y1, y1, y1, 0x780);
-				break;
-			}
-
-			case 4:
-				e0 = UVec4(v[0], v[0], v[0], v[2]);
-				e1 = UVec4(v[1], v[1], v[1], v[3]);
-				break;
-
-			case 5:
-			{
-				deInt32 v0 = (deInt32)v[0];
-				deInt32 v1 = (deInt32)v[1];
-				deInt32 v2 = (deInt32)v[2];
-				deInt32 v3 = (deInt32)v[3];
-				bitTransferSigned(v1, v0);
-				bitTransferSigned(v3, v2);
-
-				e0 = clampedRGBA(IVec4(v0,		v0,		v0,		v2));
-				e1 = clampedRGBA(IVec4(v0+v1,	v0+v1,	v0+v1,	v2+v3));
-				break;
-			}
-
-			case 6:
-				e0 = UVec4((v[0]*v[3]) >> 8,	(v[1]*v[3]) >> 8,	(v[2]*v[3]) >> 8,	0xff);
-				e1 = UVec4(v[0],				v[1],				v[2],				0xff);
-				break;
-
-			case 7:
-				decodeHDREndpointMode7(e0, e1, v[0], v[1], v[2], v[3]);
-				break;
-
-			case 8:
-				if (v[1]+v[3]+v[5] >= v[0]+v[2]+v[4])
-				{
-					e0 = UVec4(v[0], v[2], v[4], 0xff);
-					e1 = UVec4(v[1], v[3], v[5], 0xff);
-				}
-				else
-				{
-					e0 = blueContract(v[1], v[3], v[5], 0xff).asUint();
-					e1 = blueContract(v[0], v[2], v[4], 0xff).asUint();
-				}
-				break;
-
-			case 9:
-			{
-				deInt32 v0 = (deInt32)v[0];
-				deInt32 v1 = (deInt32)v[1];
-				deInt32 v2 = (deInt32)v[2];
-				deInt32 v3 = (deInt32)v[3];
-				deInt32 v4 = (deInt32)v[4];
-				deInt32 v5 = (deInt32)v[5];
-				bitTransferSigned(v1, v0);
-				bitTransferSigned(v3, v2);
-				bitTransferSigned(v5, v4);
-
-				if (v1+v3+v5 >= 0)
-				{
-					e0 = clampedRGBA(IVec4(v0,		v2,		v4,		0xff));
-					e1 = clampedRGBA(IVec4(v0+v1,	v2+v3,	v4+v5,	0xff));
-				}
-				else
-				{
-					e0 = clampedRGBA(blueContract(v0+v1,	v2+v3,	v4+v5,	0xff));
-					e1 = clampedRGBA(blueContract(v0,		v2,		v4,		0xff));
-				}
-				break;
-			}
-
-			case 10:
-				e0 = UVec4((v[0]*v[3]) >> 8,	(v[1]*v[3]) >> 8,	(v[2]*v[3]) >> 8,	v[4]);
-				e1 = UVec4(v[0],				v[1],				v[2],				v[5]);
-				break;
-
-			case 11:
-				decodeHDREndpointMode11(e0, e1, v[0], v[1], v[2], v[3], v[4], v[5]);
-				break;
-
-			case 12:
-				if (v[1]+v[3]+v[5] >= v[0]+v[2]+v[4])
-				{
-					e0 = UVec4(v[0], v[2], v[4], v[6]);
-					e1 = UVec4(v[1], v[3], v[5], v[7]);
-				}
-				else
-				{
-					e0 = clampedRGBA(blueContract(v[1], v[3], v[5], v[7]));
-					e1 = clampedRGBA(blueContract(v[0], v[2], v[4], v[6]));
-				}
-				break;
-
-			case 13:
-			{
-				deInt32 v0 = (deInt32)v[0];
-				deInt32 v1 = (deInt32)v[1];
-				deInt32 v2 = (deInt32)v[2];
-				deInt32 v3 = (deInt32)v[3];
-				deInt32 v4 = (deInt32)v[4];
-				deInt32 v5 = (deInt32)v[5];
-				deInt32 v6 = (deInt32)v[6];
-				deInt32 v7 = (deInt32)v[7];
-				bitTransferSigned(v1, v0);
-				bitTransferSigned(v3, v2);
-				bitTransferSigned(v5, v4);
-				bitTransferSigned(v7, v6);
-
-				if (v1+v3+v5 >= 0)
-				{
-					e0 = clampedRGBA(IVec4(v0,		v2,		v4,		v6));
-					e1 = clampedRGBA(IVec4(v0+v1,	v2+v3,	v4+v5,	v6+v7));
-				}
-				else
-				{
-					e0 = clampedRGBA(blueContract(v0+v1,	v2+v3,	v4+v5,	v6+v7));
-					e1 = clampedRGBA(blueContract(v0,		v2,		v4,		v6));
-				}
-
-				break;
-			}
-
-			case 14:
-				decodeHDREndpointMode11(e0, e1, v[0], v[1], v[2], v[3], v[4], v[5]);
-				e0.w() = v[6];
-				e1.w() = v[7];
-				break;
-
-			case 15:
-				decodeHDREndpointMode15(e0, e1, v[0], v[1], v[2], v[3], v[4], v[5], v[6], v[7]);
-				break;
-
-			default:
-				DE_ASSERT(false);
-		}
-	}
-}
-
-void computeColorEndpoints (ColorEndpointPair* dst, const Block128& blockData, const deUint32* endpointModes, int numPartitions, int numColorEndpointValues, const ISEParams& iseParams, int numBitsAvailable)
-{
-	const int			colorEndpointDataStart = numPartitions == 1 ? 17 : 29;
-	ISEDecodedResult	colorEndpointData[18];
-
-	{
-		BitAccessStream dataStream(blockData, colorEndpointDataStart, numBitsAvailable, true);
-		decodeISE(&colorEndpointData[0], numColorEndpointValues, dataStream, iseParams);
-	}
-
-	{
-		deUint32 unquantizedEndpoints[18];
-		unquantizeColorEndpoints(&unquantizedEndpoints[0], &colorEndpointData[0], numColorEndpointValues, iseParams);
-		decodeColorEndpoints(dst, &unquantizedEndpoints[0], &endpointModes[0], numPartitions);
-	}
-}
-
-void unquantizeWeights (deUint32 dst[64], const ISEDecodedResult* weightGrid, const ASTCBlockMode& blockMode)
-{
-	const int			numWeights	= computeNumWeights(blockMode);
-	const ISEParams&	iseParams	= blockMode.weightISEParams;
-
-	if (iseParams.mode == ISEMODE_TRIT || iseParams.mode == ISEMODE_QUINT)
-	{
-		const int rangeCase = iseParams.numBits*2 + (iseParams.mode == ISEMODE_QUINT ? 1 : 0);
-
-		if (rangeCase == 0 || rangeCase == 1)
-		{
-			static const deUint32 map0[3]	= { 0, 32, 63 };
-			static const deUint32 map1[5]	= { 0, 16, 32, 47, 63 };
-			const deUint32* const map		= rangeCase == 0 ? &map0[0] : &map1[0];
-			for (int i = 0; i < numWeights; i++)
-			{
-				DE_ASSERT(weightGrid[i].v < (rangeCase == 0 ? 3u : 5u));
-				dst[i] = map[weightGrid[i].v];
-			}
-		}
-		else
-		{
-			DE_ASSERT(rangeCase <= 6);
-			static const deUint32	Ca[5]	= { 50, 28, 23, 13, 11 };
-			const deUint32			C		= Ca[rangeCase-2];
-
-			for (int weightNdx = 0; weightNdx < numWeights; weightNdx++)
-			{
-				const deUint32 a = getBit(weightGrid[weightNdx].m, 0);
-				const deUint32 b = getBit(weightGrid[weightNdx].m, 1);
-				const deUint32 c = getBit(weightGrid[weightNdx].m, 2);
-
-				const deUint32 A = a == 0 ? 0 : (1<<7)-1;
-				const deUint32 B = rangeCase == 2 ? 0
-								 : rangeCase == 3 ? 0
-								 : rangeCase == 4 ? (b << 6) |					(b << 2) |				(b << 0)
-								 : rangeCase == 5 ? (b << 6) |								(b << 1)
-								 : rangeCase == 6 ? (c << 6) | (b << 5) |					(c << 1) |	(b << 0)
-								 : (deUint32)-1;
-
-				dst[weightNdx] = (((weightGrid[weightNdx].tq*C + B) ^ A) >> 2) | (A & 0x20);
-			}
-		}
-	}
-	else
-	{
-		DE_ASSERT(iseParams.mode == ISEMODE_PLAIN_BIT);
-
-		for (int weightNdx = 0; weightNdx < numWeights; weightNdx++)
-			dst[weightNdx] = bitReplicationScale(weightGrid[weightNdx].v, iseParams.numBits, 6);
-	}
-
-	for (int weightNdx = 0; weightNdx < numWeights; weightNdx++)
-		dst[weightNdx] += dst[weightNdx] > 32 ? 1 : 0;
-
-	// Initialize nonexistent weights to poison values
-	for (int weightNdx = numWeights; weightNdx < 64; weightNdx++)
-		dst[weightNdx] = ~0u;
-
-}
-
-void interpolateWeights (TexelWeightPair* dst, const deUint32* unquantizedWeights, int blockWidth, int blockHeight, const ASTCBlockMode& blockMode)
-{
-	const int		numWeightsPerTexel	= blockMode.isDualPlane ? 2 : 1;
-	const deUint32	scaleX				= (1024 + blockWidth/2) / (blockWidth-1);
-	const deUint32	scaleY				= (1024 + blockHeight/2) / (blockHeight-1);
-
-	for (int texelY = 0; texelY < blockHeight; texelY++)
-	{
-		for (int texelX = 0; texelX < blockWidth; texelX++)
-		{
-			const deUint32 gX	= (scaleX*texelX*(blockMode.weightGridWidth-1) + 32) >> 6;
-			const deUint32 gY	= (scaleY*texelY*(blockMode.weightGridHeight-1) + 32) >> 6;
-			const deUint32 jX	= gX >> 4;
-			const deUint32 jY	= gY >> 4;
-			const deUint32 fX	= gX & 0xf;
-			const deUint32 fY	= gY & 0xf;
-			const deUint32 w11	= (fX*fY + 8) >> 4;
-			const deUint32 w10	= fY - w11;
-			const deUint32 w01	= fX - w11;
-			const deUint32 w00	= 16 - fX - fY + w11;
-			const deUint32 v0	= jY*blockMode.weightGridWidth + jX;
-
-			for (int texelWeightNdx = 0; texelWeightNdx < numWeightsPerTexel; texelWeightNdx++)
-			{
-				const deUint32 p00	= unquantizedWeights[(v0)									* numWeightsPerTexel + texelWeightNdx];
-				const deUint32 p01	= unquantizedWeights[(v0 + 1)								* numWeightsPerTexel + texelWeightNdx];
-				const deUint32 p10	= unquantizedWeights[(v0 + blockMode.weightGridWidth)		* numWeightsPerTexel + texelWeightNdx];
-				const deUint32 p11	= unquantizedWeights[(v0 + blockMode.weightGridWidth + 1)	* numWeightsPerTexel + texelWeightNdx];
-
-				dst[texelY*blockWidth + texelX].w[texelWeightNdx] = (p00*w00 + p01*w01 + p10*w10 + p11*w11 + 8) >> 4;
-			}
-		}
-	}
-}
-
-void computeTexelWeights (TexelWeightPair* dst, const Block128& blockData, int blockWidth, int blockHeight, const ASTCBlockMode& blockMode)
-{
-	ISEDecodedResult weightGrid[64];
-
-	{
-		BitAccessStream dataStream(blockData, 127, computeNumRequiredBits(blockMode.weightISEParams, computeNumWeights(blockMode)), false);
-		decodeISE(&weightGrid[0], computeNumWeights(blockMode), dataStream, blockMode.weightISEParams);
-	}
-
-	{
-		deUint32 unquantizedWeights[64];
-		unquantizeWeights(&unquantizedWeights[0], &weightGrid[0], blockMode);
-		interpolateWeights(dst, &unquantizedWeights[0], blockWidth, blockHeight, blockMode);
-	}
-}
-
-inline deUint32 hash52 (deUint32 v)
-{
-	deUint32 p = v;
-	p ^= p >> 15;	p -= p << 17;	p += p << 7;	p += p << 4;
-	p ^= p >>  5;	p += p << 16;	p ^= p >> 7;	p ^= p >> 3;
-	p ^= p <<  6;	p ^= p >> 17;
-	return p;
-}
-
-int computeTexelPartition (deUint32 seedIn, deUint32 xIn, deUint32 yIn, deUint32 zIn, int numPartitions, bool smallBlock)
-{
-	DE_ASSERT(zIn == 0);
-	const deUint32	x		= smallBlock ? xIn << 1 : xIn;
-	const deUint32	y		= smallBlock ? yIn << 1 : yIn;
-	const deUint32	z		= smallBlock ? zIn << 1 : zIn;
-	const deUint32	seed	= seedIn + 1024*(numPartitions-1);
-	const deUint32	rnum	= hash52(seed);
-	deUint8			seed1	= (deUint8)( rnum							& 0xf);
-	deUint8			seed2	= (deUint8)((rnum >>  4)					& 0xf);
-	deUint8			seed3	= (deUint8)((rnum >>  8)					& 0xf);
-	deUint8			seed4	= (deUint8)((rnum >> 12)					& 0xf);
-	deUint8			seed5	= (deUint8)((rnum >> 16)					& 0xf);
-	deUint8			seed6	= (deUint8)((rnum >> 20)					& 0xf);
-	deUint8			seed7	= (deUint8)((rnum >> 24)					& 0xf);
-	deUint8			seed8	= (deUint8)((rnum >> 28)					& 0xf);
-	deUint8			seed9	= (deUint8)((rnum >> 18)					& 0xf);
-	deUint8			seed10	= (deUint8)((rnum >> 22)					& 0xf);
-	deUint8			seed11	= (deUint8)((rnum >> 26)					& 0xf);
-	deUint8			seed12	= (deUint8)(((rnum >> 30) | (rnum << 2))	& 0xf);
-
-	seed1  = (deUint8)(seed1  * seed1 );
-	seed2  = (deUint8)(seed2  * seed2 );
-	seed3  = (deUint8)(seed3  * seed3 );
-	seed4  = (deUint8)(seed4  * seed4 );
-	seed5  = (deUint8)(seed5  * seed5 );
-	seed6  = (deUint8)(seed6  * seed6 );
-	seed7  = (deUint8)(seed7  * seed7 );
-	seed8  = (deUint8)(seed8  * seed8 );
-	seed9  = (deUint8)(seed9  * seed9 );
-	seed10 = (deUint8)(seed10 * seed10);
-	seed11 = (deUint8)(seed11 * seed11);
-	seed12 = (deUint8)(seed12 * seed12);
-
-	const int shA = (seed & 2) != 0		? 4		: 5;
-	const int shB = numPartitions == 3	? 6		: 5;
-	const int sh1 = (seed & 1) != 0		? shA	: shB;
-	const int sh2 = (seed & 1) != 0		? shB	: shA;
-	const int sh3 = (seed & 0x10) != 0	? sh1	: sh2;
-
-	seed1  = (deUint8)(seed1  >> sh1);
-	seed2  = (deUint8)(seed2  >> sh2);
-	seed3  = (deUint8)(seed3  >> sh1);
-	seed4  = (deUint8)(seed4  >> sh2);
-	seed5  = (deUint8)(seed5  >> sh1);
-	seed6  = (deUint8)(seed6  >> sh2);
-	seed7  = (deUint8)(seed7  >> sh1);
-	seed8  = (deUint8)(seed8  >> sh2);
-	seed9  = (deUint8)(seed9  >> sh3);
-	seed10 = (deUint8)(seed10 >> sh3);
-	seed11 = (deUint8)(seed11 >> sh3);
-	seed12 = (deUint8)(seed12 >> sh3);
-
-	const int a =						0x3f & (seed1*x + seed2*y + seed11*z + (rnum >> 14));
-	const int b =						0x3f & (seed3*x + seed4*y + seed12*z + (rnum >> 10));
-	const int c = numPartitions >= 3 ?	0x3f & (seed5*x + seed6*y + seed9*z  + (rnum >>  6))	: 0;
-	const int d = numPartitions >= 4 ?	0x3f & (seed7*x + seed8*y + seed10*z + (rnum >>  2))	: 0;
-
-	return a >= b && a >= c && a >= d	? 0
-		 : b >= c && b >= d				? 1
-		 : c >= d						? 2
-		 :								  3;
-}
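
As an illustration of how the partition function above is consumed, a hedged sketch that fills a per-texel partition map for one block, the same way setTexelColors() below selects a color endpoint pair per texel (it assumes the computeTexelPartition() definition above):

// Illustrative sketch, not part of this change.
static void buildPartitionMapSketch (int* dstMap, deUint32 partitionIndexSeed,
									 int numPartitions, int blockWidth, int blockHeight)
{
	const bool smallBlock = blockWidth*blockHeight < 31;	// same threshold as setTexelColors()

	for (int y = 0; y < blockHeight; y++)
	for (int x = 0; x < blockWidth; x++)
		dstMap[y*blockWidth + x] = numPartitions == 1
			? 0
			: computeTexelPartition(partitionIndexSeed, x, y, 0, numPartitions, smallBlock);
}
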
-
-void setTexelColors (void* dst, ColorEndpointPair* colorEndpoints, TexelWeightPair* texelWeights, int ccs, deUint32 partitionIndexSeed,
-							int numPartitions, int blockWidth, int blockHeight, bool isSRGB, bool isLDRMode, const deUint32* colorEndpointModes)
-{
-	const bool	smallBlock = blockWidth*blockHeight < 31;
-	bool		isHDREndpoint[4];
-
-	for (int i = 0; i < numPartitions; i++)
-		isHDREndpoint[i] = isColorEndpointModeHDR(colorEndpointModes[i]);
-
-	for (int texelY = 0; texelY < blockHeight; texelY++)
-	for (int texelX = 0; texelX < blockWidth; texelX++)
-	{
-		const int				texelNdx			= texelY*blockWidth + texelX;
-		const int				colorEndpointNdx	= numPartitions == 1 ? 0 : computeTexelPartition(partitionIndexSeed, texelX, texelY, 0, numPartitions, smallBlock);
-		DE_ASSERT(colorEndpointNdx < numPartitions);
-		const UVec4&			e0					= colorEndpoints[colorEndpointNdx].e0;
-		const UVec4&			e1					= colorEndpoints[colorEndpointNdx].e1;
-		const TexelWeightPair&	weight				= texelWeights[texelNdx];
-
-		if (isLDRMode && isHDREndpoint[colorEndpointNdx])
-		{
-			if (isSRGB)
-			{
-				((deUint8*)dst)[texelNdx*4 + 0] = 0xff;
-				((deUint8*)dst)[texelNdx*4 + 1] = 0;
-				((deUint8*)dst)[texelNdx*4 + 2] = 0xff;
-				((deUint8*)dst)[texelNdx*4 + 3] = 0xff;
-			}
-			else
-			{
-				((float*)dst)[texelNdx*4 + 0] = 1.0f;
-				((float*)dst)[texelNdx*4 + 1] = 0;
-				((float*)dst)[texelNdx*4 + 2] = 1.0f;
-				((float*)dst)[texelNdx*4 + 3] = 1.0f;
-			}
-		}
-		else
-		{
-			for (int channelNdx = 0; channelNdx < 4; channelNdx++)
-			{
-				if (!isHDREndpoint[colorEndpointNdx] || (channelNdx == 3 && colorEndpointModes[colorEndpointNdx] == 14)) // \note Alpha for mode 14 is treated the same as LDR.
-				{
-					const deUint32 c0	= (e0[channelNdx] << 8) | (isSRGB ? 0x80 : e0[channelNdx]);
-					const deUint32 c1	= (e1[channelNdx] << 8) | (isSRGB ? 0x80 : e1[channelNdx]);
-					const deUint32 w	= weight.w[ccs == channelNdx ? 1 : 0];
-					const deUint32 c	= (c0*(64-w) + c1*w + 32) / 64;
-
-					if (isSRGB)
-						((deUint8*)dst)[texelNdx*4 + channelNdx] = (deUint8)((c & 0xff00) >> 8);
-					else
-						((float*)dst)[texelNdx*4 + channelNdx] = c == 65535 ? 1.0f : (float)c / 65536.0f;
-				}
-				else
-				{
-					DE_STATIC_ASSERT((isSameType<deFloat16, deUint16>::V));
-					const deUint32		c0	= e0[channelNdx] << 4;
-					const deUint32		c1	= e1[channelNdx] << 4;
-					const deUint32		w	= weight.w[ccs == channelNdx ? 1 : 0];
-					const deUint32		c	= (c0*(64-w) + c1*w + 32) / 64;
-					const deUint32		e	= getBits(c, 11, 15);
-					const deUint32		m	= getBits(c, 0, 10);
-					const deUint32		mt	= m < 512		? 3*m
-											: m >= 1536		? 5*m - 2048
-											:				  4*m - 512;
-					const deFloat16		cf	= (deFloat16)((e << 10) + (mt >> 3));
-
-					((float*)dst)[texelNdx*4 + channelNdx] = deFloat16To32(isFloat16InfOrNan(cf) ? 0x7bff : cf);
-				}
-			}
-		}
-	}
-}
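
For the non-sRGB LDR path above, each channel is expanded to 16 bits, blended with the 6-bit weight, and normalized with a 65535 -> 1.0 special case. A minimal sketch of that arithmetic (illustrative only, not part of this change):

static float ldrTexelValueSketch (deUint32 e0, deUint32 e1, deUint32 weight)
{
	const deUint32 c0 = (e0 << 8) | e0;							// expand 8-bit endpoint to 16 bits
	const deUint32 c1 = (e1 << 8) | e1;
	const deUint32 c  = (c0*(64-weight) + c1*weight + 32) / 64;	// weight is in [0, 64]

	return c == 65535 ? 1.0f : (float)c / 65536.0f;
}
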
-
-void decompressASTCBlock (void* dst, const Block128& blockData, int blockWidth, int blockHeight, bool isSRGB, bool isLDR)
-{
-	DE_ASSERT(isLDR || !isSRGB);
-
-	// Decode block mode.
-
-	const ASTCBlockMode blockMode = getASTCBlockMode(blockData.getBits(0, 10));
-
-	// Check for block mode errors.
-
-	if (blockMode.isError)
-	{
-		setASTCErrorColorBlock(dst, blockWidth, blockHeight, isSRGB);
-		return;
-	}
-
-	// Separate path for void-extent.
-
-	if (blockMode.isVoidExtent)
-	{
-		decodeVoidExtentBlock(dst, blockData, blockWidth, blockHeight, isSRGB, isLDR);
-		return;
-	}
-
-	// Compute weight grid values.
-
-	const int numWeights			= computeNumWeights(blockMode);
-	const int numWeightDataBits		= computeNumRequiredBits(blockMode.weightISEParams, numWeights);
-	const int numPartitions			= (int)blockData.getBits(11, 12) + 1;
-
-	// Check for errors in weight grid, partition and dual-plane parameters.
-
-	if (numWeights > 64								||
-		numWeightDataBits > 96						||
-		numWeightDataBits < 24						||
-		blockMode.weightGridWidth > blockWidth		||
-		blockMode.weightGridHeight > blockHeight	||
-		(numPartitions == 4 && blockMode.isDualPlane))
-	{
-		setASTCErrorColorBlock(dst, blockWidth, blockHeight, isSRGB);
-		return;
-	}
-
-	// Compute number of bits available for color endpoint data.
-
-	const bool	isSingleUniqueCem			= numPartitions == 1 || blockData.getBits(23, 24) == 0;
-	const int	numConfigDataBits			= (numPartitions == 1 ? 17 : isSingleUniqueCem ? 29 : 25 + 3*numPartitions) +
-											  (blockMode.isDualPlane ? 2 : 0);
-	const int	numBitsForColorEndpoints	= 128 - numWeightDataBits - numConfigDataBits;
-	const int	extraCemBitsStart			= 127 - numWeightDataBits - (isSingleUniqueCem		? -1
-																		: numPartitions == 4	? 7
-																		: numPartitions == 3	? 4
-																		: numPartitions == 2	? 1
-																		: 0);
-	// Decode color endpoint modes.
-
-	deUint32 colorEndpointModes[4];
-	decodeColorEndpointModes(&colorEndpointModes[0], blockData, numPartitions, extraCemBitsStart);
-
-	const int numColorEndpointValues = computeNumColorEndpointValues(colorEndpointModes, numPartitions);
-
-	// Check for errors in color endpoint value count.
-
-	if (numColorEndpointValues > 18 || numBitsForColorEndpoints < divRoundUp(13*numColorEndpointValues, 5))
-	{
-		setASTCErrorColorBlock(dst, blockWidth, blockHeight, isSRGB);
-		return;
-	}
-
-	// Compute color endpoints.
-
-	ColorEndpointPair colorEndpoints[4];
-	computeColorEndpoints(&colorEndpoints[0], blockData, &colorEndpointModes[0], numPartitions, numColorEndpointValues,
-						  computeMaximumRangeISEParams(numBitsForColorEndpoints, numColorEndpointValues), numBitsForColorEndpoints);
-
-	// Compute texel weights.
-
-	TexelWeightPair texelWeights[ASTC_MAX_BLOCK_WIDTH*ASTC_MAX_BLOCK_HEIGHT];
-	computeTexelWeights(&texelWeights[0], blockData, blockWidth, blockHeight, blockMode);
-
-	// Set texel colors.
-
-	const int		ccs						= blockMode.isDualPlane ? (int)blockData.getBits(extraCemBitsStart-2, extraCemBitsStart-1) : -1;
-	const deUint32	partitionIndexSeed		= numPartitions > 1 ? blockData.getBits(13, 22) : (deUint32)-1;
-
-	setTexelColors(dst, &colorEndpoints[0], &texelWeights[0], ccs, partitionIndexSeed, numPartitions, blockWidth, blockHeight, isSRGB, isLDR, &colorEndpointModes[0]);
-}
-
-} // ASTCDecompressInternal
-
-void decompressASTC (const PixelBufferAccess& dst, const deUint8* data, bool isSRGB, bool isLDR)
-{
-	using namespace ASTCDecompressInternal;
-
-	DE_ASSERT(isLDR || !isSRGB);
-
-	const int blockWidth = dst.getWidth();
-	const int blockHeight = dst.getHeight();
-
-	union
-	{
-		deUint8		sRGB[ASTC_MAX_BLOCK_WIDTH*ASTC_MAX_BLOCK_HEIGHT*4];
-		float		linear[ASTC_MAX_BLOCK_WIDTH*ASTC_MAX_BLOCK_HEIGHT*4];
-	} decompressedBuffer;
-
-	const Block128 blockData(data);
-	decompressASTCBlock(isSRGB ? (void*)&decompressedBuffer.sRGB[0] : (void*)&decompressedBuffer.linear[0],
-						blockData, dst.getWidth(), dst.getHeight(), isSRGB, isLDR);
-
-	if (isSRGB)
-	{
-		for (int i = 0; i < blockHeight; i++)
-		for (int j = 0; j < blockWidth; j++)
-		{
-			dst.setPixel(IVec4(decompressedBuffer.sRGB[(i*blockWidth + j) * 4 + 0],
-									decompressedBuffer.sRGB[(i*blockWidth + j) * 4 + 1],
-									decompressedBuffer.sRGB[(i*blockWidth + j) * 4 + 2],
-									decompressedBuffer.sRGB[(i*blockWidth + j) * 4 + 3]), j, i);
-		}
-	}
-	else
-	{
-		for (int i = 0; i < blockHeight; i++)
-		for (int j = 0; j < blockWidth; j++)
-		{
-			dst.setPixel(Vec4(decompressedBuffer.linear[(i*blockWidth + j) * 4 + 0],
-								   decompressedBuffer.linear[(i*blockWidth + j) * 4 + 1],
-								   decompressedBuffer.linear[(i*blockWidth + j) * 4 + 2],
-								   decompressedBuffer.linear[(i*blockWidth + j) * 4 + 3]), j, i);
-		}
-	}
-}
-
 void decompressBlock (CompressedTexFormat format, const PixelBufferAccess& dst, const deUint8* src, const TexDecompressionParams& params)
 {
 	// No 3D blocks supported right now
@@ -2578,14 +1029,8 @@
 		case COMPRESSEDTEXFORMAT_ASTC_10x10_SRGB8_ALPHA8:
 		case COMPRESSEDTEXFORMAT_ASTC_12x10_SRGB8_ALPHA8:
 		case COMPRESSEDTEXFORMAT_ASTC_12x12_SRGB8_ALPHA8:
-		{
-			DE_ASSERT(params.astcMode == TexDecompressionParams::ASTCMODE_LDR || params.astcMode == TexDecompressionParams::ASTCMODE_HDR);
-
-			const bool isSRGBFormat = isAstcSRGBFormat(format);
-			decompressASTC(dst, src, isSRGBFormat, isSRGBFormat || params.astcMode == TexDecompressionParams::ASTCMODE_LDR);
-
+			astc::decompress(dst, src, format, params.astcMode);
 			break;
-		}
 
 		default:
 			DE_ASSERT(false);
@@ -2604,9 +1049,9 @@
 {
 	const int				blockSize			= getBlockSize(fmt);
 	const IVec3				blockPixelSize		(getBlockPixelSize(fmt));
-	const IVec3				blockCount			(divRoundUp(dst.getWidth(),		blockPixelSize.x()),
-												 divRoundUp(dst.getHeight(),	blockPixelSize.y()),
-												 divRoundUp(dst.getDepth(),		blockPixelSize.z()));
+	const IVec3				blockCount			(deDivRoundUp32(dst.getWidth(),		blockPixelSize.x()),
+												 deDivRoundUp32(dst.getHeight(),	blockPixelSize.y()),
+												 deDivRoundUp32(dst.getDepth(),		blockPixelSize.z()));
 	const IVec3				blockPitches		(blockSize, blockSize * blockCount.x(), blockSize * blockCount.x() * blockCount.y());
 
 	std::vector<deUint8>	uncompressedBlock	(dst.getFormat().getPixelSize() * blockPixelSize.x() * blockPixelSize.y() * blockPixelSize.z());
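
The deDivRoundUp32() calls above compute how many compressed blocks cover each dimension. A worked sketch, assuming the usual ceiling-division semantics of deDivRoundUp32() from the delibs headers (illustrative only, not part of this change):

#include <cassert>

static int divRoundUpSketch (int a, int b)
{
	return a/b + ((a % b) ? 1 : 0);	// assumed equivalent of deDivRoundUp32()
}

int main (void)
{
	assert(divRoundUpSketch(33, 8) == 5);	// 33 pixels -> 5 blocks of 8
	assert(divRoundUpSketch(17, 8) == 3);
	assert(divRoundUpSketch(16, 8) == 2);	// exact multiple needs no extra block
	return 0;
}
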
@@ -2659,15 +1104,12 @@
 	m_height	= height;
 	m_depth		= depth;
 
-	if (isAstcFormat(m_format) && m_depth > 1)
-		throw InternalError("3D ASTC textures not currently supported");
-
 	if (m_format != COMPRESSEDTEXFORMAT_LAST)
 	{
 		const IVec3	blockPixelSize	= getBlockPixelSize(m_format);
 		const int	blockSize		= getBlockSize(m_format);
 
-		m_data.resize(divRoundUp(m_width, blockPixelSize.x()) * divRoundUp(m_height, blockPixelSize.y()) * divRoundUp(m_depth, blockPixelSize.z()) * blockSize);
+		m_data.resize(deDivRoundUp32(m_width, blockPixelSize.x()) * deDivRoundUp32(m_height, blockPixelSize.y()) * deDivRoundUp32(m_depth, blockPixelSize.z()) * blockSize);
 	}
 	else
 	{
diff --git a/framework/common/tcuPlatform.cpp b/framework/common/tcuPlatform.cpp
index 271cdcf..2c9344c 100644
--- a/framework/common/tcuPlatform.cpp
+++ b/framework/common/tcuPlatform.cpp
@@ -41,12 +41,17 @@
 
 const glu::Platform& Platform::getGLPlatform (void) const
 {
-	throw tcu::NotSupportedError("OpenGL (ES) is not supported", DE_NULL, __FILE__, __LINE__);
+	TCU_THROW(NotSupportedError, "OpenGL (ES) is not supported");
 }
 
 const eglu::Platform& Platform::getEGLPlatform (void) const
 {
-	throw tcu::NotSupportedError("EGL is not supported", DE_NULL, __FILE__, __LINE__);
+	TCU_THROW(NotSupportedError, "EGL is not supported");
+}
+
+const vk::Platform& Platform::getVulkanPlatform (void) const
+{
+	TCU_THROW(NotSupportedError, "Vulkan is not supported");
 }
 
 } // tcu
diff --git a/framework/common/tcuPlatform.hpp b/framework/common/tcuPlatform.hpp
index 597cb7d..082cf08 100644
--- a/framework/common/tcuPlatform.hpp
+++ b/framework/common/tcuPlatform.hpp
@@ -35,6 +35,11 @@
 class Platform;
 }
 
+namespace vk
+{
+class Platform;
+}
+
 namespace tcu
 {
 
@@ -105,6 +110,8 @@
 	 * \return Reference to EGL platform interface.
 	 *//*--------------------------------------------------------------------*/
 	virtual const eglu::Platform&	getEGLPlatform		(void) const;
+
+	virtual const vk::Platform&		getVulkanPlatform	(void) const;
 };
 
 } // tcu
diff --git a/framework/common/tcuStringTemplate.cpp b/framework/common/tcuStringTemplate.cpp
index 15e0f36..d9ddedc 100644
--- a/framework/common/tcuStringTemplate.cpp
+++ b/framework/common/tcuStringTemplate.cpp
@@ -67,19 +67,30 @@
 			// Find end-of-param.
 			size_t paramEndNdx = m_template.find("}", paramNdx);
 			if (paramEndNdx == string::npos)
-				throw tcu::InternalError("No '}' found in template parameter", "", __FILE__, __LINE__);
+				TCU_THROW(InternalError, "No '}' found in template parameter");
 
 			// Parse parameter contents.
 			string	paramStr		= m_template.substr(paramNdx+2, paramEndNdx-2-paramNdx);
 			bool	paramSingleLine	= false;
+			bool	paramOptional	= false;
 			string	paramName;
 			size_t colonNdx = paramStr.find(":");
 			if (colonNdx != string::npos)
 			{
 				paramName = paramStr.substr(0, colonNdx);
 				string flagsStr = paramStr.substr(colonNdx+1);
-				TCU_CHECK(flagsStr == "single-line");
-				paramSingleLine = true;
+				if (flagsStr == "single-line")
+				{
+					paramSingleLine = true;
+				}
+				else if (flagsStr == "opt")
+				{
+					paramOptional = true;
+				}
+				else
+				{
+					TCU_THROW(InternalError, (string("Unrecognized flag: ") + paramStr).c_str());
+				}
 			}
 			else
 				paramName = paramStr;
@@ -98,8 +109,8 @@
 				else
 					res << val;
 			}
-			else
-				throw tcu::InternalError((string("Value for parameter '") + paramName + "' not found in map").c_str(), "", __FILE__, __LINE__);
+			else if (!paramOptional)
+				TCU_THROW(InternalError, (string("Value for parameter '") + paramName + "' not found in map").c_str());
 
 			// Skip over template.
 			curNdx = paramEndNdx + 1;
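
With the new "opt" flag, a template parameter may be left out of the specialization map: it expands to nothing instead of raising InternalError. A hedged usage sketch, assuming the existing tcu::StringTemplate::specialize() interface; the parameter name is made up for illustration:

#include <map>
#include <string>
#include "tcuStringTemplate.hpp"

static std::string specializeExampleSketch (void)
{
	const tcu::StringTemplate				tmpl("layout(location = 0) out vec4 o_color;\n${EXTRA_DECLARATIONS:opt}\n");
	std::map<std::string, std::string>		params;	// EXTRA_DECLARATIONS intentionally left unset

	return tmpl.specialize(params);					// no InternalError thanks to the ":opt" flag
}
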
diff --git a/framework/common/tcuTestLog.cpp b/framework/common/tcuTestLog.cpp
index bbb41ee..a6f41bc 100644
--- a/framework/common/tcuTestLog.cpp
+++ b/framework/common/tcuTestLog.cpp
@@ -227,7 +227,7 @@
 
 		Vector<int, Size> res;
 		for (int i = 0; i < Size; i++)
-			res[i] = deRoundFloatToInt32((float)imageSize[i] / d);
+			res[i] = de::max(1, deRoundFloatToInt32((float)imageSize[i] / d));
 
 		return res;
 	}
diff --git a/framework/common/tcuTexVerifierUtil.cpp b/framework/common/tcuTexVerifierUtil.cpp
index 44a3f70..bdc1efa 100644
--- a/framework/common/tcuTexVerifierUtil.cpp
+++ b/framework/common/tcuTexVerifierUtil.cpp
@@ -144,6 +144,10 @@
 		case tcu::Sampler::REPEAT_CL:
 			return imod(c, size);
 
+		case tcu::Sampler::MIRRORED_ONCE:
+			c = deClamp32(c, -size, size);
+			// Fall-through
+
 		case tcu::Sampler::MIRRORED_REPEAT_GL:
 		case tcu::Sampler::MIRRORED_REPEAT_CL:
 			return (size - 1) - mirror(imod(c, 2*size) - size);
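
The new MIRRORED_ONCE case clamps the coordinate to [-size, size] before falling through to the mirrored-repeat computation, so the texture is mirrored once and then clamped. A standalone sketch of the resulting behavior; imod() and mirror() are restated here as assumptions about the helpers used above (illustrative only, not part of this change):

static int imodSketch (int a, int b)
{
	const int m = a % b;
	return m < 0 ? m + b : m;
}

static int mirrorSketch (int a)
{
	return a >= 0 ? a : -(1 + a);
}

static int wrapMirroredOnceSketch (int c, int size)
{
	if (c < -size)	c = -size;		// the added clamp
	if (c >  size)	c =  size;
	return (size - 1) - mirrorSketch(imodSketch(c, 2*size) - size);
}
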
@@ -153,6 +157,5 @@
 			return 0;
 	}
 }
-
 } // TexVerifierUtil
 } // tcu
diff --git a/framework/common/tcuTexture.cpp b/framework/common/tcuTexture.cpp
index a291a4e..0fd15bc 100644
--- a/framework/common/tcuTexture.cpp
+++ b/framework/common/tcuTexture.cpp
@@ -260,7 +260,7 @@
 int getChannelSize (TextureFormat::ChannelType type)
 {
 	// make sure this table is updated if format table is updated
-	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 37);
+	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 38);
 
 	switch (type)
 	{
@@ -324,7 +324,7 @@
 inline float channelToFloat (const deUint8* value, TextureFormat::ChannelType type)
 {
 	// make sure this table is updated if format table is updated
-	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 37);
+	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 38);
 
 	switch (type)
 	{
@@ -354,7 +354,7 @@
 inline int channelToInt (const deUint8* value, TextureFormat::ChannelType type)
 {
 	// make sure this table is updated if format table is updated
-	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 37);
+	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 38);
 
 	switch (type)
 	{
@@ -384,7 +384,7 @@
 void floatToChannel (deUint8* dst, float src, TextureFormat::ChannelType type)
 {
 	// make sure this table is updated if format table is updated
-	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 37);
+	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 38);
 
 	switch (type)
 	{
@@ -441,7 +441,7 @@
 void intToChannel (deUint8* dst, int src, TextureFormat::ChannelType type)
 {
 	// make sure this table is updated if format table is updated
-	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 37);
+	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 38);
 
 	switch (type)
 	{
@@ -606,6 +606,9 @@
 		case TextureFormat::UNSIGNED_SHORT_5551:
 			return format.order == TextureFormat::RGBA || format.order == TextureFormat::BGRA;
 
+		case TextureFormat::UNORM_SHORT_1555:
+			return format.order == TextureFormat::ARGB;
+
 		case TextureFormat::UNORM_INT_101010:
 			return format.order == TextureFormat::RGB;
 
@@ -650,7 +653,7 @@
 			return 0u;
 	}
 
-	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 37);
+	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 38);
 }
 
 /** Get pixel size in bytes. */
@@ -662,7 +665,7 @@
 	DE_ASSERT(isValid(format));
 
 	// make sure this table is updated if format table is updated
-	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 37);
+	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 38);
 
 	switch (type)
 	{
@@ -674,6 +677,7 @@
 		case TextureFormat::UNORM_SHORT_555:
 		case TextureFormat::UNORM_SHORT_4444:
 		case TextureFormat::UNORM_SHORT_5551:
+		case TextureFormat::UNORM_SHORT_1555:
 		case TextureFormat::UNSIGNED_SHORT_565:
 		case TextureFormat::UNSIGNED_SHORT_4444:
 		case TextureFormat::UNSIGNED_SHORT_5551:
@@ -947,20 +951,24 @@
 	{
 		case TextureFormat::UNORM_BYTE_44:				return			  Vec4(UN8 (4,   4), UN8 ( 0,  4), 0.0f, 1.0f);
 		case TextureFormat::UNSIGNED_BYTE_44:			return			 UVec4(UI8 (4,   4), UI8 ( 0,  4), 0u, 1u).cast<float>();
-		case TextureFormat::UNORM_SHORT_565:			return swizzleRB( Vec4(UN16(11,  5), UN16( 5,  6), UN16( 0,  5), 1.0f), TextureFormat::RGB, m_format.order);
-		case TextureFormat::UNSIGNED_SHORT_565:			return swizzleRB(UVec4(UI16(11,  5), UI16( 5,  6), UI16( 0,  5), 1u), TextureFormat::RGB, m_format.order).cast<float>();
-		case TextureFormat::UNORM_SHORT_555:			return swizzleRB( Vec4(UN16(10,  5), UN16( 5,  5), UN16( 0,  5), 1.0f), TextureFormat::RGB, m_format.order);
-		case TextureFormat::UNORM_SHORT_4444:			return swizzleRB( Vec4(UN16(12,  4), UN16( 8,  4), UN16( 4,  4), UN16( 0, 4)), TextureFormat::RGBA, m_format.order);
-		case TextureFormat::UNSIGNED_SHORT_4444:		return swizzleRB(UVec4(UI16(12,  4), UI16( 8,  4), UI16( 4,  4), UI16( 0, 4)), TextureFormat::RGBA, m_format.order).cast<float>();
-		case TextureFormat::UNORM_SHORT_5551:			return swizzleRB( Vec4(UN16(11,  5), UN16( 6,  5), UN16( 1,  5), UN16( 0, 1)), TextureFormat::RGBA, m_format.order);
-		case TextureFormat::UNSIGNED_SHORT_5551:		return swizzleRB(UVec4(UI16(11,  5), UI16( 6,  5), UI16( 1,  5), UI16( 0, 1)), TextureFormat::RGBA, m_format.order).cast<float>();
+		case TextureFormat::UNORM_SHORT_565:			return swizzleRB( Vec4(UN16(11,  5), UN16( 5,  6), UN16( 0,  5), 1.0f), m_format.order, TextureFormat::RGB);
+		case TextureFormat::UNSIGNED_SHORT_565:			return swizzleRB(UVec4(UI16(11,  5), UI16( 5,  6), UI16( 0,  5), 1u), m_format.order, TextureFormat::RGB).cast<float>();
+		case TextureFormat::UNORM_SHORT_555:			return swizzleRB( Vec4(UN16(10,  5), UN16( 5,  5), UN16( 0,  5), 1.0f), m_format.order, TextureFormat::RGB);
+		case TextureFormat::UNORM_SHORT_4444:			return swizzleRB( Vec4(UN16(12,  4), UN16( 8,  4), UN16( 4,  4), UN16( 0, 4)), m_format.order, TextureFormat::RGBA);
+		case TextureFormat::UNSIGNED_SHORT_4444:		return swizzleRB(UVec4(UI16(12,  4), UI16( 8,  4), UI16( 4,  4), UI16( 0, 4)), m_format.order, TextureFormat::RGBA).cast<float>();
+		case TextureFormat::UNORM_SHORT_5551:			return swizzleRB( Vec4(UN16(11,  5), UN16( 6,  5), UN16( 1,  5), UN16( 0, 1)), m_format.order, TextureFormat::RGBA);
+		case TextureFormat::UNSIGNED_SHORT_5551:		return swizzleRB(UVec4(UI16(11,  5), UI16( 6,  5), UI16( 1,  5), UI16( 0, 1)), m_format.order, TextureFormat::RGBA).cast<float>();
 		case TextureFormat::UNORM_INT_101010:			return			  Vec4(UN32(22, 10), UN32(12, 10), UN32( 2, 10), 1.0f);
-		case TextureFormat::UNORM_INT_1010102_REV:		return swizzleRB( Vec4(UN32( 0, 10), UN32(10, 10), UN32(20, 10), UN32(30, 2)), TextureFormat::RGBA, m_format.order);
-		case TextureFormat::SNORM_INT_1010102_REV:		return swizzleRB( Vec4(SN32( 0, 10), SN32(10, 10), SN32(20, 10), SN32(30, 2)), TextureFormat::RGBA, m_format.order);
-		case TextureFormat::UNSIGNED_INT_1010102_REV:	return swizzleRB( UVec4(UI32(0, 10), UI32(10, 10), UI32(20, 10), UI32(30, 2)), TextureFormat::RGBA, m_format.order).cast<float>();
-		case TextureFormat::SIGNED_INT_1010102_REV:		return swizzleRB( UVec4(SI32(0, 10), SI32(10, 10), SI32(20, 10), SI32(30, 2)), TextureFormat::RGBA, m_format.order).cast<float>();
+		case TextureFormat::UNORM_INT_1010102_REV:		return swizzleRB( Vec4(UN32( 0, 10), UN32(10, 10), UN32(20, 10), UN32(30, 2)), m_format.order, TextureFormat::RGBA);
+		case TextureFormat::SNORM_INT_1010102_REV:		return swizzleRB( Vec4(SN32( 0, 10), SN32(10, 10), SN32(20, 10), SN32(30, 2)), m_format.order, TextureFormat::RGBA);
+		case TextureFormat::UNSIGNED_INT_1010102_REV:	return swizzleRB( UVec4(UI32(0, 10), UI32(10, 10), UI32(20, 10), UI32(30, 2)), m_format.order, TextureFormat::RGBA).cast<float>();
+		case TextureFormat::SIGNED_INT_1010102_REV:		return swizzleRB( UVec4(SI32(0, 10), SI32(10, 10), SI32(20, 10), SI32(30, 2)), m_format.order, TextureFormat::RGBA).cast<float>();
 		case TextureFormat::UNSIGNED_INT_999_E5_REV:	return unpackRGB999E5(*((const deUint32*)pixelPtr));
 
+		case TextureFormat::UNORM_SHORT_1555:
+			DE_ASSERT(m_format.order == TextureFormat::ARGB);
+			return Vec4(UN16(15, 1), UN16(10, 5), UN16(5, 5), UN16(0, 5)).swizzle(1,2,3,0); // ARGB -> RGBA
+
 		case TextureFormat::UNSIGNED_INT_11F_11F_10F_REV:
 			return Vec4(Float11(UI32(0, 11)).asFloat(), Float11(UI32(11, 11)).asFloat(), Float10(UI32(22, 10)).asFloat(), 1.0f);
 
@@ -1039,17 +1047,21 @@
 		case TextureFormat::UNSIGNED_BYTE_44:			// Fall-through
 		case TextureFormat::UNORM_BYTE_44:				return			 UVec4(U8 ( 4,  4), U8 ( 0,  4), 0u, 1u).cast<int>();
 		case TextureFormat::UNSIGNED_SHORT_565:			// Fall-through
-		case TextureFormat::UNORM_SHORT_565:			return swizzleRB(UVec4(U16(11,  5), U16( 5,  6), U16( 0,  5), 1).cast<int>(), TextureFormat::RGB, m_format.order);
-		case TextureFormat::UNORM_SHORT_555:			return swizzleRB(UVec4(U16(10,  5), U16( 5,  5), U16( 0,  5), 1).cast<int>(), TextureFormat::RGB, m_format.order);
+		case TextureFormat::UNORM_SHORT_565:			return swizzleRB(UVec4(U16(11,  5), U16( 5,  6), U16( 0,  5), 1).cast<int>(), m_format.order, TextureFormat::RGB);
+		case TextureFormat::UNORM_SHORT_555:			return swizzleRB(UVec4(U16(10,  5), U16( 5,  5), U16( 0,  5), 1).cast<int>(), m_format.order, TextureFormat::RGB);
 		case TextureFormat::UNSIGNED_SHORT_4444:		// Fall-through
-		case TextureFormat::UNORM_SHORT_4444:			return swizzleRB(UVec4(U16(12,  4), U16( 8,  4), U16( 4,  4), U16( 0, 4)).cast<int>(), TextureFormat::RGBA, m_format.order);
+		case TextureFormat::UNORM_SHORT_4444:			return swizzleRB(UVec4(U16(12,  4), U16( 8,  4), U16( 4,  4), U16( 0, 4)).cast<int>(), m_format.order, TextureFormat::RGBA);
 		case TextureFormat::UNSIGNED_SHORT_5551:		// Fall-through
-		case TextureFormat::UNORM_SHORT_5551:			return swizzleRB(UVec4(U16(11,  5), U16( 6,  5), U16( 1,  5), U16( 0, 1)).cast<int>(), TextureFormat::RGBA, m_format.order);
+		case TextureFormat::UNORM_SHORT_5551:			return swizzleRB(UVec4(U16(11,  5), U16( 6,  5), U16( 1,  5), U16( 0, 1)).cast<int>(), m_format.order, TextureFormat::RGBA);
 		case TextureFormat::UNORM_INT_101010:			return			 UVec4(U32(22, 10), U32(12, 10), U32( 2, 10), 1).cast<int>();
 		case TextureFormat::UNORM_INT_1010102_REV:		// Fall-through
-		case TextureFormat::UNSIGNED_INT_1010102_REV:	return swizzleRB(UVec4(U32( 0, 10), U32(10, 10), U32(20, 10), U32(30, 2)), TextureFormat::RGBA, m_format.order).cast<int>();
+		case TextureFormat::UNSIGNED_INT_1010102_REV:	return swizzleRB(UVec4(U32( 0, 10), U32(10, 10), U32(20, 10), U32(30, 2)), m_format.order, TextureFormat::RGBA).cast<int>();
 		case TextureFormat::SNORM_INT_1010102_REV:		// Fall-through
-		case TextureFormat::SIGNED_INT_1010102_REV:		return swizzleRB(IVec4(S32( 0, 10), S32(10, 10), S32(20, 10), S32(30, 2)), TextureFormat::RGBA, m_format.order);
+		case TextureFormat::SIGNED_INT_1010102_REV:		return swizzleRB(IVec4(S32( 0, 10), S32(10, 10), S32(20, 10), S32(30, 2)), m_format.order, TextureFormat::RGBA);
+
+		case TextureFormat::UNORM_SHORT_1555:
+			DE_ASSERT(m_format.order == TextureFormat::ARGB);
+			return UVec4(U16(15, 1), U16(10, 5), U16(5, 5), U16(0, 5)).cast<int>().swizzle(1,2,3,0); // ARGB -> RGBA
 
 		default:
 			break; // To generic path.
@@ -1210,77 +1222,84 @@
 
 		case TextureFormat::UNORM_SHORT_565:
 		{
-			const Vec4 swizzled = swizzleRB(color, m_format.order, TextureFormat::RGB);
+			const Vec4 swizzled = swizzleRB(color, TextureFormat::RGB, m_format.order);
 			*((deUint16*)pixelPtr) = (deUint16)(PN(swizzled[0], 11, 5) | PN(swizzled[1], 5, 6) | PN(swizzled[2], 0, 5));
 			break;
 		}
 
 		case TextureFormat::UNSIGNED_SHORT_565:
 		{
-			const UVec4 swizzled = swizzleRB(color.cast<deUint32>(), m_format.order, TextureFormat::RGB);
+			const UVec4 swizzled = swizzleRB(color.cast<deUint32>(), TextureFormat::RGB, m_format.order);
 			*((deUint16*)pixelPtr) = (deUint16)(PU(swizzled[0], 11, 5) | PU(swizzled[1], 5, 6) | PU(swizzled[2], 0, 5));
 			break;
 		}
 
 		case TextureFormat::UNORM_SHORT_555:
 		{
-			const Vec4 swizzled = swizzleRB(color, m_format.order, TextureFormat::RGB);
+			const Vec4 swizzled = swizzleRB(color, TextureFormat::RGB, m_format.order);
 			*((deUint16*)pixelPtr) = (deUint16)(PN(swizzled[0], 10, 5) | PN(swizzled[1], 5, 5) | PN(swizzled[2], 0, 5));
 			break;
 		}
 
 		case TextureFormat::UNORM_SHORT_4444:
 		{
-			const Vec4 swizzled = swizzleRB(color, m_format.order, TextureFormat::RGBA);
+			const Vec4 swizzled = swizzleRB(color, TextureFormat::RGBA, m_format.order);
 			*((deUint16*)pixelPtr) = (deUint16)(PN(swizzled[0], 12, 4) | PN(swizzled[1], 8, 4) | PN(swizzled[2], 4, 4) | PN(swizzled[3], 0, 4));
 			break;
 		}
 
 		case TextureFormat::UNSIGNED_SHORT_4444:
 		{
-			const UVec4 swizzled = swizzleRB(color.cast<deUint32>(), m_format.order, TextureFormat::RGBA);
+			const UVec4 swizzled = swizzleRB(color.cast<deUint32>(), TextureFormat::RGBA, m_format.order);
 			*((deUint16*)pixelPtr) = (deUint16)(PU(swizzled[0], 12, 4) | PU(swizzled[1], 8, 4) | PU(swizzled[2], 4, 4) | PU(swizzled[3], 0, 4));
 			break;
 		}
 
 		case TextureFormat::UNORM_SHORT_5551:
 		{
-			const Vec4 swizzled = swizzleRB(color, m_format.order, TextureFormat::RGBA);
+			const Vec4 swizzled = swizzleRB(color, TextureFormat::RGBA, m_format.order);
 			*((deUint16*)pixelPtr) = (deUint16)(PN(swizzled[0], 11, 5) | PN(swizzled[1], 6, 5) | PN(swizzled[2], 1, 5) | PN(swizzled[3], 0, 1));
 			break;
 		}
 
+		case TextureFormat::UNORM_SHORT_1555:
+		{
+			const Vec4 swizzled = color.swizzle(3,0,1,2); // RGBA -> ARGB
+			*((deUint16*)pixelPtr) = (deUint16)(PN(swizzled[0], 15, 1) | PN(swizzled[1], 10, 5) | PN(swizzled[2], 5, 5) | PN(swizzled[3], 0, 5));
+			break;
+		}
+
 		case TextureFormat::UNSIGNED_SHORT_5551:
 		{
-			const UVec4 swizzled = swizzleRB(color.cast<deUint32>(), m_format.order, TextureFormat::RGBA);
+			const UVec4 swizzled = swizzleRB(color.cast<deUint32>(), TextureFormat::RGBA, m_format.order);
 			*((deUint16*)pixelPtr) = (deUint16)(PU(swizzled[0], 11, 5) | PU(swizzled[1], 6, 5) | PU(swizzled[2], 1, 5) | PU(swizzled[3], 0, 1));
 			break;
 		}
 
 		case TextureFormat::UNORM_INT_1010102_REV:
 		{
-			const Vec4 u = swizzleRB(color, m_format.order, TextureFormat::RGBA);
+			const Vec4 u = swizzleRB(color, TextureFormat::RGBA, m_format.order);
 			*((deUint32*)pixelPtr) = PN(u[0], 0, 10) | PN(u[1], 10, 10) | PN(u[2], 20, 10) | PN(u[3], 30, 2);
 			break;
 		}
 
 		case TextureFormat::SNORM_INT_1010102_REV:
 		{
-			const Vec4 u = swizzleRB(color, m_format.order, TextureFormat::RGBA);
+			const Vec4 u = swizzleRB(color, TextureFormat::RGBA, m_format.order);
 			*((deUint32*)pixelPtr) = PS(u[0], 0, 10) | PS(u[1], 10, 10) | PS(u[2], 20, 10) | PS(u[3], 30, 2);
 			break;
 		}
 
 		case TextureFormat::UNSIGNED_INT_1010102_REV:
 		{
-			const UVec4 u = swizzleRB(color.cast<deUint32>(), m_format.order, TextureFormat::RGBA);
+			const UVec4 u = swizzleRB(color.cast<deUint32>(), TextureFormat::RGBA, m_format.order);
 			*((deUint32*)pixelPtr) = PU(u[0], 0, 10) | PU(u[1], 10, 10) | PU(u[2], 20, 10) | PU(u[3], 30, 2);
 			break;
 		}
 
 		case TextureFormat::SIGNED_INT_1010102_REV:
 		{
-			const IVec4 u = swizzleRB(color.cast<deInt32>(), m_format.order, TextureFormat::RGBA);
+			const IVec4 u = swizzleRB(color.cast<deInt32>(), TextureFormat::RGBA, m_format.order);
 			*((deUint32*)pixelPtr) = PI(u[0], 0, 10) | PI(u[1], 10, 10) | PI(u[2], 20, 10) | PI(u[3], 30, 2);
 			break;
 		}
@@ -1352,14 +1371,14 @@
 		case TextureFormat::UNORM_SHORT_565:
 		case TextureFormat::UNSIGNED_SHORT_565:
 		{
-			const IVec4 swizzled = swizzleRB(color, m_format.order, TextureFormat::RGB);
+			const IVec4 swizzled = swizzleRB(color, TextureFormat::RGB, m_format.order);
 			*((deUint16*)pixelPtr) = (deUint16)(PU(swizzled[0], 11, 5) | PU(swizzled[1], 5, 6) | PU(swizzled[2], 0, 5));
 			break;
 		}
 
 		case TextureFormat::UNORM_SHORT_555:
 		{
-			const IVec4 swizzled = swizzleRB(color, m_format.order, TextureFormat::RGB);
+			const IVec4 swizzled = swizzleRB(color, TextureFormat::RGB, m_format.order);
 			*((deUint16*)pixelPtr) = (deUint16)(PU(swizzled[0], 10, 5) | PU(swizzled[1], 5, 5) | PU(swizzled[2], 0, 5));
 			break;
 		}
@@ -1367,7 +1386,7 @@
 		case TextureFormat::UNORM_SHORT_4444:
 		case TextureFormat::UNSIGNED_SHORT_4444:
 		{
-			const IVec4 swizzled = swizzleRB(color, m_format.order, TextureFormat::RGBA);
+			const IVec4 swizzled = swizzleRB(color, TextureFormat::RGBA, m_format.order);
 			*((deUint16*)pixelPtr) = (deUint16)(PU(swizzled[0], 12, 4) | PU(swizzled[1], 8, 4) | PU(swizzled[2], 4, 4) | PU(swizzled[3], 0, 4));
 			break;
 		}
@@ -1375,15 +1394,22 @@
 		case TextureFormat::UNORM_SHORT_5551:
 		case TextureFormat::UNSIGNED_SHORT_5551:
 		{
-			const IVec4 swizzled = swizzleRB(color, m_format.order, TextureFormat::RGBA);
+			const IVec4 swizzled = swizzleRB(color, TextureFormat::RGBA, m_format.order);
 			*((deUint16*)pixelPtr) = (deUint16)(PU(swizzled[0], 11, 5) | PU(swizzled[1], 6, 5) | PU(swizzled[2], 1, 5) | PU(swizzled[3], 0, 1));
 			break;
 		}
 
+		case TextureFormat::UNORM_SHORT_1555:
+		{
+			const IVec4 swizzled = color.swizzle(3,0,1,2); // RGBA -> ARGB
+			*((deUint16*)pixelPtr) = (deUint16)(PU(swizzled[0], 15, 1) | PU(swizzled[1], 10, 5) | PU(swizzled[2], 5, 5) | PU(swizzled[3], 0, 5));
+			break;
+		}
+
 		case TextureFormat::UNORM_INT_1010102_REV:
 		case TextureFormat::UNSIGNED_INT_1010102_REV:
 		{
-			const IVec4 swizzled = swizzleRB(color, m_format.order, TextureFormat::RGBA);
+			const IVec4 swizzled = swizzleRB(color, TextureFormat::RGBA, m_format.order);
 			*((deUint32*)pixelPtr) = PU(swizzled[0],  0, 10) | PU(swizzled[1], 10, 10) | PU(swizzled[2], 20, 10) | PU(swizzled[3], 30, 2);
 			break;
 		}
@@ -1391,7 +1417,7 @@
 		case TextureFormat::SNORM_INT_1010102_REV:
 		case TextureFormat::SIGNED_INT_1010102_REV:
 		{
-			const IVec4 swizzled = swizzleRB(color, m_format.order, TextureFormat::RGBA);
+			const IVec4 swizzled = swizzleRB(color, TextureFormat::RGBA, m_format.order);
 			*((deUint32*)pixelPtr) = PI(swizzled[0],  0, 10) | PI(swizzled[1], 10, 10) | PI(swizzled[2], 20, 10) | PI(swizzled[3], 30, 2);
 			break;
 		}
@@ -1529,6 +1555,10 @@
 		case tcu::Sampler::REPEAT_CL:
 			return imod(c, size);
 
+		case tcu::Sampler::MIRRORED_ONCE:
+			c = deClamp32(c, -size, size);
+			// Fall-through
+
 		case tcu::Sampler::MIRRORED_REPEAT_GL:
 			return (size - 1) - mirror(imod(c, 2*size) - size);
 
@@ -1549,7 +1579,8 @@
 		case tcu::Sampler::CLAMP_TO_EDGE:
 		case tcu::Sampler::CLAMP_TO_BORDER:
 		case tcu::Sampler::REPEAT_GL:
-		case tcu::Sampler::MIRRORED_REPEAT_GL: // Fall-through (ordinary case).
+		case tcu::Sampler::MIRRORED_REPEAT_GL:
+		case tcu::Sampler::MIRRORED_ONCE:		// Fall-through (ordinary case).
 			return (float)size*c;
 
 		case tcu::Sampler::REPEAT_CL:
@@ -3747,6 +3778,7 @@
 		"UNORM_SHORT_555",
 		"UNORM_SHORT_4444",
 		"UNORM_SHORT_5551",
+		"UNORM_SHORT_1555",
 		"UNORM_INT_101010",
 		"SNORM_INT_1010102_REV",
 		"UNORM_INT_1010102_REV",
diff --git a/framework/common/tcuTexture.hpp b/framework/common/tcuTexture.hpp
index b0de147..43dcb57 100644
--- a/framework/common/tcuTexture.hpp
+++ b/framework/common/tcuTexture.hpp
@@ -83,6 +83,7 @@
 		UNORM_SHORT_555,
 		UNORM_SHORT_4444,
 		UNORM_SHORT_5551,
+		UNORM_SHORT_1555,
 		UNORM_INT_101010,
 		SNORM_INT_1010102_REV,
 		UNORM_INT_1010102_REV,
@@ -181,6 +182,7 @@
 		REPEAT_CL,			//! Repeat with OpenCL semantics
 		MIRRORED_REPEAT_GL,	//! Mirrored repeat with OpenGL semantics
 		MIRRORED_REPEAT_CL, //! Mirrored repeat with OpenCL semantics
+		MIRRORED_ONCE,		//! Mirrored once in negative directions
 
 		WRAPMODE_LAST
 	};
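MIRRORED_ONCE mirrors the texture exactly once around zero and then clamps, which is why the tcuTexture.cpp hunk above clamps the integer coordinate to [-size, size] before falling through to the MIRRORED_REPEAT_GL math. A minimal sketch of that wrap, assuming imod() and mirror() behave like the helpers the hunk refers to:

	#include <algorithm>

	static int imod   (int a, int b)  { const int m = a % b; return m < 0 ? m + b : m; }
	static int mirror (int a)         { return a >= 0 ? a : -(1 + a); }

	static int wrapMirroredOnce (int c, int size)
	{
		c = std::min(std::max(c, -size), size);                 // mirror at most once into the negative range
		return (size - 1) - mirror(imod(c, 2 * size) - size);   // same expression as MIRRORED_REPEAT_GL
	}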
diff --git a/framework/common/tcuTextureUtil.cpp b/framework/common/tcuTextureUtil.cpp
index d130cc3..7556259 100644
--- a/framework/common/tcuTextureUtil.cpp
+++ b/framework/common/tcuTextureUtil.cpp
@@ -115,7 +115,7 @@
 bool isCombinedDepthStencilType (TextureFormat::ChannelType type)
 {
 	// make sure to update this if type table is updated
-	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 37);
+	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 38);
 
 	return	type == TextureFormat::UNSIGNED_INT_16_8_8			||
 			type == TextureFormat::UNSIGNED_INT_24_8			||
@@ -157,7 +157,7 @@
 TextureChannelClass getTextureChannelClass (TextureFormat::ChannelType channelType)
 {
 	// make sure this table is updated if format table is updated
-	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 37);
+	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 38);
 
 	switch (channelType)
 	{
@@ -173,6 +173,7 @@
 		case TextureFormat::UNORM_SHORT_555:				return TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT;
 		case TextureFormat::UNORM_SHORT_4444:				return TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT;
 		case TextureFormat::UNORM_SHORT_5551:				return TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT;
+		case TextureFormat::UNORM_SHORT_1555:				return TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT;
 		case TextureFormat::UNSIGNED_BYTE_44:				return TEXTURECHANNELCLASS_UNSIGNED_INTEGER;
 		case TextureFormat::UNSIGNED_SHORT_565:				return TEXTURECHANNELCLASS_UNSIGNED_INTEGER;
 		case TextureFormat::UNSIGNED_SHORT_4444:			return TEXTURECHANNELCLASS_UNSIGNED_INTEGER;
@@ -196,9 +197,11 @@
 		case TextureFormat::UNSIGNED_INT32:					return TEXTURECHANNELCLASS_UNSIGNED_INTEGER;
 		case TextureFormat::HALF_FLOAT:						return TEXTURECHANNELCLASS_FLOATING_POINT;
 		case TextureFormat::FLOAT:							return TEXTURECHANNELCLASS_FLOATING_POINT;
-		case TextureFormat::FLOAT64:							return TEXTURECHANNELCLASS_FLOATING_POINT;
+		case TextureFormat::FLOAT64:						return TEXTURECHANNELCLASS_FLOATING_POINT;
 		case TextureFormat::FLOAT_UNSIGNED_INT_24_8_REV:	return TEXTURECHANNELCLASS_LAST;					//!< packed float32-pad24-uint8
-		default:											return TEXTURECHANNELCLASS_LAST;
+		default:
+			DE_FATAL("Unknown channel type");
+			return TEXTURECHANNELCLASS_LAST;
 	}
 }
 
@@ -339,7 +342,7 @@
 static Vec2 getFloatChannelValueRange (TextureFormat::ChannelType channelType)
 {
 	// make sure this table is updated if format table is updated
-	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 37);
+	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 38);
 
 	float cMin = 0.0f;
 	float cMax = 0.0f;
@@ -362,6 +365,7 @@
 		case TextureFormat::UNORM_SHORT_555:
 		case TextureFormat::UNORM_SHORT_4444:
 		case TextureFormat::UNORM_SHORT_5551:
+		case TextureFormat::UNORM_SHORT_1555:
 		case TextureFormat::UNORM_INT_101010:
 		case TextureFormat::UNORM_INT_1010102_REV:			cMin = 0.0f;			cMax = 1.0f;			break;
 
@@ -378,6 +382,8 @@
 		case TextureFormat::FLOAT64:						cMin = -1e5f;			cMax = 1e5f;			break;
 		case TextureFormat::UNSIGNED_INT_11F_11F_10F_REV:	cMin = 0.0f;			cMax = 1e4f;			break;
 		case TextureFormat::UNSIGNED_INT_999_E5_REV:		cMin = 0.0f;			cMax = 1e5f;			break;
+		case TextureFormat::UNSIGNED_BYTE_44:				cMin = 0.0f;			cMax = 15.f;			break;
+		case TextureFormat::UNSIGNED_SHORT_4444:			cMin = 0.0f;			cMax = 15.f;			break;
 
 		default:
 			DE_ASSERT(false);
@@ -397,11 +403,16 @@
 TextureFormatInfo getTextureFormatInfo (const TextureFormat& format)
 {
 	// Special cases.
-	if (format == TextureFormat(TextureFormat::RGBA, TextureFormat::UNSIGNED_INT_1010102_REV))
-		return TextureFormatInfo(Vec4(	    0.0f,		    0.0f,		    0.0f,		 0.0f),
-								 Vec4(	 1023.0f,		 1023.0f,		 1023.0f,		 3.0f),
+	if (format.type == TextureFormat::UNSIGNED_INT_1010102_REV)
+		return TextureFormatInfo(Vec4(	     0.0f,		    0.0f,		    0.0f,		 0.0f),
+								 Vec4(	  1023.0f,		 1023.0f,		 1023.0f,		 3.0f),
 								 Vec4(1.0f/1023.f,	1.0f/1023.0f,	1.0f/1023.0f,	1.0f/3.0f),
-								 Vec4(	    0.0f,		    0.0f,		    0.0f,		 0.0f));
+								 Vec4(	     0.0f,		    0.0f,		    0.0f,		 0.0f));
+	if (format.type == TextureFormat::SIGNED_INT_1010102_REV)
+		return TextureFormatInfo(Vec4(	  -512.0f,		 -512.0f,		 -512.0f,		-2.0f),
+								 Vec4(	   511.0f,		  511.0f,		  511.0f,		 1.0f),
+								 Vec4(1.0f/1023.f,	1.0f/1023.0f,	1.0f/1023.0f,	1.0f/3.0f),
+								 Vec4(	     0.5f,		    0.5f,		    0.5f,		 0.5f));
 	else if (format.order == TextureFormat::D || format.order == TextureFormat::DS)
 		return TextureFormatInfo(Vec4(0.0f,	0.0f,	0.0f,	0.0f),
 								 Vec4(1.0f,	1.0f,	1.0f,	0.0f),
@@ -412,6 +423,16 @@
 								 Vec4(1.0f, 1.0f, 1.0f, 1.5f),
 								 Vec4(1.0f, 1.0f, 1.0f, 1.0f),
 								 Vec4(0.0f, 0.0f, 0.0f, 0.0f));
+	else if (format.type == TextureFormat::UNSIGNED_SHORT_5551)
+		return TextureFormatInfo(Vec4(	   0.0f,		  0.0f,		  0.0f,	0.0f),
+								 Vec4(	  31.0f,		 31.0f,		 31.0f,	1.0f),
+								 Vec4(1.0f/31.f,	1.0f/31.0f,	1.0f/31.0f,	1.0f),
+								 Vec4(	   0.0f,		  0.0f,		  0.0f,	0.0f));
+	else if (format.type == TextureFormat::UNSIGNED_SHORT_565)
+		return TextureFormatInfo(Vec4(	   0.0f,		  0.0f,		  0.0f,	0.0f),
+								 Vec4(	  31.0f,		 63.0f,		 31.0f,	0.0f),
+								 Vec4(1.0f/31.f,	1.0f/63.0f,	1.0f/31.0f,	1.0f),
+								 Vec4(	   0.0f,		  0.0f,		  0.0f,	0.0f));
 
 	const Vec2						cRange		= getFloatChannelValueRange(format.type);
 	const TextureSwizzle::Channel*	map			= getChannelReadSwizzle(format.order).components;
@@ -483,7 +504,7 @@
 static IVec4 getChannelBitDepth (TextureFormat::ChannelType channelType)
 {
 	// make sure this table is updated if format table is updated
-	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 37);
+	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 38);
 
 	switch (channelType)
 	{
@@ -499,6 +520,7 @@
 		case TextureFormat::UNORM_SHORT_4444:				return IVec4(4);
 		case TextureFormat::UNORM_SHORT_555:				return IVec4(5,5,5,0);
 		case TextureFormat::UNORM_SHORT_5551:				return IVec4(5,5,5,1);
+		case TextureFormat::UNORM_SHORT_1555:				return IVec4(1,5,5,5);
 		case TextureFormat::UNSIGNED_BYTE_44:				return IVec4(4,4,0,0);
 		case TextureFormat::UNSIGNED_SHORT_565:				return IVec4(5,6,5,0);
 		case TextureFormat::UNSIGNED_SHORT_4444:			return IVec4(4);
@@ -549,7 +571,7 @@
 static IVec4 getChannelMantissaBitDepth (TextureFormat::ChannelType channelType)
 {
 	// make sure this table is updated if format table is updated
-	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 37);
+	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 38);
 
 	switch (channelType)
 	{
@@ -565,6 +587,7 @@
 		case TextureFormat::UNORM_SHORT_4444:
 		case TextureFormat::UNORM_SHORT_555:
 		case TextureFormat::UNORM_SHORT_5551:
+		case TextureFormat::UNORM_SHORT_1555:
 		case TextureFormat::UNSIGNED_BYTE_44:
 		case TextureFormat::UNSIGNED_SHORT_565:
 		case TextureFormat::UNSIGNED_SHORT_4444:
@@ -1223,7 +1246,7 @@
 static AccessType toSamplerAccess (const AccessType& baseAccess, Sampler::DepthStencilMode mode)
 {
 	// make sure to update this if type table is updated
-	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 37);
+	DE_STATIC_ASSERT(TextureFormat::CHANNELTYPE_LAST == 38);
 
 	if (!isCombinedDepthStencilType(baseAccess.getFormat().type))
 		return baseAccess;
@@ -1492,6 +1515,8 @@
 		case TextureFormat::sRG:		swizzle = &RG;		break;
 		case TextureFormat::sRGB:		swizzle = &RGB;		break;
 		case TextureFormat::sRGBA:		swizzle = &RGBA;	break;
+		case TextureFormat::sBGR:		swizzle = &RGB;		break;
+		case TextureFormat::sBGRA:		swizzle = &RGBA;	break;
 		case TextureFormat::D:			swizzle = &D;		break;
 		case TextureFormat::S:			swizzle = &S;		break;
 
diff --git a/framework/delibs/cmake/toolchain-android-r10e.cmake b/framework/delibs/cmake/toolchain-android-r10e.cmake
index 15245d8..dbba207 100644
--- a/framework/delibs/cmake/toolchain-android-r10e.cmake
+++ b/framework/delibs/cmake/toolchain-android-r10e.cmake
@@ -140,7 +140,17 @@
 	set(TARGET_C_FLAGS		"-mandroid ${TARGET_C_FLAGS}")
 
 elseif (DE_COMPILER STREQUAL "DE_COMPILER_CLANG")
-	set(LLVM_PATH "${ANDROID_NDK_PATH}/toolchains/llvm-3.6/prebuilt/${ANDROID_NDK_HOST_OS}/")
+	if (NOT DEFINED LLVM_VERSION)
+		if (ANDROID_NDK_HOST_OS STREQUAL "windows" OR
+			ANDROID_NDK_HOST_OS STREQUAL "windows-x86_64")
+			# Windows NDK prebuilts don't include llvm-ar tool in version 3.6
+			set(LLVM_VERSION "3.5")
+		else ()
+			set(LLVM_VERSION "3.6")
+		endif ()
+	endif ()
+
+	set(LLVM_PATH "${ANDROID_NDK_PATH}/toolchains/llvm-${LLVM_VERSION}/prebuilt/${ANDROID_NDK_HOST_OS}/")
 
 	if (ANDROID_NDK_HOST_OS STREQUAL "linux-x86" OR
 		ANDROID_NDK_HOST_OS STREQUAL "linux-x86_64" OR
@@ -150,7 +160,8 @@
 		cmake_force_cxx_compiler("${LLVM_PATH}bin/clang++"		Clang)
 		set(CMAKE_AR "${LLVM_PATH}/bin/llvm-ar" CACHE FILEPATH "Archiver")
 		set(CMAKE_RANLIB "${CROSS_COMPILE}ranlib" CACHE FILEPATH "Indexer")
-	elseif (ANDROID_NDK_HOST_OS STREQUAL "windows")
+	elseif (ANDROID_NDK_HOST_OS STREQUAL "windows" OR
+			ANDROID_NDK_HOST_OS STREQUAL "windows-x86_64")
 		cmake_force_c_compiler("${LLVM_PATH}bin/clang.exe"		Clang)
 		cmake_force_cxx_compiler("${LLVM_PATH}bin/clang++.exe"	Clang)
 		set(CMAKE_AR "${LLVM_PATH}bin/llvm-ar.exe" CACHE FILEPATH "Archiver")
diff --git a/framework/delibs/debase/deInt32.h b/framework/delibs/debase/deInt32.h
index c1b4887..72efc78 100644
--- a/framework/delibs/debase/deInt32.h
+++ b/framework/delibs/debase/deInt32.h
@@ -206,6 +206,36 @@
 }
 
 /*--------------------------------------------------------------------*//*!
+ * \brief Check if a value is a power-of-two.
+ * \param a Input value.
+ * \return True if input is a power-of-two value, false otherwise.
+ *
+ * \note Also returns true for zero.
+ *//*--------------------------------------------------------------------*/
+DE_INLINE deBool deIsPowerOfTwo64 (deUint64 a)
+{
+	return ((a & (a - 1ull)) == 0);
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Check if a value is a power-of-two.
+ * \param a Input value.
+ * \return True if input is a power-of-two value, false otherwise.
+ *
+ * \note Also returns true for zero.
+ *//*--------------------------------------------------------------------*/
+DE_INLINE deBool deIsPowerOfTwoSize (size_t a)
+{
+#if (DE_PTR_SIZE == 4)
+	return deIsPowerOfTwo32(a);
+#elif (DE_PTR_SIZE == 8)
+	return deIsPowerOfTwo64(a);
+#else
+#	error "Invalid DE_PTR_SIZE"
+#endif
+}
+
+/*--------------------------------------------------------------------*//*!
  * \brief Check if an integer is aligned to given power-of-two size.
  * \param a		Input value.
  * \param align	Alignment to check for.
@@ -254,6 +284,18 @@
 	return (void*)((val + align - 1) & ~(align - 1));
 }
 
+/*--------------------------------------------------------------------*//*!
+ * \brief Align a size_t value to given power-of-two size.
+ * \param val	Input value to align.
+ * \param align	Alignment to align to (must be a power-of-two).
+ * \return The aligned size (larger or equal to input).
+ *//*--------------------------------------------------------------------*/
+DE_INLINE size_t deAlignSize (size_t val, size_t align)
+{
+	DE_ASSERT(deIsPowerOfTwoSize(align));
+	return (val + align - 1) & ~(align - 1);
+}
+
 extern const deInt8 g_clzLUT[256];
 
 /*--------------------------------------------------------------------*//*!
@@ -410,6 +452,11 @@
 	return (a + b);
 }
 
+DE_INLINE deInt32 deDivRoundUp32 (deInt32 a, deInt32 b)
+{
+	return a/b + ((a%b) ? 1 : 0);
+}
+
 /* \todo [petri] Move to deInt64.h? */
 
 DE_INLINE deInt32 deMulAsr32 (deInt32 a, deInt32 b, int shift)
@@ -519,7 +566,7 @@
 
 DE_INLINE deBool deInt64InInt32Range (deInt64 x)
 {
-	return ((x >= (-1ll<<31)) && (x <= ((1ll<<31)-1)));
+	return ((x >= (((deInt64)((deInt32)(-0x7FFFFFFF - 1))))) && (x <= ((1ll<<31)-1)));
 }
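deAlignSize() rounds a size up to the next multiple of a power-of-two alignment with the usual (val + align - 1) & ~(align - 1) trick, and deIsPowerOfTwoSize() simply dispatches to the 32- or 64-bit check based on DE_PTR_SIZE. A quick hedged illustration of the arithmetic (not part of the patch):

	#include "deInt32.h"
	#include <cassert>

	static void alignSizeExamples (void)
	{
		assert(deAlignSize(13, 8)   == 16);     // 13 rounded up to the next multiple of 8
		assert(deAlignSize(16, 8)   == 16);     // already-aligned values are unchanged
		assert(deAlignSize(1, 4096) == 4096);
		assert(deIsPowerOfTwoSize(4096));
		assert(!deIsPowerOfTwoSize(12));
	}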
 
 
diff --git a/framework/delibs/debase/deInt32Test.c b/framework/delibs/debase/deInt32Test.c
index d87aee7..a79d08e 100644
--- a/framework/delibs/debase/deInt32Test.c
+++ b/framework/delibs/debase/deInt32Test.c
@@ -198,6 +198,20 @@
 	DE_TEST_ASSERT(deReverseBytes32(0xfecddeef) == 0xefdecdfe);
 	DE_TEST_ASSERT(deReverseBytes16(0x1122) == 0x2211);
 	DE_TEST_ASSERT(deReverseBytes16(0xdeef) == 0xefde);
+
+	DE_TEST_ASSERT(deInt64InInt32Range((deInt64)0x7FFFFFF));
+	DE_TEST_ASSERT(deInt64InInt32Range(0));
+	DE_TEST_ASSERT(deInt64InInt32Range(1));
+	DE_TEST_ASSERT(deInt64InInt32Range(-1));
+	DE_TEST_ASSERT(deInt64InInt32Range(-((deInt64)0x7FFFFFF)));
+	DE_TEST_ASSERT(deInt64InInt32Range(-((deInt64)0x8000 << 16)));
+	DE_TEST_ASSERT(deInt64InInt32Range((deInt64)deIntMinValue32(32)));
+
+	DE_TEST_ASSERT(!deInt64InInt32Range((((deInt64)0x7FFFFFF) << 32) | (deInt64)0xFFFFFFFF));
+	DE_TEST_ASSERT(!deInt64InInt32Range((deInt64)0x7FFFFFFF + 1));
+	DE_TEST_ASSERT(!deInt64InInt32Range(-((deInt64)0x7FFFFFFF + 2)));
+	DE_TEST_ASSERT(!deInt64InInt32Range(-((((deInt64)0x7FFFFFF) << 32) | (deInt64)0xFFFFFFFF)));
+	DE_TEST_ASSERT(!deInt64InInt32Range((deInt64)deIntMinValue32(32) - 1));
 }
 
 DE_END_EXTERN_C
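The deInt64InInt32Range() change above replaces (-1ll<<31) with a constant expression that avoids left-shifting a negative value, which is undefined behaviour in C, while producing the same lower bound: -0x7FFFFFFF - 1 == -2147483648 == INT32_MIN. The new assertions exercise that boundary as well as the first values just outside it on both sides.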
diff --git a/framework/delibs/debase/deMemory.c b/framework/delibs/debase/deMemory.c
index 35d2cf7..90795a0 100644
--- a/framework/delibs/debase/deMemory.c
+++ b/framework/delibs/debase/deMemory.c
@@ -29,6 +29,20 @@
 #include <stdlib.h>
 #include <string.h>
 
+#define DE_ALIGNED_MALLOC_POSIX		0
+#define DE_ALIGNED_MALLOC_WIN32		1
+#define DE_ALIGNED_MALLOC_GENERIC	2
+
+#if (DE_OS == DE_OS_UNIX) || ((DE_OS == DE_OS_ANDROID) && (DE_ANDROID_API >= 21))
+#	define DE_ALIGNED_MALLOC DE_ALIGNED_MALLOC_POSIX
+#	include <malloc.h>
+#elif (DE_OS == DE_OS_WIN32)
+#	define DE_ALIGNED_MALLOC DE_ALIGNED_MALLOC_WIN32
+#	include <malloc.h>
+#else
+#	define DE_ALIGNED_MALLOC DE_ALIGNED_MALLOC_GENERIC
+#endif
+
 #if defined(DE_VALGRIND_BUILD)
 #	include <valgrind/valgrind.h>
 #	if defined(HAVE_VALGRIND_MEMCHECK_H)
@@ -81,24 +95,6 @@
 	return ptr;
 }
 
-void* deAlignedMalloc (size_t numBytes, deUint32 alignBytes)
-{
-	size_t		ptrSize		= sizeof(void*);
-	deUintptr	origPtr		= (deUintptr)deMalloc(numBytes + ptrSize + (size_t)alignBytes);
-
-	DE_ASSERT(deInRange32(alignBytes, 0, 256) && deIsPowerOfTwo32(alignBytes));
-
-	if (origPtr)
-	{
-		deUintptr	alignedPtr	= (deUintptr)deAlignPtr((void*)(origPtr + ptrSize), (deUintptr)alignBytes);
-		deUintptr	ptrPtr		= (alignedPtr - ptrSize);
-		*(deUintptr*)ptrPtr = origPtr;
-		return (void*)alignedPtr;
-	}
-	else
-		return DE_NULL;
-}
-
 /*--------------------------------------------------------------------*//*!
  * \brief Reallocate a chunk of memory.
  * \param ptr		Pointer to previously allocated memory block
@@ -119,16 +115,141 @@
 	free(ptr);
 }
 
-void deAlignedFree (void* ptr)
+#if (DE_ALIGNED_MALLOC == DE_ALIGNED_MALLOC_GENERIC)
+
+typedef struct AlignedAllocHeader_s
 {
+	void*	basePtr;
+	size_t	numBytes;
+} AlignedAllocHeader;
+
+DE_INLINE AlignedAllocHeader* getAlignedAllocHeader (void* ptr)
+{
+	const size_t	hdrSize		= sizeof(AlignedAllocHeader);
+	const deUintptr	hdrAddr		= (deUintptr)ptr - hdrSize;
+
+	return (AlignedAllocHeader*)hdrAddr;
+}
+
+#endif
+
+void* deAlignedMalloc (size_t numBytes, size_t alignBytes)
+{
+#if (DE_ALIGNED_MALLOC == DE_ALIGNED_MALLOC_POSIX)
+	/* posix_memalign() requires that alignment must be 2^N * sizeof(void*) */
+	const size_t	ptrAlignedAlign	= deAlignSize(alignBytes, sizeof(void*));
+	void*			ptr				= DE_NULL;
+
+	DE_ASSERT(deIsPowerOfTwoSize(alignBytes) && deIsPowerOfTwoSize(ptrAlignedAlign / sizeof(void*)));
+
+	if (posix_memalign(&ptr, ptrAlignedAlign, numBytes) == 0)
+	{
+		DE_ASSERT(ptr);
+		return ptr;
+	}
+	else
+	{
+		DE_ASSERT(!ptr);
+		return DE_NULL;
+	}
+
+#elif (DE_ALIGNED_MALLOC == DE_ALIGNED_MALLOC_WIN32)
+	DE_ASSERT(deIsPowerOfTwoSize(alignBytes));
+
+	return _aligned_malloc(numBytes, alignBytes);
+
+#elif (DE_ALIGNED_MALLOC == DE_ALIGNED_MALLOC_GENERIC)
+	void* const	basePtr	= deMalloc(numBytes + alignBytes + sizeof(AlignedAllocHeader));
+
+	DE_ASSERT(deIsPowerOfTwoSize(alignBytes));
+
+	if (basePtr)
+	{
+		void* const					alignedPtr	= deAlignPtr((void*)((deUintptr)basePtr + sizeof(AlignedAllocHeader)), alignBytes);
+		AlignedAllocHeader* const	hdr			= getAlignedAllocHeader(alignedPtr);
+
+		hdr->basePtr	= basePtr;
+		hdr->numBytes	= numBytes;
+
+		return alignedPtr;
+	}
+	else
+		return DE_NULL;
+#else
+#	error "Invalid DE_ALIGNED_MALLOC"
+#endif
+}
+
+void* deAlignedRealloc (void* ptr, size_t numBytes, size_t alignBytes)
+{
+#if (DE_ALIGNED_MALLOC == DE_ALIGNED_MALLOC_WIN32)
+	return _aligned_realloc(ptr, numBytes, alignBytes);
+
+#elif (DE_ALIGNED_MALLOC == DE_ALIGNED_MALLOC_GENERIC) || (DE_ALIGNED_MALLOC == DE_ALIGNED_MALLOC_POSIX)
 	if (ptr)
 	{
-		size_t		ptrSize		= sizeof(void*);
-		deUintptr	ptrPtr		= (deUintptr)ptr - ptrSize;
-		deUintptr	origPtr		= *(deUintptr*)ptrPtr;
-		DE_ASSERT(ptrPtr - origPtr < 256);
-		deFree((void*)origPtr);
+		if (numBytes > 0)
+		{
+#	if (DE_ALIGNED_MALLOC == DE_ALIGNED_MALLOC_GENERIC)
+			const size_t				oldSize	= getAlignedAllocHeader(ptr)->numBytes;
+#	else /* DE_ALIGNED_MALLOC_GENERIC */
+			const size_t				oldSize	= malloc_usable_size(ptr);
+#	endif
+
+			DE_ASSERT(deIsAlignedPtr(ptr, alignBytes));
+
+			if (oldSize < numBytes || oldSize > numBytes*2)
+			{
+				/* Create a new alloc if original is smaller, or more than twice the requested size */
+				void* const	newPtr	= deAlignedMalloc(numBytes, alignBytes);
+
+				if (newPtr)
+				{
+					const size_t	copyBytes	= numBytes < oldSize ? numBytes : oldSize;
+
+					deMemcpy(newPtr, ptr, copyBytes);
+					deAlignedFree(ptr);
+
+					return newPtr;
+				}
+				else
+					return DE_NULL;
+			}
+			else
+				return ptr;
+		}
+		else
+		{
+			deAlignedFree(ptr);
+			return DE_NULL;
+		}
 	}
+	else
+		return deAlignedMalloc(numBytes, alignBytes);
+
+#else
+#	error "Invalid DE_ALIGNED_MALLOC"
+#endif
+}
+
+void deAlignedFree (void* ptr)
+{
+#if (DE_ALIGNED_MALLOC == DE_ALIGNED_MALLOC_POSIX)
+	free(ptr);
+
+#elif (DE_ALIGNED_MALLOC == DE_ALIGNED_MALLOC_WIN32)
+	_aligned_free(ptr);
+
+#elif (DE_ALIGNED_MALLOC == DE_ALIGNED_MALLOC_GENERIC)
+	if (ptr)
+	{
+		AlignedAllocHeader* const	hdr	= getAlignedAllocHeader(ptr);
+
+		deFree(hdr->basePtr);
+	}
+#else
+#	error "Invalid DE_ALIGNED_MALLOC"
+#endif
 }
 
 char* deStrdup (const char* str)
@@ -147,4 +268,89 @@
 #endif
 }
 
+void deMemory_selfTest (void)
+{
+	static const struct
+	{
+		size_t		numBytes;
+		size_t		alignment;
+	} s_alignedAllocCases[] =
+	{
+		{ 1,		1		},
+		{ 1,		2		},
+		{ 1,		256		},
+		{ 1,		4096	},
+		{ 547389,	1		},
+		{ 547389,	2		},
+		{ 547389,	256		},
+		{ 547389,	4096	},
+		{ 52532,	1<<4	},
+		{ 52532,	1<<10	},
+		{ 52532,	1<<16	},
+	};
+	static const struct
+	{
+		size_t		initialSize;
+		size_t		newSize;
+		size_t		alignment;
+	} s_alignedReallocCases[] =
+	{
+		{ 1,		1,		1		},
+		{ 1,		1,		2		},
+		{ 1,		1,		256		},
+		{ 1,		1,		4096	},
+		{ 1,		1241,	1		},
+		{ 1,		1241,	2		},
+		{ 1,		1241,	256		},
+		{ 1,		1241,	4096	},
+		{ 547389,	234,	1		},
+		{ 547389,	234,	2		},
+		{ 547389,	234,	256		},
+		{ 547389,	234,	4096	},
+		{ 52532,	421523,	1<<4	},
+		{ 52532,	421523,	1<<10	},
+		{ 52532,	421523,	1<<16	},
+	};
+
+	int caseNdx;
+
+	for (caseNdx = 0; caseNdx < DE_LENGTH_OF_ARRAY(s_alignedAllocCases); caseNdx++)
+	{
+		void* const		ptr		= deAlignedMalloc(s_alignedAllocCases[caseNdx].numBytes, s_alignedAllocCases[caseNdx].alignment);
+
+		DE_TEST_ASSERT(ptr);
+		DE_TEST_ASSERT(deIsAlignedPtr(ptr, s_alignedAllocCases[caseNdx].alignment));
+
+		deMemset(ptr, 0xaa, s_alignedAllocCases[caseNdx].numBytes);
+
+		deAlignedFree(ptr);
+	}
+
+	for (caseNdx = 0; caseNdx < DE_LENGTH_OF_ARRAY(s_alignedReallocCases); caseNdx++)
+	{
+		void* const		ptr		= deAlignedMalloc(s_alignedReallocCases[caseNdx].initialSize, s_alignedReallocCases[caseNdx].alignment);
+
+		DE_TEST_ASSERT(ptr);
+		DE_TEST_ASSERT(deIsAlignedPtr(ptr, s_alignedReallocCases[caseNdx].alignment));
+
+		deMemset(ptr, 0xaa, s_alignedReallocCases[caseNdx].initialSize);
+
+		{
+			void* const		newPtr			= deAlignedRealloc(ptr, s_alignedReallocCases[caseNdx].newSize, s_alignedReallocCases[caseNdx].alignment);
+			const size_t	numPreserved	= s_alignedReallocCases[caseNdx].newSize < s_alignedReallocCases[caseNdx].initialSize
+											? s_alignedReallocCases[caseNdx].newSize
+											: s_alignedReallocCases[caseNdx].initialSize;
+			size_t			off;
+
+			DE_TEST_ASSERT(newPtr);
+			DE_TEST_ASSERT(deIsAlignedPtr(newPtr, s_alignedReallocCases[caseNdx].alignment));
+
+			for (off = 0; off < numPreserved; off++)
+				DE_TEST_ASSERT(*((const deUint8*)newPtr + off) == 0xaa);
+
+			deAlignedFree(newPtr);
+		}
+	}
+}
+
 DE_END_EXTERN_C
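The reworked allocator now picks posix_memalign(), _aligned_malloc() or a header-based fallback at compile time, and the new deAlignedRealloc() preserves the old contents up to the smaller of the two sizes. A hedged usage sketch (the sizes and alignment below are arbitrary examples, not from the patch):

	#include "deMemory.h"

	static void alignedAllocExample (void)
	{
		// 4 KiB-aligned scratch buffer.
		void* buf = deAlignedMalloc(1024, 4096);

		if (!buf)
			return;

		deMemset(buf, 0, 1024);

		{
			// Grow to 8 KiB; the first 1024 bytes are preserved.
			void* const newBuf = deAlignedRealloc(buf, 8192, 4096);

			if (newBuf)
				buf = newBuf;
		}

		deAlignedFree(buf);
	}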
diff --git a/framework/delibs/debase/deMemory.h b/framework/delibs/debase/deMemory.h
index 105d4d9..a5e6150 100644
--- a/framework/delibs/debase/deMemory.h
+++ b/framework/delibs/debase/deMemory.h
@@ -37,7 +37,8 @@
 void*	deRealloc		(void* ptr, size_t numBytes);
 void	deFree			(void* ptr);
 
-void*	deAlignedMalloc	(size_t numBytes, deUint32 alignBytes);
+void*	deAlignedMalloc	(size_t numBytes, size_t alignBytes);
+void*	deAlignedRealloc(void* ptr, size_t numBytes, size_t alignBytes);
 void	deAlignedFree	(void* ptr);
 
 char*	deStrdup		(const char* str);
@@ -76,6 +77,8 @@
 	return memmove(dst, src, numBytes);
 }
 
+void	deMemory_selfTest	(void);
+
 DE_END_EXTERN_C
 
 #endif /* _DEMEMORY_H */
diff --git a/framework/delibs/decpp/CMakeLists.txt b/framework/delibs/decpp/CMakeLists.txt
index 4994b2d..1266335 100644
--- a/framework/delibs/decpp/CMakeLists.txt
+++ b/framework/delibs/decpp/CMakeLists.txt
@@ -5,6 +5,8 @@
 endif ()
 
 set(DECPP_SRCS
+	deAppendList.cpp
+	deAppendList.hpp
 	deArrayBuffer.cpp
 	deArrayBuffer.hpp
 	deArrayUtil.cpp
diff --git a/framework/delibs/decpp/deAppendList.cpp b/framework/delibs/decpp/deAppendList.cpp
new file mode 100644
index 0000000..ed7ef74
--- /dev/null
+++ b/framework/delibs/decpp/deAppendList.cpp
@@ -0,0 +1,155 @@
+/*-------------------------------------------------------------------------
+ * drawElements C++ Base Library
+ * -----------------------------
+ *
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *//*!
+ * \file
+ * \brief Fast ordered append-only container
+ *//*--------------------------------------------------------------------*/
+
+#include "deAppendList.hpp"
+#include "deThread.hpp"
+#include "deSpinBarrier.hpp"
+#include "deSharedPtr.hpp"
+
+#include <vector>
+#include <algorithm>
+
+namespace de
+{
+
+namespace
+{
+
+using std::vector;
+
+struct TestElem
+{
+	deUint32	threadNdx;
+	deUint32	elemNdx;
+
+	TestElem (deUint32 threadNdx_, deUint32 elemNdx_)
+		: threadNdx	(threadNdx_)
+		, elemNdx	(elemNdx_)
+	{}
+
+	TestElem (void)
+		: threadNdx	(0)
+		, elemNdx	(0)
+	{}
+};
+
+struct SharedState
+{
+	deUint32				numElements;
+	SpinBarrier				barrier;
+	AppendList<TestElem>	testList;
+
+	SharedState (deUint32 numThreads, deUint32 numElements_, deUint32 numElementsHint)
+		: numElements	(numElements_)
+		, barrier		(numThreads)
+		, testList		(numElementsHint)
+	{}
+};
+
+class TestThread : public Thread
+{
+public:
+	TestThread (SharedState* shared, deUint32 threadNdx)
+		: m_shared		(shared)
+		, m_threadNdx	(threadNdx)
+	{}
+
+	void run (void)
+	{
+		const deUint32	syncPerElems	= 10000;
+
+		for (deUint32 elemNdx = 0; elemNdx < m_shared->numElements; elemNdx++)
+		{
+			if (elemNdx % syncPerElems == 0)
+				m_shared->barrier.sync(SpinBarrier::WAIT_MODE_AUTO);
+
+			m_shared->testList.append(TestElem(m_threadNdx, elemNdx));
+		}
+	}
+
+private:
+	SharedState* const	m_shared;
+	const deUint32		m_threadNdx;
+};
+
+typedef SharedPtr<TestThread> TestThreadSp;
+
+void runAppendListTest (deUint32 numThreads, deUint32 numElements, deUint32 numElementsHint)
+{
+	SharedState				sharedState		(numThreads, numElements, numElementsHint);
+	vector<TestThreadSp>	threads			(numThreads);
+
+	for (deUint32 threadNdx = 0; threadNdx < numThreads; ++threadNdx)
+	{
+		threads[threadNdx] = TestThreadSp(new TestThread(&sharedState, threadNdx));
+		threads[threadNdx]->start();
+	}
+
+	for (deUint32 threadNdx = 0; threadNdx < numThreads; ++threadNdx)
+		threads[threadNdx]->join();
+
+	DE_TEST_ASSERT(sharedState.testList.size() == (size_t)numElements*(size_t)numThreads);
+
+	{
+		vector<deUint32>	countByThread	(numThreads);
+
+		std::fill(countByThread.begin(), countByThread.end(), 0);
+
+		for (AppendList<TestElem>::const_iterator elemIter = sharedState.testList.begin();
+			 elemIter != sharedState.testList.end();
+			 ++elemIter)
+		{
+			const TestElem&	elem	= *elemIter;
+
+			DE_TEST_ASSERT(de::inBounds(elem.threadNdx, 0u, numThreads));
+			DE_TEST_ASSERT(countByThread[elem.threadNdx] == elem.elemNdx);
+
+			countByThread[elem.threadNdx] += 1;
+		}
+
+		for (deUint32 threadNdx = 0; threadNdx < numThreads; ++threadNdx)
+			DE_TEST_ASSERT(countByThread[threadNdx] == numElements);
+	}
+}
+
+} // anonymous
+
+void AppendList_selfTest (void)
+{
+	// Single-threaded
+	runAppendListTest(1, 1000, 500);
+	runAppendListTest(1, 1000, 2000);
+	runAppendListTest(1, 35, 1);
+
+	// Multi-threaded
+	runAppendListTest(2, 10000, 500);
+	runAppendListTest(2, 100, 10);
+
+	if (deGetNumAvailableLogicalCores() >= 4)
+	{
+		runAppendListTest(4, 10000, 500);
+		runAppendListTest(4, 100, 10);
+	}
+}
+
+} // de
diff --git a/framework/delibs/decpp/deAppendList.hpp b/framework/delibs/decpp/deAppendList.hpp
new file mode 100644
index 0000000..4c37767
--- /dev/null
+++ b/framework/delibs/decpp/deAppendList.hpp
@@ -0,0 +1,261 @@
+#ifndef _DEAPPENDLIST_HPP
+#define _DEAPPENDLIST_HPP
+/*-------------------------------------------------------------------------
+ * drawElements C++ Base Library
+ * -----------------------------
+ *
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *//*!
+ * \file
+ * \brief Fast ordered append-only container
+ *//*--------------------------------------------------------------------*/
+
+#include "deDefs.hpp"
+#include "deAtomic.h"
+#include "deThread.h"
+#include "deMemory.h"
+#include "deInt32.h"
+
+namespace de
+{
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Fast ordered append-only container
+ *
+ * AppendList provides a data structure for quickly recording an ordered
+ * list of elements while still offering good sequential read access speed.
+ * It is well suited to, for example, logging.
+ *
+ * AppendList allocates memory in blocks of blockSize elements. Choosing
+ * too small a blockSize will hurt performance.
+ *
+ * Elements can be appended from multiple threads simultaneously, but if
+ * the current block runs out, allocation of the next block happens in a
+ * single thread and blocks others from inserting further elements until
+ * it completes. For that reason a shared AppendList should not be used
+ * under heavy contention; per-thread AppendLists are recommended instead.
+ *//*--------------------------------------------------------------------*/
+template<typename ElementType>
+class AppendList
+{
+public:
+								AppendList		(size_t blockSize);
+								~AppendList		(void);
+
+	void						append			(const ElementType& value);
+
+	size_t						size			(void) const { return m_numElements;	}
+
+private:
+								AppendList		(const AppendList<ElementType>&);
+	AppendList<ElementType>&	operator=		(const AppendList<ElementType>&);
+
+	struct Block
+	{
+		const size_t		blockNdx;
+		ElementType*		elements;
+		Block* volatile		next;
+
+		Block (size_t blockNdx_, size_t size)
+			: blockNdx	(blockNdx_)
+			, elements	(reinterpret_cast<ElementType*>(deAlignedMalloc(sizeof(ElementType)*size,
+																		deAlign32((deUint32)alignOf<ElementType>(), (deUint32)sizeof(void*)))))
+			, next		(DE_NULL)
+		{
+		}
+
+		~Block (void)
+		{
+			deAlignedFree(reinterpret_cast<void*>(elements));
+		}
+	};
+
+	const size_t				m_blockSize;
+	volatile size_t				m_numElements;
+	Block*						m_first;
+	Block* volatile				m_last;
+
+public:
+	template<typename CompatibleType>
+	class Iterator
+	{
+	public:
+									Iterator						(Block* curBlock_, size_t blockSize_, size_t slotNdx_)
+																		: m_curBlock	(curBlock_)
+																		, m_blockSize	(blockSize_)
+																		, m_slotNdx		(slotNdx_)
+		{}
+
+		bool						operator!=						(const Iterator<CompatibleType>& other) const
+		{
+			return m_curBlock != other.m_curBlock || m_slotNdx != other.m_slotNdx;
+		}
+		bool						operator==						(const Iterator<CompatibleType>& other) const
+		{
+			return m_curBlock == other.m_curBlock && m_slotNdx == other.m_slotNdx;
+		}
+
+		Iterator<CompatibleType>&	operator++						(void)
+		{
+			++m_slotNdx;
+
+			if (m_slotNdx == m_blockSize)
+			{
+				m_slotNdx = 0;
+				m_curBlock = m_curBlock->next;
+			}
+
+			return *this;
+		}
+
+		Iterator<CompatibleType>	operator++						(int) const
+		{
+			Iterator<CompatibleType> copy(*this);
+			return ++copy;
+		}
+
+		CompatibleType&				operator*						(void) const
+		{
+			return m_curBlock->elements[m_slotNdx];
+		}
+
+		operator					Iterator<const CompatibleType>	(void) const
+		{
+			return Iterator<const CompatibleType>(m_curBlock, m_blockSize, m_slotNdx);
+		}
+
+	private:
+		Block*			m_curBlock;
+		size_t			m_blockSize;
+		size_t			m_slotNdx;
+	};
+
+	typedef Iterator<const ElementType>	const_iterator;
+	typedef Iterator<ElementType>		iterator;
+
+	const_iterator				begin			(void) const;
+	iterator					begin			(void);
+
+	const_iterator				end				(void) const;
+	iterator					end				(void);
+};
+
+template<typename ElementType>
+AppendList<ElementType>::AppendList (size_t blockSize)
+	: m_blockSize	(blockSize)
+	, m_numElements	(0)
+	, m_first		(new Block(0, blockSize))
+	, m_last		(m_first)
+{
+}
+
+template<typename ElementType>
+AppendList<ElementType>::~AppendList (void)
+{
+	size_t	elementNdx	= 0;
+	Block*	curBlock	= m_first;
+
+	while (curBlock)
+	{
+		Block* const	delBlock	= curBlock;
+
+		curBlock = delBlock->next;
+
+		// Call destructor for allocated elements
+		for (; elementNdx < min(m_numElements, (delBlock->blockNdx+1)*m_blockSize); ++elementNdx)
+			delBlock->elements[elementNdx%m_blockSize].~ElementType();
+
+		delete delBlock;
+	}
+}
+
+template<typename ElementType>
+void AppendList<ElementType>::append (const ElementType& value)
+{
+	// Fetch curBlock before allocating the slot. Otherwise m_last might get updated
+	// before this thread gets a chance to read it, leading to curBlock->blockNdx > blockNdx.
+	Block*			curBlock	= m_last;
+
+	deMemoryReadWriteFence();
+
+	{
+		const size_t	elementNdx	= deAtomicIncrementUSize(&m_numElements) - 1;
+		const size_t	blockNdx	= elementNdx / m_blockSize;
+		const size_t	slotNdx		= elementNdx - (blockNdx * m_blockSize);
+
+		while (curBlock->blockNdx != blockNdx)
+		{
+			if (curBlock->next)
+				curBlock = curBlock->next;
+			else
+			{
+				// Other thread(s) are currently allocating additional block(s)
+				deYield();
+			}
+		}
+
+		// Did we allocate last slot? If so, add a new block
+		if (slotNdx+1 == m_blockSize)
+		{
+			Block* const	newBlock	= new Block(blockNdx+1, m_blockSize);
+
+			deMemoryReadWriteFence();
+
+			// At this point if any other thread is trying to allocate more blocks
+			// they are being blocked by curBlock->next being null. This guarantees
+			// that this thread has exclusive modify access to m_last.
+			m_last = newBlock;
+			deMemoryReadWriteFence();
+
+			// At this point other threads might have skipped to newBlock, but we
+			// still have exclusive modify access to curBlock->next.
+			curBlock->next = newBlock;
+			deMemoryReadWriteFence();
+		}
+
+		new (&curBlock->elements[slotNdx]) ElementType(value);
+	}
+}
+
+template<typename ElementType>
+typename AppendList<ElementType>::const_iterator AppendList<ElementType>::begin (void) const
+{
+	return const_iterator(m_first, m_blockSize, 0);
+}
+
+template<typename ElementType>
+typename AppendList<ElementType>::iterator AppendList<ElementType>::begin (void)
+{
+	return iterator(m_first, m_blockSize, 0);
+}
+
+template<typename ElementType>
+typename AppendList<ElementType>::const_iterator AppendList<ElementType>::end (void) const
+{
+	return const_iterator(m_last, m_blockSize, m_numElements%m_blockSize);
+}
+
+template<typename ElementType>
+typename AppendList<ElementType>::iterator AppendList<ElementType>::end (void)
+{
+	return iterator(m_last, m_blockSize, m_numElements%m_blockSize);
+}
+
+void	AppendList_selfTest		(void);
+
+} // de
+
+#endif // _DEAPPENDLIST_HPP
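A hedged usage sketch for de::AppendList (element type, block size and counts below are arbitrary): append from any number of producer threads, then iterate sequentially once they have joined.

	#include "deAppendList.hpp"
	#include <cstdio>

	struct LogEntry
	{
		int		line;
		char	code;
	};

	static void appendListExample (void)
	{
		de::AppendList<LogEntry> log(1024);     // allocates storage in blocks of 1024 elements

		for (int i = 0; i < 5000; i++)
		{
			const LogEntry entry = { i, 'x' };
			log.append(entry);                  // safe to call concurrently from several threads
		}

		// Ordered, sequential read access.
		for (de::AppendList<LogEntry>::const_iterator it = log.begin(); it != log.end(); ++it)
			std::printf("%d %c\n", (*it).line, (*it).code);
	}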
diff --git a/framework/delibs/decpp/deDefs.hpp b/framework/delibs/decpp/deDefs.hpp
index 73b84fb..13ac89e 100644
--- a/framework/delibs/decpp/deDefs.hpp
+++ b/framework/delibs/decpp/deDefs.hpp
@@ -32,22 +32,22 @@
 namespace de
 {
 
-//!< Compute absolute value of x.
+//! Compute absolute value of x.
 template<typename T> inline T		abs			(T x)			{ return x < T(0) ? -x : x; }
 
-//!< Get minimum of x and y.
+//! Get minimum of x and y.
 template<typename T> inline T		min			(T x, T y)		{ return x <= y ? x : y; }
 
-//!< Get maximum of x and y.
+//! Get maximum of x and y.
 template<typename T> inline T		max			(T x, T y)		{ return x >= y ? x : y; }
 
-//!< Clamp x in range a <= x <= b.
+//! Clamp x in range a <= x <= b.
 template<typename T> inline T		clamp		(T x, T a, T b)	{ DE_ASSERT(a <= b); return x < a ? a : (x > b ? b : x); }
 
-//!< Test if x is in bounds a <= x < b.
+//! Test if x is in bounds a <= x < b.
 template<typename T> inline bool	inBounds	(T x, T a, T b)	{ return a <= x && x < b; }
 
-//!< Test if x is in range a <= x <= b.
+//! Test if x is in range a <= x <= b.
 template<typename T> inline bool	inRange		(T x, T a, T b)	{ return a <= x && x <= b; }
 
 //! Helper for DE_CHECK() macros.
@@ -71,6 +71,14 @@
 	inline void operator() (T* ptr) const { delete[] ptr; }
 };
 
+//! Get required memory alignment for type
+template<typename T>
+size_t alignOf (void)
+{
+	struct PaddingCheck { deUint8 b; T t; };
+	return (size_t)DE_OFFSET_OF(PaddingCheck, t);
+}
+
 } // de
 
 /*--------------------------------------------------------------------*//*!
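de::alignOf<T>() derives T's alignment from the padding the compiler inserts after a single leading byte; it is what the new AppendList uses when computing the alignment of its block storage. A small hedged check (the exact values assume a typical ABI and are not guaranteed by the patch):

	#include "deDefs.hpp"
	#include <cassert>

	static void alignOfExample (void)
	{
		assert(de::alignOf<deUint8>()  == 1);
		assert(de::alignOf<deUint32>() == 4);   // holds on common ABIs; strictly implementation-defined

		struct Texel { deUint8 tag; deUint32 value; };
		assert(de::alignOf<Texel>() == de::alignOf<deUint32>());    // a struct aligns to its strictest member
	}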
diff --git a/framework/delibs/dethread/deAtomic.h b/framework/delibs/dethread/deAtomic.h
index 74b3d51..a0cd786 100644
--- a/framework/delibs/dethread/deAtomic.h
+++ b/framework/delibs/dethread/deAtomic.h
@@ -32,39 +32,59 @@
 DE_BEGIN_EXTERN_C
 
 /*--------------------------------------------------------------------*//*!
- * \brief Atomic increment and fetch.
+ * \brief Atomic increment and fetch 32-bit signed integer.
  * \param dstAddr	Destination address.
  * \return Incremented value.
  *//*--------------------------------------------------------------------*/
-DE_INLINE deInt32 deAtomicIncrement32 (deInt32 volatile* dstAddr)
+DE_INLINE deInt32 deAtomicIncrementInt32 (volatile deInt32* dstAddr)
 {
 #if (DE_COMPILER == DE_COMPILER_MSC)
 	return _InterlockedIncrement((long volatile*)dstAddr);
 #elif (DE_COMPILER == DE_COMPILER_GCC) || (DE_COMPILER == DE_COMPILER_CLANG)
 	return __sync_add_and_fetch(dstAddr, 1);
 #else
-#	error "Implement deAtomicIncrement32()"
+#	error "Implement deAtomicIncrementInt32()"
 #endif
 }
 
 /*--------------------------------------------------------------------*//*!
- * \brief Atomic decrement and fetch.
+ * \brief Atomic increment and fetch 32-bit unsigned integer.
+ * \param dstAddr	Destination address.
+ * \return Incremented value.
+ *//*--------------------------------------------------------------------*/
+DE_INLINE deUint32 deAtomicIncrementUint32 (volatile deUint32* dstAddr)
+{
+	return deAtomicIncrementInt32((deInt32 volatile*)dstAddr);
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Atomic decrement and fetch 32-bit signed integer.
  * \param dstAddr	Destination address.
  * \return Decremented value.
  *//*--------------------------------------------------------------------*/
-DE_INLINE deInt32 deAtomicDecrement32 (deInt32 volatile* dstAddr)
+DE_INLINE deInt32 deAtomicDecrementInt32 (volatile deInt32* dstAddr)
 {
 #if (DE_COMPILER == DE_COMPILER_MSC)
-	return _InterlockedDecrement((long volatile*)dstAddr);
+	return _InterlockedDecrement((volatile long*)dstAddr);
 #elif (DE_COMPILER == DE_COMPILER_GCC) || (DE_COMPILER == DE_COMPILER_CLANG)
 	return __sync_sub_and_fetch(dstAddr, 1);
 #else
-#	error "Implement deAtomicDecrement32()"
+#	error "Implement deAtomicDecrementInt32()"
 #endif
 }
 
 /*--------------------------------------------------------------------*//*!
- * \brief Atomic compare and exchange (CAS).
+ * \brief Atomic decrement and fetch 32-bit unsigned integer.
+ * \param dstAddr	Destination address.
+ * \return Decremented value.
+ *//*--------------------------------------------------------------------*/
+DE_INLINE deUint32 deAtomicDecrementUint32 (volatile deUint32* dstAddr)
+{
+	return deAtomicDecrementInt32((volatile deInt32*)dstAddr);
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Atomic compare and exchange (CAS) 32-bit value.
  * \param dstAddr	Destination address.
  * \param compare	Old value.
  * \param exchange	New value.
@@ -77,10 +97,10 @@
  * If CAS succeeds, compare value is returned. Otherwise value stored in
  * dstAddr is returned.
  *//*--------------------------------------------------------------------*/
-DE_INLINE deUint32 deAtomicCompareExchange32 (deUint32 volatile* dstAddr, deUint32 compare, deUint32 exchange)
+DE_INLINE deUint32 deAtomicCompareExchangeUint32 (volatile deUint32* dstAddr, deUint32 compare, deUint32 exchange)
 {
 #if (DE_COMPILER == DE_COMPILER_MSC)
-	return _InterlockedCompareExchange((long volatile*)dstAddr, exchange, compare);
+	return _InterlockedCompareExchange((volatile long*)dstAddr, exchange, compare);
 #elif (DE_COMPILER == DE_COMPILER_GCC) || (DE_COMPILER == DE_COMPILER_CLANG)
 	return __sync_val_compare_and_swap(dstAddr, compare, exchange);
 #else
@@ -88,6 +108,149 @@
 #endif
 }
 
+/* Deprecated names */
+#define deAtomicIncrement32			deAtomicIncrementInt32
+#define deAtomicDecrement32			deAtomicDecrementInt32
+#define deAtomicCompareExchange32	deAtomicCompareExchangeUint32
+
+#if (DE_PTR_SIZE == 8)
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Atomic increment and fetch 64-bit signed integer.
+ * \param dstAddr	Destination address.
+ * \return Incremented value.
+ *//*--------------------------------------------------------------------*/
+DE_INLINE deInt64 deAtomicIncrementInt64 (volatile deInt64* dstAddr)
+{
+#if (DE_COMPILER == DE_COMPILER_MSC)
+	return _InterlockedIncrement64((volatile long long*)dstAddr);
+#elif (DE_COMPILER == DE_COMPILER_GCC) || (DE_COMPILER == DE_COMPILER_CLANG)
+	return __sync_add_and_fetch(dstAddr, 1);
+#else
+#	error "Implement deAtomicIncrementInt64()"
+#endif
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Atomic increment and fetch 64-bit unsigned integer.
+ * \param dstAddr	Destination address.
+ * \return Incremented value.
+ *//*--------------------------------------------------------------------*/
+DE_INLINE deUint64 deAtomicIncrementUint64 (volatile deUint64* dstAddr)
+{
+	return deAtomicIncrementInt64((volatile deInt64*)dstAddr);
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Atomic decrement and fetch 64-bit signed integer.
+ * \param dstAddr	Destination address.
+ * \return Decremented value.
+ *//*--------------------------------------------------------------------*/
+DE_INLINE deInt64 deAtomicDecrementInt64 (volatile deInt64* dstAddr)
+{
+#if (DE_COMPILER == DE_COMPILER_MSC)
+	return _InterlockedDecrement64((volatile long long*)dstAddr);
+#elif (DE_COMPILER == DE_COMPILER_GCC) || (DE_COMPILER == DE_COMPILER_CLANG)
+	return __sync_sub_and_fetch(dstAddr, 1);
+#else
+#	error "Implement deAtomicDecrementInt64()"
+#endif
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Atomic decrement and fetch 64-bit unsigned integer.
+ * \param dstAddr	Destination address.
+ * \return Decremented value.
+ *//*--------------------------------------------------------------------*/
+DE_INLINE deUint64 deAtomicDecrementUint64 (volatile deUint64* dstAddr)
+{
+	return deAtomicDecrementInt64((volatile deInt64*)dstAddr);
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Atomic compare and exchange (CAS) 64-bit value.
+ * \param dstAddr	Destination address.
+ * \param compare	Old value.
+ * \param exchange	New value.
+ * \return			compare value if CAS passes, *dstAddr value otherwise
+ *
+ * Performs standard Compare-And-Swap with 64b data. Dst value is compared
+ * to compare value and if that comparison passes, value is replaced with
+ * exchange value.
+ *
+ * If CAS succeeds, compare value is returned. Otherwise value stored in
+ * dstAddr is returned.
+ *//*--------------------------------------------------------------------*/
+DE_INLINE deUint64 deAtomicCompareExchangeUint64 (volatile deUint64* dstAddr, deUint64 compare, deUint64 exchange)
+{
+#if (DE_COMPILER == DE_COMPILER_MSC)
+	return _InterlockedCompareExchange64((volatile long long*)dstAddr, exchange, compare);
+#elif (DE_COMPILER == DE_COMPILER_GCC) || (DE_COMPILER == DE_COMPILER_CLANG)
+	return __sync_val_compare_and_swap(dstAddr, compare, exchange);
+#else
+#	error "Implement deAtomicCompareExchangeUint64()"
+#endif
+}
+
+#endif /* (DE_PTR_SIZE == 8) */
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Atomic increment and fetch size_t.
+ * \param size	Destination address.
+ * \return Incremented value.
+ *//*--------------------------------------------------------------------*/
+DE_INLINE size_t deAtomicIncrementUSize (volatile size_t* size)
+{
+#if (DE_PTR_SIZE == 8)
+	return deAtomicIncrementUint64((volatile deUint64*)size);
+#elif (DE_PTR_SIZE == 4)
+	return deAtomicIncrementUint32((volatile deUint32*)size);
+#else
+#	error "Invalid DE_PTR_SIZE value"
+#endif
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Atomic decrement and fetch size_t.
+ * \param size	Destination address.
+ * \return Decremented value.
+ *//*--------------------------------------------------------------------*/
+DE_INLINE size_t deAtomicDecrementUSize (volatile size_t* size)
+{
+#if (DE_PTR_SIZE == 8)
+	return deAtomicDecrementUint64((volatile deUint64*)size);
+#elif (DE_PTR_SIZE == 4)
+	return deAtomicDecrementUint32((volatile deUint32*)size);
+#else
+#	error "Invalid DE_PTR_SIZE value"
+#endif
+}
+
+/*--------------------------------------------------------------------*//*!
+ * \brief Atomic compare and exchange (CAS) pointer.
+ * \param dstAddr	Destination address.
+ * \param compare	Old value.
+ * \param exchange	New value.
+ * \return			compare value if CAS passes, *dstAddr value otherwise
+ *
+ * Performs standard Compare-And-Swap with pointer value. Dst value is compared
+ * to compare value and if that comparison passes, value is replaced with
+ * exchange value.
+ *
+ * If CAS succeeds, compare value is returned. Otherwise value stored in
+ * dstAddr is returned.
+ *//*--------------------------------------------------------------------*/
+DE_INLINE void* deAtomicCompareExchangePtr (void* volatile* dstAddr, void* compare, void* exchange)
+{
+#if (DE_PTR_SIZE == 8)
+	return (void*)deAtomicCompareExchangeUint64((volatile deUint64*)dstAddr, (deUint64)compare, (deUint64)exchange);
+#elif (DE_PTR_SIZE == 4)
+	return (void*)deAtomicCompareExchangeUint32((volatile deUint32*)dstAddr, (deUint32)compare, (deUint32)exchange);
+#else
+#	error "Invalid DE_PTR_SIZE value"
+#endif
+}
+
 /*--------------------------------------------------------------------*//*!
  * \brief Issue hardware memory read-write fence.
  *//*--------------------------------------------------------------------*/
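The new size_t and pointer helpers simply dispatch to the 32- or 64-bit primitives according to DE_PTR_SIZE; deAtomicIncrementUSize() is what AppendList uses for its lock-free slot allocation. A hedged sketch of typical use (a shared counter plus a CAS-based list push; the names and types below are examples, not from the patch):

	#include "deAtomic.h"

	struct Node
	{
		Node*	next;
		int		value;
	};

	static volatile size_t	s_numPushed	= 0;
	static Node* volatile	s_head		= DE_NULL;

	static void push (Node* node)
	{
		for (;;)
		{
			Node* const oldHead = s_head;

			node->next = oldHead;

			// deAtomicCompareExchangePtr() returns the compare value when the swap succeeds.
			if (deAtomicCompareExchangePtr((void* volatile*)&s_head, oldHead, node) == oldHead)
				break;
		}

		deAtomicIncrementUSize(&s_numPushed);
	}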
diff --git a/framework/delibs/dethread/deThreadTest.c b/framework/delibs/dethread/deThreadTest.c
index dfd3b27..2476908 100644
--- a/framework/delibs/dethread/deThreadTest.c
+++ b/framework/delibs/dethread/deThreadTest.c
@@ -471,16 +471,55 @@
 {
 	/* Single-threaded tests. */
 	{
-		volatile int a = 11;
-		DE_TEST_ASSERT(deAtomicIncrement32(&a) == 12);
+		volatile deInt32 a = 11;
+		DE_TEST_ASSERT(deAtomicIncrementInt32(&a) == 12);
 		DE_TEST_ASSERT(a == 12);
-		DE_TEST_ASSERT(deAtomicIncrement32(&a) == 13);
+		DE_TEST_ASSERT(deAtomicIncrementInt32(&a) == 13);
 		DE_TEST_ASSERT(a == 13);
 
-		DE_TEST_ASSERT(deAtomicDecrement32(&a) == 12);
+		a = -2;
+		DE_TEST_ASSERT(deAtomicIncrementInt32(&a) == -1);
+		DE_TEST_ASSERT(a == -1);
+		DE_TEST_ASSERT(deAtomicIncrementInt32(&a) == 0);
+		DE_TEST_ASSERT(a == 0);
+
+		a = 11;
+		DE_TEST_ASSERT(deAtomicDecrementInt32(&a) == 10);
+		DE_TEST_ASSERT(a == 10);
+		DE_TEST_ASSERT(deAtomicDecrementInt32(&a) == 9);
+		DE_TEST_ASSERT(a == 9);
+
+		a = 0;
+		DE_TEST_ASSERT(deAtomicDecrementInt32(&a) == -1);
+		DE_TEST_ASSERT(a == -1);
+		DE_TEST_ASSERT(deAtomicDecrementInt32(&a) == -2);
+		DE_TEST_ASSERT(a == -2);
+
+		a = 0x7fffffff;
+		DE_TEST_ASSERT(deAtomicIncrementInt32(&a) == (int)0x80000000);
+		DE_TEST_ASSERT(a == (int)0x80000000);
+		DE_TEST_ASSERT(deAtomicDecrementInt32(&a) == (int)0x7fffffff);
+		DE_TEST_ASSERT(a == 0x7fffffff);
+	}
+
+	{
+		volatile deUint32 a = 11;
+		DE_TEST_ASSERT(deAtomicIncrementUint32(&a) == 12);
 		DE_TEST_ASSERT(a == 12);
-		DE_TEST_ASSERT(deAtomicDecrement32(&a) == 11);
-		DE_TEST_ASSERT(a == 11);
+		DE_TEST_ASSERT(deAtomicIncrementUint32(&a) == 13);
+		DE_TEST_ASSERT(a == 13);
+
+		a = 0x7fffffff;
+		DE_TEST_ASSERT(deAtomicIncrementUint32(&a) == 0x80000000);
+		DE_TEST_ASSERT(a == 0x80000000);
+		DE_TEST_ASSERT(deAtomicDecrementUint32(&a) == 0x7fffffff);
+		DE_TEST_ASSERT(a == 0x7fffffff);
+
+		a = 0xfffffffe;
+		DE_TEST_ASSERT(deAtomicIncrementUint32(&a) == 0xffffffff);
+		DE_TEST_ASSERT(a == 0xffffffff);
+		DE_TEST_ASSERT(deAtomicDecrementUint32(&a) == 0xfffffffe);
+		DE_TEST_ASSERT(a == 0xfffffffe);
 	}
 
 	{
@@ -501,6 +540,40 @@
 		DE_TEST_ASSERT(p == 8);
 	}
 
+#if (DE_PTR_SIZE == 8)
+	{
+		volatile deInt64 a = 11;
+		DE_TEST_ASSERT(deAtomicIncrementInt64(&a) == 12);
+		DE_TEST_ASSERT(a == 12);
+		DE_TEST_ASSERT(deAtomicIncrementInt64(&a) == 13);
+		DE_TEST_ASSERT(a == 13);
+
+		a = -2;
+		DE_TEST_ASSERT(deAtomicIncrementInt64(&a) == -1);
+		DE_TEST_ASSERT(a == -1);
+		DE_TEST_ASSERT(deAtomicIncrementInt64(&a) == 0);
+		DE_TEST_ASSERT(a == 0);
+
+		a = 11;
+		DE_TEST_ASSERT(deAtomicDecrementInt64(&a) == 10);
+		DE_TEST_ASSERT(a == 10);
+		DE_TEST_ASSERT(deAtomicDecrementInt64(&a) == 9);
+		DE_TEST_ASSERT(a == 9);
+
+		a = 0;
+		DE_TEST_ASSERT(deAtomicDecrementInt64(&a) == -1);
+		DE_TEST_ASSERT(a == -1);
+		DE_TEST_ASSERT(deAtomicDecrementInt64(&a) == -2);
+		DE_TEST_ASSERT(a == -2);
+
+		a = (deInt64)((1ull << 63) - 1ull);
+		DE_TEST_ASSERT(deAtomicIncrementInt64(&a) == (deInt64)(1ull << 63));
+		DE_TEST_ASSERT(a == (deInt64)(1ull << 63));
+		DE_TEST_ASSERT(deAtomicDecrementInt64(&a) == (deInt64)((1ull << 63) - 1));
+		DE_TEST_ASSERT(a == (deInt64)((1ull << 63) - 1));
+	}
+#endif /* (DE_PTR_SIZE == 8) */
+
 	/* \todo [2012-10-26 pyry] Implement multi-threaded tests. */
 }
 
diff --git a/framework/delibs/deutil/deClock.c b/framework/delibs/deutil/deClock.c
index 5bfc1d5..75a8266 100644
--- a/framework/delibs/deutil/deClock.c
+++ b/framework/delibs/deutil/deClock.c
@@ -43,8 +43,17 @@
 	QueryPerformanceFrequency(&freq);
 	DE_ASSERT(freq.LowPart != 0 || freq.HighPart != 0);
 	/* \todo [2010-03-26 kalle] consider adding a 32bit-friendly implementation */
-	DE_ASSERT(freq.QuadPart >= 1000000);
-	return count.QuadPart / (freq.QuadPart / 1000000);
+
+	if (count.QuadPart < MAXLONGLONG / 1000000)
+	{
+		DE_ASSERT(freq.QuadPart != 0);
+		return count.QuadPart * 1000000 / freq.QuadPart;
+	}
+	else
+	{
+		DE_ASSERT(freq.QuadPart >= 1000000);
+		return count.QuadPart / (freq.QuadPart / 1000000);
+	}
 
 #elif (DE_OS == DE_OS_UNIX) || (DE_OS == DE_OS_ANDROID)
 	struct timespec currTime;
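The Win32 path above converts performance-counter ticks to microseconds by multiplying before dividing whenever that cannot overflow, which preserves precision, and only falls back to dividing the frequency for very large tick counts. A simplified, unsigned restatement of that arithmetic (a hypothetical helper, not taken from the patch):

    deUint64 ticksToMicroseconds (deUint64 ticks, deUint64 ticksPerSecond)
    {
        if (ticks < (deUint64)-1 / 1000000)
            return ticks * 1000000 / ticksPerSecond;     /* precise: multiply first */
        else
            return ticks / (ticksPerSecond / 1000000);   /* overflow-safe fallback */
    }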
diff --git a/framework/egl/egluUtil.cpp b/framework/egl/egluUtil.cpp
index da126cc..a774f47 100644
--- a/framework/egl/egluUtil.cpp
+++ b/framework/egl/egluUtil.cpp
@@ -83,12 +83,12 @@
 	return de::contains(extensions.begin(), extensions.end(), str);
 }
 
-vector<string> getPlatformExtensions (const Library& egl)
+vector<string> getClientExtensions (const Library& egl)
 {
 	return getExtensions(egl, EGL_NO_DISPLAY);
 }
 
-vector<string> getClientExtensions (const Library& egl, EGLDisplay display)
+vector<string> getDisplayExtensions (const Library& egl, EGLDisplay display)
 {
 	DE_ASSERT(display != EGL_NO_DISPLAY);
 
@@ -236,7 +236,7 @@
 
 	if (supportsPlatformGetDisplay)
 	{
-		const vector<string> platformExts = getPlatformExtensions(egl);
+		const vector<string> platformExts = getClientExtensions(egl);
 		usePlatformExt = de::contains(platformExts.begin(), platformExts.end(), string("EGL_EXT_platform_base")) &&
 						 de::contains(platformExts.begin(), platformExts.end(), string(nativeDisplay.getPlatformExtensionName()));
 	}
@@ -289,7 +289,7 @@
 
 	if (supportsPlatformCreate)
 	{
-		const vector<string> platformExts = getPlatformExtensions(egl);
+		const vector<string> platformExts = getClientExtensions(egl);
 		usePlatformExt = de::contains(platformExts.begin(), platformExts.end(), string("EGL_EXT_platform_base")) &&
 						 de::contains(platformExts.begin(), platformExts.end(), string(nativeDisplay.getPlatformExtensionName()));
 	}
@@ -330,7 +330,7 @@
 
 	if (supportsPlatformCreate)
 	{
-		const vector<string> platformExts = getPlatformExtensions(egl);
+		const vector<string> platformExts = getClientExtensions(egl);
 		usePlatformExt = de::contains(platformExts.begin(), platformExts.end(), string("EGL_EXT_platform_base")) &&
 						 de::contains(platformExts.begin(), platformExts.end(), string(nativeDisplay.getPlatformExtensionName()));
 	}
diff --git a/framework/egl/egluUtil.hpp b/framework/egl/egluUtil.hpp
index 8c5dd3d..1fe322a 100644
--- a/framework/egl/egluUtil.hpp
+++ b/framework/egl/egluUtil.hpp
@@ -56,8 +56,8 @@
 
 Version							getVersion					(const eglw::Library& egl, eglw::EGLDisplay display);
 
-std::vector<std::string>		getPlatformExtensions		(const eglw::Library& egl);
-std::vector<std::string>		getClientExtensions			(const eglw::Library& egl, eglw::EGLDisplay display);
+std::vector<std::string>		getClientExtensions			(const eglw::Library& egl);
+std::vector<std::string>		getDisplayExtensions		(const eglw::Library& egl, eglw::EGLDisplay display);
 bool							hasExtension				(const eglw::Library& egl, eglw::EGLDisplay display, const std::string& extName);
 
 std::vector<eglw::EGLConfig>	getConfigs					(const eglw::Library& egl, eglw::EGLDisplay display);
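After this rename, callers query client extensions without a display and display extensions against an initialized display. A short usage sketch, assuming egl is a valid eglw::Library and display an initialized EGLDisplay:

    const std::vector<std::string> clientExts  = eglu::getClientExtensions(egl);            // EGL_NO_DISPLAY query
    const std::vector<std::string> displayExts = eglu::getDisplayExtensions(egl, display);  // per-display query
    const bool hasPlatformBase = de::contains(clientExts.begin(), clientExts.end(),
                                              std::string("EGL_EXT_platform_base"));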
diff --git a/framework/opengl/gluFboRenderContext.cpp b/framework/opengl/gluFboRenderContext.cpp
index b357cac..b663fbb 100644
--- a/framework/opengl/gluFboRenderContext.cpp
+++ b/framework/opengl/gluFboRenderContext.cpp
@@ -35,6 +35,32 @@
 namespace glu
 {
 
+static int getNumDepthBits (const tcu::TextureFormat& format)
+{
+	if (format.order == tcu::TextureFormat::DS)
+	{
+		const tcu::TextureFormat	depthOnlyFormat		= tcu::getEffectiveDepthStencilTextureFormat(format, tcu::Sampler::MODE_DEPTH);
+		return tcu::getTextureFormatBitDepth(depthOnlyFormat).x();
+	}
+	else if (format.order == tcu::TextureFormat::D)
+		return tcu::getTextureFormatBitDepth(format).x();
+	else
+		return 0;
+}
+
+static int getNumStencilBits (const tcu::TextureFormat& format)
+{
+	if (format.order == tcu::TextureFormat::DS)
+	{
+		const tcu::TextureFormat	stencilOnlyFormat		= tcu::getEffectiveDepthStencilTextureFormat(format, tcu::Sampler::MODE_STENCIL);
+		return tcu::getTextureFormatBitDepth(stencilOnlyFormat).x();
+	}
+	else if (format.order == tcu::TextureFormat::S)
+		return tcu::getTextureFormatBitDepth(format).x();
+	else
+		return 0;
+}
+
 static tcu::PixelFormat getPixelFormat (deUint32 colorFormat)
 {
 	const tcu::IVec4 bits = tcu::getTextureFormatBitDepth(glu::mapGLInternalFormat(colorFormat));
@@ -43,9 +69,10 @@
 
 static void getDepthStencilBits (deUint32 depthStencilFormat, int* depthBits, int* stencilBits)
 {
-	const tcu::IVec4 bits = tcu::getTextureFormatBitDepth(glu::mapGLInternalFormat(depthStencilFormat));
-	*depthBits		= bits[0];
-	*stencilBits	= bits[3];
+	const tcu::TextureFormat	combinedFormat	= glu::mapGLInternalFormat(depthStencilFormat);
+
+	*depthBits		= getNumDepthBits(combinedFormat);
+	*stencilBits	= getNumStencilBits(combinedFormat);
 }
 
 deUint32 chooseColorFormat (const glu::RenderConfig& config)
@@ -103,15 +130,17 @@
 
 	for (int fmtNdx = 0; fmtNdx < DE_LENGTH_OF_ARRAY(s_formats); fmtNdx++)
 	{
-		const deUint32		format	= s_formats[fmtNdx];
-		const tcu::IVec4	bits	= tcu::getTextureFormatBitDepth(glu::mapGLInternalFormat(format));
+		const deUint32				format			= s_formats[fmtNdx];
+		const tcu::TextureFormat	combinedFormat	= glu::mapGLInternalFormat(format);
+		const int					depthBits		= getNumDepthBits(combinedFormat);
+		const int					stencilBits		= getNumStencilBits(combinedFormat);
 
 		if (config.depthBits != glu::RenderConfig::DONT_CARE &&
-			config.depthBits != bits[0])
+			config.depthBits != depthBits)
 			continue;
 
 		if (config.stencilBits != glu::RenderConfig::DONT_CARE &&
-			config.stencilBits != bits[3])
+			config.stencilBits != stencilBits)
 			continue;
 
 		return format;
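The helpers above replace the old lookup that read depth from channel 0 and stencil from channel 3 of the packed bit-depth vector; they instead ask for the depth-only and stencil-only views, which is well defined for all combined formats. A sketch of the intended use, where GL_DEPTH24_STENCIL8 is just one example and the helpers are file-local to gluFboRenderContext.cpp:

    const tcu::TextureFormat fmt         = glu::mapGLInternalFormat(GL_DEPTH24_STENCIL8);
    const int                depthBits   = getNumDepthBits(fmt);    // expected to report 24
    const int                stencilBits = getNumStencilBits(fmt);  // expected to report 8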
diff --git a/framework/opengl/gluTexture.cpp b/framework/opengl/gluTexture.cpp
index d81bda1..bf6fdbc 100644
--- a/framework/opengl/gluTexture.cpp
+++ b/framework/opengl/gluTexture.cpp
@@ -136,7 +136,7 @@
 	const glw::Functions& gl = context.getFunctions();
 
 	if (!contextInfo.isCompressedTextureFormatSupported(m_format))
-		throw tcu::NotSupportedError("Compressed texture format not supported", "", __FILE__, __LINE__);
+		TCU_THROW(NotSupportedError, "Compressed texture format not supported");
 
 	gl.genTextures(1, &m_glTexture);
 	GLU_EXPECT_NO_ERROR(gl.getError(), "glGenTextures() failed");
diff --git a/framework/platform/CMakeLists.txt b/framework/platform/CMakeLists.txt
index f1ff95b..0bbc1f6 100644
--- a/framework/platform/CMakeLists.txt
+++ b/framework/platform/CMakeLists.txt
@@ -98,6 +98,10 @@
 endif ()
 
 add_library(tcutil-platform STATIC ${TCUTIL_PLATFORM_SRCS})
+
+# Add vkutil to the deps before tcutil so that it picks up the C++11 dependencies
+target_link_libraries(tcutil-platform vkutil)
+
 target_link_libraries(tcutil-platform tcutil ${TCUTIL_PLATFORM_LIBS})
 
 # Always link to glutil as some platforms such as Win32 always support GL
diff --git a/framework/platform/X11/tcuX11Platform.cpp b/framework/platform/X11/tcuX11Platform.cpp
index e3f9257..81c6712 100644
--- a/framework/platform/X11/tcuX11Platform.cpp
+++ b/framework/platform/X11/tcuX11Platform.cpp
@@ -25,7 +25,10 @@
 
 #include "deUniquePtr.hpp"
 #include "gluPlatform.hpp"
+#include "vkPlatform.hpp"
 #include "tcuX11.hpp"
+#include "tcuFunctionLibrary.hpp"
+#include "deMemory.h"
 
 #if defined (DEQP_SUPPORT_GLX)
 #	include "tcuX11GlxPlatform.hpp"
@@ -34,6 +37,7 @@
 #	include "tcuX11EglPlatform.hpp"
 #endif
 
+#include <sys/utsname.h>
 
 namespace tcu
 {
@@ -49,23 +53,67 @@
 	}
 };
 
+class VulkanLibrary : public vk::Library
+{
+public:
+	VulkanLibrary (void)
+		: m_library	("libvulkan-1.so")
+		, m_driver	(m_library)
+	{
+	}
+
+	const vk::PlatformInterface& getPlatformInterface (void) const
+	{
+		return m_driver;
+	}
+
+private:
+	const tcu::DynamicFunctionLibrary	m_library;
+	const vk::PlatformDriver			m_driver;
+};
+
+class X11VulkanPlatform : public vk::Platform
+{
+public:
+	vk::Library* createLibrary (void) const
+	{
+		return new VulkanLibrary();
+	}
+
+	void describePlatform (std::ostream& dst) const
+	{
+		utsname		sysInfo;
+
+		deMemset(&sysInfo, 0, sizeof(sysInfo));
+
+		if (uname(&sysInfo) != 0)
+			throw std::runtime_error("uname() failed");
+
+		dst << "OS: " << sysInfo.sysname << " " << sysInfo.release << " " << sysInfo.version << "\n";
+		dst << "CPU: " << sysInfo.machine << "\n";
+	}
+};
+
 class X11Platform : public tcu::Platform
 {
 public:
-							X11Platform		(void);
-	bool					processEvents	(void) { return !m_eventState.getQuitFlag(); }
-	const glu::Platform&	getGLPlatform	(void) const { return m_glPlatform; }
+							X11Platform			(void);
+	bool					processEvents		(void) { return !m_eventState.getQuitFlag(); }
+	const glu::Platform&	getGLPlatform		(void) const { return m_glPlatform; }
 
 #if defined (DEQP_SUPPORT_EGL)
-	const eglu::Platform&	getEGLPlatform	(void) const { return m_eglPlatform; }
+	const eglu::Platform&	getEGLPlatform		(void) const { return m_eglPlatform; }
 #endif // DEQP_SUPPORT_EGL
 
+	const vk::Platform&		getVulkanPlatform	(void) const { return m_vkPlatform; }
+
 private:
 	EventState				m_eventState;
 #if defined (DEQP_SUPPORT_EGL)
 	x11::egl::Platform		m_eglPlatform;
 #endif // DEQP_SUPPORT_EGL
 	X11GLPlatform			m_glPlatform;
+	X11VulkanPlatform		m_vkPlatform;
 };
 
 X11Platform::X11Platform (void)
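With the Vulkan platform hook in place, test code can obtain the platform-level entry points through it. A minimal sketch, assuming platform is a tcu::Platform reference and deUniquePtr.hpp provides de::UniquePtr:

    const de::UniquePtr<vk::Library>  library (platform.getVulkanPlatform().createLibrary());
    const vk::PlatformInterface&      vkp     = library->getPlatformInterface();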
diff --git a/framework/platform/android/tcuAndroidPlatform.cpp b/framework/platform/android/tcuAndroidPlatform.cpp
index eca3fe0..89c5c5c 100644
--- a/framework/platform/android/tcuAndroidPlatform.cpp
+++ b/framework/platform/android/tcuAndroidPlatform.cpp
@@ -22,6 +22,7 @@
  *//*--------------------------------------------------------------------*/
 
 #include "tcuAndroidPlatform.hpp"
+#include "tcuAndroidUtil.hpp"
 #include "gluRenderContext.hpp"
 #include "egluNativeDisplay.hpp"
 #include "egluNativeWindow.hpp"
@@ -29,6 +30,7 @@
 #include "egluUtil.hpp"
 #include "eglwLibrary.hpp"
 #include "eglwEnums.hpp"
+#include "tcuFunctionLibrary.hpp"
 
 // Assume no call translation is needed
 #include <android/native_window.h>
@@ -158,7 +160,6 @@
 	return createWindow(params, format);
 }
 
-
 eglu::NativeWindow* NativeWindowFactory::createWindow (const eglu::WindowParams& params, int32_t format) const
 {
 	Window* window = m_windowRegistry.tryAcquireWindow();
@@ -183,9 +184,31 @@
 	return new NativeDisplay();
 }
 
+// Vulkan
+
+class VulkanLibrary : public vk::Library
+{
+public:
+	VulkanLibrary (void)
+		: m_library	("libvulkan.so")
+		, m_driver	(m_library)
+	{
+	}
+
+	const vk::PlatformInterface& getPlatformInterface (void) const
+	{
+		return m_driver;
+	}
+
+private:
+	const tcu::DynamicFunctionLibrary	m_library;
+	const vk::PlatformDriver			m_driver;
+};
+
 // Platform
 
-Platform::Platform (void)
+Platform::Platform (NativeActivity& activity)
+	: m_activity(activity)
 {
 	m_nativeDisplayFactoryRegistry.registerFactory(new NativeDisplayFactory(m_windowRegistry));
 	m_contextFactoryRegistry.registerFactory(new eglu::GLContextFactory(m_nativeDisplayFactoryRegistry));
@@ -201,5 +224,15 @@
 	return true;
 }
 
+vk::Library* Platform::createLibrary (void) const
+{
+	return new VulkanLibrary();
+}
+
+void Platform::describePlatform (std::ostream& dst) const
+{
+	tcu::Android::describePlatform(m_activity.getNativeActivity(), dst);
+}
+
 } // Android
 } // tcu
diff --git a/framework/platform/android/tcuAndroidPlatform.hpp b/framework/platform/android/tcuAndroidPlatform.hpp
index d8463c3..63ea6a6 100644
--- a/framework/platform/android/tcuAndroidPlatform.hpp
+++ b/framework/platform/android/tcuAndroidPlatform.hpp
@@ -27,27 +27,34 @@
 #include "tcuPlatform.hpp"
 #include "egluPlatform.hpp"
 #include "gluPlatform.hpp"
+#include "vkPlatform.hpp"
 #include "tcuAndroidWindow.hpp"
+#include "tcuAndroidNativeActivity.hpp"
 
 namespace tcu
 {
 namespace Android
 {
 
-class Platform : public tcu::Platform, private eglu::Platform, private glu::Platform
+class Platform : public tcu::Platform, private eglu::Platform, private glu::Platform, private vk::Platform
 {
 public:
-									Platform			(void);
+									Platform			(NativeActivity& activity);
 	virtual							~Platform			(void);
 
 	virtual bool					processEvents		(void);
 
 	virtual const glu::Platform&	getGLPlatform		(void) const { return static_cast<const glu::Platform&>(*this);		}
 	virtual const eglu::Platform&	getEGLPlatform		(void) const { return static_cast<const eglu::Platform&>(*this);	}
+	virtual const vk::Platform&		getVulkanPlatform	(void) const { return static_cast<const vk::Platform&>(*this);		}
 
 	WindowRegistry&					getWindowRegistry	(void) { return m_windowRegistry; }
 
+	vk::Library*					createLibrary		(void) const;
+	void							describePlatform	(std::ostream& dst) const;
+
 private:
+	NativeActivity&					m_activity;
 	WindowRegistry					m_windowRegistry;
 };
 
diff --git a/framework/platform/android/tcuAndroidTestActivity.cpp b/framework/platform/android/tcuAndroidTestActivity.cpp
index 31b7ac1..b8cff3c 100644
--- a/framework/platform/android/tcuAndroidTestActivity.cpp
+++ b/framework/platform/android/tcuAndroidTestActivity.cpp
@@ -41,6 +41,7 @@
 TestThread::TestThread (NativeActivity& activity, const CommandLine& cmdLine)
 	: RenderThread	(activity)
 	, m_cmdLine		(cmdLine)
+	, m_platform	(activity)
 	, m_archive		(activity.getNativeActivity()->assetManager)
 	, m_log			(m_cmdLine.getLogFileName(), m_cmdLine.getLogFlags())
 	, m_app			(m_platform, m_archive, m_log, m_cmdLine)
diff --git a/framework/platform/android/tcuAndroidUtil.cpp b/framework/platform/android/tcuAndroidUtil.cpp
index 7f29c6a..3c09ce9 100644
--- a/framework/platform/android/tcuAndroidUtil.cpp
+++ b/framework/platform/android/tcuAndroidUtil.cpp
@@ -23,12 +23,35 @@
 
 #include "tcuAndroidUtil.hpp"
 
+#include <vector>
+
 namespace tcu
 {
 namespace Android
 {
 
 using std::string;
+using std::vector;
+
+void checkJNIException (JNIEnv* env)
+{
+	if (env->ExceptionCheck())
+	{
+		env->ExceptionDescribe();
+		env->ExceptionClear();
+		throw std::runtime_error("Got JNI exception");
+	}
+}
+
+string getJNIStringValue (JNIEnv* env, jstring jniStr)
+{
+	const char*		ptr		= env->GetStringUTFChars(jniStr, DE_NULL);
+	const string	str		= string(ptr);
+
+	env->ReleaseStringUTFChars(jniStr, ptr);
+
+	return str;
+}
 
 static string getIntentStringExtra (JNIEnv* env, jobject activity, const char* name)
 {
@@ -49,12 +72,7 @@
 	env->DeleteLocalRef(extraName);
 
 	if (extraStr)
-	{
-		const char* ptr = env->GetStringUTFChars(extraStr, DE_NULL);
-		string str = string(ptr);
-		env->ReleaseStringUTFChars(extraStr, ptr);
-		return str;
-	}
+		return getJNIStringValue(env, extraStr);
 	else
 		return string();
 }
@@ -92,5 +110,175 @@
 	}
 }
 
+template<typename Type>
+const char* getJNITypeStr (void);
+
+template<>
+const char* getJNITypeStr<int> (void)
+{
+	return "I";
+}
+
+template<>
+const char* getJNITypeStr<string> (void)
+{
+	return "Ljava/lang/String;";
+}
+
+template<>
+const char* getJNITypeStr<vector<string> > (void)
+{
+	return "[Ljava/lang/String;";
+}
+
+template<typename FieldType>
+FieldType getStaticFieldValue (JNIEnv* env, jclass cls, jfieldID fieldId);
+
+template<>
+int getStaticFieldValue<int> (JNIEnv* env, jclass cls, jfieldID fieldId)
+{
+	DE_ASSERT(cls && fieldId);
+	return env->GetStaticIntField(cls, fieldId);
+}
+
+template<>
+string getStaticFieldValue<string> (JNIEnv* env, jclass cls, jfieldID fieldId)
+{
+	const jstring	jniStr	= (jstring)env->GetStaticObjectField(cls, fieldId);
+
+	if (jniStr)
+		return getJNIStringValue(env, jniStr);
+	else
+		return string();
+}
+
+template<>
+vector<string> getStaticFieldValue<vector<string> > (JNIEnv* env, jclass cls, jfieldID fieldId)
+{
+	const jobjectArray	array		= (jobjectArray)env->GetStaticObjectField(cls, fieldId);
+	vector<string>		result;
+
+	checkJNIException(env);
+
+	if (array)
+	{
+		const int	numElements		= env->GetArrayLength(array);
+
+		for (int ndx = 0; ndx < numElements; ndx++)
+		{
+			const jstring	jniStr	= (jstring)env->GetObjectArrayElement(array, ndx);
+
+			checkJNIException(env);
+
+			if (jniStr)
+				result.push_back(getJNIStringValue(env, jniStr));
+		}
+	}
+
+	return result;
+}
+
+template<typename FieldType>
+FieldType getStaticField (JNIEnv* env, const char* className, const char* fieldName)
+{
+	const jclass	cls			= env->FindClass(className);
+	const jfieldID	fieldId		= cls ? env->GetStaticFieldID(cls, fieldName, getJNITypeStr<FieldType>()) : (jfieldID)0;
+
+	checkJNIException(env);
+
+	if (cls && fieldId)
+		return getStaticFieldValue<FieldType>(env, cls, fieldId);
+	else
+		return FieldType();
+}
+
+class ScopedJNIEnv
+{
+public:
+
+					ScopedJNIEnv	(JavaVM* vm);
+					~ScopedJNIEnv	(void);
+
+	JavaVM*			getVM			(void) const { return m_vm;		}
+	JNIEnv*			getEnv			(void) const { return m_env;	}
+
+private:
+	JavaVM* const	m_vm;
+	JNIEnv*			m_env;
+	bool			m_detach;
+};
+
+ScopedJNIEnv::ScopedJNIEnv (JavaVM* vm)
+	: m_vm		(vm)
+	, m_env		(DE_NULL)
+	, m_detach	(false)
+{
+	const int	getEnvRes	= m_vm->GetEnv((void**)&m_env, JNI_VERSION_1_6);
+
+	if (getEnvRes == JNI_EDETACHED)
+	{
+		if (m_vm->AttachCurrentThread(&m_env, DE_NULL) != JNI_OK)
+			throw std::runtime_error("JNI AttachCurrentThread() failed");
+
+		m_detach = true;
+	}
+	else if (getEnvRes != JNI_OK)
+		throw std::runtime_error("JNI GetEnv() failed");
+
+	DE_ASSERT(m_env);
+}
+
+ScopedJNIEnv::~ScopedJNIEnv (void)
+{
+	if (m_detach)
+		m_vm->DetachCurrentThread();
+}
+
+void describePlatform (ANativeActivity* activity, std::ostream& dst)
+{
+	const ScopedJNIEnv	env				(activity->vm);
+	const char* const	buildClass		= "android/os/Build";
+	const char* const	versionClass	= "android/os/Build$VERSION";
+
+	static const struct
+	{
+		const char*		classPath;
+		const char*		className;
+		const char*		fieldName;
+	} s_stringFields[] =
+	{
+		{ buildClass,	"Build",			"BOARD"			},
+		{ buildClass,	"Build",			"BRAND"			},
+		{ buildClass,	"Build",			"DEVICE"		},
+		{ buildClass,	"Build",			"DISPLAY"		},
+		{ buildClass,	"Build",			"FINGERPRINT"	},
+		{ buildClass,	"Build",			"HARDWARE"		},
+		{ buildClass,	"Build",			"MANUFACTURER"	},
+		{ buildClass,	"Build",			"MODEL"			},
+		{ buildClass,	"Build",			"PRODUCT"		},
+		{ buildClass,	"Build",			"TAGS"			},
+		{ buildClass,	"Build",			"TYPE"			},
+		{ versionClass,	"Build.VERSION",	"RELEASE"		},
+	};
+
+	for (int ndx = 0; ndx < DE_LENGTH_OF_ARRAY(s_stringFields); ndx++)
+		dst << s_stringFields[ndx].className << "." << s_stringFields[ndx].fieldName
+			<< ": " << getStaticField<string>(env.getEnv(), s_stringFields[ndx].classPath, s_stringFields[ndx].fieldName)
+			<< "\n";
+
+	dst << "Build.VERSION.SDK_INT: " << getStaticField<int>(env.getEnv(), versionClass, "SDK_INT") << "\n";
+
+	{
+		const vector<string>	supportedAbis	= getStaticField<vector<string> >(env.getEnv(), buildClass, "SUPPORTED_ABIS");
+
+		dst << "Build.SUPPORTED_ABIS: ";
+
+		for (size_t ndx = 0; ndx < supportedAbis.size(); ndx++)
+			dst << (ndx != 0 ? ", " : "") << supportedAbis[ndx];
+
+		dst << "\n";
+	}
+}
+
 } // Android
 } // tcu
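The getStaticField<> helper above wraps the FindClass / GetStaticFieldID / GetStatic*Field sequence and turns JNI errors into exceptions. A single-field usage sketch, assuming env is a JNIEnv* obtained through ScopedJNIEnv:

    const std::string model = getStaticField<std::string>(env, "android/os/Build", "MODEL");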
diff --git a/framework/platform/android/tcuAndroidUtil.hpp b/framework/platform/android/tcuAndroidUtil.hpp
index a625433..254e890 100644
--- a/framework/platform/android/tcuAndroidUtil.hpp
+++ b/framework/platform/android/tcuAndroidUtil.hpp
@@ -27,6 +27,7 @@
 #include "tcuCommandLine.hpp"
 
 #include <string>
+#include <ostream>
 
 #include <android/native_activity.h>
 
@@ -49,6 +50,8 @@
 
 ScreenOrientation	mapScreenRotation			(ScreenRotation rotation);
 
+void				describePlatform			(ANativeActivity* activity, std::ostream& dst);
+
 } // Android
 } // tcu
 
diff --git a/framework/platform/null/tcuNullPlatform.cpp b/framework/platform/null/tcuNullPlatform.cpp
index ea790e5..e0721ec 100644
--- a/framework/platform/null/tcuNullPlatform.cpp
+++ b/framework/platform/null/tcuNullPlatform.cpp
@@ -26,6 +26,7 @@
 #include "tcuNullRenderContext.hpp"
 #include "egluNativeDisplay.hpp"
 #include "eglwLibrary.hpp"
+#include "vkNullDriver.hpp"
 
 namespace tcu
 {
@@ -74,6 +75,11 @@
 {
 }
 
+vk::Library* Platform::createLibrary (void) const
+{
+	return vk::createNullDriver();
+}
+
 } // null
 } // tcu
 
diff --git a/framework/platform/null/tcuNullPlatform.hpp b/framework/platform/null/tcuNullPlatform.hpp
index 81ff514..612edf2 100644
--- a/framework/platform/null/tcuNullPlatform.hpp
+++ b/framework/platform/null/tcuNullPlatform.hpp
@@ -27,20 +27,26 @@
 #include "tcuPlatform.hpp"
 #include "gluPlatform.hpp"
 #include "egluPlatform.hpp"
+#include "vkPlatform.hpp"
 
 namespace tcu
 {
 namespace null
 {
 
-class Platform : public tcu::Platform, private glu::Platform, private eglu::Platform
+class Platform : public tcu::Platform, private glu::Platform, private eglu::Platform, private vk::Platform
 {
 public:
-									Platform		(void);
-	virtual							~Platform		(void);
+									Platform			(void);
+	virtual							~Platform			(void);
 
-	virtual const glu::Platform&	getGLPlatform	(void) const { return static_cast<const glu::Platform&>(*this); }
-	virtual const eglu::Platform&	getEGLPlatform	(void) const { return static_cast<const eglu::Platform&>(*this); }
+	virtual const glu::Platform&	getGLPlatform		(void) const { return static_cast<const glu::Platform&>(*this); }
+	virtual const eglu::Platform&	getEGLPlatform		(void) const { return static_cast<const eglu::Platform&>(*this); }
+	virtual const vk::Platform&		getVulkanPlatform	(void) const { return static_cast<const vk::Platform&>(*this);	}
+
+private:
+	virtual vk::Library*			createLibrary		(void) const;
+
 };
 
 } // null
diff --git a/framework/platform/null/tcuNullRenderContext.cpp b/framework/platform/null/tcuNullRenderContext.cpp
index b91d2ee..61b6640 100644
--- a/framework/platform/null/tcuNullRenderContext.cpp
+++ b/framework/platform/null/tcuNullRenderContext.cpp
@@ -361,7 +361,7 @@
 			break;
 
 		case GL_COMPRESSED_TEXTURE_FORMATS:
-			deMemcpy(params, &ctx->compressedTextureList[0], (int)ctx->compressedTextureList.size());
+			deMemcpy(params, &ctx->compressedTextureList[0], ctx->compressedTextureList.size()*sizeof(deUint32));
 			break;
 
 		case GL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS:
diff --git a/framework/platform/win32/tcuWin32Platform.cpp b/framework/platform/win32/tcuWin32Platform.cpp
index dc509a5..3b7257f 100644
--- a/framework/platform/win32/tcuWin32Platform.cpp
+++ b/framework/platform/win32/tcuWin32Platform.cpp
@@ -21,8 +21,16 @@
  * \brief Win32 platform port.
  *//*--------------------------------------------------------------------*/
 
+// \todo [2016-01-22 pyry] GetVersionEx() used by getOSInfo() is deprecated.
+//						   Find a way to get version info without using deprecated APIs.
+#pragma warning(disable : 4996)
+
 #include "tcuWin32Platform.hpp"
 #include "tcuWGLContextFactory.hpp"
+#include "tcuFunctionLibrary.hpp"
+#include "tcuFormatUtil.hpp"
+
+#include "deMemory.h"
 
 #if defined(DEQP_SUPPORT_EGL)
 #	include "tcuWin32EGLNativeDisplayFactory.hpp"
@@ -32,6 +40,25 @@
 namespace tcu
 {
 
+class VulkanLibrary : public vk::Library
+{
+public:
+	VulkanLibrary (void)
+		: m_library	("vulkan-1.dll")
+		, m_driver	(m_library)
+	{
+	}
+
+	const vk::PlatformInterface& getPlatformInterface (void) const
+	{
+		return m_driver;
+	}
+
+private:
+	const tcu::DynamicFunctionLibrary	m_library;
+	const vk::PlatformDriver			m_driver;
+};
+
 Win32Platform::Win32Platform (void)
 	: m_instance(GetModuleHandle(NULL))
 {
@@ -86,6 +113,89 @@
 	return true;
 }
 
+vk::Library* Win32Platform::createLibrary (void) const
+{
+	return new VulkanLibrary();
+}
+
+const char* getProductTypeName (WORD productType)
+{
+	switch (productType)
+	{
+		case VER_NT_DOMAIN_CONTROLLER:	return "Windows Server (domain controller)";
+		case VER_NT_SERVER:				return "Windows Server";
+		case VER_NT_WORKSTATION:		return "Windows NT";
+		default:						return DE_NULL;
+	}
+}
+
+static void getOSInfo (std::ostream& dst)
+{
+	OSVERSIONINFOEX	osInfo;
+
+	deMemset(&osInfo, 0, sizeof(osInfo));
+	osInfo.dwOSVersionInfoSize = (DWORD)sizeof(osInfo);
+
+	GetVersionEx((OSVERSIONINFO*)&osInfo);
+
+	{
+		const char* const	productName	= getProductTypeName(osInfo.wProductType);
+
+		if (productName)
+			dst << productName;
+		else
+			dst << "unknown product " << tcu::toHex(osInfo.wProductType);
+	}
+
+	dst << " " << osInfo.dwMajorVersion << "." << osInfo.dwMinorVersion
+		<< ", service pack " << osInfo.wServicePackMajor << "." << osInfo.wServicePackMinor
+		<< ", build " << osInfo.dwBuildNumber;
+}
+
+const char* getProcessorArchitectureName (WORD arch)
+{
+	switch (arch)
+	{
+		case PROCESSOR_ARCHITECTURE_AMD64:		return "AMD64";
+		case PROCESSOR_ARCHITECTURE_ARM:		return "ARM";
+		case PROCESSOR_ARCHITECTURE_IA64:		return "IA64";
+		case PROCESSOR_ARCHITECTURE_INTEL:		return "INTEL";
+		case PROCESSOR_ARCHITECTURE_UNKNOWN:	return "UNKNOWN";
+		default:								return DE_NULL;
+	}
+}
+
+static void getProcessorInfo (std::ostream& dst)
+{
+	SYSTEM_INFO	sysInfo;
+
+	deMemset(&sysInfo, 0, sizeof(sysInfo));
+	GetSystemInfo(&sysInfo);
+
+	dst << "arch ";
+	{
+		const char* const	archName	= getProcessorArchitectureName(sysInfo.wProcessorArchitecture);
+
+		if (archName)
+			dst << archName;
+		else
+			dst << tcu::toHex(sysInfo.wProcessorArchitecture);
+	}
+
+	dst << ", level " << tcu::toHex(sysInfo.wProcessorLevel) << ", revision " << tcu::toHex(sysInfo.wProcessorRevision);
+}
+
+void Win32Platform::describePlatform (std::ostream& dst) const
+{
+	dst << "OS: ";
+	getOSInfo(dst);
+	dst << "\n";
+
+	dst << "CPU: ";
+	getProcessorInfo(dst);
+	dst << "\n";
+}
+
 } // tcu
 
 // Create platform
diff --git a/framework/platform/win32/tcuWin32Platform.hpp b/framework/platform/win32/tcuWin32Platform.hpp
index b8f2437..ba62105 100644
--- a/framework/platform/win32/tcuWin32Platform.hpp
+++ b/framework/platform/win32/tcuWin32Platform.hpp
@@ -26,6 +26,7 @@
 #include "tcuDefs.hpp"
 #include "tcuPlatform.hpp"
 #include "gluPlatform.hpp"
+#include "vkPlatform.hpp"
 #include "tcuWin32API.h"
 
 #if defined(DEQP_SUPPORT_EGL)
@@ -37,7 +38,7 @@
 namespace tcu
 {
 
-class Win32Platform : public tcu::Platform, private glu::Platform
+class Win32Platform : public tcu::Platform, private glu::Platform, private vk::Platform
 #if defined(DEQP_SUPPORT_EGL)
 	, private eglu::Platform
 #endif
@@ -54,7 +55,12 @@
 	const eglu::Platform&	getEGLPlatform		(void) const { return static_cast<const eglu::Platform&>(*this);	}
 #endif
 
+	const vk::Platform&		getVulkanPlatform	(void) const { return static_cast<const vk::Platform&>(*this);		}
+
 private:
+	vk::Library*			createLibrary		(void) const;
+	void					describePlatform	(std::ostream& dst) const;
+
 	HINSTANCE				m_instance;
 };
 
diff --git a/framework/qphelper/qpTestLog.c b/framework/qphelper/qpTestLog.c
index 5bd6301..77e182a 100644
--- a/framework/qphelper/qpTestLog.c
+++ b/framework/qphelper/qpTestLog.c
@@ -1024,6 +1024,7 @@
 deBool qpTestLog_writeShader (qpTestLog* log, qpShaderType type, const char* source, deBool compileOk, const char* infoLog)
 {
 	const char*		tagName				= QP_LOOKUP_STRING(s_qpShaderTypeMap, type);
+	const char*		sourceStr			= ((log->flags & QP_TEST_LOG_EXCLUDE_SHADER_SOURCES) == 0 || !compileOk) ? source : "";
 	int				numShaderAttribs	= 0;
 	qpXmlAttribute	shaderAttribs[4];
 
@@ -1034,7 +1035,7 @@
 	shaderAttribs[numShaderAttribs++]	= qpSetStringAttrib("CompileStatus", compileOk ? "OK" : "Fail");
 
 	if (!qpXmlWriter_startElement(log->writer, tagName, numShaderAttribs, shaderAttribs) ||
-		!qpXmlWriter_writeStringElement(log->writer, "ShaderSource", source) ||
+		!qpXmlWriter_writeStringElement(log->writer, "ShaderSource", sourceStr) ||
 		!qpXmlWriter_writeStringElement(log->writer, "InfoLog", infoLog) ||
 		!qpXmlWriter_endElement(log->writer, tagName))
 	{
@@ -1216,10 +1217,12 @@
  *//*--------------------------------------------------------------------*/
 deBool qpTestLog_writeKernelSource (qpTestLog* log, const char* source)
 {
+	const char*		sourceStr	= (log->flags & QP_TEST_LOG_EXCLUDE_SHADER_SOURCES) != 0 ? "" : source;
+
 	DE_ASSERT(log);
 	deMutex_lock(log->lock);
 
-	if (!qpXmlWriter_writeStringElement(log->writer, "KernelSource", source))
+	if (!qpXmlWriter_writeStringElement(log->writer, "KernelSource", sourceStr))
 	{
 		qpPrintf("qpTestLog_writeKernelSource(): Writing XML failed\n");
 		deMutex_unlock(log->lock);
diff --git a/framework/qphelper/qpTestLog.h b/framework/qphelper/qpTestLog.h
index 38cad23..d03dfdd 100644
--- a/framework/qphelper/qpTestLog.h
+++ b/framework/qphelper/qpTestLog.h
@@ -132,7 +132,8 @@
 /* Test log flags. */
 typedef enum qpTestLogFlag_e
 {
-	QP_TEST_LOG_EXCLUDE_IMAGES	= (1<<0)		/*!< Do not log images. This reduces log size considerably.		*/
+	QP_TEST_LOG_EXCLUDE_IMAGES			= (1<<0),		/*!< Do not log images. This reduces log size considerably.			*/
+	QP_TEST_LOG_EXCLUDE_SHADER_SOURCES	= (1<<1)		/*!< Do not log shader sources. Helps to reduce log size further.	*/
 } qpTestLogFlag;
 
 /* Shader type. */
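The new flag is meant to be OR'd together with the existing image-exclusion flag when the log is created. A hedged usage sketch; qpTestLog_createFileLog is the assumed factory and the file name is arbitrary:

    qpTestLog* log = qpTestLog_createFileLog("TestResults.qpa",
                                             QP_TEST_LOG_EXCLUDE_IMAGES | QP_TEST_LOG_EXCLUDE_SHADER_SOURCES);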
diff --git a/framework/referencerenderer/rrFragmentOperations.cpp b/framework/referencerenderer/rrFragmentOperations.cpp
index 6d0f1f9..d0fb3c3 100644
--- a/framework/referencerenderer/rrFragmentOperations.cpp
+++ b/framework/referencerenderer/rrFragmentOperations.cpp
@@ -24,6 +24,7 @@
 #include "rrFragmentOperations.hpp"
 #include "tcuVectorUtil.hpp"
 #include "tcuTextureUtil.hpp"
+#include <limits>
 
 using tcu::IVec2;
 using tcu::Vec3;
@@ -303,20 +304,20 @@
 
 void FragmentProcessor::executeBlendFactorComputeRGB (const Vec4& blendColor, const BlendState& blendRGBState)
 {
-#define SAMPLE_REGISTER_BLEND_FACTOR(FACTOR_NAME, FACTOR_EXPRESSION)											\
-	for (int regSampleNdx = 0; regSampleNdx < SAMPLE_REGISTER_SIZE; regSampleNdx++)								\
-	{																											\
-		if (m_sampleRegister[regSampleNdx].isAlive)																\
-		{																										\
-			const Vec4& src		= m_sampleRegister[regSampleNdx].clampedBlendSrcColor;							\
-			const Vec4& src1	= m_sampleRegister[regSampleNdx].clampedBlendSrc1Color;							\
-			const Vec4& dst		= m_sampleRegister[regSampleNdx].clampedBlendDstColor;							\
-			DE_UNREF(src);																						\
-			DE_UNREF(src1);																						\
-			DE_UNREF(dst);																						\
-																												\
-			m_sampleRegister[regSampleNdx].FACTOR_NAME = clamp((FACTOR_EXPRESSION), Vec3(0.0f), Vec3(1.0f));	\
-		}																										\
+#define SAMPLE_REGISTER_BLEND_FACTOR(FACTOR_NAME, FACTOR_EXPRESSION)																				\
+	for (int regSampleNdx = 0; regSampleNdx < SAMPLE_REGISTER_SIZE; regSampleNdx++)																	\
+	{																																				\
+		if (m_sampleRegister[regSampleNdx].isAlive)																									\
+		{																																			\
+			const Vec4& src		= m_sampleRegister[regSampleNdx].clampedBlendSrcColor;																\
+			const Vec4& src1	= m_sampleRegister[regSampleNdx].clampedBlendSrc1Color;																\
+			const Vec4& dst		= m_sampleRegister[regSampleNdx].clampedBlendDstColor;																\
+			DE_UNREF(src);																															\
+			DE_UNREF(src1);																															\
+			DE_UNREF(dst);																															\
+																																					\
+			m_sampleRegister[regSampleNdx].FACTOR_NAME = (FACTOR_EXPRESSION);																		\
+		}																																			\
 	}
 
 #define SWITCH_SRC_OR_DST_FACTOR_RGB(FUNC_NAME, FACTOR_NAME)																					\
@@ -354,20 +355,20 @@
 
 void FragmentProcessor::executeBlendFactorComputeA (const Vec4& blendColor, const BlendState& blendAState)
 {
-#define SAMPLE_REGISTER_BLEND_FACTOR(FACTOR_NAME, FACTOR_EXPRESSION)								\
-	for (int regSampleNdx = 0; regSampleNdx < SAMPLE_REGISTER_SIZE; regSampleNdx++)					\
-	{																								\
-		if (m_sampleRegister[regSampleNdx].isAlive)													\
-		{																							\
-			const Vec4& src		= m_sampleRegister[regSampleNdx].clampedBlendSrcColor;				\
-			const Vec4& src1	= m_sampleRegister[regSampleNdx].clampedBlendSrc1Color;				\
-			const Vec4& dst		= m_sampleRegister[regSampleNdx].clampedBlendDstColor;				\
-			DE_UNREF(src);																			\
-			DE_UNREF(src1);																			\
-			DE_UNREF(dst);																			\
-																									\
-			m_sampleRegister[regSampleNdx].FACTOR_NAME = clamp((FACTOR_EXPRESSION), 0.0f, 1.0f);	\
-		}																							\
+#define SAMPLE_REGISTER_BLEND_FACTOR(FACTOR_NAME, FACTOR_EXPRESSION)														\
+	for (int regSampleNdx = 0; regSampleNdx < SAMPLE_REGISTER_SIZE; regSampleNdx++)											\
+	{																														\
+		if (m_sampleRegister[regSampleNdx].isAlive)																			\
+		{																													\
+			const Vec4& src		= m_sampleRegister[regSampleNdx].clampedBlendSrcColor;										\
+			const Vec4& src1	= m_sampleRegister[regSampleNdx].clampedBlendSrc1Color;										\
+			const Vec4& dst		= m_sampleRegister[regSampleNdx].clampedBlendDstColor;										\
+			DE_UNREF(src);																									\
+			DE_UNREF(src1);																									\
+			DE_UNREF(dst);																									\
+																															\
+			m_sampleRegister[regSampleNdx].FACTOR_NAME = (FACTOR_EXPRESSION);												\
+		}																													\
 	}
 
 #define SWITCH_SRC_OR_DST_FACTOR_A(FUNC_NAME, FACTOR_NAME)																		\
@@ -843,6 +844,28 @@
 		switch (fragmentDataType)
 		{
 			case rr::GENERICVECTYPE_FLOAT:
+			{
+				// Select min/max clamping values for blending factors and operands
+				Vec4 minClampValue;
+				Vec4 maxClampValue;
+
+				if (colorbufferClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT)
+				{
+					minClampValue = Vec4(0.0f);
+					maxClampValue = Vec4(1.0f);
+				}
+				else if (colorbufferClass == tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT)
+				{
+					minClampValue = Vec4(-1.0f);
+					maxClampValue = Vec4(1.0f);
+				}
+				else
+				{
+					// No clamping
+					minClampValue = Vec4(-std::numeric_limits<float>::infinity());
+					maxClampValue = Vec4(std::numeric_limits<float>::infinity());
+				}
+
 				// Blend calculation - only if using blend.
 				if (state.blendMode == BLENDMODE_STANDARD)
 				{
@@ -855,9 +878,9 @@
 							const Fragment&		frag			= inputFragments[groupFirstFragNdx + regSampleNdx/numSamplesPerFragment];
 							Vec4				dstColor		= colorBuffer.getPixel(fragSampleNdx, frag.pixelCoord.x(), frag.pixelCoord.y());
 
-							m_sampleRegister[regSampleNdx].clampedBlendSrcColor		= clamp(frag.value.get<float>(), Vec4(0.0f), Vec4(1.0f));
-							m_sampleRegister[regSampleNdx].clampedBlendSrc1Color	= clamp(frag.value1.get<float>(), Vec4(0.0f), Vec4(1.0f));
-							m_sampleRegister[regSampleNdx].clampedBlendDstColor		= clamp(sRGBTarget ? tcu::sRGBToLinear(dstColor) : dstColor, Vec4(0.0f), Vec4(1.0f));
+							m_sampleRegister[regSampleNdx].clampedBlendSrcColor		= clamp(frag.value.get<float>(), minClampValue, maxClampValue);
+							m_sampleRegister[regSampleNdx].clampedBlendSrc1Color	= clamp(frag.value1.get<float>(), minClampValue, maxClampValue);
+							m_sampleRegister[regSampleNdx].clampedBlendDstColor		= clamp(sRGBTarget ? tcu::sRGBToLinear(dstColor) : dstColor, minClampValue, maxClampValue);
 						}
 					}
 
@@ -881,8 +904,8 @@
 							const Vec4			srcColor		= frag.value.get<float>();
 							const Vec4			dstColor		= colorBuffer.getPixel(fragSampleNdx, frag.pixelCoord.x(), frag.pixelCoord.y());
 
-							m_sampleRegister[regSampleNdx].clampedBlendSrcColor		= unpremultiply(clamp(srcColor, Vec4(0.0f), Vec4(1.0f)));
-							m_sampleRegister[regSampleNdx].clampedBlendDstColor		= unpremultiply(clamp(sRGBTarget ? tcu::sRGBToLinear(dstColor) : dstColor, Vec4(0.0f), Vec4(1.0f)));
+							m_sampleRegister[regSampleNdx].clampedBlendSrcColor		= unpremultiply(clamp(srcColor, minClampValue, maxClampValue));
+							m_sampleRegister[regSampleNdx].clampedBlendDstColor		= unpremultiply(clamp(sRGBTarget ? tcu::sRGBToLinear(dstColor) : dstColor, minClampValue, maxClampValue));
 						}
 					}
 
@@ -905,6 +928,19 @@
 					}
 				}
 
+				// Clamp result values in sample register
+				if (colorbufferClass != tcu::TEXTURECHANNELCLASS_FLOATING_POINT)
+				{
+					for (int regSampleNdx = 0; regSampleNdx < SAMPLE_REGISTER_SIZE; regSampleNdx++)
+					{
+						if (m_sampleRegister[regSampleNdx].isAlive)
+						{
+							m_sampleRegister[regSampleNdx].blendedRGB	= clamp(m_sampleRegister[regSampleNdx].blendedRGB, minClampValue.swizzle(0, 1, 2), maxClampValue.swizzle(0, 1, 2));
+							m_sampleRegister[regSampleNdx].blendedA		= clamp(m_sampleRegister[regSampleNdx].blendedA, minClampValue.w(), maxClampValue.w());
+						}
+					}
+				}
+
 				// Finally, write the colors to the color buffer.
 
 				if (state.colorMask[0] && state.colorMask[1] && state.colorMask[2] && state.colorMask[3])
@@ -917,7 +953,7 @@
 				else if (state.colorMask[0] || state.colorMask[1] || state.colorMask[2] || state.colorMask[3])
 					executeMaskedColorWrite(groupFirstFragNdx, numSamplesPerFragment, inputFragments, colorMaskFactor, colorMaskNegationFactor, sRGBTarget, colorBuffer);
 				break;
-
+			}
 			case rr::GENERICVECTYPE_INT32:
 				// Write fragments
 				for (int regSampleNdx = 0; regSampleNdx < SAMPLE_REGISTER_SIZE; regSampleNdx++)
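The blending change above picks the clamp range from the colorbuffer's channel class instead of always clamping to [0,1]: unsigned normalized targets clamp to [0,1], signed normalized to [-1,1], and floating-point targets are not clamped at all. A condensed restatement of that selection, not taken from the patch:

    static tcu::Vec2 getBlendClampRange (tcu::TextureChannelClass chanClass)
    {
        switch (chanClass)
        {
            case tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT: return tcu::Vec2( 0.0f, 1.0f);
            case tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT:   return tcu::Vec2(-1.0f, 1.0f);
            default:                                            return tcu::Vec2(-std::numeric_limits<float>::infinity(),
                                                                                  std::numeric_limits<float>::infinity());
        }
    }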
diff --git a/modules/egl/teglAndroidUtil.cpp b/modules/egl/teglAndroidUtil.cpp
index 64cbe9b..e32c18a 100644
--- a/modules/egl/teglAndroidUtil.cpp
+++ b/modules/egl/teglAndroidUtil.cpp
@@ -101,7 +101,8 @@
 class AndroidNativeImageSource : public ImageSource
 {
 public:
-							AndroidNativeImageSource	(GLenum format) : m_format(format) {}
+							AndroidNativeImageSource	(GLenum format) : m_format(format), m_libui(DE_NULL) {}
+							~AndroidNativeImageSource	(void);
 	MovePtr<ClientBuffer>	createBuffer 				(const glw::Functions&, Texture2D*) const;
 	string					getRequiredExtension		(void) const { return "EGL_ANDROID_image_native_buffer"; }
 	EGLImageKHR				createImage					(const Library& egl, EGLDisplay dpy, EGLContext ctx, EGLClientBuffer clientBuffer) const;
@@ -109,9 +110,26 @@
 
 protected:
 	GLenum					m_format;
-	LibUI					m_libui;
+
+	const LibUI&			getLibUI					(void) const;
+
+private:
+	mutable LibUI*			m_libui;
 };
 
+AndroidNativeImageSource::~AndroidNativeImageSource (void)
+{
+	delete m_libui;
+}
+
+const LibUI& AndroidNativeImageSource::getLibUI (void) const
+{
+	if (!m_libui)
+		m_libui = new LibUI();
+
+	return *m_libui;
+}
+
 void checkStatus (status_t status)
 {
 	if (status != tcu::Android::internal::OK)
@@ -120,7 +138,7 @@
 
 MovePtr<ClientBuffer> AndroidNativeImageSource::createBuffer (const glw::Functions&, Texture2D* ref) const
 {
-	MovePtr<AndroidNativeClientBuffer>	buffer			(new AndroidNativeClientBuffer(m_libui, m_format));
+	MovePtr<AndroidNativeClientBuffer>	buffer			(new AndroidNativeClientBuffer(getLibUI(), m_format));
 	GraphicBuffer&						graphicBuffer	= buffer->getGraphicBuffer();
 	if (ref != DE_NULL)
 	{
@@ -157,7 +175,7 @@
 	{
 		return MovePtr<ImageSource>(new AndroidNativeImageSource(format));
 	}
-	catch (std::runtime_error& exc)
+	catch (const std::runtime_error& exc)
 	{
 		return createUnsupportedImageSource(string("Android native buffers unsupported: ") + exc.what(), format);
 	}
diff --git a/modules/egl/teglCreateContextExtTests.cpp b/modules/egl/teglCreateContextExtTests.cpp
index 5253926..5153b41 100644
--- a/modules/egl/teglCreateContextExtTests.cpp
+++ b/modules/egl/teglCreateContextExtTests.cpp
@@ -369,7 +369,7 @@
 {
 	bool			isOk = true;
 	set<string>		requiredExtensions;
-	vector<string>	extensions			= eglu::getClientExtensions(m_eglTestCtx.getLibrary(), m_display);
+	vector<string>	extensions			= eglu::getDisplayExtensions(m_eglTestCtx.getLibrary(), m_display);
 
 	{
 		const EGLint* iter = &(m_attribList[0]);
diff --git a/modules/egl/teglCreateSurfaceTests.cpp b/modules/egl/teglCreateSurfaceTests.cpp
index 49c2381..712d06d 100644
--- a/modules/egl/teglCreateSurfaceTests.cpp
+++ b/modules/egl/teglCreateSurfaceTests.cpp
@@ -56,7 +56,7 @@
 
 void checkEGLPlatformSupport (const Library& egl, const char* platformExt)
 {
-	std::vector<std::string> extensions = eglu::getPlatformExtensions(egl);
+	std::vector<std::string> extensions = eglu::getClientExtensions(egl);
 
 	if (!de::contains(extensions.begin(), extensions.end(), platformExt))
 		throw tcu::NotSupportedError((std::string("Platform extension '") + platformExt + "' not supported").c_str(), "", __FILE__, __LINE__);
diff --git a/modules/egl/teglGLES2SharedRenderingPerfTests.cpp b/modules/egl/teglGLES2SharedRenderingPerfTests.cpp
index 5e2dcc9..d5e2312 100644
--- a/modules/egl/teglGLES2SharedRenderingPerfTests.cpp
+++ b/modules/egl/teglGLES2SharedRenderingPerfTests.cpp
@@ -495,7 +495,7 @@
 		|| m_config.textureType == TestConfig::TEXTURETYPE_SHARED_IMAGE
 		|| m_config.textureType == TestConfig::TEXTURETYPE_SHARED_IMAGE_TEXTURE)
 	{
-		const vector<string> extensions = eglu::getClientExtensions(egl, m_eglDisplay);
+		const vector<string> extensions = eglu::getDisplayExtensions(egl, m_eglDisplay);
 
 		if (!de::contains(extensions.begin(), extensions.end(), "EGL_KHR_image_base") ||
 			!de::contains(extensions.begin(), extensions.end(), "EGL_KHR_gl_texture_2D_image"))
diff --git a/modules/egl/teglGetProcAddressTests.cpp b/modules/egl/teglGetProcAddressTests.cpp
index 7fd18c9..4c620ca 100644
--- a/modules/egl/teglGetProcAddressTests.cpp
+++ b/modules/egl/teglGetProcAddressTests.cpp
@@ -124,10 +124,25 @@
 
 void GetProcAddressCase::init (void)
 {
+	try
+	{
+		m_supported = eglu::getClientExtensions(m_eglTestCtx.getLibrary());
+	}
+	catch (const eglu::Error& error)
+	{
+		// EGL_BAD_DISPLAY is generated if client extensions are not supported.
+		if (error.getError() != EGL_BAD_DISPLAY)
+			throw;
+	}
+
 	DE_ASSERT(m_display == EGL_NO_DISPLAY);
 
-	m_display	= eglu::getAndInitDisplay(m_eglTestCtx.getNativeDisplay());
-	m_supported	= eglu::getClientExtensions(m_eglTestCtx.getLibrary(), m_display);
+	m_display = eglu::getAndInitDisplay(m_eglTestCtx.getNativeDisplay());
+
+	{
+		const std::vector<std::string> displayExtensions = eglu::getDisplayExtensions(m_eglTestCtx.getLibrary(), m_display);
+		m_supported.insert(m_supported.end(), displayExtensions.begin(), displayExtensions.end());
+	}
 
 	m_testCtx.setTestResult(QP_TEST_RESULT_PASS, "Pass");
 }
diff --git a/modules/egl/teglImageFormatTests.cpp b/modules/egl/teglImageFormatTests.cpp
index e7cf0f4..2d46751 100644
--- a/modules/egl/teglImageFormatTests.cpp
+++ b/modules/egl/teglImageFormatTests.cpp
@@ -923,7 +923,7 @@
 	const EGLDisplay		dpy		= m_display;
 	set<string>				exts;
 	const vector<string>	glExts	= de::splitString((const char*) m_gl.getString(GL_EXTENSIONS));
-	const vector<string>	eglExts	= eglu::getClientExtensions(egl, dpy);
+	const vector<string>	eglExts	= eglu::getDisplayExtensions(egl, dpy);
 
 	exts.insert(glExts.begin(), glExts.end());
 	exts.insert(eglExts.begin(), eglExts.end());
diff --git a/modules/egl/teglInfoTests.cpp b/modules/egl/teglInfoTests.cpp
index 8ed6daf..44fd58e 100644
--- a/modules/egl/teglInfoTests.cpp
+++ b/modules/egl/teglInfoTests.cpp
@@ -149,7 +149,7 @@
 	IterateResult iterate (void)
 	{
 		const Library&	egl			= m_eglTestCtx.getLibrary();
-		vector<string>	extensions	= eglu::getClientExtensions(egl, m_display);
+		vector<string>	extensions	= eglu::getDisplayExtensions(egl, m_display);
 
 		for (vector<string>::const_iterator i = extensions.begin(); i != extensions.end(); i++)
 			m_testCtx.getLog() << tcu::TestLog::Message << *i << tcu::TestLog::EndMessage;
diff --git a/modules/egl/teglNegativeApiTests.cpp b/modules/egl/teglNegativeApiTests.cpp
index c2b7ebc..9a80ff9 100644
--- a/modules/egl/teglNegativeApiTests.cpp
+++ b/modules/egl/teglNegativeApiTests.cpp
@@ -27,8 +27,11 @@
 #include "egluNativeDisplay.hpp"
 #include "egluNativeWindow.hpp"
 #include "egluUtil.hpp"
+#include "egluUtil.hpp"
 #include "egluUnique.hpp"
 
+#include "eglwLibrary.hpp"
+
 #include <memory>
 
 using tcu::TestLog;
@@ -181,27 +184,40 @@
 			log << TestLog::EndSection;
 		});
 
+	static const EGLint s_validGenericPbufferAttrib[] = { EGL_WIDTH, 64, EGL_HEIGHT, 64, EGL_NONE };
+
 	TEGL_ADD_API_CASE(copy_buffers, "eglCopyBuffers() negative tests",
 		{
-			TestLog&	log			= m_testCtx.getLog();
-			EGLDisplay	display		= getDisplay();
+			TestLog&							log				= m_testCtx.getLog();
+			const eglw::Library&				egl				= m_eglTestCtx.getLibrary();
+			EGLDisplay							display			= getDisplay();
+			const eglu::NativePixmapFactory&	factory			= eglu::selectNativePixmapFactory(m_eglTestCtx.getNativeDisplayFactory(), m_testCtx.getCommandLine());
+			de::UniquePtr<eglu::NativePixmap>	pixmap			(factory.createPixmap(&m_eglTestCtx.getNativeDisplay(), 64, 64));
+			EGLConfig							config;
 
-			log << TestLog::Section("Test1", "EGL_BAD_DISPLAY is generated if display is not an EGL display connection");
+			{
+				if (getConfig(&config, FilterList() << surfaceBits<EGL_PBUFFER_BIT>))
+				{
+					eglu::UniqueSurface	surface	(egl, display, egl.createPbufferSurface(display, config, s_validGenericPbufferAttrib));
 
-			expectFalse(eglCopyBuffers(EGL_NO_DISPLAY, EGL_NO_SURFACE, (EGLNativePixmapType)0));
-			expectError(EGL_BAD_DISPLAY);
+					log << TestLog::Section("Test1", "EGL_BAD_DISPLAY is generated if display is not an EGL display connection");
 
-			expectFalse(eglCopyBuffers((EGLDisplay)-1, EGL_NO_SURFACE, (EGLNativePixmapType)0));
-			expectError(EGL_BAD_DISPLAY);
+					expectFalse(eglCopyBuffers(EGL_NO_DISPLAY, EGL_NO_SURFACE, pixmap->getLegacyNative()));
+					expectError(EGL_BAD_DISPLAY);
 
-			log << TestLog::EndSection;
+					expectFalse(eglCopyBuffers((EGLDisplay)-1, EGL_NO_SURFACE, pixmap->getLegacyNative()));
+					expectError(EGL_BAD_DISPLAY);
+
+					log << TestLog::EndSection;
+				}
+			}
 
 			log << TestLog::Section("Test2", "EGL_BAD_SURFACE is generated if surface is not an EGL surface");
 
-			expectFalse(eglCopyBuffers(display, EGL_NO_SURFACE, (EGLNativePixmapType)0));
+			expectFalse(eglCopyBuffers(display, EGL_NO_SURFACE, pixmap->getLegacyNative()));
 			expectError(EGL_BAD_SURFACE);
 
-			expectFalse(eglCopyBuffers(display, (EGLSurface)-1, (EGLNativePixmapType)0));
+			expectFalse(eglCopyBuffers(display, (EGLSurface)-1, pixmap->getLegacyNative()));
 			expectError(EGL_BAD_SURFACE);
 
 			log << TestLog::EndSection;
@@ -212,9 +228,8 @@
 	static const EGLint s_invalidChooseConfigAttribList2[]	= { EGL_BIND_TO_TEXTURE_RGB, 4, EGL_NONE };
 	static const EGLint s_invalidChooseConfigAttribList3[]	= { EGL_BIND_TO_TEXTURE_RGBA, 5, EGL_NONE };
 	static const EGLint s_invalidChooseConfigAttribList4[]	= { EGL_COLOR_BUFFER_TYPE, 0, EGL_NONE };
-	static const EGLint s_invalidChooseConfigAttribList5[]	= { EGL_MATCH_NATIVE_PIXMAP, -1, EGL_NONE };
-	static const EGLint s_invalidChooseConfigAttribList6[]	= { EGL_NATIVE_RENDERABLE, 6, EGL_NONE };
-	static const EGLint s_invalidChooseConfigAttribList7[]	= { EGL_TRANSPARENT_TYPE, 6, EGL_NONE };
+	static const EGLint s_invalidChooseConfigAttribList5[]	= { EGL_NATIVE_RENDERABLE, 6, EGL_NONE };
+	static const EGLint s_invalidChooseConfigAttribList6[]	= { EGL_TRANSPARENT_TYPE, 6, EGL_NONE };
 	static const EGLint* s_invalidChooseConfigAttribLists[] =
 	{
 		&s_invalidChooseConfigAttribList0[0],
@@ -223,8 +238,7 @@
 		&s_invalidChooseConfigAttribList3[0],
 		&s_invalidChooseConfigAttribList4[0],
 		&s_invalidChooseConfigAttribList5[0],
-		&s_invalidChooseConfigAttribList6[0],
-		&s_invalidChooseConfigAttribList7[0]
+		&s_invalidChooseConfigAttribList6[0]
 	};
 
 	TEGL_ADD_API_CASE(choose_config, "eglChooseConfig() negative tests",
@@ -456,28 +470,29 @@
 
 			log << TestLog::EndSection;
 
-			log << TestLog::Section("Test2", "EGL_BAD_CONFIG or EGL_BAD_PARAMETER is generated if config is not an EGL frame buffer configuration and if buffer is not valid OpenVG image");
+			if (isAPISupported(EGL_OPENVG_API))
+			{
+				log << TestLog::Section("Test2", "EGL_BAD_CONFIG or EGL_BAD_PARAMETER is generated if config is not an EGL frame buffer configuration and if buffer is not valid OpenVG image");
 
-			expectNoSurface(eglCreatePbufferFromClientBuffer(display, EGL_OPENVG_IMAGE, (EGLClientBuffer)-1, (EGLConfig)-1, DE_NULL));
-			expectEitherError(EGL_BAD_CONFIG, EGL_BAD_PARAMETER);
+				expectNoSurface(eglCreatePbufferFromClientBuffer(display, EGL_OPENVG_IMAGE, (EGLClientBuffer)-1, (EGLConfig)-1, DE_NULL));
+				expectEitherError(EGL_BAD_CONFIG, EGL_BAD_PARAMETER);
 
-			log << TestLog::EndSection;
+				log << TestLog::EndSection;
 
-			log << TestLog::Section("Test3", "EGL_BAD_PARAMETER is generated if buftype is not EGL_OPENVG_IMAGE");
+				log << TestLog::Section("Test3", "EGL_BAD_PARAMETER is generated if buftype is not EGL_OPENVG_IMAGE");
 
-			log << TestLog::EndSection;
+				expectTrue(eglGetConfigs(display, &anyConfig, 1, &unused));
 
-			expectTrue(eglGetConfigs(display, &anyConfig, 1, &unused));
+				log << TestLog::EndSection;
 
-			log << TestLog::Section("Test4", "EGL_BAD_PARAMETER is generated if buffer is not valid OpenVG image");
-			expectNoSurface(eglCreatePbufferFromClientBuffer(display, EGL_OPENVG_IMAGE, (EGLClientBuffer)-1, anyConfig, DE_NULL));
-			expectError(EGL_BAD_PARAMETER);
+				log << TestLog::Section("Test4", "EGL_BAD_PARAMETER is generated if buffer is not valid OpenVG image");
+				expectNoSurface(eglCreatePbufferFromClientBuffer(display, EGL_OPENVG_IMAGE, (EGLClientBuffer)-1, anyConfig, DE_NULL));
+				expectError(EGL_BAD_PARAMETER);
 
-			log << TestLog::EndSection;
+				log << TestLog::EndSection;
+			}
 		});
 
-	static const EGLint s_validGenericPbufferAttrib[] = { EGL_WIDTH, 64, EGL_HEIGHT, 64, EGL_NONE };
-
 	static const EGLint s_invalidGenericPbufferAttrib0[] = { 0, EGL_NONE };
 	static const EGLint s_invalidGenericPbufferAttrib1[] = { (EGLint)0xffffffff };
 	static const EGLint s_negativeWidthPbufferAttrib[] = { EGL_WIDTH, -1, EGL_HEIGHT, 64, EGL_NONE };
@@ -646,41 +661,30 @@
 			expectError(EGL_BAD_CONFIG);
 
 			log << TestLog::EndSection;
-
-			log << TestLog::Section("Test3", "EGL_BAD_NATIVE_PIXMAP may be generated if native_pixmap is not a valid native pixmap");
-
-			// Any pixmap-capable config.
-			EGLConfig pixmapConfig;
-			if (getConfig(&pixmapConfig, FilterList() << surfaceBits<EGL_PIXMAP_BIT>))
-			{
-				expectNoSurface(eglCreatePixmapSurface(display, pixmapConfig, DE_NULL, s_emptyAttribList));
-				expectError(EGL_BAD_NATIVE_PIXMAP);
-			}
-
-			log << TestLog::EndSection;
 		});
 
 	TEGL_ADD_API_CASE(create_window_surface, "eglCreateWindowSurface() negative tests",
 		{
-			TestLog&	log			= m_testCtx.getLog();
-			EGLDisplay	display		= getDisplay();
+			EGLConfig				config			= DE_NULL;
+			bool					gotConfig		= getConfig(&config, FilterList() << renderable<EGL_OPENGL_ES2_BIT> << surfaceBits<EGL_WINDOW_BIT>);
 
-			log << TestLog::Section("Test1", "EGL_BAD_DISPLAY is generated if display is not an EGL display connection");
+			if (gotConfig)
+			{
+				TestLog&							log				= m_testCtx.getLog();
+				EGLDisplay							display			= getDisplay();
+				const eglu::NativeWindowFactory&	factory			= eglu::selectNativeWindowFactory(m_eglTestCtx.getNativeDisplayFactory(), m_testCtx.getCommandLine());
+				de::UniquePtr<eglu::NativeWindow>	window			(factory.createWindow(&m_eglTestCtx.getNativeDisplay(), display, config, DE_NULL, eglu::WindowParams(256, 256, eglu::parseWindowVisibility(m_testCtx.getCommandLine()))));
 
-			expectNoSurface(eglCreateWindowSurface(EGL_NO_DISPLAY, DE_NULL, DE_NULL, s_emptyAttribList));
-			expectError(EGL_BAD_DISPLAY);
+				log << TestLog::Section("Test1", "EGL_BAD_DISPLAY is generated if display is not an EGL display connection");
 
-			expectNoSurface(eglCreateWindowSurface((EGLDisplay)-1, DE_NULL, DE_NULL, s_emptyAttribList));
-			expectError(EGL_BAD_DISPLAY);
+				expectNoSurface(eglCreateWindowSurface(EGL_NO_DISPLAY, config, window->getLegacyNative(), s_emptyAttribList));
+				expectError(EGL_BAD_DISPLAY);
 
-			log << TestLog::EndSection;
+				expectNoSurface(eglCreateWindowSurface((EGLDisplay)-1, config, window->getLegacyNative(), s_emptyAttribList));
+				expectError(EGL_BAD_DISPLAY);
 
-			log << TestLog::Section("Test2", "EGL_BAD_CONFIG is generated if config is not an EGL frame buffer configuration");
-
-			expectNoSurface(eglCreateWindowSurface(display, (EGLConfig)-1, DE_NULL, s_emptyAttribList));
-			expectError(EGL_BAD_CONFIG);
-
-			log << TestLog::EndSection;
+				log << TestLog::EndSection;
+			}
 		});
 
 	TEGL_ADD_API_CASE(destroy_context, "eglDestroyContext() negative tests",
@@ -801,12 +805,6 @@
 			log << TestLog::EndSection;
 		});
 
-	TEGL_ADD_API_CASE(get_display, "eglGetDisplay() negative tests",
-		{
-			expectNoDisplay(eglGetDisplay((EGLNativeDisplayType)-1));
-			expectError(EGL_SUCCESS);
-		});
-
 	TEGL_ADD_API_CASE(initialize, "eglInitialize() negative tests",
 		{
 			TestLog&	log			= m_testCtx.getLog();
diff --git a/modules/gles2/functional/es2fDitheringTests.cpp b/modules/gles2/functional/es2fDitheringTests.cpp
index b5d5239..faf9cb0 100644
--- a/modules/gles2/functional/es2fDitheringTests.cpp
+++ b/modules/gles2/functional/es2fDitheringTests.cpp
@@ -315,8 +315,9 @@
 
 	if (!m_ditheringEnabled)
 	{
-		const int increasingDirectionSize	= isVerticallyIncreasing ? renderedImg.getHeight() : renderedImg.getWidth();
-		const int constantDirectionSize		= isVerticallyIncreasing ? renderedImg.getWidth() : renderedImg.getHeight();
+		const int	increasingDirectionSize	= isVerticallyIncreasing ? renderedImg.getHeight() : renderedImg.getWidth();
+		const int	constantDirectionSize	= isVerticallyIncreasing ? renderedImg.getWidth() : renderedImg.getHeight();
+		bool		colorHasChanged			= false;
 
 		for (int incrPos = 0; incrPos < increasingDirectionSize; incrPos++)
 		{
@@ -329,13 +330,20 @@
 
 				if (constPos > 0 && clr != prevConstantDirectionPix)
 				{
-					log << TestLog::Message
-						<< "Failure: colors should be constant per " << (isVerticallyIncreasing ? "row" : "column")
-						<< " (since dithering is disabled), but the color at position (" << x << ", " << y << ") is " << clr
-						<< " and does not equal the color at (" << (isVerticallyIncreasing ? x-1 : x) << ", " << (isVerticallyIncreasing ? y : y-1) << "), which is " << prevConstantDirectionPix
-						<< TestLog::EndMessage;
+					// Allow the color to change once, to account for a possible
+					// discontinuity between triangles
+					if (colorHasChanged)
+					{
+						log << TestLog::Message
+							<< "Failure: colors should be constant per " << (isVerticallyIncreasing ? "row" : "column")
+							<< " (since dithering is disabled), but the color at position (" << x << ", " << y << ") is " << clr
+							<< " and does not equal the color at (" << (isVerticallyIncreasing ? x-1 : x) << ", " << (isVerticallyIncreasing ? y : y-1) << "), which is " << prevConstantDirectionPix
+							<< TestLog::EndMessage;
 
-					return false;
+						return false;
+					}
+					else
+						colorHasChanged = true;
 				}
 
 				prevConstantDirectionPix = clr;
diff --git a/modules/gles2/functional/es2fFboRenderTest.cpp b/modules/gles2/functional/es2fFboRenderTest.cpp
index f573947..c517d68 100644
--- a/modules/gles2/functional/es2fFboRenderTest.cpp
+++ b/modules/gles2/functional/es2fFboRenderTest.cpp
@@ -625,7 +625,7 @@
 	}
 
 	// Compare images
-	const float		threshold	= 0.02f;
+	const float		threshold	= 0.03f;
 	bool			imagesOk	= tcu::fuzzyCompare(log, "ComparisonResult", "Image comparison result", refFrame, gles2Frame, threshold, tcu::COMPARE_LOG_RESULT);
 
 	if (!imagesOk && !failReason)
diff --git a/modules/gles2/functional/es2fShaderLoopTests.cpp b/modules/gles2/functional/es2fShaderLoopTests.cpp
index db80378..ccfe9f3 100644
--- a/modules/gles2/functional/es2fShaderLoopTests.cpp
+++ b/modules/gles2/functional/es2fShaderLoopTests.cpp
@@ -30,6 +30,7 @@
  *//*--------------------------------------------------------------------*/
 
 #include "es2fShaderLoopTests.hpp"
+#include "glsShaderLibrary.hpp"
 #include "glsShaderRenderCase.hpp"
 #include "gluShaderUtil.hpp"
 #include "tcuStringTemplate.hpp"
@@ -1336,6 +1337,11 @@
 			}
 		}
 	}
+
+	// Additional smaller handwritten tests.
+	const std::vector<tcu::TestNode*> children = gls::ShaderLibrary(m_context.getTestContext(), m_context.getRenderContext(), m_context.getContextInfo()).loadShaderFile("shaders/loops.test");
+	for (int i = 0; i < (int)children.size(); i++)
+		addChild(children[i]);
 }
 
 } // Functional
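
The addition above pulls extra handwritten cases from shaders/loops.test through gls::ShaderLibrary. A sketch of the same pattern, factored into a hypothetical helper (addShaderLibraryCases is not an existing dEQP function; it assumes the surrounding module's includes and uses only the calls shown in the hunk):

// Hypothetical helper: load a .test file and register every case it defines.
static void addShaderLibraryCases (tcu::TestCaseGroup* group, Context& context, const char* filename)
{
	const std::vector<tcu::TestNode*> children =
		gls::ShaderLibrary(context.getTestContext(),
						   context.getRenderContext(),
						   context.getContextInfo()).loadShaderFile(filename);

	for (int i = 0; i < (int)children.size(); i++)
		group->addChild(children[i]);
}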
diff --git a/modules/gles3/functional/es3fASTCDecompressionCases.cpp b/modules/gles3/functional/es3fASTCDecompressionCases.cpp
index cc652ab..bef75bc 100644
--- a/modules/gles3/functional/es3fASTCDecompressionCases.cpp
+++ b/modules/gles3/functional/es3fASTCDecompressionCases.cpp
@@ -68,6 +68,7 @@
 using tcu::Vec4;
 using tcu::Sampler;
 using tcu::Surface;
+using tcu::astc::BlockTestType;
 using std::vector;
 using std::string;
 
@@ -86,1395 +87,6 @@
 namespace ASTCDecompressionCaseInternal
 {
 
-static const int ASTC_BLOCK_SIZE_BYTES = 128/8;
-
-static inline int divRoundUp (int a, int b)
-{
-	return a/b + ((a%b) ? 1 : 0);
-}
-
-namespace ASTCBlockGeneratorInternal
-{
-
-static inline deUint32 reverseBits (deUint32 src, int numBits)
-{
-	DE_ASSERT(de::inRange(numBits, 0, 32));
-	deUint32 result = 0;
-	for (int i = 0; i < numBits; i++)
-		result |= ((src >> i) & 1) << (numBits-1-i);
-	return result;
-}
-
-static inline deUint32 getBit (deUint32 src, int ndx)
-{
-	DE_ASSERT(de::inBounds(ndx, 0, 32));
-	return (src >> ndx) & 1;
-}
-
-static inline deUint32 getBits (deUint32 src, int low, int high)
-{
-	const int numBits = (high-low) + 1;
-	if (numBits == 0)
-		return 0;
-	DE_ASSERT(de::inRange(numBits, 1, 32));
-	return (src >> low) & ((1u<<numBits)-1);
-}
-
-#if defined(DE_DEBUG)
-static inline bool isFloat16InfOrNan (deFloat16 v)
-{
-	return getBits(v, 10, 14) == 31;
-}
-#endif
-
-template <typename T, typename Y>
-struct isSameType			{ enum { V = 0 }; };
-template <typename T>
-struct isSameType<T, T>		{ enum { V = 1 }; };
-
-// Helper class for setting bits in a 128-bit block.
-class AssignBlock128
-{
-private:
-	typedef deUint64 Word;
-
-	enum
-	{
-		WORD_BYTES	= sizeof(Word),
-		WORD_BITS	= 8*WORD_BYTES,
-		NUM_WORDS	= 128 / WORD_BITS
-	};
-
-	DE_STATIC_ASSERT(128 % WORD_BITS == 0);
-
-public:
-	AssignBlock128 (void)
-	{
-		for (int wordNdx = 0; wordNdx < NUM_WORDS; wordNdx++)
-			m_words[wordNdx] = 0;
-	}
-
-	void setBit (int ndx, deUint32 val)
-	{
-		DE_ASSERT(de::inBounds(ndx, 0, 128));
-		DE_ASSERT((val & 1) == val);
-		const int wordNdx	= ndx / WORD_BITS;
-		const int bitNdx	= ndx % WORD_BITS;
-		m_words[wordNdx] = (m_words[wordNdx] & ~((Word)1 << bitNdx)) | ((Word)val << bitNdx);
-	}
-
-	void setBits (int low, int high, deUint32 bits)
-	{
-		DE_ASSERT(de::inBounds(low, 0, 128));
-		DE_ASSERT(de::inBounds(high, 0, 128));
-		DE_ASSERT(de::inRange(high-low+1, 0, 32));
-		DE_ASSERT((bits & (((Word)1 << (high-low+1)) - 1)) == bits);
-
-		if (high-low+1 == 0)
-			return;
-
-		const int word0Ndx		= low / WORD_BITS;
-		const int word1Ndx		= high / WORD_BITS;
-		const int lowNdxInW0	= low % WORD_BITS;
-
-		if (word0Ndx == word1Ndx)
-			m_words[word0Ndx] = (m_words[word0Ndx] & ~((((Word)1 << (high-low+1)) - 1) << lowNdxInW0)) | ((Word)bits << lowNdxInW0);
-		else
-		{
-			DE_ASSERT(word1Ndx == word0Ndx + 1);
-
-			const int	highNdxInW1			= high % WORD_BITS;
-			const int	numBitsToSetInW0	= WORD_BITS - lowNdxInW0;
-			const Word	bitsLowMask			= ((Word)1 << numBitsToSetInW0) - 1;
-
-			m_words[word0Ndx] = (m_words[word0Ndx] & (((Word)1 << lowNdxInW0) - 1))			| (((Word)bits & bitsLowMask) << lowNdxInW0);
-			m_words[word1Ndx] = (m_words[word1Ndx] & ~(((Word)1 << (highNdxInW1+1)) - 1))	| (((Word)bits & ~bitsLowMask) >> numBitsToSetInW0);
-		}
-	}
-
-	void assignToMemory (deUint8* dst) const
-	{
-		for (int wordNdx = 0; wordNdx < NUM_WORDS; wordNdx++)
-		{
-			for (int byteNdx = 0; byteNdx < WORD_BYTES; byteNdx++)
-				dst[wordNdx*WORD_BYTES + byteNdx] = (deUint8)((m_words[wordNdx] >> (8*byteNdx)) & 0xff);
-		}
-	}
-
-	void pushBytesToVector (vector<deUint8>& dst) const
-	{
-		const int assignStartIndex = (int)dst.size();
-		dst.resize(dst.size() + ASTC_BLOCK_SIZE_BYTES);
-		assignToMemory(&dst[assignStartIndex]);
-	}
-
-private:
-	Word m_words[NUM_WORDS];
-};
-
-// A helper for sequential access into a AssignBlock128.
-class BitAssignAccessStream
-{
-public:
-	BitAssignAccessStream (AssignBlock128& dst, int startNdxInSrc, int length, bool forward)
-		: m_dst				(dst)
-		, m_startNdxInSrc	(startNdxInSrc)
-		, m_length			(length)
-		, m_forward			(forward)
-		, m_ndx				(0)
-	{
-	}
-
-	// Set the next num bits. Bits at positions greater than or equal to m_length are not touched.
-	void setNext (int num, deUint32 bits)
-	{
-		DE_ASSERT((bits & (((deUint64)1 << num) - 1)) == bits);
-
-		if (num == 0 || m_ndx >= m_length)
-			return;
-
-		const int		end				= m_ndx + num;
-		const int		numBitsToDst	= de::max(0, de::min(m_length, end) - m_ndx);
-		const int		low				= m_ndx;
-		const int		high			= m_ndx + numBitsToDst - 1;
-		const deUint32	actualBits		= getBits(bits, 0, numBitsToDst-1);
-
-		m_ndx += num;
-
-		return m_forward ? m_dst.setBits(m_startNdxInSrc + low,  m_startNdxInSrc + high, actualBits)
-						 : m_dst.setBits(m_startNdxInSrc - high, m_startNdxInSrc - low, reverseBits(actualBits, numBitsToDst));
-	}
-
-private:
-	AssignBlock128&		m_dst;
-	const int			m_startNdxInSrc;
-	const int			m_length;
-	const bool			m_forward;
-
-	int					m_ndx;
-};
-
-struct VoidExtentParams
-{
-	DE_STATIC_ASSERT((isSameType<deFloat16, deUint16>::V));
-	bool		isHDR;
-	deUint16	r;
-	deUint16	g;
-	deUint16	b;
-	deUint16	a;
-	// \note Currently extent coordinates are all set to all-ones.
-
-	VoidExtentParams (bool isHDR_, deUint16 r_, deUint16 g_, deUint16 b_, deUint16 a_) : isHDR(isHDR_), r(r_), g(g_), b(b_), a(a_) {}
-};
-
-static AssignBlock128 generateVoidExtentBlock (const VoidExtentParams& params)
-{
-	AssignBlock128 block;
-
-	block.setBits(0, 8, 0x1fc); // \note Marks void-extent block.
-	block.setBit(9, params.isHDR);
-	block.setBits(10, 11, 3); // \note Spec shows that these bits are both set, although they serve no purpose.
-
-	// Extent coordinates - currently all-ones.
-	block.setBits(12, 24, 0x1fff);
-	block.setBits(25, 37, 0x1fff);
-	block.setBits(38, 50, 0x1fff);
-	block.setBits(51, 63, 0x1fff);
-
-	DE_ASSERT(!params.isHDR || (!isFloat16InfOrNan(params.r) &&
-								!isFloat16InfOrNan(params.g) &&
-								!isFloat16InfOrNan(params.b) &&
-								!isFloat16InfOrNan(params.a)));
-
-	block.setBits(64,  79,  params.r);
-	block.setBits(80,  95,  params.g);
-	block.setBits(96,  111, params.b);
-	block.setBits(112, 127, params.a);
-
-	return block;
-}
-
-enum ISEMode
-{
-	ISEMODE_TRIT = 0,
-	ISEMODE_QUINT,
-	ISEMODE_PLAIN_BIT,
-
-	ISEMODE_LAST
-};
-
-struct ISEParams
-{
-	ISEMode		mode;
-	int			numBits;
-
-	ISEParams (ISEMode mode_, int numBits_) : mode(mode_), numBits(numBits_) {}
-};
-
-// An input array of ISE inputs for an entire ASTC block. Can be given as either single values in the
-// range [0, maximumValueOfISERange] or as explicit block value specifications. The latter is needed
-// so we can test all possible values of T and Q in a block, since multiple T or Q values may map
-// to the same set of decoded values.
-struct ISEInput
-{
-	struct Block
-	{
-		deUint32 tOrQValue; //!< The 8-bit T or 7-bit Q in a trit or quint ISE block.
-		deUint32 bitValues[5];
-	};
-
-	bool isGivenInBlockForm;
-	union
-	{
-		//!< \note 64 comes from the maximum number of weight values in an ASTC block.
-		deUint32	plain[64];
-		Block		block[64];
-	} value;
-
-	ISEInput (void)
-		: isGivenInBlockForm (false)
-	{
-	}
-};
-
-static inline int computeNumRequiredBits (const ISEParams& iseParams, int numValues)
-{
-	switch (iseParams.mode)
-	{
-		case ISEMODE_TRIT:			return divRoundUp(numValues*8, 5) + numValues*iseParams.numBits;
-		case ISEMODE_QUINT:			return divRoundUp(numValues*7, 3) + numValues*iseParams.numBits;
-		case ISEMODE_PLAIN_BIT:		return numValues*iseParams.numBits;
-		default:
-			DE_ASSERT(false);
-			return -1;
-	}
-}
-
-static inline deUint32 computeISERangeMax (const ISEParams& iseParams)
-{
-	switch (iseParams.mode)
-	{
-		case ISEMODE_TRIT:			return (1u << iseParams.numBits) * 3 - 1;
-		case ISEMODE_QUINT:			return (1u << iseParams.numBits) * 5 - 1;
-		case ISEMODE_PLAIN_BIT:		return (1u << iseParams.numBits)     - 1;
-		default:
-			DE_ASSERT(false);
-			return -1;
-	}
-}
-
-struct NormalBlockParams
-{
-	int					weightGridWidth;
-	int					weightGridHeight;
-	ISEParams			weightISEParams;
-	bool				isDualPlane;
-	deUint32			ccs; //! \note Irrelevant if !isDualPlane.
-	int					numPartitions;
-	deUint32			colorEndpointModes[4];
-	// \note Below members are irrelevant if numPartitions == 1.
-	bool				isMultiPartSingleCemMode; //! \note If true, the single CEM is at colorEndpointModes[0].
-	deUint32			partitionSeed;
-
-	NormalBlockParams (void)
-		: weightGridWidth			(-1)
-		, weightGridHeight			(-1)
-		, weightISEParams			(ISEMODE_LAST, -1)
-		, isDualPlane				(true)
-		, ccs						((deUint32)-1)
-		, numPartitions				(-1)
-		, isMultiPartSingleCemMode	(false)
-		, partitionSeed				((deUint32)-1)
-	{
-		colorEndpointModes[0] = 0;
-		colorEndpointModes[1] = 0;
-		colorEndpointModes[2] = 0;
-		colorEndpointModes[3] = 0;
-	}
-};
-
-struct NormalBlockISEInputs
-{
-	ISEInput weight;
-	ISEInput endpoint;
-
-	NormalBlockISEInputs (void)
-		: weight	()
-		, endpoint	()
-	{
-	}
-};
-
-static inline int computeNumWeights (const NormalBlockParams& params)
-{
-	return params.weightGridWidth * params.weightGridHeight * (params.isDualPlane ? 2 : 1);
-}
-
-static inline int computeNumBitsForColorEndpoints (const NormalBlockParams& params)
-{
-	const int numWeightBits			= computeNumRequiredBits(params.weightISEParams, computeNumWeights(params));
-	const int numConfigDataBits		= (params.numPartitions == 1 ? 17 : params.isMultiPartSingleCemMode ? 29 : 25 + 3*params.numPartitions) +
-									  (params.isDualPlane ? 2 : 0);
-
-	return 128 - numWeightBits - numConfigDataBits;
-}
-
-static inline int computeNumColorEndpointValues (deUint32 endpointMode)
-{
-	DE_ASSERT(endpointMode < 16);
-	return (endpointMode/4 + 1) * 2;
-}
-
-static inline int computeNumColorEndpointValues (const deUint32* endpointModes, int numPartitions, bool isMultiPartSingleCemMode)
-{
-	if (isMultiPartSingleCemMode)
-		return numPartitions * computeNumColorEndpointValues(endpointModes[0]);
-	else
-	{
-		int result = 0;
-		for (int i = 0; i < numPartitions; i++)
-			result += computeNumColorEndpointValues(endpointModes[i]);
-		return result;
-	}
-}
-
-static inline bool isValidBlockParams (const NormalBlockParams& params, int blockWidth, int blockHeight)
-{
-	const int numWeights				= computeNumWeights(params);
-	const int numWeightBits				= computeNumRequiredBits(params.weightISEParams, numWeights);
-	const int numColorEndpointValues	= computeNumColorEndpointValues(&params.colorEndpointModes[0], params.numPartitions, params.isMultiPartSingleCemMode);
-	const int numBitsForColorEndpoints	= computeNumBitsForColorEndpoints(params);
-
-	return numWeights <= 64										&&
-		   de::inRange(numWeightBits, 24, 96)					&&
-		   params.weightGridWidth <= blockWidth					&&
-		   params.weightGridHeight <= blockHeight				&&
-		   !(params.numPartitions == 4 && params.isDualPlane)	&&
-		   numColorEndpointValues <= 18							&&
-		   numBitsForColorEndpoints >= divRoundUp(13*numColorEndpointValues, 5);
-}
-
-// Write bits 0 to 10 of an ASTC block.
-static void writeBlockMode (AssignBlock128& dst, const NormalBlockParams& blockParams)
-{
-	const deUint32	d = blockParams.isDualPlane != 0;
-	// r and h initialized in switch below.
-	deUint32		r;
-	deUint32		h;
-	// a, b and blockModeLayoutNdx initialized in block mode layout index detecting loop below.
-	deUint32		a = (deUint32)-1;
-	deUint32		b = (deUint32)-1;
-	int				blockModeLayoutNdx;
-
-	// Find the values of r and h (ISE range).
-	switch (computeISERangeMax(blockParams.weightISEParams))
-	{
-		case 1:		r = 2; h = 0;	break;
-		case 2:		r = 3; h = 0;	break;
-		case 3:		r = 4; h = 0;	break;
-		case 4:		r = 5; h = 0;	break;
-		case 5:		r = 6; h = 0;	break;
-		case 7:		r = 7; h = 0;	break;
-
-		case 9:		r = 2; h = 1;	break;
-		case 11:	r = 3; h = 1;	break;
-		case 15:	r = 4; h = 1;	break;
-		case 19:	r = 5; h = 1;	break;
-		case 23:	r = 6; h = 1;	break;
-		case 31:	r = 7; h = 1;	break;
-
-		default:
-			DE_ASSERT(false);
-			r = (deUint32)-1;
-			h = (deUint32)-1;
-	}
-
-	// Find block mode layout index, i.e. appropriate row in the "2d block mode layout" table in ASTC spec.
-
-	{
-		enum BlockModeLayoutABVariable { Z=0, A=1, B=2 };
-
-		static const struct BlockModeLayout
-		{
-			int							aNumBits;
-			int							bNumBits;
-			BlockModeLayoutABVariable	gridWidthVariableTerm;
-			int							gridWidthConstantTerm;
-			BlockModeLayoutABVariable	gridHeightVariableTerm;
-			int							gridHeightConstantTerm;
-		} blockModeLayouts[] =
-		{
-			{ 2, 2,   B,  4,   A,  2},
-			{ 2, 2,   B,  8,   A,  2},
-			{ 2, 2,   A,  2,   B,  8},
-			{ 2, 1,   A,  2,   B,  6},
-			{ 2, 1,   B,  2,   A,  2},
-			{ 2, 0,   Z, 12,   A,  2},
-			{ 2, 0,   A,  2,   Z, 12},
-			{ 0, 0,   Z,  6,   Z, 10},
-			{ 0, 0,   Z, 10,   Z,  6},
-			{ 2, 2,   A,  6,   B,  6}
-		};
-
-		for (blockModeLayoutNdx = 0; blockModeLayoutNdx < DE_LENGTH_OF_ARRAY(blockModeLayouts); blockModeLayoutNdx++)
-		{
-			const BlockModeLayout&	layout					= blockModeLayouts[blockModeLayoutNdx];
-			const int				aMax					= (1 << layout.aNumBits) - 1;
-			const int				bMax					= (1 << layout.bNumBits) - 1;
-			const int				variableOffsetsMax[3]	= { 0, aMax, bMax };
-			const int				widthMin				= layout.gridWidthConstantTerm;
-			const int				heightMin				= layout.gridHeightConstantTerm;
-			const int				widthMax				= widthMin  + variableOffsetsMax[layout.gridWidthVariableTerm];
-			const int				heightMax				= heightMin + variableOffsetsMax[layout.gridHeightVariableTerm];
-
-			DE_ASSERT(layout.gridWidthVariableTerm != layout.gridHeightVariableTerm || layout.gridWidthVariableTerm == Z);
-
-			if (de::inRange(blockParams.weightGridWidth, widthMin, widthMax) &&
-				de::inRange(blockParams.weightGridHeight, heightMin, heightMax))
-			{
-				deUint32	dummy			= 0;
-				deUint32&	widthVariable	= layout.gridWidthVariableTerm == A  ? a : layout.gridWidthVariableTerm == B  ? b : dummy;
-				deUint32&	heightVariable	= layout.gridHeightVariableTerm == A ? a : layout.gridHeightVariableTerm == B ? b : dummy;
-
-				widthVariable	= blockParams.weightGridWidth  - layout.gridWidthConstantTerm;
-				heightVariable	= blockParams.weightGridHeight - layout.gridHeightConstantTerm;
-
-				break;
-			}
-		}
-	}
-
-	// Set block mode bits.
-
-	const deUint32 a0 = getBit(a, 0);
-	const deUint32 a1 = getBit(a, 1);
-	const deUint32 b0 = getBit(b, 0);
-	const deUint32 b1 = getBit(b, 1);
-	const deUint32 r0 = getBit(r, 0);
-	const deUint32 r1 = getBit(r, 1);
-	const deUint32 r2 = getBit(r, 2);
-
-#define SB(NDX, VAL) dst.setBit((NDX), (VAL))
-#define ASSIGN_BITS(B10, B9, B8, B7, B6, B5, B4, B3, B2, B1, B0) do { SB(10,(B10)); SB(9,(B9)); SB(8,(B8)); SB(7,(B7)); SB(6,(B6)); SB(5,(B5)); SB(4,(B4)); SB(3,(B3)); SB(2,(B2)); SB(1,(B1)); SB(0,(B0)); } while (false)
-
-	switch (blockModeLayoutNdx)
-	{
-		case 0: ASSIGN_BITS(d,  h,  b1, b0, a1, a0, r0, 0,  0,  r2, r1);									break;
-		case 1: ASSIGN_BITS(d,  h,  b1, b0, a1, a0, r0, 0,  1,  r2, r1);									break;
-		case 2: ASSIGN_BITS(d,  h,  b1, b0, a1, a0, r0, 1,  0,  r2, r1);									break;
-		case 3: ASSIGN_BITS(d,  h,   0,  b, a1, a0, r0, 1,  1,  r2, r1);									break;
-		case 4: ASSIGN_BITS(d,  h,   1,  b, a1, a0, r0, 1,  1,  r2, r1);									break;
-		case 5: ASSIGN_BITS(d,  h,   0,  0, a1, a0, r0, r2, r1,  0,  0);									break;
-		case 6: ASSIGN_BITS(d,  h,   0,  1, a1, a0, r0, r2, r1,  0,  0);									break;
-		case 7: ASSIGN_BITS(d,  h,   1,  1,  0,  0, r0, r2, r1,  0,  0);									break;
-		case 8: ASSIGN_BITS(d,  h,   1,  1,  0,  1, r0, r2, r1,  0,  0);									break;
-		case 9: ASSIGN_BITS(b1, b0,  1,  0, a1, a0, r0, r2, r1,  0,  0); DE_ASSERT(d == 0 && h == 0);		break;
-		default:
-			DE_ASSERT(false);
-	}
-
-#undef ASSIGN_BITS
-#undef SB
-}
-
-// Write color endpoint mode data of an ASTC block.
-static void writeColorEndpointModes (AssignBlock128& dst, const deUint32* colorEndpointModes, bool isMultiPartSingleCemMode, int numPartitions, int extraCemBitsStart)
-{
-	if (numPartitions == 1)
-		dst.setBits(13, 16, colorEndpointModes[0]);
-	else
-	{
-		if (isMultiPartSingleCemMode)
-		{
-			dst.setBits(23, 24, 0);
-			dst.setBits(25, 28, colorEndpointModes[0]);
-		}
-		else
-		{
-			DE_ASSERT(numPartitions > 0);
-			const deUint32 minCem				= *std::min_element(&colorEndpointModes[0], &colorEndpointModes[numPartitions]);
-			const deUint32 maxCem				= *std::max_element(&colorEndpointModes[0], &colorEndpointModes[numPartitions]);
-			const deUint32 minCemClass			= minCem/4;
-			const deUint32 maxCemClass			= maxCem/4;
-			DE_ASSERT(maxCemClass - minCemClass <= 1);
-			DE_UNREF(minCemClass); // \note For non-debug builds.
-			const deUint32 highLevelSelector	= de::max(1u, maxCemClass);
-
-			dst.setBits(23, 24, highLevelSelector);
-
-			for (int partNdx = 0; partNdx < numPartitions; partNdx++)
-			{
-				const deUint32 c			= colorEndpointModes[partNdx] / 4 == highLevelSelector ? 1 : 0;
-				const deUint32 m			= colorEndpointModes[partNdx] % 4;
-				const deUint32 lowMBit0Ndx	= numPartitions + 2*partNdx;
-				const deUint32 lowMBit1Ndx	= numPartitions + 2*partNdx + 1;
-				dst.setBit(25 + partNdx, c);
-				dst.setBit(lowMBit0Ndx < 4 ? 25+lowMBit0Ndx : extraCemBitsStart+lowMBit0Ndx-4, getBit(m, 0));
-				dst.setBit(lowMBit1Ndx < 4 ? 25+lowMBit1Ndx : extraCemBitsStart+lowMBit1Ndx-4, getBit(m, 1));
-			}
-		}
-	}
-}
-
-static ISEParams computeMaximumRangeISEParams (int numAvailableBits, int numValuesInSequence)
-{
-	int curBitsForTritMode		= 6;
-	int curBitsForQuintMode		= 5;
-	int curBitsForPlainBitMode	= 8;
-
-	while (true)
-	{
-		DE_ASSERT(curBitsForTritMode > 0 || curBitsForQuintMode > 0 || curBitsForPlainBitMode > 0);
-
-		const int tritRange			= curBitsForTritMode > 0		? (3 << curBitsForTritMode) - 1			: -1;
-		const int quintRange		= curBitsForQuintMode > 0		? (5 << curBitsForQuintMode) - 1		: -1;
-		const int plainBitRange		= curBitsForPlainBitMode > 0	? (1 << curBitsForPlainBitMode) - 1		: -1;
-		const int maxRange			= de::max(de::max(tritRange, quintRange), plainBitRange);
-
-		if (maxRange == tritRange)
-		{
-			const ISEParams params(ISEMODE_TRIT, curBitsForTritMode);
-			if (computeNumRequiredBits(params, numValuesInSequence) <= numAvailableBits)
-				return ISEParams(ISEMODE_TRIT, curBitsForTritMode);
-			curBitsForTritMode--;
-		}
-		else if (maxRange == quintRange)
-		{
-			const ISEParams params(ISEMODE_QUINT, curBitsForQuintMode);
-			if (computeNumRequiredBits(params, numValuesInSequence) <= numAvailableBits)
-				return ISEParams(ISEMODE_QUINT, curBitsForQuintMode);
-			curBitsForQuintMode--;
-		}
-		else
-		{
-			const ISEParams params(ISEMODE_PLAIN_BIT, curBitsForPlainBitMode);
-			DE_ASSERT(maxRange == plainBitRange);
-			if (computeNumRequiredBits(params, numValuesInSequence) <= numAvailableBits)
-				return ISEParams(ISEMODE_PLAIN_BIT, curBitsForPlainBitMode);
-			curBitsForPlainBitMode--;
-		}
-	}
-}
-
-static void encodeISETritBlock (BitAssignAccessStream& dst, int numBits, bool fromExplicitInputBlock, const ISEInput::Block& blockInput, const deUint32* nonBlockInput, int numValues)
-{
-	// tritBlockTValue[t0][t1][t2][t3][t4] is a value of T (not necessarily the only one) that will yield the given trits when decoded.
-	static const deUint32 tritBlockTValue[3][3][3][3][3] =
-	{
-		{
-			{{{0, 128, 96}, {32, 160, 224}, {64, 192, 28}}, {{16, 144, 112}, {48, 176, 240}, {80, 208, 156}}, {{3, 131, 99}, {35, 163, 227}, {67, 195, 31}}},
-			{{{4, 132, 100}, {36, 164, 228}, {68, 196, 60}}, {{20, 148, 116}, {52, 180, 244}, {84, 212, 188}}, {{19, 147, 115}, {51, 179, 243}, {83, 211, 159}}},
-			{{{8, 136, 104}, {40, 168, 232}, {72, 200, 92}}, {{24, 152, 120}, {56, 184, 248}, {88, 216, 220}}, {{12, 140, 108}, {44, 172, 236}, {76, 204, 124}}}
-		},
-		{
-			{{{1, 129, 97}, {33, 161, 225}, {65, 193, 29}}, {{17, 145, 113}, {49, 177, 241}, {81, 209, 157}}, {{7, 135, 103}, {39, 167, 231}, {71, 199, 63}}},
-			{{{5, 133, 101}, {37, 165, 229}, {69, 197, 61}}, {{21, 149, 117}, {53, 181, 245}, {85, 213, 189}}, {{23, 151, 119}, {55, 183, 247}, {87, 215, 191}}},
-			{{{9, 137, 105}, {41, 169, 233}, {73, 201, 93}}, {{25, 153, 121}, {57, 185, 249}, {89, 217, 221}}, {{13, 141, 109}, {45, 173, 237}, {77, 205, 125}}}
-		},
-		{
-			{{{2, 130, 98}, {34, 162, 226}, {66, 194, 30}}, {{18, 146, 114}, {50, 178, 242}, {82, 210, 158}}, {{11, 139, 107}, {43, 171, 235}, {75, 203, 95}}},
-			{{{6, 134, 102}, {38, 166, 230}, {70, 198, 62}}, {{22, 150, 118}, {54, 182, 246}, {86, 214, 190}}, {{27, 155, 123}, {59, 187, 251}, {91, 219, 223}}},
-			{{{10, 138, 106}, {42, 170, 234}, {74, 202, 94}}, {{26, 154, 122}, {58, 186, 250}, {90, 218, 222}}, {{14, 142, 110}, {46, 174, 238}, {78, 206, 126}}}
-		}
-	};
-
-	DE_ASSERT(de::inRange(numValues, 1, 5));
-
-	deUint32 tritParts[5];
-	deUint32 bitParts[5];
-
-	for (int i = 0; i < 5; i++)
-	{
-		if (i < numValues)
-		{
-			if (fromExplicitInputBlock)
-			{
-				bitParts[i]		= blockInput.bitValues[i];
-				tritParts[i]	= -1; // \note Won't be used, but silences warning.
-			}
-			else
-			{
-				bitParts[i]		= getBits(nonBlockInput[i], 0, numBits-1);
-				tritParts[i]	= nonBlockInput[i] >> numBits;
-			}
-		}
-		else
-		{
-			bitParts[i]		= 0;
-			tritParts[i]	= 0;
-		}
-	}
-
-	const deUint32 T = fromExplicitInputBlock ? blockInput.tOrQValue : tritBlockTValue[tritParts[0]]
-																					  [tritParts[1]]
-																					  [tritParts[2]]
-																					  [tritParts[3]]
-																					  [tritParts[4]];
-
-	dst.setNext(numBits,	bitParts[0]);
-	dst.setNext(2,			getBits(T, 0, 1));
-	dst.setNext(numBits,	bitParts[1]);
-	dst.setNext(2,			getBits(T, 2, 3));
-	dst.setNext(numBits,	bitParts[2]);
-	dst.setNext(1,			getBit(T, 4));
-	dst.setNext(numBits,	bitParts[3]);
-	dst.setNext(2,			getBits(T, 5, 6));
-	dst.setNext(numBits,	bitParts[4]);
-	dst.setNext(1,			getBit(T, 7));
-}
-
-static void encodeISEQuintBlock (BitAssignAccessStream& dst, int numBits, bool fromExplicitInputBlock, const ISEInput::Block& blockInput, const deUint32* nonBlockInput, int numValues)
-{
-	// quintBlockQValue[q0][q1][q2] is a value of Q (not necessarily the only one) that will yield the given quints when decoded.
-	static const deUint32 quintBlockQValue[5][5][5] =
-	{
-		{{0, 32, 64, 96, 102}, {8, 40, 72, 104, 110}, {16, 48, 80, 112, 118}, {24, 56, 88, 120, 126}, {5, 37, 69, 101, 39}},
-		{{1, 33, 65, 97, 103}, {9, 41, 73, 105, 111}, {17, 49, 81, 113, 119}, {25, 57, 89, 121, 127}, {13, 45, 77, 109, 47}},
-		{{2, 34, 66, 98, 70}, {10, 42, 74, 106, 78}, {18, 50, 82, 114, 86}, {26, 58, 90, 122, 94}, {21, 53, 85, 117, 55}},
-		{{3, 35, 67, 99, 71}, {11, 43, 75, 107, 79}, {19, 51, 83, 115, 87}, {27, 59, 91, 123, 95}, {29, 61, 93, 125, 63}},
-		{{4, 36, 68, 100, 38}, {12, 44, 76, 108, 46}, {20, 52, 84, 116, 54}, {28, 60, 92, 124, 62}, {6, 14, 22, 30, 7}}
-	};
-
-	DE_ASSERT(de::inRange(numValues, 1, 3));
-
-	deUint32 quintParts[3];
-	deUint32 bitParts[3];
-
-	for (int i = 0; i < 3; i++)
-	{
-		if (i < numValues)
-		{
-			if (fromExplicitInputBlock)
-			{
-				bitParts[i]		= blockInput.bitValues[i];
-				quintParts[i]	= -1; // \note Won't be used, but silences warning.
-			}
-			else
-			{
-				bitParts[i]		= getBits(nonBlockInput[i], 0, numBits-1);
-				quintParts[i]	= nonBlockInput[i] >> numBits;
-			}
-		}
-		else
-		{
-			bitParts[i]		= 0;
-			quintParts[i]	= 0;
-		}
-	}
-
-	const deUint32 Q = fromExplicitInputBlock ? blockInput.tOrQValue : quintBlockQValue[quintParts[0]]
-																					   [quintParts[1]]
-																					   [quintParts[2]];
-
-	dst.setNext(numBits,	bitParts[0]);
-	dst.setNext(3,			getBits(Q, 0, 2));
-	dst.setNext(numBits,	bitParts[1]);
-	dst.setNext(2,			getBits(Q, 3, 4));
-	dst.setNext(numBits,	bitParts[2]);
-	dst.setNext(2,			getBits(Q, 5, 6));
-}
-
-static void encodeISEBitBlock (BitAssignAccessStream& dst, int numBits, deUint32 value)
-{
-	DE_ASSERT(de::inRange(value, 0u, (1u<<numBits)-1));
-	dst.setNext(numBits, value);
-}
-
-static void encodeISE (BitAssignAccessStream& dst, const ISEParams& params, const ISEInput& input, int numValues)
-{
-	if (params.mode == ISEMODE_TRIT)
-	{
-		const int numBlocks = divRoundUp(numValues, 5);
-		for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
-		{
-			const int numValuesInBlock = blockNdx == numBlocks-1 ? numValues - 5*(numBlocks-1) : 5;
-			encodeISETritBlock(dst, params.numBits, input.isGivenInBlockForm,
-							   input.isGivenInBlockForm ? input.value.block[blockNdx]	: ISEInput::Block(),
-							   input.isGivenInBlockForm ? DE_NULL						: &input.value.plain[5*blockNdx],
-							   numValuesInBlock);
-		}
-	}
-	else if (params.mode == ISEMODE_QUINT)
-	{
-		const int numBlocks = divRoundUp(numValues, 3);
-		for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
-		{
-			const int numValuesInBlock = blockNdx == numBlocks-1 ? numValues - 3*(numBlocks-1) : 3;
-			encodeISEQuintBlock(dst, params.numBits, input.isGivenInBlockForm,
-								input.isGivenInBlockForm ? input.value.block[blockNdx]	: ISEInput::Block(),
-								input.isGivenInBlockForm ? DE_NULL						: &input.value.plain[3*blockNdx],
-								numValuesInBlock);
-		}
-	}
-	else
-	{
-		DE_ASSERT(params.mode == ISEMODE_PLAIN_BIT);
-		for (int i = 0; i < numValues; i++)
-			encodeISEBitBlock(dst, params.numBits, input.isGivenInBlockForm ? input.value.block[i].bitValues[0] : input.value.plain[i]);
-	}
-}
-
-static void writeWeightData (AssignBlock128& dst, const ISEParams& iseParams, const ISEInput& input, int numWeights)
-{
-	const int				numWeightBits	= computeNumRequiredBits(iseParams, numWeights);
-	BitAssignAccessStream	access			(dst, 127, numWeightBits, false);
-	encodeISE(access, iseParams, input, numWeights);
-}
-
-static void writeColorEndpointData (AssignBlock128& dst, const ISEParams& iseParams, const ISEInput& input, int numEndpoints, int numBitsForColorEndpoints, int colorEndpointDataStartNdx)
-{
-	BitAssignAccessStream access(dst, colorEndpointDataStartNdx, numBitsForColorEndpoints, true);
-	encodeISE(access, iseParams, input, numEndpoints);
-}
-
-static AssignBlock128 generateNormalBlock (const NormalBlockParams& blockParams, int blockWidth, int blockHeight, const NormalBlockISEInputs& iseInputs)
-{
-	DE_ASSERT(isValidBlockParams(blockParams, blockWidth, blockHeight));
-	DE_UNREF(blockWidth);	// \note For non-debug builds.
-	DE_UNREF(blockHeight);	// \note For non-debug builds.
-
-	AssignBlock128	block;
-	const int		numWeights		= computeNumWeights(blockParams);
-	const int		numWeightBits	= computeNumRequiredBits(blockParams.weightISEParams, numWeights);
-
-	writeBlockMode(block, blockParams);
-
-	block.setBits(11, 12, blockParams.numPartitions - 1);
-	if (blockParams.numPartitions > 1)
-		block.setBits(13, 22, blockParams.partitionSeed);
-
-	{
-		const int extraCemBitsStart = 127 - numWeightBits - (blockParams.numPartitions == 1 || blockParams.isMultiPartSingleCemMode		? -1
-															: blockParams.numPartitions == 4											? 7
-															: blockParams.numPartitions == 3											? 4
-															: blockParams.numPartitions == 2											? 1
-															: 0);
-
-		writeColorEndpointModes(block, &blockParams.colorEndpointModes[0], blockParams.isMultiPartSingleCemMode, blockParams.numPartitions, extraCemBitsStart);
-
-		if (blockParams.isDualPlane)
-			block.setBits(extraCemBitsStart-2, extraCemBitsStart-1, blockParams.ccs);
-	}
-
-	writeWeightData(block, blockParams.weightISEParams, iseInputs.weight, numWeights);
-
-	{
-		const int			numColorEndpointValues		= computeNumColorEndpointValues(&blockParams.colorEndpointModes[0], blockParams.numPartitions, blockParams.isMultiPartSingleCemMode);
-		const int			numBitsForColorEndpoints	= computeNumBitsForColorEndpoints(blockParams);
-		const int			colorEndpointDataStartNdx	= blockParams.numPartitions == 1 ? 17 : 29;
-		const ISEParams&	colorEndpointISEParams		= computeMaximumRangeISEParams(numBitsForColorEndpoints, numColorEndpointValues);
-
-		writeColorEndpointData(block, colorEndpointISEParams, iseInputs.endpoint, numColorEndpointValues, numBitsForColorEndpoints, colorEndpointDataStartNdx);
-	}
-
-	return block;
-}
-
-// Generate default ISE inputs for weight and endpoint data - gradient-ish values.
-static NormalBlockISEInputs generateDefaultISEInputs (const NormalBlockParams& blockParams)
-{
-	NormalBlockISEInputs result;
-
-	{
-		result.weight.isGivenInBlockForm = false;
-
-		const int numWeights		= computeNumWeights(blockParams);
-		const int weightRangeMax	= computeISERangeMax(blockParams.weightISEParams);
-
-		if (blockParams.isDualPlane)
-		{
-			for (int i = 0; i < numWeights; i += 2)
-				result.weight.value.plain[i] = (i*weightRangeMax + (numWeights-1)/2) / (numWeights-1);
-
-			for (int i = 1; i < numWeights; i += 2)
-				result.weight.value.plain[i] = weightRangeMax - (i*weightRangeMax + (numWeights-1)/2) / (numWeights-1);
-		}
-		else
-		{
-			for (int i = 0; i < numWeights; i++)
-				result.weight.value.plain[i] = (i*weightRangeMax + (numWeights-1)/2) / (numWeights-1);
-		}
-	}
-
-	{
-		result.endpoint.isGivenInBlockForm = false;
-
-		const int			numColorEndpointValues		= computeNumColorEndpointValues(&blockParams.colorEndpointModes[0], blockParams.numPartitions, blockParams.isMultiPartSingleCemMode);
-		const int			numBitsForColorEndpoints	= computeNumBitsForColorEndpoints(blockParams);
-		const ISEParams&	colorEndpointISEParams		= computeMaximumRangeISEParams(numBitsForColorEndpoints, numColorEndpointValues);
-		const int			colorEndpointRangeMax		= computeISERangeMax(colorEndpointISEParams);
-
-		for (int i = 0; i < numColorEndpointValues; i++)
-			result.endpoint.value.plain[i] = (i*colorEndpointRangeMax + (numColorEndpointValues-1)/2) / (numColorEndpointValues-1);
-	}
-
-	return result;
-}
-
-} // ASTCBlockGeneratorInternal
-
-static Vec4 getBlockTestTypeColorScale (ASTCBlockTestType testType)
-{
-	switch (testType)
-	{
-		case ASTCBLOCKTESTTYPE_VOID_EXTENT_HDR:				return Vec4(0.5f/65504.0f);
-		case ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_NO_15:	return Vec4(1.0f/65504.0f, 1.0f/65504.0f, 1.0f/65504.0f, 1.0f);
-		case ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_15:		return Vec4(1.0f/65504.0f);
-		default:											return Vec4(1.0f);
-	}
-}
-
-static Vec4 getBlockTestTypeColorBias (ASTCBlockTestType testType)
-{
-	switch (testType)
-	{
-		case ASTCBLOCKTESTTYPE_VOID_EXTENT_HDR:		return Vec4(0.5f);
-		default:									return Vec4(0.0f);
-	}
-}
-
-// Generate block data for a given ASTCBlockTestType and format.
-static void generateBlockCaseTestData (vector<deUint8>& dst, CompressedTexFormat format, ASTCBlockTestType testType)
-{
-	using namespace ASTCBlockGeneratorInternal;
-
-	static const ISEParams weightISEParamsCandidates[] =
-	{
-		ISEParams(ISEMODE_PLAIN_BIT,	1),
-		ISEParams(ISEMODE_TRIT,			0),
-		ISEParams(ISEMODE_PLAIN_BIT,	2),
-		ISEParams(ISEMODE_QUINT,		0),
-		ISEParams(ISEMODE_TRIT,			1),
-		ISEParams(ISEMODE_PLAIN_BIT,	3),
-		ISEParams(ISEMODE_QUINT,		1),
-		ISEParams(ISEMODE_TRIT,			2),
-		ISEParams(ISEMODE_PLAIN_BIT,	4),
-		ISEParams(ISEMODE_QUINT,		2),
-		ISEParams(ISEMODE_TRIT,			3),
-		ISEParams(ISEMODE_PLAIN_BIT,	5)
-	};
-
-	DE_ASSERT(tcu::isAstcFormat(format));
-	DE_ASSERT(!(tcu::isAstcSRGBFormat(format) && isBlockTestTypeHDROnly(testType)));
-
-	const IVec3 blockSize = getBlockPixelSize(format);
-	DE_ASSERT(blockSize.z() == 1);
-
-	switch (testType)
-	{
-		case ASTCBLOCKTESTTYPE_VOID_EXTENT_LDR:
-		// Generate a gradient-like set of LDR void-extent blocks.
-		{
-			const int			numBlocks	= 1<<13;
-			const deUint32		numValues	= 1<<16;
-			dst.reserve(numBlocks*ASTC_BLOCK_SIZE_BYTES);
-
-			for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
-			{
-				const deUint32 baseValue	= blockNdx*(numValues-1) / (numBlocks-1);
-				const deUint16 r			= (deUint16)((baseValue + numValues*0/4) % numValues);
-				const deUint16 g			= (deUint16)((baseValue + numValues*1/4) % numValues);
-				const deUint16 b			= (deUint16)((baseValue + numValues*2/4) % numValues);
-				const deUint16 a			= (deUint16)((baseValue + numValues*3/4) % numValues);
-				AssignBlock128 block;
-
-				generateVoidExtentBlock(VoidExtentParams(false, r, g, b, a)).pushBytesToVector(dst);
-			}
-
-			break;
-		}
-
-		case ASTCBLOCKTESTTYPE_VOID_EXTENT_HDR:
-		// Generate a gradient-like set of HDR void-extent blocks, with values ranging from the largest finite negative to largest finite positive of fp16.
-		{
-			const float		minValue	= -65504.0f;
-			const float		maxValue	= +65504.0f;
-			const int		numBlocks	= 1<<13;
-			dst.reserve(numBlocks*ASTC_BLOCK_SIZE_BYTES);
-
-			for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
-			{
-				const int			rNdx	= (blockNdx + numBlocks*0/4) % numBlocks;
-				const int			gNdx	= (blockNdx + numBlocks*1/4) % numBlocks;
-				const int			bNdx	= (blockNdx + numBlocks*2/4) % numBlocks;
-				const int			aNdx	= (blockNdx + numBlocks*3/4) % numBlocks;
-				const deFloat16		r		= deFloat32To16(minValue + (float)rNdx * (maxValue - minValue) / (float)(numBlocks-1));
-				const deFloat16		g		= deFloat32To16(minValue + (float)gNdx * (maxValue - minValue) / (float)(numBlocks-1));
-				const deFloat16		b		= deFloat32To16(minValue + (float)bNdx * (maxValue - minValue) / (float)(numBlocks-1));
-				const deFloat16		a		= deFloat32To16(minValue + (float)aNdx * (maxValue - minValue) / (float)(numBlocks-1));
-
-				generateVoidExtentBlock(VoidExtentParams(true, r, g, b, a)).pushBytesToVector(dst);
-			}
-
-			break;
-		}
-
-		case ASTCBLOCKTESTTYPE_WEIGHT_GRID:
-		// Generate different combinations of plane count, weight ISE params, and grid size.
-		{
-			for (int isDualPlane = 0;		isDualPlane <= 1;												isDualPlane++)
-			for (int iseParamsNdx = 0;		iseParamsNdx < DE_LENGTH_OF_ARRAY(weightISEParamsCandidates);	iseParamsNdx++)
-			for (int weightGridWidth = 2;	weightGridWidth <= 12;											weightGridWidth++)
-			for (int weightGridHeight = 2;	weightGridHeight <= 12;											weightGridHeight++)
-			{
-				NormalBlockParams		blockParams;
-				NormalBlockISEInputs	iseInputs;
-
-				blockParams.weightGridWidth			= weightGridWidth;
-				blockParams.weightGridHeight		= weightGridHeight;
-				blockParams.isDualPlane				= isDualPlane != 0;
-				blockParams.weightISEParams			= weightISEParamsCandidates[iseParamsNdx];
-				blockParams.ccs						= 0;
-				blockParams.numPartitions			= 1;
-				blockParams.colorEndpointModes[0]	= 0;
-
-				if (isValidBlockParams(blockParams, blockSize.x(), blockSize.y()))
-					generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), generateDefaultISEInputs(blockParams)).pushBytesToVector(dst);
-			}
-
-			break;
-		}
-
-		case ASTCBLOCKTESTTYPE_WEIGHT_ISE:
-		// For each weight ISE param set, generate blocks that cover:
-		// - each single value of the ISE's range, at each position inside an ISE block
-		// - for trit and quint ISEs, each single T or Q value of an ISE block
-		{
-			for (int iseParamsNdx = 0;	iseParamsNdx < DE_LENGTH_OF_ARRAY(weightISEParamsCandidates);	iseParamsNdx++)
-			{
-				const ISEParams&	iseParams = weightISEParamsCandidates[iseParamsNdx];
-				NormalBlockParams	blockParams;
-
-				blockParams.weightGridWidth			= 4;
-				blockParams.weightGridHeight		= 4;
-				blockParams.weightISEParams			= iseParams;
-				blockParams.numPartitions			= 1;
-				blockParams.isDualPlane				= blockParams.weightGridWidth * blockParams.weightGridHeight < 24 ? true : false;
-				blockParams.ccs						= 0;
-				blockParams.colorEndpointModes[0]	= 0;
-
-				while (!isValidBlockParams(blockParams, blockSize.x(), blockSize.y()))
-				{
-					blockParams.weightGridWidth--;
-					blockParams.weightGridHeight--;
-				}
-
-				const int numValuesInISEBlock	= iseParams.mode == ISEMODE_TRIT ? 5 : iseParams.mode == ISEMODE_QUINT ? 3 : 1;
-				const int numWeights			= computeNumWeights(blockParams);
-
-				{
-					const int				numWeightValues		= (int)computeISERangeMax(iseParams) + 1;
-					const int				numBlocks			= divRoundUp(numWeightValues, numWeights);
-					NormalBlockISEInputs	iseInputs			= generateDefaultISEInputs(blockParams);
-					iseInputs.weight.isGivenInBlockForm = false;
-
-					for (int offset = 0;	offset < numValuesInISEBlock;	offset++)
-					for (int blockNdx = 0;	blockNdx < numBlocks;			blockNdx++)
-					{
-						for (int weightNdx = 0; weightNdx < numWeights; weightNdx++)
-							iseInputs.weight.value.plain[weightNdx] = (blockNdx*numWeights + weightNdx + offset) % numWeightValues;
-
-						generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), iseInputs).pushBytesToVector(dst);
-					}
-				}
-
-				if (iseParams.mode == ISEMODE_TRIT || iseParams.mode == ISEMODE_QUINT)
-				{
-					NormalBlockISEInputs iseInputs = generateDefaultISEInputs(blockParams);
-					iseInputs.weight.isGivenInBlockForm = true;
-
-					const int numTQValues			= 1 << (iseParams.mode == ISEMODE_TRIT ? 8 : 7);
-					const int numISEBlocksPerBlock	= divRoundUp(numWeights, numValuesInISEBlock);
-					const int numBlocks				= divRoundUp(numTQValues, numISEBlocksPerBlock);
-
-					for (int offset = 0;	offset < numValuesInISEBlock;	offset++)
-					for (int blockNdx = 0;	blockNdx < numBlocks;			blockNdx++)
-					{
-						for (int iseBlockNdx = 0; iseBlockNdx < numISEBlocksPerBlock; iseBlockNdx++)
-						{
-							for (int i = 0; i < numValuesInISEBlock; i++)
-								iseInputs.weight.value.block[iseBlockNdx].bitValues[i] = 0;
-							iseInputs.weight.value.block[iseBlockNdx].tOrQValue = (blockNdx*numISEBlocksPerBlock + iseBlockNdx + offset) % numTQValues;
-						}
-
-						generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), iseInputs).pushBytesToVector(dst);
-					}
-				}
-			}
-
-			break;
-		}
-
-		case ASTCBLOCKTESTTYPE_CEMS:
-		// For each plane count & partition count combination, generate all color endpoint mode combinations.
-		{
-			for (int isDualPlane = 0;		isDualPlane <= 1;								isDualPlane++)
-			for (int numPartitions = 1;		numPartitions <= (isDualPlane != 0 ? 3 : 4);	numPartitions++)
-			{
-				// Multi-partition, single-CEM mode.
-				if (numPartitions > 1)
-				{
-					for (deUint32 singleCem = 0; singleCem < 16; singleCem++)
-					{
-						NormalBlockParams blockParams;
-						blockParams.weightGridWidth				= 4;
-						blockParams.weightGridHeight			= 4;
-						blockParams.isDualPlane					= isDualPlane != 0;
-						blockParams.ccs							= 0;
-						blockParams.numPartitions				= numPartitions;
-						blockParams.isMultiPartSingleCemMode	= true;
-						blockParams.colorEndpointModes[0]		= singleCem;
-						blockParams.partitionSeed				= 634;
-
-						for (int iseParamsNdx = 0; iseParamsNdx < DE_LENGTH_OF_ARRAY(weightISEParamsCandidates); iseParamsNdx++)
-						{
-							blockParams.weightISEParams = weightISEParamsCandidates[iseParamsNdx];
-							if (isValidBlockParams(blockParams, blockSize.x(), blockSize.y()))
-							{
-								generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), generateDefaultISEInputs(blockParams)).pushBytesToVector(dst);
-								break;
-							}
-						}
-					}
-				}
-
-				// Separate-CEM mode.
-				for (deUint32 cem0 = 0; cem0 < 16; cem0++)
-				for (deUint32 cem1 = 0; cem1 < (numPartitions >= 2 ? 16u : 1u); cem1++)
-				for (deUint32 cem2 = 0; cem2 < (numPartitions >= 3 ? 16u : 1u); cem2++)
-				for (deUint32 cem3 = 0; cem3 < (numPartitions >= 4 ? 16u : 1u); cem3++)
-				{
-					NormalBlockParams blockParams;
-					blockParams.weightGridWidth				= 4;
-					blockParams.weightGridHeight			= 4;
-					blockParams.isDualPlane					= isDualPlane != 0;
-					blockParams.ccs							= 0;
-					blockParams.numPartitions				= numPartitions;
-					blockParams.isMultiPartSingleCemMode	= false;
-					blockParams.colorEndpointModes[0]		= cem0;
-					blockParams.colorEndpointModes[1]		= cem1;
-					blockParams.colorEndpointModes[2]		= cem2;
-					blockParams.colorEndpointModes[3]		= cem3;
-					blockParams.partitionSeed				= 634;
-
-					{
-						const deUint32 minCem		= *std::min_element(&blockParams.colorEndpointModes[0], &blockParams.colorEndpointModes[numPartitions]);
-						const deUint32 maxCem		= *std::max_element(&blockParams.colorEndpointModes[0], &blockParams.colorEndpointModes[numPartitions]);
-						const deUint32 minCemClass	= minCem/4;
-						const deUint32 maxCemClass	= maxCem/4;
-
-						if (maxCemClass - minCemClass > 1)
-							continue;
-					}
-
-					for (int iseParamsNdx = 0; iseParamsNdx < DE_LENGTH_OF_ARRAY(weightISEParamsCandidates); iseParamsNdx++)
-					{
-						blockParams.weightISEParams = weightISEParamsCandidates[iseParamsNdx];
-						if (isValidBlockParams(blockParams, blockSize.x(), blockSize.y()))
-						{
-							generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), generateDefaultISEInputs(blockParams)).pushBytesToVector(dst);
-							break;
-						}
-					}
-				}
-			}
-
-			break;
-		}
-
-		case ASTCBLOCKTESTTYPE_PARTITION_SEED:
-		// Test all partition seeds ("partition pattern indices").
-		{
-			for (int		numPartitions = 2;	numPartitions <= 4;		numPartitions++)
-			for (deUint32	partitionSeed = 0;	partitionSeed < 1<<10;	partitionSeed++)
-			{
-				NormalBlockParams blockParams;
-				blockParams.weightGridWidth				= 4;
-				blockParams.weightGridHeight			= 4;
-				blockParams.weightISEParams				= ISEParams(ISEMODE_PLAIN_BIT, 2);
-				blockParams.isDualPlane					= false;
-				blockParams.numPartitions				= numPartitions;
-				blockParams.isMultiPartSingleCemMode	= true;
-				blockParams.colorEndpointModes[0]		= 0;
-				blockParams.partitionSeed				= partitionSeed;
-
-				generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), generateDefaultISEInputs(blockParams)).pushBytesToVector(dst);
-			}
-
-			break;
-		}
-
-		// \note Fall-through.
-		case ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_LDR:
-		case ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_NO_15:
-		case ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_15:
-		// For each endpoint mode, for each pair of components in the endpoint value, test 10x10 combinations of values for that pair.
-		// \note Separate modes for HDR and mode 15 due to different color scales and biases.
-		{
-			for (deUint32 cem = 0; cem < 16; cem++)
-			{
-				const bool isHDRCem = cem == 2		||
-									  cem == 3		||
-									  cem == 7		||
-									  cem == 11		||
-									  cem == 14		||
-									  cem == 15;
-
-				if ((testType == ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_LDR			&& isHDRCem)					||
-					(testType == ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_NO_15		&& (!isHDRCem || cem == 15))	||
-					(testType == ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_15		&& cem != 15))
-					continue;
-
-				NormalBlockParams blockParams;
-				blockParams.weightGridWidth			= 3;
-				blockParams.weightGridHeight		= 4;
-				blockParams.weightISEParams			= ISEParams(ISEMODE_PLAIN_BIT, 2);
-				blockParams.isDualPlane				= false;
-				blockParams.numPartitions			= 1;
-				blockParams.colorEndpointModes[0]	= cem;
-
-				{
-					const int			numBitsForEndpoints		= computeNumBitsForColorEndpoints(blockParams);
-					const int			numEndpointParts		= computeNumColorEndpointValues(cem);
-					const ISEParams		endpointISE				= computeMaximumRangeISEParams(numBitsForEndpoints, numEndpointParts);
-					const int			endpointISERangeMax		= computeISERangeMax(endpointISE);
-
-					for (int endpointPartNdx0 = 0;						endpointPartNdx0 < numEndpointParts; endpointPartNdx0++)
-					for (int endpointPartNdx1 = endpointPartNdx0+1;		endpointPartNdx1 < numEndpointParts; endpointPartNdx1++)
-					{
-						NormalBlockISEInputs	iseInputs			= generateDefaultISEInputs(blockParams);
-						const int				numEndpointValues	= de::min(10, endpointISERangeMax+1);
-
-						for (int endpointValueNdx0 = 0; endpointValueNdx0 < numEndpointValues; endpointValueNdx0++)
-						for (int endpointValueNdx1 = 0; endpointValueNdx1 < numEndpointValues; endpointValueNdx1++)
-						{
-							const int endpointValue0 = endpointValueNdx0 * endpointISERangeMax / (numEndpointValues-1);
-							const int endpointValue1 = endpointValueNdx1 * endpointISERangeMax / (numEndpointValues-1);
-
-							iseInputs.endpoint.value.plain[endpointPartNdx0] = endpointValue0;
-							iseInputs.endpoint.value.plain[endpointPartNdx1] = endpointValue1;
-
-							generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), iseInputs).pushBytesToVector(dst);
-						}
-					}
-				}
-			}
-
-			break;
-		}
-
-		case ASTCBLOCKTESTTYPE_ENDPOINT_ISE:
-		// Similar to ASTCBLOCKTESTTYPE_WEIGHT_ISE, see above.
-		{
-			static const deUint32 endpointRangeMaximums[] = { 5, 9, 11, 19, 23, 39, 47, 79, 95, 159, 191 };
-
-			for (int endpointRangeNdx = 0; endpointRangeNdx < DE_LENGTH_OF_ARRAY(endpointRangeMaximums); endpointRangeNdx++)
-			{
-				bool validCaseGenerated = false;
-
-				for (int numPartitions = 1;			!validCaseGenerated && numPartitions <= 4;													numPartitions++)
-				for (int isDual = 0;				!validCaseGenerated && isDual <= 1;															isDual++)
-				for (int weightISEParamsNdx = 0;	!validCaseGenerated && weightISEParamsNdx < DE_LENGTH_OF_ARRAY(weightISEParamsCandidates);	weightISEParamsNdx++)
-				for (int weightGridWidth = 2;		!validCaseGenerated && weightGridWidth <= 12;												weightGridWidth++)
-				for (int weightGridHeight = 2;		!validCaseGenerated && weightGridHeight <= 12;												weightGridHeight++)
-				{
-					NormalBlockParams blockParams;
-					blockParams.weightGridWidth				= weightGridWidth;
-					blockParams.weightGridHeight			= weightGridHeight;
-					blockParams.weightISEParams				= weightISEParamsCandidates[weightISEParamsNdx];
-					blockParams.isDualPlane					= isDual != 0;
-					blockParams.ccs							= 0;
-					blockParams.numPartitions				= numPartitions;
-					blockParams.isMultiPartSingleCemMode	= true;
-					blockParams.colorEndpointModes[0]		= 12;
-					blockParams.partitionSeed				= 634;
-
-					if (isValidBlockParams(blockParams, blockSize.x(), blockSize.y()))
-					{
-						const ISEParams endpointISEParams = computeMaximumRangeISEParams(computeNumBitsForColorEndpoints(blockParams),
-																						 computeNumColorEndpointValues(&blockParams.colorEndpointModes[0], numPartitions, true));
-
-						if (computeISERangeMax(endpointISEParams) == endpointRangeMaximums[endpointRangeNdx])
-						{
-							validCaseGenerated = true;
-
-							const int numColorEndpoints		= computeNumColorEndpointValues(&blockParams.colorEndpointModes[0], numPartitions, blockParams.isMultiPartSingleCemMode);
-							const int numValuesInISEBlock	= endpointISEParams.mode == ISEMODE_TRIT ? 5 : endpointISEParams.mode == ISEMODE_QUINT ? 3 : 1;
-
-							{
-								const int				numColorEndpointValues	= (int)computeISERangeMax(endpointISEParams) + 1;
-								const int				numBlocks				= divRoundUp(numColorEndpointValues, numColorEndpoints);
-								NormalBlockISEInputs	iseInputs				= generateDefaultISEInputs(blockParams);
-								iseInputs.endpoint.isGivenInBlockForm = false;
-
-								for (int offset = 0;	offset < numValuesInISEBlock;	offset++)
-								for (int blockNdx = 0;	blockNdx < numBlocks;			blockNdx++)
-								{
-									for (int endpointNdx = 0; endpointNdx < numColorEndpoints; endpointNdx++)
-										iseInputs.endpoint.value.plain[endpointNdx] = (blockNdx*numColorEndpoints + endpointNdx + offset) % numColorEndpointValues;
-
-									generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), iseInputs).pushBytesToVector(dst);
-								}
-							}
-
-							if (endpointISEParams.mode == ISEMODE_TRIT || endpointISEParams.mode == ISEMODE_QUINT)
-							{
-								NormalBlockISEInputs iseInputs = generateDefaultISEInputs(blockParams);
-								iseInputs.endpoint.isGivenInBlockForm = true;
-
-								const int numTQValues			= 1 << (endpointISEParams.mode == ISEMODE_TRIT ? 8 : 7);
-								const int numISEBlocksPerBlock	= divRoundUp(numColorEndpoints, numValuesInISEBlock);
-								const int numBlocks				= divRoundUp(numTQValues, numISEBlocksPerBlock);
-
-								for (int offset = 0;	offset < numValuesInISEBlock;	offset++)
-								for (int blockNdx = 0;	blockNdx < numBlocks;			blockNdx++)
-								{
-									for (int iseBlockNdx = 0; iseBlockNdx < numISEBlocksPerBlock; iseBlockNdx++)
-									{
-										for (int i = 0; i < numValuesInISEBlock; i++)
-											iseInputs.endpoint.value.block[iseBlockNdx].bitValues[i] = 0;
-										iseInputs.endpoint.value.block[iseBlockNdx].tOrQValue = (blockNdx*numISEBlocksPerBlock + iseBlockNdx + offset) % numTQValues;
-									}
-
-									generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), iseInputs).pushBytesToVector(dst);
-								}
-							}
-						}
-					}
-				}
-
-				DE_ASSERT(validCaseGenerated);
-			}
-
-			break;
-		}
-
-		case ASTCBLOCKTESTTYPE_CCS:
-		// For all partition counts, test all values of the CCS (color component selector).
-		{
-			for (int		numPartitions = 1;		numPartitions <= 3;		numPartitions++)
-			for (deUint32	ccs = 0;				ccs < 4;				ccs++)
-			{
-				NormalBlockParams blockParams;
-				blockParams.weightGridWidth				= 3;
-				blockParams.weightGridHeight			= 3;
-				blockParams.weightISEParams				= ISEParams(ISEMODE_PLAIN_BIT, 2);
-				blockParams.isDualPlane					= true;
-				blockParams.ccs							= ccs;
-				blockParams.numPartitions				= numPartitions;
-				blockParams.isMultiPartSingleCemMode	= true;
-				blockParams.colorEndpointModes[0]		= 8;
-				blockParams.partitionSeed				= 634;
-
-				generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), generateDefaultISEInputs(blockParams)).pushBytesToVector(dst);
-			}
-
-			break;
-		}
-
-		case ASTCBLOCKTESTTYPE_RANDOM:
-		// Generate a number of random (but valid) blocks.
-		{
-			const int		numBlocks			= 16384;
-			de::Random		rnd					(1);
-			int				numBlocksGenerated	= 0;
-
-			dst.reserve(numBlocks*ASTC_BLOCK_SIZE_BYTES);
-
-			for (numBlocksGenerated = 0; numBlocksGenerated < numBlocks; numBlocksGenerated++)
-			{
-				if (rnd.getFloat() < 0.1f)
-				{
-					// Void extent block.
-					const bool		isVoidExtentHDR		= rnd.getBool();
-					const deUint16	r					= isVoidExtentHDR ? deFloat32To16(rnd.getFloat(0.0f, 1.0f)) : (deUint16)rnd.getInt(0, 0xffff);
-					const deUint16	g					= isVoidExtentHDR ? deFloat32To16(rnd.getFloat(0.0f, 1.0f)) : (deUint16)rnd.getInt(0, 0xffff);
-					const deUint16	b					= isVoidExtentHDR ? deFloat32To16(rnd.getFloat(0.0f, 1.0f)) : (deUint16)rnd.getInt(0, 0xffff);
-					const deUint16	a					= isVoidExtentHDR ? deFloat32To16(rnd.getFloat(0.0f, 1.0f)) : (deUint16)rnd.getInt(0, 0xffff);
-					generateVoidExtentBlock(VoidExtentParams(isVoidExtentHDR, r, g, b, a)).pushBytesToVector(dst);
-				}
-				else
-				{
-					// Not void extent block.
-
-					// Generate block params.
-
-					NormalBlockParams blockParams;
-
-					do
-					{
-						blockParams.weightGridWidth				= rnd.getInt(2, blockSize.x());
-						blockParams.weightGridHeight			= rnd.getInt(2, blockSize.y());
-						blockParams.weightISEParams				= weightISEParamsCandidates[rnd.getInt(0, DE_LENGTH_OF_ARRAY(weightISEParamsCandidates)-1)];
-						blockParams.numPartitions				= rnd.getInt(1, 4);
-						blockParams.isMultiPartSingleCemMode	= rnd.getFloat() < 0.25f;
-						blockParams.isDualPlane					= blockParams.numPartitions != 4 && rnd.getBool();
-						blockParams.ccs							= rnd.getInt(0, 3);
-						blockParams.partitionSeed				= rnd.getInt(0, 1023);
-
-						blockParams.colorEndpointModes[0] = rnd.getInt(0, 15);
-
-						{
-							const int cemDiff = blockParams.isMultiPartSingleCemMode		? 0
-												: blockParams.colorEndpointModes[0] == 0	? 1
-												: blockParams.colorEndpointModes[0] == 15	? -1
-												: rnd.getBool()								? 1 : -1;
-
-							for (int i = 1; i < blockParams.numPartitions; i++)
-								blockParams.colorEndpointModes[i] = blockParams.colorEndpointModes[0] + (cemDiff == -1 ? rnd.getInt(-1, 0) : cemDiff == 1 ? rnd.getInt(0, 1) : 0);
-						}
-					} while (!isValidBlockParams(blockParams, blockSize.x(), blockSize.y()));
-
-					// Generate ISE inputs for both weight and endpoint data.
-
-					NormalBlockISEInputs iseInputs;
-
-					for (int weightOrEndpoints = 0; weightOrEndpoints <= 1; weightOrEndpoints++)
-					{
-						const bool			setWeights	= weightOrEndpoints == 0;
-						const int			numValues	= setWeights ? computeNumWeights(blockParams) :
-														  computeNumColorEndpointValues(&blockParams.colorEndpointModes[0], blockParams.numPartitions, blockParams.isMultiPartSingleCemMode);
-						const ISEParams		iseParams	= setWeights ? blockParams.weightISEParams : computeMaximumRangeISEParams(computeNumBitsForColorEndpoints(blockParams), numValues);
-						ISEInput&			iseInput	= setWeights ? iseInputs.weight : iseInputs.endpoint;
-
-						iseInput.isGivenInBlockForm = rnd.getBool();
-
-						if (iseInput.isGivenInBlockForm)
-						{
-							const int numValuesPerISEBlock	= iseParams.mode == ISEMODE_TRIT	? 5
-															: iseParams.mode == ISEMODE_QUINT	? 3
-															:									  1;
-							const int iseBitMax				= (1 << iseParams.numBits) - 1;
-							const int numISEBlocks			= divRoundUp(numValues, numValuesPerISEBlock);
-
-							for (int iseBlockNdx = 0; iseBlockNdx < numISEBlocks; iseBlockNdx++)
-							{
-								iseInput.value.block[iseBlockNdx].tOrQValue = rnd.getInt(0, 255);
-								for (int i = 0; i < numValuesPerISEBlock; i++)
-									iseInput.value.block[iseBlockNdx].bitValues[i] = rnd.getInt(0, iseBitMax);
-							}
-						}
-						else
-						{
-							const int rangeMax = computeISERangeMax(iseParams);
-
-							for (int valueNdx = 0; valueNdx < numValues; valueNdx++)
-								iseInput.value.plain[valueNdx] = rnd.getInt(0, rangeMax);
-						}
-					}
-
-					generateNormalBlock(blockParams, blockSize.x(), blockSize.y(), iseInputs).pushBytesToVector(dst);
-				}
-			}
-
-			break;
-		}
-
-		default:
-			DE_ASSERT(false);
-	}
-}
-
 // Get a string describing the data of an ASTC block. Currently contains just hex and bin dumps of the block.
 static string astcBlockDataStr (const deUint8* data)
 {
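
The hunk above removes the local ASTC block-generation helpers (AssignBlock128, BitAssignAccessStream, the ISE encoders); the remaining hunks switch the test to the tcu::astc utilities instead. A minimal sketch of the bit-packing idea behind AssignBlock128, assuming a 128-bit block stored as two 64-bit words (illustration only, not the dEQP implementation):

#include <stdint.h>
#include <assert.h>

// Sketch only: pack bits into a 128-bit block held as two 64-bit words.
// Bit 0 is the least significant bit of words[0]; bit 127 is the most
// significant bit of words[1].
struct Block128
{
	uint64_t words[2];

	Block128 (void) { words[0] = 0; words[1] = 0; }

	void setBit (int ndx, uint32_t val)
	{
		assert(ndx >= 0 && ndx < 128 && (val & 1u) == val);
		const int wordNdx	= ndx / 64;
		const int bitNdx	= ndx % 64;
		words[wordNdx] = (words[wordNdx] & ~((uint64_t)1 << bitNdx)) | ((uint64_t)val << bitNdx);
	}
};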
@@ -1484,7 +96,7 @@
 	{
 		static const char* const hexDigits = "0123456789ABCDEF";
 
-		for (int i = ASTC_BLOCK_SIZE_BYTES-1; i >= 0; i--)
+		for (int i = tcu::astc::BLOCK_SIZE_BYTES-1; i >= 0; i--)
 		{
 			if ((i+1) % 2 == 0)
 				result += "\n    ";
@@ -1499,7 +111,7 @@
 
 	result += "\n\n  Binary (big endian: upper left bit is block bit 127):";
 
-	for (int i = ASTC_BLOCK_SIZE_BYTES-1; i >= 0; i--)
+	for (int i = tcu::astc::BLOCK_SIZE_BYTES-1; i >= 0; i--)
 	{
 		if ((i+1) % 2 == 0)
 			result += "\n    ";
@@ -1754,11 +366,11 @@
 	glu::readPixels(renderCtx, viewport.x, viewport.y, resultDst.getAccess());
 }
 
-ASTCBlockCase2D::ASTCBlockCase2D (Context&					context,
-								  const char*				name,
-								  const char*				description,
-								  ASTCBlockTestType			testType,
-								  CompressedTexFormat		format)
+ASTCBlockCase2D::ASTCBlockCase2D (Context&				context,
+								  const char*			name,
+								  const char*			description,
+								  BlockTestType			testType,
+								  CompressedTexFormat	format)
 	: TestCase				(context, name, description)
 	, m_testType			(testType)
 	, m_format				(format)
@@ -1766,7 +378,7 @@
 	, m_currentIteration	(0)
 	, m_renderer			(new ASTCRenderer2D(context, format, deStringHash(getName())))
 {
-	DE_ASSERT(!(tcu::isAstcSRGBFormat(m_format) && isBlockTestTypeHDROnly(m_testType))); // \note There is no HDR sRGB mode, so these would be redundant.
+	DE_ASSERT(!(tcu::isAstcSRGBFormat(m_format) && tcu::astc::isBlockTestTypeHDROnly(m_testType))); // \note There is no HDR sRGB mode, so these would be redundant.
 }
 
 ASTCBlockCase2D::~ASTCBlockCase2D (void)
@@ -1776,13 +388,13 @@
 
 void ASTCBlockCase2D::init (void)
 {
-	m_renderer->initialize(64, 64, getBlockTestTypeColorScale(m_testType), getBlockTestTypeColorBias(m_testType));
+	m_renderer->initialize(64, 64, tcu::astc::getBlockTestTypeColorScale(m_testType), tcu::astc::getBlockTestTypeColorBias(m_testType));
 
 	generateBlockCaseTestData(m_blockData, m_format, m_testType);
 	DE_ASSERT(!m_blockData.empty());
-	DE_ASSERT(m_blockData.size() % ASTC_BLOCK_SIZE_BYTES == 0);
+	DE_ASSERT(m_blockData.size() % tcu::astc::BLOCK_SIZE_BYTES == 0);
 
-	m_testCtx.getLog() << TestLog::Message << "Total " << m_blockData.size() / ASTC_BLOCK_SIZE_BYTES << " blocks to test" << TestLog::EndMessage
+	m_testCtx.getLog() << TestLog::Message << "Total " << m_blockData.size() / tcu::astc::BLOCK_SIZE_BYTES << " blocks to test" << TestLog::EndMessage
 					   << TestLog::Message << "Note: Legitimate ASTC error pixels will be ignored when comparing to reference" << TestLog::EndMessage;
 }
 
@@ -1796,7 +408,7 @@
 {
 	TestLog&						log						= m_testCtx.getLog();
 
-	if (m_renderer->getASTCSupport() == ASTCSUPPORTLEVEL_LDR && isBlockTestTypeHDROnly(m_testType))
+	if (m_renderer->getASTCSupport() == ASTCSUPPORTLEVEL_LDR && tcu::astc::isBlockTestTypeHDROnly(m_testType))
 	{
 		log << TestLog::Message << "Passing the case immediately, since only LDR support was detected and test only contains HDR blocks" << TestLog::EndMessage;
 		m_testCtx.setTestResult(QP_TEST_RESULT_PASS, "Pass");
@@ -1804,7 +416,7 @@
 	}
 
 	const IVec2						blockSize				= m_renderer->getBlockSize();
-	const int						totalNumBlocks			= (int)m_blockData.size() / ASTC_BLOCK_SIZE_BYTES;
+	const int						totalNumBlocks			= (int)m_blockData.size() / tcu::astc::BLOCK_SIZE_BYTES;
 	const int						numXBlocksPerImage		= de::min(m_context.getRenderTarget().getWidth(),  512) / blockSize.x();
 	const int						numYBlocksPerImage		= de::min(m_context.getRenderTarget().getHeight(), 512) / blockSize.y();
 	const int						numBlocksPerImage		= numXBlocksPerImage * numYBlocksPerImage;
@@ -1826,10 +438,10 @@
 			<< TestLog::EndMessage;
 	}
 
-	DE_ASSERT(compressed.getDataSize() == numBlocksPerImage*ASTC_BLOCK_SIZE_BYTES);
-	deMemcpy(compressed.getData(), &m_blockData[m_numBlocksTested*ASTC_BLOCK_SIZE_BYTES], curNumNonDummyBlocks*ASTC_BLOCK_SIZE_BYTES);
+	DE_ASSERT(compressed.getDataSize() == numBlocksPerImage*tcu::astc::BLOCK_SIZE_BYTES);
+	deMemcpy(compressed.getData(), &m_blockData[m_numBlocksTested*tcu::astc::BLOCK_SIZE_BYTES], curNumNonDummyBlocks*tcu::astc::BLOCK_SIZE_BYTES);
 	if (curNumDummyBlocks > 1)
-		generateDummyBlocks((deUint8*)compressed.getData() + curNumNonDummyBlocks*ASTC_BLOCK_SIZE_BYTES, curNumDummyBlocks);
+		tcu::astc::generateDummyVoidExtentBlocks((deUint8*)compressed.getData() + curNumNonDummyBlocks*tcu::astc::BLOCK_SIZE_BYTES, curNumDummyBlocks);
 
 	// Create texture and render.
 
@@ -1873,7 +485,7 @@
 					DE_ASSERT(blockNdx < totalNumBlocks);
 
 					log << TestLog::Message << "First failed block at column " << firstFailedBlockCoord.x() << " and row " << firstFailedBlockCoord.y() << TestLog::EndMessage
-						<< TestLog::Message << "Data of first failed block:\n" << astcBlockDataStr(&m_blockData[blockNdx*ASTC_BLOCK_SIZE_BYTES]) << TestLog::EndMessage;
+						<< TestLog::Message << "Data of first failed block:\n" << astcBlockDataStr(&m_blockData[blockNdx*tcu::astc::BLOCK_SIZE_BYTES]) << TestLog::EndMessage;
 
 					m_testCtx.setTestResult(QP_TEST_RESULT_FAIL, "Image comparison failed");
 					return STOP;
@@ -1904,15 +516,7 @@
 	return CONTINUE;
 }
 
-// Generate a number of trivial dummy blocks to fill unneeded space in a texture.
-void ASTCBlockCase2D::generateDummyBlocks (deUint8* dst, int num)
-{
-	using namespace ASTCBlockGeneratorInternal;
 
-	AssignBlock128 block = generateVoidExtentBlock(VoidExtentParams(false, 0, 0, 0, 0));
-	for (int i = 0; i < num; i++)
-		block.assignToMemory(&dst[i * ASTC_BLOCK_SIZE_BYTES]);
-}
 
 ASTCBlockSizeRemainderCase2D::ASTCBlockSizeRemainderCase2D (Context&			context,
 															const char*			name,
@@ -1949,15 +553,15 @@
 	const int						curRemainderY			= m_currentIteration / blockSize.x();
 	const int						imageWidth				= (MAX_NUM_BLOCKS_X-1)*blockSize.x() + curRemainderX;
 	const int						imageHeight				= (MAX_NUM_BLOCKS_Y-1)*blockSize.y() + curRemainderY;
-	const int						numBlocksX				= divRoundUp(imageWidth, blockSize.x());
-	const int						numBlocksY				= divRoundUp(imageHeight, blockSize.y());
+	const int						numBlocksX				= deDivRoundUp32(imageWidth, blockSize.x());
+	const int						numBlocksY				= deDivRoundUp32(imageHeight, blockSize.y());
 	const int						totalNumBlocks			= numBlocksX * numBlocksY;
 	const glu::RenderContext&		renderCtx				= m_context.getRenderContext();
 	const tcu::RGBA					threshold				= renderCtx.getRenderTarget().getPixelFormat().getColorThreshold() + (tcu::isAstcSRGBFormat(m_format) ? tcu::RGBA(2,2,2,2) : tcu::RGBA(1,1,1,1));
 	tcu::CompressedTexture			compressed				(m_format, imageWidth, imageHeight);
 
-	DE_ASSERT(compressed.getDataSize() == totalNumBlocks*ASTC_BLOCK_SIZE_BYTES);
-	generateDefaultBlockData((deUint8*)compressed.getData(), totalNumBlocks, blockSize.x(), blockSize.y());
+	DE_ASSERT(compressed.getDataSize() == totalNumBlocks*tcu::astc::BLOCK_SIZE_BYTES);
+	tcu::astc::generateDummyNormalBlocks((deUint8*)compressed.getData(), totalNumBlocks, blockSize.x(), blockSize.y());
 
 	// Create texture and render.
 
@@ -2005,85 +609,6 @@
 	return CONTINUE;
 }
 
-void ASTCBlockSizeRemainderCase2D::generateDefaultBlockData (deUint8* dst, int numBlocks, int blockWidth, int blockHeight)
-{
-	using namespace ASTCBlockGeneratorInternal;
-
-	NormalBlockParams blockParams;
-
-	blockParams.weightGridWidth			= 3;
-	blockParams.weightGridHeight		= 3;
-	blockParams.weightISEParams			= ISEParams(ISEMODE_PLAIN_BIT, 5);
-	blockParams.isDualPlane				= false;
-	blockParams.numPartitions			= 1;
-	blockParams.colorEndpointModes[0]	= 8;
-
-	NormalBlockISEInputs iseInputs = generateDefaultISEInputs(blockParams);
-	iseInputs.weight.isGivenInBlockForm = false;
-
-	const int numWeights		= computeNumWeights(blockParams);
-	const int weightRangeMax	= computeISERangeMax(blockParams.weightISEParams);
-
-	for (int blockNdx = 0; blockNdx < numBlocks; blockNdx++)
-	{
-		for (int weightNdx = 0; weightNdx < numWeights; weightNdx++)
-			iseInputs.weight.value.plain[weightNdx] = (blockNdx*numWeights + weightNdx) * weightRangeMax / (numBlocks*numWeights-1);
-
-		generateNormalBlock(blockParams, blockWidth, blockHeight, iseInputs).assignToMemory(dst + blockNdx*ASTC_BLOCK_SIZE_BYTES);
-	}
-}
-
-const char* getBlockTestTypeName (ASTCBlockTestType testType)
-{
-	switch (testType)
-	{
-		case ASTCBLOCKTESTTYPE_VOID_EXTENT_LDR:				return "void_extent_ldr";
-		case ASTCBLOCKTESTTYPE_VOID_EXTENT_HDR:				return "void_extent_hdr";
-		case ASTCBLOCKTESTTYPE_WEIGHT_GRID:					return "weight_grid";
-		case ASTCBLOCKTESTTYPE_WEIGHT_ISE:					return "weight_ise";
-		case ASTCBLOCKTESTTYPE_CEMS:						return "color_endpoint_modes";
-		case ASTCBLOCKTESTTYPE_PARTITION_SEED:				return "partition_pattern_index";
-		case ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_LDR:			return "endpoint_value_ldr";
-		case ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_NO_15:	return "endpoint_value_hdr_cem_not_15";
-		case ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_15:		return "endpoint_value_hdr_cem_15";
-		case ASTCBLOCKTESTTYPE_ENDPOINT_ISE:				return "endpoint_ise";
-		case ASTCBLOCKTESTTYPE_CCS:							return "color_component_selector";
-		case ASTCBLOCKTESTTYPE_RANDOM:						return "random";
-		default:
-			DE_ASSERT(false);
-			return DE_NULL;
-	}
-}
-
-const char* getBlockTestTypeDescription (ASTCBlockTestType testType)
-{
-	switch (testType)
-	{
-		case ASTCBLOCKTESTTYPE_VOID_EXTENT_LDR:				return "Test void extent block, LDR mode";
-		case ASTCBLOCKTESTTYPE_VOID_EXTENT_HDR:				return "Test void extent block, HDR mode";
-		case ASTCBLOCKTESTTYPE_WEIGHT_GRID:					return "Test combinations of plane count, weight integer sequence encoding parameters, and weight grid size";
-		case ASTCBLOCKTESTTYPE_WEIGHT_ISE:					return "Test different integer sequence encoding block values for weight grid";
-		case ASTCBLOCKTESTTYPE_CEMS:						return "Test different color endpoint mode combinations, combined with different plane and partition counts";
-		case ASTCBLOCKTESTTYPE_PARTITION_SEED:				return "Test different partition pattern indices";
-		case ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_LDR:			return "Test various combinations of each pair of color endpoint values, for each LDR color endpoint mode";
-		case ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_NO_15:	return "Test various combinations of each pair of color endpoint values, for each HDR color endpoint mode other than mode 15";
-		case ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_15:		return "Test various combinations of each pair of color endpoint values, HDR color endpoint mode 15";
-		case ASTCBLOCKTESTTYPE_ENDPOINT_ISE:				return "Test different integer sequence encoding block values for color endpoints";
-		case ASTCBLOCKTESTTYPE_CCS:							return "Test color component selector, for different partition counts";
-		case ASTCBLOCKTESTTYPE_RANDOM:						return "Random block test";
-		default:
-			DE_ASSERT(false);
-			return DE_NULL;
-	}
-}
-
-bool isBlockTestTypeHDROnly (ASTCBlockTestType testType)
-{
-	return testType == ASTCBLOCKTESTTYPE_VOID_EXTENT_HDR			||
-		   testType == ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_NO_15	||
-		   testType == ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_15;
-}
-
 } // Functional
 } // gles3
 } // deqp
diff --git a/modules/gles3/functional/es3fASTCDecompressionCases.hpp b/modules/gles3/functional/es3fASTCDecompressionCases.hpp
index 9d7e480..e331075 100644
--- a/modules/gles3/functional/es3fASTCDecompressionCases.hpp
+++ b/modules/gles3/functional/es3fASTCDecompressionCases.hpp
@@ -26,6 +26,7 @@
 #include "deDefs.h"
 #include "tes3TestCase.hpp"
 #include "tcuCompressedTexture.hpp"
+#include "tcuAstcUtil.hpp"
 #include "deUniquePtr.hpp"
 
 #include <vector>
@@ -44,24 +45,6 @@
 
 }
 
-enum ASTCBlockTestType
-{
-	ASTCBLOCKTESTTYPE_VOID_EXTENT_LDR = 0,
-	ASTCBLOCKTESTTYPE_VOID_EXTENT_HDR,
-	ASTCBLOCKTESTTYPE_WEIGHT_GRID,
-	ASTCBLOCKTESTTYPE_WEIGHT_ISE,
-	ASTCBLOCKTESTTYPE_CEMS,
-	ASTCBLOCKTESTTYPE_PARTITION_SEED,
-	ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_LDR,
-	ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_NO_15,
-	ASTCBLOCKTESTTYPE_ENDPOINT_VALUE_HDR_15,
-	ASTCBLOCKTESTTYPE_ENDPOINT_ISE,
-	ASTCBLOCKTESTTYPE_CCS,
-	ASTCBLOCKTESTTYPE_RANDOM,
-
-	ASTCBLOCKTESTTYPE_LAST
-};
-
 // General ASTC block test class.
 class ASTCBlockCase2D : public TestCase
 {
@@ -69,7 +52,7 @@
 																	ASTCBlockCase2D			(Context&						context,
 																							 const char*					name,
 																							 const char*					description,
-																							 ASTCBlockTestType				testType,
+																							 tcu::astc::BlockTestType		testType,
 																							 tcu::CompressedTexFormat		format);
 																	~ASTCBlockCase2D		(void);
 
@@ -78,12 +61,10 @@
 	IterateResult													iterate					(void);
 
 private:
-	static void														generateDummyBlocks		(deUint8* dst, int num);
-
 																	ASTCBlockCase2D			(const ASTCBlockCase2D& other);
 	ASTCBlockCase2D&												operator=				(const ASTCBlockCase2D& other);
 
-	const ASTCBlockTestType											m_testType;
+	const tcu::astc::BlockTestType									m_testType;
 	const tcu::CompressedTexFormat									m_format;
 	std::vector<deUint8>											m_blockData;
 
@@ -114,8 +95,6 @@
 		MAX_NUM_BLOCKS_Y = 5
 	};
 
-	static void														generateDefaultBlockData		(deUint8* dst, int numBlocks, int blockWidth, int blockHeight);
-
 																	ASTCBlockSizeRemainderCase2D	(const ASTCBlockSizeRemainderCase2D& other);
 	ASTCBlockSizeRemainderCase2D&									operator=						(const ASTCBlockSizeRemainderCase2D& other);
 
@@ -126,10 +105,6 @@
 	de::UniquePtr<ASTCDecompressionCaseInternal::ASTCRenderer2D>	m_renderer;
 };
 
-const char*		getBlockTestTypeName			(ASTCBlockTestType testType);
-const char*		getBlockTestTypeDescription		(ASTCBlockTestType testType);
-bool			isBlockTestTypeHDROnly			(ASTCBlockTestType testType);
-
 } // Functional
 } // gles3
 } // deqp
diff --git a/modules/gles3/functional/es3fCompressedTextureTests.cpp b/modules/gles3/functional/es3fCompressedTextureTests.cpp
index f8e6297..878428d 100644
--- a/modules/gles3/functional/es3fCompressedTextureTests.cpp
+++ b/modules/gles3/functional/es3fCompressedTextureTests.cpp
@@ -70,10 +70,10 @@
 
 		// Block test cases.
 
-		for (int astcTestTypeI = 0; astcTestTypeI < ASTCBLOCKTESTTYPE_LAST; astcTestTypeI++)
+		for (int astcTestTypeI = 0; astcTestTypeI < tcu::astc::BLOCK_TEST_TYPE_LAST; astcTestTypeI++)
 		{
-			const ASTCBlockTestType		astcTestType	= (ASTCBlockTestType)astcTestTypeI;
-			TestCaseGroup* const		testTypeGroup	= new TestCaseGroup(m_context, getBlockTestTypeName(astcTestType), getBlockTestTypeDescription(astcTestType));
+			const tcu::astc::BlockTestType	astcTestType	= (tcu::astc::BlockTestType)astcTestTypeI;
+			TestCaseGroup* const			testTypeGroup	= new TestCaseGroup(m_context, getBlockTestTypeName(astcTestType), getBlockTestTypeDescription(astcTestType));
 			astcGroup->addChild(testTypeGroup);
 
 			for (int formatI = 0; formatI < tcu::COMPRESSEDTEXFORMAT_LAST; formatI++)
@@ -82,7 +82,7 @@
 
 				if (!tcu::isAstcFormat(format))
 					continue;
-				if (tcu::isAstcSRGBFormat(format) && isBlockTestTypeHDROnly(astcTestType))
+				if (tcu::isAstcSRGBFormat(format) && tcu::astc::isBlockTestTypeHDROnly(astcTestType))
 					continue;
 
 				testTypeGroup->addChild(new ASTCBlockCase2D(m_context, getASTCFormatShortName(format).c_str(), glu::getCompressedTextureFormatName(glu::getGLFormat(format)), astcTestType, format));
diff --git a/modules/gles3/functional/es3fDitheringTests.cpp b/modules/gles3/functional/es3fDitheringTests.cpp
index 350b734..ad02947 100644
--- a/modules/gles3/functional/es3fDitheringTests.cpp
+++ b/modules/gles3/functional/es3fDitheringTests.cpp
@@ -315,8 +315,9 @@
 
 	if (!m_ditheringEnabled)
 	{
-		const int increasingDirectionSize	= isVerticallyIncreasing ? renderedImg.getHeight() : renderedImg.getWidth();
-		const int constantDirectionSize		= isVerticallyIncreasing ? renderedImg.getWidth() : renderedImg.getHeight();
+		const int	increasingDirectionSize	= isVerticallyIncreasing ? renderedImg.getHeight() : renderedImg.getWidth();
+		const int	constantDirectionSize	= isVerticallyIncreasing ? renderedImg.getWidth() : renderedImg.getHeight();
+		bool		colorHasChanged			= false;
 
 		for (int incrPos = 0; incrPos < increasingDirectionSize; incrPos++)
 		{
@@ -329,13 +330,18 @@
 
 				if (constPos > 0 && clr != prevConstantDirectionPix)
 				{
-					log << TestLog::Message
-						<< "Failure: colors should be constant per " << (isVerticallyIncreasing ? "row" : "column")
-						<< " (since dithering is disabled), but the color at position (" << x << ", " << y << ") is " << clr
-						<< " and does not equal the color at (" << (isVerticallyIncreasing ? x-1 : x) << ", " << (isVerticallyIncreasing ? y : y-1) << "), which is " << prevConstantDirectionPix
-						<< TestLog::EndMessage;
+					if (colorHasChanged)
+					{
+						log << TestLog::Message
+							<< "Failure: colors should be constant per " << (isVerticallyIncreasing ? "row" : "column")
+							<< " (since dithering is disabled), but the color at position (" << x << ", " << y << ") is " << clr
+							<< " and does not equal the color at (" << (isVerticallyIncreasing ? x-1 : x) << ", " << (isVerticallyIncreasing ? y : y-1) << "), which is " << prevConstantDirectionPix
+							<< TestLog::EndMessage;
 
-					return false;
+						return false;
+					}
+					else
+						colorHasChanged = true;
 				}
 
 				prevConstantDirectionPix = clr;
diff --git a/modules/gles3/functional/es3fMultisampleTests.cpp b/modules/gles3/functional/es3fMultisampleTests.cpp
index 7b751d5..6e174ce 100644
--- a/modules/gles3/functional/es3fMultisampleTests.cpp
+++ b/modules/gles3/functional/es3fMultisampleTests.cpp
@@ -219,6 +219,8 @@
 	void				randomizeViewport		(void);
 	void				readImage				(tcu::Surface& dst) const;
 
+	IVec2				getRenderTargetSize		(void) const 				{ return IVec2(m_renderWidth, m_renderHeight); }
+
 	int					m_numSamples;
 
 	int					m_viewportSize;
@@ -627,8 +629,18 @@
 
 		if (m_currentIteration >= m_maxNumIterations)
 		{
+			const IVec2 targetSize 			= getRenderTargetSize();
+			const int 	detectedNumSamples 	= (int)m_detectedColors.size() - 1; // One color is the background
+
 			log << TestLog::Message << "Failure: Number of distinct colors detected is lower than sample count+1" << TestLog::EndMessage;
-			m_context.getTestContext().setTestResult(QP_TEST_RESULT_FAIL, "Failed");
+
+			// For high resolution render targets the lack of samples is unlikely to be noticed by a human,
+			// and for GLES 3.0 the application cannot observe the sample count directly, so it only
+			// warrants a quality warning.
+			if ((targetSize.x() >= 2048 || targetSize.y() >= 2048) && (detectedNumSamples >= (m_numSamples/2)))
+				m_context.getTestContext().setTestResult(QP_TEST_RESULT_QUALITY_WARNING, "Measured sample count below the advertised count");
+			else
+				m_context.getTestContext().setTestResult(QP_TEST_RESULT_FAIL, "Failed");
 			return STOP;
 		}
 		else
diff --git a/modules/gles3/functional/es3fShaderLoopTests.cpp b/modules/gles3/functional/es3fShaderLoopTests.cpp
index d76bf6a..ff1e09b 100644
--- a/modules/gles3/functional/es3fShaderLoopTests.cpp
+++ b/modules/gles3/functional/es3fShaderLoopTests.cpp
@@ -30,6 +30,7 @@
  *//*--------------------------------------------------------------------*/
 
 #include "es3fShaderLoopTests.hpp"
+#include "glsShaderLibrary.hpp"
 #include "glsShaderRenderCase.hpp"
 #include "gluShaderUtil.hpp"
 #include "tcuStringTemplate.hpp"
@@ -1212,6 +1213,11 @@
 			}
 		}
 	}
+
+	// Additional smaller handwritten tests.
+	const std::vector<tcu::TestNode*> children = gls::ShaderLibrary(m_context.getTestContext(), m_context.getRenderContext(), m_context.getContextInfo()).loadShaderFile("shaders/loops.test");
+	for (int i = 0; i < (int)children.size(); i++)
+		addChild(children[i]);
 }
 
 } // Functional
diff --git a/modules/gles3/performance/es3pBufferDataUploadTests.cpp b/modules/gles3/performance/es3pBufferDataUploadTests.cpp
index cc82fd5..bcd79a5 100644
--- a/modules/gles3/performance/es3pBufferDataUploadTests.cpp
+++ b/modules/gles3/performance/es3pBufferDataUploadTests.cpp
@@ -2334,7 +2334,7 @@
 		m_dummyBufferID = 0;
 	}
 
-	m_zeroData.clear();
+	m_zeroData = std::vector<deUint8>();
 
 	BasicBufferCase<SampleType>::deinit();
 }
@@ -2534,7 +2534,7 @@
 
 void ReferenceMemcpyCase::deinit (void)
 {
-	m_dstBuf.clear();
+	m_dstBuf = std::vector<deUint8>();
 	BasicUploadCase<SingleOperationDuration>::deinit();
 }
 
@@ -3246,7 +3246,7 @@
 template <typename SampleType>
 void ModifyAfterBasicCase<SampleType>::deinit (void)
 {
-	m_zeroData.clear();
+	m_zeroData = std::vector<deUint8>();
 
 	BasicBufferCase<SampleType>::deinit();
 }
diff --git a/modules/gles31/functional/es31fCopyImageTests.cpp b/modules/gles31/functional/es31fCopyImageTests.cpp
index c55483b..8407d3a 100644
--- a/modules/gles31/functional/es31fCopyImageTests.cpp
+++ b/modules/gles31/functional/es31fCopyImageTests.cpp
@@ -207,6 +207,18 @@
 		return tcu::getTextureChannelClass(glu::mapGLInternalFormat(format).type) == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER;
 }
 
+bool isFixedPointFormat (deUint32 format)
+{
+	if (glu::isCompressedFormat(format))
+		return false;
+	else
+	{
+		const tcu::TextureChannelClass channelClass = tcu::getTextureChannelClass(glu::mapGLInternalFormat(format).type);
+
+		return channelClass == tcu::TEXTURECHANNELCLASS_SIGNED_FIXED_POINT || channelClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_FIXED_POINT;
+	}
+}
+
 bool isTextureTarget (deUint32 target)
 {
 	return target != GL_RENDERBUFFER;
@@ -652,7 +664,21 @@
 	{
 		vector<deUint8> texelBlock(format.getPixelSize());
 
-		genTexel(rng, moreRestrictiveFormat, format.getPixelSize(), 1, &(texelBlock[0]));
+		if (isFixedPointFormat(info.getFormat()))
+		{
+			// All zeroes is the only bit pattern that fixed point values can be
+			// cleared to that is also a valid floating point value.
+			if (isFloatFormat(moreRestrictiveFormat))
+				deMemset(&texelBlock[0], 0x0, texelBlock.size());
+			else
+			{
+				// Fixed point values can only be cleared to all 0s or all 1s.
+				const deInt32 fill = rng.getBool() ? 0xFF : 0x0;
+				deMemset(&texelBlock[0], fill, texelBlock.size());
+			}
+		}
+		else
+			genTexel(rng, moreRestrictiveFormat, format.getPixelSize(), 1, &(texelBlock[0]));
 
 		{
 			const tcu::ConstPixelBufferAccess texelAccess (format, 1, 1, 1, &(texelBlock[0]));
@@ -709,8 +735,6 @@
 	GLU_EXPECT_NO_ERROR(gl.getError(), "Failed to unbind renderbufer and framebuffer.");
 }
 
-
-
 void genImage (const glw::Functions&			gl,
 			   de::Random&						rng,
 			   deUint32							name,
diff --git a/modules/gles31/functional/es31fDebugTests.cpp b/modules/gles31/functional/es31fDebugTests.cpp
index 9f85e00..f9c87d4 100644
--- a/modules/gles31/functional/es31fDebugTests.cpp
+++ b/modules/gles31/functional/es31fDebugTests.cpp
@@ -1442,6 +1442,13 @@
 	const int				maxWait		= 10000; // ms
 	const int				warnWait	= 100;
 
+	// Clear log from earlier messages
+	{
+		GLint numMessages = 0;
+		gl.getIntegerv(GL_DEBUG_LOGGED_MESSAGES, &numMessages);
+		gl.getDebugMessageLog(numMessages, 0, DE_NULL, DE_NULL, DE_NULL, DE_NULL, DE_NULL, DE_NULL);
+	}
+
 	gl.enable(GL_DEBUG_OUTPUT);
 	gl.enable(GL_DEBUG_OUTPUT_SYNCHRONOUS);
 	gl.debugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DONT_CARE, 0, DE_NULL, false);
diff --git a/modules/gles31/functional/es31fFunctionalTests.cpp b/modules/gles31/functional/es31fFunctionalTests.cpp
index 0bfbcb6..784e70c 100644
--- a/modules/gles31/functional/es31fFunctionalTests.cpp
+++ b/modules/gles31/functional/es31fFunctionalTests.cpp
@@ -172,6 +172,7 @@
 		addChild(new ShaderLibraryTest(m_context, "linkage_tessellation_geometry.test", "tessellation_geometry", "Tessellation and geometry shader"));
 		addChild(new ShaderLibraryTest(m_context, "linkage_shader_storage_block.test", "shader_storage_block", "Shader storage blocks"));
 		addChild(new ShaderLibraryTest(m_context, "linkage_io_block.test", "io_block", "Shader io blocks"));
+		addChild(new ShaderLibraryTest(m_context, "linkage_uniform.test", "uniform", "Uniform linkage"));
 	}
 };
 
diff --git a/modules/gles31/functional/es31fGeometryShaderTests.cpp b/modules/gles31/functional/es31fGeometryShaderTests.cpp
index fe55d52..88e1625 100644
--- a/modules/gles31/functional/es31fGeometryShaderTests.cpp
+++ b/modules/gles31/functional/es31fGeometryShaderTests.cpp
@@ -5956,7 +5956,7 @@
 	{
 		// limits with a corresponding glsl constant
 		queryGroup->addChild(new GeometryProgramLimitCase(m_context, "max_geometry_input_components",				"", GL_MAX_GEOMETRY_INPUT_COMPONENTS,				"MaxGeometryInputComponents",		64));
-		queryGroup->addChild(new GeometryProgramLimitCase(m_context, "max_geometry_output_components",				"", GL_MAX_GEOMETRY_OUTPUT_COMPONENTS,				"MaxGeometryOutputComponents",		128));
+		queryGroup->addChild(new GeometryProgramLimitCase(m_context, "max_geometry_output_components",				"", GL_MAX_GEOMETRY_OUTPUT_COMPONENTS,				"MaxGeometryOutputComponents",		64));
 		queryGroup->addChild(new GeometryProgramLimitCase(m_context, "max_geometry_image_uniforms",					"", GL_MAX_GEOMETRY_IMAGE_UNIFORMS,					"MaxGeometryImageUniforms",			0));
 		queryGroup->addChild(new GeometryProgramLimitCase(m_context, "max_geometry_texture_image_units",			"", GL_MAX_GEOMETRY_TEXTURE_IMAGE_UNITS,			"MaxGeometryTextureImageUnits",		16));
 		queryGroup->addChild(new GeometryProgramLimitCase(m_context, "max_geometry_output_vertices",				"", GL_MAX_GEOMETRY_OUTPUT_VERTICES,				"MaxGeometryOutputVertices",		256));
diff --git a/modules/gles31/functional/es31fLayoutBindingTests.cpp b/modules/gles31/functional/es31fLayoutBindingTests.cpp
index 9c6b557..f925deb 100644
--- a/modules/gles31/functional/es31fLayoutBindingTests.cpp
+++ b/modules/gles31/functional/es31fLayoutBindingTests.cpp
@@ -201,8 +201,8 @@
 public:
 	enum
 	{
-		TEST_RENDER_WIDTH	= 256,
-		TEST_RENDER_HEIGHT	= 256,
+		MAX_TEST_RENDER_WIDTH	= 256,
+		MAX_TEST_RENDER_HEIGHT	= 256,
 		TEST_TEXTURE_SIZE	= 1,
 	};
 
@@ -221,6 +221,8 @@
 	virtual void 						init							(void);
 	virtual void 						deinit							(void);
 
+	int									getRenderWidth					(void) const { return de::min((int)MAX_TEST_RENDER_WIDTH, m_context.getRenderTarget().getWidth()); }
+	int									getRenderHeight					(void) const { return de::min((int)MAX_TEST_RENDER_HEIGHT, m_context.getRenderTarget().getHeight()); }
 protected:
 	virtual glu::ShaderProgram*			generateShaders					(void) const = 0;
 
@@ -519,7 +521,7 @@
 	const glw::Functions& gl = m_context.getRenderContext().getFunctions();
 
 	gl.useProgram(m_program->getProgram());
-	gl.viewport(0, 0, TEST_RENDER_WIDTH, TEST_RENDER_HEIGHT);
+	gl.viewport(0, 0, getRenderWidth(), getRenderHeight());
 	gl.clearColor(0.0f, 0.0f, 0.0f, 1.0f);
 	GLU_EXPECT_NO_ERROR(gl.getError(), "Failed to set render state");
 }
@@ -527,7 +529,7 @@
 bool LayoutBindingRenderCase::drawAndVerifyResult (const Vec4& expectedColor)
 {
 	const glw::Functions&	gl					= m_context.getRenderContext().getFunctions();
-	tcu::Surface			reference			(TEST_RENDER_WIDTH, TEST_RENDER_HEIGHT);
+	tcu::Surface			reference			(getRenderWidth(), getRenderHeight());
 
 	// the point of these test is to check layout_binding. For this purpose, we can use quite
 	// large thresholds.
@@ -544,7 +546,7 @@
 	GLU_EXPECT_NO_ERROR(gl.getError(), "Drawing failed");
 
 	// Verify
-	tcu::Surface result(TEST_RENDER_WIDTH, TEST_RENDER_HEIGHT);
+	tcu::Surface result(getRenderWidth(), getRenderHeight());
 	m_testCtx.getLog() << TestLog::Message << "Reading pixels" << TestLog::EndMessage;
 	glu::readPixels(m_context.getRenderContext(), 0, 0, result.getAccess());
 	GLU_EXPECT_NO_ERROR(gl.getError(), "Read pixels failed");
diff --git a/modules/gles31/functional/es31fOpaqueTypeIndexingTests.cpp b/modules/gles31/functional/es31fOpaqueTypeIndexingTests.cpp
index e31091b..88c4c3b 100644
--- a/modules/gles31/functional/es31fOpaqueTypeIndexingTests.cpp
+++ b/modules/gles31/functional/es31fOpaqueTypeIndexingTests.cpp
@@ -705,7 +705,7 @@
 		gl.getIntegerv(limitPnames[m_shaderType], &maxBlocks);
 		GLU_EXPECT_NO_ERROR(gl.getError(), "glGetIntegerv()");
 
-		if (maxBlocks < m_numInstances)
+		if (maxBlocks < 2 + m_numInstances)
 			throw tcu::NotSupportedError("Not enough shader storage blocks supported for shader type");
 	}
 }
diff --git a/modules/gles31/functional/es31fPrimitiveBoundingBoxTests.cpp b/modules/gles31/functional/es31fPrimitiveBoundingBoxTests.cpp
index dd6fca0..c1fa0af 100644
--- a/modules/gles31/functional/es31fPrimitiveBoundingBoxTests.cpp
+++ b/modules/gles31/functional/es31fPrimitiveBoundingBoxTests.cpp
@@ -4461,6 +4461,9 @@
 	if (!m_context.getContextInfo().isExtensionSupported("GL_EXT_primitive_bounding_box"))
 		throw tcu::NotSupportedError("Test requires GL_EXT_primitive_bounding_box extension");
 
+	if (!m_context.getContextInfo().isExtensionSupported("GL_EXT_tessellation_shader"))
+		throw tcu::NotSupportedError("Test requires GL_EXT_tessellation_shader extension");
+
 	m_testCtx.getLog()
 		<< tcu::TestLog::Message
 		<< "Testing call order of state setting functions have no effect on the rendering.\n"
diff --git a/modules/gles31/functional/es31fSSBOLayoutCase.cpp b/modules/gles31/functional/es31fSSBOLayoutCase.cpp
index b265614..226c9eb 100644
--- a/modules/gles31/functional/es31fSSBOLayoutCase.cpp
+++ b/modules/gles31/functional/es31fSSBOLayoutCase.cpp
@@ -1641,7 +1641,7 @@
 			const float		refVal		= *((const float*)ref + ndx);
 			const float		resVal		= *((const float*)res + ndx);
 
-			if (deFloatAbs(resVal - refVal) >= threshold)
+			if (!(deFloatAbs(resVal - refVal) <= threshold))
 				return false;
 		}
 	}
diff --git a/modules/gles31/functional/es31fShaderHelperInvocationTests.cpp b/modules/gles31/functional/es31fShaderHelperInvocationTests.cpp
index 125496b..a173d51 100644
--- a/modules/gles31/functional/es31fShaderHelperInvocationTests.cpp
+++ b/modules/gles31/functional/es31fShaderHelperInvocationTests.cpp
@@ -542,24 +542,16 @@
 
 static bool hasNeighborWithColor (const tcu::Surface& surface, int x, int y, tcu::RGBA color, tcu::RGBA threshold)
 {
-	static const IVec2 s_neighbors[] =
-	{
-		IVec2(-1, -1),
-		IVec2( 0, -1),
-		IVec2(+1, -1),
-		IVec2(-1,  0),
-		IVec2(+1,  0),
-		IVec2(-1, +1),
-		IVec2( 0, +1),
-		IVec2(+1, +1)
-	};
-
 	const int	w	= surface.getWidth();
 	const int	h	= surface.getHeight();
 
-	for (int sample = 0; sample < DE_LENGTH_OF_ARRAY(s_neighbors); sample++)
+	for (int dx = -1; dx < 2; dx++)
+	for (int dy = -1; dy < 2; dy++)
 	{
-		const IVec2	pos	= IVec2(x, y) + s_neighbors[sample];
+		const IVec2	pos	= IVec2(x + dx, y + dy);
+
+		if (dx == 0 && dy == 0)
+			continue;
 
 		if (de::inBounds(pos.x(), 0, w) && de::inBounds(pos.y(), 0, h))
 		{
@@ -598,8 +590,8 @@
 			if (nonZeroDeriv)
 				numNonZeroDeriv	+= 1;
 
-			if ((!isBg && !isFg) ||				// Neither of valid colors (ignoring blue channel that has derivate)
-				(nonZeroDeriv && !neighborBg))	// Has non-zero derivate, but sample not at primitive edge
+			if ((!isBg && !isFg) ||							// Neither of valid colors (ignoring blue channel that has derivate)
+				(nonZeroDeriv && !neighborBg && !isFg))		// Has non-zero derivate, but sample not at primitive edge or inside primitive
 				numInvalidPixels += 1;
 
 			if (isFg)
diff --git a/modules/gles31/functional/es31fShaderTextureSizeTests.cpp b/modules/gles31/functional/es31fShaderTextureSizeTests.cpp
index 8e15758..f28ed03 100644
--- a/modules/gles31/functional/es31fShaderTextureSizeTests.cpp
+++ b/modules/gles31/functional/es31fShaderTextureSizeTests.cpp
@@ -150,6 +150,14 @@
 	};
 
 	const glw::Functions&	gl					= m_context.getRenderContext().getFunctions();
+
+	// requirements
+	if (m_isArrayType && !m_context.getContextInfo().isExtensionSupported("GL_OES_texture_storage_multisample_2d_array"))
+		TCU_THROW(NotSupportedError, "Test requires OES_texture_storage_multisample_2d_array extension");
+
+	if (m_context.getRenderTarget().getWidth() < 1 || m_context.getRenderTarget().getHeight() < 1)
+		TCU_THROW(NotSupportedError, "rendertarget size must be at least 1x1");
+
 	glw::GLint				maxTextureSize		= 0;
 	glw::GLint				maxTextureLayers	= 0;
 	glw::GLint				maxSamples			= 0;
@@ -158,13 +166,8 @@
 	gl.getIntegerv(GL_MAX_ARRAY_TEXTURE_LAYERS, &maxTextureLayers);
 	gl.getInternalformativ(getTextureGLTarget(), getTextureGLInternalFormat(), GL_SAMPLES, 1, &maxSamples);
 
-	// requirements
-	if (m_isArrayType && !m_context.getContextInfo().isExtensionSupported("GL_OES_texture_storage_multisample_2d_array"))
-		throw tcu::NotSupportedError("Test requires OES_texture_storage_multisample_2d_array extension");
-	if (m_context.getRenderTarget().getWidth() < 1 || m_context.getRenderTarget().getHeight() < 1)
-		throw tcu::NotSupportedError("rendertarget size must be at least 1x1");
 	if (m_numSamples > maxSamples)
-		throw tcu::NotSupportedError("sample count is not supported");
+		TCU_THROW(NotSupportedError, "sample count is not supported");
 
 	// gen shade
 
diff --git a/modules/gles31/functional/es31fTessellationTests.cpp b/modules/gles31/functional/es31fTessellationTests.cpp
index 27d945c..78f2308 100644
--- a/modules/gles31/functional/es31fTessellationTests.cpp
+++ b/modules/gles31/functional/es31fTessellationTests.cpp
@@ -7216,14 +7216,14 @@
 		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_evaluation_uniform_components",			"Test MAX_TESS_EVALUATION_UNIFORM_COMPONENTS",			GL_MAX_TESS_EVALUATION_UNIFORM_COMPONENTS,		1024));
 		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_control_texture_image_units",				"Test MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS",			GL_MAX_TESS_CONTROL_TEXTURE_IMAGE_UNITS,		16));
 		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_evaluation_texture_image_units",			"Test MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS",			GL_MAX_TESS_EVALUATION_TEXTURE_IMAGE_UNITS,		16));
-		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_control_output_components",				"Test MAX_TESS_CONTROL_OUTPUT_COMPONENTS",				GL_MAX_TESS_CONTROL_OUTPUT_COMPONENTS,			128));
+		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_control_output_components",				"Test MAX_TESS_CONTROL_OUTPUT_COMPONENTS",				GL_MAX_TESS_CONTROL_OUTPUT_COMPONENTS,			64));
 		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_patch_components",							"Test MAX_TESS_PATCH_COMPONENTS",						GL_MAX_TESS_PATCH_COMPONENTS,					120));
 		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_control_total_output_components",			"Test MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS",		GL_MAX_TESS_CONTROL_TOTAL_OUTPUT_COMPONENTS,	4096));
-		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_evaluation_output_components",				"Test MAX_TESS_EVALUATION_OUTPUT_COMPONENTS",			GL_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS,		128));
+		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_evaluation_output_components",				"Test MAX_TESS_EVALUATION_OUTPUT_COMPONENTS",			GL_MAX_TESS_EVALUATION_OUTPUT_COMPONENTS,		64));
 		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_control_uniform_blocks",					"Test MAX_TESS_CONTROL_UNIFORM_BLOCKS",					GL_MAX_TESS_CONTROL_UNIFORM_BLOCKS,				12));
 		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_evaluation_uniform_blocks",				"Test MAX_TESS_EVALUATION_UNIFORM_BLOCKS",				GL_MAX_TESS_EVALUATION_UNIFORM_BLOCKS,			12));
-		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_control_input_components",					"Test MAX_TESS_CONTROL_INPUT_COMPONENTS",				GL_MAX_TESS_CONTROL_INPUT_COMPONENTS,			128));
-		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_evaluation_input_components",				"Test MAX_TESS_EVALUATION_INPUT_COMPONENTS",			GL_MAX_TESS_EVALUATION_INPUT_COMPONENTS,		128));
+		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_control_input_components",					"Test MAX_TESS_CONTROL_INPUT_COMPONENTS",				GL_MAX_TESS_CONTROL_INPUT_COMPONENTS,			64));
+		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_evaluation_input_components",				"Test MAX_TESS_EVALUATION_INPUT_COMPONENTS",			GL_MAX_TESS_EVALUATION_INPUT_COMPONENTS,		64));
 		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_control_atomic_counter_buffers",			"Test MAX_TESS_CONTROL_ATOMIC_COUNTER_BUFFERS",			GL_MAX_TESS_CONTROL_ATOMIC_COUNTER_BUFFERS,		0));
 		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_evaluation_atomic_counter_buffers",		"Test MAX_TESS_EVALUATION_ATOMIC_COUNTER_BUFFERS",		GL_MAX_TESS_EVALUATION_ATOMIC_COUNTER_BUFFERS,	0));
 		queryGroup->addChild(new LimitQueryCase(m_context, "max_tess_control_atomic_counters",					"Test MAX_TESS_CONTROL_ATOMIC_COUNTERS",				GL_MAX_TESS_CONTROL_ATOMIC_COUNTERS,			0));
diff --git a/modules/gles31/functional/es31fTextureFormatTests.cpp b/modules/gles31/functional/es31fTextureFormatTests.cpp
index a964047..93c9bab 100644
--- a/modules/gles31/functional/es31fTextureFormatTests.cpp
+++ b/modules/gles31/functional/es31fTextureFormatTests.cpp
@@ -288,7 +288,14 @@
 	tcu::Vec4				colorA	(spec.valueMin.x(), spec.valueMax.y(), spec.valueMin.z(), spec.valueMax.w());
 	tcu::Vec4				colorB	(spec.valueMax.x(), spec.valueMin.y(), spec.valueMax.z(), spec.valueMin.w());
 
+	if (!m_context.getContextInfo().isExtensionSupported("GL_OES_texture_buffer")
+		&& !m_context.getContextInfo().isExtensionSupported("GL_EXT_texture_buffer"))
+	{
+		TCU_THROW(NotSupportedError, "Texture buffers not supported");
+	}
+
 	m_maxTextureBufferSize = m_context.getContextInfo().getInt(GL_MAX_TEXTURE_BUFFER_SIZE);
+
 	if (m_maxTextureBufferSize <= 0)
 		TCU_THROW(NotSupportedError, "GL_MAX_TEXTURE_BUFFER_SIZE > 0 required");
 
diff --git a/modules/glshared/glsTextureTestUtil.cpp b/modules/glshared/glsTextureTestUtil.cpp
index 7e7da21..c08b495 100644
--- a/modules/glshared/glsTextureTestUtil.cpp
+++ b/modules/glshared/glsTextureTestUtil.cpp
@@ -718,8 +718,8 @@
 	tcu::Vec3									triS[2]				= { sq.swizzle(0, 1, 2), sq.swizzle(3, 2, 1) };
 	tcu::Vec3									triT[2]				= { tq.swizzle(0, 1, 2), tq.swizzle(3, 2, 1) };
 	tcu::Vec3									triR[2]				= { rq.swizzle(0, 1, 2), rq.swizzle(3, 2, 1) };
-	float										triLod[2]			= { computeNonProjectedTriLod(params.lodMode, dstSize, srcSize, triS[0], triT[0]) + lodBias,
-																		computeNonProjectedTriLod(params.lodMode, dstSize, srcSize, triS[1], triT[1]) + lodBias};
+	float										triLod[2]			= { de::clamp(computeNonProjectedTriLod(params.lodMode, dstSize, srcSize, triS[0], triT[0]) + lodBias, params.minLod, params.maxLod),
+																		de::clamp(computeNonProjectedTriLod(params.lodMode, dstSize, srcSize, triS[1], triT[1]) + lodBias, params.minLod, params.maxLod) };
 
 	for (int y = 0; y < dst.getHeight(); y++)
 	{
diff --git a/modules/glshared/glsVertexArrayTests.cpp b/modules/glshared/glsVertexArrayTests.cpp
index 54cb855..6509f6d 100644
--- a/modules/glshared/glsVertexArrayTests.cpp
+++ b/modules/glshared/glsVertexArrayTests.cpp
@@ -1308,12 +1308,12 @@
 {
 public:
 	static char*	generateArray			(int seed, GLValue min, GLValue max, int count, int componentCount, int stride, Array::InputType type);
-	static char*	generateQuads			(int seed, int count, int componentCount, int offset, int stride, Array::Primitive primitive, Array::InputType type, GLValue min, GLValue max);
+	static char*	generateQuads			(int seed, int count, int componentCount, int offset, int stride, Array::Primitive primitive, Array::InputType type, GLValue min, GLValue max, float gridSize);
 	static char*	generatePerQuad			(int seed, int count, int componentCount, int stride, Array::Primitive primitive, Array::InputType type, GLValue min, GLValue max);
 
 private:
 	template<typename T>
-	static char*	createQuads		(int seed, int count, int componentCount, int offset, int stride, Array::Primitive primitive, T min, T max);
+	static char*	createQuads		(int seed, int count, int componentCount, int offset, int stride, Array::Primitive primitive, T min, T max, float gridSize);
 	template<typename T>
 	static char*	createPerQuads	(int seed, int count, int componentCount, int stride, Array::Primitive primitive, T min, T max);
 	static char*	createQuadsPacked (int seed, int count, int componentCount, int offset, int stride, Array::Primitive primitive);
@@ -1413,50 +1413,50 @@
 	return data;
 }
 
-char* RandomArrayGenerator::generateQuads (int seed, int count, int componentCount, int offset, int stride, Array::Primitive primitive, Array::InputType type, GLValue min, GLValue max)
+char* RandomArrayGenerator::generateQuads (int seed, int count, int componentCount, int offset, int stride, Array::Primitive primitive, Array::InputType type, GLValue min, GLValue max, float gridSize)
 {
 	char* data = DE_NULL;
 
 	switch (type)
 	{
 		case Array::INPUTTYPE_FLOAT:
-			data = createQuads<GLValue::Float>(seed, count, componentCount, offset, stride, primitive, min.fl, max.fl);
+			data = createQuads<GLValue::Float>(seed, count, componentCount, offset, stride, primitive, min.fl, max.fl, gridSize);
 			break;
 
 		case Array::INPUTTYPE_FIXED:
-			data = createQuads<GLValue::Fixed>(seed, count, componentCount, offset, stride, primitive, min.fi, max.fi);
+			data = createQuads<GLValue::Fixed>(seed, count, componentCount, offset, stride, primitive, min.fi, max.fi, gridSize);
 			break;
 
 		case Array::INPUTTYPE_DOUBLE:
-			data = createQuads<GLValue::Double>(seed, count, componentCount, offset, stride, primitive, min.d, max.d);
+			data = createQuads<GLValue::Double>(seed, count, componentCount, offset, stride, primitive, min.d, max.d, gridSize);
 			break;
 
 		case Array::INPUTTYPE_BYTE:
-			data = createQuads<GLValue::Byte>(seed, count, componentCount, offset, stride, primitive, min.b, max.b);
+			data = createQuads<GLValue::Byte>(seed, count, componentCount, offset, stride, primitive, min.b, max.b, gridSize);
 			break;
 
 		case Array::INPUTTYPE_SHORT:
-			data = createQuads<GLValue::Short>(seed, count, componentCount, offset, stride, primitive, min.s, max.s);
+			data = createQuads<GLValue::Short>(seed, count, componentCount, offset, stride, primitive, min.s, max.s, gridSize);
 			break;
 
 		case Array::INPUTTYPE_UNSIGNED_BYTE:
-			data = createQuads<GLValue::Ubyte>(seed, count, componentCount, offset, stride, primitive, min.ub, max.ub);
+			data = createQuads<GLValue::Ubyte>(seed, count, componentCount, offset, stride, primitive, min.ub, max.ub, gridSize);
 			break;
 
 		case Array::INPUTTYPE_UNSIGNED_SHORT:
-			data = createQuads<GLValue::Ushort>(seed, count, componentCount, offset, stride, primitive, min.us, max.us);
+			data = createQuads<GLValue::Ushort>(seed, count, componentCount, offset, stride, primitive, min.us, max.us, gridSize);
 			break;
 
 		case Array::INPUTTYPE_UNSIGNED_INT:
-			data = createQuads<GLValue::Uint>(seed, count, componentCount, offset, stride, primitive, min.ui, max.ui);
+			data = createQuads<GLValue::Uint>(seed, count, componentCount, offset, stride, primitive, min.ui, max.ui, gridSize);
 			break;
 
 		case Array::INPUTTYPE_INT:
-			data = createQuads<GLValue::Int>(seed, count, componentCount, offset, stride, primitive, min.i, max.i);
+			data = createQuads<GLValue::Int>(seed, count, componentCount, offset, stride, primitive, min.i, max.i, gridSize);
 			break;
 
 		case Array::INPUTTYPE_HALF:
-			data = createQuads<GLValue::Half>(seed, count, componentCount, offset, stride, primitive, min.h, max.h);
+			data = createQuads<GLValue::Half>(seed, count, componentCount, offset, stride, primitive, min.h, max.h, gridSize);
 			break;
 
 		case Array::INPUTTYPE_INT_2_10_10_10:
@@ -1545,13 +1545,20 @@
 }
 
 template<typename T>
-char* RandomArrayGenerator::createQuads (int seed, int count, int componentCount, int offset, int stride, Array::Primitive primitive, T min, T max)
+T roundTo (const T& step, const T& value)
+{
+	return value - (value % step);
+}
+
+template<typename T>
+char* RandomArrayGenerator::createQuads (int seed, int count, int componentCount, int offset, int stride, Array::Primitive primitive, T min, T max, float gridSize)
 {
 	int componentStride = sizeof(T);
 	int quadStride = 0;
 
 	if (stride == 0)
 		stride = componentCount * componentStride;
+
 	DE_ASSERT(stride >= componentCount * componentStride);
 
 	switch (primitive)
@@ -1576,6 +1583,11 @@
 	{
 		case Array::PRIMITIVE_TRIANGLES:
 		{
+			const T	minQuadSize	= T::fromFloat(deFloatAbs(max.template to<float>() - min.template to<float>()) * gridSize);
+			const T	minDiff		= minValue<T>() > minQuadSize
+								? minValue<T>()
+								: minQuadSize;
+
 			for (int quadNdx = 0; quadNdx < count; ++quadNdx)
 			{
 				T x1, x2;
@@ -1585,22 +1597,22 @@
 				// attempt to find a good (i.e not extremely small) quad
 				for (int attemptNdx = 0; attemptNdx < 4; ++attemptNdx)
 				{
-					x1 = getRandom<T>(rnd, min, max);
-					x2 = getRandom<T>(rnd, minValue<T>(), abs<T>(max - x1));
+					x1 = roundTo(minDiff, getRandom<T>(rnd, min, max));
+					x2 = roundTo(minDiff, getRandom<T>(rnd, minDiff, abs<T>(max - x1)));
 
-					y1 = getRandom<T>(rnd, min, max);
-					y2 = getRandom<T>(rnd, minValue<T>(), abs<T>(max - y1));
+					y1 = roundTo(minDiff, getRandom<T>(rnd, min, max));
+					y2 = roundTo(minDiff, getRandom<T>(rnd, minDiff, abs<T>(max - y1)));
 
-					z = (componentCount > 2) ? (getRandom<T>(rnd, min, max)) : (T::create(0));
-					w = (componentCount > 3) ? (getRandom<T>(rnd, min, max)) : (T::create(1));
+					z = (componentCount > 2) ? roundTo(minDiff, (getRandom<T>(rnd, min, max))) : (T::create(0));
+					w = (componentCount > 3) ? roundTo(minDiff, (getRandom<T>(rnd, min, max))) : (T::create(1));
 
 					// no additional components, all is good
 					if (componentCount <= 2)
 						break;
 
 					// The result quad is too thin?
-					if ((deFloatAbs(x2.template to<float>() + z.template to<float>()) < minValue<T>().template to<float>()) ||
-						(deFloatAbs(y2.template to<float>() + w.template to<float>()) < minValue<T>().template to<float>()))
+					if ((deFloatAbs(x2.template to<float>() + z.template to<float>()) < minDiff.template to<float>()) ||
+						(deFloatAbs(y2.template to<float>() + w.template to<float>()) < minDiff.template to<float>()))
 						continue;
 
 					// all ok
@@ -2075,6 +2087,8 @@
 			const char*		data			= DE_NULL;
 			const size_t	stride			= (arraySpec.stride == 0) ? (arraySpec.componentCount * Array::inputTypeSize(arraySpec.inputType)) : (arraySpec.stride);
 			const size_t	bufferSize		= arraySpec.offset + stride * (m_spec.drawCount * primitiveSize - 1) + arraySpec.componentCount  * Array::inputTypeSize(arraySpec.inputType);
+			// Snap values to at least 3x3 grid
+			const float		gridSize		= 3.0f / (float)(de::min(m_renderCtx.getRenderTarget().getWidth(), m_renderCtx.getRenderTarget().getHeight()) - 1);
 
 			switch (m_spec.primitive)
 			{
@@ -2084,7 +2098,7 @@
 				case Array::PRIMITIVE_TRIANGLES:
 					if (arrayNdx == 0)
 					{
-						data = RandomArrayGenerator::generateQuads(seed, m_spec.drawCount, arraySpec.componentCount, arraySpec.offset, arraySpec.stride, m_spec.primitive, arraySpec.inputType, arraySpec.min, arraySpec.max);
+						data = RandomArrayGenerator::generateQuads(seed, m_spec.drawCount, arraySpec.componentCount, arraySpec.offset, arraySpec.stride, m_spec.primitive, arraySpec.inputType, arraySpec.min, arraySpec.max, gridSize);
 					}
 					else
 					{
diff --git a/modules/glshared/glsVertexArrayTests.hpp b/modules/glshared/glsVertexArrayTests.hpp
index 25a04a3..b7539d4 100644
--- a/modules/glshared/glsVertexArrayTests.hpp
+++ b/modules/glshared/glsVertexArrayTests.hpp
@@ -31,6 +31,7 @@
 #include "tcuTestLog.hpp"
 #include "gluShaderProgram.hpp"
 #include "deFloat16.h"
+#include "deMath.h"
 #include "tcuFloat.hpp"
 #include "tcuPixelFormat.hpp"
 #include "sglrContext.hpp"
@@ -236,17 +237,18 @@
 class GLValue
 {
 public:
-
 	template<class Type>
 	class WrappedType
 	{
 	public:
 		static WrappedType<Type>	create			(Type value)							{ WrappedType<Type> v; v.m_value = value; return v; }
+		static WrappedType<Type>	fromFloat		(float value)							{ WrappedType<Type> v; v.m_value = (Type)value; return v; }
 		inline Type					getValue		(void) const							{ return m_value; }
 
 		inline WrappedType<Type>	operator+		(const WrappedType<Type>& other) const	{ return WrappedType<Type>::create((Type)(m_value + other.getValue())); }
 		inline WrappedType<Type>	operator*		(const WrappedType<Type>& other) const	{ return WrappedType<Type>::create((Type)(m_value * other.getValue())); }
 		inline WrappedType<Type>	operator/		(const WrappedType<Type>& other) const	{ return WrappedType<Type>::create((Type)(m_value / other.getValue())); }
+		inline WrappedType<Type>	operator%		(const WrappedType<Type>& other) const	{ return WrappedType<Type>::create((Type)(m_value % other.getValue())); }
 		inline WrappedType<Type>	operator-		(const WrappedType<Type>& other) const	{ return WrappedType<Type>::create((Type)(m_value - other.getValue())); }
 
 		inline WrappedType<Type>&	operator+=		(const WrappedType<Type>& other)		{ m_value += other.getValue(); return *this; }
@@ -268,27 +270,62 @@
 		Type	m_value;
 	};
 
-	typedef WrappedType<deInt16>	Short;
-	typedef WrappedType<deUint16>	Ushort;
+	template<class Type>
+	class WrappedFloatType
+	{
+	public:
+		static WrappedFloatType<Type>	create			(Type value)							{ WrappedFloatType<Type> v; v.m_value = value; return v; }
+		static WrappedFloatType<Type>	fromFloat		(float value)							{ WrappedFloatType<Type> v; v.m_value = (Type)value; return v; }
+		inline Type						getValue		(void) const							{ return m_value; }
 
-	typedef WrappedType<deInt8>		Byte;
-	typedef WrappedType<deUint8>	Ubyte;
+		inline WrappedFloatType<Type>	operator+		(const WrappedFloatType<Type>& other) const	{ return WrappedFloatType<Type>::create((Type)(m_value + other.getValue())); }
+		inline WrappedFloatType<Type>	operator*		(const WrappedFloatType<Type>& other) const	{ return WrappedFloatType<Type>::create((Type)(m_value * other.getValue())); }
+		inline WrappedFloatType<Type>	operator/		(const WrappedFloatType<Type>& other) const	{ return WrappedFloatType<Type>::create((Type)(m_value / other.getValue())); }
+		inline WrappedFloatType<Type>	operator%		(const WrappedFloatType<Type>& other) const	{ return WrappedFloatType<Type>::create((Type)(deMod(m_value, other.getValue()))); }
+		inline WrappedFloatType<Type>	operator-		(const WrappedFloatType<Type>& other) const	{ return WrappedFloatType<Type>::create((Type)(m_value - other.getValue())); }
 
-	typedef WrappedType<float>		Float;
-	typedef WrappedType<double>		Double;
+		inline WrappedFloatType<Type>&	operator+=		(const WrappedFloatType<Type>& other)		{ m_value += other.getValue(); return *this; }
+		inline WrappedFloatType<Type>&	operator*=		(const WrappedFloatType<Type>& other)		{ m_value *= other.getValue(); return *this; }
+		inline WrappedFloatType<Type>&	operator/=		(const WrappedFloatType<Type>& other)		{ m_value /= other.getValue(); return *this; }
+		inline WrappedFloatType<Type>&	operator-=		(const WrappedFloatType<Type>& other)		{ m_value -= other.getValue(); return *this; }
 
-	typedef WrappedType<deInt32>	Int;
-	typedef WrappedType<deUint32>	Uint;
+		inline bool						operator==		(const WrappedFloatType<Type>& other) const	{ return m_value == other.m_value; }
+		inline bool						operator!=		(const WrappedFloatType<Type>& other) const	{ return m_value != other.m_value; }
+		inline bool						operator<		(const WrappedFloatType<Type>& other) const	{ return m_value < other.m_value; }
+		inline bool						operator>		(const WrappedFloatType<Type>& other) const	{ return m_value > other.m_value; }
+		inline bool						operator<=		(const WrappedFloatType<Type>& other) const	{ return m_value <= other.m_value; }
+		inline bool						operator>=		(const WrappedFloatType<Type>& other) const	{ return m_value >= other.m_value; }
+
+		inline							operator Type	(void) const							{ return m_value; }
+		template<class T>
+		inline T						to				(void) const							{ return (T)m_value; }
+	private:
+		Type	m_value;
+	};
+
+	typedef WrappedType<deInt16>		Short;
+	typedef WrappedType<deUint16>		Ushort;
+
+	typedef WrappedType<deInt8>			Byte;
+	typedef WrappedType<deUint8>		Ubyte;
+
+	typedef WrappedFloatType<float>		Float;
+	typedef WrappedFloatType<double>	Double;
+
+	typedef WrappedType<deInt32>		Int;
+	typedef WrappedType<deUint32>		Uint;
 
 	class Half
 	{
 	public:
 		static Half			create			(float value)				{ Half h; h.m_value = floatToHalf(value); return h; }
+		static Half			fromFloat		(float value)				{ Half h; h.m_value = floatToHalf(value); return h; }
 		inline deFloat16	getValue		(void) const				{ return m_value; }
 
 		inline Half			operator+		(const Half& other) const	{ return create(halfToFloat(m_value) + halfToFloat(other.getValue())); }
 		inline Half			operator*		(const Half& other) const	{ return create(halfToFloat(m_value) * halfToFloat(other.getValue())); }
 		inline Half			operator/		(const Half& other) const	{ return create(halfToFloat(m_value) / halfToFloat(other.getValue())); }
+		inline Half			operator%		(const Half& other) const	{ return create(deFloatMod(halfToFloat(m_value), halfToFloat(other.getValue()))); }
 		inline Half			operator-		(const Half& other) const	{ return create(halfToFloat(m_value) - halfToFloat(other.getValue())); }
 
 		inline Half&		operator+=		(const Half& other)			{ m_value = floatToHalf(halfToFloat(other.getValue()) + halfToFloat(m_value)); return *this; }
@@ -316,11 +353,13 @@
 	{
 	public:
 		static Fixed		create			(deInt32 value)				{ Fixed v; v.m_value = value; return v; }
+		static Fixed		fromFloat		(float value)				{ Fixed v; v.m_value = (deInt32)(value * 32768.0f); return v; }
 		inline deInt32		getValue		(void) const				{ return m_value; }
 
 		inline Fixed		operator+		(const Fixed& other) const	{ return create(m_value + other.getValue()); }
 		inline Fixed		operator*		(const Fixed& other) const	{ return create(m_value * other.getValue()); }
 		inline Fixed		operator/		(const Fixed& other) const	{ return create(m_value / other.getValue()); }
+		inline Fixed		operator%		(const Fixed& other) const	{ return create(m_value % other.getValue()); }
 		inline Fixed		operator-		(const Fixed& other) const	{ return create(m_value - other.getValue()); }
 
 		inline Fixed&		operator+=		(const Fixed& other)		{ m_value += other.getValue(); return *this; }
diff --git a/modules/internal/CMakeLists.txt b/modules/internal/CMakeLists.txt
index 2db2b50..cfe0326 100644
--- a/modules/internal/CMakeLists.txt
+++ b/modules/internal/CMakeLists.txt
@@ -23,11 +23,16 @@
 	ditSRGB8ConversionTest.cpp
 	ditTextureFormatTests.cpp
 	ditTextureFormatTests.hpp
+	ditAstcTests.cpp
+	ditAstcTests.hpp
+	ditVulkanTests.cpp
+	ditVulkanTests.hpp
 	)
 
 set(DE_INTERNAL_TESTS_LIBS
 	tcutil
 	referencerenderer
+	vkutil
 	)
 
 add_deqp_module(de-internal-tests "${DE_INTERNAL_TESTS_SRCS}" "${DE_INTERNAL_TESTS_LIBS}" ditTestPackageEntry.cpp)
diff --git a/modules/internal/ditAstcTests.cpp b/modules/internal/ditAstcTests.cpp
new file mode 100644
index 0000000..2146c7c
--- /dev/null
+++ b/modules/internal/ditAstcTests.cpp
@@ -0,0 +1,183 @@
+/*-------------------------------------------------------------------------
+ * drawElements Internal Test Module
+ * ---------------------------------
+ *
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *//*!
+ * \file
+ * \brief ASTC tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "ditAstcTests.hpp"
+
+#include "tcuCompressedTexture.hpp"
+#include "tcuAstcUtil.hpp"
+
+#include "deUniquePtr.hpp"
+#include "deStringUtil.hpp"
+
+namespace dit
+{
+
+using std::string;
+using std::vector;
+using namespace tcu;
+
+namespace
+{
+
+class AstcCase : public tcu::TestCase
+{
+public:
+								AstcCase		(tcu::TestContext& testCtx, CompressedTexFormat format);
+
+	IterateResult				iterate			(void);
+
+private:
+	const CompressedTexFormat	m_format;
+};
+
+static const string getASTCFormatShortName (CompressedTexFormat format)
+{
+	DE_ASSERT(isAstcFormat(format));
+	const IVec3 blockSize = getBlockPixelSize(format);
+	DE_ASSERT(blockSize.z() == 1);
+
+	return de::toString(blockSize.x()) + "x" + de::toString(blockSize.y()) + (tcu::isAstcSRGBFormat(format) ? "_srgb" : "");
+}
+
+AstcCase::AstcCase (tcu::TestContext& testCtx, CompressedTexFormat format)
+	: tcu::TestCase	(testCtx, getASTCFormatShortName(format).c_str(), "")
+	, m_format		(format)
+{
+}
+
+void testDecompress (CompressedTexFormat format, size_t numBlocks, const deUint8* data)
+{
+	const IVec3			blockPixelSize		= getBlockPixelSize(format);
+
+	for (int astcModeNdx = 0; astcModeNdx < TexDecompressionParams::ASTCMODE_LAST; astcModeNdx++)
+	{
+		const TexDecompressionParams	decompressionParams		((TexDecompressionParams::AstcMode)astcModeNdx);
+		const TextureFormat				uncompressedFormat		= getUncompressedFormat(format);
+		TextureLevel					texture					(uncompressedFormat, blockPixelSize.x()*(int)numBlocks, blockPixelSize.y());
+
+		decompress(texture.getAccess(), format, data, decompressionParams);
+	}
+}
+
+void verifyBlocksValid (CompressedTexFormat format, TexDecompressionParams::AstcMode mode, size_t numBlocks, const deUint8* data)
+{
+	for (size_t blockNdx = 0; blockNdx < numBlocks; blockNdx++)
+		TCU_CHECK(astc::isValidBlock(data + blockNdx*astc::BLOCK_SIZE_BYTES, format, mode));
+}
+
+inline size_t getNumBlocksFromBytes (size_t numBytes)
+{
+	TCU_CHECK(numBytes % astc::BLOCK_SIZE_BYTES == 0);
+	return (numBytes / astc::BLOCK_SIZE_BYTES);
+}
+
+AstcCase::IterateResult AstcCase::iterate (void)
+{
+	vector<deUint8> generatedData;
+
+	// Verify that we can generate & decode data with all BlockTestTypes
+	for (int blockTestTypeNdx = 0; blockTestTypeNdx < astc::BLOCK_TEST_TYPE_LAST; blockTestTypeNdx++)
+	{
+		const astc::BlockTestType	blockTestType	= (const astc::BlockTestType)blockTestTypeNdx;
+
+		if (astc::isBlockTestTypeHDROnly(blockTestType) && isAstcSRGBFormat(m_format))
+			continue;
+
+		generatedData.clear();
+		astc::generateBlockCaseTestData(generatedData, m_format, blockTestType);
+
+		testDecompress(m_format, getNumBlocksFromBytes(generatedData.size()), &generatedData[0]);
+
+		// All but the random case should generate only valid blocks
+		if (blockTestType != astc::BLOCK_TEST_TYPE_RANDOM)
+		{
+			verifyBlocksValid(m_format, TexDecompressionParams::ASTCMODE_HDR, getNumBlocksFromBytes(generatedData.size()), &generatedData[0]);
+
+			if (!astc::isBlockTestTypeHDROnly(blockTestType))
+				verifyBlocksValid(m_format, TexDecompressionParams::ASTCMODE_LDR, getNumBlocksFromBytes(generatedData.size()), &generatedData[0]);
+		}
+	}
+
+	// Verify generating void extent blocks (format-independent)
+	{
+		const size_t		numBlocks		= 1024;
+
+		generatedData.resize(numBlocks*astc::BLOCK_SIZE_BYTES);
+		astc::generateDummyVoidExtentBlocks(&generatedData[0], numBlocks);
+
+		testDecompress(m_format, numBlocks, &generatedData[0]);
+
+		verifyBlocksValid(m_format, TexDecompressionParams::ASTCMODE_LDR, numBlocks, &generatedData[0]);
+		verifyBlocksValid(m_format, TexDecompressionParams::ASTCMODE_HDR, numBlocks, &generatedData[0]);
+	}
+
+	// Verify generating dummy normal blocks
+	{
+		const size_t		numBlocks			= 1024;
+		const IVec3			blockPixelSize		= getBlockPixelSize(m_format);
+
+		generatedData.resize(numBlocks*astc::BLOCK_SIZE_BYTES);
+		astc::generateDummyNormalBlocks(&generatedData[0], numBlocks, blockPixelSize.x(), blockPixelSize.y());
+
+		testDecompress(m_format, numBlocks, &generatedData[0]);
+
+		verifyBlocksValid(m_format, TexDecompressionParams::ASTCMODE_LDR, numBlocks, &generatedData[0]);
+		verifyBlocksValid(m_format, TexDecompressionParams::ASTCMODE_HDR, numBlocks, &generatedData[0]);
+	}
+
+	// Verify generating random valid blocks
+	for (int astcModeNdx = 0; astcModeNdx < TexDecompressionParams::ASTCMODE_LAST; astcModeNdx++)
+	{
+		const TexDecompressionParams::AstcMode	mode		= (TexDecompressionParams::AstcMode)astcModeNdx;
+		const size_t							numBlocks	= 1024;
+
+		generatedData.resize(numBlocks*astc::BLOCK_SIZE_BYTES);
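+		// Seed the generator from format and mode so each combination gets distinct but reproducible blocks.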
+		astc::generateRandomValidBlocks(&generatedData[0], numBlocks, m_format, mode, deInt32Hash(m_format) ^ deInt32Hash(mode));
+
+		testDecompress(m_format, numBlocks, &generatedData[0]);
+
+		verifyBlocksValid(m_format, mode, numBlocks, &generatedData[0]);
+	}
+
+	m_testCtx.setTestResult(QP_TEST_RESULT_PASS, "All checks passed");
+	return STOP;
+}
+
+} // anonymous
+
+tcu::TestCaseGroup* createAstcTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	astcTests	(new tcu::TestCaseGroup(testCtx, "astc", "Tests for ASTC Utilities"));
+
+	for (int formatNdx = 0; formatNdx < COMPRESSEDTEXFORMAT_LAST; formatNdx++)
+	{
+		const CompressedTexFormat	format	= (CompressedTexFormat)formatNdx;
+
+		if (isAstcFormat(format))
+			astcTests->addChild(new AstcCase(testCtx, format));
+	}
+
+	return astcTests.release();
+}
+
+} // dit
diff --git a/modules/internal/ditAstcTests.hpp b/modules/internal/ditAstcTests.hpp
new file mode 100644
index 0000000..7207efc
--- /dev/null
+++ b/modules/internal/ditAstcTests.hpp
@@ -0,0 +1,36 @@
+#ifndef _DITASTCTESTS_HPP
+#define _DITASTCTESTS_HPP
+/*-------------------------------------------------------------------------
+ * drawElements Internal Test Module
+ * ---------------------------------
+ *
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *//*!
+ * \file
+ * \brief ASTC tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace dit
+{
+
+tcu::TestCaseGroup*	createAstcTests	(tcu::TestContext& testCtx);
+
+} // dit
+
+#endif // _DITASTCTESTS_HPP
diff --git a/modules/internal/ditDelibsTests.cpp b/modules/internal/ditDelibsTests.cpp
index 85f744a..665cd89 100644
--- a/modules/internal/ditDelibsTests.cpp
+++ b/modules/internal/ditDelibsTests.cpp
@@ -45,6 +45,7 @@
 #include "deInt32.h"
 #include "deMath.h"
 #include "deSha1.h"
+#include "deMemory.h"
 
 // decpp
 #include "deBlockBuffer.hpp"
@@ -60,6 +61,7 @@
 #include "deStringUtil.hpp"
 #include "deSpinBarrier.hpp"
 #include "deSTLUtil.hpp"
+#include "deAppendList.hpp"
 
 namespace dit
 {
@@ -160,6 +162,7 @@
 		addChild(new SelfCheckCase(m_testCtx, "int32",	"deInt32_selfTest()",	deInt32_selfTest));
 		addChild(new SelfCheckCase(m_testCtx, "math",	"deMath_selfTest()",	deMath_selfTest));
 		addChild(new SelfCheckCase(m_testCtx, "sha1",	"deSha1_selfTest()",	deSha1_selfTest));
+		addChild(new SelfCheckCase(m_testCtx, "memory",	"deMemory_selfTest()",	deMemory_selfTest));
 	}
 };
 
@@ -186,6 +189,7 @@
 		addChild(new SelfCheckCase(m_testCtx, "string_util",				"de::StringUtil_selfTest()",			de::StringUtil_selfTest));
 		addChild(new SelfCheckCase(m_testCtx, "spin_barrier",				"de::SpinBarrier_selfTest()",			de::SpinBarrier_selfTest));
 		addChild(new SelfCheckCase(m_testCtx, "stl_util",					"de::STLUtil_selfTest()",				de::STLUtil_selfTest));
+		addChild(new SelfCheckCase(m_testCtx, "append_list",				"de::AppendList_selfTest()",			de::AppendList_selfTest));
 	}
 };
 
diff --git a/modules/internal/ditFrameworkTests.cpp b/modules/internal/ditFrameworkTests.cpp
index 8725240..d4d1dd8 100644
--- a/modules/internal/ditFrameworkTests.cpp
+++ b/modules/internal/ditFrameworkTests.cpp
@@ -23,6 +23,9 @@
 
 #include "ditFrameworkTests.hpp"
 #include "ditTextureFormatTests.hpp"
+#include "ditAstcTests.hpp"
+#include "ditVulkanTests.hpp"
+
 #include "tcuFloatFormat.hpp"
 #include "tcuEither.hpp"
 #include "tcuTestLog.hpp"
@@ -911,6 +914,8 @@
 	addChild(new CaseListParserTests	(m_testCtx));
 	addChild(new ReferenceRendererTests	(m_testCtx));
 	addChild(createTextureFormatTests	(m_testCtx));
+	addChild(createAstcTests			(m_testCtx));
+	addChild(createVulkanTests			(m_testCtx));
 }
 
 } // dit
diff --git a/modules/internal/ditTextureFormatTests.cpp b/modules/internal/ditTextureFormatTests.cpp
index a7afdb1..6cfa441 100644
--- a/modules/internal/ditTextureFormatTests.cpp
+++ b/modules/internal/ditTextureFormatTests.cpp
@@ -426,6 +426,26 @@
 	0x41d80000, 0x40e00000, 0x41c80000, 0x00000000,
 };
 
+static const deUint8 s_unormShort1555In[] =
+{
+	0xf8, 0xc5, 0x1f, 0x6c,
+	0xf0, 0x2f, 0xf2, 0x95,
+};
+static const deUint32 s_unormShort1555FloatRef[] =
+{
+	0x3f800000, 0x3f0c6319, 0x3ef7bdef, 0x3f46318c,
+	0x00000000, 0x3f5ef7be, 0x00000000, 0x3f800000,
+	0x00000000, 0x3eb5ad6b, 0x3f800000, 0x3f042108,
+	0x3f800000, 0x3e25294a, 0x3ef7bdef, 0x3f14a529,
+};
+static const deUint32 s_unormShort1555IntRef[] =
+{
+	0x00000001, 0x00000011, 0x0000000f, 0x00000018,
+	0x00000000, 0x0000001b, 0x00000000, 0x0000001f,
+	0x00000000, 0x0000000b, 0x0000001f, 0x00000010,
+	0x00000001, 0x00000005, 0x0000000f, 0x00000012,
+};
+
 static const deUint8 s_unormInt101010In[] =
 {
 	0x81, 0xb3, 0x67, 0x51,
@@ -1032,6 +1052,7 @@
 	{ s_unormShort555In,			DE_LENGTH_OF_ARRAY(s_unormShort555In),				s_unormShort555FloatRef,			s_unormShort555IntRef,			s_unormShort555IntRef,			},
 	{ s_unormShort4444In,			DE_LENGTH_OF_ARRAY(s_unormShort4444In),				s_unormShort4444FloatRef,			s_unormShort4444IntRef,			s_unormShort4444IntRef,			},
 	{ s_unormShort5551In,			DE_LENGTH_OF_ARRAY(s_unormShort5551In),				s_unormShort5551FloatRef,			s_unormShort5551IntRef,			s_unormShort5551IntRef,			},
+	{ s_unormShort1555In,			DE_LENGTH_OF_ARRAY(s_unormShort1555In),				s_unormShort1555FloatRef,			s_unormShort1555IntRef,			s_unormShort1555IntRef,			},
 	{ s_unormInt101010In,			DE_LENGTH_OF_ARRAY(s_unormInt101010In),				s_unormInt101010FloatRef,			s_unormInt101010IntRef,			s_unormInt101010IntRef			},
 
 	// \note Same input data & int reference used for {U,S}NORM_INT_1010102_REV
@@ -1353,6 +1374,19 @@
 		}
 	}
 
+	void verifyInfoQueries (void)
+	{
+		const tcu::TextureChannelClass	chnClass	= tcu::getTextureChannelClass(m_format.type);
+		const tcu::TextureFormatInfo	fmtInfo		= tcu::getTextureFormatInfo(m_format);
+
+		if (tcu::isCombinedDepthStencilType(m_format.type))
+			TCU_CHECK(chnClass == tcu::TEXTURECHANNELCLASS_LAST);
+		else
+			TCU_CHECK(de::inBounds(chnClass, (tcu::TextureChannelClass)0, tcu::TEXTURECHANNELCLASS_LAST));
+
+		DE_UNREF(fmtInfo);
+	}
+
 	const TextureFormat		m_format;
 };
 
@@ -1375,6 +1409,8 @@
 
 		m_testCtx.setTestResult(QP_TEST_RESULT_PASS, "Pass");
 
+		verifyInfoQueries();
+
 		verifyRead(inputAccess);
 
 		// \todo [2015-10-12 pyry] Handle lossy conversion with *NORM_INT32
@@ -1408,6 +1444,8 @@
 
 		m_testCtx.setTestResult(QP_TEST_RESULT_PASS, "Pass");
 
+		verifyInfoQueries();
+
 		verifyRead(inputDepthAccess);
 
 		m_testCtx.getLog() << TestLog::Message << "Copying with getPixel() -> setPixel()" << TestLog::EndMessage;
@@ -1444,6 +1482,8 @@
 
 		m_testCtx.setTestResult(QP_TEST_RESULT_PASS, "Pass");
 
+		verifyInfoQueries();
+
 		verifyRead(inputStencilAccess);
 
 		m_testCtx.getLog() << TestLog::Message << "Copying with getPixel() -> setPixel()" << TestLog::EndMessage;
@@ -1482,6 +1522,8 @@
 
 		m_testCtx.setTestResult(QP_TEST_RESULT_PASS, "Pass");
 
+		verifyInfoQueries();
+
 		verifyRead(inputDepthAccess);
 		verifyRead(inputStencilAccess);
 
diff --git a/modules/internal/ditVulkanTests.cpp b/modules/internal/ditVulkanTests.cpp
new file mode 100644
index 0000000..28c416b
--- /dev/null
+++ b/modules/internal/ditVulkanTests.cpp
@@ -0,0 +1,43 @@
+/*-------------------------------------------------------------------------
+ * drawElements Internal Test Module
+ * ---------------------------------
+ *
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan framework tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "ditVulkanTests.hpp"
+#include "ditTestCase.hpp"
+
+#include "vkImageUtil.hpp"
+
+#include "deUniquePtr.hpp"
+
+namespace dit
+{
+
+tcu::TestCaseGroup* createVulkanTests (tcu::TestContext& testCtx)
+{
+	de::MovePtr<tcu::TestCaseGroup>	group	(new tcu::TestCaseGroup(testCtx, "vulkan", "Vulkan Framework Tests"));
+
+	group->addChild(new SelfCheckCase(testCtx, "image_util", "ImageUtil self-check tests", vk::imageUtilSelfTest));
+
+	return group.release();
+}
+
+} // dit
diff --git a/modules/internal/ditVulkanTests.hpp b/modules/internal/ditVulkanTests.hpp
new file mode 100644
index 0000000..6f1a05c
--- /dev/null
+++ b/modules/internal/ditVulkanTests.hpp
@@ -0,0 +1,36 @@
+#ifndef _DITVULKANTESTS_HPP
+#define _DITVULKANTESTS_HPP
+/*-------------------------------------------------------------------------
+ * drawElements Internal Test Module
+ * ---------------------------------
+ *
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ *//*!
+ * \file
+ * \brief Vulkan framework tests.
+ *//*--------------------------------------------------------------------*/
+
+#include "tcuDefs.hpp"
+#include "tcuTestCase.hpp"
+
+namespace dit
+{
+
+tcu::TestCaseGroup*	createVulkanTests	(tcu::TestContext& testCtx);
+
+} // dit
+
+#endif // _DITVULKANTESTS_HPP
diff --git a/scripts/build_android_mustpass.py b/scripts/build_android_mustpass.py
index 3c87553..aec4d3c 100644
--- a/scripts/build_android_mustpass.py
+++ b/scripts/build_android_mustpass.py
@@ -20,18 +20,12 @@
 #
 #-------------------------------------------------------------------------
 
-from build.common import *
+from build.common import DEQP_DIR
 from build.config import ANY_GENERATOR
-from build.build import build
-from build_caselists import Module, getBuildConfig, genCaseList, getCaseListPath, DEFAULT_BUILD_DIR, DEFAULT_TARGET
-from fnmatch import fnmatch
-from copy import copy
+from build_caselists import Module, getModuleByName, getBuildConfig, DEFAULT_BUILD_DIR, DEFAULT_TARGET
+from mustpass import Project, Package, Mustpass, Configuration, include, exclude, genMustpassLists
 
-import xml.etree.cElementTree as ElementTree
-import xml.dom.minidom as minidom
-
-CTS_DATA_DIR	= os.path.join(DEQP_DIR, "android", "cts")
-APK_NAME 		= "com.drawelements.deqp.apk"
+import os
 
 COPYRIGHT_DECLARATION = """
      Copyright (C) 2015 The Android Open Source Project
@@ -49,388 +43,15 @@
      limitations under the License.
      """
 
-GENERATED_FILE_WARNING = """
-     This file has been automatically generated. Edit with caution.
-     """
+CTS_DATA_DIR					= os.path.join(DEQP_DIR, "android", "cts")
 
-class Configuration:
-	def __init__ (self, name, glconfig, rotation, surfacetype, filters):
-		self.name			= name
-		self.glconfig		= glconfig
-		self.rotation		= rotation
-		self.surfacetype	= surfacetype
-		self.filters		= filters
+CTS_PROJECT						= Project(path = CTS_DATA_DIR, copyright = COPYRIGHT_DECLARATION)
 
-class Package:
-	def __init__ (self, module, configurations):
-		self.module			= module
-		self.configurations	= configurations
-
-class Mustpass:
-	def __init__ (self, version, packages):
-		self.version	= version
-		self.packages	= packages
-
-class Filter:
-	TYPE_INCLUDE = 0
-	TYPE_EXCLUDE = 1
-
-	def __init__ (self, type, filename):
-		self.type		= type
-		self.filename	= filename
-
-class TestRoot:
-	def __init__ (self):
-		self.children	= []
-
-class TestGroup:
-	def __init__ (self, name):
-		self.name		= name
-		self.children	= []
-
-class TestCase:
-	def __init__ (self, name):
-		self.name			= name
-		self.configurations	= []
-
-class GLESVersion:
-	def __init__(self, major, minor):
-		self.major = major
-		self.minor = minor
-
-	def encode (self):
-		return (self.major << 16) | (self.minor)
-
-def getModuleGLESVersion (module):
-	versions = {
-		'dEQP-EGL':		GLESVersion(2,0),
-		'dEQP-GLES2':	GLESVersion(2,0),
-		'dEQP-GLES3':	GLESVersion(3,0),
-		'dEQP-GLES31':	GLESVersion(3,1)
-	}
-	return versions[module.name]
-
-def getSrcDir (mustpass):
-	return os.path.join(CTS_DATA_DIR, mustpass.version, "src")
-
-def getTmpDir (mustpass):
-	return os.path.join(CTS_DATA_DIR, mustpass.version, "tmp")
-
-def getModuleShorthand (module):
-	assert module.name[:5] == "dEQP-"
-	return module.name[5:].lower()
-
-def getCaseListFileName (package, configuration):
-	return "%s-%s.txt" % (getModuleShorthand(package.module), configuration.name)
-
-def getDstCaseListPath (mustpass, package, configuration):
-	return os.path.join(CTS_DATA_DIR, mustpass.version, getCaseListFileName(package, configuration))
-
-def getCTSPackageName (package):
-	return "com.drawelements.deqp." + getModuleShorthand(package.module)
-
-def getCommandLine (config):
-	return "--deqp-gl-config-name=%s --deqp-screen-rotation=%s --deqp-surface-type=%s --deqp-watchdog=enable" % (config.glconfig, config.rotation, config.surfacetype)
-
-def readCaseList (filename):
-	cases = []
-	with open(filename, 'rb') as f:
-		for line in f:
-			if line[:6] == "TEST: ":
-				cases.append(line[6:].strip())
-	return cases
-
-def getCaseList (buildCfg, generator, module):
-	build(buildCfg, generator, [module.binName])
-	genCaseList(buildCfg, generator, module, "txt")
-	return readCaseList(getCaseListPath(buildCfg, module, "txt"))
-
-def readPatternList (filename):
-	ptrns = []
-	with open(filename, 'rb') as f:
-		for line in f:
-			line = line.strip()
-			if len(line) > 0 and line[0] != '#':
-				ptrns.append(line)
-	return ptrns
-
-def applyPatterns (caseList, patterns, filename, op):
-	matched			= set()
-	errors			= []
-	curList			= copy(caseList)
-	trivialPtrns	= [p for p in patterns if p.find('*') < 0]
-	regularPtrns	= [p for p in patterns if p.find('*') >= 0]
-
-	# Apply trivial (just case paths)
-	allCasesSet		= set(caseList)
-	for path in trivialPtrns:
-		if path in allCasesSet:
-			if path in matched:
-				errors.append((path, "Same case specified more than once"))
-			matched.add(path)
-		else:
-			errors.append((path, "Test case not found"))
-
-	curList = [c for c in curList if c not in matched]
-
-	for pattern in regularPtrns:
-		matchedThisPtrn = set()
-
-		for case in curList:
-			if fnmatch(case, pattern):
-				matchedThisPtrn.add(case)
-
-		if len(matchedThisPtrn) == 0:
-			errors.append((pattern, "Pattern didn't match any cases"))
-
-		matched	= matched | matchedThisPtrn
-		curList = [c for c in curList if c not in matched]
-
-	for pattern, reason in errors:
-		print "ERROR: %s: %s" % (reason, pattern)
-
-	if len(errors) > 0:
-		die("Found %s invalid patterns while processing file %s" % (len(errors), filename))
-
-	return [c for c in caseList if op(c in matched)]
-
-def applyInclude (caseList, patterns, filename):
-	return applyPatterns(caseList, patterns, filename, lambda b: b)
-
-def applyExclude (caseList, patterns, filename):
-	return applyPatterns(caseList, patterns, filename, lambda b: not b)
-
-def readPatternLists (mustpass):
-	lists = {}
-	for package in mustpass.packages:
-		for cfg in package.configurations:
-			for filter in cfg.filters:
-				if not filter.filename in lists:
-					lists[filter.filename] = readPatternList(os.path.join(getSrcDir(mustpass), filter.filename))
-	return lists
-
-def applyFilters (caseList, patternLists, filters):
-	res = copy(caseList)
-	for filter in filters:
-		ptrnList = patternLists[filter.filename]
-		if filter.type == Filter.TYPE_INCLUDE:
-			res = applyInclude(res, ptrnList, filter.filename)
-		else:
-			assert filter.type == Filter.TYPE_EXCLUDE
-			res = applyExclude(res, ptrnList, filter.filename)
-	return res
-
-def appendToHierarchy (root, casePath):
-	def findChild (node, name):
-		for child in node.children:
-			if child.name == name:
-				return child
-		return None
-
-	curNode		= root
-	components	= casePath.split('.')
-
-	for component in components[:-1]:
-		nextNode = findChild(curNode, component)
-		if not nextNode:
-			nextNode = TestGroup(component)
-			curNode.children.append(nextNode)
-		curNode = nextNode
-
-	if not findChild(curNode, components[-1]):
-		curNode.children.append(TestCase(components[-1]))
-
-def buildTestHierachy (caseList):
-	root = TestRoot()
-	for case in caseList:
-		appendToHierarchy(root, case)
-	return root
-
-def buildTestCaseMap (root):
-	caseMap = {}
-
-	def recursiveBuild (curNode, prefix):
-		curPath = prefix + curNode.name
-		if isinstance(curNode, TestCase):
-			caseMap[curPath] = curNode
-		else:
-			for child in curNode.children:
-				recursiveBuild(child, curPath + '.')
-
-	for child in root.children:
-		recursiveBuild(child, '')
-
-	return caseMap
-
-def include (filename):
-	return Filter(Filter.TYPE_INCLUDE, filename)
-
-def exclude (filename):
-	return Filter(Filter.TYPE_EXCLUDE, filename)
-
-def prettifyXML (doc):
-	doc.insert(0, ElementTree.Comment(COPYRIGHT_DECLARATION))
-	doc.insert(1, ElementTree.Comment(GENERATED_FILE_WARNING))
-	uglyString	= ElementTree.tostring(doc, 'utf-8')
-	reparsed	= minidom.parseString(uglyString)
-	return reparsed.toprettyxml(indent='\t', encoding='utf-8')
-
-def genCTSPackageXML (package, root):
-	def isLeafGroup (testGroup):
-		numGroups	= 0
-		numTests	= 0
-
-		for child in testGroup.children:
-			if isinstance(child, TestCase):
-				numTests += 1
-			else:
-				numGroups += 1
-
-		assert numGroups + numTests > 0
-
-		if numGroups > 0 and numTests > 0:
-			die("Mixed groups and cases in %s" % testGroup.name)
-
-		return numGroups == 0
-
-	def makeConfiguration (parentElem, configuration):
-		return ElementTree.SubElement(parentElem, "TestInstance", glconfig=configuration.glconfig, rotation=configuration.rotation, surfacetype=configuration.surfacetype)
-
-	def makeTestCase (parentElem, testCase):
-		caseElem = ElementTree.SubElement(parentElem, "Test", name=testCase.name)
-		for config in testCase.configurations:
-			makeConfiguration(caseElem, config)
-		return caseElem
-
-	def makeTestGroup (parentElem, testGroup):
-		groupElem = ElementTree.SubElement(parentElem, "TestCase" if isLeafGroup(testGroup) else "TestSuite", name=testGroup.name)
-		for child in testGroup.children:
-			if isinstance(child, TestCase):
-				makeTestCase(groupElem, child)
-			else:
-				makeTestGroup(groupElem, child)
-		return groupElem
-
-	pkgElem = ElementTree.Element("TestPackage",
-								  name				= package.module.name,
-								  appPackageName	= getCTSPackageName(package),
-								  testType			= "deqpTest")
-
-	pkgElem.set("xmlns:deqp", "http://drawelements.com/deqp")
-	pkgElem.set("deqp:glesVersion", str(getModuleGLESVersion(package.module).encode()))
-
-	for child in root.children:
-		makeTestGroup(pkgElem, child)
-
-	return pkgElem
-
-def genSpecXML (mustpass):
-	mustpassElem = ElementTree.Element("Mustpass", version = mustpass.version)
-
-	for package in mustpass.packages:
-		packageElem = ElementTree.SubElement(mustpassElem, "TestPackage", name = package.module.name)
-
-		for config in package.configurations:
-			configElem = ElementTree.SubElement(packageElem, "Configuration",
-												name			= config.name,
-												caseListFile	= getCaseListFileName(package, config),
-												commandLine		= getCommandLine(config))
-
-	return mustpassElem
-
-def addOptionElement (parent, optionName, optionValue):
-	ElementTree.SubElement(parent, "option", name=optionName, value=optionValue)
-
-def genAndroidTestXml (mustpass):
-	INSTALLER_CLASS = "com.android.compatibility.common.tradefed.targetprep.ApkInstaller"
-	RUNNER_CLASS = "com.drawelements.deqp.runner.DeqpTestRunner"
-	configElement = ElementTree.Element("configuration")
-	preparerElement = ElementTree.SubElement(configElement, "target_preparer")
-	preparerElement.set("class", INSTALLER_CLASS)
-	addOptionElement(preparerElement, "cleanup-apks", "true")
-	addOptionElement(preparerElement, "test-file-name", APK_NAME)
-
-	for package in mustpass.packages:
-		for config in package.configurations:
-			testElement = ElementTree.SubElement(configElement, "test")
-			testElement.set("class", RUNNER_CLASS)
-			addOptionElement(testElement, "deqp-package", package.module.name)
-			addOptionElement(testElement, "deqp-caselist-file", getCaseListFileName(package,config))
-			# \todo [2015-10-16 kalle]: Replace with just command line? - requires simplifications in the runner/tests as well.
-			addOptionElement(testElement, "deqp-gl-config-name", config.glconfig)
-			addOptionElement(testElement, "deqp-surface-type", config.surfacetype)
-			addOptionElement(testElement, "deqp-screen-rotation", config.rotation)
-
-	return configElement
-
-
-def genMustpass (mustpass, moduleCaseLists):
-	print "Generating mustpass '%s'" % mustpass.version
-
-	patternLists = readPatternLists(mustpass)
-
-	for package in mustpass.packages:
-		allCasesInPkg		= moduleCaseLists[package.module]
-		matchingByConfig	= {}
-		allMatchingSet		= set()
-
-		for config in package.configurations:
-			filtered	= applyFilters(allCasesInPkg, patternLists, config.filters)
-			dstFile		= getDstCaseListPath(mustpass, package, config)
-
-			print "  Writing deqp caselist: " + dstFile
-			writeFile(dstFile, "\n".join(filtered) + "\n")
-
-			matchingByConfig[config]	= filtered
-			allMatchingSet				= allMatchingSet | set(filtered)
-
-		allMatchingCases	= [c for c in allCasesInPkg if c in allMatchingSet] # To preserve ordering
-		root				= buildTestHierachy(allMatchingCases)
-		testCaseMap			= buildTestCaseMap(root)
-
-		for config in package.configurations:
-			for case in matchingByConfig[config]:
-				testCaseMap[case].configurations.append(config)
-
-		# NOTE: CTS v2 does not need package XML files. Remove when transition is complete.
-		packageXml	= genCTSPackageXML(package, root)
-		xmlFilename	= os.path.join(CTS_DATA_DIR, mustpass.version, getCTSPackageName(package) + ".xml")
-
-		print "  Writing CTS caselist: " + xmlFilename
-		writeFile(xmlFilename, prettifyXML(packageXml))
-
-	specXML			= genSpecXML(mustpass)
-	specFilename	= os.path.join(CTS_DATA_DIR, mustpass.version, "mustpass.xml")
-
-	print "  Writing spec: " + specFilename
-	writeFile(specFilename, prettifyXML(specXML))
-
-	# TODO: Which is the best selector mechanism?
-	if (mustpass.version == "mnc"):
-		androidTestXML		= genAndroidTestXml(mustpass)
-		androidTestFilename	= os.path.join(CTS_DATA_DIR, "AndroidTest.xml")
-
-		print "  Writing AndroidTest.xml: " + androidTestFilename
-		writeFile(androidTestFilename, prettifyXML(androidTestXML))
-
-	print "Done!"
-
-def genMustpassLists (mustpassLists, generator, buildCfg):
-	moduleCaseLists = {}
-
-	# Getting case lists involves invoking build, so we want to cache the results
-	for mustpass in mustpassLists:
-		for package in mustpass.packages:
-			if not package.module in moduleCaseLists:
-				moduleCaseLists[package.module] = getCaseList(buildCfg, generator, package.module)
-
-	for mustpass in mustpassLists:
-		genMustpass(mustpass, moduleCaseLists)
-
-EGL_MODULE						= Module(name = "dEQP-EGL", dirName = "egl", binName = "deqp-egl")
-GLES2_MODULE					= Module(name = "dEQP-GLES2", dirName = "gles2", binName = "deqp-gles2")
-GLES3_MODULE					= Module(name = "dEQP-GLES3", dirName = "gles3", binName = "deqp-gles3")
-GLES31_MODULE					= Module(name = "dEQP-GLES31", dirName = "gles31", binName = "deqp-gles31")
+EGL_MODULE						= getModuleByName("dEQP-EGL")
+GLES2_MODULE					= getModuleByName("dEQP-GLES2")
+GLES3_MODULE					= getModuleByName("dEQP-GLES3")
+GLES31_MODULE					= getModuleByName("dEQP-GLES31")
+VULKAN_MODULE					= getModuleByName("dEQP-VK")
 
 # Lollipop
 
@@ -578,7 +199,7 @@
 
 # Master
 
-MASTER_EGL_COMMON_FILTERS		= [include("egl-master.txt")]
+MASTER_EGL_COMMON_FILTERS		= [include("egl-master.txt"), exclude("egl-internal-api-tests.txt")]
 MASTER_EGL_PKG					= Package(module = EGL_MODULE, configurations = [
 		# Master
 		Configuration(name			= "master",
@@ -708,11 +329,13 @@
 	])
 
 MUSTPASS_LISTS				= [
-		Mustpass(version = "lmp",		packages = [LMP_GLES3_PKG, LMP_GLES31_PKG]),
-		Mustpass(version = "lmp-mr1",	packages = [LMP_MR1_GLES3_PKG, LMP_MR1_GLES31_PKG]),
-		Mustpass(version = "mnc",		packages = [MNC_EGL_PKG, MNC_GLES2_PKG, MNC_GLES3_PKG, MNC_GLES31_PKG]),
-		Mustpass(version = "master",	packages = [MASTER_EGL_PKG, MASTER_GLES2_PKG, MASTER_GLES3_PKG, MASTER_GLES31_PKG])
+		Mustpass(project = CTS_PROJECT, version = "lmp",		packages = [LMP_GLES3_PKG, LMP_GLES31_PKG]),
+		Mustpass(project = CTS_PROJECT, version = "lmp-mr1",	packages = [LMP_MR1_GLES3_PKG, LMP_MR1_GLES31_PKG]),
+		Mustpass(project = CTS_PROJECT, version = "mnc",		packages = [MNC_EGL_PKG, MNC_GLES2_PKG, MNC_GLES3_PKG, MNC_GLES31_PKG]),
+		Mustpass(project = CTS_PROJECT, version = "master",		packages = [MASTER_EGL_PKG, MASTER_GLES2_PKG, MASTER_GLES3_PKG, MASTER_GLES31_PKG])
 	]
 
+BUILD_CONFIG				= getBuildConfig(DEFAULT_BUILD_DIR, DEFAULT_TARGET, "Debug")
+
 if __name__ == "__main__":
-	genMustpassLists(MUSTPASS_LISTS, ANY_GENERATOR, getBuildConfig(DEFAULT_BUILD_DIR, DEFAULT_TARGET, "Debug"))
+	genMustpassLists(MUSTPASS_LISTS, ANY_GENERATOR, BUILD_CONFIG)
diff --git a/scripts/build_caselists.py b/scripts/build_caselists.py
index ec9ea01..329eaaa 100644
--- a/scripts/build_caselists.py
+++ b/scripts/build_caselists.py
@@ -38,11 +38,12 @@
 		self.binName	= binName
 
 MODULES = [
-	Module("dE-IT",			"internal",		"de-internal-tests"),
-	Module("dEQP-EGL",		"egl",			"deqp-egl"),
-	Module("dEQP-GLES2",	"gles2",		"deqp-gles2"),
-	Module("dEQP-GLES3",	"gles3",		"deqp-gles3"),
-	Module("dEQP-GLES31",	"gles31",		"deqp-gles31"),
+	Module("dE-IT",			"internal",								"de-internal-tests"),
+	Module("dEQP-EGL",		"egl",									"deqp-egl"),
+	Module("dEQP-GLES2",	"gles2",								"deqp-gles2"),
+	Module("dEQP-GLES3",	"gles3",								"deqp-gles3"),
+	Module("dEQP-GLES31",	"gles31",								"deqp-gles31"),
+	Module("dEQP-VK",		"../external/vulkancts/modules/vulkan",	"deqp-vk"),
 ]
 
 DEFAULT_BUILD_DIR	= os.path.join(tempfile.gettempdir(), "deqp-caselists", "{targetName}-{buildType}")
diff --git a/scripts/mustpass.py b/scripts/mustpass.py
new file mode 100644
index 0000000..328e663
--- /dev/null
+++ b/scripts/mustpass.py
@@ -0,0 +1,456 @@
+# -*- coding: utf-8 -*-
+
+#-------------------------------------------------------------------------
+# drawElements Quality Program utilities
+# --------------------------------------
+#
+# Copyright 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#-------------------------------------------------------------------------
+
+from build.common import *
+from build.config import ANY_GENERATOR
+from build.build import build
+from build_caselists import Module, getModuleByName, getBuildConfig, genCaseList, getCaseListPath, DEFAULT_BUILD_DIR, DEFAULT_TARGET
+from fnmatch import fnmatch
+from copy import copy
+
+import xml.etree.cElementTree as ElementTree
+import xml.dom.minidom as minidom
+
+APK_NAME 		= "com.drawelements.deqp.apk"
+
+GENERATED_FILE_WARNING = """
+     This file has been automatically generated. Edit with caution.
+     """
+
+class Project:
+	def __init__ (self, path, copyright = None):
+		self.path		= path
+		self.copyright	= copyright
+
+class Configuration:
+	def __init__ (self, name, filters, glconfig = None, rotation = None, surfacetype = None):
+		self.name			= name
+		self.glconfig		= glconfig
+		self.rotation		= rotation
+		self.surfacetype	= surfacetype
+		self.filters		= filters
+
+class Package:
+	def __init__ (self, module, configurations):
+		self.module			= module
+		self.configurations	= configurations
+
+class Mustpass:
+	def __init__ (self, project, version, packages):
+		self.project	= project
+		self.version	= version
+		self.packages	= packages
+
+class Filter:
+	TYPE_INCLUDE = 0
+	TYPE_EXCLUDE = 1
+
+	def __init__ (self, type, filename):
+		self.type		= type
+		self.filename	= filename
+
+class TestRoot:
+	def __init__ (self):
+		self.children	= []
+
+class TestGroup:
+	def __init__ (self, name):
+		self.name		= name
+		self.children	= []
+
+class TestCase:
+	def __init__ (self, name):
+		self.name			= name
+		self.configurations	= []
+
+class GLESVersion:
+	def __init__(self, major, minor):
+		self.major = major
+		self.minor = minor
+
+	def encode (self):
+		return (self.major << 16) | (self.minor)
+
+def getModuleGLESVersion (module):
+	versions = {
+		'dEQP-EGL':		GLESVersion(2,0),
+		'dEQP-GLES2':	GLESVersion(2,0),
+		'dEQP-GLES3':	GLESVersion(3,0),
+		'dEQP-GLES31':	GLESVersion(3,1)
+	}
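+	# Modules without a GLES version mapping (such as dEQP-VK) yield None.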
+	return versions[module.name] if module.name in versions else None
+
+def getSrcDir (mustpass):
+	return os.path.join(mustpass.project.path, mustpass.version, "src")
+
+def getTmpDir (mustpass):
+	return os.path.join(mustpass.project.path, mustpass.version, "tmp")
+
+def getModuleShorthand (module):
+	assert module.name[:5] == "dEQP-"
+	return module.name[5:].lower()
+
+def getCaseListFileName (package, configuration):
+	return "%s-%s.txt" % (getModuleShorthand(package.module), configuration.name)
+
+def getDstCaseListPath (mustpass, package, configuration):
+	return os.path.join(mustpass.project.path, mustpass.version, getCaseListFileName(package, configuration))
+
+def getCTSPackageName (package):
+	return "com.drawelements.deqp." + getModuleShorthand(package.module)
+
+def getCommandLine (config):
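+	# Build the command line from only those options that are set in the configuration.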
+	cmdLine = ""
+
+	if config.glconfig != None:
+		cmdLine += "--deqp-gl-config-name=%s " % config.glconfig
+
+	if config.rotation != None:
+		cmdLine += "--deqp-screen-rotation=%s " % config.rotation
+
+	if config.surfacetype != None:
+		cmdLine += "--deqp-surface-type=%s " % config.surfacetype
+
+	cmdLine += "--deqp-watchdog=enable"
+
+	return cmdLine
+
+def readCaseList (filename):
+	cases = []
+	with open(filename, 'rb') as f:
+		for line in f:
+			if line[:6] == "TEST: ":
+				cases.append(line[6:].strip())
+	return cases
+
+def getCaseList (buildCfg, generator, module):
+	build(buildCfg, generator, [module.binName])
+	genCaseList(buildCfg, generator, module, "txt")
+	return readCaseList(getCaseListPath(buildCfg, module, "txt"))
+
+def readPatternList (filename):
+	ptrns = []
+	with open(filename, 'rb') as f:
+		for line in f:
+			line = line.strip()
+			if len(line) > 0 and line[0] != '#':
+				ptrns.append(line)
+	return ptrns
+
+def applyPatterns (caseList, patterns, filename, op):
+	matched			= set()
+	errors			= []
+	curList			= copy(caseList)
+	trivialPtrns	= [p for p in patterns if p.find('*') < 0]
+	regularPtrns	= [p for p in patterns if p.find('*') >= 0]
+
+	# Apply trivial (just case paths)
+	allCasesSet		= set(caseList)
+	for path in trivialPtrns:
+		if path in allCasesSet:
+			if path in matched:
+				errors.append((path, "Same case specified more than once"))
+			matched.add(path)
+		else:
+			errors.append((path, "Test case not found"))
+
+	curList = [c for c in curList if c not in matched]
+
+	for pattern in regularPtrns:
+		matchedThisPtrn = set()
+
+		for case in curList:
+			if fnmatch(case, pattern):
+				matchedThisPtrn.add(case)
+
+		if len(matchedThisPtrn) == 0:
+			errors.append((pattern, "Pattern didn't match any cases"))
+
+		matched	= matched | matchedThisPtrn
+		curList = [c for c in curList if c not in matched]
+
+	for pattern, reason in errors:
+		print "ERROR: %s: %s" % (reason, pattern)
+
+	if len(errors) > 0:
+		die("Found %s invalid patterns while processing file %s" % (len(errors), filename))
+
+	return [c for c in caseList if op(c in matched)]
+
+def applyInclude (caseList, patterns, filename):
+	return applyPatterns(caseList, patterns, filename, lambda b: b)
+
+def applyExclude (caseList, patterns, filename):
+	return applyPatterns(caseList, patterns, filename, lambda b: not b)
+
+def readPatternLists (mustpass):
+	lists = {}
+	for package in mustpass.packages:
+		for cfg in package.configurations:
+			for filter in cfg.filters:
+				if not filter.filename in lists:
+					lists[filter.filename] = readPatternList(os.path.join(getSrcDir(mustpass), filter.filename))
+	return lists
+
+def applyFilters (caseList, patternLists, filters):
+	res = copy(caseList)
+	for filter in filters:
+		ptrnList = patternLists[filter.filename]
+		if filter.type == Filter.TYPE_INCLUDE:
+			res = applyInclude(res, ptrnList, filter.filename)
+		else:
+			assert filter.type == Filter.TYPE_EXCLUDE
+			res = applyExclude(res, ptrnList, filter.filename)
+	return res
+
+def appendToHierarchy (root, casePath):
+	def findChild (node, name):
+		for child in node.children:
+			if child.name == name:
+				return child
+		return None
+
+	curNode		= root
+	components	= casePath.split('.')
+
+	for component in components[:-1]:
+		nextNode = findChild(curNode, component)
+		if not nextNode:
+			nextNode = TestGroup(component)
+			curNode.children.append(nextNode)
+		curNode = nextNode
+
+	if not findChild(curNode, components[-1]):
+		curNode.children.append(TestCase(components[-1]))
+
+def buildTestHierachy (caseList):
+	root = TestRoot()
+	for case in caseList:
+		appendToHierarchy(root, case)
+	return root
+
+def buildTestCaseMap (root):
+	caseMap = {}
+
+	def recursiveBuild (curNode, prefix):
+		curPath = prefix + curNode.name
+		if isinstance(curNode, TestCase):
+			caseMap[curPath] = curNode
+		else:
+			for child in curNode.children:
+				recursiveBuild(child, curPath + '.')
+
+	for child in root.children:
+		recursiveBuild(child, '')
+
+	return caseMap
+
+def include (filename):
+	return Filter(Filter.TYPE_INCLUDE, filename)
+
+def exclude (filename):
+	return Filter(Filter.TYPE_EXCLUDE, filename)
+
+def insertXMLHeaders (mustpass, doc):
+	if mustpass.project.copyright != None:
+		doc.insert(0, ElementTree.Comment(mustpass.project.copyright))
+	doc.insert(1, ElementTree.Comment(GENERATED_FILE_WARNING))
+
+def prettifyXML (doc):
+	uglyString	= ElementTree.tostring(doc, 'utf-8')
+	reparsed	= minidom.parseString(uglyString)
+	return reparsed.toprettyxml(indent='\t', encoding='utf-8')
+
+def genCTSPackageXML (mustpass, package, root):
+	def isLeafGroup (testGroup):
+		numGroups	= 0
+		numTests	= 0
+
+		for child in testGroup.children:
+			if isinstance(child, TestCase):
+				numTests += 1
+			else:
+				numGroups += 1
+
+		assert numGroups + numTests > 0
+
+		if numGroups > 0 and numTests > 0:
+			die("Mixed groups and cases in %s" % testGroup.name)
+
+		return numGroups == 0
+
+	def makeConfiguration (parentElem, config):
+		attributes = {}
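+		# Emit only the attributes that are set for this configuration.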
+
+		if config.glconfig != None:
+			attributes['glconfig'] = config.glconfig
+
+		if config.rotation != None:
+			attributes['rotation'] = config.rotation
+
+		if config.surfacetype != None:
+			attributes['surfacetype'] = config.surfacetype
+
+		return ElementTree.SubElement(parentElem, "TestInstance", attributes)
+
+	def makeTestCase (parentElem, testCase):
+		caseElem = ElementTree.SubElement(parentElem, "Test", name=testCase.name)
+		for config in testCase.configurations:
+			makeConfiguration(caseElem, config)
+		return caseElem
+
+	def makeTestGroup (parentElem, testGroup):
+		groupElem = ElementTree.SubElement(parentElem, "TestCase" if isLeafGroup(testGroup) else "TestSuite", name=testGroup.name)
+		for child in testGroup.children:
+			if isinstance(child, TestCase):
+				makeTestCase(groupElem, child)
+			else:
+				makeTestGroup(groupElem, child)
+		return groupElem
+
+	pkgElem = ElementTree.Element("TestPackage",
+								  name				= package.module.name,
+								  appPackageName	= getCTSPackageName(package),
+								  testType			= "deqpTest")
+
+	pkgElem.set("xmlns:deqp", "http://drawelements.com/deqp")
+	insertXMLHeaders(mustpass, pkgElem)
+
+	glesVersion = getModuleGLESVersion(package.module)
+
+	if glesVersion != None:
+		pkgElem.set("deqp:glesVersion", str(glesVersion.encode()))
+
+	for child in root.children:
+		makeTestGroup(pkgElem, child)
+
+	return pkgElem
+
+def genSpecXML (mustpass):
+	mustpassElem = ElementTree.Element("Mustpass", version = mustpass.version)
+	insertXMLHeaders(mustpass, mustpassElem)
+
+	for package in mustpass.packages:
+		packageElem = ElementTree.SubElement(mustpassElem, "TestPackage", name = package.module.name)
+
+		for config in package.configurations:
+			configElem = ElementTree.SubElement(packageElem, "Configuration",
+												name			= config.name,
+												caseListFile	= getCaseListFileName(package, config),
+												commandLine		= getCommandLine(config))
+
+	return mustpassElem
+
+def addOptionElement (parent, optionName, optionValue):
+	ElementTree.SubElement(parent, "option", name=optionName, value=optionValue)
+
+def genAndroidTestXml (mustpass):
+	INSTALLER_CLASS = "com.android.compatibility.common.tradefed.targetprep.ApkInstaller"
+	RUNNER_CLASS = "com.drawelements.deqp.runner.DeqpTestRunner"
+	configElement = ElementTree.Element("configuration")
+	preparerElement = ElementTree.SubElement(configElement, "target_preparer")
+	preparerElement.set("class", INSTALLER_CLASS)
+	addOptionElement(preparerElement, "cleanup-apks", "true")
+	addOptionElement(preparerElement, "test-file-name", APK_NAME)
+
+	for package in mustpass.packages:
+		for config in package.configurations:
+			testElement = ElementTree.SubElement(configElement, "test")
+			testElement.set("class", RUNNER_CLASS)
+			addOptionElement(testElement, "deqp-package", package.module.name)
+			addOptionElement(testElement, "deqp-caselist-file", getCaseListFileName(package, config))
+			# \todo [2015-10-16 kalle]: Replace with just command line? - requires simplifications in the runner/tests as well.
+			if config.glconfig != None:
+				addOptionElement(testElement, "deqp-gl-config-name", config.glconfig)
+
+			if config.surfacetype != None:
+				addOptionElement(testElement, "deqp-surface-type", config.surfacetype)
+
+			if config.rotation != None:
+				addOptionElement(testElement, "deqp-screen-rotation", config.rotation)
+
+	insertXMLHeaders(mustpass, configElement)
+
+	return configElement
+
+def genMustpass (mustpass, moduleCaseLists):
+	print "Generating mustpass '%s'" % mustpass.version
+
+	patternLists = readPatternLists(mustpass)
+
+	for package in mustpass.packages:
+		allCasesInPkg		= moduleCaseLists[package.module]
+		matchingByConfig	= {}
+		allMatchingSet		= set()
+
+		for config in package.configurations:
+			filtered	= applyFilters(allCasesInPkg, patternLists, config.filters)
+			dstFile		= getDstCaseListPath(mustpass, package, config)
+
+			print "  Writing deqp caselist: " + dstFile
+			writeFile(dstFile, "\n".join(filtered) + "\n")
+
+			matchingByConfig[config]	= filtered
+			allMatchingSet				= allMatchingSet | set(filtered)
+
+		allMatchingCases	= [c for c in allCasesInPkg if c in allMatchingSet] # To preserve ordering
+		root				= buildTestHierachy(allMatchingCases)
+		testCaseMap			= buildTestCaseMap(root)
+
+		for config in package.configurations:
+			for case in matchingByConfig[config]:
+				testCaseMap[case].configurations.append(config)
+
+		# NOTE: CTS v2 does not need package XML files. Remove when transition is complete.
+		packageXml	= genCTSPackageXML(mustpass, package, root)
+		xmlFilename	= os.path.join(mustpass.project.path, mustpass.version, getCTSPackageName(package) + ".xml")
+
+		print "  Writing CTS caselist: " + xmlFilename
+		writeFile(xmlFilename, prettifyXML(packageXml))
+
+	specXML			= genSpecXML(mustpass)
+	specFilename	= os.path.join(mustpass.project.path, mustpass.version, "mustpass.xml")
+
+	print "  Writing spec: " + specFilename
+	writeFile(specFilename, prettifyXML(specXML))
+
+	# TODO: Which is the best selector mechanism?
+	if (mustpass.version == "mnc"):
+		androidTestXML		= genAndroidTestXml(mustpass)
+		androidTestFilename	= os.path.join(mustpass.project.path, "AndroidTest.xml")
+
+		print "  Writing AndroidTest.xml: " + androidTestFilename
+		writeFile(androidTestFilename, prettifyXML(androidTestXML))
+
+	print "Done!"
+
+def genMustpassLists (mustpassLists, generator, buildCfg):
+	moduleCaseLists = {}
+
+	# Getting case lists involves invoking build, so we want to cache the results
+	for mustpass in mustpassLists:
+		for package in mustpass.packages:
+			if not package.module in moduleCaseLists:
+				moduleCaseLists[package.module] = getCaseList(buildCfg, generator, package.module)
+
+	for mustpass in mustpassLists:
+		genMustpass(mustpass, moduleCaseLists)